From fe0bf509f528dbdc50c7182f81057c6a4e15e4bd Mon Sep 17 00:00:00 2001 From: Rob Austein Date: Sat, 5 Apr 2014 22:42:12 +0000 Subject: Source tree reorg, phase 1. Almost everything moved, no file contents changed. svn path=/branches/tk685/; revision=5757 --- ca/Doxyfile | 1705 ++++ ca/Makefile.in | 339 + ca/README | 11 + ca/doc/README | 16 + ca/doc/mainpage.py | 71 + ca/doc/pubd-bpki.dot | 42 + ca/doc/rpkid-bpki.dot | 76 + ca/examples/asns.csv | 5 + ca/examples/prefixes.csv | 8 + ca/examples/roas.csv | 5 + ca/examples/rsyncd.conf | 53 + ca/irbe_cli | 388 + ca/irdbd | 21 + ca/pubd | 21 + ca/rootd | 21 + ca/rpki-confgen | 291 + ca/rpki-confgen.xml | 900 ++ ca/rpki-manage | 13 + ca/rpki-sql-backup | 53 + ca/rpki-sql-setup | 311 + ca/rpki-start-servers | 76 + ca/rpki.wsgi | 45 + ca/rpkic | 21 + ca/rpkid | 21 + ca/rpkigui-apache-conf-gen | 483 + ca/rpkigui-check-expired | 61 + ca/rpkigui-import-routes | 115 + ca/rpkigui-query-routes | 66 + ca/rpkigui-rcynic | 54 + ca/tests/Makefile.in | 91 + ca/tests/left-right-protocol-samples.xml | 1093 +++ ca/tests/myrpki-xml-parse-test.py | 101 + ca/tests/old_irdbd.py | 19 + ca/tests/old_irdbd.sql | 143 + ca/tests/publication-protocol-samples.xml | 370 + ca/tests/rcynic.conf | 14 + ca/tests/revoke.yaml | 420 + ca/tests/rootd.yaml | 24 + ca/tests/rpki | 1 + ca/tests/smoketest.1.yaml | 89 + ca/tests/smoketest.2.yaml | 126 + ca/tests/smoketest.3.yaml | 81 + ca/tests/smoketest.4.yaml | 72 + ca/tests/smoketest.5.yaml | 65 + ca/tests/smoketest.6.yaml | 81 + ca/tests/smoketest.7.yaml | 77 + ca/tests/smoketest.8.yaml | 41 + ca/tests/smoketest.9.yaml | 849 ++ ca/tests/smoketest.clean.sql | 54 + ca/tests/smoketest.py | 1630 ++++ ca/tests/smoketest.setup.sql | 112 + ca/tests/split-protocol-samples.xsl | 40 + ca/tests/sql-cleaner.py | 61 + ca/tests/sql-dumper.py | 43 + ca/tests/testpoke.py | 152 + ca/tests/testpoke.xsl | 78 + ca/tests/testpoke.yaml | 24 + ca/tests/up-down-protocol-samples/Makefile | 11 + 
.../up-down-protocol-samples/error_response.xml | 9 + ca/tests/up-down-protocol-samples/issue1.xml | 25 + ca/tests/up-down-protocol-samples/issue2.xml | 24 + .../up-down-protocol-samples/issue_response.xml | 117 + ca/tests/up-down-protocol-samples/list.xml | 6 + .../up-down-protocol-samples/list_response.xml | 171 + ca/tests/up-down-protocol-samples/revoke.xml | 9 + .../up-down-protocol-samples/revoke_response.xml | 9 + ca/tests/xml-parse-test.py | 119 + ca/tests/yamlconf.py | 794 ++ ca/tests/yamltest-test-all.sh | 58 + ca/tests/yamltest.py | 875 ++ ca/upgrade-scripts/upgrade-rpkid-to-0.5709.py | 38 + ext/POW.c | 9253 ++++++++++++++++++++ potpourri/analyze-rcynic-history.py | 290 + potpourri/analyze-transition.py | 88 + potpourri/apnic-to-csv.py | 55 + potpourri/application-x-rpki-mailcap-handler.sh | 53 + potpourri/arin-to-csv.py | 114 + potpourri/cross_certify.py | 74 + potpourri/csvgrep.py | 72 + potpourri/expand-roa-prefixes.py | 79 + potpourri/extract-key.py | 64 + potpourri/fakerootd.py | 50 + potpourri/find-roa-expiration.py | 61 + potpourri/format-application-x-rpki.py | 132 + potpourri/gc_summary.awk | 72 + potpourri/gc_summary.py | 112 + potpourri/generate-ripe-root-cert.py | 57 + potpourri/gski.py | 21 + potpourri/guess-roas-from-routeviews.py | 63 + potpourri/iana-to-csv.py | 85 + potpourri/missing-oids.py | 38 + potpourri/object-dates.py | 63 + potpourri/pcap-to-xml.sh | 36 + potpourri/print-profile.py | 20 + potpourri/rcynic-diff.py | 114 + potpourri/rcynic-lta | 1055 +++ potpourri/rcynic-lta.yaml | 69 + potpourri/repo0-testbed-daily | 19 + potpourri/repo0-testbed-monthly | 22 + potpourri/repo0-testbed-weekly | 96 + potpourri/ripe-asns-to-csv.py | 108 + potpourri/ripe-prefixes-to-csv.awk | 37 + potpourri/ripe-to-csv.awk | 124 + potpourri/ripe-to-csv.py | 138 + potpourri/roa-to-irr.py | 159 + potpourri/rp-sqlite | 425 + potpourri/rp-sqlite.yaml | 53 + potpourri/rpki | 1 + potpourri/rpkidemo | 495 ++ potpourri/rpkidemo.pem | 23 + 
potpourri/rpkigui-flatten-roas.py | 37 + potpourri/rpkigui-reset-demo.py | 48 + potpourri/rpkigui-sync-users.py | 32 + potpourri/rrd-rcynic-history.py | 201 + potpourri/setup-rootd.sh | 36 + potpourri/show-cms-ee-certs.sh | 25 + potpourri/show-tracking-data.py | 39 + potpourri/signed-object-dates.py | 40 + potpourri/splitbase64.xsl | 66 + potpourri/testbed-rootcert.py | 66 + potpourri/translate-handles.py | 49 + potpourri/upgrade-add-ghostbusters.py | 73 + potpourri/verify-bpki.sh | 43 + potpourri/whack-ripe-asns.py | 83 + potpourri/whack-ripe-prefixes.py | 101 + potpourri/x509-dot.py | 170 + rcynic/Doxyfile | 1679 ---- rcynic/Makefile.in | 237 - rcynic/README | 13 - rcynic/bio_f_linebreak.c | 268 - rcynic/bio_f_linebreak.h | 10 - rcynic/defstack.h | 134 - rcynic/make-tal.sh | 42 - rcynic/rc-scripts/darwin/RCynic | 42 - rcynic/rc-scripts/darwin/StartupParameters.plist | 19 - rcynic/rc-scripts/freebsd/rc.d.rcynic | 44 - rcynic/rcynic-cron.py | 106 - rcynic/rcynic-html.py | 658 -- rcynic/rcynic-svn.py | 190 - rcynic/rcynic-text.py | 118 - rcynic/rcynic.c | 6070 ------------- rcynic/rcynic.xsl | 312 - rcynic/rpki-torrent.py | 721 -- rcynic/rules.darwin.mk | 108 - rcynic/rules.freebsd.mk | 56 - rcynic/rules.linux.mk | 92 - rcynic/rules.unknown.mk | 4 - rcynic/sample-rcynic.conf | 39 - rcynic/sample-trust-anchors/README | 26 - rcynic/sample-trust-anchors/afrinic.tal | 8 - rcynic/sample-trust-anchors/altca.tal | 9 - .../apnic-rpki-root-afrinic-origin.tal | 8 - .../apnic-rpki-root-arin-origin.tal | 8 - .../apnic-rpki-root-iana-origin.tal | 8 - .../apnic-rpki-root-lacnic-origin.tal | 8 - .../apnic-rpki-root-ripe-origin.tal | 8 - rcynic/sample-trust-anchors/apnic-testbed.tal | 9 - .../sample-trust-anchors/bbn-testbed.tal.disabled | 8 - rcynic/sample-trust-anchors/lacnic.tal | 8 - rcynic/sample-trust-anchors/ripe-ncc-root.tal | 9 - rcynic/sample-trust-anchors/ripe-pilot.tal | 7 - rcynic/sample-trust-anchors/rpki.net-testbed.tal | 9 - .../testbed-apnicrpki.tal.disabled | 8 - 
rcynic/static-rsync/Makefile.in | 44 - rcynic/static-rsync/README | 15 - rcynic/static-rsync/patches/patch-CVE-2007-4091 | 60 - rcynic/static-rsync/rsync-2.6.9.tar.gz | Bin 811841 -> 0 bytes rcynic/validation_status.py | 34 - rp/rcynic/Doxyfile | 1679 ++++ rp/rcynic/Makefile.in | 237 + rp/rcynic/README | 13 + rp/rcynic/bio_f_linebreak.c | 268 + rp/rcynic/bio_f_linebreak.h | 10 + rp/rcynic/defstack.h | 134 + rp/rcynic/make-tal.sh | 42 + rp/rcynic/rc-scripts/darwin/RCynic | 42 + .../rc-scripts/darwin/StartupParameters.plist | 19 + rp/rcynic/rc-scripts/freebsd/rc.d.rcynic | 44 + rp/rcynic/rcynic-cron | 106 + rp/rcynic/rcynic-html | 658 ++ rp/rcynic/rcynic-svn | 190 + rp/rcynic/rcynic-text | 118 + rp/rcynic/rcynic.c | 6070 +++++++++++++ rp/rcynic/rcynic.xsl | 312 + rp/rcynic/rpki-torrent.py | 721 ++ rp/rcynic/rules.darwin.mk | 108 + rp/rcynic/rules.freebsd.mk | 56 + rp/rcynic/rules.linux.mk | 92 + rp/rcynic/rules.unknown.mk | 4 + rp/rcynic/sample-rcynic.conf | 39 + rp/rcynic/sample-trust-anchors/README | 26 + rp/rcynic/sample-trust-anchors/afrinic.tal | 8 + rp/rcynic/sample-trust-anchors/altca.tal | 9 + .../apnic-rpki-root-afrinic-origin.tal | 8 + .../apnic-rpki-root-arin-origin.tal | 8 + .../apnic-rpki-root-iana-origin.tal | 8 + .../apnic-rpki-root-lacnic-origin.tal | 8 + .../apnic-rpki-root-ripe-origin.tal | 8 + rp/rcynic/sample-trust-anchors/apnic-testbed.tal | 9 + .../sample-trust-anchors/bbn-testbed.tal.disabled | 8 + rp/rcynic/sample-trust-anchors/lacnic.tal | 8 + rp/rcynic/sample-trust-anchors/ripe-ncc-root.tal | 9 + rp/rcynic/sample-trust-anchors/ripe-pilot.tal | 7 + .../sample-trust-anchors/rpki.net-testbed.tal | 9 + .../testbed-apnicrpki.tal.disabled | 8 + rp/rcynic/static-rsync/Makefile.in | 44 + rp/rcynic/static-rsync/README | 15 + rp/rcynic/static-rsync/patches/patch-CVE-2007-4091 | 60 + rp/rcynic/static-rsync/rsync-2.6.9.tar.gz | Bin 0 -> 811841 bytes rp/rcynic/validation_status | 34 + rp/rtr-origin/Makefile.in | 63 + rp/rtr-origin/README | 11 + 
rp/rtr-origin/rtr-origin | 2278 +++++ rp/rtr-origin/rules.darwin.mk | 9 + rp/rtr-origin/rules.freebsd.mk | 37 + rp/rtr-origin/rules.linux.mk | 29 + rp/rtr-origin/rules.unknown.mk | 8 + rp/rtr-origin/server.sh | 17 + rp/rtr-origin/sshd.conf | 23 + rp/utils/Makefile.in | 9 + rp/utils/README | 12 + rp/utils/dot.awk | 34 + rp/utils/find_roa.c | 356 + rp/utils/find_roa/Makefile.in | 56 + rp/utils/hashdir.c | 217 + rp/utils/hashdir/Makefile.in | 55 + rp/utils/print_roa.c | 384 + rp/utils/print_roa/Makefile.in | 52 + rp/utils/print_rpki_manifest.c | 235 + rp/utils/print_rpki_manifest/Makefile.in | 52 + rp/utils/scan_roas.c | 305 + rp/utils/scan_roas/Makefile.in | 52 + rp/utils/scan_routercerts.py | 69 + rp/utils/scan_routercerts/Makefile.in | 41 + rp/utils/strip_roa.sh | 39 + rp/utils/table.awk | 35 + rp/utils/test_roa.sh | 35 + rp/utils/uri.c | 248 + rp/utils/uri/Makefile.in | 31 + rpki/POW/__init__.py | 7 + rpki/__init__.py | 2 + rpki/adns.py | 368 + rpki/async.py | 420 + rpki/cli.py | 277 + rpki/config.py | 301 + rpki/csv_utils.py | 112 + rpki/daemonize.py | 133 + rpki/exceptions.py | 367 + rpki/gui/__init__.py | 0 rpki/gui/api/__init__.py | 0 rpki/gui/api/urls.py | 22 + rpki/gui/app/TODO | 60 + rpki/gui/app/__init__.py | 0 rpki/gui/app/admin.py | 0 rpki/gui/app/check_expired.py | 209 + rpki/gui/app/forms.py | 442 + rpki/gui/app/glue.py | 132 + rpki/gui/app/migrations/0001_initial.py | 192 + .../0002_auto__add_field_resourcecert_conf.py | 117 + .../app/migrations/0003_set_conf_from_parent.py | 116 + .../0004_auto__chg_field_resourcecert_conf.py | 115 + .../0005_auto__chg_field_resourcecert_parent.py | 115 + rpki/gui/app/migrations/0006_add_conf_acl.py | 168 + rpki/gui/app/migrations/0007_default_acls.py | 165 + rpki/gui/app/migrations/0008_add_alerts.py | 176 + rpki/gui/app/migrations/__init__.py | 0 rpki/gui/app/models.py | 420 + rpki/gui/app/range_list.py | 252 + rpki/gui/app/static/css/bootstrap.min.css | 9 + .../app/static/img/glyphicons-halflings-white.png | Bin 0 
-> 8777 bytes rpki/gui/app/static/img/glyphicons-halflings.png | Bin 0 -> 12799 bytes rpki/gui/app/static/img/sui-riu.ico | Bin 0 -> 6126 bytes rpki/gui/app/static/js/bootstrap.min.js | 6 + rpki/gui/app/static/js/jquery-1.8.3.min.js | 2 + rpki/gui/app/templates/404.html | 11 + rpki/gui/app/templates/500.html | 11 + .../gui/app/templates/app/alert_confirm_clear.html | 21 + .../app/templates/app/alert_confirm_delete.html | 17 + rpki/gui/app/templates/app/alert_detail.html | 31 + rpki/gui/app/templates/app/alert_list.html | 31 + rpki/gui/app/templates/app/app_base.html | 31 + rpki/gui/app/templates/app/app_confirm_delete.html | 21 + rpki/gui/app/templates/app/app_form.html | 19 + rpki/gui/app/templates/app/bootstrap_form.html | 26 + rpki/gui/app/templates/app/child_detail.html | 48 + rpki/gui/app/templates/app/client_detail.html | 25 + rpki/gui/app/templates/app/client_list.html | 22 + rpki/gui/app/templates/app/conf_empty.html | 17 + rpki/gui/app/templates/app/conf_list.html | 17 + rpki/gui/app/templates/app/dashboard.html | 230 + .../templates/app/ghostbuster_confirm_delete.html | 20 + .../templates/app/ghostbusterrequest_detail.html | 64 + .../app/templates/app/import_resource_form.html | 9 + .../app/templates/app/object_confirm_delete.html | 21 + rpki/gui/app/templates/app/parent_detail.html | 67 + rpki/gui/app/templates/app/pubclient_list.html | 10 + rpki/gui/app/templates/app/repository_detail.html | 19 + .../app/templates/app/resource_holder_list.html | 37 + rpki/gui/app/templates/app/roa_detail.html | 40 + .../templates/app/roarequest_confirm_delete.html | 59 + .../app/templates/app/roarequest_confirm_form.html | 60 + .../app/roarequest_confirm_multi_form.html | 66 + rpki/gui/app/templates/app/roarequest_form.html | 50 + .../app/templates/app/roarequest_multi_form.html | 28 + rpki/gui/app/templates/app/route_detail.html | 58 + rpki/gui/app/templates/app/routes_view.html | 55 + rpki/gui/app/templates/app/user_list.html | 37 + rpki/gui/app/templates/base.html | 
63 + rpki/gui/app/templates/registration/login.html | 25 + rpki/gui/app/templatetags/__init__.py | 0 rpki/gui/app/templatetags/app_extras.py | 58 + rpki/gui/app/templatetags/bootstrap_pager.py | 55 + rpki/gui/app/timestamp.py | 25 + rpki/gui/app/urls.py | 81 + rpki/gui/app/views.py | 1314 +++ rpki/gui/cacheview/__init__.py | 0 rpki/gui/cacheview/forms.py | 51 + rpki/gui/cacheview/misc.py | 31 + rpki/gui/cacheview/models.py | 237 + .../templates/cacheview/addressrange_detail.html | 18 + .../templates/cacheview/cacheview_base.html | 10 + .../cacheview/templates/cacheview/cert_detail.html | 105 + .../templates/cacheview/ghostbuster_detail.html | 13 + .../templates/cacheview/global_summary.html | 26 + .../templates/cacheview/query_result.html | 21 + .../cacheview/templates/cacheview/roa_detail.html | 18 + .../cacheview/templates/cacheview/search_form.html | 17 + .../templates/cacheview/search_result.html | 42 + .../templates/cacheview/signedobject_detail.html | 58 + rpki/gui/cacheview/tests.py | 23 + rpki/gui/cacheview/urls.py | 32 + rpki/gui/cacheview/util.py | 432 + rpki/gui/cacheview/views.py | 172 + rpki/gui/decorators.py | 31 + rpki/gui/default_settings.py | 171 + rpki/gui/models.py | 150 + rpki/gui/routeview/__init__.py | 0 rpki/gui/routeview/api.py | 69 + rpki/gui/routeview/models.py | 81 + rpki/gui/routeview/util.py | 236 + rpki/gui/script_util.py | 43 + rpki/gui/urls.py | 36 + rpki/gui/views.py | 30 + rpki/http.py | 1070 +++ rpki/ipaddrs.py | 137 + rpki/irdb/__init__.py | 26 + rpki/irdb/models.py | 646 ++ rpki/irdb/router.py | 95 + rpki/irdb/zookeeper.py | 1682 ++++ rpki/irdbd.py | 266 + rpki/left_right.py | 1300 +++ rpki/log.py | 199 + rpki/myrpki.py | 23 + rpki/mysql_import.py | 65 + rpki/oids.py | 101 + rpki/old_irdbd.py | 325 + rpki/pubd.py | 174 + rpki/publication.py | 466 + rpki/rcynic.py | 275 + rpki/relaxng.py | 2441 ++++++ rpki/resource_set.py | 1148 +++ rpki/rootd.py | 385 + rpki/rpkic.py | 877 ++ rpki/rpkid.py | 2500 ++++++ rpki/rpkid_tasks.py | 750 
++ rpki/sql.py | 424 + rpki/sql_schemas.py | 319 + rpki/sundial.py | 289 + rpki/up_down.py | 732 ++ rpki/x509.py | 2031 +++++ rpki/xml_utils.py | 494 ++ rpkid/Doxyfile | 1705 ---- rpkid/Makefile.in | 339 - rpkid/README | 11 - rpkid/doc/README | 16 - rpkid/doc/mainpage.py | 71 - rpkid/doc/pubd-bpki.dot | 42 - rpkid/doc/rpkid-bpki.dot | 76 - rpkid/examples/asns.csv | 5 - rpkid/examples/prefixes.csv | 8 - rpkid/examples/roas.csv | 5 - rpkid/examples/rsyncd.conf | 53 - rpkid/ext/POW.c | 9253 -------------------- rpkid/irbe_cli | 388 - rpkid/irdbd | 21 - rpkid/left-right-schema.rnc | 323 - rpkid/left-right-schema.rng | 1089 --- rpkid/myrpki.rnc | 164 - rpkid/myrpki.rng | 379 - rpkid/portal-gui/rpki.wsgi | 45 - rpkid/portal-gui/scripts/rpki-manage | 13 - rpkid/portal-gui/scripts/rpkigui-apache-conf-gen | 483 - rpkid/portal-gui/scripts/rpkigui-check-expired | 61 - rpkid/portal-gui/scripts/rpkigui-flatten-roas.py | 37 - rpkid/portal-gui/scripts/rpkigui-import-routes | 115 - rpkid/portal-gui/scripts/rpkigui-query-routes | 66 - rpkid/portal-gui/scripts/rpkigui-rcynic | 54 - rpkid/portal-gui/scripts/rpkigui-reset-demo.py | 48 - rpkid/portal-gui/scripts/rpkigui-sync-users.py | 32 - rpkid/pubd | 21 - rpkid/pubd.sql | 59 - rpkid/publication-schema.rnc | 137 - rpkid/publication-schema.rng | 574 -- rpkid/rootd | 21 - rpkid/router-certificate-schema.rnc | 61 - rpkid/router-certificate-schema.rng | 98 - rpkid/rpki-confgen | 291 - rpkid/rpki-confgen.xml | 900 -- rpkid/rpki-sql-backup | 53 - rpkid/rpki-sql-setup | 311 - rpkid/rpki-start-servers | 76 - rpkid/rpki/POW/__init__.py | 7 - rpkid/rpki/__init__.py | 2 - rpkid/rpki/adns.py | 368 - rpkid/rpki/async.py | 420 - rpkid/rpki/cli.py | 277 - rpkid/rpki/config.py | 301 - rpkid/rpki/csv_utils.py | 112 - rpkid/rpki/daemonize.py | 133 - rpkid/rpki/exceptions.py | 367 - rpkid/rpki/gui/__init__.py | 0 rpkid/rpki/gui/api/__init__.py | 0 rpkid/rpki/gui/api/urls.py | 22 - rpkid/rpki/gui/app/TODO | 60 - rpkid/rpki/gui/app/__init__.py | 0 
rpkid/rpki/gui/app/admin.py | 0 rpkid/rpki/gui/app/check_expired.py | 209 - rpkid/rpki/gui/app/forms.py | 442 - rpkid/rpki/gui/app/glue.py | 132 - rpkid/rpki/gui/app/migrations/0001_initial.py | 192 - .../0002_auto__add_field_resourcecert_conf.py | 117 - .../app/migrations/0003_set_conf_from_parent.py | 116 - .../0004_auto__chg_field_resourcecert_conf.py | 115 - .../0005_auto__chg_field_resourcecert_parent.py | 115 - rpkid/rpki/gui/app/migrations/0006_add_conf_acl.py | 168 - rpkid/rpki/gui/app/migrations/0007_default_acls.py | 165 - rpkid/rpki/gui/app/migrations/0008_add_alerts.py | 176 - rpkid/rpki/gui/app/migrations/__init__.py | 0 rpkid/rpki/gui/app/models.py | 420 - rpkid/rpki/gui/app/range_list.py | 252 - rpkid/rpki/gui/app/static/css/bootstrap.min.css | 9 - .../app/static/img/glyphicons-halflings-white.png | Bin 8777 -> 0 bytes .../gui/app/static/img/glyphicons-halflings.png | Bin 12799 -> 0 bytes rpkid/rpki/gui/app/static/img/sui-riu.ico | Bin 6126 -> 0 bytes rpkid/rpki/gui/app/static/js/bootstrap.min.js | 6 - rpkid/rpki/gui/app/static/js/jquery-1.8.3.min.js | 2 - rpkid/rpki/gui/app/templates/404.html | 11 - rpkid/rpki/gui/app/templates/500.html | 11 - .../gui/app/templates/app/alert_confirm_clear.html | 21 - .../app/templates/app/alert_confirm_delete.html | 17 - rpkid/rpki/gui/app/templates/app/alert_detail.html | 31 - rpkid/rpki/gui/app/templates/app/alert_list.html | 31 - rpkid/rpki/gui/app/templates/app/app_base.html | 31 - .../gui/app/templates/app/app_confirm_delete.html | 21 - rpkid/rpki/gui/app/templates/app/app_form.html | 19 - .../rpki/gui/app/templates/app/bootstrap_form.html | 26 - rpkid/rpki/gui/app/templates/app/child_detail.html | 48 - .../rpki/gui/app/templates/app/client_detail.html | 25 - rpkid/rpki/gui/app/templates/app/client_list.html | 22 - rpkid/rpki/gui/app/templates/app/conf_empty.html | 17 - rpkid/rpki/gui/app/templates/app/conf_list.html | 17 - rpkid/rpki/gui/app/templates/app/dashboard.html | 230 - 
.../templates/app/ghostbuster_confirm_delete.html | 20 - .../templates/app/ghostbusterrequest_detail.html | 64 - .../app/templates/app/import_resource_form.html | 9 - .../app/templates/app/object_confirm_delete.html | 21 - .../rpki/gui/app/templates/app/parent_detail.html | 67 - .../rpki/gui/app/templates/app/pubclient_list.html | 10 - .../gui/app/templates/app/repository_detail.html | 19 - .../app/templates/app/resource_holder_list.html | 37 - rpkid/rpki/gui/app/templates/app/roa_detail.html | 40 - .../templates/app/roarequest_confirm_delete.html | 59 - .../app/templates/app/roarequest_confirm_form.html | 60 - .../app/roarequest_confirm_multi_form.html | 66 - .../gui/app/templates/app/roarequest_form.html | 50 - .../app/templates/app/roarequest_multi_form.html | 28 - rpkid/rpki/gui/app/templates/app/route_detail.html | 58 - rpkid/rpki/gui/app/templates/app/routes_view.html | 55 - rpkid/rpki/gui/app/templates/app/user_list.html | 37 - rpkid/rpki/gui/app/templates/base.html | 63 - .../rpki/gui/app/templates/registration/login.html | 25 - rpkid/rpki/gui/app/templatetags/__init__.py | 0 rpkid/rpki/gui/app/templatetags/app_extras.py | 58 - rpkid/rpki/gui/app/templatetags/bootstrap_pager.py | 55 - rpkid/rpki/gui/app/timestamp.py | 25 - rpkid/rpki/gui/app/urls.py | 81 - rpkid/rpki/gui/app/views.py | 1314 --- rpkid/rpki/gui/cacheview/__init__.py | 0 rpkid/rpki/gui/cacheview/forms.py | 51 - rpkid/rpki/gui/cacheview/misc.py | 31 - rpkid/rpki/gui/cacheview/models.py | 237 - .../templates/cacheview/addressrange_detail.html | 18 - .../templates/cacheview/cacheview_base.html | 10 - .../cacheview/templates/cacheview/cert_detail.html | 105 - .../templates/cacheview/ghostbuster_detail.html | 13 - .../templates/cacheview/global_summary.html | 26 - .../templates/cacheview/query_result.html | 21 - .../cacheview/templates/cacheview/roa_detail.html | 18 - .../cacheview/templates/cacheview/search_form.html | 17 - .../templates/cacheview/search_result.html | 42 - 
.../templates/cacheview/signedobject_detail.html | 58 - rpkid/rpki/gui/cacheview/tests.py | 23 - rpkid/rpki/gui/cacheview/urls.py | 32 - rpkid/rpki/gui/cacheview/util.py | 432 - rpkid/rpki/gui/cacheview/views.py | 172 - rpkid/rpki/gui/decorators.py | 31 - rpkid/rpki/gui/default_settings.py | 171 - rpkid/rpki/gui/models.py | 150 - rpkid/rpki/gui/routeview/__init__.py | 0 rpkid/rpki/gui/routeview/api.py | 69 - rpkid/rpki/gui/routeview/models.py | 81 - rpkid/rpki/gui/routeview/util.py | 236 - rpkid/rpki/gui/script_util.py | 43 - rpkid/rpki/gui/urls.py | 36 - rpkid/rpki/gui/views.py | 30 - rpkid/rpki/http.py | 1070 --- rpkid/rpki/ipaddrs.py | 137 - rpkid/rpki/irdb/__init__.py | 26 - rpkid/rpki/irdb/models.py | 646 -- rpkid/rpki/irdb/router.py | 95 - rpkid/rpki/irdb/zookeeper.py | 1682 ---- rpkid/rpki/irdbd.py | 266 - rpkid/rpki/left_right.py | 1300 --- rpkid/rpki/log.py | 199 - rpkid/rpki/myrpki.py | 23 - rpkid/rpki/mysql_import.py | 65 - rpkid/rpki/oids.py | 101 - rpkid/rpki/old_irdbd.py | 325 - rpkid/rpki/pubd.py | 174 - rpkid/rpki/publication.py | 466 - rpkid/rpki/rcynic.py | 275 - rpkid/rpki/relaxng.py | 2441 ------ rpkid/rpki/resource_set.py | 1148 --- rpkid/rpki/rootd.py | 385 - rpkid/rpki/rpkic.py | 877 -- rpkid/rpki/rpkid.py | 2500 ------ rpkid/rpki/rpkid_tasks.py | 750 -- rpkid/rpki/sql.py | 424 - rpkid/rpki/sql_schemas.py | 319 - rpkid/rpki/sundial.py | 289 - rpkid/rpki/up_down.py | 732 -- rpkid/rpki/x509.py | 2031 ----- rpkid/rpki/xml_utils.py | 494 -- rpkid/rpkic | 21 - rpkid/rpkid | 21 - rpkid/rpkid.sql | 250 - rpkid/setup.py | 93 - rpkid/setup_extensions.py | 94 - rpkid/tests/Makefile.in | 91 - rpkid/tests/left-right-protocol-samples.xml | 1093 --- rpkid/tests/myrpki-xml-parse-test.py | 101 - rpkid/tests/old_irdbd.py | 19 - rpkid/tests/old_irdbd.sql | 143 - rpkid/tests/publication-protocol-samples.xml | 370 - rpkid/tests/rcynic.conf | 14 - rpkid/tests/revoke.yaml | 420 - rpkid/tests/rootd.yaml | 24 - rpkid/tests/rpki | 1 - rpkid/tests/smoketest.1.yaml | 
89 - rpkid/tests/smoketest.2.yaml | 126 - rpkid/tests/smoketest.3.yaml | 81 - rpkid/tests/smoketest.4.yaml | 72 - rpkid/tests/smoketest.5.yaml | 65 - rpkid/tests/smoketest.6.yaml | 81 - rpkid/tests/smoketest.7.yaml | 77 - rpkid/tests/smoketest.8.yaml | 41 - rpkid/tests/smoketest.9.yaml | 849 -- rpkid/tests/smoketest.clean.sql | 54 - rpkid/tests/smoketest.py | 1630 ---- rpkid/tests/smoketest.setup.sql | 112 - rpkid/tests/split-protocol-samples.xsl | 40 - rpkid/tests/sql-cleaner.py | 61 - rpkid/tests/sql-dumper.py | 43 - rpkid/tests/testpoke.py | 152 - rpkid/tests/testpoke.xsl | 78 - rpkid/tests/testpoke.yaml | 24 - rpkid/tests/up-down-protocol-samples/Makefile | 11 - .../up-down-protocol-samples/error_response.xml | 9 - rpkid/tests/up-down-protocol-samples/issue1.xml | 25 - rpkid/tests/up-down-protocol-samples/issue2.xml | 24 - .../up-down-protocol-samples/issue_response.xml | 117 - rpkid/tests/up-down-protocol-samples/list.xml | 6 - .../up-down-protocol-samples/list_response.xml | 171 - rpkid/tests/up-down-protocol-samples/revoke.xml | 9 - .../up-down-protocol-samples/revoke_response.xml | 9 - rpkid/tests/xml-parse-test.py | 119 - rpkid/tests/yamlconf.py | 794 -- rpkid/tests/yamltest-test-all.sh | 58 - rpkid/tests/yamltest.py | 875 -- rpkid/up-down-schema.rnc | 113 - rpkid/up-down-schema.rng | 277 - rpkid/upgrade-scripts/upgrade-rpkid-to-0.5709.py | 38 - rtr-origin/Makefile.in | 63 - rtr-origin/README | 11 - rtr-origin/rtr-origin.py | 2278 ----- rtr-origin/rules.darwin.mk | 9 - rtr-origin/rules.freebsd.mk | 37 - rtr-origin/rules.linux.mk | 29 - rtr-origin/rules.unknown.mk | 8 - rtr-origin/server.sh | 17 - rtr-origin/sshd.conf | 23 - schemas/relaxng/left-right-schema.rnc | 323 + schemas/relaxng/left-right-schema.rng | 1089 +++ schemas/relaxng/myrpki.rnc | 164 + schemas/relaxng/myrpki.rng | 379 + schemas/relaxng/publication-schema.rnc | 137 + schemas/relaxng/publication-schema.rng | 574 ++ schemas/relaxng/router-certificate-schema.rnc | 61 + 
schemas/relaxng/router-certificate-schema.rng | 98 + schemas/relaxng/up-down-schema.rnc | 113 + schemas/relaxng/up-down-schema.rng | 277 + schemas/sql/pubd.sql | 59 + schemas/sql/rpkid.sql | 250 + scripts/analyze-rcynic-history.py | 290 - scripts/analyze-transition.py | 88 - scripts/apnic-to-csv.py | 55 - scripts/application-x-rpki-mailcap-handler.sh | 53 - scripts/arin-to-csv.py | 114 - scripts/cross_certify.py | 74 - scripts/csvgrep.py | 72 - scripts/expand-roa-prefixes.py | 79 - scripts/extract-key.py | 64 - scripts/fakerootd.py | 50 - scripts/find-roa-expiration.py | 61 - scripts/format-application-x-rpki.py | 132 - scripts/gc_summary.awk | 72 - scripts/gc_summary.py | 112 - scripts/generate-ripe-root-cert.py | 57 - scripts/gski.py | 21 - scripts/guess-roas-from-routeviews.py | 63 - scripts/iana-to-csv.py | 85 - scripts/missing-oids.py | 38 - scripts/object-dates.py | 63 - scripts/pcap-to-xml.sh | 36 - scripts/print-profile.py | 20 - scripts/rcynic-diff.py | 114 - scripts/rcynic-lta | 1055 --- scripts/rcynic-lta.yaml | 69 - scripts/repo0-testbed-daily | 19 - scripts/repo0-testbed-monthly | 22 - scripts/repo0-testbed-weekly | 96 - scripts/ripe-asns-to-csv.py | 108 - scripts/ripe-prefixes-to-csv.awk | 37 - scripts/ripe-to-csv.awk | 124 - scripts/ripe-to-csv.py | 138 - scripts/roa-to-irr.py | 159 - scripts/rp-sqlite | 425 - scripts/rp-sqlite.yaml | 53 - scripts/rpki | 1 - scripts/rpkidemo | 495 -- scripts/rpkidemo.pem | 23 - scripts/rrd-rcynic-history.py | 201 - scripts/setup-rootd.sh | 36 - scripts/show-cms-ee-certs.sh | 25 - scripts/show-tracking-data.py | 39 - scripts/signed-object-dates.py | 40 - scripts/splitbase64.xsl | 66 - scripts/testbed-rootcert.py | 66 - scripts/translate-handles.py | 49 - scripts/upgrade-add-ghostbusters.py | 73 - scripts/verify-bpki.sh | 43 - scripts/whack-ripe-asns.py | 83 - scripts/whack-ripe-prefixes.py | 101 - scripts/x509-dot.py | 170 - setup.py | 93 + setup_extensions.py | 94 + utils/Makefile.in | 9 - utils/README | 12 - 
utils/find_roa/Makefile.in | 56 - utils/find_roa/find_roa.c | 356 - utils/find_roa/test_roa.sh | 35 - utils/hashdir/Makefile.in | 55 - utils/hashdir/hashdir.c | 217 - utils/print_roa/Makefile.in | 52 - utils/print_roa/print_roa.c | 384 - utils/print_roa/strip_roa.sh | 39 - utils/print_rpki_manifest/Makefile.in | 52 - utils/print_rpki_manifest/print_rpki_manifest.c | 235 - utils/scan_roas/Makefile.in | 52 - utils/scan_roas/scan_roas.c | 305 - utils/scan_routercerts/Makefile.in | 41 - utils/scan_routercerts/scan_routercerts | 69 - utils/uri/Makefile.in | 31 - utils/uri/dot.awk | 34 - utils/uri/table.awk | 35 - utils/uri/uri.c | 248 - 686 files changed, 77263 insertions(+), 77263 deletions(-) create mode 100644 ca/Doxyfile create mode 100644 ca/Makefile.in create mode 100644 ca/README create mode 100644 ca/doc/README create mode 100644 ca/doc/mainpage.py create mode 100644 ca/doc/pubd-bpki.dot create mode 100644 ca/doc/rpkid-bpki.dot create mode 100644 ca/examples/asns.csv create mode 100644 ca/examples/prefixes.csv create mode 100644 ca/examples/roas.csv create mode 100644 ca/examples/rsyncd.conf create mode 100755 ca/irbe_cli create mode 100755 ca/irdbd create mode 100755 ca/pubd create mode 100755 ca/rootd create mode 100755 ca/rpki-confgen create mode 100644 ca/rpki-confgen.xml create mode 100755 ca/rpki-manage create mode 100755 ca/rpki-sql-backup create mode 100755 ca/rpki-sql-setup create mode 100755 ca/rpki-start-servers create mode 100644 ca/rpki.wsgi create mode 100755 ca/rpkic create mode 100755 ca/rpkid create mode 100755 ca/rpkigui-apache-conf-gen create mode 100755 ca/rpkigui-check-expired create mode 100755 ca/rpkigui-import-routes create mode 100755 ca/rpkigui-query-routes create mode 100755 ca/rpkigui-rcynic create mode 100644 ca/tests/Makefile.in create mode 100644 ca/tests/left-right-protocol-samples.xml create mode 100644 ca/tests/myrpki-xml-parse-test.py create mode 100644 ca/tests/old_irdbd.py create mode 100644 ca/tests/old_irdbd.sql create mode 
100644 ca/tests/publication-protocol-samples.xml create mode 100644 ca/tests/rcynic.conf create mode 100644 ca/tests/revoke.yaml create mode 100644 ca/tests/rootd.yaml create mode 120000 ca/tests/rpki create mode 100644 ca/tests/smoketest.1.yaml create mode 100644 ca/tests/smoketest.2.yaml create mode 100644 ca/tests/smoketest.3.yaml create mode 100644 ca/tests/smoketest.4.yaml create mode 100644 ca/tests/smoketest.5.yaml create mode 100644 ca/tests/smoketest.6.yaml create mode 100644 ca/tests/smoketest.7.yaml create mode 100644 ca/tests/smoketest.8.yaml create mode 100644 ca/tests/smoketest.9.yaml create mode 100644 ca/tests/smoketest.clean.sql create mode 100644 ca/tests/smoketest.py create mode 100644 ca/tests/smoketest.setup.sql create mode 100644 ca/tests/split-protocol-samples.xsl create mode 100644 ca/tests/sql-cleaner.py create mode 100644 ca/tests/sql-dumper.py create mode 100644 ca/tests/testpoke.py create mode 100644 ca/tests/testpoke.xsl create mode 100644 ca/tests/testpoke.yaml create mode 100644 ca/tests/up-down-protocol-samples/Makefile create mode 100644 ca/tests/up-down-protocol-samples/error_response.xml create mode 100644 ca/tests/up-down-protocol-samples/issue1.xml create mode 100644 ca/tests/up-down-protocol-samples/issue2.xml create mode 100644 ca/tests/up-down-protocol-samples/issue_response.xml create mode 100644 ca/tests/up-down-protocol-samples/list.xml create mode 100644 ca/tests/up-down-protocol-samples/list_response.xml create mode 100644 ca/tests/up-down-protocol-samples/revoke.xml create mode 100644 ca/tests/up-down-protocol-samples/revoke_response.xml create mode 100644 ca/tests/xml-parse-test.py create mode 100644 ca/tests/yamlconf.py create mode 100644 ca/tests/yamltest-test-all.sh create mode 100644 ca/tests/yamltest.py create mode 100644 ca/upgrade-scripts/upgrade-rpkid-to-0.5709.py create mode 100644 ext/POW.c create mode 100644 potpourri/analyze-rcynic-history.py create mode 100644 potpourri/analyze-transition.py create mode 
100644 potpourri/apnic-to-csv.py create mode 100755 potpourri/application-x-rpki-mailcap-handler.sh create mode 100644 potpourri/arin-to-csv.py create mode 100644 potpourri/cross_certify.py create mode 100644 potpourri/csvgrep.py create mode 100644 potpourri/expand-roa-prefixes.py create mode 100644 potpourri/extract-key.py create mode 100644 potpourri/fakerootd.py create mode 100644 potpourri/find-roa-expiration.py create mode 100644 potpourri/format-application-x-rpki.py create mode 100644 potpourri/gc_summary.awk create mode 100644 potpourri/gc_summary.py create mode 100644 potpourri/generate-ripe-root-cert.py create mode 100644 potpourri/gski.py create mode 100644 potpourri/guess-roas-from-routeviews.py create mode 100644 potpourri/iana-to-csv.py create mode 100644 potpourri/missing-oids.py create mode 100644 potpourri/object-dates.py create mode 100644 potpourri/pcap-to-xml.sh create mode 100644 potpourri/print-profile.py create mode 100644 potpourri/rcynic-diff.py create mode 100755 potpourri/rcynic-lta create mode 100644 potpourri/rcynic-lta.yaml create mode 100755 potpourri/repo0-testbed-daily create mode 100755 potpourri/repo0-testbed-monthly create mode 100755 potpourri/repo0-testbed-weekly create mode 100644 potpourri/ripe-asns-to-csv.py create mode 100644 potpourri/ripe-prefixes-to-csv.awk create mode 100644 potpourri/ripe-to-csv.awk create mode 100644 potpourri/ripe-to-csv.py create mode 100644 potpourri/roa-to-irr.py create mode 100755 potpourri/rp-sqlite create mode 100644 potpourri/rp-sqlite.yaml create mode 120000 potpourri/rpki create mode 100755 potpourri/rpkidemo create mode 100644 potpourri/rpkidemo.pem create mode 100644 potpourri/rpkigui-flatten-roas.py create mode 100644 potpourri/rpkigui-reset-demo.py create mode 100644 potpourri/rpkigui-sync-users.py create mode 100644 potpourri/rrd-rcynic-history.py create mode 100644 potpourri/setup-rootd.sh create mode 100644 potpourri/show-cms-ee-certs.sh create mode 100644 
potpourri/show-tracking-data.py create mode 100644 potpourri/signed-object-dates.py create mode 100644 potpourri/splitbase64.xsl create mode 100644 potpourri/testbed-rootcert.py create mode 100644 potpourri/translate-handles.py create mode 100644 potpourri/upgrade-add-ghostbusters.py create mode 100755 potpourri/verify-bpki.sh create mode 100644 potpourri/whack-ripe-asns.py create mode 100644 potpourri/whack-ripe-prefixes.py create mode 100644 potpourri/x509-dot.py delete mode 100644 rcynic/Doxyfile delete mode 100644 rcynic/Makefile.in delete mode 100644 rcynic/README delete mode 100644 rcynic/bio_f_linebreak.c delete mode 100644 rcynic/bio_f_linebreak.h delete mode 100644 rcynic/defstack.h delete mode 100755 rcynic/make-tal.sh delete mode 100755 rcynic/rc-scripts/darwin/RCynic delete mode 100644 rcynic/rc-scripts/darwin/StartupParameters.plist delete mode 100755 rcynic/rc-scripts/freebsd/rc.d.rcynic delete mode 100644 rcynic/rcynic-cron.py delete mode 100644 rcynic/rcynic-html.py delete mode 100644 rcynic/rcynic-svn.py delete mode 100644 rcynic/rcynic-text.py delete mode 100644 rcynic/rcynic.c delete mode 100644 rcynic/rcynic.xsl delete mode 100644 rcynic/rpki-torrent.py delete mode 100644 rcynic/rules.darwin.mk delete mode 100644 rcynic/rules.freebsd.mk delete mode 100644 rcynic/rules.linux.mk delete mode 100644 rcynic/rules.unknown.mk delete mode 100644 rcynic/sample-rcynic.conf delete mode 100644 rcynic/sample-trust-anchors/README delete mode 100644 rcynic/sample-trust-anchors/afrinic.tal delete mode 100644 rcynic/sample-trust-anchors/altca.tal delete mode 100644 rcynic/sample-trust-anchors/apnic-rpki-root-afrinic-origin.tal delete mode 100644 rcynic/sample-trust-anchors/apnic-rpki-root-arin-origin.tal delete mode 100644 rcynic/sample-trust-anchors/apnic-rpki-root-iana-origin.tal delete mode 100644 rcynic/sample-trust-anchors/apnic-rpki-root-lacnic-origin.tal delete mode 100644 rcynic/sample-trust-anchors/apnic-rpki-root-ripe-origin.tal delete mode 100644 
rcynic/sample-trust-anchors/apnic-testbed.tal delete mode 100644 rcynic/sample-trust-anchors/bbn-testbed.tal.disabled delete mode 100644 rcynic/sample-trust-anchors/lacnic.tal delete mode 100644 rcynic/sample-trust-anchors/ripe-ncc-root.tal delete mode 100644 rcynic/sample-trust-anchors/ripe-pilot.tal delete mode 100644 rcynic/sample-trust-anchors/rpki.net-testbed.tal delete mode 100644 rcynic/sample-trust-anchors/testbed-apnicrpki.tal.disabled delete mode 100644 rcynic/static-rsync/Makefile.in delete mode 100644 rcynic/static-rsync/README delete mode 100644 rcynic/static-rsync/patches/patch-CVE-2007-4091 delete mode 100644 rcynic/static-rsync/rsync-2.6.9.tar.gz delete mode 100644 rcynic/validation_status.py create mode 100644 rp/rcynic/Doxyfile create mode 100644 rp/rcynic/Makefile.in create mode 100644 rp/rcynic/README create mode 100644 rp/rcynic/bio_f_linebreak.c create mode 100644 rp/rcynic/bio_f_linebreak.h create mode 100644 rp/rcynic/defstack.h create mode 100755 rp/rcynic/make-tal.sh create mode 100755 rp/rcynic/rc-scripts/darwin/RCynic create mode 100644 rp/rcynic/rc-scripts/darwin/StartupParameters.plist create mode 100755 rp/rcynic/rc-scripts/freebsd/rc.d.rcynic create mode 100755 rp/rcynic/rcynic-cron create mode 100755 rp/rcynic/rcynic-html create mode 100755 rp/rcynic/rcynic-svn create mode 100755 rp/rcynic/rcynic-text create mode 100644 rp/rcynic/rcynic.c create mode 100644 rp/rcynic/rcynic.xsl create mode 100644 rp/rcynic/rpki-torrent.py create mode 100644 rp/rcynic/rules.darwin.mk create mode 100644 rp/rcynic/rules.freebsd.mk create mode 100644 rp/rcynic/rules.linux.mk create mode 100644 rp/rcynic/rules.unknown.mk create mode 100644 rp/rcynic/sample-rcynic.conf create mode 100644 rp/rcynic/sample-trust-anchors/README create mode 100644 rp/rcynic/sample-trust-anchors/afrinic.tal create mode 100644 rp/rcynic/sample-trust-anchors/altca.tal create mode 100644 rp/rcynic/sample-trust-anchors/apnic-rpki-root-afrinic-origin.tal create mode 100644 
rp/rcynic/sample-trust-anchors/apnic-rpki-root-arin-origin.tal create mode 100644 rp/rcynic/sample-trust-anchors/apnic-rpki-root-iana-origin.tal create mode 100644 rp/rcynic/sample-trust-anchors/apnic-rpki-root-lacnic-origin.tal create mode 100644 rp/rcynic/sample-trust-anchors/apnic-rpki-root-ripe-origin.tal create mode 100644 rp/rcynic/sample-trust-anchors/apnic-testbed.tal create mode 100644 rp/rcynic/sample-trust-anchors/bbn-testbed.tal.disabled create mode 100644 rp/rcynic/sample-trust-anchors/lacnic.tal create mode 100644 rp/rcynic/sample-trust-anchors/ripe-ncc-root.tal create mode 100644 rp/rcynic/sample-trust-anchors/ripe-pilot.tal create mode 100644 rp/rcynic/sample-trust-anchors/rpki.net-testbed.tal create mode 100644 rp/rcynic/sample-trust-anchors/testbed-apnicrpki.tal.disabled create mode 100644 rp/rcynic/static-rsync/Makefile.in create mode 100644 rp/rcynic/static-rsync/README create mode 100644 rp/rcynic/static-rsync/patches/patch-CVE-2007-4091 create mode 100644 rp/rcynic/static-rsync/rsync-2.6.9.tar.gz create mode 100755 rp/rcynic/validation_status create mode 100644 rp/rtr-origin/Makefile.in create mode 100644 rp/rtr-origin/README create mode 100755 rp/rtr-origin/rtr-origin create mode 100644 rp/rtr-origin/rules.darwin.mk create mode 100644 rp/rtr-origin/rules.freebsd.mk create mode 100644 rp/rtr-origin/rules.linux.mk create mode 100644 rp/rtr-origin/rules.unknown.mk create mode 100755 rp/rtr-origin/server.sh create mode 100644 rp/rtr-origin/sshd.conf create mode 100644 rp/utils/Makefile.in create mode 100644 rp/utils/README create mode 100644 rp/utils/dot.awk create mode 100644 rp/utils/find_roa.c create mode 100644 rp/utils/find_roa/Makefile.in create mode 100644 rp/utils/hashdir.c create mode 100644 rp/utils/hashdir/Makefile.in create mode 100644 rp/utils/print_roa.c create mode 100644 rp/utils/print_roa/Makefile.in create mode 100644 rp/utils/print_rpki_manifest.c create mode 100644 rp/utils/print_rpki_manifest/Makefile.in create mode 100644 
rp/utils/scan_roas.c create mode 100644 rp/utils/scan_roas/Makefile.in create mode 100755 rp/utils/scan_routercerts.py create mode 100644 rp/utils/scan_routercerts/Makefile.in create mode 100755 rp/utils/strip_roa.sh create mode 100644 rp/utils/table.awk create mode 100644 rp/utils/test_roa.sh create mode 100644 rp/utils/uri.c create mode 100644 rp/utils/uri/Makefile.in create mode 100644 rpki/POW/__init__.py create mode 100644 rpki/__init__.py create mode 100644 rpki/adns.py create mode 100644 rpki/async.py create mode 100644 rpki/cli.py create mode 100644 rpki/config.py create mode 100644 rpki/csv_utils.py create mode 100644 rpki/daemonize.py create mode 100644 rpki/exceptions.py create mode 100644 rpki/gui/__init__.py create mode 100644 rpki/gui/api/__init__.py create mode 100644 rpki/gui/api/urls.py create mode 100644 rpki/gui/app/TODO create mode 100644 rpki/gui/app/__init__.py create mode 100644 rpki/gui/app/admin.py create mode 100644 rpki/gui/app/check_expired.py create mode 100644 rpki/gui/app/forms.py create mode 100644 rpki/gui/app/glue.py create mode 100644 rpki/gui/app/migrations/0001_initial.py create mode 100644 rpki/gui/app/migrations/0002_auto__add_field_resourcecert_conf.py create mode 100644 rpki/gui/app/migrations/0003_set_conf_from_parent.py create mode 100644 rpki/gui/app/migrations/0004_auto__chg_field_resourcecert_conf.py create mode 100644 rpki/gui/app/migrations/0005_auto__chg_field_resourcecert_parent.py create mode 100644 rpki/gui/app/migrations/0006_add_conf_acl.py create mode 100644 rpki/gui/app/migrations/0007_default_acls.py create mode 100644 rpki/gui/app/migrations/0008_add_alerts.py create mode 100644 rpki/gui/app/migrations/__init__.py create mode 100644 rpki/gui/app/models.py create mode 100755 rpki/gui/app/range_list.py create mode 100644 rpki/gui/app/static/css/bootstrap.min.css create mode 100644 rpki/gui/app/static/img/glyphicons-halflings-white.png create mode 100644 rpki/gui/app/static/img/glyphicons-halflings.png create 
mode 100644 rpki/gui/app/static/img/sui-riu.ico create mode 100644 rpki/gui/app/static/js/bootstrap.min.js create mode 100644 rpki/gui/app/static/js/jquery-1.8.3.min.js create mode 100644 rpki/gui/app/templates/404.html create mode 100644 rpki/gui/app/templates/500.html create mode 100644 rpki/gui/app/templates/app/alert_confirm_clear.html create mode 100644 rpki/gui/app/templates/app/alert_confirm_delete.html create mode 100644 rpki/gui/app/templates/app/alert_detail.html create mode 100644 rpki/gui/app/templates/app/alert_list.html create mode 100644 rpki/gui/app/templates/app/app_base.html create mode 100644 rpki/gui/app/templates/app/app_confirm_delete.html create mode 100644 rpki/gui/app/templates/app/app_form.html create mode 100644 rpki/gui/app/templates/app/bootstrap_form.html create mode 100644 rpki/gui/app/templates/app/child_detail.html create mode 100644 rpki/gui/app/templates/app/client_detail.html create mode 100644 rpki/gui/app/templates/app/client_list.html create mode 100644 rpki/gui/app/templates/app/conf_empty.html create mode 100644 rpki/gui/app/templates/app/conf_list.html create mode 100644 rpki/gui/app/templates/app/dashboard.html create mode 100644 rpki/gui/app/templates/app/ghostbuster_confirm_delete.html create mode 100644 rpki/gui/app/templates/app/ghostbusterrequest_detail.html create mode 100644 rpki/gui/app/templates/app/import_resource_form.html create mode 100644 rpki/gui/app/templates/app/object_confirm_delete.html create mode 100644 rpki/gui/app/templates/app/parent_detail.html create mode 100644 rpki/gui/app/templates/app/pubclient_list.html create mode 100644 rpki/gui/app/templates/app/repository_detail.html create mode 100644 rpki/gui/app/templates/app/resource_holder_list.html create mode 100644 rpki/gui/app/templates/app/roa_detail.html create mode 100644 rpki/gui/app/templates/app/roarequest_confirm_delete.html create mode 100644 rpki/gui/app/templates/app/roarequest_confirm_form.html create mode 100644 
rpki/gui/app/templates/app/roarequest_confirm_multi_form.html create mode 100644 rpki/gui/app/templates/app/roarequest_form.html create mode 100644 rpki/gui/app/templates/app/roarequest_multi_form.html create mode 100644 rpki/gui/app/templates/app/route_detail.html create mode 100644 rpki/gui/app/templates/app/routes_view.html create mode 100644 rpki/gui/app/templates/app/user_list.html create mode 100644 rpki/gui/app/templates/base.html create mode 100644 rpki/gui/app/templates/registration/login.html create mode 100644 rpki/gui/app/templatetags/__init__.py create mode 100644 rpki/gui/app/templatetags/app_extras.py create mode 100644 rpki/gui/app/templatetags/bootstrap_pager.py create mode 100644 rpki/gui/app/timestamp.py create mode 100644 rpki/gui/app/urls.py create mode 100644 rpki/gui/app/views.py create mode 100644 rpki/gui/cacheview/__init__.py create mode 100644 rpki/gui/cacheview/forms.py create mode 100644 rpki/gui/cacheview/misc.py create mode 100644 rpki/gui/cacheview/models.py create mode 100644 rpki/gui/cacheview/templates/cacheview/addressrange_detail.html create mode 100644 rpki/gui/cacheview/templates/cacheview/cacheview_base.html create mode 100644 rpki/gui/cacheview/templates/cacheview/cert_detail.html create mode 100644 rpki/gui/cacheview/templates/cacheview/ghostbuster_detail.html create mode 100644 rpki/gui/cacheview/templates/cacheview/global_summary.html create mode 100644 rpki/gui/cacheview/templates/cacheview/query_result.html create mode 100644 rpki/gui/cacheview/templates/cacheview/roa_detail.html create mode 100644 rpki/gui/cacheview/templates/cacheview/search_form.html create mode 100644 rpki/gui/cacheview/templates/cacheview/search_result.html create mode 100644 rpki/gui/cacheview/templates/cacheview/signedobject_detail.html create mode 100644 rpki/gui/cacheview/tests.py create mode 100644 rpki/gui/cacheview/urls.py create mode 100644 rpki/gui/cacheview/util.py create mode 100644 rpki/gui/cacheview/views.py create mode 100644 
rpki/gui/decorators.py create mode 100644 rpki/gui/default_settings.py create mode 100644 rpki/gui/models.py create mode 100644 rpki/gui/routeview/__init__.py create mode 100644 rpki/gui/routeview/api.py create mode 100644 rpki/gui/routeview/models.py create mode 100644 rpki/gui/routeview/util.py create mode 100644 rpki/gui/script_util.py create mode 100644 rpki/gui/urls.py create mode 100644 rpki/gui/views.py create mode 100644 rpki/http.py create mode 100644 rpki/ipaddrs.py create mode 100644 rpki/irdb/__init__.py create mode 100644 rpki/irdb/models.py create mode 100644 rpki/irdb/router.py create mode 100644 rpki/irdb/zookeeper.py create mode 100644 rpki/irdbd.py create mode 100644 rpki/left_right.py create mode 100644 rpki/log.py create mode 100644 rpki/myrpki.py create mode 100644 rpki/mysql_import.py create mode 100644 rpki/oids.py create mode 100644 rpki/old_irdbd.py create mode 100644 rpki/pubd.py create mode 100644 rpki/publication.py create mode 100644 rpki/rcynic.py create mode 100644 rpki/relaxng.py create mode 100644 rpki/resource_set.py create mode 100644 rpki/rootd.py create mode 100644 rpki/rpkic.py create mode 100644 rpki/rpkid.py create mode 100644 rpki/rpkid_tasks.py create mode 100644 rpki/sql.py create mode 100644 rpki/sql_schemas.py create mode 100644 rpki/sundial.py create mode 100644 rpki/up_down.py create mode 100644 rpki/x509.py create mode 100644 rpki/xml_utils.py delete mode 100644 rpkid/Doxyfile delete mode 100644 rpkid/Makefile.in delete mode 100644 rpkid/README delete mode 100644 rpkid/doc/README delete mode 100644 rpkid/doc/mainpage.py delete mode 100644 rpkid/doc/pubd-bpki.dot delete mode 100644 rpkid/doc/rpkid-bpki.dot delete mode 100644 rpkid/examples/asns.csv delete mode 100644 rpkid/examples/prefixes.csv delete mode 100644 rpkid/examples/roas.csv delete mode 100644 rpkid/examples/rsyncd.conf delete mode 100644 rpkid/ext/POW.c delete mode 100755 rpkid/irbe_cli delete mode 100755 rpkid/irdbd delete mode 100644 
rpkid/left-right-schema.rnc delete mode 100644 rpkid/left-right-schema.rng delete mode 100644 rpkid/myrpki.rnc delete mode 100644 rpkid/myrpki.rng delete mode 100644 rpkid/portal-gui/rpki.wsgi delete mode 100755 rpkid/portal-gui/scripts/rpki-manage delete mode 100755 rpkid/portal-gui/scripts/rpkigui-apache-conf-gen delete mode 100755 rpkid/portal-gui/scripts/rpkigui-check-expired delete mode 100644 rpkid/portal-gui/scripts/rpkigui-flatten-roas.py delete mode 100755 rpkid/portal-gui/scripts/rpkigui-import-routes delete mode 100755 rpkid/portal-gui/scripts/rpkigui-query-routes delete mode 100755 rpkid/portal-gui/scripts/rpkigui-rcynic delete mode 100644 rpkid/portal-gui/scripts/rpkigui-reset-demo.py delete mode 100644 rpkid/portal-gui/scripts/rpkigui-sync-users.py delete mode 100755 rpkid/pubd delete mode 100644 rpkid/pubd.sql delete mode 100644 rpkid/publication-schema.rnc delete mode 100644 rpkid/publication-schema.rng delete mode 100755 rpkid/rootd delete mode 100644 rpkid/router-certificate-schema.rnc delete mode 100644 rpkid/router-certificate-schema.rng delete mode 100755 rpkid/rpki-confgen delete mode 100644 rpkid/rpki-confgen.xml delete mode 100755 rpkid/rpki-sql-backup delete mode 100755 rpkid/rpki-sql-setup delete mode 100755 rpkid/rpki-start-servers delete mode 100644 rpkid/rpki/POW/__init__.py delete mode 100644 rpkid/rpki/__init__.py delete mode 100644 rpkid/rpki/adns.py delete mode 100644 rpkid/rpki/async.py delete mode 100644 rpkid/rpki/cli.py delete mode 100644 rpkid/rpki/config.py delete mode 100644 rpkid/rpki/csv_utils.py delete mode 100644 rpkid/rpki/daemonize.py delete mode 100644 rpkid/rpki/exceptions.py delete mode 100644 rpkid/rpki/gui/__init__.py delete mode 100644 rpkid/rpki/gui/api/__init__.py delete mode 100644 rpkid/rpki/gui/api/urls.py delete mode 100644 rpkid/rpki/gui/app/TODO delete mode 100644 rpkid/rpki/gui/app/__init__.py delete mode 100644 rpkid/rpki/gui/app/admin.py delete mode 100644 rpkid/rpki/gui/app/check_expired.py delete mode 
100644 rpkid/rpki/gui/app/forms.py delete mode 100644 rpkid/rpki/gui/app/glue.py delete mode 100644 rpkid/rpki/gui/app/migrations/0001_initial.py delete mode 100644 rpkid/rpki/gui/app/migrations/0002_auto__add_field_resourcecert_conf.py delete mode 100644 rpkid/rpki/gui/app/migrations/0003_set_conf_from_parent.py delete mode 100644 rpkid/rpki/gui/app/migrations/0004_auto__chg_field_resourcecert_conf.py delete mode 100644 rpkid/rpki/gui/app/migrations/0005_auto__chg_field_resourcecert_parent.py delete mode 100644 rpkid/rpki/gui/app/migrations/0006_add_conf_acl.py delete mode 100644 rpkid/rpki/gui/app/migrations/0007_default_acls.py delete mode 100644 rpkid/rpki/gui/app/migrations/0008_add_alerts.py delete mode 100644 rpkid/rpki/gui/app/migrations/__init__.py delete mode 100644 rpkid/rpki/gui/app/models.py delete mode 100755 rpkid/rpki/gui/app/range_list.py delete mode 100644 rpkid/rpki/gui/app/static/css/bootstrap.min.css delete mode 100644 rpkid/rpki/gui/app/static/img/glyphicons-halflings-white.png delete mode 100644 rpkid/rpki/gui/app/static/img/glyphicons-halflings.png delete mode 100644 rpkid/rpki/gui/app/static/img/sui-riu.ico delete mode 100644 rpkid/rpki/gui/app/static/js/bootstrap.min.js delete mode 100644 rpkid/rpki/gui/app/static/js/jquery-1.8.3.min.js delete mode 100644 rpkid/rpki/gui/app/templates/404.html delete mode 100644 rpkid/rpki/gui/app/templates/500.html delete mode 100644 rpkid/rpki/gui/app/templates/app/alert_confirm_clear.html delete mode 100644 rpkid/rpki/gui/app/templates/app/alert_confirm_delete.html delete mode 100644 rpkid/rpki/gui/app/templates/app/alert_detail.html delete mode 100644 rpkid/rpki/gui/app/templates/app/alert_list.html delete mode 100644 rpkid/rpki/gui/app/templates/app/app_base.html delete mode 100644 rpkid/rpki/gui/app/templates/app/app_confirm_delete.html delete mode 100644 rpkid/rpki/gui/app/templates/app/app_form.html delete mode 100644 rpkid/rpki/gui/app/templates/app/bootstrap_form.html delete mode 100644 
rpkid/rpki/gui/app/templates/app/child_detail.html delete mode 100644 rpkid/rpki/gui/app/templates/app/client_detail.html delete mode 100644 rpkid/rpki/gui/app/templates/app/client_list.html delete mode 100644 rpkid/rpki/gui/app/templates/app/conf_empty.html delete mode 100644 rpkid/rpki/gui/app/templates/app/conf_list.html delete mode 100644 rpkid/rpki/gui/app/templates/app/dashboard.html delete mode 100644 rpkid/rpki/gui/app/templates/app/ghostbuster_confirm_delete.html delete mode 100644 rpkid/rpki/gui/app/templates/app/ghostbusterrequest_detail.html delete mode 100644 rpkid/rpki/gui/app/templates/app/import_resource_form.html delete mode 100644 rpkid/rpki/gui/app/templates/app/object_confirm_delete.html delete mode 100644 rpkid/rpki/gui/app/templates/app/parent_detail.html delete mode 100644 rpkid/rpki/gui/app/templates/app/pubclient_list.html delete mode 100644 rpkid/rpki/gui/app/templates/app/repository_detail.html delete mode 100644 rpkid/rpki/gui/app/templates/app/resource_holder_list.html delete mode 100644 rpkid/rpki/gui/app/templates/app/roa_detail.html delete mode 100644 rpkid/rpki/gui/app/templates/app/roarequest_confirm_delete.html delete mode 100644 rpkid/rpki/gui/app/templates/app/roarequest_confirm_form.html delete mode 100644 rpkid/rpki/gui/app/templates/app/roarequest_confirm_multi_form.html delete mode 100644 rpkid/rpki/gui/app/templates/app/roarequest_form.html delete mode 100644 rpkid/rpki/gui/app/templates/app/roarequest_multi_form.html delete mode 100644 rpkid/rpki/gui/app/templates/app/route_detail.html delete mode 100644 rpkid/rpki/gui/app/templates/app/routes_view.html delete mode 100644 rpkid/rpki/gui/app/templates/app/user_list.html delete mode 100644 rpkid/rpki/gui/app/templates/base.html delete mode 100644 rpkid/rpki/gui/app/templates/registration/login.html delete mode 100644 rpkid/rpki/gui/app/templatetags/__init__.py delete mode 100644 rpkid/rpki/gui/app/templatetags/app_extras.py delete mode 100644 
rpkid/rpki/gui/app/templatetags/bootstrap_pager.py delete mode 100644 rpkid/rpki/gui/app/timestamp.py delete mode 100644 rpkid/rpki/gui/app/urls.py delete mode 100644 rpkid/rpki/gui/app/views.py delete mode 100644 rpkid/rpki/gui/cacheview/__init__.py delete mode 100644 rpkid/rpki/gui/cacheview/forms.py delete mode 100644 rpkid/rpki/gui/cacheview/misc.py delete mode 100644 rpkid/rpki/gui/cacheview/models.py delete mode 100644 rpkid/rpki/gui/cacheview/templates/cacheview/addressrange_detail.html delete mode 100644 rpkid/rpki/gui/cacheview/templates/cacheview/cacheview_base.html delete mode 100644 rpkid/rpki/gui/cacheview/templates/cacheview/cert_detail.html delete mode 100644 rpkid/rpki/gui/cacheview/templates/cacheview/ghostbuster_detail.html delete mode 100644 rpkid/rpki/gui/cacheview/templates/cacheview/global_summary.html delete mode 100644 rpkid/rpki/gui/cacheview/templates/cacheview/query_result.html delete mode 100644 rpkid/rpki/gui/cacheview/templates/cacheview/roa_detail.html delete mode 100644 rpkid/rpki/gui/cacheview/templates/cacheview/search_form.html delete mode 100644 rpkid/rpki/gui/cacheview/templates/cacheview/search_result.html delete mode 100644 rpkid/rpki/gui/cacheview/templates/cacheview/signedobject_detail.html delete mode 100644 rpkid/rpki/gui/cacheview/tests.py delete mode 100644 rpkid/rpki/gui/cacheview/urls.py delete mode 100644 rpkid/rpki/gui/cacheview/util.py delete mode 100644 rpkid/rpki/gui/cacheview/views.py delete mode 100644 rpkid/rpki/gui/decorators.py delete mode 100644 rpkid/rpki/gui/default_settings.py delete mode 100644 rpkid/rpki/gui/models.py delete mode 100644 rpkid/rpki/gui/routeview/__init__.py delete mode 100644 rpkid/rpki/gui/routeview/api.py delete mode 100644 rpkid/rpki/gui/routeview/models.py delete mode 100644 rpkid/rpki/gui/routeview/util.py delete mode 100644 rpkid/rpki/gui/script_util.py delete mode 100644 rpkid/rpki/gui/urls.py delete mode 100644 rpkid/rpki/gui/views.py delete mode 100644 rpkid/rpki/http.py delete 
mode 100644 rpkid/rpki/ipaddrs.py delete mode 100644 rpkid/rpki/irdb/__init__.py delete mode 100644 rpkid/rpki/irdb/models.py delete mode 100644 rpkid/rpki/irdb/router.py delete mode 100644 rpkid/rpki/irdb/zookeeper.py delete mode 100644 rpkid/rpki/irdbd.py delete mode 100644 rpkid/rpki/left_right.py delete mode 100644 rpkid/rpki/log.py delete mode 100644 rpkid/rpki/myrpki.py delete mode 100644 rpkid/rpki/mysql_import.py delete mode 100644 rpkid/rpki/oids.py delete mode 100644 rpkid/rpki/old_irdbd.py delete mode 100644 rpkid/rpki/pubd.py delete mode 100644 rpkid/rpki/publication.py delete mode 100644 rpkid/rpki/rcynic.py delete mode 100644 rpkid/rpki/relaxng.py delete mode 100644 rpkid/rpki/resource_set.py delete mode 100644 rpkid/rpki/rootd.py delete mode 100644 rpkid/rpki/rpkic.py delete mode 100644 rpkid/rpki/rpkid.py delete mode 100644 rpkid/rpki/rpkid_tasks.py delete mode 100644 rpkid/rpki/sql.py delete mode 100644 rpkid/rpki/sql_schemas.py delete mode 100644 rpkid/rpki/sundial.py delete mode 100644 rpkid/rpki/up_down.py delete mode 100644 rpkid/rpki/x509.py delete mode 100644 rpkid/rpki/xml_utils.py delete mode 100755 rpkid/rpkic delete mode 100755 rpkid/rpkid delete mode 100644 rpkid/rpkid.sql delete mode 100644 rpkid/setup.py delete mode 100644 rpkid/setup_extensions.py delete mode 100644 rpkid/tests/Makefile.in delete mode 100644 rpkid/tests/left-right-protocol-samples.xml delete mode 100644 rpkid/tests/myrpki-xml-parse-test.py delete mode 100644 rpkid/tests/old_irdbd.py delete mode 100644 rpkid/tests/old_irdbd.sql delete mode 100644 rpkid/tests/publication-protocol-samples.xml delete mode 100644 rpkid/tests/rcynic.conf delete mode 100644 rpkid/tests/revoke.yaml delete mode 100644 rpkid/tests/rootd.yaml delete mode 120000 rpkid/tests/rpki delete mode 100644 rpkid/tests/smoketest.1.yaml delete mode 100644 rpkid/tests/smoketest.2.yaml delete mode 100644 rpkid/tests/smoketest.3.yaml delete mode 100644 rpkid/tests/smoketest.4.yaml delete mode 100644 
rpkid/tests/smoketest.5.yaml delete mode 100644 rpkid/tests/smoketest.6.yaml delete mode 100644 rpkid/tests/smoketest.7.yaml delete mode 100644 rpkid/tests/smoketest.8.yaml delete mode 100644 rpkid/tests/smoketest.9.yaml delete mode 100644 rpkid/tests/smoketest.clean.sql delete mode 100644 rpkid/tests/smoketest.py delete mode 100644 rpkid/tests/smoketest.setup.sql delete mode 100644 rpkid/tests/split-protocol-samples.xsl delete mode 100644 rpkid/tests/sql-cleaner.py delete mode 100644 rpkid/tests/sql-dumper.py delete mode 100644 rpkid/tests/testpoke.py delete mode 100644 rpkid/tests/testpoke.xsl delete mode 100644 rpkid/tests/testpoke.yaml delete mode 100644 rpkid/tests/up-down-protocol-samples/Makefile delete mode 100644 rpkid/tests/up-down-protocol-samples/error_response.xml delete mode 100644 rpkid/tests/up-down-protocol-samples/issue1.xml delete mode 100644 rpkid/tests/up-down-protocol-samples/issue2.xml delete mode 100644 rpkid/tests/up-down-protocol-samples/issue_response.xml delete mode 100644 rpkid/tests/up-down-protocol-samples/list.xml delete mode 100644 rpkid/tests/up-down-protocol-samples/list_response.xml delete mode 100644 rpkid/tests/up-down-protocol-samples/revoke.xml delete mode 100644 rpkid/tests/up-down-protocol-samples/revoke_response.xml delete mode 100644 rpkid/tests/xml-parse-test.py delete mode 100644 rpkid/tests/yamlconf.py delete mode 100644 rpkid/tests/yamltest-test-all.sh delete mode 100644 rpkid/tests/yamltest.py delete mode 100644 rpkid/up-down-schema.rnc delete mode 100644 rpkid/up-down-schema.rng delete mode 100644 rpkid/upgrade-scripts/upgrade-rpkid-to-0.5709.py delete mode 100644 rtr-origin/Makefile.in delete mode 100644 rtr-origin/README delete mode 100755 rtr-origin/rtr-origin.py delete mode 100644 rtr-origin/rules.darwin.mk delete mode 100644 rtr-origin/rules.freebsd.mk delete mode 100644 rtr-origin/rules.linux.mk delete mode 100644 rtr-origin/rules.unknown.mk delete mode 100755 rtr-origin/server.sh delete mode 100644 
rtr-origin/sshd.conf create mode 100644 schemas/relaxng/left-right-schema.rnc create mode 100644 schemas/relaxng/left-right-schema.rng create mode 100644 schemas/relaxng/myrpki.rnc create mode 100644 schemas/relaxng/myrpki.rng create mode 100644 schemas/relaxng/publication-schema.rnc create mode 100644 schemas/relaxng/publication-schema.rng create mode 100644 schemas/relaxng/router-certificate-schema.rnc create mode 100644 schemas/relaxng/router-certificate-schema.rng create mode 100644 schemas/relaxng/up-down-schema.rnc create mode 100644 schemas/relaxng/up-down-schema.rng create mode 100644 schemas/sql/pubd.sql create mode 100644 schemas/sql/rpkid.sql delete mode 100644 scripts/analyze-rcynic-history.py delete mode 100644 scripts/analyze-transition.py delete mode 100644 scripts/apnic-to-csv.py delete mode 100755 scripts/application-x-rpki-mailcap-handler.sh delete mode 100644 scripts/arin-to-csv.py delete mode 100644 scripts/cross_certify.py delete mode 100644 scripts/csvgrep.py delete mode 100644 scripts/expand-roa-prefixes.py delete mode 100644 scripts/extract-key.py delete mode 100644 scripts/fakerootd.py delete mode 100644 scripts/find-roa-expiration.py delete mode 100644 scripts/format-application-x-rpki.py delete mode 100644 scripts/gc_summary.awk delete mode 100644 scripts/gc_summary.py delete mode 100644 scripts/generate-ripe-root-cert.py delete mode 100644 scripts/gski.py delete mode 100644 scripts/guess-roas-from-routeviews.py delete mode 100644 scripts/iana-to-csv.py delete mode 100644 scripts/missing-oids.py delete mode 100644 scripts/object-dates.py delete mode 100644 scripts/pcap-to-xml.sh delete mode 100644 scripts/print-profile.py delete mode 100644 scripts/rcynic-diff.py delete mode 100755 scripts/rcynic-lta delete mode 100644 scripts/rcynic-lta.yaml delete mode 100755 scripts/repo0-testbed-daily delete mode 100755 scripts/repo0-testbed-monthly delete mode 100755 scripts/repo0-testbed-weekly delete mode 100644 scripts/ripe-asns-to-csv.py delete 
mode 100644 scripts/ripe-prefixes-to-csv.awk delete mode 100644 scripts/ripe-to-csv.awk delete mode 100644 scripts/ripe-to-csv.py delete mode 100644 scripts/roa-to-irr.py delete mode 100755 scripts/rp-sqlite delete mode 100644 scripts/rp-sqlite.yaml delete mode 120000 scripts/rpki delete mode 100755 scripts/rpkidemo delete mode 100644 scripts/rpkidemo.pem delete mode 100644 scripts/rrd-rcynic-history.py delete mode 100644 scripts/setup-rootd.sh delete mode 100644 scripts/show-cms-ee-certs.sh delete mode 100644 scripts/show-tracking-data.py delete mode 100644 scripts/signed-object-dates.py delete mode 100644 scripts/splitbase64.xsl delete mode 100644 scripts/testbed-rootcert.py delete mode 100644 scripts/translate-handles.py delete mode 100644 scripts/upgrade-add-ghostbusters.py delete mode 100755 scripts/verify-bpki.sh delete mode 100644 scripts/whack-ripe-asns.py delete mode 100644 scripts/whack-ripe-prefixes.py delete mode 100644 scripts/x509-dot.py create mode 100644 setup.py create mode 100644 setup_extensions.py delete mode 100644 utils/Makefile.in delete mode 100644 utils/README delete mode 100644 utils/find_roa/Makefile.in delete mode 100644 utils/find_roa/find_roa.c delete mode 100644 utils/find_roa/test_roa.sh delete mode 100644 utils/hashdir/Makefile.in delete mode 100644 utils/hashdir/hashdir.c delete mode 100644 utils/print_roa/Makefile.in delete mode 100644 utils/print_roa/print_roa.c delete mode 100755 utils/print_roa/strip_roa.sh delete mode 100644 utils/print_rpki_manifest/Makefile.in delete mode 100644 utils/print_rpki_manifest/print_rpki_manifest.c delete mode 100644 utils/scan_roas/Makefile.in delete mode 100644 utils/scan_roas/scan_roas.c delete mode 100644 utils/scan_routercerts/Makefile.in delete mode 100755 utils/scan_routercerts/scan_routercerts delete mode 100644 utils/uri/Makefile.in delete mode 100644 utils/uri/dot.awk delete mode 100644 utils/uri/table.awk delete mode 100644 utils/uri/uri.c diff --git a/ca/Doxyfile b/ca/Doxyfile new file 
mode 100644 index 00000000..33f39f4b --- /dev/null +++ b/ca/Doxyfile @@ -0,0 +1,1705 @@ +# Doxyfile 1.7.3 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" "). + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "RPKI Engine" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = 1.0 + +# Using the PROJECT_BRIEF tag one can provide an optional one line description for a project that appears at the top of each page and should give viewer a quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = + +# With the PROJECT_LOGO tag one can specify an logo or icon that is +# included in the documentation. The maximum height of the logo should not +# exceed 55 pixels and the maximum width should not exceed 200 pixels. +# Doxygen will copy the logo to the output directory. 
+ +PROJECT_LOGO = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = doc + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrillic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. 
+# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = NO + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. 
+# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful if your file system +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = NO + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. 
+ +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 8 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = YES + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. 
Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this +# tag. The format is ext=language, where ext is a file extension, and language +# is one of the parsers supported by doxygen: IDL, Java, Javascript, CSharp, C, +# C++, D, PHP, Objective-C, Python, Fortran, VHDL, C, C++. For instance to make +# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C +# (default is Fortran), use: inc=Fortran f=C. Note that for custom extensions +# you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also makes the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen replace the get and set methods by a property in the +# documentation. 
This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penalty. 
+# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will roughly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = YES + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. 
+ +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespaces are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = NO + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. 
If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = YES + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then Doxygen +# will list include files with double quotes in the documentation +# rather than with sharp brackets. + +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = YES + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen +# will sort the (brief and detailed) documentation of class members so that +# constructors and destructors are listed first. If set to NO (the default) +# the constructors will appear in the respective orders defined by +# SORT_MEMBER_DOCS and SORT_BRIEF_DOCS. 
+# This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO +# and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper type resolution of all parameters of a function it will reject a +# match between the prototype and the implementation of a member function even if there is only one candidate or it is obvious which candidate to choose by doing a simple string match. By disabling STRICT_PROTO_MATCHING doxygen +# will still accept a match between prototype and implementation in such cases. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. 
+ +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or macro consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and macros in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = NO + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. 
+
+SHOW_NAMESPACES = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of
+# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
+# provided by doxygen. Whatever the program writes to standard output
+# is used as the file version. See the manual for examples.
+
+FILE_VERSION_FILTER = "perl -e '@a = split(q( ), qx(svn stat -v $ARGV[0])); shift @a until $a[0] =~ /^[0-9]+$/ or @a == 0; shift @a; print shift(@a), qq(\n)'"
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option.
+# You can optionally specify a file name after the option, if omitted
+# DoxygenLayout.xml will be used as the name of the layout file.
+
+LAYOUT_FILE = 
+
+#---------------------------------------------------------------------------
+# configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated
+# by doxygen. Possible values are YES and NO. If left blank NO is used.
+
+QUIET = YES
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated by doxygen. Possible values are YES and NO. If left blank
+# NO is used.
+
+WARNINGS = YES
+
+# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
+# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
+# automatically be disabled.
+ +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# The WARN_NO_PARAMDOC option can be enabled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. 
+ +INPUT = doc \ + rpki \ + rpki/gui \ + rpki/gui/app \ + rpki/gui/app/templates \ + rpki/gui/app/templates/registration \ + rpki/gui/app/templates/rpkigui \ + rpki/gui/cacheview \ + rpki/gui/cacheview/templates \ + rpki/gui/cacheview/templates/cacheview \ + rpki/gui/templates \ + rpkid.py \ + pubd.py \ + irdbd.py \ + rootd.py \ + irbe_cli.py + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.d *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh +# *.hxx *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.dox *.py +# *.f90 *.f *.for *.vhd *.vhdl + +FILE_PATTERNS = *.py + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = NO + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. 
+ +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = . \ + examples + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = $(IMAGE_PATH) + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program
+# by executing (via popen()) the command <filter> <input-file>, where <filter>
+# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
+# input file. Doxygen will then use the output that the filter program writes
+# to standard output.
+# If FILTER_PATTERNS is specified, this tag will be
+# ignored.
+
+INPUT_FILTER = 
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis.
+# Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match.
+# The filters are a list of the form:
+# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further
+# info on how filters are used. If FILTER_PATTERNS is empty or if
+# none of the patterns match the file name, INPUT_FILTER is applied.
+
+FILTER_PATTERNS = 
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will be used to filter the input files when producing source
+# files to browse (i.e. when SOURCE_BROWSER is set to YES).
+
+FILTER_SOURCE_FILES = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any)
+# and it is also possible to disable source filtering for a specific pattern
+# using *.ext= (so without naming a filter). This option only has effect when
+# FILTER_SOURCE_FILES is enabled.
+
+FILTER_SOURCE_PATTERNS = 
+
+#---------------------------------------------------------------------------
+# configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will
+# be generated. Documented entities will be cross-referenced with these sources.
+# Note: To get rid of all source code in the generated output, make sure also
+# VERBATIM_HEADERS is set to NO.
+ +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. 
+ +VERBATIM_HEADERS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = YES + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. 
If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. +# Doxygen will adjust the colors in the stylesheet and background images +# according to this color. Hue is specified as an angle on a colorwheel, +# see http://en.wikipedia.org/wiki/Hue for more information. +# For instance the value 0 represents red, 60 is yellow, 120 is green, +# 180 is cyan, 240 is blue, 300 purple, and 360 is red again. +# The allowed range is 0 to 359. + +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of +# the colors in the HTML output. For a value of 0 the output will use +# grayscales only. A value of 255 will produce the most vivid colors. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to +# the luminance component of the colors in the HTML output. Values below +# 100 gradually make the output lighter, whereas values above 100 make +# the output darker. The value divided by 100 is the actual gamma applied, +# so 80 represents a gamma of 0.8, The value 220 represents a gamma of 2.2, +# and 100 does not change the gamma. 
+ +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML +# page will contain the date and time when the page was generated. Setting +# this to NO can help when comparing the output of multiple runs. + +HTML_TIMESTAMP = YES + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = NO + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html +# for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. 
This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# When GENERATE_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. + +DOCSET_PUBLISHER_ID = org.doxygen.Publisher + +# The GENERATE_PUBLISHER_NAME tag identifies the documentation publisher. + +DOCSET_PUBLISHER_NAME = Publisher + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = NO + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = NO + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. 
+ +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = NO + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated +# that can be used as input for Qt's qhelpgenerator to generate a +# Qt Compressed Help (.qch) of the generated HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to +# add. For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the +# custom filter to add. For more information please see +# +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's +# filter section matches. +# +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. 
+ +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files +# will be generated, which together with the HTML files, form an Eclipse help +# plugin. To install this plugin and make it available under the help contents +# menu in Eclipse, the contents of the directory containing the HTML and XML +# files needs to be copied into the plugins directory of eclipse. The name of +# the directory within the plugins directory should be the same as +# the ECLIPSE_DOC_ID value. After copying Eclipse needs to be restarted before +# the help appears. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the eclipse help plugin. When installing the plugin +# the directory name containing the HTML and XML files should also have +# this name. + +ECLIPSE_DOC_ID = org.doxygen.Project + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [0,1..20]) +# that doxygen will group on one line in the generated HTML documentation. +# Note that a value of 0 will completely suppress the enum values from appearing in the overview section. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to YES, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser). +# Windows users are probably better off using the HTML help feature. + +GENERATE_TREEVIEW = NO + +# By enabling USE_INLINE_TREES, doxygen will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list. 
+
+USE_INLINE_TREES = NO
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
+# used to set the initial width (in pixels) of the frame in which the tree
+# is shown.
+
+TREEVIEW_WIDTH = 250
+
+# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open
+# links to external symbols imported via tag files in a separate window.
+
+EXT_LINKS_IN_WINDOW = NO
+
+# Use this tag to change the font size of Latex formulas included
+# as images in the HTML documentation. The default is 10. Note that
+# when you change the font size after a successful doxygen run you need
+# to manually remove any form_*.png images from the HTML output directory
+# to force them to be regenerated.
+
+FORMULA_FONTSIZE = 10
+
+# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are
+# not supported properly for IE 6.0, but are supported on all modern browsers.
+# Note that when changing this option you need to delete any form_*.png files
+# in the HTML output before the changes have effect.
+
+FORMULA_TRANSPARENT = YES
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax
+# (see http://www.mathjax.org) which uses client side Javascript for the
+# rendering instead of using prerendered bitmaps. Use this if you do not
+# have LaTeX installed or if you want the formulas to look prettier in the HTML
+# output. When enabled you also need to install MathJax separately and
+# configure the path to it using the MATHJAX_RELPATH option.
+
+USE_MATHJAX = NO
+
+# When MathJax is enabled you need to specify the location relative to the
+# HTML output directory using the MATHJAX_RELPATH option. The destination
+# directory should contain the MathJax.js script. For instance, if the mathjax
+# directory is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax.
The default value points to the mathjax.org site, so you can quickly see the result without installing +# MathJax, but it is strongly recommended to install a local copy of MathJax +# before deployment. + +MATHJAX_RELPATH = http://www.mathjax.org/mathjax + +# When the SEARCHENGINE tag is enabled doxygen will generate a search box +# for the HTML output. The underlying search engine uses javascript +# and DHTML and should work on any modern browser. Note that when using +# HTML help (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets +# (GENERATE_DOCSET) there is already a search function so this one should +# typically be disabled. For large projects the javascript based search engine +# can be slow, then enabling SERVER_BASED_SEARCH may provide a better solution. + +SEARCHENGINE = YES + +# When the SERVER_BASED_SEARCH tag is enabled the search engine will be +# implemented using a PHP enabled web server instead of at the web client +# using Javascript. Doxygen will generate the search PHP script and index +# file to put on the web server. The advantage of the server +# based approach is that it scales better to large projects and allows +# full text search. The disadvantages are that it is more difficult to setup +# and does not have live searching capabilities. + +SERVER_BASED_SEARCH = NO + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. 
If left blank `latex' will be used as the default command name. +# Note that when enabling USE_PDFLATEX this option is only used for +# generating bitmaps for formulas in the HTML output, but not in the +# Makefile that is written to the output directory. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = YES + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = letter + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = YES + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = YES + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. 
+# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = YES + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = YES + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include +# source code with syntax highlighting in the LaTeX output. +# Note that which sources are shown also depends on other settings +# such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. 
a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = YES + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. 
+ +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. 
+ +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = NO + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. 
+ +INCLUDE_PATH = + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition that overrules the definition found in the source code. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all references to function-like macros +# that are alone on a line, have an all uppercase name, and do not end with a +# semicolon, because these will confuse the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... 
+# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option also works with HAVE_DOT disabled, but it is recommended to +# install and use dot, since it yields more powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. 
Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = YES + +# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is +# allowed to run in parallel. When set to 0 (the default) doxygen will +# base this on the number of processors available in the system. You can set it +# explicitly to a value larger than 0 to get control over the balance +# between CPU load and processing speed. + +DOT_NUM_THREADS = 0 + +# By default doxygen will write a font called Helvetica to the output +# directory and reference it in all dot files that doxygen generates. +# When you want a differently looking font you can specify the font name +# using DOT_FONTNAME. You need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). 
If you specify a
+# different font using DOT_FONTNAME you can set the path where dot
+# can find it using this tag.
+
+DOT_FONTPATH =
+
+# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for each documented class showing the direct and
+# indirect inheritance relations. Setting this tag to YES will force
+# the CLASS_DIAGRAMS tag to NO.
+
+CLASS_GRAPH = YES
+
+# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for each documented class showing the direct and
+# indirect implementation dependencies (inheritance, containment, and
+# class references variables) of the class with other documented classes.
+
+COLLABORATION_GRAPH = YES
+
+# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for groups, showing the direct groups dependencies
+
+GROUP_GRAPHS = YES
+
+# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+
+UML_LOOK = YES
+
+# If set to YES, the inheritance and collaboration graphs will show the
+# relations between templates and their instances.
+
+TEMPLATE_RELATIONS = YES
+
+# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
+# tags are set to YES then doxygen will generate a graph for each documented
+# file showing the direct and indirect include dependencies of the file with
+# other documented files.
+
+INCLUDE_GRAPH = YES
+
+# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
+# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
+# documented header file showing the documented files that directly or
+# indirectly include this file.
+
+INCLUDED_BY_GRAPH = YES
+
+# If the CALL_GRAPH and HAVE_DOT options are set to YES then
+# doxygen will generate a call dependency graph for every global function
+# or class method.
Note that enabling this option will significantly increase
+# the time of a run. So in most cases it will be better to enable call graphs
+# for selected functions only using the \callgraph command.
+
+CALL_GRAPH = YES
+
+# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then
+# doxygen will generate a caller dependency graph for every global function
+# or class method. Note that enabling this option will significantly increase
+# the time of a run. So in most cases it will be better to enable caller
+# graphs for selected functions only using the \callergraph command.
+
+CALLER_GRAPH = YES
+
+# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
+# will generate a graphical hierarchy of all classes instead of a textual one.
+
+GRAPHICAL_HIERARCHY = YES
+
+# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES
+# then doxygen will show the dependencies a directory has on other directories
+# in a graphical way. The dependency relations are determined by the #include
+# relations between the files in the directories.
+
+DIRECTORY_GRAPH = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot. Possible values are png, jpg, gif or svg.
+# If left blank png will be used.
+
+DOT_IMAGE_FORMAT = png
+
+# The tag DOT_PATH can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+
+DOT_PATH =
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the
+# \dotfile command).
+
+DOTFILE_DIRS = doc
+
+# The MSCFILE_DIRS tag can be used to specify one or more directories that
+# contain msc files that are included in the documentation (see the
+# \mscfile command).
+
+MSCFILE_DIRS =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of
+# nodes that will be shown in the graph.
If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 0 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. 
+ +GENERATE_LEGEND = NO + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = NO diff --git a/ca/Makefile.in b/ca/Makefile.in new file mode 100644 index 00000000..d36a3163 --- /dev/null +++ b/ca/Makefile.in @@ -0,0 +1,339 @@ +# $Id$ + +PYTHON = @PYTHON@ +TRANG = @TRANG@ + +CFLAGS = @CFLAGS@ +LDFLAGS = @LDFLAGS@ @POW_LDFLAGS@ +LIBS = @LIBS@ + +INSTALL = @INSTALL@ -m 555 + +prefix = @prefix@ +exec_prefix = @exec_prefix@ +datarootdir = @datarootdir@ +datadir = @datadir@ +localstatedir = @localstatedir@ +sharedstatedir = @sharedstatedir@ +sysconfdir = @sysconfdir@ +bindir = @bindir@ +sbindir = @sbindir@ +libexecdir = @libexecdir@ +sysconfdir = @sysconfdir@ + +abs_builddir = @abs_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +abs_top_builddir= @abs_top_builddir@ +srcdir = @srcdir@ + +WSGI_DAEMON_PROCESS = @WSGI_DAEMON_PROCESS@ +WSGI_PROCESS_GROUP = @WSGI_PROCESS_GROUP@ +RCYNIC_HTML_DIR = @RCYNIC_HTML_DIR@ +APACHE_VERSION = @APACHE_VERSION@ +WSGI_PYTHON_EGG_CACHE_DIR = @WSGI_PYTHON_EGG_CACHE_DIR@ +WSGI_PYTHON_EGG_CACHE_USER = @WSGI_PYTHON_EGG_CACHE_USER@ + +RPKID_INSTALL_TARGETS = @RPKID_INSTALL_TARGETS@ + +SETUP_PY_INSTALL_LAYOUT = @SETUP_PY_INSTALL_LAYOUT@ + +SETUP_PY_ROOT = `${PYTHON} -c 'import sys; print "--root " + sys.argv[1] if sys.argv[1] else ""' '${DESTDIR}'` + +POW_SO = rpki/POW/_POW.so + +all:: rpki/autoconf.py setup_autoconf.py rpki/relaxng.py myrpki.rng rpki/sql_schemas.py ${POW_SO} build/stamp + +.FORCE: + +${POW_SO}: .FORCE setup_autoconf.py + ${PYTHON} setup.py build_ext --inplace + +build/stamp: .FORCE setup_autoconf.py + ${PYTHON} setup.py build + touch $@ + +clean:: + rm -rf ${POW_SO} build dist + +RNGS = left-right-schema.rng up-down-schema.rng publication-schema.rng myrpki.rng router-certificate-schema.rng + +rpki/relaxng.py: ${abs_top_srcdir}/buildtools/make-relaxng.py ${RNGS} + ${PYTHON} ${abs_top_srcdir}/buildtools/make-relaxng.py 
${RNGS} >$@.tmp + mv $@.tmp $@ + +left-right-schema.rng: left-right-schema.rnc + ${TRANG} left-right-schema.rnc left-right-schema.rng + +up-down-schema.rng: up-down-schema.rnc + ${TRANG} up-down-schema.rnc up-down-schema.rng + +publication-schema.rng: publication-schema.rnc + ${TRANG} publication-schema.rnc publication-schema.rng + +myrpki.rng: myrpki.rnc + ${TRANG} myrpki.rnc myrpki.rng + +router-certificate-schema.rng: router-certificate-schema.rnc + ${TRANG} router-certificate-schema.rnc router-certificate-schema.rng + +rpki/sql_schemas.py: ${abs_top_srcdir}/buildtools/make-sql-schemas.py rpkid.sql pubd.sql + ${PYTHON} ${abs_top_srcdir}/buildtools/make-sql-schemas.py >$@.tmp + mv $@.tmp $@ + +clean:: + find . -type f -name '*.py[co]' -delete + cd tests; $(MAKE) $@ + +install:: ${RPKID_INSTALL_TARGETS} + +install-always:: all + ${PYTHON} setup.py install ${SETUP_PY_ROOT} ${SETUP_PY_INSTALL_LAYOUT} --record installed + @echo + @echo "== Default configuration file location is ${sysconfdir}/rpki.conf ==" + @echo + ${INSTALL} examples/rpki.conf ${DESTDIR}${sysconfdir}/rpki.conf.sample + ${INSTALL} -d ${DESTDIR}${datarootdir}/rpki/publication + +uninstall deinstall:: + -${libexecdir}/rpkigui-apache-conf-gen --remove --verbose + xargs rm -fv $@ + +doc/rpkid.dot: rpkid.sql + sh ${abs_top_srcdir}/buildtools/graphviz-sql.sh $? >$@ + +.SUFFIXES: .dot .png .pdf .eps + +.dot.pdf: + dot -Tps2 $? | ps2pdf - $@ + +.dot.eps: + dot -o $@ -Teps $? + +.dot.png: + dot -o $@ -Tpng $? 
+ +dot: doc/pubd.dot doc/rpkid.dot + +eps: doc/pubd.eps doc/rpkid.eps doc/rpkid-bpki.eps doc/pubd-bpki.eps + +png: doc/pubd.png doc/rpkid.png doc/rpkid-bpki.png doc/pubd-bpki.png + +pdf: doc/pubd.pdf doc/rpkid.pdf doc/rpkid-bpki.pdf doc/pubd-bpki.pdf + +docclean: + rm -rf doc/html doc/latex doc/xml + rm -f doc/*.eps doc/*.pdf doc/*.png + rm -f doc/pubd.dot doc/rpkid.dot + +html: dot eps png + TZ='' IMAGE_PATH=${abs_builddir}/doc doxygen + +docs: dot eps png html pdf + +## + +distclean:: clean docclean + cd tests; ${MAKE} $@ + rm -f TAGS Makefile + +all:: examples/rpki.conf + +examples/rpki.conf: rpki/autoconf.py rpki-confgen rpki-confgen.xml + ${PYTHON} rpki-confgen \ + --read-xml rpki-confgen.xml \ + --autoconf \ + --set myrpki::handle=`hostname -f | sed 's/[.]/_/g'` \ + --set myrpki::rpkid_server_host=`hostname -f` \ + --set myrpki::pubd_server_host=`hostname -f` \ + --pwgen myrpki::shared_sql_password \ + --pwgen web_portal::secret-key \ + --write-conf $@ + +clean:: + rm -f examples/rpki.conf + +rpki/autoconf.py: Makefile + @echo 'Generating $@'; \ + (echo '# Automatically generated. 
DO NOT EDIT.'; \ + echo ; \ + echo 'bindir = "${bindir}"'; \ + echo 'datarootdir = "${datarootdir}"'; \ + echo 'localstatedir = "${localstatedir}"'; \ + echo 'sbindir = "${sbindir}"'; \ + echo 'sharedstatedir = "${sharedstatedir}"'; \ + echo 'sysconfdir = "${sysconfdir}"'; \ + echo 'libexecdir = "${libexecdir}"'; \ + echo ; \ + echo 'WSGI_DAEMON_PROCESS = "${WSGI_DAEMON_PROCESS}"'; \ + echo 'WSGI_PROCESS_GROUP = "${WSGI_PROCESS_GROUP}"'; \ + echo 'RCYNIC_HTML_DIR = "${RCYNIC_HTML_DIR}"'; \ + echo 'APACHE_VERSION = "${APACHE_VERSION}"'; \ + echo 'WSGI_PYTHON_EGG_CACHE_DIR = "${WSGI_PYTHON_EGG_CACHE_DIR}"'; \ + echo 'WSGI_PYTHON_EGG_CACHE_USER = "${WSGI_PYTHON_EGG_CACHE_USER}"'; \ + ) > $@ + +clean:: + rm -f rpki/autoconf.py + +setup_autoconf.py: rpki/autoconf.py + @echo 'Generating $@'; \ + (cat rpki/autoconf.py; \ + echo ; \ + echo 'CFLAGS = """${CFLAGS}"""'; \ + echo 'LDFLAGS = """${LDFLAGS}"""'; \ + echo 'LIBS = """${LIBS}"""'; \ + ) > $@ + +clean:: + rm -f setup_autoconf.py setup_autoconf.pyc + +install-postconf: \ + install-user install-egg-cache install-conf install-apache install-mysql install-django install-bpki install-cron + +# This should create user "rpkid" and group "rpkid", but as we have +# not yet tested our ability to run in such a configuration, this +# would be a little premature. Can steal Makefile code for this from +# rcynic when we're ready to do something with it. + +install-user: + @true + +# This is only necessary on some platforms (currently FreeBSD, +# due to shortcomings in the way that Python eggs are installed +# as system libraries). + +install-egg-cache: + @if test 'X${WSGI_PYTHON_EGG_CACHE_DIR}' != X && test ! 
-d '${WSGI_PYTHON_EGG_CACHE_DIR}'; then \ + mkdir -v '${WSGI_PYTHON_EGG_CACHE_DIR}'; \ + if test 'X${WSGI_PYTHON_EGG_CACHE_USER}' != X; then \ + chown '${WSGI_PYTHON_EGG_CACHE_USER}' '${WSGI_PYTHON_EGG_CACHE_DIR}'; \ + fi; \ + fi + +# We used to play the FreeBSD game of copying rpki.conf.sample to +# rpki.conf on install and removing rpki.conf if it's identical to +# rpki.conf.sample in uninstall, but that turns out to work poorly +# with generated passwords. So now we copy rpki.conf.sample if and +# only if rpki.conf does not exist, and we leave removal of rpki.conf +# for the user to deal with. This in turn leaves us with a different +# problem of how to upgrade rpki.conf, but at least in the FreeBSD +# universe we're supposed to leave that problem for the user. + +install-conf: + if test -f ${DESTDIR}${sysconfdir}/rpki.conf; \ + then \ + true; \ + else \ + cp -p ${DESTDIR}${sysconfdir}/rpki.conf.sample ${DESTDIR}${sysconfdir}/rpki.conf; \ + fi + +uninstall deinstall:: +# if cmp -s ${DESTDIR}${sysconfdir}/rpki.conf ${DESTDIR}${sysconfdir}/rpki.conf.sample; then rm -f ${DESTDIR}${sysconfdir}/rpki.conf; else true; fi + rm -f ${DESTDIR}${sysconfdir}/rpki.conf.sample + +install-apache: + ${libexecdir}/rpkigui-apache-conf-gen --install --verbose + +install-mysql: + ${sbindir}/rpki-sql-setup + +install-django: + ${sbindir}/rpki-manage syncdb --noinput + ${sbindir}/rpki-manage migrate app + +install-bpki: + ${sbindir}/rpkic initialize_server_bpki + +# This needs to set up crontab entries for rpkigui-check-expired, +# rpkigui-import-routes, and rpkic update_bpki. They probably don't +# want run under the same user IDs either, so what with having to use +# /usr/bin/crontab on some platforms, this should be entirely too +# entertaining. Might be ok to run them all as user rpkid eventually. 
+# +# We really should be running exactly the same cron setup/teardown +# code here as we do in platform-specific post-installation scripts, +# but for now we just use crontab(1) here on all platforms. + +install-cron: install-cron-using-crontab + +uninstall deinstall:: uninstall-cron-using-crontab + +# Code for setting up and tearing down cron jobs using the crontab(1) +# program. We don't use this on all platforms, but we do use it on +# more than one, so it's broken out here as common code. +# +# CRONTAB_USER really should be rpkid, but we don't have the rest of +# the package set up for that yet, so run it as root for now. + +CRONTAB_USER = root + +install-cron-using-crontab: + @crontab -l -u ${CRONTAB_USER} 2>/dev/null | \ + awk -v t=`hexdump -n 2 -e '"%u\n"' /dev/urandom` ' \ + BEGIN { \ + cmd["${libexecdir}/rpkigui-import-routes"] = sprintf("%2u */2 * * *", t % 60); \ + cmd["${libexecdir}/rpkigui-check-expired"] = "@daily "; \ + cmd["${sbindir}/rpkic update_bpki" ] = "30 3 * * * "; \ + } \ + { \ + print; \ + for (i in cmd) \ + if ($$0 ~ i) \ + found[i] = $$0; \ + } \ + END { \ + for (i in cmd) \ + if (!found[i]) \ + print cmd[i] "\texec " i; \ + }' | \ + crontab -u ${CRONTAB_USER} - + +uninstall-cron-using-crontab: + @crontab -l -u ${CRONTAB_USER} 2>/dev/null | \ + awk ' \ + BEGIN { \ + empty = 1; \ + } \ + $$0 !~ "${libexecdir}/rpkigui-import-routes" && \ + $$0 !~ "${libexecdir}/rpkigui-check-expired" && \ + $$0 !~ "${sbindir}/rpkic update_bpki" { \ + empty = 0; \ + print | "/usr/bin/crontab -u ${CRONTAB_USER} -"; \ + } \ + END { \ + if (empty) \ + system("/usr/bin/crontab -u ${CRONTAB_USER} -r"); \ + }' diff --git a/ca/README b/ca/README new file mode 100644 index 00000000..91d372de --- /dev/null +++ b/ca/README @@ -0,0 +1,11 @@ +$Id$ -*- Text -*- + +RPKI CA tools. 
+ +See: + +- The primary documentation at http://trac.rpki.net/ + +- The PDF manual in ../doc/manual.pdf, or + +- The flat text pages ../doc/doc.RPKI.CA* diff --git a/ca/doc/README b/ca/doc/README new file mode 100644 index 00000000..33902d7e --- /dev/null +++ b/ca/doc/README @@ -0,0 +1,16 @@ +$Id$ + +Internals documentation for the RPKI CA tools. + +Once upon a time this included the hand-written documentation for the +CA tools, but that is now part of the overall package documentation. +What's left here is just what Doxygen generates from the source code +and a few Graphviz diagrams. + +At the moment the control for the stuff generated here is still +../Makefile, that may change at some point. + +We no longer generate the documentation here automatically, as it's +kind of large and we're not sure anybody else cares about it, so if +you want this manual you'll have to install Doxygen and build it +yourself. diff --git a/ca/doc/mainpage.py b/ca/doc/mainpage.py new file mode 100644 index 00000000..4570547b --- /dev/null +++ b/ca/doc/mainpage.py @@ -0,0 +1,71 @@ +## @file +# @details +# Doxygen documentation source, expressed as Python comments to make Doxygen happy. +# +# $Id$ +# +# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +## @mainpage RPKI Engine Reference Manual +# +# This collection of Python modules implements an RPKI CA engine. +# +# See http://trac.rpki.net/ for the RPKI tools package documentation. +# +# The documentation you're reading is generated automatically by +# Doxygen from comments and documentation in +# the code. +# +# At one point this manual also included documentation for the CA +# tools, but that has been integrated into the overall package +# documentation. This manual is now just the CA tools internals. + +## @page sql-schemas SQL database schemas +# +# @li @subpage rpkid-sql "rpkid database schema" +# @li @subpage pubd-sql "pubd database schema" + +## @page rpkid-sql rpkid SQL schema +# +# @image html rpkid.png "Diagram of rpkid.sql" +# @image latex rpkid.eps "Diagram of rpkid.sql" height=\textheight +# +# @verbinclude rpkid.sql + +## @page pubd-sql pubd SQL Schema +# +# @image html pubd.png "Diagram of pubd.sql" +# @image latex pubd.eps "Diagram of pubd.sql" width=\textwidth +# +# @verbinclude pubd.sql + +# Local Variables: +# mode:python +# compile-command: "cd ../.. 
&& ./config.status && cd rpkid && make docs" +# End: diff --git a/ca/doc/pubd-bpki.dot b/ca/doc/pubd-bpki.dot new file mode 100644 index 00000000..44ad8a90 --- /dev/null +++ b/ca/doc/pubd-bpki.dot @@ -0,0 +1,42 @@ +// $Id$ + +// Color code: +// Black: Operating entity +// Red: Cross-certified client +// +// Shape code: +// Octagon: TA +// Diamond: CA +// Record: EE + +digraph bpki_pubd { + splines = true; + size = "14,14"; + node [ fontname = Times, fontsize = 9 ]; + + // Operating entity + node [ color = black, fontcolor = black, shape = record ]; + TA [ shape = octagon, label = "BPKI TA" ]; + pubd [ label = "pubd|{HTTPS server|CMS}" ]; + ctl [ label = "Control|{HTTPS client|CMS}" ]; + + // Clients + node [ color = red, fontcolor = red, shape = diamond ]; + Alice_CA; + Bob_CA; + node [ color = red, fontcolor = red, shape = record ]; + Alice_EE [ label = "Alice\nEE|{HTTPS client|CMS}" ]; + Bob_EE [ label = "Bob\nEE|{HTTPS client|CMS}" ]; + + edge [ color = black, style = dotted ]; + TA -> pubd; + TA -> ctl; + + edge [ color = black, style = solid ]; + TA -> Alice_CA; + TA -> Bob_CA; + + edge [ color = red, style = solid ]; + Alice_CA -> Alice_EE; + Bob_CA -> Bob_EE; +} diff --git a/ca/doc/rpkid-bpki.dot b/ca/doc/rpkid-bpki.dot new file mode 100644 index 00000000..651591cb --- /dev/null +++ b/ca/doc/rpkid-bpki.dot @@ -0,0 +1,76 @@ +// $Id$ + +// Color code: +// Black: Hosting entity +// Blue: Hosted entity +// Red: Cross-certified peer +// +// Shape code: +// Octagon: TA +// Diamond: CA +// Record: EE + +digraph bpki_rpkid { + splines = true; + size = "14,14"; + node [ fontname = Times, fontsize = 9 ]; + + // Hosting entity + node [ color = black, shape = record ]; + TA [ shape = octagon, label = "BPKI TA" ]; + rpkid [ label = "rpkid|{HTTPS server|HTTPS left-right client|CMS left-right}" ]; + irdbd [ label = "irdbd|{HTTPS left-right server|CMS left-right}" ]; + irbe [ label = "IRBE|{HTTPS left-right client|CMS left-right}" ]; + + // Hosted entities + node [ color = 
blue, fontcolor = blue ]; + Alice_CA [ shape = diamond ]; + Alice_EE [ label = "Alice\nBSC EE|{HTTPS up-down client|CMS up-down}" ]; + Ellen_CA [ shape = diamond ]; + Ellen_EE [ label = "Ellen\nBSC EE|{HTTPS up-down client|CMS up-down}" ]; + + // Peers + node [ color = red, fontcolor = red, shape = diamond ]; + Bob_CA; + Carol_CA; + Dave_CA; + Frank_CA; + Ginny_CA; + Harry_CA; + node [ shape = record ]; + Bob_EE [ label = "Bob\nEE|{HTTPS up-down|CMS up-down}" ]; + Carol_EE [ label = "Carol\nEE|{HTTPS up-down|CMS up-down}" ]; + Dave_EE [ label = "Dave\nEE|{HTTPS up-down|CMS up-down}" ]; + Frank_EE [ label = "Frank\nEE|{HTTPS up-down|CMS up-down}" ]; + Ginny_EE [ label = "Ginny\nEE|{HTTPS up-down|CMS up-down}" ]; + Harry_EE [ label = "Bob\nEE|{HTTPS up-down|CMS up-down}" ]; + + edge [ color = black, style = solid ]; + TA -> Alice_CA; + TA -> Ellen_CA; + + edge [ color = black, style = dotted ]; + TA -> rpkid; + TA -> irdbd; + TA -> irbe; + + edge [ color = blue, style = solid ]; + Alice_CA -> Bob_CA; + Alice_CA -> Carol_CA; + Alice_CA -> Dave_CA; + Ellen_CA -> Frank_CA; + Ellen_CA -> Ginny_CA; + Ellen_CA -> Harry_CA; + + edge [ color = blue, style = dotted ]; + Alice_CA -> Alice_EE; + Ellen_CA -> Ellen_EE; + + edge [ color = red, style = solid ]; + Bob_CA -> Bob_EE; + Carol_CA -> Carol_EE; + Dave_CA -> Dave_EE; + Frank_CA -> Frank_EE; + Ginny_CA -> Ginny_EE; + Harry_CA -> Harry_EE; +} diff --git a/ca/examples/asns.csv b/ca/examples/asns.csv new file mode 100644 index 00000000..9d742740 --- /dev/null +++ b/ca/examples/asns.csv @@ -0,0 +1,5 @@ +# $Id$ +# +# Syntax: +# +Alice 64533 diff --git a/ca/examples/prefixes.csv b/ca/examples/prefixes.csv new file mode 100644 index 00000000..ece18d32 --- /dev/null +++ b/ca/examples/prefixes.csv @@ -0,0 +1,8 @@ +# $Id$ +# +# Syntax: / +# or: - +# +Alice 192.0.2.0/27 +Bob 192.0.2.44-192.0.2.100 +Bob 10.0.0.0/8 diff --git a/ca/examples/roas.csv b/ca/examples/roas.csv new file mode 100644 index 00000000..e4ec3074 --- /dev/null +++ 
b/ca/examples/roas.csv @@ -0,0 +1,5 @@ +# $Id$ +# +# Syntax: /- +# +10.3.0.44/32 666 Mom diff --git a/ca/examples/rsyncd.conf b/ca/examples/rsyncd.conf new file mode 100644 index 00000000..faf1dd0d --- /dev/null +++ b/ca/examples/rsyncd.conf @@ -0,0 +1,53 @@ +# $Id$ +# +# Sample rsyncd.conf file for use with pubd. You may need to +# customize this for the conventions on your system. See the rsync +# and rsyncd.conf manual pages for a complete explanation of how to +# configure rsyncd, this is just a simple configuration to get you +# started. +# +# There are two parameters in the following which you should set to +# appropriate values for your system: +# +# "myname" is the rsync module name to configure, as in +# "rsync://rpki.example.org/rpki/"; see the publication_rsync_module +# parameter in rpki.conf +# +# "/some/where/publication" is the absolute pathname of the directory +# where you told pubd to place its outputs; see the +# publication_base_directory parameter in rpki.conf. +# +# You may need to adjust other parameters for your system environment. +# +# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +pid file = /var/run/rsyncd.pid +uid = nobody +gid = nobody + +[rpki] + use chroot = no + read only = yes + transfer logging = yes + path = /some/where/publication + comment = RPKI Testbed + +[root] + # This one is only relevant if you're running rootd. + use chroot = no + read only = yes + transfer logging = yes + path = /some/where/publication.root + comment = RPKI Testbed Root diff --git a/ca/irbe_cli b/ca/irbe_cli new file mode 100755 index 00000000..9deac6d6 --- /dev/null +++ b/ca/irbe_cli @@ -0,0 +1,388 @@ +#!/usr/bin/env python + +# $Id$ +# +# Copyright (C) 2009--2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__doc__ = """ +Command line IR back-end control program for rpkid and pubd. +""" + +# Command line processing of this program is too complex and +# idiosyncratic to be worth trying to reimplement using argparse. + +import sys +import getopt +import textwrap +import rpki.left_right +import rpki.http +import rpki.x509 +import rpki.config +import rpki.log +import rpki.publication +import rpki.async + +pem_out = None + +class UsageWrapper(textwrap.TextWrapper): + """ + Call interface around Python textwrap.Textwrapper class. + """ + + def __call__(self, *args): + """ + Format arguments, with TextWrapper indentation. + """ + return self.fill(textwrap.dedent(" ".join(args))) + +usage_fill = UsageWrapper(subsequent_indent = " " * 4) + +class reply_elt_mixin(object): + """ + Protocol mix-in for printout of reply PDUs. + """ + + is_cmd = False + + def client_reply_decode(self): + pass + + def client_reply_show(self): + print self.element_name + for i in self.attributes + self.elements: + if getattr(self, i) is not None: + print " %s: %s" % (i, getattr(self, i)) + +class cmd_elt_mixin(reply_elt_mixin): + """ + Protocol mix-in for command line client element PDUs. + """ + + is_cmd = True + + ## @var excludes + # XML attributes and elements that should not be allowed as command + # line arguments. + excludes = () + + @classmethod + def usage(cls): + """ + Generate usage message for this PDU. 
+ """ + args = " ".join("--" + x + "=" for x in cls.attributes + cls.elements if x not in cls.excludes) + bools = " ".join("--" + x for x in cls.booleans) + if args and bools: + return args + " " + bools + else: + return args or bools + + def client_getopt(self, argv): + """ + Parse options for this class. + """ + # pylint: disable=W0621 + opts, argv = getopt.getopt(argv, "", [x + "=" for x in self.attributes + self.elements if x not in self.excludes] + list(self.booleans)) + for o, a in opts: + o = o[2:] + handler = getattr(self, "client_query_" + o, None) + if handler is not None: + handler(a) + elif o in self.booleans: + setattr(self, o, True) + else: + assert o in self.attributes + setattr(self, o, a) + return argv + + def client_query_bpki_cert(self, arg): + """ + Special handler for --bpki_cert option. + """ + self.bpki_cert = rpki.x509.X509(Auto_file = arg) + + def client_query_glue(self, arg): + """ + Special handler for --bpki_glue option. + """ + self.bpki_glue = rpki.x509.X509(Auto_file = arg) + + def client_query_bpki_cms_cert(self, arg): + """ + Special handler for --bpki_cms_cert option. + """ + self.bpki_cms_cert = rpki.x509.X509(Auto_file = arg) + + def client_query_cms_glue(self, arg): + """ + Special handler for --bpki_cms_glue option. + """ + self.bpki_cms_glue = rpki.x509.X509(Auto_file = arg) + +class cmd_msg_mixin(object): + """ + Protocol mix-in for command line client message PDUs. + """ + + @classmethod + def usage(cls): + """ + Generate usage message for this PDU. 
+ """ + for k, v in cls.pdus.items(): + if v.is_cmd: + print usage_fill(k, v.usage()) + +# left-right protcol + +class left_right_msg(cmd_msg_mixin, rpki.left_right.msg): + + class self_elt(cmd_elt_mixin, rpki.left_right.self_elt): + pass + + class bsc_elt(cmd_elt_mixin, rpki.left_right.bsc_elt): + + excludes = ("pkcs10_request",) + + def client_query_signing_cert(self, arg): + """--signing_cert option.""" + self.signing_cert = rpki.x509.X509(Auto_file = arg) + + def client_query_signing_cert_crl(self, arg): + """--signing_cert_crl option.""" + self.signing_cert_crl = rpki.x509.CRL(Auto_file = arg) + + def client_reply_decode(self): + global pem_out + if pem_out is not None and self.pkcs10_request is not None: + if isinstance(pem_out, str): + pem_out = open(pem_out, "w") + pem_out.write(self.pkcs10_request.get_PEM()) + + class parent_elt(cmd_elt_mixin, rpki.left_right.parent_elt): + pass + + class child_elt(cmd_elt_mixin, rpki.left_right.child_elt): + pass + + class repository_elt(cmd_elt_mixin, rpki.left_right.repository_elt): + pass + + class list_published_objects_elt(cmd_elt_mixin, rpki.left_right.list_published_objects_elt): + excludes = ("uri",) + + class list_received_resources_elt(cmd_elt_mixin, rpki.left_right.list_received_resources_elt): + excludes = ("parent_handle", "notBefore", "notAfter", "uri", "sia_uri", "aia_uri", "asn", "ipv4", "ipv6") + + class report_error_elt(reply_elt_mixin, rpki.left_right.report_error_elt): + pass + + pdus = dict((x.element_name, x) + for x in (self_elt, bsc_elt, parent_elt, child_elt, repository_elt, + list_published_objects_elt, list_received_resources_elt, report_error_elt)) + +class left_right_sax_handler(rpki.left_right.sax_handler): + pdu = left_right_msg + +class left_right_cms_msg(rpki.left_right.cms_msg): + saxify = left_right_sax_handler.saxify + +# Publication protocol + +class publication_msg(cmd_msg_mixin, rpki.publication.msg): + + class config_elt(cmd_elt_mixin, rpki.publication.config_elt): + + def 
client_query_bpki_crl(self, arg): + """ + Special handler for --bpki_crl option. + """ + self.bpki_crl = rpki.x509.CRL(Auto_file = arg) + + class client_elt(cmd_elt_mixin, rpki.publication.client_elt): + pass + + class certificate_elt(cmd_elt_mixin, rpki.publication.certificate_elt): + pass + + class crl_elt(cmd_elt_mixin, rpki.publication.crl_elt): + pass + + class manifest_elt(cmd_elt_mixin, rpki.publication.manifest_elt): + pass + + class roa_elt(cmd_elt_mixin, rpki.publication.roa_elt): + pass + + class report_error_elt(reply_elt_mixin, rpki.publication.report_error_elt): + pass + + class ghostbuster_elt(cmd_elt_mixin, rpki.publication.ghostbuster_elt): + pass + + pdus = dict((x.element_name, x) + for x in (config_elt, client_elt, certificate_elt, crl_elt, + manifest_elt, roa_elt, report_error_elt, + ghostbuster_elt)) + +class publication_sax_handler(rpki.publication.sax_handler): + pdu = publication_msg + +class publication_cms_msg(rpki.publication.cms_msg): + saxify = publication_sax_handler.saxify + +# Usage + +top_opts = ["config=", "help", "pem_out=", "quiet", "verbose"] + +def usage(code = 1): + if __doc__ is not None: + print __doc__.strip() + print + print "Usage:" + print + print "# Top-level options:" + print usage_fill(*["--" + x for x in top_opts]) + print + print "# left-right protocol:" + left_right_msg.usage() + print + print "# publication protocol:" + publication_msg.usage() + sys.exit(code) + +# Main program + +rpki.log.init("irbe_cli") + +argv = sys.argv[1:] + +if not argv: + usage(0) + +cfg_file = None +verbose = True + +opts, argv = getopt.getopt(argv, "c:hpqv?", top_opts) +for o, a in opts: + if o in ("-?", "-h", "--help"): + usage(0) + elif o in ("-c", "--config"): + cfg_file = a + elif o in ("-p", "--pem_out"): + pem_out = a + elif o in ("-q", "--quiet"): + verbose = False + elif o in ("-v", "--verbose"): + verbose = True + +if not argv: + usage(1) + +cfg = rpki.config.parser(cfg_file, "irbe_cli") + +q_msg_left_right = [] 
+q_msg_publication = [] + +while argv: + if argv[0] in left_right_msg.pdus: + q_pdu = left_right_msg.pdus[argv[0]]() + q_msg = q_msg_left_right + elif argv[0] in publication_msg.pdus: + q_pdu = publication_msg.pdus[argv[0]]() + q_msg = q_msg_publication + else: + usage(1) + argv = q_pdu.client_getopt(argv[1:]) + q_msg.append(q_pdu) + +from django.conf import settings + +settings.configure( + DATABASES = { "default" : { + "ENGINE" : "django.db.backends.mysql", + "NAME" : cfg.get("sql-database", section = "irdbd"), + "USER" : cfg.get("sql-username", section = "irdbd"), + "PASSWORD" : cfg.get("sql-password", section = "irdbd"), + "HOST" : "", + "PORT" : "", + "OPTIONS" : { "init_command": "SET storage_engine=INNODB" }}}, + INSTALLED_APPS = ("rpki.irdb",), +) + +import rpki.irdb + +server_ca = rpki.irdb.ServerCA.objects.get() +irbe = server_ca.ee_certificates.get(purpose = "irbe") + +if q_msg_left_right: + + class left_right_proto(object): + cms_msg = left_right_cms_msg + msg = left_right_msg + + rpkid = server_ca.ee_certificates.get(purpose = "rpkid") + + rpkid_url = "http://%s:%s/left-right/" % ( + cfg.get("server-host", section = "rpkid"), + cfg.get("server-port", section = "rpkid")) + + call_rpkid = rpki.async.sync_wrapper(rpki.http.caller( + proto = left_right_proto, + client_key = irbe.private_key, + client_cert = irbe.certificate, + server_ta = server_ca.certificate, + server_cert = rpkid.certificate, + url = rpkid_url, + debug = verbose)) + + call_rpkid(*q_msg_left_right) + +if q_msg_publication: + + class publication_proto(object): + msg = publication_msg + cms_msg = publication_cms_msg + + pubd = server_ca.ee_certificates.get(purpose = "pubd") + + pubd_url = "http://%s:%s/control/" % ( + cfg.get("server-host", section = "pubd"), + cfg.get("server-port", section = "pubd")) + + call_pubd = rpki.async.sync_wrapper(rpki.http.caller( + proto = publication_proto, + client_key = irbe.private_key, + client_cert = irbe.certificate, + server_ta = server_ca.certificate, 
+ server_cert = pubd.certificate, + url = pubd_url, + debug = verbose)) + + call_pubd(*q_msg_publication) diff --git a/ca/irdbd b/ca/irdbd new file mode 100755 index 00000000..493e3d72 --- /dev/null +++ b/ca/irdbd @@ -0,0 +1,21 @@ +#!/usr/bin/env python + +# $Id$ +# +# Copyright (C) 2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +if __name__ == "__main__": + import rpki.irdbd + rpki.irdbd.main() diff --git a/ca/pubd b/ca/pubd new file mode 100755 index 00000000..7d8ecbfa --- /dev/null +++ b/ca/pubd @@ -0,0 +1,21 @@ +#!/usr/bin/env python + +# $Id$ +# +# Copyright (C) 2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +if __name__ == "__main__": + import rpki.pubd + rpki.pubd.main() diff --git a/ca/rootd b/ca/rootd new file mode 100755 index 00000000..cb59f958 --- /dev/null +++ b/ca/rootd @@ -0,0 +1,21 @@ +#!/usr/bin/env python + +# $Id$ +# +# Copyright (C) 2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +if __name__ == "__main__": + import rpki.rootd + rpki.rootd.main() diff --git a/ca/rpki-confgen b/ca/rpki-confgen new file mode 100755 index 00000000..f531bee8 --- /dev/null +++ b/ca/rpki-confgen @@ -0,0 +1,291 @@ +#!/usr/bin/env python + +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +import os +import sys +import argparse +import base64 +import textwrap + +from lxml.etree import Element, SubElement, ElementTree + +space4 = " " * 4 +space6 = " " * 6 +space8 = " " * 8 +star78 = "*" * 78 + +wiki_wrapper = textwrap.TextWrapper() +conf_wrapper = textwrap.TextWrapper(initial_indent = "# ", subsequent_indent = "# ") +xml6_wrapper = textwrap.TextWrapper(initial_indent = space6, subsequent_indent = space6) +xml8_wrapper = textwrap.TextWrapper(initial_indent = space8, subsequent_indent = space8) + +class Option(object): + + def __init__(self, name, value, doc): + self.name = name + self.value = value + self.doc = doc + + @property + def width(self): + return len(self.name) + + def to_xml(self): + x = Element("option", name = self.name) + if self.value is not None: + x.set("value", self.value) + for d in self.doc: + SubElement(x, "doc").text = "\n" + xml8_wrapper.fill(d) + "\n" + space6 + return x + + def to_wiki(self, f): + f.write("\n== %s == #%s\n" % (self.name, self.name)) + for d in self.doc: + f.write("\n%s\n" % wiki_wrapper.fill(d)) + if self.value is None: + f.write("\n%s\n" % wiki_wrapper.fill("No default value.")) + else: + f.write("\n{{{\n#!ini\n%s = %s\n}}}\n" % (self.name, self.value)) + + def to_conf(self, f, width): + for i, d in enumerate(self.doc): + f.write("%s\n%s\n" % ("" if i == 0 else "#", conf_wrapper.fill(d))) + if self.value is None: + f.write("\n#%-*s = ???\n" % (width - 1, self.name)) + else: + f.write("\n%-*s = %s\n" % 
(width, self.name, self.value)) + +class Section(object): + + def __init__(self, name): + self.name = name + self.doc = [] + self.options = [] + + @property + def width(self): + return max(o.width for o in self.options) + + @classmethod + def from_xml(cls, elt): + self = cls(name = elt.get("name")) + for x in elt.iterchildren("doc"): + self.doc.append(" ".join(x.text.split())) + for x in elt.iterchildren("option"): + self.options.append(Option(name = x.get("name"), value = x.get("value"), + doc = [" ".join(d.text.split()) + for d in x.iterchildren("doc")])) + return self + + def to_xml(self): + x = Element("section", name = self.name) + for d in self.doc: + SubElement(x, "doc").text = "\n" + xml6_wrapper.fill(d) + "\n" + space4 + x.extend(o.to_xml() for o in self.options) + return x + + def to_wiki(self, f): + f.write("\n= [%s] section = #%s\n" % (self.name, self.name)) + for d in self.doc: + f.write("\n%s\n" % wiki_wrapper.fill(d)) + for o in self.options: + o.to_wiki(f) + + def to_conf(self, f, width): + f.write("\n" + "#" * 78 + "\n\n[" + self.name + "]\n") + if self.doc: + f.write("\n##") + for i, d in enumerate(self.doc): + f.write("%s\n%s\n" % ("" if i == 0 else "#", conf_wrapper.fill(d))) + f.write("##\n") + for o in self.options: + o.to_conf(f, width) + +def wiki_header(f, ident, toc): + f.write("\n".join(( + "{{{", + "#!comment", + "", + star78, + "THIS PAGE WAS GENERATED AUTOMATICALLY, DO NOT EDIT.", + "", + "Generated from " + ident, + " by $Id$", + star78, + "", + "}}}", + ""))) + if toc is not None: + f.write("[[TracNav(%s)]]\n" % toc) + f.write("[[PageOutline]]\n") + +def conf_header(f, ident): + f.write("\n".join(( + "# Automatically generated. 
Edit as needed, but be careful of overwriting.", + "#", + "# Generated from " + ident, + "# by $Id$", + ""))) + + +# http://stackoverflow.com/questions/9027028/argparse-argument-order + +class CustomAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string = None): + if not "ordered_args" in namespace: + namespace.ordered_args = [] + namespace.ordered_args.append((self.dest, values)) + +class CustomFlagAction(argparse.Action): + def __init__(self, option_strings, dest, default = None, + required = False, help = None): + super(CustomFlagAction, self).__init__( + option_strings = option_strings, dest = dest, nargs = 0, + const = None, default = default, required = required, help = help) + def __call__(self, parser, namespace, values, option_string = None): + if not "ordered_args" in namespace: + namespace.ordered_args = [] + namespace.ordered_args.append((self.dest, None)) + + +class main(object): + + def __init__(self): + self.sections = [] + self.section_map = None + self.option_map = None + self.ident = None + self.toc = None + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("--read-xml", metavar = "FILE", action = CustomAction, + required = True, type = argparse.FileType("r"), + help = "XML input file defining sections and options") + parser.add_argument("--write-xml", metavar = "FILE", action = CustomAction, + help = "XML file to write") + parser.add_argument("--write-wiki", metavar = "FILE", action = CustomAction, + help = "TracWiki file to write") + parser.add_argument("--write-conf", metavar = "FILE", action = CustomAction, + help = "rpki.conf configuration file to write") + parser.add_argument("--set", metavar = "VARVAL", action = CustomAction, + help = "variable setting in form \"VAR=VAL\"") + parser.add_argument("--pwgen", metavar = "VAR", action = CustomAction, + help = "set variable to generated password") + parser.add_argument("--toc", metavar = "TRACNAV", action = CustomAction, + help = "set 
TOC value to use with TracNav plugin") + parser.add_argument("--autoconf", action = CustomFlagAction, + help = "configure [autoconf] section") + args = parser.parse_args() + + for cmd, arg in args.ordered_args: + getattr(self, "do_" + cmd)(arg) + + + def do_read_xml(self, arg): + self.option_map = None + root = ElementTree(file = arg).getroot() + self.ident = root.get("ident") + self.sections.extend(Section.from_xml(x) for x in root) + self.option_map = {} + self.section_map = {} + for section in self.sections: + if section.name in self.section_map: + sys.exit("Duplicate section %s" % section.name) + self.section_map[section.name] = section + for option in section.options: + name = (section.name, option.name) + if name in self.option_map: + sys.exit("Duplicate option %s::%s" % name) + self.option_map[name] = option + + + def do_set(self, arg): + try: + name, value = arg.split("=", 1) + section, option = name.split("::") + except ValueError: + sys.exit("Couldn't parse --set specification \"%s\"" % arg) + name = (section, option) + if name not in self.option_map: + sys.exit("Couldn't find option %s::%s" % name) + self.option_map[name].value = value + + + def do_pwgen(self, arg): + try: + section, option = arg.split("::") + except ValueError: + sys.exit("Couldn't parse --pwgen specification \"%s\"" % arg) + name = (section, option) + if name not in self.option_map: + sys.exit("Couldn't find option %s::%s" % name) + self.option_map[name].value = base64.urlsafe_b64encode(os.urandom(66)) + + + def do_autoconf(self, ignored): + try: + import rpki.autoconf + for option in self.section_map["autoconf"].options: + try: + option.value = getattr(rpki.autoconf, option.name) + except AttributeError: + pass + except ImportError: + sys.exit("rpki.autoconf module is not available") + except KeyError: + sys.exit("Couldn't find autoconf section") + + + def do_write_xml(self, arg): + x = Element("configuration", ident = self.ident) + x.extend(s.to_xml() for s in self.sections) + 
ElementTree(x).write(arg, pretty_print = True, encoding = "us-ascii") + + + def do_write_wiki(self, arg): + if "%" in arg: + for section in self.sections: + with open(arg % section.name, "w") as f: + wiki_header(f, self.ident, self.toc) + section.to_wiki(f) + else: + with open(arg, "w") as f: + for i, section in enumerate(self.sections): + if i == 0: + wiki_header(f, self.ident, self.toc) + else: + f.write("\f\n") + section.to_wiki(f) + + + def do_write_conf(self, arg): + with open(arg, "w") as f: + conf_header(f, self.ident) + width = max(s.width for s in self.sections) + for section in self.sections: + section.to_conf(f, width) + + + def do_toc(self, arg): + self.toc = arg + + +if __name__ == "__main__": + main() diff --git a/ca/rpki-confgen.xml b/ca/rpki-confgen.xml new file mode 100644 index 00000000..e0ed273a --- /dev/null +++ b/ca/rpki-confgen.xml @@ -0,0 +1,900 @@ + + + + +
+ + + The "`[myrpki]`" section contains all the parameters that you + really need to configure. The name "`myrpki`" is historical and + may change in the future. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + rpkid's default config file is the system `rpki.conf` file. + Start rpkid with "`-c filename`" to choose a different config + file. All options are in the "`[rpkid]`" section. BPKI + Certificates and keys may be in either DER or PEM format. + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + irdbd's default configuration file is the system `rpki.conf` + file. Start irdbd with "`-c filename`" to choose a different + configuration file. All options are in the "`[irdbd]`" section. + + + + Since irdbd is part of the back-end system, it has direct access to + the back-end's SQL database, and thus is able to pull its own BPKI + configuration directly from the database, and thus needs a bit less + configuration than the other daemons. + + + + + + + + + + + + + + +
+ +
+ + + pubd's default configuration file is the system `rpki.conf` + file. Start pubd with "`-c filename`" to choose a different + configuration file. All options are in the "`[pubd]`" section. + BPKI certificates and keys may be either DER or PEM format. + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + You don't need to run rootd unless you're IANA, are certifying + private address space, or are an RIR which refuses to accept IANA as + the root of the public address hierarchy. + + + + Ok, if that wasn't enough to scare you off: rootd is a mess, and + needs to be rewritten, or, better, merged into rpkid. It + doesn't use the publication protocol, and it requires far too + many configuration parameters. + + + + rootd was originally intended to be a very simple program which + simplified rpkid enormously by moving one specific task (acting + as the root CA of an RPKI certificate hierarchy) out of rpkid. + As the specifications and code (mostly the latter) have evolved, + however, this task has become more complicated, and rootd would + have to become much more complicated to keep up. + + + + Don't run rootd unless you're sure that you need to do so. + + + + Still think you need to run rootd? OK, but remember, you have + been warned.... + + + + rootd's default configuration file is the system `rpki.conf` + file. Start rootd with "`-c filename`" to choose a different + configuration file. All options are in the "`[rootd]`" section. + Certificates and keys may be in either DER or PEM format. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+ + + Glue to allow the Django application to pull user configuration + from this file rather than directly editing settings.py. + + + + + + + + + + + + +
+ +
+ + + rpki-confgen --autoconf records the current autoconf settings + here, so that other options can refer to them. The section name + "autoconf" is magic, don't change it. + + + + + + + + + + +
+ +
diff --git a/ca/rpki-manage b/ca/rpki-manage new file mode 100755 index 00000000..0d581ce9 --- /dev/null +++ b/ca/rpki-manage @@ -0,0 +1,13 @@ +#!/usr/bin/env python + +import os +from django.core.management import execute_from_command_line + +# django-admin seems to have problems creating the superuser account when +# $LANG is unset or is set to something totally incompatible with UTF-8. +if os.environ.get('LANG') in (None, "", "C"): + os.environ['LANG'] = 'en_US.UTF-8' + +os.environ['DJANGO_SETTINGS_MODULE'] = 'rpki.gui.default_settings' + +execute_from_command_line() diff --git a/ca/rpki-sql-backup b/ca/rpki-sql-backup new file mode 100755 index 00000000..0b2d079d --- /dev/null +++ b/ca/rpki-sql-backup @@ -0,0 +1,53 @@ +#!/usr/bin/env python + +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2010-2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Back up data from SQL databases, looking at config file to figure out +which databases and what credentials to use with them. 
+""" + +import subprocess +import os +import argparse +import sys +import time +import rpki.config + +os.environ["TZ"] = "UTC" +time.tzset() + +parser = argparse.ArgumentParser(description = __doc__) +parser.add_argument("-c", "--config", + help = "override default location of configuration file") +parser.add_argument("-o", "--output", + type = argparse.FileType("wb"), default = sys.stdout, + help = "destination for SQL dump (default: stdout)") +args = parser.parse_args() + +cfg = rpki.config.parser(args.config, "myrpki") + +for name in ("rpkid", "irdbd", "pubd"): + if cfg.getboolean("start_" + name, False): + subprocess.check_call( + ("mysqldump", "--add-drop-database", + "-u", cfg.get("sql-username", section = name), + "-p" + cfg.get("sql-password", section = name), + "-B", cfg.get("sql-database", section = name)), + stdout = args.output) diff --git a/ca/rpki-sql-setup b/ca/rpki-sql-setup new file mode 100755 index 00000000..40a78532 --- /dev/null +++ b/ca/rpki-sql-setup @@ -0,0 +1,311 @@ +#!/usr/bin/env python + +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009-2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +import os +import sys +import glob +import getpass +import argparse +import datetime +import rpki.config +import rpki.version +import rpki.autoconf +import rpki.sql_schemas + +from rpki.mysql_import import MySQLdb, _mysql_exceptions + +ER_NO_SUCH_TABLE = 1146 # See mysqld_ername.h + + +class RootDB(object): + """ + Class to wrap MySQL actions that require root-equivalent access so + we can defer such actions until we're sure they're really needed. + Overall goal here is to prompt the user for the root password once + at most, and not at all when not necessary. + """ + + def __init__(self, mysql_defaults = None): + self.initialized = False + self.mysql_defaults = mysql_defaults + + def __getattr__(self, name): + if self.initialized: + raise AttributeError + if self.mysql_defaults is None: + self.db = MySQLdb.connect(db = "mysql", + user = "root", + passwd = getpass.getpass("Please enter your MySQL root password: ")) + else: + mysql_cfg = rpki.config.parser(self.mysql_defaults, "client") + self.db = MySQLdb.connect(db = "mysql", + user = mysql_cfg.get("user"), + passwd = mysql_cfg.get("password")) + self.cur = self.db.cursor() + self.cur.execute("SHOW DATABASES") + self.databases = set(d[0] for d in self.cur.fetchall()) + self.initialized = True + return getattr(self, name) + + def close(self): + if self.initialized: + self.db.close() + + +class UserDB(object): + """ + Class to wrap MySQL access parameters for a particular database. + + NB: The SQL definitions for the upgrade_version table is embedded in + this class rather than being declared in any of the .sql files. + This is deliberate: nothing but the upgrade system should ever touch + this table, and it's simpler to keep everything in one place. + + We have to be careful about SQL commits here, because CREATE TABLE + implies an automatic commit. So presence of the magic table per se + isn't significant, only its content (or lack thereof). 
+ """ + + upgrade_version_table_schema = """ + CREATE TABLE upgrade_version ( + version TEXT NOT NULL, + updated DATETIME NOT NULL + ) ENGINE=InnoDB + """ + + def __init__(self, name): + self.name = name + self.database = cfg.get("sql-database", section = name) + self.username = cfg.get("sql-username", section = name) + self.password = cfg.get("sql-password", section = name) + self.db = None + self.cur = None + + def open(self): + self.db = MySQLdb.connect(db = self.database, user = self.username, passwd = self.password) + self.db.autocommit(False) + self.cur = self.db.cursor() + + def close(self): + if self.cur is not None: + self.cur.close() + self.cur = None + if self.db is not None: + self.db.commit() + self.db.close() + self.db = None + + @property + def exists_and_accessible(self): + try: + MySQLdb.connect(db = self.database, user = self.username, passwd = self.password).close() + except: + return False + else: + return True + + @property + def version(self): + try: + self.cur.execute("SELECT version FROM upgrade_version") + v = self.cur.fetchone() + return Version(None if v is None else v[0]) + except _mysql_exceptions.ProgrammingError, e: + if e.args[0] != ER_NO_SUCH_TABLE: + raise + log("Creating upgrade_version table in %s" % self.name) + self.cur.execute(self.upgrade_version_table_schema) + return Version(None) + + @version.setter + def version(self, v): + if v > self.version: + self.cur.execute("DELETE FROM upgrade_version") + self.cur.execute("INSERT upgrade_version (version, updated) VALUES (%s, %s)", (v, datetime.datetime.now())) + self.db.commit() + log("Updated %s to %s" % (self.name, v)) + + @property + def schema(self): + lines = [] + for line in getattr(rpki.sql_schemas, self.name, "").splitlines(): + line = " ".join(line.split()) + if line and not line.startswith("--"): + lines.append(line) + return [statement.strip() for statement in " ".join(lines).rstrip(";").split(";") if statement.strip()] + + +class Version(object): + """ + A version 
number. This is a class in its own right to force the + comparision and string I/O behavior we want. + """ + + def __init__(self, v): + if v is None: + v = "0.0" + self.v = tuple(v.lower().split(".")) + + def __str__(self): + return ".".join(self.v) + + def __cmp__(self, other): + return cmp(self.v, other.v) + + +class Upgrade(object): + """ + One upgrade script. Really, just its filename and the Version + object we parse from its filename, we don't need to read the script + itself except when applying it, but we do need to sort all the + available upgrade scripts into version order. + """ + + @classmethod + def load_all(cls, name, dir): + g = os.path.join(dir, "upgrade-%s-to-*.py" % name) + for fn in glob.iglob(g): + yield cls(g, fn) + + def __init__(self, g, fn): + head, sep, tail = g.partition("*") + self.fn = fn + self.version = Version(fn[len(head):-len(tail)]) + + def __cmp__(self, other): + return cmp(self.version, other.version) + + def apply(self, db): + # db is an argument here primarily so the script we exec can get at it + log("Applying %s to %s" % (self.fn, db.name)) + with open(self.fn, "r") as f: + exec f + + +def do_drop(name): + db = UserDB(name) + if db.database in root.databases: + log("DROP DATABASE %s" % db.database) + root.cur.execute("DROP DATABASE %s" % db.database) + root.db.commit() + +def do_create(name): + db = UserDB(name) + log("CREATE DATABASE %s" % db.database) + root.cur.execute("CREATE DATABASE %s" % db.database) + log("GRANT ALL ON %s.* TO %s@localhost IDENTIFIED BY ###" % (db.database, db.username)) + root.cur.execute("GRANT ALL ON %s.* TO %s@localhost IDENTIFIED BY %%s" % (db.database, db.username), + (db.password,)) + root.db.commit() + db.open() + for statement in db.schema: + if not statement.upper().startswith("DROP TABLE"): + log(statement) + db.cur.execute(statement) + db.version = current_version + db.close() + +def do_script_drop(name): + db = UserDB(name) + print "DROP DATABASE IF EXISTS %s;" % db.database + +def 
do_drop_and_create(name): + do_drop(name) + do_create(name) + +def do_fix_grants(name): + db = UserDB(name) + if not db.exists_and_accessible: + log("GRANT ALL ON %s.* TO %s@localhost IDENTIFIED BY ###" % (db.database, db.username)) + root.cur.execute("GRANT ALL ON %s.* TO %s@localhost IDENTIFIED BY %%s" % (db.database, db.username), + (db.password,)) + root.db.commit() + +def do_create_if_missing(name): + db = UserDB(name) + if not db.exists_and_accessible: + do_create(name) + +def do_apply_upgrades(name): + upgrades = sorted(Upgrade.load_all(name, args.upgrade_scripts)) + if upgrades: + db = UserDB(name) + db.open() + log("Current version of %s is %s" % (db.name, db.version)) + for upgrade in upgrades: + if upgrade.version > db.version: + upgrade.apply(db) + db.version = upgrade.version + db.version = current_version + db.close() + +def log(text): + if args.verbose: + print "#", text + +parser = argparse.ArgumentParser(description = """\ +Automated setup of all SQL stuff used by the RPKI CA tools. Pulls +configuration from rpki.conf, prompts for MySQL password when needed. 
+""") +group = parser.add_mutually_exclusive_group() +parser.add_argument("-c", "--config", + help = "specify alternate location for rpki.conf") +parser.add_argument("-v", "--verbose", action = "store_true", + help = "whistle while you work") +parser.add_argument("--mysql-defaults", + help = "specify MySQL root access credentials via a configuration file") +parser.add_argument("--upgrade-scripts", + default = os.path.join(rpki.autoconf.datarootdir, "rpki", "upgrade-scripts"), + help = "override default location of upgrade scripts") +group.add_argument("--create", + action = "store_const", dest = "dispatch", const = do_create, + help = "create databases and load schemas") +group.add_argument("--drop", + action = "store_const", dest = "dispatch", const = do_drop, + help = "drop databases") +group.add_argument("--script-drop", + action = "store_const", dest = "dispatch", const = do_script_drop, + help = "send SQL commands to drop databases to standard output") +group.add_argument("--drop-and-create", + action = "store_const", dest = "dispatch", const = do_drop_and_create, + help = "drop databases then recreate them and load schemas") +group.add_argument("--fix-grants", + action = "store_const", dest = "dispatch", const = do_fix_grants, + help = "whack database access to match current configuration file") +group.add_argument("--create-if-missing", + action = "store_const", dest = "dispatch", const = do_create_if_missing, + help = "create databases and load schemas if they don't exist already") +group.add_argument("--apply-upgrades", + action = "store_const", dest = "dispatch", const = do_apply_upgrades, + help = "apply upgrade scripts to existing databases") +parser.set_defaults(dispatch = do_create_if_missing) +args = parser.parse_args() + +try: + cfg = rpki.config.parser(args.config, "myrpki") + root = RootDB(args.mysql_defaults) + current_version = Version(rpki.version.VERSION) + for name in ("irdbd", "rpkid", "pubd"): + if cfg.getboolean("start_" + name, False): + 
args.dispatch(name) + root.close() +except Exception, e: + #sys.exit(str(e)) + raise diff --git a/ca/rpki-start-servers b/ca/rpki-start-servers new file mode 100755 index 00000000..edaffb2e --- /dev/null +++ b/ca/rpki-start-servers @@ -0,0 +1,76 @@ +#!/usr/bin/env python + +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2013 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Start servers, using config file to figure out which servers the user +wants started. 
+""" + +import subprocess +import os +import argparse +import sys +import time +import rpki.config +import rpki.autoconf + +os.environ["TZ"] = "UTC" +time.tzset() + +parser = argparse.ArgumentParser(description = __doc__) +parser.add_argument("-c", "--config", + help = "override default location of configuration file") +parser.add_argument("-d", "--debug", action = "store_true", + help = "enable debugging") +parser.add_argument("--logdir", default = ".", + help = "where to write log files when debugging") +args = parser.parse_args() + +cfg = rpki.config.parser(args.config, "myrpki") + +def run(name): + # pylint: disable=E1103 + cmd = (os.path.join(rpki.autoconf.libexecdir, name), "-c", cfg.filename) + if args.debug: + proc = subprocess.Popen(cmd + ("-d",), + stdout = open(os.path.join(args.logdir, name + ".log"), "a"), + stderr = subprocess.STDOUT) + else: + proc = subprocess.Popen(cmd) + if args.debug and proc.poll() is None: + print "Started %s, pid %s" % (name, proc.pid) + elif not args.debug and proc.wait() == 0: + print "Started %s" % name + else: + print "Problem starting %s, pid %s" % (name, proc.pid) + + +if cfg.getboolean("start_irdbd", cfg.getboolean("run_rpkid", False)): + run("irdbd") + +if cfg.getboolean("start_rpkid", cfg.getboolean("run_rpkid", False)): + run("rpkid") + +if cfg.getboolean("start_pubd", cfg.getboolean("run_pubd", False)): + run("pubd") + +if cfg.getboolean("start_rootd", cfg.getboolean("run_rootd", False)): + run("rootd") diff --git a/ca/rpki.wsgi b/ca/rpki.wsgi new file mode 100644 index 00000000..72ba75ac --- /dev/null +++ b/ca/rpki.wsgi @@ -0,0 +1,45 @@ +# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +# This is an example wsgi application for use with mod_wsgi and apache. + +__version__ = '$Id$' + +import sys +import os +import rpki.autoconf + +os.environ['DJANGO_SETTINGS_MODULE'] = 'rpki.gui.default_settings' + +# Needed for local_settings.py +sys.path.insert(1, rpki.autoconf.sysconfdir + '/rpki') + +# Kludge to disable use of setproctitle in rpki.log. For reasons +# unknown, at least on Ubuntu 12.04 LTS, we dump core with a segment +# violation if we try to load that module in this process, even though +# it works fine in other processes on the same system. Not yet sure +# what this is about, just disable setproctitle in WSGI case for now. +os.environ['DISABLE_SETPROCTITLE'] = 'yes' + +# Kludge to set PYTHON_EGG_CACHE, mostly for FreeBSD where the ports +# system installs Python eggs in their zipped format and expects each +# user application to unpack them into its own egg cache. 
+if not os.environ.get('PYTHON_EGG_CACHE') and rpki.autoconf.WSGI_PYTHON_EGG_CACHE_DIR: + os.environ['PYTHON_EGG_CACHE'] = rpki.autoconf.WSGI_PYTHON_EGG_CACHE_DIR + +import django.core.handlers.wsgi +application = django.core.handlers.wsgi.WSGIHandler() + +# vim:ft=python diff --git a/ca/rpkic b/ca/rpkic new file mode 100755 index 00000000..333a5eb7 --- /dev/null +++ b/ca/rpkic @@ -0,0 +1,21 @@ +#!/usr/bin/env python + +# $Id$ +# +# Copyright (C) 2010-2011 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +if __name__ == "__main__": + import rpki.rpkic + rpki.rpkic.main() diff --git a/ca/rpkid b/ca/rpkid new file mode 100755 index 00000000..a4cc6cd3 --- /dev/null +++ b/ca/rpkid @@ -0,0 +1,21 @@ +#!/usr/bin/env python + +# $Id$ +# +# Copyright (C) 2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +if __name__ == "__main__": + import rpki.rpkid + rpki.rpkid.main() diff --git a/ca/rpkigui-apache-conf-gen b/ca/rpkigui-apache-conf-gen new file mode 100755 index 00000000..6201c364 --- /dev/null +++ b/ca/rpkigui-apache-conf-gen @@ -0,0 +1,483 @@ +#!/usr/bin/env python + +# $Id$ +# +# Copyright (C) 2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +import os +import re +import sys +import socket +import urllib2 +import argparse +import platform +import textwrap +import subprocess +import rpki.autoconf + +fqdn = socket.getfqdn() + +vhost_template = """\ +# +# By default, this configuration assumes that you use name-based +# virtual hosting. If that's not what you want, you may need +# to change this. +# + + + # + # By default, we enable an HTTPS virtual host on this machine's + # fully qualified domain name. 
This works for simple + # configurations, but if you're running a more complex Apache + # configuration or want to run the GUI on a different hostname, + # you may need to change this. + # + ServerName %(fqdn)s + + # + # Configure the WSGI application to run as a separate process from + # the Apache daemon itself. + # + %(WSGI_DAEMON_PROCESS)s + %(WSGI_PROCESS_GROUP)s + + # + # Allow access to our WSGI directory. + # + +%(allow)s + + + # + # Define the URL to the RPKI GUI + # + WSGIScriptAlias / %(datarootdir)s/rpki/wsgi/rpki.wsgi + + # + # Allow access to static content (icons, etc). + # + +%(allow)s + + + # + # Add the aliases Django expects for static content. + # + Alias /media/ %(datarootdir)s/rpki/media/ + Alias /site_media/ %(datarootdir)s/rpki/media/ + + # + # Allow access to the directory where rcynic-html writes + # its output files. + # + +%(allow)s + + + # + # Add alias pointing to rcynic-html's output files. + # + # If for some reason you need to change this, be careful to leave + # the trailing slash off the URL, otherwise /rcynic will be + # swallowed by the WSGIScriptAlias + # + Alias /rcynic %(RCYNIC_HTML_DIR)s/ + + # + # Redirect to the GUI dashboard when someone hits the bare vhost. + # + RedirectMatch ^/$ /rpki/ + + # + # Enable HTTPS + # + SSLEngine on + + # + # Specify HTTPS server certificate and key files for this virtual host. + # This should suffice for simple configurations, but if you're running + # a more complex Apache configuration you may need to change or remove + # these lines. 
+ # + SSLCertificateFile %(sysconfdir)s/rpki/apache.cer + SSLCertificateKeyFile %(sysconfdir)s/rpki/apache.key + + # + # Take pity on users running Internet Exploder + # + BrowserMatch "MSIE [2-6]" ssl-unclean-shutdown nokeepalive downgrade-1.0 force-response-1.0 + BrowserMatch "MSIE [17-9]" ssl-unclean-shutdown + + +""" + +allow_22_template = ''' + Order deny,allow + Allow from all\ +''' + +allow_24_template = ''' + Require all granted\ +''' + +name_virtual_host_template = '''\ +# +# In most cases we want to use name-based virtual hosting. If this causes +# problems with your existing Apache configuration, try commenting out this line. +# +NameVirtualHost *:443 + +''' + +def Guess(args): + """ + Guess what platform this is and dispatch to platform constructor. + """ + + system = platform.system() + if system == "FreeBSD": + return FreeBSD(args) + if system == "Darwin": + return Darwin(args) + if system == "Linux": + distro = platform.linux_distribution()[0].lower() + if distro in ("debian", "ubuntu"): + return Debian(args) + if distro in ("fedora", "centos"): + return Redhat(args) + raise NotImplementedError("Can't guess what platform this is, sorry") + +class Platform(object): + """ + Abstract base class representing an operating system platform. 
+ """ + + apache_cer = os.path.join(rpki.autoconf.sysconfdir, "rpki", "apache.cer") + apache_key = os.path.join(rpki.autoconf.sysconfdir, "rpki", "apache.key") + + apache_conf = os.path.join(rpki.autoconf.sysconfdir, "rpki", "apache.conf") + apache_conf_sample = apache_conf + ".sample" + + apache_conf_preface = "" + + def __init__(self, args): + self.args = args + self.log("RPKI Apache configuration: platform \"%s\", action \"%s\"" % ( + self.__class__.__name__, args.action)) + getattr(self, args.action)() + + def log(self, msg): + if self.args.verbose: + print msg + + def run(self, *cmd, **kwargs): + self.log("Running %s" % " ".join(cmd)) + subprocess.check_call(cmd, **kwargs) + + req_cmd = ("openssl", "req", "-new", + "-config", "/dev/stdin", + "-out", "/dev/stdout", + "-keyout", apache_key, + "-newkey", "rsa:2048") + + x509_cmd = ("openssl", "x509", "-req", "-sha256", + "-signkey", apache_key, + "-in", "/dev/stdin", + "-out", apache_cer, + "-days", "3650") + + req_conf = '''\ + [req] + default_bits = 2048 + default_md = sha256 + distinguished_name = req_dn + prompt = no + encrypt_key = no + [req_dn] + CN = %s + ''' % fqdn + + def unlink(self, fn, silent = False): + if os.path.lexists(fn): + if not silent: + self.log("Removing %s" % fn) + os.unlink(fn) + elif not silent: + self.log("Would have removed %s if it existed" % fn) + + def del_certs(self, silent = False): + self.unlink(self.apache_cer, silent) + self.unlink(self.apache_key, silent) + + def add_certs(self): + if os.path.exists(self.apache_cer) and os.path.exists(self.apache_key): + return + self.del_certs() + req = subprocess.Popen(self.req_cmd, + stdin = subprocess.PIPE, + stdout = subprocess.PIPE, + stderr = open("/dev/null", "w")) + x509 = subprocess.Popen(self.x509_cmd, + stdin = req.stdout, + stderr = open("/dev/null", "w")) + req.stdin.write(self.req_conf) + req.stdin.close() + if req.wait(): + raise subprocess.CalledProcessError(req.returncode, self.req_cmd) + if x509.wait(): + raise 
subprocess.CalledProcessError(x509.returncode, self.x509_cmd) + self.log("Created %s and %s, chmoding %s" % ( + self.apache_cer, self.apache_key, self.apache_key)) + os.chmod(self.apache_key, 0600) + + _vhost = None + + @property + def vhost(self): + if self._vhost is None: + allow = allow_22_template if self.args.apache_version <= 22 else allow_24_template + self._vhost = vhost_template % dict(rpki.autoconf.__dict__, fqdn = fqdn, allow = allow) + return self._vhost + + @property + def name_virtual_host(self): + return name_virtual_host_template if self.args.apache_version <= 22 else "" + + @property + def too_complex(self): + return textwrap.dedent('''\ + # It looks like you already have HTTPS enabled in your + # Apache configuration, which makes your configuration too + # complex for us to enable support for the RPKI GUI automatically. + # + # To enable support, take a look at %s + # and copy what you need from that file into %s, + # paying attention to the comments which mark the bits that + # you might (or might not) need to change or omit, depending + # on the details of your particular Apache configuration. 
+ ''' % (self.apache_conf_sample, self.apache_conf)) + + def install(self): + with open(self.apache_conf_sample, "w") as f: + self.log("Writing %s" % f.name) + f.write(self.apache_conf_preface) + f.write(self.name_virtual_host) + f.write(self.vhost) + if not os.path.exists(self.apache_conf): + self.unlink(self.apache_conf) + with open(self.apache_conf, "w") as f: + self.log("Writing %s" % f.name) + if self.test_url("https://%s/" % fqdn): + f.write(self.too_complex) + sys.stdout.write(self.too_complex) + else: + if not self.test_tcp("localhost", 443): + f.write(self.apache_conf_preface) + f.write(self.name_virtual_host) + f.write(self.vhost) + if not os.path.exists(self.apache_conf_target): + self.unlink(self.apache_conf_target) + self.log("Symlinking %s to %s" % ( + self.apache_conf_target, self.apache_conf)) + os.symlink(self.apache_conf, self.apache_conf_target) + self.add_certs() + self.enable() + self.restart() + + def enable(self): + pass + + def disable(self): + pass + + def remove(self): + try: + same = open(self.apache_conf, "r").read() == open(self.apache_conf_sample, "r").read() + except: + same = False + self.unlink(self.apache_conf_sample) + if same: + self.unlink(self.apache_conf) + self.unlink(self.apache_conf_target) + self.disable() + self.restart() + + def purge(self): + self.remove() + self.unlink(self.apache_conf) + self.del_certs() + + @staticmethod + def test_url(url = "https://localhost/"): + try: + urllib2.urlopen(url).close() + except IOError: + return False + else: + return True + + @staticmethod + def test_tcp(host = "localhost", port = 443, family = socket.AF_UNSPEC, proto = socket.SOCK_STREAM): + try: + addrinfo = socket.getaddrinfo(host, port, family, proto) + except socket.error: + return False + for af, socktype, proto, canon, sa in addrinfo: + try: + s = socket.socket(af, socktype, proto) + s.connect(sa) + s.close() + except socket.error: + continue + else: + return True + return False + +class FreeBSD(Platform): + """ + FreeBSD. 
+ """ + + # On FreeBSD we have to ask httpd what version it is before we know + # where to put files or what to call the service. In FreeBSD's makefiles, + # this value is called APACHE_VERSION, and is calculated thusly: + # + # httpd -V | sed -ne 's/^Server version: Apache\/\([0-9]\)\.\([0-9]*\).*/\1\2/p' + + _apache_name = None + + @property + def apache_name(self): + if self._apache_name is None: + self._apache_name = "apache%s" % self.args.apache_version + return self._apache_name + + @property + def apache_conf_target(self): + return "/usr/local/etc/%s/Includes/rpki.conf" % self.apache_name + + apache_conf_preface = textwrap.dedent('''\ + # These directives tell Apache to listen on the HTTPS port + # and to enable name-based virtual hosting. If you already + # have HTTPS enabled elsewhere in your configuration, you may + # need to remove these. + + Listen [::]:443 + Listen 0.0.0.0:443 + ''') + + def restart(self): + self.run("service", self.apache_name, "restart") + +class Debian(Platform): + """ + Debian and related platforms like Ubuntu. + """ + + apache_conf_target = "/etc/apache2/sites-available/rpki" + + snake_oil_cer = "/etc/ssl/certs/ssl-cert-snakeoil.pem" + snake_oil_key = "/etc/ssl/private/ssl-cert-snakeoil.key" + + def add_certs(self): + if not os.path.exists(self.snake_oil_cer) or not os.path.exists(self.snake_oil_key): + return Platform.add_certs(self) + if not os.path.exists(self.apache_cer): + self.unlink(self.apache_cer) + os.symlink(self.snake_oil_cer, self.apache_cer) + if not os.path.exists(self.apache_key): + self.unlink(self.apache_key) + os.symlink(self.snake_oil_key, self.apache_key) + + def enable(self): + self.run("a2enmod", "ssl") + self.run("a2ensite", "rpki") + # + # In light of BREACH and CRIME attacks, mod_deflate is looking + # like a bad idea, so make sure it's off. 
+ self.run("a2dismod", "deflate") + + def disable(self): + self.run("a2dissite", "rpki") + + def restart(self): + self.run("service", "apache2", "restart") + +class NIY(Platform): + def __init__(self, args): + raise NotImplementedError("Platform %s not implemented yet, sorry" % self.__class__.__name__) + +class Redhat(NIY): + """ + Redhat family of Linux distributions (Fedora, CentOS). + """ + +class Darwin(NIY): + """ + Mac OS X (aka Darwin). + """ + +def main(): + """ + Generate and (de)install configuration suitable for using Apache httpd + to drive the RPKI web interface under WSGI. + """ + + parser = argparse.ArgumentParser(description = __doc__) + group1 = parser.add_mutually_exclusive_group() + group2 = parser.add_mutually_exclusive_group() + + parser.add_argument("-v", "--verbose", + help = "whistle while you work", action = "store_true") + parser.add_argument("--apache-version", + help = "Apache version (default " + rpki.autoconf.APACHE_VERSION + ")", + type = int, default = rpki.autoconf.APACHE_VERSION) + + group1.add_argument("--freebsd", + help = "configure for FreeBSD", + action = "store_const", dest = "platform", const = FreeBSD) + group1.add_argument("--debian", "--ubuntu", + help = "configure for Debian/Ubuntu", + action = "store_const", dest = "platform", const = Debian) + group1.add_argument("--redhat", "--fedora", "--centos", + help = "configure for Redhat/Fedora/CentOS", + action = "store_const", dest = "platform", const = Redhat) + group1.add_argument("--macosx", "--darwin", + help = "configure for Mac OS X (Darwin)", + action = "store_const", dest = "platform", const = Darwin) + group1.add_argument("--guess", + help = "guess which platform configuration to use", + action = "store_const", dest = "platform", const = Guess) + + group2.add_argument("-i", "--install", + help = "install configuration", + action = "store_const", dest = "action", const = "install") + group2.add_argument("-r", "--remove", "--deinstall", "--uninstall", + help = "remove 
configuration", + action = "store_const", dest = "action", const = "remove") + group2.add_argument("-P", "--purge", + help = "remove configuration with extreme prejudice", + action = "store_const", dest = "action", const = "purge") + + parser.set_defaults(platform = Guess, action = "install") + args = parser.parse_args() + + try: + args.platform(args) + except Exception, e: + sys.exit(str(e)) + +if __name__ == "__main__": + main() diff --git a/ca/rpkigui-check-expired b/ca/rpkigui-check-expired new file mode 100755 index 00000000..eb0c7fbb --- /dev/null +++ b/ca/rpkigui-check-expired @@ -0,0 +1,61 @@ +#!/usr/bin/env python + +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from optparse import OptionParser +import logging +import sys + +# configure django ORM +from rpki.gui.script_util import setup +setup() + +from rpki.gui.app.check_expired import notify_expired + +usage = '%prog [ -nV ] [ handle1 handle2... ]' + +description = """Generate a report detailing all RPKI/BPKI certificates which +are due for impending expiration. 
If no resource handles are specified, a +report about all resource handles hosted by the local rpkid instance will be +generated.""" + +parser = OptionParser(usage, description=description) +parser.add_option('-V', '--version', help='display script version', + action='store_true', dest='version', default=False) +parser.add_option('-f', '--from', metavar='ADDRESS', dest='from_email', + help='specify the return email address for notifications') +parser.add_option('-t', '--expire-time', dest='expire_days', metavar='DAYS', + help='specify the number of days in the future to check') +parser.add_option('-l', '--level', dest='log_level', default='WARNING', + help='Set logging level [Default: %default]') +(options, args) = parser.parse_args() +if options.version: + print __version__ + sys.exit(0) + +v = getattr(logging, options.log_level.upper()) +logging.basicConfig(level=v) +logging.info('logging level set to ' + logging.getLevelName(v)) + +kwargs = {} +if options.from_email: + kwargs['from_email'] = options.from_email +if options.expire_days: + kwargs['expire_days'] = int(options.expire_days) +notify_expired(**kwargs) + +sys.exit(0) diff --git a/ca/rpkigui-import-routes b/ca/rpkigui-import-routes new file mode 100755 index 00000000..234a865b --- /dev/null +++ b/ca/rpkigui-import-routes @@ -0,0 +1,115 @@ +#!/usr/bin/env python + +# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +import optparse +import logging +import time +import random +import signal +import errno +import atexit +import fcntl +import sys +import os + +# configure django ORM +from rpki.gui.script_util import setup +setup() + +from rpki.gui.routeview.util import import_routeviews_dump + + +class BadArgument(Exception): + pass + + +def timed_out(*ignored): + logging.info('timed out') + sys.exit(1) + + +if __name__ == '__main__': + parser = optparse.OptionParser( + usage='%prog [options] [PATH]', + description="""This tool is used to import the IPv4/6 BGP table dumps +from routeviews.org into the RPKI Web Portal database. If the +input file is a bzip2 compressed file, it will be decompressed +automatically.""") + parser.add_option('-t', '--type', dest='filetype', metavar='TYPE', + help='Specify the input file type (auto, text, mrt) [Default: %default]') + parser.add_option('-l', '--level', dest='log_level', default='ERROR', + help='Set logging level [Default: %default]') + parser.add_option('-u', '--bunzip2', dest='bunzip', metavar='PROG', + help='Specify bunzip2 program to use') + parser.add_option('-b', '--bgpdump', dest='bgpdump', metavar='PROG', + help='Specify path to bgdump binary') + parser.add_option('-j', '--jitter', dest='jitter', type='int', + help='Specify upper bound of startup delay, in seconds [Default: %default]') + parser.add_option('--lockfile', dest='lockfile', + help='Set name of lock file; empty string disables locking [Default: %default]') + parser.add_option('--timeout', dest='timeout', type='int', + help='Specify timeout for download and import, in seconds [Default: %default]') + 
parser.set_defaults(debug=False, verbose=False, filetype='auto', jitter=0, + lockfile='/tmp/rpkigui-import-routes.lock', timeout=90*60) + options, args = parser.parse_args() + + v = getattr(logging, options.log_level.upper()) + logging.basicConfig(level=v) + atexit.register(logging.shutdown) + logging.info('logging level set to ' + logging.getLevelName(v)) + + if options.bgpdump: + BGPDUMP = os.path.expanduser(options.bgpdump) + + if options.jitter > 0: + try: + delay = random.SystemRandom().randint(0, options.jitter) + except NotImplementedError: + delay = random.randint(0, options.jitter) + logging.info('jitter active, delaying startup for %d seconds' % delay) + time.sleep(delay) + + if options.lockfile: + try: + lock = os.open(options.lockfile, os.O_RDONLY | os.O_CREAT | os.O_NONBLOCK, 0666) + fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB) + except (IOError, OSError), e: + if e.errno == errno.EAGAIN: + logging.info('lock held by another process') + sys.exit(0) + else: + logging.exception(e) + sys.exit(1) + + try: + if len(args) > 1: + raise BadArgument('more than one filename specified') + + if options.timeout > 0: + signal.signal(signal.SIGALRM, timed_out) + signal.setitimer(signal.ITIMER_REAL, options.timeout) + + import_routeviews_dump(*args) + + if options.timeout > 0: + signal.setitimer(signal.ITIMER_REAL, 0) + + except Exception as e: + logging.exception(e) + sys.exit(1) + diff --git a/ca/rpkigui-query-routes b/ca/rpkigui-query-routes new file mode 100755 index 00000000..1f698f23 --- /dev/null +++ b/ca/rpkigui-query-routes @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# Copyright (C) 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +import sys +import optparse + +from rpki.gui.script_util import setup +setup() + +from rpki.gui.routeview import models as rv +from rpki.resource_set import resource_range_ip + +parser = optparse.OptionParser( + usage='%prog [options] PREFIX', + description='query the rpki web portal database for routes covering a ' + 'prefix specified as an argument, and display the validity and covering ' + 'ROAs for each route', + version=__version__, +) +options, args = parser.parse_args() + +if len(args) == 0: + print 'error: Specify an address/prefix' + sys.exit(1) + +# allow bare IP addresses +if '/' not in args[0]: + args[0] = args[0] + '/32' + +r = resource_range_ip.parse_str(args[0]) + +qs = rv.RouteOrigin.objects.filter( + prefix_min__lte=r.min, + prefix_max__gte=r.max +) + + +def validity_marker(route, roa, roa_prefix): + "Return + if the roa would cause the route to be accepted, or - if not" + # we already know the ROA covers this route because they are returned + # from RouteOrigin.roas, so just check the ASN and max prefix length + return '-' if (roa.asid == 0 or route.asn != roa.asid or + route.prefixlen > roa_prefix.max_length) else '+' + +# xxx.xxx.xxx.xxx/xx-xx is 22 characters +for route in qs: + print route.as_resource_range(), route.asn, route.status + for pfx in route.roa_prefixes: + for roa in pfx.roas.all(): + print validity_marker(route, roa, pfx), pfx.as_roa_prefix(), roa.asid, roa.repo.uri + print diff --git a/ca/rpkigui-rcynic 
b/ca/rpkigui-rcynic new file mode 100755 index 00000000..8fb91a70 --- /dev/null +++ b/ca/rpkigui-rcynic @@ -0,0 +1,54 @@ +#!/usr/bin/env python + +# Copyright (C) 2011 SPARTA, Inc. dba Cobham +# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +# probably should be exported from rpki.gui.cacheview.util +default_logfile = '/var/rcynic/data/rcynic.xml' +default_root = '/var/rcynic/data' + +import logging +import sys + +from rpki.gui.script_util import setup +setup() + +from rpki.gui.cacheview.util import import_rcynic_xml + +if __name__ == '__main__': + import optparse + + parser = optparse.OptionParser() + parser.add_option("-l", "--level", dest="log_level", default='ERROR', + help="specify the logging level [default: %default]") + parser.add_option( + "-f", "--file", dest="logfile", + help="specify the rcynic XML file to parse [default: %default]", + default=default_logfile) + parser.add_option( + "-r", "--root", + help="specify the chroot directory for the rcynic jail [default: %default]", + metavar="DIR", default=default_root) + options, args = parser.parse_args(sys.argv) + + v = getattr(logging, options.log_level.upper()) + logging.basicConfig(level=v) + logging.info('log level set to %s' % logging.getLevelName(v)) + + 
import_rcynic_xml(options.root, options.logfile) + + logging.shutdown() diff --git a/ca/tests/Makefile.in b/ca/tests/Makefile.in new file mode 100644 index 00000000..b63e8dc3 --- /dev/null +++ b/ca/tests/Makefile.in @@ -0,0 +1,91 @@ +# $Id$ + +PYTHON = @PYTHON@ +abs_top_builddir = @abs_top_builddir@ + +all: protocol-samples + +clean: + rm -rf smoketest.dir left-right-protocol-samples publication-protocol-samples yamltest.dir rcynic.xml rcynic-data + +protocol-samples: left-right-protocol-samples/.stamp publication-protocol-samples/.stamp + +left-right-protocol-samples/.stamp: left-right-protocol-samples.xml split-protocol-samples.xsl + rm -rf left-right-protocol-samples + mkdir left-right-protocol-samples + xsltproc --param verbose 0 --stringparam dir left-right-protocol-samples split-protocol-samples.xsl left-right-protocol-samples.xml + touch $@ + +publication-protocol-samples/.stamp: publication-protocol-samples.xml split-protocol-samples.xsl + rm -rf publication-protocol-samples + mkdir publication-protocol-samples + xsltproc --param verbose 0 --stringparam dir publication-protocol-samples split-protocol-samples.xsl publication-protocol-samples.xml + touch $@ + +parse-test: protocol-samples + ${PYTHON} xml-parse-test.py + +all-tests:: parse-test + +all-tests:: + ${PYTHON} smoketest.py smoketest.1.yaml + +all-tests:: + ${PYTHON} smoketest.py smoketest.2.yaml + +test all-tests:: + ${PYTHON} smoketest.py smoketest.3.yaml + +all-tests:: + ${PYTHON} smoketest.py smoketest.4.yaml + +all-tests:: + ${PYTHON} smoketest.py smoketest.5.yaml + +test all-tests:: + ${PYTHON} smoketest.py smoketest.6.yaml + +all-tests:: + ${PYTHON} smoketest.py smoketest.7.yaml + +profile: all + find smoketest.dir -name '*.prof' -delete + ${PYTHON} smoketest.py smoketest.2.yaml -p + for i in smoketest.dir/*.prof; do ${PYTHON} -c "import pstats;pstats.Stats('$$i').sort_stats('time').print_stats()"; done + +# This isn't a full exercise of the yamltest framework, but is +# probably as good as we 
can do under make. + +YAMLTEST_CONFIG = smoketest.1.yaml + +yamltest: + rm -rf yamltest.dir rcynic-data + ${PYTHON} sql-cleaner.py + ${PYTHON} yamltest.py ${YAMLTEST_CONFIG} + +YAMLCONF_CONFIG = ${YAMLTEST_CONFIG} + +yamlconf: + rm -rf yamltest.dir rcynic-data + ${PYTHON} sql-cleaner.py + ${PYTHON} yamlconf.py --loopback ${YAMLCONF_CONFIG} + @echo + ${PYTHON} yamltest.py --skip_config --synchronize ${YAMLCONF_CONFIG} + +yamltest-resume yamlconf-resume: + ${PYTHON} yamltest.py --skip_config ${YAMLCONF_CONFIG} + +yamlconf-profile: + rm -rf yamltest.dir rcynic-data + ${PYTHON} sql-cleaner.py + ${PYTHON} yamlconf.py --loopback --profile yamlconf.prof ${YAMLCONF_CONFIG} + @echo + ${PYTHON} yamltest.py --skip_config --synchronize --profile ${YAMLCONF_CONFIG} + +backup: + ${PYTHON} sql-dumper.py + tar cvvJf yamltest.backup.$$(TZ='' date +%Y.%m.%d.%H.%M.%S).txz screenlog.* yamltest.dir backup.*.sql + rm backup.*.sql + +distclean: clean + rm -f rcynic.xml Makefile diff --git a/ca/tests/left-right-protocol-samples.xml b/ca/tests/left-right-protocol-samples.xml new file mode 100644 index 00000000..7b97386d --- /dev/null +++ b/ca/tests/left-right-protocol-samples.xml @@ -0,0 +1,1093 @@ + + + + + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + 
hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + 
DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + 
MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + 
YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + + + + + + + + MIIDHTCCAgWgAwIBAgIJAKUUCoKn9ovVMA0GCSqGSIb3DQEBBQUAMCYxJDAiBgNV + BAMTG1Rlc3QgQ2VydGlmaWNhdGUgQWxpY2UgUm9vdDAeFw0wNzA4MDExOTUzMDda + Fw0wNzA4MzExOTUzMDdaMCQxIjAgBgNVBAMTGVRlc3QgQ2VydGlmaWNhdGUgQWxp + Y2UgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDmLngkGT5kWsXd + IgLeV+5zNvcDt0+D4cds1cu+bw6Y/23z1+ooA8fU1gXQ28bl6ELM8WRLHgcntqzr + 5UX6S1xPdNfFYt8z4E1ZuvwCPsxcSwVdlYRvzAGNQivDpcJ75Mf5DTeDpr6wm7yn + 2pzxvQIet5djOX51RVGA3hOwCbhq2ceHs0ZruWG3T70H3Sa1ZVxP7m0DJlsSZa6v + 3oEeFOKZQlqrgeU74mJyLAGx/fNbIw+UBrvejfjZobIv985vQ06DZ5S2AquQ2bht + O/2bW3yqeOjH98YK0zlOpYtaZ2fyx4JLjHCspoki6+4W9UG+TuqdkB20mRsr25XT + 9kLuwIGZAgMBAAGjUDBOMAwGA1UdEwQFMAMBAf8wHQYDVR0OBBYEFF6I4IR33h/s + vOa4Vsw2icPo8TgaMB8GA1UdIwQYMBaAFG9ed1KlOQDyB+k7Yeb8LSjG5FDtMA0G + CSqGSIb3DQEBBQUAA4IBAQDVzBuGyXIq/rfMjoNKIHTUgppkc+FjS02cFASpB5mk + ksSpGWYHMZKlqz47qDi44KAG+kmPIPOT0em81+/VGeY1oizJyKSeNDhNboth5oTu + ShDr4flTQCoYvRxm1wh8WIIg09nwibzGztuV1XxtdzfQV5kK5bMBlDXnUfAYydsO + jc52x5f4tgdcfBhjnMzkCAx2kvw5Wp3NekkOKl5YYnPK++zT9IBwqrqJmsJvyLPO + vvqVBYkoBWRbmcy6wVU8JpYegNNgVRbi6zeAq33gS75m9uy+4z8Ql6DqVF0s/y+/ + 240tLCW62X98EzrALKsxhkqVZCtdc5HSRaOQr0K3I03S + + + + + + + cmVxdWVzdAo= + + + + + + + MIIDHTCCAgWgAwIBAgIJAKUUCoKn9ovVMA0GCSqGSIb3DQEBBQUAMCYxJDAiBgNV + BAMTG1Rlc3QgQ2VydGlmaWNhdGUgQWxpY2UgUm9vdDAeFw0wNzA4MDExOTUzMDda + Fw0wNzA4MzExOTUzMDdaMCQxIjAgBgNVBAMTGVRlc3QgQ2VydGlmaWNhdGUgQWxp + Y2UgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDmLngkGT5kWsXd + IgLeV+5zNvcDt0+D4cds1cu+bw6Y/23z1+ooA8fU1gXQ28bl6ELM8WRLHgcntqzr + 5UX6S1xPdNfFYt8z4E1ZuvwCPsxcSwVdlYRvzAGNQivDpcJ75Mf5DTeDpr6wm7yn + 2pzxvQIet5djOX51RVGA3hOwCbhq2ceHs0ZruWG3T70H3Sa1ZVxP7m0DJlsSZa6v + 3oEeFOKZQlqrgeU74mJyLAGx/fNbIw+UBrvejfjZobIv985vQ06DZ5S2AquQ2bht + O/2bW3yqeOjH98YK0zlOpYtaZ2fyx4JLjHCspoki6+4W9UG+TuqdkB20mRsr25XT + 9kLuwIGZAgMBAAGjUDBOMAwGA1UdEwQFMAMBAf8wHQYDVR0OBBYEFF6I4IR33h/s + 
vOa4Vsw2icPo8TgaMB8GA1UdIwQYMBaAFG9ed1KlOQDyB+k7Yeb8LSjG5FDtMA0G + CSqGSIb3DQEBBQUAA4IBAQDVzBuGyXIq/rfMjoNKIHTUgppkc+FjS02cFASpB5mk + ksSpGWYHMZKlqz47qDi44KAG+kmPIPOT0em81+/VGeY1oizJyKSeNDhNboth5oTu + ShDr4flTQCoYvRxm1wh8WIIg09nwibzGztuV1XxtdzfQV5kK5bMBlDXnUfAYydsO + jc52x5f4tgdcfBhjnMzkCAx2kvw5Wp3NekkOKl5YYnPK++zT9IBwqrqJmsJvyLPO + vvqVBYkoBWRbmcy6wVU8JpYegNNgVRbi6zeAq33gS75m9uy+4z8Ql6DqVF0s/y+/ + 240tLCW62X98EzrALKsxhkqVZCtdc5HSRaOQr0K3I03S + + + MIIBfjBoAgEBMA0GCSqGSIb3DQEBCwUAMCYxJDAiBgNVBAMTG1Rlc3QgQ2VydGlm + aWNhdGUgUklSIFNFTEYtMRcNMDgwNTAxMDQ1MjAxWhcNMDgwNTMxMDQ1MjAxWqAO + MAwwCgYDVR0UBAMCAQEwDQYJKoZIhvcNAQELBQADggEBACTbbaYh+f4EtXFIKPwH + K2NYq/MrhE2BnHDyA43siryddtac1E2bOtXPkC74nY5yGm4wZU07qPovJNGu1McG + J2hV2uUyAN00lJU3EikrS1ewz7vqjINar1ZUMDkh0wMYKLB9S8SdwNvCf1vcjshz + yasBRse9PCH1R0bmDaP8FZM47P55dKiijaN87HQKyZPOExFslnWH+Nr+mAF1xost + pwGcc3jreVZWbtQ2RdUDJYcNrSSCH8JYqd5ZgAYcE53xxy43rKcULz054GDFcS/B + rprwJgfrjkPttAl80cfrVOUl77ZFfFxzOeHCmQMl9VSoCxmWvnBCBBO4H7meJ7NO + gyc= + + + + + + + + + + + + + + + + MIIDHTCCAgWgAwIBAgIJAKUUCoKn9ovVMA0GCSqGSIb3DQEBBQUAMCYxJDAiBgNV + BAMTG1Rlc3QgQ2VydGlmaWNhdGUgQWxpY2UgUm9vdDAeFw0wNzA4MDExOTUzMDda + Fw0wNzA4MzExOTUzMDdaMCQxIjAgBgNVBAMTGVRlc3QgQ2VydGlmaWNhdGUgQWxp + Y2UgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDmLngkGT5kWsXd + IgLeV+5zNvcDt0+D4cds1cu+bw6Y/23z1+ooA8fU1gXQ28bl6ELM8WRLHgcntqzr + 5UX6S1xPdNfFYt8z4E1ZuvwCPsxcSwVdlYRvzAGNQivDpcJ75Mf5DTeDpr6wm7yn + 2pzxvQIet5djOX51RVGA3hOwCbhq2ceHs0ZruWG3T70H3Sa1ZVxP7m0DJlsSZa6v + 3oEeFOKZQlqrgeU74mJyLAGx/fNbIw+UBrvejfjZobIv985vQ06DZ5S2AquQ2bht + O/2bW3yqeOjH98YK0zlOpYtaZ2fyx4JLjHCspoki6+4W9UG+TuqdkB20mRsr25XT + 9kLuwIGZAgMBAAGjUDBOMAwGA1UdEwQFMAMBAf8wHQYDVR0OBBYEFF6I4IR33h/s + vOa4Vsw2icPo8TgaMB8GA1UdIwQYMBaAFG9ed1KlOQDyB+k7Yeb8LSjG5FDtMA0G + CSqGSIb3DQEBBQUAA4IBAQDVzBuGyXIq/rfMjoNKIHTUgppkc+FjS02cFASpB5mk + ksSpGWYHMZKlqz47qDi44KAG+kmPIPOT0em81+/VGeY1oizJyKSeNDhNboth5oTu + ShDr4flTQCoYvRxm1wh8WIIg09nwibzGztuV1XxtdzfQV5kK5bMBlDXnUfAYydsO + 
jc52x5f4tgdcfBhjnMzkCAx2kvw5Wp3NekkOKl5YYnPK++zT9IBwqrqJmsJvyLPO + vvqVBYkoBWRbmcy6wVU8JpYegNNgVRbi6zeAq33gS75m9uy+4z8Ql6DqVF0s/y+/ + 240tLCW62X98EzrALKsxhkqVZCtdc5HSRaOQr0K3I03S + + + + + + + + + + + + MIIDHTCCAgWgAwIBAgIJAKUUCoKn9ovVMA0GCSqGSIb3DQEBBQUAMCYxJDAiBgNV + BAMTG1Rlc3QgQ2VydGlmaWNhdGUgQWxpY2UgUm9vdDAeFw0wNzA4MDExOTUzMDda + Fw0wNzA4MzExOTUzMDdaMCQxIjAgBgNVBAMTGVRlc3QgQ2VydGlmaWNhdGUgQWxp + Y2UgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDmLngkGT5kWsXd + IgLeV+5zNvcDt0+D4cds1cu+bw6Y/23z1+ooA8fU1gXQ28bl6ELM8WRLHgcntqzr + 5UX6S1xPdNfFYt8z4E1ZuvwCPsxcSwVdlYRvzAGNQivDpcJ75Mf5DTeDpr6wm7yn + 2pzxvQIet5djOX51RVGA3hOwCbhq2ceHs0ZruWG3T70H3Sa1ZVxP7m0DJlsSZa6v + 3oEeFOKZQlqrgeU74mJyLAGx/fNbIw+UBrvejfjZobIv985vQ06DZ5S2AquQ2bht + O/2bW3yqeOjH98YK0zlOpYtaZ2fyx4JLjHCspoki6+4W9UG+TuqdkB20mRsr25XT + 9kLuwIGZAgMBAAGjUDBOMAwGA1UdEwQFMAMBAf8wHQYDVR0OBBYEFF6I4IR33h/s + vOa4Vsw2icPo8TgaMB8GA1UdIwQYMBaAFG9ed1KlOQDyB+k7Yeb8LSjG5FDtMA0G + CSqGSIb3DQEBBQUAA4IBAQDVzBuGyXIq/rfMjoNKIHTUgppkc+FjS02cFASpB5mk + ksSpGWYHMZKlqz47qDi44KAG+kmPIPOT0em81+/VGeY1oizJyKSeNDhNboth5oTu + ShDr4flTQCoYvRxm1wh8WIIg09nwibzGztuV1XxtdzfQV5kK5bMBlDXnUfAYydsO + jc52x5f4tgdcfBhjnMzkCAx2kvw5Wp3NekkOKl5YYnPK++zT9IBwqrqJmsJvyLPO + vvqVBYkoBWRbmcy6wVU8JpYegNNgVRbi6zeAq33gS75m9uy+4z8Ql6DqVF0s/y+/ + 240tLCW62X98EzrALKsxhkqVZCtdc5HSRaOQr0K3I03S + + + + + + + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + 
diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + 
Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + + + + + 
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + 
XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + 
diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + 
G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + 
+ + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + 
cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 
2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + 
b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + 
cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MIIBrjCBlwIBATANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQDEygwQzQzMDRCNjVDNTlBRDk3 + QUNFRDMzNDY2REVFRDdDMTU0OEM4MTZEFw0wOTA5MjgyMDUxNDlaFw0wOTA5MjgyMTUxNDla + oDAwLjAfBgNVHSMEGDAWgBQMQwS2XFmtl6ztM0Zt7tfBVIyBbTALBgNVHRQEBAICAWkwDQYJ + KoZIhvcNAQELBQADggEBAIRT1nriWsvZO9QtLKEiO7SZE3YZqMqDXS1auGBxEZtcLyF93ct6 + dstbiWHXjlHLztgePXh970BsTYhiVbKKnZvS0pjI8a9vr9b2Dyia3QG8ArV0zXVKhAGku5v+ + RG9d+f/VH0GMQ8ITcjJOKOaK0afv1KmQsydAb8KLKAGhCs7zeuztQG2k3YP6BE2OOPYK9fYk + EGHz0kPGp/oD/PJJfmPlKD4Uk4mSvM6e5ksgKg0BnxoU9RMkWjAeGVxk0F+SDG5sPmCsVOgB + fBk4i7H945v/zs7bLLMJxTs8+ao4iCDuknjbGhjWmi9xrTXDtcCXx607rPDkJQcJE2WnRS/U + HIA= + + + MIIHBQYJKoZIhvcNAQcCoIIG9jCCBvICAQMxDTALBglghkgBZQMEAgEwggEfBgsqhkiG9w0B + CRABGqCCAQ4EggEKMIIBBgICAWoYDzIwMDkwOTI4MjA1MTQ5WhgPMjAwOTA5MjgyMTUxNDla + BglghkgBZQMEAgEwgdIwRBYfREVNRXRseFpyWmVzN1ROR2JlN1h3VlNNZ1cwLmNybAMhAPgd + nO/fVdSWmPrnxJAf4JXrf0J/dHv9en+Tsqrz4WjcMEQWH2xkdnhjSEdkcjNvS0hjUGotZ3Vr + bWV0TlJaMC5yb2EDIQAxseZlGDtfsvDOBv1X2ElR8k/V78ynwSBGM22F5DYXUTBEFh94b3BO + R2NzQl9wN2VhZllxWGF0bVZWOEhaZDAucm9hAyEAKIKdRZhS1tawepRzVXtdP1imh6zPymWp + dCjYJUDqzY2gggQLMIIEBzCCAu+gAwIBAgIBATANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQD + EygwQzQzMDRCNjVDNTlBRDk3QUNFRDMzNDY2REVFRDdDMTU0OEM4MTZEMB4XDTA5MDkxODIx + NDE1NFoXDTEwMDkxMzExMDcwOVowMzExMC8GA1UEAxMoMEQ3MjU5REEyNEY5OTRFNTVCN0E2 + NkQxMDBEOUE5REJFMURGODIwNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKwo + COG8kQMKuAVMOe5eTlasUpFQ2OT2VNmJtJ7NPQ1rLm1ahVUcTXUn7p+ikmJYpwFRACZ6htgF + n51RL43lm/pITTSVc1A6fofkrnoNImwMG8Pj8Z46H6kbJOM69NW5asjvA5DfSu73cltGHPRg + 
DQqt1k/3+aWqPWiYS7OGbQdDYGmy3T5VNlc+DBzyAM2VxNrLNF5Imv1NbfLw0Bp/gvayeApe + AjhjraCP7ZQxyXesLbBZrjQz1MXpi4DOZtY8gYwaMNgeU56jR9tpM5IDY5zSPHKZyJVvLQnT + iQfMKasHYMcFDtDrRH7t+YQlmt40uby0YsIIcv5FWJf1OBHnyYcCAwEAAaOCASQwggEgMB0G + A1UdDgQWBBQNclnaJPmU5Vt6ZtEA2anb4d+CBzAfBgNVHSMEGDAWgBQMQwS2XFmtl6ztM0Zt + 7tfBVIyBbTBaBgNVHR8EUzBRME+gTaBLhklyc3luYzovL2FyaW4ucnBraS5uZXQvYXJpbi9p + c2MvSVNDLTk0LVovMi9ERU1FdGx4WnJaZXM3VE5HYmU3WHdWU01nVzAuY3JsMFgGCCsGAQUF + BwEBBEwwSjBIBggrBgEFBQcwAoY8cnN5bmM6Ly9hcmluLnJwa2kubmV0L2FyaW4vMS9ERU1F + dGx4WnJaZXM3VE5HYmU3WHdWU01nVzAuY2VyMBgGA1UdIAEB/wQOMAwwCgYIKwYBBQUHDgIw + DgYDVR0PAQH/BAQDAgeAMA0GCSqGSIb3DQEBCwUAA4IBAQCZtr0XdjKRitItHi7UfUx6hTp2 + BOwoaNoOCEKw+dRMDYEgfRKbWSRd3gyVR3F1DV41aT1vDm56+WoN2Td1WEi2H0q22f1iIKuS + m0MkOpdqVZGOYHLTErv22XzDf7ifdGo3RkW7QOQ3D1n6Qraft5AB3aHskCofvPx3CBGFHKWh + N5HXnh+J/Bly2EwxPYs4yibx6K8stnxwIwsmo7DvjdPwv+VnrmIb7pxOpvqHjEQEs7Wy9Y47 + NP3Ym2YLwbIqAuN6F9kF7DeCanBt0HeFqMsOowz11ll1xBAwcpz/bxVwyAwWUoJNncoJCrjz + n2gPGYKqW80qgQwL8vBLFtBevZbyMYIBqjCCAaYCAQOAFA1yWdok+ZTlW3pm0QDZqdvh34IH + MAsGCWCGSAFlAwQCAaBrMBoGCSqGSIb3DQEJAzENBgsqhkiG9w0BCRABGjAcBgkqhkiG9w0B + CQUxDxcNMDkwOTI4MjA1MTQ5WjAvBgkqhkiG9w0BCQQxIgQgYA8+0xE+taAr6cM6tEAt4Wh6 + BWT8Xu76a6YSZt9hb2kwDQYJKoZIhvcNAQEBBQAEggEAmD/WNppqwMtpQw+RkqIbcg3HT7fg + RRK+ehJfcyCqP/t7vUu65cAcz02gbT3LHZDkqtGD9WjgjoxSVNrYiS+4TEQbt0AXrSQFqr9F + ud2eujpeGpx56VVYgE/Jef9MfiYXSjWj9oveEWR1OdRFVCn6TW6+t1n6OMTNhnDxYt9t4NZV + OCK95aHm9vi7d8CMZfPnZMQuXiNmHberYkxLu5LZJ84C2GqGbyBllkFp2KUGKoWgMyeKkk0q + yML8lQJAFAyjnXJ+doGbqfTUpVH4q4drqRb73WbL0zf/Z2HGwhDlTmsAdjparWdQcfXIVrJF + ynS1fab9XZfj+VtBFKjooDjaLw== + + + MIIGnQYJKoZIhvcNAQcCoIIGjjCCBooCAQMxDTALBglghkgBZQMEAgEwMQYLKoZIhvcNAQkQ + ARigIgQgMB4CAg3lMBgwFgQCAAEwEDAGAwQAwAUEMAYDBADABQWgggSTMIIEjzCCA3egAwIB + AgIBAjANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQDEygwQzQzMDRCNjVDNTlBRDk3QUNFRDMz + NDY2REVFRDdDMTU0OEM4MTZEMB4XDTA5MDkxODIyNTkzM1oXDTEwMDkxMzExMDcwOVowMzEx + MC8GA1UEAxMoOTVEQkYxNzA3MTlEQUY3QTBBMURDM0UzRkEwQkE0OTlFQjRENDU5RDCCASIw + 
DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALpn7TNbq1aYIa9fQG660Rz3dUfpx/dZEOJc + /PD5sxLSBCgcMJKGUb7RGajyI4pbIwVax1w+A4Ie38YjSl6p95FdwBMDX2w6OwePdLwDC+0R + zCf2p/F4Go79glYssEKjFGYvUDwm8SwJ3dr8XdlgdjbT4zIrMZj9SVOgreeNGOU+jcn8HvX3 + 94/0w49JDzPwKmHzGSlBTunzQ4pYZyZ2R+rDKuTJqRslEdD5KOFjNV2s4owWkhJzCermBj2u + CFExS+0Fc0In9Q3X5PcTMp2L1Gj4sdgc2Kbc8VTWU7kgF5M/15HThgRy+Ldx/b05w22mJV7L + 6yMkNGfRpn4CxLFD0U8CAwEAAaOCAawwggGoMB0GA1UdDgQWBBSV2/FwcZ2vegodw+P6C6SZ + 601FnTAfBgNVHSMEGDAWgBQMQwS2XFmtl6ztM0Zt7tfBVIyBbTBaBgNVHR8EUzBRME+gTaBL + hklyc3luYzovL2FyaW4ucnBraS5uZXQvYXJpbi9pc2MvSVNDLTk0LVovMi9ERU1FdGx4WnJa + ZXM3VE5HYmU3WHdWU01nVzAuY3JsMFgGCCsGAQUFBwEBBEwwSjBIBggrBgEFBQcwAoY8cnN5 + bmM6Ly9hcmluLnJwa2kubmV0L2FyaW4vMS9ERU1FdGx4WnJaZXM3VE5HYmU3WHdWU01nVzAu + Y2VyMBgGA1UdIAEB/wQOMAwwCgYIKwYBBQUHDgIwDgYDVR0PAQH/BAQDAgeAMGUGCCsGAQUF + BwELBFkwVzBVBggrBgEFBQcwC4ZJcnN5bmM6Ly9hcmluLnJwa2kubmV0L2FyaW4vaXNjL0lT + Qy05NC1aLzIvbGR2eGNIR2RyM29LSGNQai1ndWttZXROUlowLnJvYTAfBggrBgEFBQcBBwEB + /wQQMA4wDAQCAAEwBgMEAcAFBDANBgkqhkiG9w0BAQsFAAOCAQEAIjdpXxwxe9sK9RkqzvDP + to3zdDhpLf29XqKKkNhux2pXXeMcRR5vNN13mguuxaO/uQtrFTBgk8EJn7CfhmIoZpZClcAS + cvIyYSBbc/VspOdmPH2DnQGFjBk/qpEUs3W3Us5/w6x2BnjuUtRBj5eQQ5kydtHTy/URSX7i + K76ngiTsDL6e77UVu8KY+EutZU3b2HH73qfeP8L4HJ2rpm5tnHZEECcAHS20bhqTqyuECcuK + FBhQA2Ye8LtVg/CbZixZNqb5bfcCj72HzsZAKC57gzu/ROJ43wINcwgCkYUeWM8eoFJoYCaQ + z1avg/vDGBrZtkNQQJt9mXoxaJF25YEuRDGCAaowggGmAgEDgBSV2/FwcZ2vegodw+P6C6SZ + 601FnTALBglghkgBZQMEAgGgazAaBgkqhkiG9w0BCQMxDQYLKoZIhvcNAQkQARgwHAYJKoZI + hvcNAQkFMQ8XDTA5MDkxODIyNTkzM1owLwYJKoZIhvcNAQkEMSIEIEU7rNCYuAgPtZckEMP4 + MMUl4hMwvLJ4KWHgg1fZNkJMMA0GCSqGSIb3DQEBAQUABIIBAA6fqD9/VisrRFIqRbwFpG/B + fkmnZGPequD7JPgJR/O/7ofUe1yunPugdPoDe+bTrEaUfyj6xAcdXXwR2fKHF8HyCPMclqCB + aQNZH/nHnawrwOXem8qwnKRyn7hOXyKPxar4VIVg90JFttgaM/l9W++PV02KQS8GlFRymvpg + Eca4THQ5/VWe/3V5dAOEGFUl0/WAjYId+jYzF9oHKSeZTqWmpvDaX4Pc+xkydw18kQBsovnv + +N931gu2r5I/XB/MGgGvXNWozK7RuMn55i5hMqI2NQs+/b7/AQU0+/i3g7SlLA8iZwHq49U2 + ZXRCjLXcy0tQOWVsMnGfReN8oNDhHbc= + + + 
MIIGoQYJKoZIhvcNAQcCoIIGkjCCBo4CAQMxDTALBglghkgBZQMEAgEwMAYLKoZIhvcNAQkQ + ARigIQQfMB0CAgUAMBcwFQQCAAEwDzAFAwMAlRQwBgMEA8yYuKCCBJgwggSUMIIDfKADAgEC + AgEDMA0GCSqGSIb3DQEBCwUAMDMxMTAvBgNVBAMTKDBDNDMwNEI2NUM1OUFEOTdBQ0VEMzM0 + NjZERUVEN0MxNTQ4QzgxNkQwHhcNMDkwOTE4MjI1OTU1WhcNMTAwOTEzMTEwNzA5WjAzMTEw + LwYDVQQDEyhDNjhBNEQxOUNCMDFGRTlFREU2OUY2MkE1REFCNjY1NTVGMDc2NUREMIIBIjAN + BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2z9OLSVMT632SBjbKcwfnZtkIOeaFG8oRZKe + u6q7E3krOxXuK2Yxafz5d2+FJBBFHWSmtKuHpeR2rkUWOZlgIWny6u9hb7vzt2wvuEXjBI7H + Bn3sNgGOIgHyuWjUxWyy6gr1K4x437XaMUeMx7xy/82DSgqyK0298EoiPlg8wQau38WFx+FZ + cu2Bnf3prc2l3oSRKNPAE7l4P6DKnjy3VPQT6xCt5PEscVDFzkMeJXrGe48GwJzV0ZbUQHeZ + /eMAsWyZIp5K4kciU6A552ImLjim64HXaviyHiv0rHAUImBoK5AbnpH1yOZ93StqD0iFEJMF + HubclLRuJRFomX05DwIDAQABo4IBsTCCAa0wHQYDVR0OBBYEFMaKTRnLAf6e3mn2Kl2rZlVf + B2XdMB8GA1UdIwQYMBaAFAxDBLZcWa2XrO0zRm3u18FUjIFtMFoGA1UdHwRTMFEwT6BNoEuG + SXJzeW5jOi8vYXJpbi5ycGtpLm5ldC9hcmluL2lzYy9JU0MtOTQtWi8yL0RFTUV0bHhaclpl + czdUTkdiZTdYd1ZTTWdXMC5jcmwwWAYIKwYBBQUHAQEETDBKMEgGCCsGAQUFBzAChjxyc3lu + YzovL2FyaW4ucnBraS5uZXQvYXJpbi8xL0RFTUV0bHhaclplczdUTkdiZTdYd1ZTTWdXMC5j + ZXIwGAYDVR0gAQH/BA4wDDAKBggrBgEFBQcOAjAOBgNVHQ8BAf8EBAMCB4AwZQYIKwYBBQUH + AQsEWTBXMFUGCCsGAQUFBzALhklyc3luYzovL2FyaW4ucnBraS5uZXQvYXJpbi9pc2MvSVND + LTk0LVovMi94b3BOR2NzQl9wN2VhZllxWGF0bVZWOEhaZDAucm9hMCQGCCsGAQUFBwEHAQH/ + BBUwEzARBAIAATALAwMAlRQDBAPMmLgwDQYJKoZIhvcNAQELBQADggEBAMmzrOxl/SA7uEHR + 4D5jCMNFZaKkh9Shf2Uqg+JpD88BPVLdBDHmG7CmFSI42puZk76SIrXLjyaUv3kP4wKNXOug + c3/80bynPgT+25kTeJc5T4Th735fzJZantqfG+uBQmC2Rk5mihTAL1wweIBFBYcmjAWSmuo9 + N84XWOikQnkPLAsiX75mT1E2BZB5te6UruWHRtlMggNvE72zrZBYAhk+bCC5HdkAhwA3dah5 + SsMSOoGgniFoWlLq3COV+ga1OkJgYVRQHXGP7Fjh7YCU2yUygKaf5Yniqh1ijbjJvVz419QY + ZflO9//gP3IM5ClbnWR4RhzZFKJ4DGz+lDmHIugxggGqMIIBpgIBA4AUxopNGcsB/p7eafYq + XatmVV8HZd0wCwYJYIZIAWUDBAIBoGswGgYJKoZIhvcNAQkDMQ0GCyqGSIb3DQEJEAEYMBwG + CSqGSIb3DQEJBTEPFw0wOTA5MTgyMjU5NTVaMC8GCSqGSIb3DQEJBDEiBCC4ptBgQZ1Ktxau + 
h1foPe9MJiB8XZJ21ynmZ7BPTWLQVTANBgkqhkiG9w0BAQEFAASCAQBlAxAGN2Tcvi8tF5qk + ECahrFZn0qvOw9tQRKNwwC5SRUQWQcd6Pi7g0znLVS0Y5iOZB7QvHiuPXvVAR7cxwjRlEZy2 + kmERAbrq7ROweJjb9L5JsacRSWUfG7JQjdqMSGLOf3gqlidBnDrKlNIWfyGntpZZFmIGKo9X + 5U8PWrCGkb+2AZT/tpt0eMGRhdgGX0n987dEhUbU7k9dZZXA7ou/g1MSL2HHfH17mL9rQqzN + UwHopIkNlG0ljGy7xI2wjjcvUCDi0Ns/asqxlz6icHgXhrhLyZy3JlcjG7/v2dm0MdZLFg4m + FN/5lE6Ayt2VEDfVNRfMzD6ezxb8PZc2astn + + + + + + + + text string + + + + + + + diff --git a/ca/tests/myrpki-xml-parse-test.py b/ca/tests/myrpki-xml-parse-test.py new file mode 100644 index 00000000..10b9cd58 --- /dev/null +++ b/ca/tests/myrpki-xml-parse-test.py @@ -0,0 +1,101 @@ +# $Id$ +# +# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Test parser and display tool for myrpki.xml files. 
+""" + +import lxml.etree, rpki.resource_set, base64, subprocess + +relaxng = lxml.etree.RelaxNG(file = "myrpki.rng") + +tree = lxml.etree.parse("myrpki.xml").getroot() + +if False: + print lxml.etree.tostring(tree, pretty_print = True, encoding = "us-ascii", xml_declaration = True) + +relaxng.assertValid(tree) + +def showitems(y): + if False: + for k, v in y.items(): + if v: + print " ", k, v + +def tag(t): + return "{http://www.hactrn.net/uris/rpki/myrpki/}" + t + +print "My handle:", tree.get("handle") + +print "Children:" +for x in tree.getiterator(tag("child")): + print " ", x + print " Handle:", x.get("handle") + print " ASNS: ", rpki.resource_set.resource_set_as(x.get("asns")) + print " IPv4: ", rpki.resource_set.resource_set_ipv4(x.get("v4")) + print " Valid: ", x.get("valid_until") + showitems(x) +print + +print "ROA requests:" +for x in tree.getiterator(tag("roa_request")): + print " ", x + print " ASN: ", x.get("asn") + print " IPv4:", rpki.resource_set.roa_prefix_set_ipv4(x.get("v4")) + print " IPv6:", rpki.resource_set.roa_prefix_set_ipv6(x.get("v6")) + showitems(x) +print + +def showpem(label, b64, kind): + cmd = ("openssl", kind, "-noout", "-text", "-inform", "DER") + if kind == "x509": + cmd += ("-certopt", "no_pubkey,no_sigdump") + p = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE) + text = p.communicate(input = base64.b64decode(b64))[0] + if p.returncode != 0: + raise subprocess.CalledProcessError(returncode = p.returncode, cmd = cmd) + print label, text + +for x in tree.getiterator(tag("child")): + cert = x.findtext(tag("bpki_certificate")) + if cert: + showpem("Child", cert, "x509") + +for x in tree.getiterator(tag("parent")): + print "Parent URI:", x.get("service_uri") + cert = x.findtext(tag("bpki_certificate")) + if cert: + showpem("Parent", cert, "x509") + +ca = tree.findtext(tag("bpki_ca_certificate")) +if ca: + showpem("CA", ca, "x509") + +bsc = tree.findtext(tag("bpki_bsc_certificate")) +if bsc: + showpem("BSC 
EE", bsc, "x509") + +repo = tree.findtext(tag("bpki_repository_certificate")) +if repo: + showpem("Repository", repo, "x509") + +req = tree.findtext(tag("bpki_bsc_pkcs10")) +if req: + showpem("BSC EE", req, "req") + +crl = tree.findtext(tag("bpki_crl")) +if crl: + showpem("CA", crl, "crl") diff --git a/ca/tests/old_irdbd.py b/ca/tests/old_irdbd.py new file mode 100644 index 00000000..d258e4c0 --- /dev/null +++ b/ca/tests/old_irdbd.py @@ -0,0 +1,19 @@ +# $Id$ +# +# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +if __name__ == "__main__": + import rpki.old_irdbd + rpki.old_irdbd.main() diff --git a/ca/tests/old_irdbd.sql b/ca/tests/old_irdbd.sql new file mode 100644 index 00000000..e773bb2e --- /dev/null +++ b/ca/tests/old_irdbd.sql @@ -0,0 +1,143 @@ +-- $Id$ + +-- Copyright (C) 2009--2011 Internet Systems Consortium ("ISC") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- SQL objects needed by irdbd.py. You only need this if you're using +-- irdbd.py as your IRDB; if you have a "real" backend you can do +-- anything you like so long as you implement the relevant portion of +-- the left-right protocol. + +-- DROP TABLE commands must be in correct (reverse dependency) order +-- to satisfy FOREIGN KEY constraints. 
+ +DROP TABLE IF EXISTS roa_request_prefix; +DROP TABLE IF EXISTS roa_request; +DROP TABLE IF EXISTS registrant_net; +DROP TABLE IF EXISTS registrant_asn; +DROP TABLE IF EXISTS registrant; +DROP TABLE IF EXISTS ghostbuster_request; +DROP TABLE IF EXISTS ee_certificate_asn; +DROP TABLE IF EXISTS ee_certificate_net; +DROP TABLE IF EXISTS ee_certificate; + +CREATE TABLE registrant ( + registrant_id SERIAL NOT NULL, + registrant_handle VARCHAR(255) NOT NULL, + registrant_name TEXT, + registry_handle VARCHAR(255), + valid_until DATETIME NOT NULL, + PRIMARY KEY (registrant_id), + UNIQUE (registry_handle, registrant_handle) +) ENGINE=InnoDB; + +CREATE TABLE registrant_asn ( + start_as BIGINT UNSIGNED NOT NULL, + end_as BIGINT UNSIGNED NOT NULL, + registrant_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (registrant_id, start_as, end_as), + CONSTRAINT registrant_asn_registrant_id + FOREIGN KEY (registrant_id) REFERENCES registrant (registrant_id) + ON DELETE CASCADE ON UPDATE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE registrant_net ( + start_ip VARCHAR(40) NOT NULL, + end_ip VARCHAR(40) NOT NULL, + version TINYINT UNSIGNED NOT NULL, + registrant_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (registrant_id, version, start_ip, end_ip), + CONSTRAINT registrant_net_registrant_id + FOREIGN KEY (registrant_id) REFERENCES registrant (registrant_id) + ON DELETE CASCADE ON UPDATE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE roa_request ( + roa_request_id SERIAL NOT NULL, + self_handle VARCHAR(255) NOT NULL, + asn BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (roa_request_id) +) ENGINE=InnoDB; + +CREATE TABLE roa_request_prefix ( + prefix VARCHAR(40) NOT NULL, + prefixlen TINYINT UNSIGNED NOT NULL, + max_prefixlen TINYINT UNSIGNED NOT NULL, + version TINYINT UNSIGNED NOT NULL, + roa_request_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (roa_request_id, prefix, prefixlen, max_prefixlen), + CONSTRAINT roa_request_prefix_roa_request_id + FOREIGN KEY (roa_request_id) REFERENCES roa_request 
(roa_request_id) + ON DELETE CASCADE ON UPDATE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE ghostbuster_request ( + ghostbuster_request_id SERIAL NOT NULL, + self_handle VARCHAR(255) NOT NULL, + parent_handle VARCHAR(255), + vcard LONGBLOB NOT NULL, + PRIMARY KEY (ghostbuster_request_id) +) ENGINE=InnoDB; + +CREATE TABLE ee_certificate ( + ee_certificate_id SERIAL NOT NULL, + self_handle VARCHAR(255) NOT NULL, + pkcs10 LONGBLOB NOT NULL, + gski VARCHAR(27) NOT NULL, + cn VARCHAR(64) NOT NULL, + sn VARCHAR(64), + eku TEXT NOT NULL, + valid_until DATETIME NOT NULL, + PRIMARY KEY (ee_certificate_id), + UNIQUE (self_handle, gski) +) ENGINE=InnoDB; + +CREATE TABLE ee_certificate_asn ( + start_as BIGINT UNSIGNED NOT NULL, + end_as BIGINT UNSIGNED NOT NULL, + ee_certificate_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ee_certificate_id, start_as, end_as), + CONSTRAINT ee_certificate_asn_ee_certificate_id + FOREIGN KEY (ee_certificate_id) REFERENCES ee_certificate (ee_certificate_id) + ON DELETE CASCADE ON UPDATE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE ee_certificate_net ( + version TINYINT UNSIGNED NOT NULL, + start_ip VARCHAR(40) NOT NULL, + end_ip VARCHAR(40) NOT NULL, + ee_certificate_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ee_certificate_id, version, start_ip, end_ip), + CONSTRAINT ee_certificate_net_ee_certificate_id + FOREIGN KEY (ee_certificate_id) REFERENCES ee_certificate (ee_certificate_id) + ON DELETE CASCADE ON UPDATE CASCADE +) ENGINE=InnoDB; + +-- Local Variables: +-- indent-tabs-mode: nil +-- End: diff --git a/ca/tests/publication-protocol-samples.xml b/ca/tests/publication-protocol-samples.xml new file mode 100644 index 00000000..96b095a7 --- /dev/null +++ b/ca/tests/publication-protocol-samples.xml @@ -0,0 +1,370 @@ + + + + + + + + MIIBezBlAgEBMA0GCSqGSIb3DQEBCwUAMCMxITAfBgNVBAMTGFRlc3QgQ2VydGlm + aWNhdGUgcHViZCBUQRcNMDgwNjAyMjE0OTQ1WhcNMDgwNzAyMjE0OTQ1WqAOMAww + CgYDVR0UBAMCAQEwDQYJKoZIhvcNAQELBQADggEBAFWCWgBl4ljVqX/CHo+RpqYt + 
vmKMnjPVflMXUB7i28RGP4DAq4l7deDU7Q82xEJyE4TXMWDWAV6UG6uUGum0VHWO + cj9ohqyiZUGfOsKg2hbwkETm8sAENOsi1yNdyKGk6jZ16aF5fubxQqZa1pdGCSac + 1/ZYC5sLLhEz3kmz+B9z9mXFVc5TgAh4dN3Gy5ftF8zZAFpDGnS4biCnRVqhGv6R + 0Lh/5xmii+ZU6kNDhbeMsjJg+ZOmtN+wMeHSIbjiy0WuuaZ3k2xSh0C94anrHBZA + vvCRhbazjR0Ef5OMZ5lcllw3uO8IHuoisHKkehy4Y0GySdj98fV+OuiRTH9vt/M= + + + + + + + + + + + + + + + + MIIBezBlAgEBMA0GCSqGSIb3DQEBCwUAMCMxITAfBgNVBAMTGFRlc3QgQ2VydGlm + aWNhdGUgcHViZCBUQRcNMDgwNjAyMjE0OTQ1WhcNMDgwNzAyMjE0OTQ1WqAOMAww + CgYDVR0UBAMCAQEwDQYJKoZIhvcNAQELBQADggEBAFWCWgBl4ljVqX/CHo+RpqYt + vmKMnjPVflMXUB7i28RGP4DAq4l7deDU7Q82xEJyE4TXMWDWAV6UG6uUGum0VHWO + cj9ohqyiZUGfOsKg2hbwkETm8sAENOsi1yNdyKGk6jZ16aF5fubxQqZa1pdGCSac + 1/ZYC5sLLhEz3kmz+B9z9mXFVc5TgAh4dN3Gy5ftF8zZAFpDGnS4biCnRVqhGv6R + 0Lh/5xmii+ZU6kNDhbeMsjJg+ZOmtN+wMeHSIbjiy0WuuaZ3k2xSh0C94anrHBZA + vvCRhbazjR0Ef5OMZ5lcllw3uO8IHuoisHKkehy4Y0GySdj98fV+OuiRTH9vt/M= + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + 
hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV + BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN + MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS + b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S + G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G + Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC + DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM + uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP + 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ + diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 + ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq + hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg + cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 + XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm + sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH + YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq + 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== + + + + + + + + + + + + + + + + + MIIE+jCCA+KgAwIBAgIBDTANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQDEyhERjRBODAxN0U2 + NkE5RTkxNzJFNDYxMkQ4Q0Y0QzgzRjIzOERFMkEzMB4XDTA4MDUyMjE4MDUxMloXDTA4MDUy + NDE3NTQ1M1owMzExMC8GA1UEAxMoOEZCODIxOEYwNkU1MEFCNzAyQTdEOTZEQzhGMENEQ0Q4 + MjhGN0YxNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMeziKp0k5nP7v6SZoNs + XIMQYRgNtC6Fr/9Xm/1yQHomiPqHUk47rHhGojYiK5AhkrwoYhkH4UjJl2iwklDYczXuaBU3 + 
F5qrKlZ4aZnjIxdlP7+hktVpeApL6yuJTUAYeC3UIxnLDVdD6phydZ/FOQluffiNDjzteCCv + oyOUatqt8WB+oND6LToHp028g1YUYLHG6mur0dPdcHOVXLSmUDuZ1HDz1nDuYvIVKjB/MpH9 + aW9XeaQ6ZFIlZVPwuuvI2brR+ThH7Gv27GL/o8qFdC300VQfoTZ+rKPGDE8K1cI906BL4kiw + x9z0oiDcE96QCz+B0vsjc9mGaA1jgAxlXWsCAwEAAaOCAhcwggITMB0GA1UdDgQWBBSPuCGP + BuUKtwKn2W3I8M3Ngo9/FzAfBgNVHSMEGDAWgBTfSoAX5mqekXLkYS2M9Mg/I43iozBVBgNV + HR8ETjBMMEqgSKBGhkRyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rlc3RiZWQvUklSLzEvMzBx + QUYtWnFucEZ5NUdFdGpQVElQeU9ONHFNLmNybDBFBggrBgEFBQcBAQQ5MDcwNQYIKwYBBQUH + MAKGKXJzeW5jOi8vbG9jYWxob3N0OjQ0MDAvdGVzdGJlZC9XT01CQVQuY2VyMBgGA1UdIAEB + /wQOMAwwCgYIKwYBBQUHDgIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwgZsG + CCsGAQUFBwELBIGOMIGLMDQGCCsGAQUFBzAFhihyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rl + c3RiZWQvUklSL1IwLzEvMFMGCCsGAQUFBzAKhkdyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rl + c3RiZWQvUklSL1IwLzEvajdnaGp3YmxDcmNDcDlsdHlQRE56WUtQZnhjLm1uZjAaBggrBgEF + BQcBCAEB/wQLMAmgBzAFAgMA/BUwPgYIKwYBBQUHAQcBAf8ELzAtMCsEAgABMCUDAwAKAzAO + AwUAwAACAQMFAcAAAiAwDgMFAsAAAiwDBQDAAAJkMA0GCSqGSIb3DQEBCwUAA4IBAQCEhuH7 + jtI2PJY6+zwv306vmCuXhtu9Lr2mmRw2ZErB8EMcb5xypMrNqMoKeu14K2x4a4RPJkK4yATh + M81FPNRsU5mM0acIRnAPtxjHvPME7PHN2w2nGLASRsZmaa+b8A7SSOxVcFURazENztppsolH + eTpm0cpLItK7mNpudUg1JGuFo94VLf1MnE2EqARG1vTsNhel/SM/UvOArCCOBvf0Gz7kSuup + DSZ7qx+LiDmtEsLdbGNQBiYPbLrDk41PHrxdx28qIj7ejZkRzNFw/3pi8/XK281h8zeHoFVu + 6ghRPy5dbOA4akX/KG6b8XIx0iwPYdLiDbdWFbtTdPcXBauY + + + + + + + + + + + + + + + + + + + + MIIBwzCBrAIBATANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQDEyhERjRBODAxN0U2NkE5RTkx + NzJFNDYxMkQ4Q0Y0QzgzRjIzOERFMkEzFw0wODA1MjIxODA0MTZaFw0wODA1MjIxODA1MTZa + MBQwEgIBAhcNMDgwNTIyMTc1ODQwWqAvMC0wHwYDVR0jBBgwFoAU30qAF+ZqnpFy5GEtjPTI + PyON4qMwCgYDVR0UBAMCAQYwDQYJKoZIhvcNAQELBQADggEBAKkM0Fb/pJpHVHWZyjp4wojH + W2KkvA/DFtBiz3moxocSnkDVP3QI19uVvqdC6nH3hJyFmsAMwULR0f1XU/V4j+X+FqYEl6Nv + p8zAEPIB4r8xbEFs7udRwXRAjkJmOQbv9aomF2i+d7jpTFVJxShZWOgsoGEhIy/aktKQrOIR + c4ZDrXpQwXVj2Y7+cGVfQ4gvnPOdlyLcnNovoegazATvA3EcidBNPWRg7XTCz0LVBEB7JgPd + 
nNyXRg35HdMEHBl7U9uUQJXP7S02oaQ1ehNDMfaJPgBBpQtAnM1lIzJfevd9+e4ywGsRpxAV + 8wxTXSPd1jwuKtS0kwrgsrQ8Ya85xUE= + + + + + + + + + + + + + + + + + + + + MIIHCgYJKoZIhvcNAQcCoIIG+zCCBvcCAQMxDTALBglghkgBZQMEAgEwggEeBgsqhkiG9w0B + CRABGqCCAQ0EggEJMIIBBQIBEhgPMjAwODA1MjIxODA1MTVaGA8yMDA4MDUyMjE4MDYxNVoG + CWCGSAFlAwQCATCB0jBEFh9ZbTVUTzRJYnlDb0pNZ3E2R2o4dG41Mng5U0UuY2VyAyEA4L8Z + WMyuhOx+o6kUfsRR++QjSaRaATy4UOeVtjvZVqYwRBYfWnRxbjB3NEVFbU9hclAzQmd1SUY3 + MDhhNTM4LmNlcgMhAGQI1gYJotxWmwzcmpLNFZJ656uWOjcPYANlbNz80xm8MEQWH2xxa1Vx + RHEwMDBESW9ZVjlybXdLTGdrN2F6by5jZXIDIQB7jRAEpkPvc4s4PX9vDvnTifj3BIE145FO + 1ne2kEejVqCCBBEwggQNMIIC9aADAgECAgEFMA0GCSqGSIb3DQEBCwUAMDMxMTAvBgNVBAMT + KDhGQjgyMThGMDZFNTBBQjcwMkE3RDk2REM4RjBDRENEODI4RjdGMTcwHhcNMDgwNTIyMTc1 + NzQ5WhcNMDgwNTI0MTc1NDUzWjAzMTEwLwYDVQQDEyhERkRBMjMyMUJENEVCMDNFQTE1RkUy + N0NGRkRGMEFGRkU1QjBFNjY4MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2/Gk + AHW5pDqye0+TvUp7sl0rVgmTfeHpVp18ypxvuovogVJgkjEtBEikfaFU0646wYD6JM6IJFJX + lWLWd7bVmhkWViKuZL0VmT2wpUToNHCLUGUQUVVX8R7oSHFdTArv2AqH+6yt0LmczDH1y2M6 + 2Tgkz9wZ9ryyuPx3VX4PkHzUMlkGFICj1fvyXkcAu8jBaxR9UME1c413TPaMi6lMh1HUmtVN + LJMP5+/SnwEAW/Z3dPClCFIgQXK3nAKPVzAIwADEiqhK7cSchhO7ikI1CVt0XzG4n7oaILc3 + Hq/DAxyiutw5GlkUlKPri2YJzJ3+H4P+TveSa/b02fVA5csm/QIDAQABo4IBKjCCASYwHQYD + VR0OBBYEFN/aIyG9TrA+oV/ifP/fCv/lsOZoMB8GA1UdIwQYMBaAFI+4IY8G5Qq3AqfZbcjw + zc2Cj38XMFgGA1UdHwRRME8wTaBLoEmGR3JzeW5jOi8vbG9jYWxob3N0OjQ0MDAvdGVzdGJl + ZC9SSVIvUjAvMS9qN2doandibENyY0NwOWx0eVBETnpZS1BmeGMuY3JsMGAGCCsGAQUFBwEB + BFQwUjBQBggrBgEFBQcwAoZEcnN5bmM6Ly9sb2NhbGhvc3Q6NDQwMC90ZXN0YmVkL1JJUi8x + L2o3Z2hqd2JsQ3JjQ3A5bHR5UEROellLUGZ4Yy5jZXIwGAYDVR0gAQH/BA4wDDAKBggrBgEF + BQcOAjAOBgNVHQ8BAf8EBAMCB4AwDQYJKoZIhvcNAQELBQADggEBADpsE9HfgVTgmX1WeJTE + fm87CXuOoGH85RFiAngSt5kR4gYCyadklOZ7Eta+ERUZVu4tcKO6sJOTuHPfVrAvR0VpgH+j + PvXboYWSfwJdi00BC28ScrVM2zarA7B10+J6Oq8tbFlAyVBkrbuPet/axmndBtGWhrBTynGl + nc/5L371Lxy6CrOYqXO0Qx3SrOKaailAe3zTIpHQeACqnPdL00zIBw/hVy/VNaH1wy+FmhAz + 
TsmsQUrMyovJcu/ry5w0KHlP8BTnqfykikCWR+Lw0VQHmpJGAbtrmsOeIbfLY1zl7A81lDAl + AG/ZH1DUdDOUIXMLHWur+D2rwjp7RL16LHYxggGqMIIBpgIBA4AU39ojIb1OsD6hX+J8/98K + /+Ww5mgwCwYJYIZIAWUDBAIBoGswGgYJKoZIhvcNAQkDMQ0GCyqGSIb3DQEJEAEaMBwGCSqG + SIb3DQEJBTEPFw0wODA1MjIxODA1MTVaMC8GCSqGSIb3DQEJBDEiBCBj/GjEQw3LgKPf5DTz + 8eu1fcp6/cQjqqne6ZqFkF42azANBgkqhkiG9w0BAQEFAASCAQBOY0uHNMwy/o1nFANSgha5 + PZxt8fz+wTrbeomCb+lxqQKq1clcSiQORVGc8NmqC8sS5OR3eTw/3qnK9yPHxz2UQ4hn1pBa + +Zy5veM61qMaXCw6w98EyNcvUfA1AkezAjkabfHQDs3o4Ezh49thXXyRcBoF+O6Lmi+LZbT2 + 4jvfFbaXW9zsb6/DaoDkeHnlk+YYgfSP4wOnkK5uqxtDW8QpMPq3GGdIp0oJDkzEdj7VsWIL + 9JP2mxxL8fTPVUyAPOmURYwYDXqhke2O9eVDiCYhrEfB8/84Rint4Cj8n5aCujnAtqtwxHpD + 0NRYO/V1MjhG+ARy1vRH1Dm0r92RBam3 + + + + + + + + + + + + + + + + + + + + MIIGmwYJKoZIhvcNAQcCoIIGjDCCBogCAQMxDTALBglghkgBZQMEAgEwKgYLKoZIhvcNAQkQ + ARigGwQZMBcCAgKaMBEwDwQCAAEwCTAHAwUACgMALKCCBJgwggSUMIIDfKADAgECAgEJMA0G + CSqGSIb3DQEBCwUAMDMxMTAvBgNVBAMTKDhGQjgyMThGMDZFNTBBQjcwMkE3RDk2REM4RjBD + RENEODI4RjdGMTcwHhcNMDgwNTIyMTc1ODI0WhcNMDgwNTI0MTc1NDUzWjAzMTEwLwYDVQQD + Eyg5NkE5MTRBODNBQjREMzQwQzhBMTg1N0RBRTZDMEEyRTA5M0I2QjNBMIIBIjANBgkqhkiG + 9w0BAQEFAAOCAQ8AMIIBCgKCAQEApoK50BjW5bcF4gsdaYhndtVADZvQk3RCsvuqDElF6uLi + 9BYQq/NHyDOIMyJtvCmzjdv3Y135n1sNO7YvssqHlt7dMfCQTD5ND1GpFnQLdWP7stWM5AbO + nJV6+PtDITUA/QHOli7Do0YCUgR6G+1QJsMu0DK+TRSzBJ6WP7WIYOBOOg3y/NKc1rkWhS1Q + dcQepbHgQYZHzzpjNDR6+oYVuhuUEWx1P6O4pv/p+tpE0SDua7jBjMywIYHkPQBecf2IX1RU + WNojB9dJlnRx5YUUneP2SvF2MrmdDbclgzwhf6alqD2OjiMuoBOG8yeTKcuhzCMnrFAklbst + 6x3Rnq9BswIDAQABo4IBsTCCAa0wHQYDVR0OBBYEFJapFKg6tNNAyKGFfa5sCi4JO2s6MB8G + A1UdIwQYMBaAFI+4IY8G5Qq3AqfZbcjwzc2Cj38XMFgGA1UdHwRRME8wTaBLoEmGR3JzeW5j + Oi8vbG9jYWxob3N0OjQ0MDAvdGVzdGJlZC9SSVIvUjAvMS9qN2doandibENyY0NwOWx0eVBE + TnpZS1BmeGMuY3JsMGAGCCsGAQUFBwEBBFQwUjBQBggrBgEFBQcwAoZEcnN5bmM6Ly9sb2Nh + bGhvc3Q6NDQwMC90ZXN0YmVkL1JJUi8xL2o3Z2hqd2JsQ3JjQ3A5bHR5UEROellLUGZ4Yy5j + ZXIwGAYDVR0gAQH/BA4wDDAKBggrBgEFBQcOAjAOBgNVHQ8BAf8EBAMCB4AwYwYIKwYBBQUH + 
AQsEVzBVMFMGCCsGAQUFBzALhkdyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rlc3RiZWQvUklS + L1IwLzEvbHFrVXFEcTAwMERJb1lWOXJtd0tMZ2s3YXpvLnJvYTAgBggrBgEFBQcBBwEB/wQR + MA8wDQQCAAEwBwMFAAoDACwwDQYJKoZIhvcNAQELBQADggEBAL8iHwsyGOYhhIf3nVuL361y + TOJSP8SR0mtQLHULPl+GkYk+5MRNWtL8ucTXFvniYJtOCXEGGEIO9eDXvkQIXQSz/qbF9URQ + fuf38ghRza257syVhal6UHTgCFYuRIO9CUjcU1vkWUxH05BBIHlYdtlIQbAG/mRsCPCEgSmG + bbQaomGlUOqmJMlKxLLcoAtz2vDrwVotgHyfS5h2mgINFjnlLcNLTci+sfs7/aQAkDYx7K98 + se/ZlMorvGkFNhHoOTcGIrWkYsfkbTygVwWRm278PaB3o4449Kvsg/gb8BZeHXRs68cr5Mcf + jP7Q6jeypjTgDBnwb1yzoJIKWszFuSgxggGqMIIBpgIBA4AUlqkUqDq000DIoYV9rmwKLgk7 + azowCwYJYIZIAWUDBAIBoGswGgYJKoZIhvcNAQkDMQ0GCyqGSIb3DQEJEAEYMBwGCSqGSIb3 + DQEJBTEPFw0wODA1MjIxNzU4MjRaMC8GCSqGSIb3DQEJBDEiBCDCyf9v9Wed515TRp2WwnyM + 1rk6dB///X+aqIym2e9jdTANBgkqhkiG9w0BAQEFAASCAQAFvzrHeRPW+wn4WSyoyBEq0zKS + Cyh5tu1qTR0NHs6Rr/p8Pk81P1HQLND/U+znJZKLWlO2niEHUXPIicPDYchbj8ApH9VxKA+1 + lCWllOzFAsYyZFr3/VNs9pVp2eT4F9eEYBrBVDSNrD72MMTlWm1T5MEXqltTJJOCKzUEX96x + 91iW6A+4erop7S8hpCnxqkTin4bFVreqYcGc4CC4bh+L9pPqJnURcEk7Qeu/WEHQBm38voB4 + S11qRZNrJMQ99oiJR7hXDIBm66HjGqoUL2gPCfpgJEVVnM9pVv2k889z4eTTck2Qj54gga2W + Xkvw4Je420aDx88s9T2+PqXcbZ4g + + + + + + + + + + + + + + + + + + + text string + + + + + + + diff --git a/ca/tests/rcynic.conf b/ca/tests/rcynic.conf new file mode 100644 index 00000000..ea31fe58 --- /dev/null +++ b/ca/tests/rcynic.conf @@ -0,0 +1,14 @@ +# $Id$ +# +# rcynic configuration for looking at yamltest results. 
+ +[rcynic] +xml-summary = rcynic.xml +jitter = 0 +use-links = yes +use-syslog = no +use-stderr = yes +log-level = log_debug +max-parallel-fetches = 32 + +trust-anchor-locator = yamltest.dir/root.tal diff --git a/ca/tests/revoke.yaml b/ca/tests/revoke.yaml new file mode 100644 index 00000000..2edb8335 --- /dev/null +++ b/ca/tests/revoke.yaml @@ -0,0 +1,420 @@ +# $Id: smoketest.1.yaml 3881 2011-06-17 18:32:54Z sra $ + +# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +name: RIR +crl_interval: 5m +regen_margin: 2m +valid_for: 2d +kids: + - name: R0 + kids: + - name: Alice + ipv4: 192.0.2.1-192.0.2.33 + asn: 64533 + roa_request: + - asn: 42 + ipv4: 192.0.2.32/32 + - name: Bob + ipv4: 192.0.2.44-192.0.2.100 + ipv4: 10.3.0.0/16 + roa_request: + - asn: 666 + ipv4: 10.3.0.44/32 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + rekey: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 + +- name: R0 + revoke: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- sleep 30 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + rekey: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + revoke: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . 
-type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- sleep 30 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + rekey: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + revoke: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- sleep 30 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + rekey: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + revoke: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- sleep 30 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + rekey: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . 
-type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + revoke: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- sleep 30 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + rekey: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + revoke: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- sleep 30 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + rekey: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + revoke: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- sleep 30 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . 
-type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + rekey: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + revoke: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- sleep 30 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + rekey: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + revoke: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- sleep 30 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + rekey: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . -type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- name: R0 + revoke: +- sleep 10 + +--- +- shell sleep 1; + dir=rcynic.`date +%s`.data; mkdir $dir; + cd rcynic-data; + pax -rwl . ../$dir; find . 
-type f -name '*.cer' | + sort | + xargs ../../../../utils/uri/uri -s + >../${dir%.data}.uris; + sleep 1 +- sleep 30 diff --git a/ca/tests/rootd.yaml b/ca/tests/rootd.yaml new file mode 100644 index 00000000..2ee5dcd4 --- /dev/null +++ b/ca/tests/rootd.yaml @@ -0,0 +1,24 @@ +# $Id$ +--- +version: 1 +posturl: https://localhost:4401/up-down/1 +recipient-id: "rootd" +sender-id: "RIR" + +cms-cert-file: RIR-RPKI-EE.cer +cms-key-file: RIR-RPKI-EE.key +cms-ca-cert-file: rootd-TA.cer +cms-cert-chain-file: [ RIR-RPKI-CA.cer ] + +ssl-cert-file: RIR-RPKI-EE.cer +ssl-key-file: RIR-RPKI-EE.key +ssl-ca-cert-file: rootd-TA.cer + +requests: + list: + type: list + issue: + type: issue + class: 1 + sia: + - rsync://localhost:4400/testbed/RIR/ diff --git a/ca/tests/rpki b/ca/tests/rpki new file mode 120000 index 00000000..8d289d0b --- /dev/null +++ b/ca/tests/rpki @@ -0,0 +1 @@ +../rpki \ No newline at end of file diff --git a/ca/tests/smoketest.1.yaml b/ca/tests/smoketest.1.yaml new file mode 100644 index 00000000..914aaae4 --- /dev/null +++ b/ca/tests/smoketest.1.yaml @@ -0,0 +1,89 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +name: RIR +crl_interval: 5m +regen_margin: 2m +valid_for: 2d +kids: + - name: R0 + ghostbuster: | + BEGIN:VCARD + VERSION:4.0 + FN:R0 + ORG:Organizational Entity + ADR;TYPE=WORK:;;42 Twisty Passage;Deep Cavern;WA;98666;U.S.A. + TEL;TYPE=VOICE,TEXT,WORK;VALUE=uri:tel:+1-666-555-1212 + TEL;TYPE=FAX,WORK;VALUE=uri:tel:+1-666-555-1213 + EMAIL:human@example.com + END:VCARD + kids: + - name: Alice + ipv4: 192.0.2.1-192.0.2.33 + asn: 64533 + roa_request: + - asn: 42 + ipv4: 192.0.2.32/32 + router_cert: + - router_id: 666 + asn: 42 + - name: Bob + ipv4: 192.0.2.44-192.0.2.100 + ipv4: 10.3.0.0/16 + roa_request: + - asn: 666 + ipv4: 10.3.0.44/32 + +--- +- shell set -x; + rtr_origin='python ../../../rtr-origin/rtr-origin.py'; + $rtr_origin --cronjob rcynic-data/authenticated && + $rtr_origin --show +--- +- name: R0 + rekey: +--- +- name: R0 + revoke: +--- +- name: Alice + valid_add: 10 +- name: R0 + roa_request_add: + - asn: 17 + ipv4: 10.3.0.1/32, 10.0.0.44/32 +--- +- shell set -x; + rtr_origin='python ../../../rtr-origin/rtr-origin.py'; + $rtr_origin --cronjob rcynic-data/authenticated && + $rtr_origin --show +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 diff --git a/ca/tests/smoketest.2.yaml b/ca/tests/smoketest.2.yaml new file mode 100644 index 00000000..0cdec650 --- /dev/null +++ b/ca/tests/smoketest.2.yaml @@ -0,0 +1,126 @@ +# $Id$ + +# Copyright (C) 2009 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and 
this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +name: RIR +valid_for: 2d +kids: + - name: R0 + kids: + - name: Alice + hosted_by: R0 + ipv4: 192.0.2.1-192.0.2.33 + asn: 64533 + - name: Bob + hosted_by: R0 + ipv4: 192.0.2.44-192.0.2.100 + - name: R1 + kids: + - name: Carol + hosted_by: R1 + ipv6: 2001:db8::44-2001:db8::100 + - name: Dave + hosted_by: R1 + ipv6: 2001:db8::10:0:44/128 + asn: 64544 + - name: R2 + kids: + - name: Elena + hosted_by: R2 + ipv4: 10.0.0.0/24 + ipv6: 2001:db8::a00:0/120 + - name: Frank + hosted_by: R2 + ipv4: 10.3.0.0/24 + ipv6: 2001:db8::a03:0/120 + - name: R3 + kids: + - name: Ginny + hosted_by: R3 + asn: 64534-64540 + - name: Harry + hosted_by: R3 + asn: 666-677 + - name: R4 + kids: + - name: Ilse + hosted_by: R4 + ipv4: 10.3.0.0/16 + - name: Jack + hosted_by: R4 + ipv4: 10.2.0.0/16 + - name: R5 + kids: + - name: Kari + hosted_by: R5 + asn: 222-233 + - name: Leon + hosted_by: R5 + asn: 244-255 + - name: R6 + kids: + - name: Mary + hosted_by: R6 + ipv4: 10.77.0.0/16 + - name: Neal + hosted_by: R6 + ipv4: 10.66.0.0/16 + - name: R7 + kids: + - name: Olga + hosted_by: R7 + ipv4: 10.88.0.0/16 + - name: Piet + hosted_by: R7 + ipv4: 10.99.0.0/16 + - name: R8 + kids: + - name: Qi + hosted_by: R8 + asn: 111-122 + - name: Rex + hosted_by: R8 + asn: 333-344 + - name: R9 + kids: + - name: Sandra + hosted_by: R9 + asn: 555-566 + - name: Thad + hosted_by: R9 + asn: 577-588 +--- +- name: Alice + add_as: 33 +--- +- name: Alice + sub_as: 33 +--- +- name: Alice + valid_for: 365d diff --git a/ca/tests/smoketest.3.yaml b/ca/tests/smoketest.3.yaml new file mode 100644 index 00000000..e6a10a12 --- /dev/null +++ b/ca/tests/smoketest.3.yaml @@ -0,0 +1,81 @@ +# $Id$ + +# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +name: RIR +crl_interval: 2d +regen_margin: 1h +valid_for: 90d +kids: + - name: R0 + kids: + - name: Alice + ipv4: 192.0.2.0-192.0.2.33 + ipv6: 2002:0a00::/32 + asn: 64533 + roa_request: + - asn: 42 + ipv4: 192.0.2.0/30-32,192.0.2.32/32 + ipv6: 2002:0a00::/32-128 + - name: Bob + ipv4: 192.0.2.44-192.0.2.100 + ipv4: 10.3.0.0/16 + roa_request: + - asn: 666 + ipv4: 10.3.0.0/23 +--- +#- shell find publication -type f -name '*.roa' +# -print -exec ../../../utils/print_roa/print_roa {} \; +#- shell find publication -type f -name '*.mft' +# -print -exec ../../../utils/print_manifest/print_manifest {} \; +#--- +#- shell find publication -type f -name '*.roa' +# -print -exec ../../../utils/print_roa/print_roa {} \; +#- shell find publication -type f -name '*.mft' +# -print -exec ../../../utils/print_manifest/print_manifest {} \; +#--- +- shell set -x; + rtr_origin=../../../rtr-origin/rtr-origin; + $rtr_origin --cronjob rcynic-data/authenticated && + $rtr_origin --show +--- +- name: Alice + roa_request_del: + - asn: 42 + ipv4: 192.0.2.0/30-32,192.0.2.32/32 + ipv6: 2002:0a00::/32-128 + roa_request_add: + - asn: 666 + ipv4: 192.0.2.0/30-32,192.0.2.32/32 + ipv6: 2002:0a00::/32-128 +--- +- shell set -x; + rtr_origin=../../../rtr-origin/rtr-origin; + $rtr_origin --cronjob rcynic-data/authenticated && + $rtr_origin --show diff --git a/ca/tests/smoketest.4.yaml b/ca/tests/smoketest.4.yaml new file mode 100644 index 00000000..c0d446bc --- /dev/null +++ b/ca/tests/smoketest.4.yaml @@ -0,0 +1,72 @@ +# $Id$ + +# Copyright (C) 2011 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +# This is a test of what happens when certificates go missing in +# action, IRDB data expires, etc. Expected result: +# +# - RIR, R0, and Alice get certs +# - Bob gets no cert at all +# - RO and Alice have short-lived certs, which go away +# - Test ends with only RIR having a cert +# +# If run on a very slow machine, the 60 second expiration may have +# already passed by the time everything is up and running, in which +# case nobody but RIR will ever get any certs. 
+# +# The extra cycles with no sleep are deliberate, at one point we had a +# cycle where parent would issue a cert that had already expired, +# which led to a tight loop of revocation and reissuance every cycle; +# we're checking to make sure that doesn't happen anymore, although +# things should never get to that point because list_response should +# discourage the child from ever asking for a cert in the first place. + +name: RIR +valid_for: 60 +kids: + - name: R0 + kids: + - name: Alice + ipv4: 192.0.2.1-192.0.2.33 + asn: 64533 + - name: Bob + ipv4: 192.0.2.34-192.0.2.65 + valid_for: -10 +--- +--- +--- +--- +--- +--- +- sleep 30 +--- +- sleep 30 +--- +--- +--- diff --git a/ca/tests/smoketest.5.yaml b/ca/tests/smoketest.5.yaml new file mode 100644 index 00000000..c6304dfc --- /dev/null +++ b/ca/tests/smoketest.5.yaml @@ -0,0 +1,65 @@ +# $Id$ + +# Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +rootd: + lifetime: 2m30s +name: RIR +crl_interval: 1m30s +regen_margin: 2m +valid_for: 1h +kids: + - name: R0 + kids: + - name: Alice + ipv4: 192.0.2.1-192.0.2.33 + asn: 64533 + roa_request: + - asn: 42 + ipv4: 192.0.2.32/32 + - name: Bob + ipv4: 192.0.2.44-192.0.2.100 + ipv4: 10.3.0.0/16 + roa_request: + - asn: 666 + ipv4: 10.3.0.44/32 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 +--- +- sleep 30 diff --git a/ca/tests/smoketest.6.yaml b/ca/tests/smoketest.6.yaml new file mode 100644 index 00000000..e8d65433 --- /dev/null +++ b/ca/tests/smoketest.6.yaml @@ -0,0 +1,81 @@ +# $Id$ + +# Copyright (C) 2009-2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +name: RIR +valid_for: 4w +kids: + - name: Alice + hosted_by: RIR + # + # To test immediate expiration + #valid_for: 5m + # + # To test what happens when we reach rgen_margin + #valid_for: 2w2h5m + #valid_for: 2w5m + # + kids: + - name: Betty + hosted_by: RIR + kids: + - name: Carol + hosted_by: RIR + ghostbuster: | + BEGIN:VCARD + VERSION:4.0 + FN:Carol Clever + EMAIL:carol@example.org + END:VCARD + kids: + - name: Dana + hosted_by: RIR + kids: + - name: Eve + hosted_by: RIR + kids: + - name: Fiona + hosted_by: RIR + ipv4: 192.0.2.1-192.0.2.33 + asn: 64533 + ipv6: 2001:db8::44-2001:db8::100 + roa_request: + - asn: 64533 + ipv6: 2001:db8::80/121 + ghostbusters: + - | + BEGIN:VCARD + VERSION:4.0 + FN:Fiona Fearless + EMAIL:fiona@example.org + END:VCARD + - | + BEGIN:VCARD + VERSION:4.0 + FN:Frank Fearless + EMAIL:frank@example.org + END:VCARD +--- +- name: Fiona + add_as: 33 +--- +- name: Fiona + sub_as: 33 +--- +--- +--- +--- +#- name: Fiona +# valid_for: 365d diff --git a/ca/tests/smoketest.7.yaml b/ca/tests/smoketest.7.yaml new file mode 100644 index 00000000..fedd2fff --- /dev/null +++ b/ca/tests/smoketest.7.yaml @@ -0,0 +1,77 @@ +# $Id$ + +# Copyright (C) 2009 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +# Test configuration to generate some initial ROAs for Pradosh to use +# in testing his code, until Randy has the live testbed set up. + +# Python code to remove overlaps in a set of prefixes (needed to take +# something like this peval output and feed it into smoketest.py): +# +# import rpki.resource_set +# +# r = rpki.resource_set.resource_set_ipv4() +# +# for p in prefixes: +# r = r.union(rpki.resource_set.resource_set_ipv4(p)) +# +# print r +# +# +# where "prefixes" is a sequence of prefixes in text form, eg, what +# you'd get if you applied .split(", ") to the peval output below. + + +# ran.psg.com:/usr/home/randy> peval as3130 +# ({198.180.152.0/24, 198.180.153.0/24, 198.180.152.0/25, 198.180.152.128/25, 198.180.150.0/24, 198.180.151.0/24, 198.133.206.0/24, 192.83.230.0/24, 147.28.0.0/16, 147.28.128.0/17, 147.28.128.0/18, 147.28.192.0/18, 147.28.192.0/19, 147.28.192.0/20, 147.28.192.0/21, 147.28.192.0/22, 147.28.192.0/23, 147.28.192.0/24, 147.28.192.0/25, 147.28.192.0/26, 147.28.128.0/19, 147.28.128.0/20, 147.28.128.0/21, 147.28.128.0/22, 147.28.128.0/23, 147.28.128.0/24, 147.28.128.0/25, 147.28.128.0/26}) +# +# ran.psg.com:/usr/home/randy> peval as2914 +# ({216.167.0.0/17, 216.105.240.0/24, 216.44.0.0/16, 216.42.0.0/16, 213.198.0.0/17, 213.130.32.0/19, 212.119.0.0/19, 212.105.160.0/19, 211.130.96.0/19, 211.1.32.0/19, 211.1.60.0/22, 211.1.48.0/23, 211.1.32.0/20, 210.175.160.0/19, 209.243.96.0/20, 209.243.70.0/23, 209.238.0.0/16, 209.227.0.0/17, 209.207.128.0/17, 209.189.0.0/17, 209.170.0.0/18, 209.168.0.0/17, 209.162.64.0/18, 209.157.0.0/16, 209.139.128.0/18, 209.139.0.0/17, 209.130.0.0/17, 209.124.0.0/19, 209.112.96.0/20, 209.107.64.0/19, 209.107.0.0/18, 209.75.0.0/16, 209.70.0.0/16, 209.69.0.0/16, 209.59.32.0/19, 209.57.0.0/16, 209.43.128.0/17, 209.41.0.0/18, 209.39.0.0/16, 209.24.0.0/16, 209.21.0.0/18, 208.55.0.0/16, 207.241.0.0/17, 207.207.128.0/19, 207.206.0.0/17, 207.201.128.0/18, 207.199.0.0/17, 207.198.128.0/17, 207.197.128.0/17, 207.196.0.0/17, 
207.195.240.0/20, 207.159.0.0/18, 207.158.192.0/18, 207.156.128.0/17, 207.153.128.0/17, 207.152.64.0/18, 207.150.128.0/19, 207.150.0.0/17, 207.137.0.0/16, 207.126.254.0/23, 207.126.240.0/21, 207.111.64.0/18, 207.97.0.0/17, 207.91.64.0/18, 207.71.64.0/18, 207.67.128.0/17, 207.58.0.0/17, 207.56.0.0/15, 207.55.192.0/19, 207.55.128.0/18, 207.33.0.0/16, 207.32.64.0/18, 207.31.192.0/18, 207.22.64.0/18, 207.21.128.0/18, 207.21.0.0/17, 207.20.0.0/16, 206.252.0.0/19, 206.239.0.0/16, 206.222.32.0/19, 206.213.64.0/18, 206.197.192.0/24, 206.197.81.0/24, 206.184.0.0/16, 206.183.192.0/19, 206.169.194.0/24, 206.169.195.0/24, 206.169.186.0/24, 206.169.41.0/24, 206.166.128.0/18, 206.163.192.0/19, 206.163.128.0/18, 206.163.0.0/17, 206.86.0.0/16, 206.82.32.0/19, 206.80.32.0/19, 206.68.0.0/15, 206.58.0.0/16, 206.55.0.0/18, 206.54.0.0/18, 206.52.0.0/16, 206.50.0.0/16, 206.14.0.0/16, 205.238.0.0/18, 205.212.0.0/16, 205.157.128.0/20, 205.153.56.0/22, 205.149.160.0/19, 205.146.0.0/16, 204.247.0.0/16, 204.245.128.0/17, 204.233.0.0/16, 204.227.160.0/19, 204.200.0.0/14, 204.194.176.0/21, 204.170.0.0/15, 204.156.128.0/19, 204.156.0.0/19, 204.142.0.0/15, 204.141.0.0/16, 204.108.0.0/23, 204.75.146.0/24, 204.68.197.0/24, 204.62.232.0/24, 204.57.32.0/19, 204.42.0.0/16, 204.0.0.0/14, 204.0.43.0/24, 203.215.136.0/23, 203.208.120.0/21, 203.208.120.0/22, 203.208.124.0/22, 203.208.120.0/23, 203.208.122.0/23, 203.208.124.0/23, 203.208.126.0/23, 203.208.120.0/24, 203.208.121.0/24, 203.208.122.0/24, 203.208.123.0/24, 203.208.124.0/24, 203.208.125.0/24, 203.208.126.0/24, 203.208.127.0/24, 203.208.80.0/21, 203.208.80.0/22, 203.208.84.0/22, 203.208.80.0/23, 203.208.82.0/23, 203.208.84.0/23, 203.208.86.0/23, 203.208.80.0/24, 203.208.81.0/24, 203.208.82.0/24, 203.208.83.0/24, 203.208.84.0/24, 203.208.85.0/24, 203.208.86.0/24, 203.208.87.0/24, 203.205.112.0/20, 203.131.240.0/20, 203.131.248.0/21, 203.105.64.0/19, 203.105.80.0/21, 203.105.72.0/22, 203.78.192.0/20, 203.33.3.0/24, 203.32.132.0/24, 203.20.71.0/24, 
203.12.225.0/24, 202.237.244.0/24, 202.163.134.0/24, 202.163.132.0/24, 202.163.128.0/22, 202.163.128.0/24, 202.163.129.0/24, 202.163.130.0/24, 202.163.131.0/24, 202.153.208.0/20, 202.69.224.0/20, 202.68.64.0/20, 202.68.64.0/21, 202.68.72.0/21, 202.68.64.0/22, 202.68.68.0/22, 202.68.72.0/22, 202.68.76.0/22, 202.68.64.0/23, 202.68.66.0/23, 202.68.68.0/23, 202.68.70.0/23, 202.68.72.0/23, 202.68.74.0/23, 202.68.76.0/23, 202.68.78.0/23, 202.68.64.0/24, 202.68.65.0/24, 202.68.66.0/24, 202.68.67.0/24, 202.68.68.0/24, 202.68.69.0/24, 202.68.70.0/24, 202.68.71.0/24, 202.68.72.0/24, 202.68.73.0/24, 202.68.74.0/24, 202.68.75.0/24, 202.68.76.0/24, 202.68.77.0/24, 202.68.78.0/24, 202.68.79.0/24, 202.47.16.0/20, 202.23.124.0/24, 200.15.0.0/16, 199.245.16.0/20, 199.240.0.0/16, 199.236.0.0/14, 199.234.0.0/16, 199.224.0.0/20, 199.217.128.0/17, 199.212.0.0/24, 199.201.197.0/24, 199.184.226.0/24, 199.184.212.0/24, 199.164.210.0/24, 199.103.128.0/17, 199.73.40.0/23, 199.73.32.0/21, 199.4.64.0/18, 198.252.194.0/23, 198.247.0.0/16, 198.232.16.0/24, 198.172.0.0/15, 198.170.0.0/15, 198.170.208.0/24, 198.138.0.0/15, 198.106.0.0/15, 198.104.0.0/16, 198.88.0.0/16, 198.87.0.0/16, 198.84.16.0/20, 198.66.0.0/16, 198.64.0.0/15, 198.63.0.0/16, 195.234.244.0/22, 192.220.0.0/16, 192.217.0.0/16, 192.204.0.0/16, 192.195.85.0/24, 192.159.82.0/24, 192.147.176.0/22, 192.147.175.0/24, 192.147.160.0/21, 192.108.74.0/23, 192.102.248.0/24, 192.80.12.0/22, 192.67.240.0/23, 192.67.236.0/22, 192.41.219.0/24, 192.41.171.0/24, 192.11.188.0/24, 170.253.0.0/16, 170.250.0.0/16, 170.249.64.0/19, 170.249.0.0/19, 168.143.0.0/16, 165.254.0.0/16, 164.162.0.0/16, 161.58.0.0/16, 159.230.128.0/20, 159.230.138.0/24, 157.238.0.0/16, 157.107.0.0/16, 154.37.0.0/16, 140.174.0.0/16, 131.103.0.0/16, 130.94.0.0/16, 130.94.60.0/24, 129.250.0.0/16, 129.192.196.0/22, 129.7.136.0/24, 128.242.0.0/16, 128.241.0.0/16, 128.241.83.0/29, 128.121.0.0/16, 125.56.144.0/21, 125.56.152.0/21, 124.40.0.0/18, 124.40.0.0/19, 124.40.32.0/19, 
122.255.80.0/20, 120.29.160.0/19, 120.29.144.0/21, 119.161.104.0/21, 118.215.168.0/21, 118.215.136.0/21, 118.215.64.0/21, 118.214.208.0/21, 118.214.216.0/21, 117.104.128.0/19, 117.104.64.0/18, 117.103.176.0/20, 116.51.16.0/21, 96.17.167.0/24, 96.17.157.0/24, 96.17.155.0/24, 96.17.32.0/20, 96.16.224.0/21, 96.16.232.0/21, 96.16.240.0/21, 96.16.248.0/21, 96.6.224.0/20, 96.6.176.0/20, 96.6.144.0/20, 96.6.40.0/24, 91.186.160.0/19, 89.238.138.0/24, 83.231.128.0/17, 82.112.96.0/19, 81.93.208.0/20, 81.93.176.0/20, 81.93.189.0/24, 81.25.192.0/20, 81.20.64.0/20, 81.19.96.0/20, 80.68.16.0/21, 72.247.200.0/21, 72.247.128.0/21, 72.247.125.0/24, 72.247.56.0/22, 72.247.52.0/22, 72.246.32.0/21, 69.192.96.0/20, 69.192.32.0/20, 69.192.48.0/20, 69.55.56.0/23, 69.41.176.0/21, 69.41.168.0/21, 69.41.166.0/23, 69.41.165.0/24, 69.41.160.0/24, 66.249.144.0/24, 66.187.28.0/24, 64.7.64.0/19, 62.73.160.0/19, 61.251.96.0/20, 61.213.160.0/19, 61.213.144.0/20, 61.200.80.0/20, 61.200.80.0/21, 61.200.88.0/21, 61.120.144.0/20, 61.120.144.0/21, 61.120.152.0/21, 61.114.112.0/20, 61.114.120.0/21, 61.114.112.0/22, 61.58.32.0/20, 61.28.200.0/24, 61.28.199.0/24, 60.254.153.0/24, 60.254.132.0/22, 59.151.184.0/22}) +# +# ran.psg.com:/usr/home/randy> peval as1239 +# ({207.7.0.0/18, 204.248.180.0/25, 204.241.122.0/24, 204.217.244.0/24, 203.98.192.0/19, 193.188.96.0/23, 192.77.142.0/24, 192.31.36.0/24, 192.31.32.0/22, 192.23.224.0/21, 192.23.208.0/20, 192.23.76.0/24, 192.23.75.0/24, 163.183.0.0/16, 157.245.70.0/24, 134.32.0.0/16, 129.87.0.0/16, 85.237.96.0/19, 72.246.128.0/20, 65.168.150.0/23, 65.168.149.0/24, 63.172.252.0/22, 63.171.143.128/25, 63.169.52.128/25}) +# +# ran.psg.com:/usr/home/randy> peval as701 +# ({208.91.236.0/22, 203.33.196.0/24, 203.27.251.0/24, 198.80.148.0/24, 198.80.131.0/24, 157.130.103.144/30, 140.222.224.0/24, 65.243.171.0/24, 63.122.162.212/30, 63.116.191.0/24, 63.81.136.0/24, 17.0.0.0/8, 17.128.0.0/9}) + + +name: Alice + +valid_for: 2d + +ipv4: 
17.0.0.0/8,59.151.184.0/22,60.254.132.0/22,60.254.153.0/24,61.28.199.0-61.28.200.255,61.58.32.0/20,61.114.112.0/20,61.120.144.0/20,61.200.80.0/20,61.213.144.0-61.213.191.255,61.251.96.0/20,62.73.160.0/19,63.81.136.0/24,63.116.191.0/24,63.122.162.212/30,63.169.52.128/25,63.171.143.128/25,63.172.252.0/22,64.7.64.0/19,65.168.149.0-65.168.151.255,65.243.171.0/24,66.187.28.0/24,66.249.144.0/24,69.41.160.0/24,69.41.165.0-69.41.183.255,69.55.56.0/23,69.192.32.0/19,69.192.96.0/20,72.246.32.0/21,72.246.128.0/20,72.247.52.0-72.247.59.255,72.247.125.0/24,72.247.128.0/21,72.247.200.0/21,80.68.16.0/21,81.19.96.0/20,81.20.64.0/20,81.25.192.0/20,81.93.176.0/20,81.93.208.0/20,82.112.96.0/19,83.231.128.0/17,85.237.96.0/19,89.238.138.0/24,91.186.160.0/19,96.6.40.0/24,96.6.144.0/20,96.6.176.0/20,96.6.224.0/20,96.16.224.0/19,96.17.32.0/20,96.17.155.0/24,96.17.157.0/24,96.17.167.0/24,116.51.16.0/21,117.103.176.0/20,117.104.64.0-117.104.159.255,118.214.208.0/20,118.215.64.0/21,118.215.136.0/21,118.215.168.0/21,119.161.104.0/21,120.29.144.0/21,120.29.160.0/19,122.255.80.0/20,124.40.0.0/18,125.56.144.0/20,128.121.0.0/16,128.241.0.0-128.242.255.255,129.7.136.0/24,129.87.0.0/16,129.192.196.0/22,129.250.0.0/16,130.94.0.0/16,131.103.0.0/16,134.32.0.0/16,140.174.0.0/16,140.222.224.0/24,147.28.0.0/16,154.37.0.0/16,157.107.0.0/16,157.130.103.144/30,157.238.0.0/16,157.245.70.0/24,159.230.128.0/20,161.58.0.0/16,163.183.0.0/16,164.162.0.0/16,165.254.0.0/16,168.143.0.0/16,170.249.0.0/19,170.249.64.0/19,170.250.0.0/16,170.253.0.0/16,192.11.188.0/24,192.23.75.0-192.23.76.255,192.23.208.0-192.23.231.255,192.31.32.0-192.31.36.255,192.41.171.0/24,192.41.219.0/24,192.67.236.0-192.67.241.255,192.77.142.0/24,192.80.12.0/22,192.83.230.0/24,192.102.248.0/24,192.108.74.0/23,192.147.160.0/21,192.147.175.0-192.147.179.255,192.159.82.0/24,192.195.85.0/24,192.204.0.0/16,192.217.0.0/16,192.220.0.0/16,193.188.96.0/23,195.234.244.0/22,198.63.0.0-198.66.255.255,198.80.131.0/24,198.80.148.0/24,198.84.16.0/20,198.87.0.0-
198.88.255.255,198.104.0.0/16,198.106.0.0/15,198.133.206.0/24,198.138.0.0/15,198.170.0.0-198.173.255.255,198.180.150.0-198.180.153.255,198.232.16.0/24,198.247.0.0/16,198.252.194.0/23,199.4.64.0/18,199.73.32.0-199.73.41.255,199.103.128.0/17,199.164.210.0/24,199.184.212.0/24,199.184.226.0/24,199.201.197.0/24,199.212.0.0/24,199.217.128.0/17,199.224.0.0/20,199.234.0.0/16,199.236.0.0-199.240.255.255,199.245.16.0/20,200.15.0.0/16,202.23.124.0/24,202.47.16.0/20,202.68.64.0/20,202.69.224.0/20,202.153.208.0/20,202.163.128.0-202.163.132.255,202.163.134.0/24,202.237.244.0/24,203.12.225.0/24,203.20.71.0/24,203.27.251.0/24,203.32.132.0/24,203.33.3.0/24,203.33.196.0/24,203.78.192.0/20,203.98.192.0/19,203.105.64.0/19,203.131.240.0/20,203.205.112.0/20,203.208.80.0/21,203.208.120.0/21,203.215.136.0/23,204.0.0.0/14,204.42.0.0/16,204.57.32.0/19,204.62.232.0/24,204.68.197.0/24,204.75.146.0/24,204.108.0.0/23,204.141.0.0-204.143.255.255,204.156.0.0/19,204.156.128.0/19,204.170.0.0/15,204.194.176.0/21,204.200.0.0/14,204.217.244.0/24,204.227.160.0/19,204.233.0.0/16,204.241.122.0/24,204.245.128.0/17,204.247.0.0/16,204.248.180.0/25,205.146.0.0/16,205.149.160.0/19,205.153.56.0/22,205.157.128.0/20,205.212.0.0/16,205.238.0.0/18,206.14.0.0/16,206.50.0.0/16,206.52.0.0/16,206.54.0.0/18,206.55.0.0/18,206.58.0.0/16,206.68.0.0/15,206.80.32.0/19,206.82.32.0/19,206.86.0.0/16,206.163.0.0-206.163.223.255,206.166.128.0/18,206.169.41.0/24,206.169.186.0/24,206.169.194.0/23,206.183.192.0/19,206.184.0.0/16,206.197.81.0/24,206.197.192.0/24,206.213.64.0/18,206.222.32.0/19,206.239.0.0/16,206.252.0.0/19,207.7.0.0/18,207.20.0.0-207.21.191.255,207.22.64.0/18,207.31.192.0/18,207.32.64.0/18,207.33.0.0/16,207.55.128.0-207.55.223.255,207.56.0.0-207.58.127.255,207.67.128.0/17,207.71.64.0/18,207.91.64.0/18,207.97.0.0/17,207.111.64.0/18,207.126.240.0/21,207.126.254.0/23,207.137.0.0/16,207.150.0.0-207.150.159.255,207.152.64.0/18,207.153.128.0/17,207.156.128.0/17,207.158.192.0-207.159.63.255,207.195.240.0-207.196.127.255,207
.197.128.0/17,207.198.128.0-207.199.127.255,207.201.128.0/18,207.206.0.0/17,207.207.128.0/19,207.241.0.0/17,208.55.0.0/16,208.91.236.0/22,209.21.0.0/18,209.24.0.0/16,209.39.0.0/16,209.41.0.0/18,209.43.128.0/17,209.57.0.0/16,209.59.32.0/19,209.69.0.0-209.70.255.255,209.75.0.0/16,209.107.0.0-209.107.95.255,209.112.96.0/20,209.124.0.0/19,209.130.0.0/17,209.139.0.0-209.139.191.255,209.157.0.0/16,209.162.64.0/18,209.168.0.0/17,209.170.0.0/18,209.189.0.0/17,209.207.128.0/17,209.227.0.0/17,209.238.0.0/16,209.243.70.0/23,209.243.96.0/20,210.175.160.0/19,211.1.32.0/19,211.130.96.0/19,212.105.160.0/19,212.119.0.0/19,213.130.32.0/19,213.198.0.0/17,216.42.0.0/16,216.44.0.0/16,216.105.240.0/24,216.167.0.0/17 + +roa_request: + + - asn: 3130 + ipv4: 198.180.152.0/24,198.180.153.0/24,198.180.152.0/25,198.180.152.128/25,198.180.150.0/24,198.180.151.0/24,198.133.206.0/24,192.83.230.0/24,147.28.0.0/16-24 + + - asn: 2914 + ipv4: 216.167.0.0/17,216.105.240.0/24,216.44.0.0/16,216.42.0.0/16,213.198.0.0/17,213.130.32.0/19,212.119.0.0/19,212.105.160.0/19,211.130.96.0/19,211.1.32.0/19,211.1.60.0/22,211.1.48.0/23,211.1.32.0/20,210.175.160.0/19,209.243.96.0/20,209.243.70.0/23,209.238.0.0/16,209.227.0.0/17,209.207.128.0/17,209.189.0.0/17,209.170.0.0/18,209.168.0.0/17,209.162.64.0/18,209.157.0.0/16,209.139.128.0/18,209.139.0.0/17,209.130.0.0/17,209.124.0.0/19,209.112.96.0/20,209.107.64.0/19,209.107.0.0/18,209.75.0.0/16,209.70.0.0/16,209.69.0.0/16,209.59.32.0/19,209.57.0.0/16,209.43.128.0/17,209.41.0.0/18,209.39.0.0/16,209.24.0.0/16,209.21.0.0/18,208.55.0.0/16,207.241.0.0/17,207.207.128.0/19,207.206.0.0/17,207.201.128.0/18,207.199.0.0/17,207.198.128.0/17,207.197.128.0/17,207.196.0.0/17,207.195.240.0/20,207.159.0.0/18,207.158.192.0/18,207.156.128.0/17,207.153.128.0/17,207.152.64.0/18,207.150.128.0/19,207.150.0.0/17,207.137.0.0/16,207.126.254.0/23,207.126.240.0/21,207.111.64.0/18,207.97.0.0/17,207.91.64.0/18,207.71.64.0/18,207.67.128.0/17,207.58.0.0/17,207.56.0.0/15,207.55.192.0/19,207.55.128.0/18,
207.33.0.0/16,207.32.64.0/18,207.31.192.0/18,207.22.64.0/18,207.21.128.0/18,207.21.0.0/17,207.20.0.0/16,206.252.0.0/19,206.239.0.0/16,206.222.32.0/19,206.213.64.0/18,206.197.192.0/24,206.197.81.0/24,206.184.0.0/16,206.183.192.0/19,206.169.194.0/24,206.169.195.0/24,206.169.186.0/24,206.169.41.0/24,206.166.128.0/18,206.163.192.0/19,206.163.128.0/18,206.163.0.0/17,206.86.0.0/16,206.82.32.0/19,206.80.32.0/19,206.68.0.0/15,206.58.0.0/16,206.55.0.0/18,206.54.0.0/18,206.52.0.0/16,206.50.0.0/16,206.14.0.0/16,205.238.0.0/18,205.212.0.0/16,205.157.128.0/20,205.153.56.0/22,205.149.160.0/19,205.146.0.0/16,204.247.0.0/16,204.245.128.0/17,204.233.0.0/16,204.227.160.0/19,204.200.0.0/14,204.194.176.0/21,204.170.0.0/15,204.156.128.0/19,204.156.0.0/19,204.142.0.0/15,204.141.0.0/16,204.108.0.0/23,204.75.146.0/24,204.68.197.0/24,204.62.232.0/24,204.57.32.0/19,204.42.0.0/16,204.0.0.0/14,204.0.43.0/24,203.215.136.0/23,203.208.120.0/21,203.208.120.0/22,203.208.124.0/22,203.208.120.0/23,203.208.122.0/23,203.208.124.0/23,203.208.126.0/23,203.208.120.0/24,203.208.121.0/24,203.208.122.0/24,203.208.123.0/24,203.208.124.0/24,203.208.125.0/24,203.208.126.0/24,203.208.127.0/24,203.208.80.0/21,203.208.80.0/22,203.208.84.0/22,203.208.80.0/23,203.208.82.0/23,203.208.84.0/23,203.208.86.0/23,203.208.80.0/24,203.208.81.0/24,203.208.82.0/24,203.208.83.0/24,203.208.84.0/24,203.208.85.0/24,203.208.86.0/24,203.208.87.0/24,203.205.112.0/20,203.131.240.0/20,203.131.248.0/21,203.105.64.0/19,203.105.80.0/21,203.105.72.0/22,203.78.192.0/20,203.33.3.0/24,203.32.132.0/24,203.20.71.0/24,203.12.225.0/24,202.237.244.0/24,202.163.134.0/24,202.163.132.0/24,202.163.128.0/22,202.163.128.0/24,202.163.129.0/24,202.163.130.0/24,202.163.131.0/24,202.153.208.0/20,202.69.224.0/20,202.68.64.0/20,202.68.64.0/21,202.68.72.0/21,202.68.64.0/22,202.68.68.0/22,202.68.72.0/22,202.68.76.0/22,202.68.64.0/23,202.68.66.0/23,202.68.68.0/23,202.68.70.0/23,202.68.72.0/23,202.68.74.0/23,202.68.76.0/23,202.68.78.0/23,202.68.64.0/24,202.68.65.
0/24,202.68.66.0/24,202.68.67.0/24,202.68.68.0/24,202.68.69.0/24,202.68.70.0/24,202.68.71.0/24,202.68.72.0/24,202.68.73.0/24,202.68.74.0/24,202.68.75.0/24,202.68.76.0/24,202.68.77.0/24,202.68.78.0/24,202.68.79.0/24,202.47.16.0/20,202.23.124.0/24,200.15.0.0/16,199.245.16.0/20,199.240.0.0/16,199.236.0.0/14,199.234.0.0/16,199.224.0.0/20,199.217.128.0/17,199.212.0.0/24,199.201.197.0/24,199.184.226.0/24,199.184.212.0/24,199.164.210.0/24,199.103.128.0/17,199.73.40.0/23,199.73.32.0/21,199.4.64.0/18,198.252.194.0/23,198.247.0.0/16,198.232.16.0/24,198.172.0.0/15,198.170.0.0/15,198.170.208.0/24,198.138.0.0/15,198.106.0.0/15,198.104.0.0/16,198.88.0.0/16,198.87.0.0/16,198.84.16.0/20,198.66.0.0/16,198.64.0.0/15,198.63.0.0/16,195.234.244.0/22,192.220.0.0/16,192.217.0.0/16,192.204.0.0/16,192.195.85.0/24,192.159.82.0/24,192.147.176.0/22,192.147.175.0/24,192.147.160.0/21,192.108.74.0/23,192.102.248.0/24,192.80.12.0/22,192.67.240.0/23,192.67.236.0/22,192.41.219.0/24,192.41.171.0/24,192.11.188.0/24,170.253.0.0/16,170.250.0.0/16,170.249.64.0/19,170.249.0.0/19,168.143.0.0/16,165.254.0.0/16,164.162.0.0/16,161.58.0.0/16,159.230.128.0/20,159.230.138.0/24,157.238.0.0/16,157.107.0.0/16,154.37.0.0/16,140.174.0.0/16,131.103.0.0/16,130.94.0.0/16,130.94.60.0/24,129.250.0.0/16,129.192.196.0/22,129.7.136.0/24,128.242.0.0/16,128.241.0.0/16,128.241.83.0/29,128.121.0.0/16,125.56.144.0/21,125.56.152.0/21,124.40.0.0/18,124.40.0.0/19,124.40.32.0/19,122.255.80.0/20,120.29.160.0/19,120.29.144.0/21,119.161.104.0/21,118.215.168.0/21,118.215.136.0/21,118.215.64.0/21,118.214.208.0/21,118.214.216.0/21,117.104.128.0/19,117.104.64.0/18,117.103.176.0/20,116.51.16.0/21,96.17.167.0/24,96.17.157.0/24,96.17.155.0/24,96.17.32.0/20,96.16.224.0/21,96.16.232.0/21,96.16.240.0/21,96.16.248.0/21,96.6.224.0/20,96.6.176.0/20,96.6.144.0/20,96.6.40.0/24,91.186.160.0/19,89.238.138.0/24,83.231.128.0/17,82.112.96.0/19,81.93.208.0/20,81.93.176.0/20,81.93.189.0/24,81.25.192.0/20,81.20.64.0/20,81.19.96.0/20,80.68.16.0/21,72.247.200.0
/21,72.247.128.0/21,72.247.125.0/24,72.247.56.0/22,72.247.52.0/22,72.246.32.0/21,69.192.96.0/20,69.192.32.0/20,69.192.48.0/20,69.55.56.0/23,69.41.176.0/21,69.41.168.0/21,69.41.166.0/23,69.41.165.0/24,69.41.160.0/24,66.249.144.0/24,66.187.28.0/24,64.7.64.0/19,62.73.160.0/19,61.251.96.0/20,61.213.160.0/19,61.213.144.0/20,61.200.80.0/20,61.200.80.0/21,61.200.88.0/21,61.120.144.0/20,61.120.144.0/21,61.120.152.0/21,61.114.112.0/20,61.114.120.0/21,61.114.112.0/22,61.58.32.0/20,61.28.200.0/24,61.28.199.0/24,60.254.153.0/24,60.254.132.0/22,59.151.184.0/22 + + - asn: 1239 + ipv4: 207.7.0.0/18,204.248.180.0/25,204.241.122.0/24,204.217.244.0/24,203.98.192.0/19,193.188.96.0/23,192.77.142.0/24,192.31.36.0/24,192.31.32.0/22,192.23.224.0/21,192.23.208.0/20,192.23.76.0/24,192.23.75.0/24,163.183.0.0/16,157.245.70.0/24,134.32.0.0/16,129.87.0.0/16,85.237.96.0/19,72.246.128.0/20,65.168.150.0/23,65.168.149.0/24,63.172.252.0/22,63.171.143.128/25,63.169.52.128/25 + + - asn: 701 + ipv4: 208.91.236.0/22,203.33.196.0/24,203.27.251.0/24,198.80.148.0/24,198.80.131.0/24,157.130.103.144/30,140.222.224.0/24,65.243.171.0/24,63.122.162.212/30,63.116.191.0/24,63.81.136.0/24,17.0.0.0/8,17.128.0.0/9 + +--- +- shell set -x; + find publication -type f -name '*.roa' + -print -exec ../../../utils/print_roa/print_roa {} \; + ; + rtr_origin=../../../rtr-origin/rtr-origin; + $rtr_origin --cronjob rcynic-data/authenticated && + $rtr_origin --show diff --git a/ca/tests/smoketest.8.yaml b/ca/tests/smoketest.8.yaml new file mode 100644 index 00000000..cd6d1e7a --- /dev/null +++ b/ca/tests/smoketest.8.yaml @@ -0,0 +1,41 @@ +# $Id$ + +# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +name: RIR +valid_for: 2d +kids: + - name: Alice + hosted_by: RIR + kids: + - name: Betty + hosted_by: RIR + kids: + - name: Carol + hosted_by: RIR + kids: + - name: Dana + hosted_by: RIR + kids: + - name: Eve + hosted_by: RIR + kids: + - name: Fiona + hosted_by: RIR + ipv4: 10.0.0.0/8 + asn: 64533 + roa_request: + - asn: 64533 + ipv4: 10.0.0.0/24 diff --git a/ca/tests/smoketest.9.yaml b/ca/tests/smoketest.9.yaml new file mode 100644 index 00000000..3efc4f08 --- /dev/null +++ b/ca/tests/smoketest.9.yaml @@ -0,0 +1,849 @@ +# $Id$ + +# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +name: RIR +crl_interval: 30s +regen_margin: 30s +valid_for: 2m +kids: + - name: R0 + kids: + - name: Alice + ipv4: 192.0.2.1-192.0.2.33 + asn: 64533 + roa_request: + - asn: 42 + ipv4: 192.0.2.32/32 + - name: Bob + ipv4: 192.0.2.44-192.0.2.100 + ipv4: 10.3.0.0/16 + roa_request: + - asn: 666 + ipv4: 10.3.0.44/32 +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m 
+- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + 
valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- 
name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + 
valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- 
name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m +--- +- sleep 15 +- name: RIR + valid_for: 2m +- name: R0 + valid_for: 2m +- name: Alice + valid_for: 2m +- name: Bob + valid_for: 2m diff --git a/ca/tests/smoketest.clean.sql b/ca/tests/smoketest.clean.sql new file mode 100644 index 00000000..9f5ff9fb --- /dev/null +++ 
b/ca/tests/smoketest.clean.sql @@ -0,0 +1,54 @@ +-- $Id$ + +-- Copyright (C) 2009 Internet Systems Consortium ("ISC") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- Clean up databases left behind by smoketest.py et al. + +DROP DATABASE IF EXISTS irdb0; +DROP DATABASE IF EXISTS irdb1; +DROP DATABASE IF EXISTS irdb2; +DROP DATABASE IF EXISTS irdb3; +DROP DATABASE IF EXISTS irdb4; +DROP DATABASE IF EXISTS irdb5; +DROP DATABASE IF EXISTS irdb6; +DROP DATABASE IF EXISTS irdb7; +DROP DATABASE IF EXISTS irdb8; +DROP DATABASE IF EXISTS irdb9; +DROP DATABASE IF EXISTS irdb10; +DROP DATABASE IF EXISTS irdb11; +DROP DATABASE IF EXISTS rpki0; +DROP DATABASE IF EXISTS rpki1; +DROP DATABASE IF EXISTS rpki2; +DROP DATABASE IF EXISTS rpki3; +DROP DATABASE IF EXISTS rpki4; +DROP DATABASE IF EXISTS rpki5; +DROP DATABASE IF EXISTS rpki6; +DROP DATABASE IF EXISTS rpki7; +DROP DATABASE IF EXISTS rpki8; +DROP DATABASE IF EXISTS rpki9; +DROP DATABASE IF EXISTS rpki10; +DROP DATABASE IF EXISTS rpki11; +DROP DATABASE IF EXISTS pubd0; +DROP DATABASE IF EXISTS pubd1; +DROP DATABASE IF EXISTS pubd2; +DROP DATABASE IF EXISTS pubd3; +DROP DATABASE IF EXISTS pubd4; +DROP DATABASE IF EXISTS pubd5; +DROP DATABASE IF EXISTS pubd6; +DROP DATABASE IF EXISTS pubd7; +DROP DATABASE IF EXISTS pubd8; +DROP DATABASE IF EXISTS pubd9; 
+DROP DATABASE IF EXISTS pubd10; +DROP DATABASE IF EXISTS pubd11; diff --git a/ca/tests/smoketest.py b/ca/tests/smoketest.py new file mode 100644 index 00000000..28bedaa4 --- /dev/null +++ b/ca/tests/smoketest.py @@ -0,0 +1,1630 @@ +#!/usr/bin/env python + +""" +Test framework to configure and drive a collection of rpkid.py and +old_irdbd.py instances under control of a master script. + +yaml_file is a YAML description the tests to be run, and is intended +to be implementation-agnostic. + +CONFIG contains settings for various implementation-specific +things that don't belong in yaml_file. +""" + +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +# pylint: disable=W0621 + +import os +import yaml +import subprocess +import signal +import time +import argparse +import sys +import errno +import rpki.resource_set +import rpki.sundial +import rpki.x509 +import rpki.http +import rpki.log +import rpki.left_right +import rpki.config +import rpki.publication +import rpki.async + +from rpki.mysql_import import MySQLdb + +os.environ["TZ"] = "UTC" +time.tzset() + +parser = argparse.ArgumentParser(description = __doc__) +parser.add_argument("-c", "--config", + help = "configuration file") +parser.add_argument("--profile", action = "store_true", + help = "enable profiling") +parser.add_argument("-y", action = "store_true", + help = "ignored, present only for backwards compatability") +parser.add_argument("yaml_file", type = argparse.FileType("r"), + help = "YAML description of test network") +args = parser.parse_args() + +cfg = rpki.config.parser(args.config, "smoketest", allow_missing = True) + +# Load the YAML script early, so we can report errors ASAP + +yaml_script = [y for y in yaml.safe_load_all(args.yaml_file)] + +# Define port allocator early, so we can use it while reading config + +def allocate_port(): + """ + Allocate a TCP port number. + """ + global base_port + p = base_port + base_port += 1 + return p + +# Most filenames in the following are relative to the working directory. 
+ +smoketest_name = cfg.get("smoketest_name", "smoketest") +smoketest_dir = cfg.get("smoketest_dir", smoketest_name + ".dir") + +irdb_db_pass = cfg.get("irdb_db_pass", "fnord") +rpki_db_pass = cfg.get("rpki_db_pass", "fnord") +pubd_db_pass = cfg.get("pubd_db_pass", "fnord") +pubd_db_name = cfg.get("pubd_db_name", "pubd0") +pubd_db_user = cfg.get("pubd_db_user", "pubd") + +base_port = int(cfg.get("base_port", "4400")) + +rsyncd_port = allocate_port() +rootd_port = allocate_port() +pubd_port = allocate_port() + +rsyncd_module = cfg.get("rsyncd_module", smoketest_name) +rootd_sia = cfg.get("rootd_sia", "rsync://localhost:%d/%s/" % (rsyncd_port, rsyncd_module)) + +rootd_name = cfg.get("rootd_name", "rootd") +rsyncd_name = cfg.get("rsyncd_name", "rsyncd") +rcynic_name = cfg.get("rcynic_name", "rcynic") +pubd_name = cfg.get("pubd_name", "pubd") + +prog_python = cfg.get("prog_python", sys.executable) +prog_rpkid = cfg.get("prog_rpkid", "../../rpkid") +prog_irdbd = cfg.get("prog_irdbd", "../old_irdbd.py") +prog_poke = cfg.get("prog_poke", "../testpoke.py") +prog_rootd = cfg.get("prog_rootd", "../../rootd") +prog_pubd = cfg.get("prog_pubd", "../../pubd") +prog_rsyncd = cfg.get("prog_rsyncd", "rsync") +prog_rcynic = cfg.get("prog_rcynic", "../../../rcynic/rcynic") +prog_openssl = cfg.get("prog_openssl", "../../../openssl/openssl/apps/openssl") + +rcynic_stats = cfg.get("rcynic_stats", "echo ; ../../../rcynic/rcynic-text %s.xml ; echo" % rcynic_name) + +rpki_sql_file = cfg.get("rpki_sql_file", "../rpkid.sql") +irdb_sql_file = cfg.get("irdb_sql_file", "old_irdbd.sql") +pub_sql_file = cfg.get("pub_sql_file", "../pubd.sql") + +startup_delay = int(cfg.get("startup_delay", "10")) + +rsyncd_dir = None +pubd_ta = None +pubd_irbe_key = None +pubd_irbe_cert = None +pubd_pubd_cert = None + +pubd_last_cms_time = None + +ecdsa_params = None + +class CantRekeyYAMLLeaf(Exception): + """ + Can't rekey YAML leaf. 
+ """ + +class CouldntIssueBSCEECertificate(Exception): + """ + Couldn't issue BSC EE certificate + """ + +sql_conversions = MySQLdb.converters.conversions.copy() +sql_conversions.update({ + rpki.sundial.datetime : MySQLdb.converters.DateTime2literal, + MySQLdb.converters.FIELD_TYPE.DATETIME : rpki.sundial.datetime.DateTime_or_None }) + +def main(): + """ + Main program. + """ + + rpki.log.init(smoketest_name, use_syslog = False) + rpki.log.info("Starting") + + pubd_process = None + rootd_process = None + rsyncd_process = None + + rpki_sql = mangle_sql(rpki_sql_file) + irdb_sql = mangle_sql(irdb_sql_file) + pubd_sql = mangle_sql(pub_sql_file) + + rpki.log.info("Initializing test directory") + + # Connect to test directory, creating it if necessary + try: + os.chdir(smoketest_dir) + except OSError: + os.makedirs(smoketest_dir) + os.chdir(smoketest_dir) + + # Now that we're in the right directory, we can figure out whether + # we have a private openssl executable to use + global prog_openssl + if not os.path.exists(prog_openssl): + prog_openssl = "openssl" + + # Discard everything but keys, which take a while to generate. + # Apparently os.walk() can't tell the difference between directories + # and symlinks to directories, so we have to handle both. 
+ for root, dirs, files in os.walk(".", topdown = False): + for fn in files: + if not fn.endswith(".key"): + os.remove(os.path.join(root, fn)) + for d in dirs: + try: + os.rmdir(os.path.join(root, d)) + except OSError, e: + if e.errno == errno.ENOTDIR: + os.remove(os.path.join(root, d)) + else: + raise + + rpki.log.info("Reading master YAML configuration") + y = yaml_script.pop(0) + + rpki.log.info("Constructing internal allocation database") + db = allocation_db(y) + + rpki.log.info("Constructing BPKI keys and certs for rootd") + setup_bpki_cert_chain(rootd_name, ee = ("RPKI",)) + + rpki.log.info("Constructing BPKI keys and certs for pubd") + setup_bpki_cert_chain(pubd_name, ee = ("PUBD", "IRBE")) + + + for a in db: + a.setup_bpki_certs() + + setup_publication(pubd_sql) + setup_rootd(db.root, y.get("rootd", {})) + setup_rsyncd() + setup_rcynic() + + for a in db.engines: + a.setup_conf_file() + a.setup_sql(rpki_sql, irdb_sql) + a.sync_sql() + + try: + + rpki.log.info("Starting rootd") + rootd_process = subprocess.Popen((prog_python, prog_rootd, "-d", "-c", rootd_name + ".conf")) + + rpki.log.info("Starting pubd") + pubd_process = subprocess.Popen((prog_python, prog_pubd, "-d", "-c", pubd_name + ".conf") + + (("-p", pubd_name + ".prof") if args.profile else ())) + + rpki.log.info("Starting rsyncd") + rsyncd_process = subprocess.Popen((prog_rsyncd, "--daemon", "--no-detach", "--config", rsyncd_name + ".conf")) + + # Start rpkid and irdbd instances + for a in db.engines: + a.run_daemons() + + # From this point on we'll be running event-driven, so the rest of + # the code until final exit is all closures. 
+ + def start(): + rpki.async.iterator(db.engines, create_rpki_objects, created_rpki_objects) + + def create_rpki_objects(iterator, a): + a.create_rpki_objects(iterator) + + def created_rpki_objects(): + + # Set pubd's BPKI CRL + set_pubd_crl(yaml_loop) + + def yaml_loop(): + + # This is probably where we should be updating expired BPKI + # objects, particular CRLs + + rpki.log.info("Running cron for all RPKI engines") + rpki.async.iterator(db.engines, run_cron, run_yaml) + + def run_cron(iterator, a): + a.run_cron(iterator) + + def run_yaml(): + + # Run rcynic to check results + run_rcynic() + + # Apply next delta if we have one; otherwise, we're done. + if yaml_script: + rpki.log.info("Applying deltas") + db.apply_delta(yaml_script.pop(0), apply_delta_done) + else: + rpki.log.info("No more deltas to apply, done") + rpki.async.exit_event_loop() + + def apply_delta_done(): + + # Resync IRDBs + for a in db.engines: + a.sync_sql() + + # Loop until we run out of control YAML + yaml_loop() + + rpki.log.info("Sleeping %d seconds while daemons start up" % startup_delay) + rpki.async.timer(start).set(rpki.sundial.timedelta(seconds = startup_delay)) + rpki.async.event_loop() + + # At this point we have gone into event-driven code. + + rpki.log.info("Event loop exited normally") + + except Exception, e: + + rpki.log.info("Event loop exited with an exception: %r" % e) + rpki.log.traceback() + + finally: + + rpki.log.info("Cleaning up") + for a in db.engines: + a.kill_daemons() + for proc, name in ((rootd_process, "rootd"), + (pubd_process, "pubd"), + (rsyncd_process, "rsyncd")): + # pylint: disable=E1103 + if proc is not None and proc.poll() is None: + rpki.log.info("Killing %s, pid %s" % (name, proc.pid)) + try: + proc.terminate() + except OSError: + pass + if proc is not None: + rpki.log.info("Daemon %s, pid %s exited with code %s" % (name, proc.pid, proc.wait())) + +def cmd_sleep(cb, interval): + """ + Set an alarm, then wait for it to go off. 
+ """ + howlong = rpki.sundial.timedelta.parse(interval) + rpki.log.info("Sleeping %r" % howlong) + rpki.async.timer(cb).set(howlong) + +def cmd_shell(cb, *cmd): + """ + Run a shell command. + """ + cmd = " ".join(cmd) + status = subprocess.call(cmd, shell = True) + rpki.log.info("Shell command returned status %d" % status) + cb() + +def cmd_echo(cb, *words): + """ + Echo some text to the log. + """ + rpki.log.note(" ".join(words)) + cb() + +## @var cmds +# Dispatch table for commands embedded in delta sections + +cmds = { "sleep" : cmd_sleep, + "shell" : cmd_shell, + "echo" : cmd_echo } + +class roa_request(object): + """ + Representation for a roa_request object. + """ + + def __init__(self, asn, ipv4, ipv6): + self.asn = asn + self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None + self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None + + def __eq__(self, other): + return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6 + + def __hash__(self): + v4 = tuple(self.v4) if self.v4 is not None else None + v6 = tuple(self.v6) if self.v6 is not None else None + return self.asn.__hash__() + v4.__hash__() + v6.__hash__() + + def __str__(self): + if self.v4 and self.v6: s = str(self.v4) + "," + str(self.v6) + elif self.v4: s = str(self.v4) + else: s = str(self.v6) + return "%s: %s" % (self.asn, s) + + @classmethod + def parse(cls, yaml): + return cls(yaml.get("asn"), yaml.get("ipv4"), yaml.get("ipv6")) + +class router_cert(object): + """ + Representation for a router_cert object. 
+ """ + + _ecparams = None + + @classmethod + def ecparams(cls): + if cls._ecparams is None: + cls._ecparams = rpki.x509.KeyParams.generateEC() + return cls._ecparams + + def __init__(self, asn, router_id): + self.asn = rpki.resource_set.resource_set_as("".join(str(asn).split())) + self.router_id = router_id + self.keypair = rpki.x509.ECDSA.generate(self.ecparams()) + self.pkcs10 = rpki.x509.PKCS10.create(keypair = self.keypair) + self.gski = self.pkcs10.gSKI() + self.cn = "ROUTER-%08x" % self.asn[0].min + self.sn = "%08x" % self.router_id + self.eku = rpki.oids.id_kp_bgpsec_router + + def __eq__(self, other): + return self.asn == other.asn and self.sn == other.sn and self.gski == other.gski + + def __hash__(self): + v6 = tuple(self.v6) if self.v6 is not None else None + return tuple(self.asn).__hash__() + sn.__hash__() + self.gski.__hash__() + + def __str__(self): + return "%s: %s: %s" % (self.asn, self.cn, self.sn, self.gski) + + @classmethod + def parse(cls, yaml): + return cls(yaml.get("asn"), yaml.get("router_id")) + +class allocation_db(list): + """ + Representation of all the entities and allocations in the test + system. Almost everything is generated out of this database. + """ + + def __init__(self, yaml): + """ + Initialize database from the (first) YAML document. 
+ """ + + list.__init__(self) + self.root = allocation(yaml, self) + assert self.root.is_root + if self.root.crl_interval is None: + self.root.crl_interval = rpki.sundial.timedelta.parse(cfg.get("crl_interval", "1d")).convert_to_seconds() + if self.root.regen_margin is None: + self.root.regen_margin = rpki.sundial.timedelta.parse(cfg.get("regen_margin", "1d")).convert_to_seconds() + for a in self: + if a.sia_base is None: + a.sia_base = (rootd_sia + "root/trunk/" if a.is_root else a.parent.sia_base) + a.name + "/" + if a.base.valid_until is None: + a.base.valid_until = a.parent.base.valid_until + if a.crl_interval is None: + a.crl_interval = a.parent.crl_interval + if a.regen_margin is None: + a.regen_margin = a.parent.regen_margin + a.client_handle = "/".join(a.sia_base.split("/")[4:]).rstrip("/") + self.root.closure() + self.map = dict((a.name, a) for a in self) + self.engines = [a for a in self if a.is_engine] + for i, a in enumerate(self.engines): + a.set_engine_number(i) + for a in self: + if a.is_hosted: + a.hosted_by = self.map[a.hosted_by] + a.hosted_by.hosts.append(a) + assert a.is_twig, "%s is not twig" % a.name + assert not a.hosted_by.is_hosted, "%s is hosted by a hosted entity" % a.name + + def apply_delta(self, delta, cb): + """ + Apply a delta or run a command. + """ + + def loop(iterator, d): + if isinstance(d, str): + c = d.split() + cmds[c[0]](iterator, *c[1:]) + else: + self.map[d["name"]].apply_delta(d, iterator) + + def done(): + self.root.closure() + cb() + + if delta is None: + cb() + else: + rpki.async.iterator(delta, loop, done) + + def dump(self): + """ + Print content of the database. 
+ """ + for a in self: + print a + +class allocation(object): + + parent = None + irdb_db_name = None + irdb_port = None + rpki_db_name = None + rpki_port = None + crl_interval = None + regen_margin = None + last_cms_time = None + rpkid_process = None + irdbd_process = None + + def __init__(self, yaml, db, parent = None): + """ + Initialize one entity and insert it into the database. + """ + db.append(self) + self.name = yaml["name"] + self.parent = parent + self.kids = [allocation(k, db, self) for k in yaml.get("kids", ())] + valid_until = None + if "valid_until" in yaml: + valid_until = rpki.sundial.datetime.from_datetime(yaml.get("valid_until")) + if valid_until is None and "valid_for" in yaml: + valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(yaml["valid_for"]) + self.base = rpki.resource_set.resource_bag( + asn = rpki.resource_set.resource_set_as(yaml.get("asn")), + v4 = rpki.resource_set.resource_set_ipv4(yaml.get("ipv4")), + v6 = rpki.resource_set.resource_set_ipv6(yaml.get("ipv6")), + valid_until = valid_until) + self.sia_base = yaml.get("sia_base") + if "crl_interval" in yaml: + self.crl_interval = rpki.sundial.timedelta.parse(yaml["crl_interval"]).convert_to_seconds() + if "regen_margin" in yaml: + self.regen_margin = rpki.sundial.timedelta.parse(yaml["regen_margin"]).convert_to_seconds() + self.roa_requests = [roa_request.parse(y) for y in yaml.get("roa_request", yaml.get("route_origin", ()))] + for r in self.roa_requests: + if r.v4: + self.base.v4 |= r.v4.to_resource_set() + if r.v6: + self.base.v6 |= r.v6.to_resource_set() + self.router_certs = [router_cert.parse(y) for y in yaml.get("router_cert", ())] + for r in self.router_certs: + self.base.asn |= r.asn + self.hosted_by = yaml.get("hosted_by") + self.extra_conf = yaml.get("extra_conf", []) + self.hosts = [] + + def closure(self): + """ + Compute the transitive resource closure. 
+ """ + resources = self.base + for kid in self.kids: + resources |= kid.closure() + self.resources = resources + return resources + + def apply_delta(self, yaml, cb): + """ + Apply deltas to this entity. + """ + + rpki.log.info("Applying delta: %s" % yaml) + + def loop(iterator, kv): + if kv[0] == "name": + iterator() + else: + getattr(self, "apply_" + kv[0])(kv[1], iterator) + + rpki.async.iterator(yaml.items(), loop, cb) + + def apply_add_as(self, text, cb): + self.base.asn |= rpki.resource_set.resource_set_as(text) + cb() + + def apply_add_v4(self, text, cb): + self.base.v4 |= rpki.resource_set.resource_set_ipv4(text) + cb() + + def apply_add_v6(self, text, cb): + self.base.v6 |= rpki.resource_set.resource_set_ipv6(text) + cb() + + def apply_sub_as(self, text, cb): + self.base.asn |= rpki.resource_set.resource_set_as(text) + cb() + + def apply_sub_v4(self, text, cb): + self.base.v4 |= rpki.resource_set.resource_set_ipv4(text) + cb() + + def apply_sub_v6(self, text, cb): + self.base.v6 |= rpki.resource_set.resource_set_ipv6(text) + cb() + + def apply_valid_until(self, stamp, cb): + self.base.valid_until = rpki.sundial.datetime.from_datetime(stamp) + cb() + + def apply_valid_for(self, text, cb): + self.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(text) + cb() + + def apply_valid_add(self, text, cb): + self.base.valid_until += rpki.sundial.timedelta.parse(text) + cb() + + def apply_valid_sub(self, text, cb): + self.base.valid_until -= rpki.sundial.timedelta.parse(text) + cb() + + def apply_roa_request_add(self, yaml, cb): + for y in yaml: + r = roa_request.parse(y) + if r not in self.roa_requests: + self.roa_requests.append(r) + cb() + + def apply_roa_request_del(self, yaml, cb): + for y in yaml: + r = roa_request.parse(y) + if r in self.roa_requests: + self.roa_requests.remove(r) + cb() + + def apply_router_cert_add(self, yaml, cb): + for y in yaml: + r = router_cert.parse(y) + if r not in self.router_certs: + self.router_certs.append(r) + 
cb() + + def apply_router_cert_del(self, yaml, cb): + for y in yaml: + r = router_cert.parse(y) + if r in self.router_certs: + self.router_certs.remove(r) + cb() + + def apply_rekey(self, target, cb): + + def done(e): + if isinstance(e, Exception): + rpki.log.traceback() + raise e + cb() + + if target is None: + rpki.log.info("Rekeying %s" % self.name) + self.call_rpkid([rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.name, rekey = "yes")], cb = done) + else: + rpki.log.info("Rekeying %s %s" % (self.name, target)) + self.call_rpkid([rpki.left_right.parent_elt.make_pdu( + action = "set", self_handle = self.name, parent_handle = target, rekey = "yes")], cb = done) + + def apply_revoke(self, target, cb): + + def done(e): + if isinstance(e, Exception): + rpki.log.traceback() + raise e + cb() + + if target is None: + rpki.log.info("Revoking %s" % self.name) + self.call_rpkid([rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.name, revoke = "yes")], cb = done) + else: + rpki.log.info("Revoking %s %s" % (self.name, target)) + self.call_rpkid([rpki.left_right.parent_elt.make_pdu( + action = "set", self_handle = self.name, parent_handle = target, revoke = "yes")], cb = done) + + def __str__(self): + s = self.name + "\n" + if self.resources.asn: s += " ASN: %s\n" % self.resources.asn + if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4 + if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6 + if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids) + if self.parent: s += " Up: %s\n" % self.parent.name + if self.sia_base: s += " SIA: %s\n" % self.sia_base + return s + "Until: %s\n" % self.resources.valid_until + + + @property + def is_root(self): + return self.parent is None + + @property + def is_twig(self): + return not self.is_root + + @property + def is_hosted(self): + return self.hosted_by is not None + + @property + def is_engine(self): + return not self.is_hosted + + def 
set_engine_number(self, n): + """ + Set the engine number for this entity. + """ + self.irdb_db_name = "irdb%d" % n + self.irdb_port = allocate_port() + self.rpki_db_name = "rpki%d" % n + self.rpki_port = allocate_port() + + def get_rpki_port(self): + """ + Get rpki port to use for this entity. + """ + if self.is_hosted: + assert self.hosted_by.rpki_port is not None + return self.hosted_by.rpki_port + else: + assert self.rpki_port is not None + return self.rpki_port + + def setup_bpki_certs(self): + """ + Create BPKI certificates for this entity. + """ + rpki.log.info("Constructing BPKI keys and certs for %s" % self.name) + setup_bpki_cert_chain(name = self.name, + ee = ("RPKI", "IRDB", "IRBE"), + ca = ("SELF",)) + self.rpkid_ta = rpki.x509.X509(PEM_file = self.name + "-TA.cer") + self.irbe_key = rpki.x509.RSA( PEM_file = self.name + "-IRBE.key") + self.irbe_cert = rpki.x509.X509(PEM_file = self.name + "-IRBE.cer") + self.rpkid_cert = rpki.x509.X509(PEM_file = self.name + "-RPKI.cer") + + def setup_conf_file(self): + """ + Write config files for this entity. + """ + rpki.log.info("Writing config files for %s" % self.name) + assert self.rpki_port is not None + d = { "my_name" : self.name, + "irdb_db_name" : self.irdb_db_name, + "irdb_db_pass" : irdb_db_pass, + "irdb_port" : self.irdb_port, + "rpki_db_name" : self.rpki_db_name, + "rpki_db_pass" : rpki_db_pass, + "rpki_port" : self.rpki_port } + f = open(self.name + ".conf", "w") + f.write(conf_fmt_1 % d) + for line in self.extra_conf: + f.write(line + "\n") + f.close() + + def setup_sql(self, rpki_sql, irdb_sql): + """ + Set up this entity's IRDB. 
+ """ + rpki.log.info("Setting up MySQL for %s" % self.name) + db = MySQLdb.connect(user = "rpki", db = self.rpki_db_name, passwd = rpki_db_pass, + conv = sql_conversions) + cur = db.cursor() + db.autocommit(True) + for sql in rpki_sql: + try: + cur.execute(sql) + except Exception: + if "DROP TABLE IF EXISTS" not in sql.upper(): + raise + db.close() + db = MySQLdb.connect(user = "irdb", db = self.irdb_db_name, passwd = irdb_db_pass, + conv = sql_conversions) + cur = db.cursor() + db.autocommit(True) + for sql in irdb_sql: + try: + cur.execute(sql) + except Exception: + if "DROP TABLE IF EXISTS" not in sql.upper(): + raise + for s in [self] + self.hosts: + for kid in s.kids: + cur.execute("INSERT registrant (registrant_handle, registry_handle, valid_until) VALUES (%s, %s, %s)", + (kid.name, s.name, kid.resources.valid_until)) + db.close() + + def sync_sql(self): + """ + Whack this entity's IRDB to match our master database. We do this + once during setup, then do it again every time we apply a delta to + this entity. 
+ """ + rpki.log.info("Updating MySQL data for IRDB %s" % self.name) + db = MySQLdb.connect(user = "irdb", db = self.irdb_db_name, passwd = irdb_db_pass, + conv = sql_conversions) + cur = db.cursor() + db.autocommit(True) + cur.execute("DELETE FROM registrant_asn") + cur.execute("DELETE FROM registrant_net") + cur.execute("DELETE FROM roa_request_prefix") + cur.execute("DELETE FROM roa_request") + cur.execute("DELETE FROM ee_certificate_asn") + cur.execute("DELETE FROM ee_certificate_net") + cur.execute("DELETE FROM ee_certificate") + + for s in [self] + self.hosts: + for kid in s.kids: + cur.execute("SELECT registrant_id FROM registrant WHERE registrant_handle = %s AND registry_handle = %s", + (kid.name, s.name)) + registrant_id = cur.fetchone()[0] + for as_range in kid.resources.asn: + cur.execute("INSERT registrant_asn (start_as, end_as, registrant_id) VALUES (%s, %s, %s)", + (as_range.min, as_range.max, registrant_id)) + for v4_range in kid.resources.v4: + cur.execute("INSERT registrant_net (start_ip, end_ip, version, registrant_id) VALUES (%s, %s, 4, %s)", + (v4_range.min, v4_range.max, registrant_id)) + for v6_range in kid.resources.v6: + cur.execute("INSERT registrant_net (start_ip, end_ip, version, registrant_id) VALUES (%s, %s, 6, %s)", + (v6_range.min, v6_range.max, registrant_id)) + cur.execute("UPDATE registrant SET valid_until = %s WHERE registrant_id = %s", + (kid.resources.valid_until, registrant_id)) + for r in s.roa_requests: + cur.execute("INSERT roa_request (self_handle, asn) VALUES (%s, %s)", + (s.name, r.asn)) + roa_request_id = cur.lastrowid + for version, prefix_set in ((4, r.v4), (6, r.v6)): + if prefix_set: + cur.executemany("INSERT roa_request_prefix " + "(roa_request_id, prefix, prefixlen, max_prefixlen, version) " + "VALUES (%s, %s, %s, %s, %s)", + ((roa_request_id, x.prefix, x.prefixlen, x.max_prefixlen, version) + for x in prefix_set)) + for r in s.router_certs: + cur.execute("INSERT ee_certificate (self_handle, pkcs10, gski, cn, sn, 
eku, valid_until) " + "VALUES (%s, %s, %s, %s, %s, %s, %s)", + (s.name, r.pkcs10.get_DER(), r.gski, r.cn, r.sn, r.eku, s.resources.valid_until)) + ee_certificate_id = cur.lastrowid + cur.executemany("INSERT ee_certificate_asn (ee_certificate_id, start_as, end_as) VALUES (%s, %s, %s)", + ((ee_certificate_id, a.min, a.max) for a in r.asn)) + db.close() + + def run_daemons(self): + """ + Run daemons for this entity. + """ + rpki.log.info("Running daemons for %s" % self.name) + self.rpkid_process = subprocess.Popen((prog_python, prog_rpkid, "-d", "-c", self.name + ".conf") + + (("-p", self.name + ".prof") if args.profile else ())) + self.irdbd_process = subprocess.Popen((prog_python, prog_irdbd, "-d", "-c", self.name + ".conf")) + + def kill_daemons(self): + """ + Kill daemons for this entity. + """ + # pylint: disable=E1103 + for proc, name in ((self.rpkid_process, "rpkid"), + (self.irdbd_process, "irdbd")): + if proc is not None and proc.poll() is None: + rpki.log.info("Killing daemon %s pid %s for %s" % (name, proc.pid, self.name)) + try: + proc.terminate() + except OSError: + pass + if proc is not None: + rpki.log.info("Daemon %s pid %s for %s exited with code %s" % ( + name, proc.pid, self.name, proc.wait())) + + def call_rpkid(self, pdus, cb): + """ + Send a left-right message to this entity's RPKI daemon and return + the response. + + If this entity is hosted (does not run its own RPKI daemon), all + of this happens with the hosting RPKI daemon. 
+ """ + + rpki.log.info("Calling rpkid for %s" % self.name) + + if self.is_hosted: + rpki.log.info("rpkid %s is hosted by rpkid %s, switching" % (self.name, self.hosted_by.name)) + self = self.hosted_by + assert not self.is_hosted + + assert isinstance(pdus, (list, tuple)) + assert self.rpki_port is not None + + q_msg = rpki.left_right.msg.query(*pdus) + q_cms = rpki.left_right.cms_msg() + q_der = q_cms.wrap(q_msg, self.irbe_key, self.irbe_cert) + q_url = "http://localhost:%d/left-right" % self.rpki_port + + rpki.log.debug(q_cms.pretty_print_content()) + + def done(r_der): + rpki.log.info("Callback from rpkid %s" % self.name) + r_cms = rpki.left_right.cms_msg(DER = r_der) + r_msg = r_cms.unwrap((self.rpkid_ta, self.rpkid_cert)) + self.last_cms_time = r_cms.check_replay(self.last_cms_time, q_url) + rpki.log.debug(r_cms.pretty_print_content()) + assert r_msg.is_reply + for r_pdu in r_msg: + assert not isinstance(r_pdu, rpki.left_right.report_error_elt) + cb(r_msg) + + def lose(e): + raise + + rpki.http.client( + url = q_url, + msg = q_der, + callback = done, + errback = lose) + + def cross_certify(self, certificant, reverse = False): + """ + Cross-certify and return the resulting certificate. 
+ """ + + if reverse: + certifier = certificant + certificant = self.name + "-SELF" + else: + certifier = self.name + "-SELF" + certfile = certifier + "-" + certificant + ".cer" + + rpki.log.info("Cross certifying %s into %s's BPKI (%s)" % (certificant, certifier, certfile)) + + child = rpki.x509.X509(Auto_file = certificant + ".cer") + parent = rpki.x509.X509(Auto_file = certifier + ".cer") + keypair = rpki.x509.RSA(Auto_file = certifier + ".key") + serial_file = certifier + ".srl" + + now = rpki.sundial.now() + notAfter = now + rpki.sundial.timedelta(days = 30) + + try: + f = open(serial_file, "r") + serial = f.read() + f.close() + serial = int(serial.splitlines()[0], 16) + except IOError: + serial = 1 + + x = parent.bpki_cross_certify( + keypair = keypair, + source_cert = child, + serial = serial, + notAfter = notAfter, + now = now) + + f = open(serial_file, "w") + f.write("%02x\n" % (serial + 1)) + f.close() + + f = open(certfile, "w") + f.write(x.get_PEM()) + f.close() + + rpki.log.debug("Cross certified %s:" % certfile) + rpki.log.debug(" Issuer %s [%s]" % (x.getIssuer(), x.hAKI())) + rpki.log.debug(" Subject %s [%s]" % (x.getSubject(), x.hSKI())) + return x + + def create_rpki_objects(self, cb): + """ + Create RPKI engine objects for this engine. + + Root node of the engine tree is special, it too has a parent but + that one is the magic self-signed micro engine. + + The rest of this is straightforward. There are a lot of objects + to create, but we can do batch them all into one honking PDU, then + issue one more PDU to set BSC EE certificates based on the PKCS + #10 requests we get back when we tell rpkid to generate BSC keys. 
+ """ + + assert not self.is_hosted + + selves = [self] + self.hosts + + for i, s in enumerate(selves): + rpki.log.info("Creating RPKI objects for [%d] %s" % (i, s.name)) + + rpkid_pdus = [] + pubd_pdus = [] + + for s in selves: + + rpkid_pdus.append(rpki.left_right.self_elt.make_pdu( + action = "create", + self_handle = s.name, + crl_interval = s.crl_interval, + regen_margin = s.regen_margin, + bpki_cert = (s.cross_certify(s.hosted_by.name + "-TA", reverse = True) + if s.is_hosted else + rpki.x509.X509(Auto_file = s.name + "-SELF.cer")))) + + rpkid_pdus.append(rpki.left_right.bsc_elt.make_pdu( + action = "create", + self_handle = s.name, + bsc_handle = "b", + generate_keypair = True)) + + pubd_pdus.append(rpki.publication.client_elt.make_pdu( + action = "create", + client_handle = s.client_handle, + base_uri = s.sia_base, + bpki_cert = s.cross_certify(pubd_name + "-TA", reverse = True))) + + rpkid_pdus.append(rpki.left_right.repository_elt.make_pdu( + action = "create", + self_handle = s.name, + bsc_handle = "b", + repository_handle = "r", + bpki_cert = s.cross_certify(pubd_name + "-TA"), + peer_contact_uri = "http://localhost:%d/client/%s" % (pubd_port, s.client_handle))) + + for k in s.kids: + rpkid_pdus.append(rpki.left_right.child_elt.make_pdu( + action = "create", + self_handle = s.name, + child_handle = k.name, + bsc_handle = "b", + bpki_cert = s.cross_certify(k.name + "-SELF"))) + + if s.is_root: + rootd_cert = s.cross_certify(rootd_name + "-TA") + rpkid_pdus.append(rpki.left_right.parent_elt.make_pdu( + action = "create", + self_handle = s.name, + parent_handle = "rootd", + bsc_handle = "b", + repository_handle = "r", + sia_base = s.sia_base, + bpki_cms_cert = rootd_cert, + sender_name = s.name, + recipient_name = "rootd", + peer_contact_uri = "http://localhost:%s/" % rootd_port)) + else: + rpkid_pdus.append(rpki.left_right.parent_elt.make_pdu( + action = "create", + self_handle = s.name, + parent_handle = s.parent.name, + bsc_handle = "b", + 
repository_handle = "r", + sia_base = s.sia_base, + bpki_cms_cert = s.cross_certify(s.parent.name + "-SELF"), + sender_name = s.name, + recipient_name = s.parent.name, + peer_contact_uri = "http://localhost:%s/up-down/%s/%s" % (s.parent.get_rpki_port(), + s.parent.name, s.name))) + + def one(): + call_pubd(pubd_pdus, cb = two) + + def two(vals): + self.call_rpkid(rpkid_pdus, cb = three) + + def three(vals): + + bsc_dict = dict((b.self_handle, b) for b in vals if isinstance(b, rpki.left_right.bsc_elt)) + + bsc_pdus = [] + + for s in selves: + b = bsc_dict[s.name] + + rpki.log.info("Issuing BSC EE cert for %s" % s.name) + cmd = (prog_openssl, "x509", "-req", "-sha256", "-extfile", s.name + "-RPKI.conf", + "-extensions", "req_x509_ext", "-days", "30", + "-CA", s.name + "-SELF.cer", "-CAkey", s.name + "-SELF.key", "-CAcreateserial", "-text") + signer = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE) + signed = signer.communicate(input = b.pkcs10_request.get_PEM()) + if not signed[0]: + rpki.log.warn(signed[1]) + raise CouldntIssueBSCEECertificate, "Couldn't issue BSC EE certificate" + s.bsc_ee = rpki.x509.X509(PEM = signed[0]) + s.bsc_crl = rpki.x509.CRL(PEM_file = s.name + "-SELF.crl") + rpki.log.info("BSC EE cert for %s SKI %s" % (s.name, s.bsc_ee.hSKI())) + + bsc_pdus.append(rpki.left_right.bsc_elt.make_pdu( + action = "set", + self_handle = s.name, + bsc_handle = "b", + signing_cert = s.bsc_ee, + signing_cert_crl = s.bsc_crl)) + + self.call_rpkid(bsc_pdus, cb = four) + + def four(vals): + cb() + + one() + + def setup_yaml_leaf(self): + """ + Generate certificates and write YAML scripts for leaf nodes. + + We're cheating a bit here: properly speaking, we can't generate + issue or revoke requests without knowing the class, which is + generated on the fly, but at the moment the test case is + simplistic enough that the class will always be "1", so we just + wire in that value for now. 
+ + Well, ok, we just broke that assumption. Now we do something even + nastier, just to eke a bit more life out of this kludge. This + really needs to be rewritten, but it may require a different tool + than testpoke. + """ + + if not os.path.exists(self.name + ".key"): + rpki.log.info("Generating RPKI key for %s" % self.name) + subprocess.check_call((prog_openssl, "genrsa", "-out", self.name + ".key", "2048" ), + stdout = subprocess.PIPE, stderr = subprocess.STDOUT) + ski = rpki.x509.RSA(PEM_file = self.name + ".key").gSKI() + + if self.parent.is_hosted: + parent_host = self.parent.hosted_by.name + else: + parent_host = self.parent.name + + self.cross_certify(self.parent.name + "-SELF") + self.cross_certify(parent_host + "-TA") + + rpki.log.info("Writing leaf YAML for %s" % self.name) + f = open(self.name + ".yaml", "w") + f.write(yaml_fmt_1 % { + "parent_name" : self.parent.name, + "parent_host" : parent_host, + "my_name" : self.name, + "http_port" : self.parent.get_rpki_port(), + "class_name" : 2 if self.parent.is_hosted else 1, + "sia" : self.sia_base, + "ski" : ski }) + f.close() + + def run_cron(self, cb): + """ + Trigger cron run for this engine. + """ + + rpki.log.info("Running cron for %s" % self.name) + + assert self.rpki_port is not None + + def done(result): + assert result == "OK", 'Expected "OK" result from cronjob, got %r' % result + cb() + + rpki.http.client( + url = "http://localhost:%d/cronjob" % self.rpki_port, + msg = "Run cron now, please", + callback = done, + errback = done) + + def run_yaml(self): + """ + Run YAML scripts for this leaf entity. Since we're not bothering + to check the class list returned by the list command, the issue + command may fail, so we treat failure of the list command as an + error, but only issue a warning when issue fails. 
+ """ + + rpki.log.info("Running YAML for %s" % self.name) + subprocess.check_call((prog_python, prog_poke, "-y", self.name + ".yaml", "-r", "list")) + if subprocess.call((prog_python, prog_poke, "-y", self.name + ".yaml", "-r", "issue")) != 0: + rpki.log.warn("YAML issue command failed for %s, continuing" % self.name) + +def setup_bpki_cert_chain(name, ee = (), ca = ()): + """ + Build a set of BPKI certificates. + """ + s = "exec >/dev/null 2>&1\n" + #s = "set -x\n" + for kind in ("TA",) + ee + ca: + d = { "name" : name, + "kind" : kind, + "ca" : "false" if kind in ee else "true", + "openssl" : prog_openssl } + f = open("%(name)s-%(kind)s.conf" % d, "w") + f.write(bpki_cert_fmt_1 % d) + f.close() + if not os.path.exists("%(name)s-%(kind)s.key" % d): + s += bpki_cert_fmt_2 % d + s += bpki_cert_fmt_3 % d + d = { "name" : name, "openssl" : prog_openssl } + s += bpki_cert_fmt_4 % d + for kind in ee + ca: + d["kind"] = kind + s += bpki_cert_fmt_5 % d + for kind in ("TA",) + ca: + d["kind"] = kind + s += bpki_cert_fmt_6 % d + subprocess.check_call(s, shell = True) + +def setup_rootd(rpkid, rootd_yaml): + """ + Write the config files for rootd. + """ + rpkid.cross_certify(rootd_name + "-TA", reverse = True) + rpki.log.info("Writing config files for %s" % rootd_name) + d = { "rootd_name" : rootd_name, + "rootd_port" : rootd_port, + "rpkid_name" : rpkid.name, + "rootd_sia" : rootd_sia, + "rsyncd_dir" : rsyncd_dir, + "openssl" : prog_openssl, + "lifetime" : rootd_yaml.get("lifetime", "30d") } + f = open(rootd_name + ".conf", "w") + f.write(rootd_fmt_1 % d) + f.close() + s = "exec >/dev/null 2>&1\n" + #s = "set -x\n" + if not os.path.exists("root.key"): + s += rootd_fmt_2 % d + s += rootd_fmt_3 % d + subprocess.check_call(s, shell = True) + +def setup_rcynic(): + """ + Write the config file for rcynic. 
+ """ + rpki.log.info("Config file for rcynic") + d = { "rcynic_name" : rcynic_name, + "rootd_name" : rootd_name, + "rootd_sia" : rootd_sia } + f = open(rcynic_name + ".conf", "w") + f.write(rcynic_fmt_1 % d) + f.close() + +def setup_rsyncd(): + """ + Write the config file for rsyncd. + """ + rpki.log.info("Config file for rsyncd") + d = { "rsyncd_name" : rsyncd_name, + "rsyncd_port" : rsyncd_port, + "rsyncd_module" : rsyncd_module, + "rsyncd_dir" : rsyncd_dir } + f = open(rsyncd_name + ".conf", "w") + f.write(rsyncd_fmt_1 % d) + f.close() + +def setup_publication(pubd_sql): + """ + Set up publication daemon. + """ + rpki.log.info("Configure publication daemon") + publication_dir = os.getcwd() + "/publication" + assert rootd_sia.startswith("rsync://") + global rsyncd_dir + rsyncd_dir = publication_dir + "/".join(rootd_sia.split("/")[4:]) + if not rsyncd_dir.endswith("/"): + rsyncd_dir += "/" + os.makedirs(rsyncd_dir + "root/trunk") + db = MySQLdb.connect(db = pubd_db_name, user = pubd_db_user, passwd = pubd_db_pass, + conv = sql_conversions) + cur = db.cursor() + db.autocommit(True) + for sql in pubd_sql: + try: + cur.execute(sql) + except Exception: + if "DROP TABLE IF EXISTS" not in sql.upper(): + raise + db.close() + d = { "pubd_name" : pubd_name, + "pubd_port" : pubd_port, + "pubd_db_name" : pubd_db_name, + "pubd_db_user" : pubd_db_user, + "pubd_db_pass" : pubd_db_pass, + "pubd_dir" : rsyncd_dir } + f = open(pubd_name + ".conf", "w") + f.write(pubd_fmt_1 % d) + f.close() + global pubd_ta + global pubd_irbe_key + global pubd_irbe_cert + global pubd_pubd_cert + pubd_ta = rpki.x509.X509(Auto_file = pubd_name + "-TA.cer") + pubd_irbe_key = rpki.x509.RSA( Auto_file = pubd_name + "-IRBE.key") + pubd_irbe_cert = rpki.x509.X509(Auto_file = pubd_name + "-IRBE.cer") + pubd_pubd_cert = rpki.x509.X509(Auto_file = pubd_name + "-PUBD.cer") + +def call_pubd(pdus, cb): + """ + Send a publication message to publication daemon and return the + response. 
+ """ + rpki.log.info("Calling pubd") + q_msg = rpki.publication.msg.query(*pdus) + q_cms = rpki.publication.cms_msg() + q_der = q_cms.wrap(q_msg, pubd_irbe_key, pubd_irbe_cert) + q_url = "http://localhost:%d/control" % pubd_port + + rpki.log.debug(q_cms.pretty_print_content()) + + def call_pubd_cb(r_der): + global pubd_last_cms_time + r_cms = rpki.publication.cms_msg(DER = r_der) + r_msg = r_cms.unwrap((pubd_ta, pubd_pubd_cert)) + pubd_last_cms_time = r_cms.check_replay(pubd_last_cms_time, q_url) + rpki.log.debug(r_cms.pretty_print_content()) + assert r_msg.is_reply + for r_pdu in r_msg: + assert not isinstance(r_pdu, rpki.publication.report_error_elt) + cb(r_msg) + + def call_pubd_eb(e): + rpki.log.warn("Problem calling pubd: %s" % e) + rpki.log.traceback() + + rpki.http.client( + url = q_url, + msg = q_der, + callback = call_pubd_cb, + errback = call_pubd_eb) + +def set_pubd_crl(cb): + """ + Whack publication daemon's bpki_crl. This must be configured before + publication daemon starts talking to its clients, and must be + updated whenever we update the CRL. + """ + rpki.log.info("Setting pubd's BPKI CRL") + crl = rpki.x509.CRL(Auto_file = pubd_name + "-TA.crl") + call_pubd([rpki.publication.config_elt.make_pdu(action = "set", bpki_crl = crl)], cb = lambda ignored: cb()) + +last_rcynic_run = None + +def run_rcynic(): + """ + Run rcynic to see whether what was published makes sense. + """ + rpki.log.info("Running rcynic") + env = os.environ.copy() + env["TZ"] = "" + global last_rcynic_run + if int(time.time()) == last_rcynic_run: + time.sleep(1) + subprocess.check_call((prog_rcynic, "-c", rcynic_name + ".conf"), env = env) + subprocess.call(rcynic_stats, shell = True, env = env) + last_rcynic_run = int(time.time()) + os.link("%s.xml" % rcynic_name, "%s.%s.xml" % (rcynic_name, last_rcynic_run)) + +def mangle_sql(filename): + """ + Mangle an SQL file into a sequence of SQL statements. 
+ """ + words = [] + f = open(filename) + for line in f: + words.extend(line.partition("--")[0].split()) + f.close() + return " ".join(words).strip(";").split(";") + +bpki_cert_fmt_1 = '''\ +[req] +distinguished_name = req_dn +x509_extensions = req_x509_ext +prompt = no +default_md = sha256 + +[req_dn] +CN = Test Certificate %(name)s %(kind)s + +[req_x509_ext] +basicConstraints = critical,CA:%(ca)s +subjectKeyIdentifier = hash +authorityKeyIdentifier = keyid:always + + +[ca] +default_ca = ca_default + +[ca_default] + +certificate = %(name)s-%(kind)s.cer +serial = %(name)s-%(kind)s.srl +private_key = %(name)s-%(kind)s.key +database = %(name)s-%(kind)s.idx +crlnumber = %(name)s-%(kind)s.cnm +default_crl_days = 30 +default_md = sha256 +''' + +bpki_cert_fmt_2 = '''\ +%(openssl)s genrsa -out %(name)s-%(kind)s.key 2048 && +''' + +bpki_cert_fmt_3 = '''\ +%(openssl)s req -new \ + -sha256 \ + -key %(name)s-%(kind)s.key \ + -out %(name)s-%(kind)s.req \ + -config %(name)s-%(kind)s.conf && +touch %(name)s-%(kind)s.idx && +echo >%(name)s-%(kind)s.cnm 01 && +''' + +bpki_cert_fmt_4 = '''\ +%(openssl)s x509 -req -sha256 \ + -in %(name)s-TA.req \ + -out %(name)s-TA.cer \ + -extfile %(name)s-TA.conf \ + -extensions req_x509_ext \ + -signkey %(name)s-TA.key \ + -days 60 -text \ +''' + +bpki_cert_fmt_5 = ''' && \ +%(openssl)s x509 -req \ + -sha256 \ + -in %(name)s-%(kind)s.req \ + -out %(name)s-%(kind)s.cer \ + -extfile %(name)s-%(kind)s.conf \ + -extensions req_x509_ext \ + -days 30 \ + -text \ + -CA %(name)s-TA.cer \ + -CAkey %(name)s-TA.key \ + -CAcreateserial \ +''' + +bpki_cert_fmt_6 = ''' && \ +%(openssl)s ca -batch \ + -gencrl \ + -out %(name)s-%(kind)s.crl \ + -config %(name)s-%(kind)s.conf \ +''' + +yaml_fmt_1 = '''--- +version: 1 +posturl: http://localhost:%(http_port)s/up-down/%(parent_name)s/%(my_name)s +recipient-id: "%(parent_name)s" +sender-id: "%(my_name)s" + +cms-cert-file: %(my_name)s-RPKI.cer +cms-key-file: %(my_name)s-RPKI.key +cms-ca-cert-file: %(my_name)s-TA.cer 
+cms-crl-file: %(my_name)s-TA.crl +cms-ca-certs-file: + - %(my_name)s-TA-%(parent_name)s-SELF.cer + +ssl-cert-file: %(my_name)s-RPKI.cer +ssl-key-file: %(my_name)s-RPKI.key +ssl-ca-cert-file: %(my_name)s-TA.cer +ssl-ca-certs-file: + - %(my_name)s-TA-%(parent_host)s-TA.cer + +# We're cheating here by hardwiring the class name + +requests: + list: + type: list + issue: + type: issue + class: %(class_name)s + sia: + - %(sia)s + cert-request-key-file: %(my_name)s.key + revoke: + type: revoke + class: %(class_name)s + ski: %(ski)s +''' + +conf_fmt_1 = '''\ + +[irdbd] + +startup-message = This is %(my_name)s irdbd + +sql-database = %(irdb_db_name)s +sql-username = irdb +sql-password = %(irdb_db_pass)s +bpki-ta = %(my_name)s-TA.cer +rpkid-cert = %(my_name)s-RPKI.cer +irdbd-cert = %(my_name)s-IRDB.cer +irdbd-key = %(my_name)s-IRDB.key +http-url = http://localhost:%(irdb_port)d/ +enable_tracebacks = yes + +[irbe_cli] + +rpkid-bpki-ta = %(my_name)s-TA.cer +rpkid-cert = %(my_name)s-RPKI.cer +rpkid-irbe-cert = %(my_name)s-IRBE.cer +rpkid-irbe-key = %(my_name)s-IRBE.key +rpkid-url = http://localhost:%(rpki_port)d/left-right +enable_tracebacks = yes + +[rpkid] + +startup-message = This is %(my_name)s rpkid + +sql-database = %(rpki_db_name)s +sql-username = rpki +sql-password = %(rpki_db_pass)s + +bpki-ta = %(my_name)s-TA.cer +rpkid-key = %(my_name)s-RPKI.key +rpkid-cert = %(my_name)s-RPKI.cer +irdb-cert = %(my_name)s-IRDB.cer +irbe-cert = %(my_name)s-IRBE.cer + +irdb-url = http://localhost:%(irdb_port)d/ + +server-host = localhost +server-port = %(rpki_port)d + +use-internal-cron = false +enable_tracebacks = yes +''' + +rootd_fmt_1 = '''\ + +[rootd] + +bpki-ta = %(rootd_name)s-TA.cer +rootd-bpki-cert = %(rootd_name)s-RPKI.cer +rootd-bpki-key = %(rootd_name)s-RPKI.key +rootd-bpki-crl = %(rootd_name)s-TA.crl +child-bpki-cert = %(rootd_name)s-TA-%(rpkid_name)s-SELF.cer + +server-port = %(rootd_port)s + +rpki-root-dir = %(rsyncd_dir)sroot +rpki-base-uri = %(rootd_sia)sroot/ 
+rpki-root-cert-uri = %(rootd_sia)sroot.cer + +rpki-root-key = root.key +rpki-root-cert = root.cer + +rpki-subject-pkcs10 = %(rootd_name)s.subject.pkcs10 +rpki-subject-lifetime = %(lifetime)s + +rpki-root-crl = root.crl +rpki-root-manifest = root.mft + +rpki-class-name = trunk +rpki-subject-cert = trunk.cer + +include-bpki-crl = yes +enable_tracebacks = yes + +[req] +default_bits = 2048 +encrypt_key = no +distinguished_name = req_dn +prompt = no +default_md = sha256 +default_days = 60 + +[req_dn] +CN = Completely Bogus Test Root (NOT FOR PRODUCTION USE) + +[req_x509_ext] +basicConstraints = critical,CA:true +subjectKeyIdentifier = hash +authorityKeyIdentifier = keyid:always + +[req_x509_rpki_ext] +basicConstraints = critical,CA:true +subjectKeyIdentifier = hash +keyUsage = critical,keyCertSign,cRLSign +subjectInfoAccess = @sia +sbgp-autonomousSysNum = critical,AS:0-4294967295 +sbgp-ipAddrBlock = critical,IPv4:0.0.0.0/0,IPv6:0::/0 +certificatePolicies = critical, @rpki_certificate_policy + +[sia] + +1.3.6.1.5.5.7.48.5;URI = %(rootd_sia)sroot/ +1.3.6.1.5.5.7.48.10;URI = %(rootd_sia)sroot/root.mft + +[rpki_certificate_policy] + +policyIdentifier = 1.3.6.1.5.5.7.14.2 +''' + +rootd_fmt_2 = '''\ +%(openssl)s genrsa -out root.key 2048 && +''' + +rootd_fmt_3 = '''\ +echo >%(rootd_name)s.tal %(rootd_sia)sroot.cer && +echo >>%(rootd_name)s.tal && +%(openssl)s rsa -pubout -in root.key | +awk '!/-----(BEGIN|END)/' >>%(rootd_name)s.tal && +%(openssl)s req -new -text -sha256 \ + -key root.key \ + -out %(rootd_name)s.req \ + -config %(rootd_name)s.conf \ + -extensions req_x509_rpki_ext && +%(openssl)s x509 -req -sha256 \ + -in %(rootd_name)s.req \ + -out root.cer \ + -outform DER \ + -extfile %(rootd_name)s.conf \ + -extensions req_x509_rpki_ext \ + -signkey root.key && +ln -f root.cer %(rsyncd_dir)s +''' + +rcynic_fmt_1 = '''\ +[rcynic] +xml-summary = %(rcynic_name)s.xml +jitter = 0 +use-links = yes +use-syslog = no +use-stderr = yes +log-level = log_debug +trust-anchor-locator 
= %(rootd_name)s.tal +''' + +rsyncd_fmt_1 = '''\ +port = %(rsyncd_port)d +address = localhost + +[%(rsyncd_module)s] +read only = yes +transfer logging = yes +use chroot = no +path = %(rsyncd_dir)s +comment = RPKI test +''' + +pubd_fmt_1 = '''\ +[pubd] + +sql-database = %(pubd_db_name)s +sql-username = %(pubd_db_user)s +sql-password = %(pubd_db_pass)s +bpki-ta = %(pubd_name)s-TA.cer +pubd-cert = %(pubd_name)s-PUBD.cer +pubd-key = %(pubd_name)s-PUBD.key +irbe-cert = %(pubd_name)s-IRBE.cer +server-host = localhost +server-port = %(pubd_port)d +publication-base = %(pubd_dir)s +enable_tracebacks = yes +''' + +main() diff --git a/ca/tests/smoketest.setup.sql b/ca/tests/smoketest.setup.sql new file mode 100644 index 00000000..326988f1 --- /dev/null +++ b/ca/tests/smoketest.setup.sql @@ -0,0 +1,112 @@ +-- $Id$ +-- +-- Run this manually under the MySQL CLI to set up databases for testdb.py. +-- testdb.py doesn't do this automatically because it requires privileges +-- that smoketest.py doesn't (or at least shouldn't) have. + +-- Copyright (C) 2009 Internet Systems Consortium ("ISC") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. 
+-- +-- Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + + +CREATE DATABASE irdb0; +CREATE DATABASE irdb1; +CREATE DATABASE irdb2; +CREATE DATABASE irdb3; +CREATE DATABASE irdb4; +CREATE DATABASE irdb5; +CREATE DATABASE irdb6; +CREATE DATABASE irdb7; +CREATE DATABASE irdb8; +CREATE DATABASE irdb9; +CREATE DATABASE irdb10; +CREATE DATABASE irdb11; + +CREATE DATABASE rpki0; +CREATE DATABASE rpki1; +CREATE DATABASE rpki2; +CREATE DATABASE rpki3; +CREATE DATABASE rpki4; +CREATE DATABASE rpki5; +CREATE DATABASE rpki6; +CREATE DATABASE rpki7; +CREATE DATABASE rpki8; +CREATE DATABASE rpki9; +CREATE DATABASE rpki10; +CREATE DATABASE rpki11; + +CREATE DATABASE pubd0; +CREATE DATABASE pubd1; +CREATE DATABASE pubd2; +CREATE DATABASE pubd3; +CREATE DATABASE pubd4; +CREATE DATABASE pubd5; +CREATE DATABASE pubd6; +CREATE DATABASE pubd7; +CREATE DATABASE pubd8; +CREATE DATABASE pubd9; +CREATE DATABASE pubd10; +CREATE DATABASE pubd11; + +GRANT ALL ON irdb0.* TO irdb@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON irdb1.* TO irdb@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON irdb2.* TO irdb@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON irdb3.* TO irdb@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON irdb4.* TO irdb@localhost 
IDENTIFIED BY 'fnord'; +GRANT ALL ON irdb5.* TO irdb@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON irdb6.* TO irdb@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON irdb7.* TO irdb@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON irdb8.* TO irdb@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON irdb9.* TO irdb@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON irdb10.* TO irdb@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON irdb11.* TO irdb@localhost IDENTIFIED BY 'fnord'; + +GRANT ALL ON rpki0.* TO rpki@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON rpki1.* TO rpki@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON rpki2.* TO rpki@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON rpki3.* TO rpki@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON rpki4.* TO rpki@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON rpki5.* TO rpki@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON rpki6.* TO rpki@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON rpki7.* TO rpki@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON rpki8.* TO rpki@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON rpki9.* TO rpki@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON rpki10.* TO rpki@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON rpki11.* TO rpki@localhost IDENTIFIED BY 'fnord'; + +GRANT ALL ON pubd0.* TO pubd@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON pubd1.* TO pubd@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON pubd2.* TO pubd@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON pubd3.* TO pubd@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON pubd4.* TO pubd@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON pubd5.* TO pubd@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON pubd6.* TO pubd@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON pubd7.* TO pubd@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON pubd8.* TO pubd@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON pubd9.* TO pubd@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON pubd10.* TO pubd@localhost IDENTIFIED BY 'fnord'; +GRANT ALL ON pubd11.* TO pubd@localhost IDENTIFIED BY 'fnord'; diff --git 
a/ca/tests/split-protocol-samples.xsl b/ca/tests/split-protocol-samples.xsl new file mode 100644 index 00000000..8800b6da --- /dev/null +++ b/ca/tests/split-protocol-samples.xsl @@ -0,0 +1,40 @@ + + + + + . + + + + + + + + + Writing + + + Automatically generated, do not edit. + + + + + diff --git a/ca/tests/sql-cleaner.py b/ca/tests/sql-cleaner.py new file mode 100644 index 00000000..c5b25ac2 --- /dev/null +++ b/ca/tests/sql-cleaner.py @@ -0,0 +1,61 @@ +# $Id$ +# +# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +(Re)Initialize SQL tables used by these programs. 
+""" + +import rpki.config +import rpki.sql_schemas +from rpki.mysql_import import MySQLdb + +cfg = rpki.config.parser(None, "yamltest", allow_missing = True) + +for name in ("rpkid", "irdbd", "pubd"): + + username = cfg.get("%s_sql_username" % name, name[:4]) + password = cfg.get("%s_sql_password" % name, "fnord") + + schema = [] + for line in getattr(rpki.sql_schemas, name, "").splitlines(): + schema.extend(line.partition("--")[0].split()) + schema = " ".join(schema).strip(";").split(";") + schema = [statement.strip() for statement in schema if statement and "DROP TABLE" not in statement] + + db = MySQLdb.connect(user = username, passwd = password) + cur = db.cursor() + + cur.execute("SHOW DATABASES") + + databases = [r[0] for r in cur.fetchall() if r[0][:4] == name[:4] and r[0][4:].isdigit()] + + for database in databases: + + cur.execute("USE " + database) + + cur.execute("SHOW TABLES") + tables = [r[0] for r in cur.fetchall()] + + cur.execute("SET foreign_key_checks = 0") + for table in tables: + cur.execute("DROP TABLE %s" % table) + cur.execute("SET foreign_key_checks = 1") + + for statement in schema: + cur.execute(statement) + + cur.close() + db.close() diff --git a/ca/tests/sql-dumper.py b/ca/tests/sql-dumper.py new file mode 100644 index 00000000..f4a7681d --- /dev/null +++ b/ca/tests/sql-dumper.py @@ -0,0 +1,43 @@ +# $Id$ +# +# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Dump backup copies of SQL tables used by these programs. +""" + +import subprocess +import rpki.config +from rpki.mysql_import import MySQLdb + +cfg = rpki.config.parser(None, "yamltest", allow_missing = True) + +for name in ("rpkid", "irdbd", "pubd"): + + username = cfg.get("%s_sql_username" % name, name[:4]) + password = cfg.get("%s_sql_password" % name, "fnord") + + cmd = ["mysqldump", "-u", username, "-p" + password, "--databases"] + + db = MySQLdb.connect(user = username, passwd = password) + cur = db.cursor() + + cur.execute("SHOW DATABASES") + cmd.extend(r[0] for r in cur.fetchall() if r[0][:4] == name[:4] and r[0][4:].isdigit()) + + cur.close() + db.close() + + subprocess.check_call(cmd, stdout = open("backup.%s.sql" % name, "w")) diff --git a/ca/tests/testpoke.py b/ca/tests/testpoke.py new file mode 100644 index 00000000..fd5ab206 --- /dev/null +++ b/ca/tests/testpoke.py @@ -0,0 +1,152 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Trivial RPKI up-down protocol client, for testing. + +Configuration file is YAML to be compatable with APNIC rpki_poke.pl tool. +""" + +import os +import time +import argparse +import sys +import yaml +import rpki.resource_set +import rpki.up_down +import rpki.left_right +import rpki.x509 +import rpki.http +import rpki.config +import rpki.exceptions +import rpki.relaxng +import rpki.log +import rpki.async + +os.environ["TZ"] = "UTC" +time.tzset() + +parser = argparse.ArgumentParser(description = __doc__) +parser.add_argument("-y", "--yaml", required = True, type = argparse.FileType("r"), + help = "configuration file") +parser.add_argument("-r", "--request", + help = "request name") +parser.add_argument("-d", "--debug", + help = "enable debugging") +args = parser.parse_args() + +rpki.log.init("testpoke") + +if args.debug: + rpki.log.set_trace(True) + +yaml_data = yaml.load(args.yaml) + +yaml_cmd = args.request + +if yaml_cmd is None and len(yaml_data["requests"]) == 1: + yaml_cmd = yaml_data["requests"].keys()[0] + +yaml_req = yaml_data["requests"][yaml_cmd] + +def get_PEM(name, cls, y = yaml_data): + if name in y: + return cls(PEM = y[name]) + if name + "-file" in y: + return cls(PEM_file = y[name + "-file"]) + return None + +def get_PEM_chain(name, cert = None): + chain = [] + if cert is not None: + chain.append(cert) + if name in yaml_data: + chain.extend([rpki.x509.X509(PEM = x) for x in yaml_data[name]]) + elif name + "-file" in yaml_data: + chain.extend([rpki.x509.X509(PEM_file = x) for x in yaml_data[name + "-file"]]) + return chain + +def query_up_down(q_pdu): + q_msg = rpki.up_down.message_pdu.make_query( + payload 
= q_pdu, + sender = yaml_data["sender-id"], + recipient = yaml_data["recipient-id"]) + q_der = rpki.up_down.cms_msg().wrap(q_msg, cms_key, cms_certs, cms_crl) + + def done(r_der): + global last_cms_timestamp + r_cms = rpki.up_down.cms_msg(DER = r_der) + r_msg = r_cms.unwrap([cms_ta] + cms_ca_certs) + last_cms_timestamp = r_cms.check_replay(last_cms_timestamp) + print r_cms.pretty_print_content() + try: + r_msg.payload.check_response() + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + fail(e) + + rpki.http.want_persistent_client = False + + rpki.http.client( + msg = q_der, + url = yaml_data["posturl"], + callback = done, + errback = fail) + +def do_list(): + query_up_down(rpki.up_down.list_pdu()) + +def do_issue(): + q_pdu = rpki.up_down.issue_pdu() + req_key = get_PEM("cert-request-key", rpki.x509.RSA, yaml_req) or cms_key + q_pdu.class_name = yaml_req["class"] + q_pdu.pkcs10 = rpki.x509.PKCS10.create( + keypair = req_key, + is_ca = True, + caRepository = yaml_req["sia"][0], + rpkiManifest = yaml_req["sia"][0] + req_key.gSKI() + ".mft") + query_up_down(q_pdu) + +def do_revoke(): + q_pdu = rpki.up_down.revoke_pdu() + q_pdu.class_name = yaml_req["class"] + q_pdu.ski = yaml_req["ski"] + query_up_down(q_pdu) + +dispatch = { "list" : do_list, "issue" : do_issue, "revoke" : do_revoke } + +def fail(e): # pylint: disable=W0621 + rpki.log.traceback(args.debug) + sys.exit("Testpoke failed: %s" % e) + +cms_ta = get_PEM("cms-ca-cert", rpki.x509.X509) +cms_cert = get_PEM("cms-cert", rpki.x509.X509) +cms_key = get_PEM("cms-key", rpki.x509.RSA) +cms_crl = get_PEM("cms-crl", rpki.x509.CRL) +cms_certs = get_PEM_chain("cms-cert-chain", cms_cert) +cms_ca_certs = get_PEM_chain("cms-ca-certs") + +last_cms_timestamp = None + +try: + dispatch[yaml_req["type"]]() + rpki.async.event_loop() +except Exception, e: + fail(e) diff --git a/ca/tests/testpoke.xsl b/ca/tests/testpoke.xsl new file mode 100644 index 00000000..91658b0b --- /dev/null +++ 
b/ca/tests/testpoke.xsl @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + -----BEGIN CERTIFICATE----- + + + -----END CERTIFICATE----- + + + + diff --git a/ca/tests/testpoke.yaml b/ca/tests/testpoke.yaml new file mode 100644 index 00000000..f2b2c618 --- /dev/null +++ b/ca/tests/testpoke.yaml @@ -0,0 +1,24 @@ +--- +# $Id$ + +version: 1 +posturl: http://localhost:4433/up-down/1 +recipient-id: wombat +sender-id: "1" + +cms-cert-file: biz-certs/Frank-EE.cer +cms-key-file: biz-certs/Frank-EE.key +cms-ca-cert-file: biz-certs/Bob-Root.cer +cms-cert-chain-file: [ biz-certs/Frank-CA.cer ] + +requests: + list: + type: list + issue: + type: issue + class: 1 + sia: [ "rsync://bandicoot.invalid/some/where/" ] + revoke: + type: revoke + class: 1 + ski: "CB5K6APY-4KcGAW9jaK_cVPXKX0" diff --git a/ca/tests/up-down-protocol-samples/Makefile b/ca/tests/up-down-protocol-samples/Makefile new file mode 100644 index 00000000..10ee791a --- /dev/null +++ b/ca/tests/up-down-protocol-samples/Makefile @@ -0,0 +1,11 @@ +XMLLINT = xmllint --noout --relaxng +JING = java -jar /usr/local/share/java/classes/jing.jar +SCHEMA = ../up-down-medium-schema.rng + +all: jing xmllint + +jing: + ${JING} ${SCHEMA} *.xml + +xmllint: + ${XMLLINT} ${SCHEMA} *.xml diff --git a/ca/tests/up-down-protocol-samples/error_response.xml b/ca/tests/up-down-protocol-samples/error_response.xml new file mode 100644 index 00000000..83af6649 --- /dev/null +++ b/ca/tests/up-down-protocol-samples/error_response.xml @@ -0,0 +1,9 @@ + + + 2001 + [Readable text] + diff --git a/ca/tests/up-down-protocol-samples/issue1.xml b/ca/tests/up-down-protocol-samples/issue1.xml new file mode 100644 index 00000000..4b8366f9 --- /dev/null +++ b/ca/tests/up-down-protocol-samples/issue1.xml @@ -0,0 +1,25 @@ + + + + MIICYTCCAUkCAQAwHDEaMBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWMwggEiMA0G + CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIi6ElZd/uon9Ur1IKGhr6DXWzPOng + KdOJIOlRSWcsQ9qgLNREs5YUqQd3YLlvAe+OVKV0rFpn+DBNEPmsn7h1YQv253zq + 
m1yYeks+xOJZQtMZyg9YDrfIgk7lu6z9kuWIsvxkz244OxiD/OemrvuQNtDhyk2Q + QQ8POyrADNl7fehQE/YJc4Kj0uO7ggiHf9K7Dg56KLYlArXZUfwzMkdH/89/vO4A + AbsFXi4Dmq2VO8rCxodkdDmqWWuu4KdRGgfyjkyOZS/f8pm64LaKT8AgcnmYAI8N + UBM90T6Mvdx0qTOoVh0xeHznAp6NChQSbdM3x3rwhBD+/k0olyZuCIWhAgMBAAGg + ADANBgkqhkiG9w0BAQUFAAOCAQEAj9bYIVfREySBzUhQSlbNi9kfdXgivC/4A7pn + b4sMm081S05u0QLhyh1XNF/L3/U5yVElVHE8xobM/CuAkXpy7N5GSYj2T28Fmn77 + 1y/xdGg6Jp26OkbrqY3gjQAaMigYg9/6tPAc9fgLiQAJLUUYb2hRqaqu4Ze8RrxU + RsnVpAHWYDFWJhNqEp8eErzAVLqxpmoYJKgmpK6TKyYKuf8+xf3Rlkb4+iu2FotR + DQrmcd6jmMjp9xLejDEuoPgcfpVP2CB1jUCAIW7yE7+a7vj9Mop1gs61zP8y/p2V + rVnXgEy93WZLjQt1D29oKhlcFGtCG4nqIBCDAWVuz/LGACB85w== + + diff --git a/ca/tests/up-down-protocol-samples/issue2.xml b/ca/tests/up-down-protocol-samples/issue2.xml new file mode 100644 index 00000000..a991cbcd --- /dev/null +++ b/ca/tests/up-down-protocol-samples/issue2.xml @@ -0,0 +1,24 @@ + + + + MIICYTCCAUkCAQAwHDEaMBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWMwggEiMA0G + CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIi6ElZd/uon9Ur1IKGhr6DXWzPOng + KdOJIOlRSWcsQ9qgLNREs5YUqQd3YLlvAe+OVKV0rFpn+DBNEPmsn7h1YQv253zq + m1yYeks+xOJZQtMZyg9YDrfIgk7lu6z9kuWIsvxkz244OxiD/OemrvuQNtDhyk2Q + QQ8POyrADNl7fehQE/YJc4Kj0uO7ggiHf9K7Dg56KLYlArXZUfwzMkdH/89/vO4A + AbsFXi4Dmq2VO8rCxodkdDmqWWuu4KdRGgfyjkyOZS/f8pm64LaKT8AgcnmYAI8N + UBM90T6Mvdx0qTOoVh0xeHznAp6NChQSbdM3x3rwhBD+/k0olyZuCIWhAgMBAAGg + ADANBgkqhkiG9w0BAQUFAAOCAQEAj9bYIVfREySBzUhQSlbNi9kfdXgivC/4A7pn + b4sMm081S05u0QLhyh1XNF/L3/U5yVElVHE8xobM/CuAkXpy7N5GSYj2T28Fmn77 + 1y/xdGg6Jp26OkbrqY3gjQAaMigYg9/6tPAc9fgLiQAJLUUYb2hRqaqu4Ze8RrxU + RsnVpAHWYDFWJhNqEp8eErzAVLqxpmoYJKgmpK6TKyYKuf8+xf3Rlkb4+iu2FotR + DQrmcd6jmMjp9xLejDEuoPgcfpVP2CB1jUCAIW7yE7+a7vj9Mop1gs61zP8y/p2V + rVnXgEy93WZLjQt1D29oKhlcFGtCG4nqIBCDAWVuz/LGACB85w== + + diff --git a/ca/tests/up-down-protocol-samples/issue_response.xml b/ca/tests/up-down-protocol-samples/issue_response.xml new file mode 100644 index 00000000..1ee34f62 --- /dev/null +++ 
b/ca/tests/up-down-protocol-samples/issue_response.xml @@ -0,0 +1,117 @@ + + + + + MIID3jCCAsagAwIBAgIBAzANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU + IEVOVElUWSBMSVIzMB4XDTA3MDgwMTE0NDgyMloXDTA4MDczMTE0NDgyMlowHDEa + MBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWEwggEiMA0GCSqGSIb3DQEBAQUAA4IB + DwAwggEKAoIBAQDmS614KGvmUBtlgdWNK1Z3zbvJR6CqMrAsrB/x5JArwjNv51Ox + 0B2rBSedt6HuqE/IWzYj4xLkUVknzf16qtxWBaFzq3ndPIKyj6757MA2OOYCqv2J + YCFSW7YzgHXlf/2sbuzUmiYvfihFFilHffOKctXkZfr0VG+uSDNiwTLxK4MzNmNg + nrzH55ldUdrNL4+DRyCe6cyjcsByvUktxFLqb9pCRnGQx69/n8fdC5aWPEWfwOpl + akPj85LV4XPAbiD1F+XRWNohs+kMTfDovXy374HJ9XDPqCB94mr5G2apyHHWMvhy + PYOZGQ0Ma+n4ks0zF4ZqPa8NBZSrHNQspEXLAgMBAAGjggEqMIIBJjAPBgNVHRMB + Af8EBTADAQH/MB0GA1UdDgQWBBQJ8BQLefsL/6jvVLnsPrmL0Muc7DAfBgNVHSME + GDAWgBSYvgT/gNGrlTmqPfIOZ30AraP9xTAOBgNVHQ8BAf8EBAMCAQYwQgYIKwYB + BQUHAQsENjA0MDIGCCsGAQUFBzAFhiZyc3luYzovL3dvbWJhdHMtci11cy5oYWN0 + cm4ubmV0L0lTUDVhLzBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAKGKHJzeW5j + Oi8vd29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvTElSMy5jZXIwOQYIKwYBBQUHAQcB + Af8EKjAoMAwEAgABMAYDBAAKAAAwGAQCAAIwEgMQACABDbgAAAAAAAAAAAoAADAN + BgkqhkiG9w0BAQUFAAOCAQEAkzKZYt3F6vAfWFAQN9M5N9n2klEmL9b9b4K4Vmv9 + DPNCBFbtZytCAphWB/FILS60MrvXHCcUoOmtOx3+Cw5D3yKX8Y9z2HbWmw2/7iDo + dxejgwGzI0OFa79vzC5pRwVz9CFFlMiuIShBFpHuSElmWmcxcQTJSXGU1fSGXHvG + Pv6RHSGzFJhUrW5RKOmoIrqk0JyM49R8IRAM+aMA+MOfALRTNAavW0pDlcuy+4wY + AIYRKF4k4ZDYZ9gA/LYnH56xvpEXwRE1bpxgUC5n8wQrdIn5/pJz3R5EgWe4CGOo + n/SMvEfe8d+LEc0C7LmtCwYoDOKENoOF809GVkbV9fjL8w== + + + MIID3jCCAsagAwIBAgIBAjANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU + IEVOVElUWSBMSVIzMB4XDTA3MDgwMTE0NDgyMFoXDTA4MDczMTE0NDgyMFowHDEa + MBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWIwggEiMA0GCSqGSIb3DQEBAQUAA4IB + DwAwggEKAoIBAQC/j1nY/PodBHApznsBZCFA3FxD/kyviMhim76cco+KpTSKOyON + m4pPv2asaHGc/WhZ9b+fTS611uP6vfNgU1y3EayVC8CHzZmelFeN7AW436r8jjjT + D2VtCWDy4ZiBcthRPkGRsxCV9fXQ+eVcoYX6cSaF49FMAn8U4h5KipZontYWpe+t + tYNizSN0fIJWtNE0U1qKemGfrlRb7/lW3odrQpK8SfS1wzUHShhH0pLGHBZ0dLHp + 
OTxTEgWd69ycciuXTSchd5Z9TM55DPunuJlrZiAuVpxEtONegMR9eKG0BfcgfSYe + RL9daRU8eiRnvbm1CA8zTa87Lee5qx0r1vtzAgMBAAGjggEqMIIBJjAPBgNVHRMB + Af8EBTADAQH/MB0GA1UdDgQWBBRss2WU/safSlCdTYtAGqH9lxeXkjAfBgNVHSME + GDAWgBSYvgT/gNGrlTmqPfIOZ30AraP9xTAOBgNVHQ8BAf8EBAMCAQYwQgYIKwYB + BQUHAQsENjA0MDIGCCsGAQUFBzAFhiZyc3luYzovL3dvbWJhdHMtci11cy5oYWN0 + cm4ubmV0L0lTUDViLzBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAKGKHJzeW5j + Oi8vd29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvTElSMy5jZXIwOQYIKwYBBQUHAQcB + Af8EKjAoMAwEAgABMAYDBAAKAwAwGAQCAAIwEgMQACABDbgAAAAAAAAAAAoDADAN + BgkqhkiG9w0BAQUFAAOCAQEARNgVrXF+6W7sMytC7YyKSt+CpJGZV7AvzKNZKv8k + xazhefrXkrpyK0caz4BtCHbptZFgNR/dDOC9M3wn0PcRTh9ISgW8beNfut16uj1F + fZdylJvNMXa4lt/wfRbzKqPicusCH0nutkRIW2mZuLuAO8v1vKr4umgZU+z/rXWu + glEA7OeBwmvPoqKixbgER5GtnTNySKIVVa1DUo/2CaPT/YjT48P0zXHoy6rnNgcn + 2emkoegzzS2cN+5I5I+O8IRnZInqmiPgEgElgEFw+rg6xw23yax5Nyqx12J56tt0 + tPWGhrYe1dCwKZajWKn3P9+NMcGQ0d8bw/QU+B3RyVeVfw== + + + MIIDxjCCAq6gAwIBAgIBATANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU + IEVOVElUWSBMSVIzMB4XDTA3MDgwMTE0NDgxOFoXDTA4MDczMTE0NDgxOFowHDEa + MBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWMwggEiMA0GCSqGSIb3DQEBAQUAA4IB + DwAwggEKAoIBAQDIi6ElZd/uon9Ur1IKGhr6DXWzPOngKdOJIOlRSWcsQ9qgLNRE + s5YUqQd3YLlvAe+OVKV0rFpn+DBNEPmsn7h1YQv253zqm1yYeks+xOJZQtMZyg9Y + DrfIgk7lu6z9kuWIsvxkz244OxiD/OemrvuQNtDhyk2QQQ8POyrADNl7fehQE/YJ + c4Kj0uO7ggiHf9K7Dg56KLYlArXZUfwzMkdH/89/vO4AAbsFXi4Dmq2VO8rCxodk + dDmqWWuu4KdRGgfyjkyOZS/f8pm64LaKT8AgcnmYAI8NUBM90T6Mvdx0qTOoVh0x + eHznAp6NChQSbdM3x3rwhBD+/k0olyZuCIWhAgMBAAGjggESMIIBDjAPBgNVHRMB + Af8EBTADAQH/MB0GA1UdDgQWBBQth8Ga+FgrvcL4fjBHs6mIN8nrRjAfBgNVHSME + GDAWgBSYvgT/gNGrlTmqPfIOZ30AraP9xTAOBgNVHQ8BAf8EBAMCAQYwQgYIKwYB + BQUHAQsENjA0MDIGCCsGAQUFBzAFhiZyc3luYzovL3dvbWJhdHMtci11cy5oYWN0 + cm4ubmV0L0lTUDVjLzBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAKGKHJzeW5j + Oi8vd29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvTElSMy5jZXIwIQYIKwYBBQUHAQgB + Af8EEjAQoA4wDDAKAgMA/BYCAwD8HDANBgkqhkiG9w0BAQUFAAOCAQEAUGsbhHfl + kwhe3EIkhnARJPgRkWgPCJtJ9konhROT7VlJ0Pim0kSrJWlBWUCLeKvSjQmowP4g + 
SddHxN4ZoXnSjb0pCDeomrZeViVQ2hxH6L/tHkl5SIEHl9MvFOe8junvgpq9GGAI + CFcibkW7Gp9p4A+GQkns0l9v+wGwuVZmqmJk4YBo7hHZRbg6/IFL1MD3HKeXmn33 + lCwFhjUuDIMXRbY/1k5fui13QUolN7mLSk60NvXJ94Tga68c1eCIpapvhxAYw69G + 7mOX42aYu1FnidZNj7Lt9jOuW0REHlavrG17HxP5loTuCNtLH1ZIrJcO7rUz9C0D + YqMybYWFUqZHyg== + + + MIIEFTCCAv2gAwIBAgIBDjANBgkqhkiG9w0BAQUFADAaMRgwFgYDVQQDEw9URVNU + IEVOVElUWSBSSVIwHhcNMDcwODAxMTQ0ODE4WhcNMDgwNzMxMTQ0ODE4WjAbMRkw + FwYDVQQDExBURVNUIEVOVElUWSBMSVIzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A + MIIBCgKCAQEAoyFXYWSvERjUy96m3K3ZLA8PWJ9+yIVVESZMfPBraBqeagyP5tw9 + g1gqzHesGXNvWvNuJKzNGtwdC0xE9W2LChc9hvno/uZg5Z9AauWU6JpWFxccq8GM + N0ArVb8sXtyNyiV/il/u+xaG6+AI0ybl43DFDGv7G49rXPbiSlilNQHqBRs+zoS+ + tT9tGBZLaOV5TIh9tqVlozrCMtytj4oF7vbpeoDaEqkPWrXS0zGsPtMZJS0o3nls + zv13ZtXjL6nL+YWMILuihiPwk5UgBHjHxwem/vD0RbvPeCvdzpwIpUZoEEzXBWJs + hlotfwY4wk27RIcAQ3nSj/NrsvRcHLloAQIDAQABo4IBYzCCAV8wDwYDVR0TAQH/ + BAUwAwEB/zAdBgNVHQ4EFgQUmL4E/4DRq5U5qj3yDmd9AK2j/cUwHwYDVR0jBBgw + FoAU+7inozZICqCf8C7ci2i8s1xFJdcwDgYDVR0PAQH/BAQDAgEGMEEGCCsGAQUF + BwELBDUwMzAxBggrBgEFBQcwBYYlcnN5bmM6Ly93b21iYXRzLXItdXMuaGFjdHJu + Lm5ldC9MSVIzLzBDBggrBgEFBQcBAQQ3MDUwMwYIKwYBBQUHMAKGJ3JzeW5jOi8v + d29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvUklSLmNlcjAhBggrBgEFBQcBCAEB/wQS + MBCgDjAMMAoCAwD8FgIDAPwcMFEGCCsGAQUFBwEHAQH/BEIwQDASBAIAATAMAwQA + CgAAAwQACgMAMCoEAgACMCQDEAAgAQ24AAAAAAAAAAAKAAADEAAgAQ24AAAAAAAA + AAAKAwAwDQYJKoZIhvcNAQEFBQADggEBAEhmCa7kUuozB6aST0Gd2XStJBcR1oWI + 8mZS5WEOjnjbVvuryDEb0fLs3x2HgCHZgZ7IAOg31cNxJpc1Ff6ZYEG+m3LpkcG/ + yOMllfOVK8RQSY+nKuya2fm2J3dCOKogEjBW20HwxNd1WgFLrDaOTR9V+iROBPKs + 3ppMPp6ksPqEqDU/3N3bLHROIISlFwWHilXuTK5ZAnzncDIQnm+zUuxI/0d3v6Fp + 8VxVlNBHqzo0VpakZOkxwqo01qJRsoxVaIxeetGNQ4noPhtj6bEM4Y8xDS9f3R7o + eEHUSTnKonMemm/AB3KZnjwL7rkL2FI1ThmDRO3Z3lprbThjThJF8EU= + + + diff --git a/ca/tests/up-down-protocol-samples/list.xml b/ca/tests/up-down-protocol-samples/list.xml new file mode 100644 index 00000000..01a803f3 --- /dev/null +++ b/ca/tests/up-down-protocol-samples/list.xml @@ -0,0 +1,6 @@ + + diff 
--git a/ca/tests/up-down-protocol-samples/list_response.xml b/ca/tests/up-down-protocol-samples/list_response.xml new file mode 100644 index 00000000..09634955 --- /dev/null +++ b/ca/tests/up-down-protocol-samples/list_response.xml @@ -0,0 +1,171 @@ + + + + + MIID3jCCAsagAwIBAgIBAzANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU + IEVOVElUWSBMSVIzMB4XDTA3MDgwMTE0NDgyMloXDTA4MDczMTE0NDgyMlowHDEa + MBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWEwggEiMA0GCSqGSIb3DQEBAQUAA4IB + DwAwggEKAoIBAQDmS614KGvmUBtlgdWNK1Z3zbvJR6CqMrAsrB/x5JArwjNv51Ox + 0B2rBSedt6HuqE/IWzYj4xLkUVknzf16qtxWBaFzq3ndPIKyj6757MA2OOYCqv2J + YCFSW7YzgHXlf/2sbuzUmiYvfihFFilHffOKctXkZfr0VG+uSDNiwTLxK4MzNmNg + nrzH55ldUdrNL4+DRyCe6cyjcsByvUktxFLqb9pCRnGQx69/n8fdC5aWPEWfwOpl + akPj85LV4XPAbiD1F+XRWNohs+kMTfDovXy374HJ9XDPqCB94mr5G2apyHHWMvhy + PYOZGQ0Ma+n4ks0zF4ZqPa8NBZSrHNQspEXLAgMBAAGjggEqMIIBJjAPBgNVHRMB + Af8EBTADAQH/MB0GA1UdDgQWBBQJ8BQLefsL/6jvVLnsPrmL0Muc7DAfBgNVHSME + GDAWgBSYvgT/gNGrlTmqPfIOZ30AraP9xTAOBgNVHQ8BAf8EBAMCAQYwQgYIKwYB + BQUHAQsENjA0MDIGCCsGAQUFBzAFhiZyc3luYzovL3dvbWJhdHMtci11cy5oYWN0 + cm4ubmV0L0lTUDVhLzBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAKGKHJzeW5j + Oi8vd29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvTElSMy5jZXIwOQYIKwYBBQUHAQcB + Af8EKjAoMAwEAgABMAYDBAAKAAAwGAQCAAIwEgMQACABDbgAAAAAAAAAAAoAADAN + BgkqhkiG9w0BAQUFAAOCAQEAkzKZYt3F6vAfWFAQN9M5N9n2klEmL9b9b4K4Vmv9 + DPNCBFbtZytCAphWB/FILS60MrvXHCcUoOmtOx3+Cw5D3yKX8Y9z2HbWmw2/7iDo + dxejgwGzI0OFa79vzC5pRwVz9CFFlMiuIShBFpHuSElmWmcxcQTJSXGU1fSGXHvG + Pv6RHSGzFJhUrW5RKOmoIrqk0JyM49R8IRAM+aMA+MOfALRTNAavW0pDlcuy+4wY + AIYRKF4k4ZDYZ9gA/LYnH56xvpEXwRE1bpxgUC5n8wQrdIn5/pJz3R5EgWe4CGOo + n/SMvEfe8d+LEc0C7LmtCwYoDOKENoOF809GVkbV9fjL8w== + + + MIID3jCCAsagAwIBAgIBAjANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU + IEVOVElUWSBMSVIzMB4XDTA3MDgwMTE0NDgyMFoXDTA4MDczMTE0NDgyMFowHDEa + MBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWIwggEiMA0GCSqGSIb3DQEBAQUAA4IB + DwAwggEKAoIBAQC/j1nY/PodBHApznsBZCFA3FxD/kyviMhim76cco+KpTSKOyON + m4pPv2asaHGc/WhZ9b+fTS611uP6vfNgU1y3EayVC8CHzZmelFeN7AW436r8jjjT + 
D2VtCWDy4ZiBcthRPkGRsxCV9fXQ+eVcoYX6cSaF49FMAn8U4h5KipZontYWpe+t + tYNizSN0fIJWtNE0U1qKemGfrlRb7/lW3odrQpK8SfS1wzUHShhH0pLGHBZ0dLHp + OTxTEgWd69ycciuXTSchd5Z9TM55DPunuJlrZiAuVpxEtONegMR9eKG0BfcgfSYe + RL9daRU8eiRnvbm1CA8zTa87Lee5qx0r1vtzAgMBAAGjggEqMIIBJjAPBgNVHRMB + Af8EBTADAQH/MB0GA1UdDgQWBBRss2WU/safSlCdTYtAGqH9lxeXkjAfBgNVHSME + GDAWgBSYvgT/gNGrlTmqPfIOZ30AraP9xTAOBgNVHQ8BAf8EBAMCAQYwQgYIKwYB + BQUHAQsENjA0MDIGCCsGAQUFBzAFhiZyc3luYzovL3dvbWJhdHMtci11cy5oYWN0 + cm4ubmV0L0lTUDViLzBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAKGKHJzeW5j + Oi8vd29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvTElSMy5jZXIwOQYIKwYBBQUHAQcB + Af8EKjAoMAwEAgABMAYDBAAKAwAwGAQCAAIwEgMQACABDbgAAAAAAAAAAAoDADAN + BgkqhkiG9w0BAQUFAAOCAQEARNgVrXF+6W7sMytC7YyKSt+CpJGZV7AvzKNZKv8k + xazhefrXkrpyK0caz4BtCHbptZFgNR/dDOC9M3wn0PcRTh9ISgW8beNfut16uj1F + fZdylJvNMXa4lt/wfRbzKqPicusCH0nutkRIW2mZuLuAO8v1vKr4umgZU+z/rXWu + glEA7OeBwmvPoqKixbgER5GtnTNySKIVVa1DUo/2CaPT/YjT48P0zXHoy6rnNgcn + 2emkoegzzS2cN+5I5I+O8IRnZInqmiPgEgElgEFw+rg6xw23yax5Nyqx12J56tt0 + tPWGhrYe1dCwKZajWKn3P9+NMcGQ0d8bw/QU+B3RyVeVfw== + + + MIIDxjCCAq6gAwIBAgIBATANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU + IEVOVElUWSBMSVIzMB4XDTA3MDgwMTE0NDgxOFoXDTA4MDczMTE0NDgxOFowHDEa + MBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWMwggEiMA0GCSqGSIb3DQEBAQUAA4IB + DwAwggEKAoIBAQDIi6ElZd/uon9Ur1IKGhr6DXWzPOngKdOJIOlRSWcsQ9qgLNRE + s5YUqQd3YLlvAe+OVKV0rFpn+DBNEPmsn7h1YQv253zqm1yYeks+xOJZQtMZyg9Y + DrfIgk7lu6z9kuWIsvxkz244OxiD/OemrvuQNtDhyk2QQQ8POyrADNl7fehQE/YJ + c4Kj0uO7ggiHf9K7Dg56KLYlArXZUfwzMkdH/89/vO4AAbsFXi4Dmq2VO8rCxodk + dDmqWWuu4KdRGgfyjkyOZS/f8pm64LaKT8AgcnmYAI8NUBM90T6Mvdx0qTOoVh0x + eHznAp6NChQSbdM3x3rwhBD+/k0olyZuCIWhAgMBAAGjggESMIIBDjAPBgNVHRMB + Af8EBTADAQH/MB0GA1UdDgQWBBQth8Ga+FgrvcL4fjBHs6mIN8nrRjAfBgNVHSME + GDAWgBSYvgT/gNGrlTmqPfIOZ30AraP9xTAOBgNVHQ8BAf8EBAMCAQYwQgYIKwYB + BQUHAQsENjA0MDIGCCsGAQUFBzAFhiZyc3luYzovL3dvbWJhdHMtci11cy5oYWN0 + cm4ubmV0L0lTUDVjLzBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAKGKHJzeW5j + Oi8vd29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvTElSMy5jZXIwIQYIKwYBBQUHAQgB + 
Af8EEjAQoA4wDDAKAgMA/BYCAwD8HDANBgkqhkiG9w0BAQUFAAOCAQEAUGsbhHfl + kwhe3EIkhnARJPgRkWgPCJtJ9konhROT7VlJ0Pim0kSrJWlBWUCLeKvSjQmowP4g + SddHxN4ZoXnSjb0pCDeomrZeViVQ2hxH6L/tHkl5SIEHl9MvFOe8junvgpq9GGAI + CFcibkW7Gp9p4A+GQkns0l9v+wGwuVZmqmJk4YBo7hHZRbg6/IFL1MD3HKeXmn33 + lCwFhjUuDIMXRbY/1k5fui13QUolN7mLSk60NvXJ94Tga68c1eCIpapvhxAYw69G + 7mOX42aYu1FnidZNj7Lt9jOuW0REHlavrG17HxP5loTuCNtLH1ZIrJcO7rUz9C0D + YqMybYWFUqZHyg== + + + MIIEFTCCAv2gAwIBAgIBDjANBgkqhkiG9w0BAQUFADAaMRgwFgYDVQQDEw9URVNU + IEVOVElUWSBSSVIwHhcNMDcwODAxMTQ0ODE4WhcNMDgwNzMxMTQ0ODE4WjAbMRkw + FwYDVQQDExBURVNUIEVOVElUWSBMSVIzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A + MIIBCgKCAQEAoyFXYWSvERjUy96m3K3ZLA8PWJ9+yIVVESZMfPBraBqeagyP5tw9 + g1gqzHesGXNvWvNuJKzNGtwdC0xE9W2LChc9hvno/uZg5Z9AauWU6JpWFxccq8GM + N0ArVb8sXtyNyiV/il/u+xaG6+AI0ybl43DFDGv7G49rXPbiSlilNQHqBRs+zoS+ + tT9tGBZLaOV5TIh9tqVlozrCMtytj4oF7vbpeoDaEqkPWrXS0zGsPtMZJS0o3nls + zv13ZtXjL6nL+YWMILuihiPwk5UgBHjHxwem/vD0RbvPeCvdzpwIpUZoEEzXBWJs + hlotfwY4wk27RIcAQ3nSj/NrsvRcHLloAQIDAQABo4IBYzCCAV8wDwYDVR0TAQH/ + BAUwAwEB/zAdBgNVHQ4EFgQUmL4E/4DRq5U5qj3yDmd9AK2j/cUwHwYDVR0jBBgw + FoAU+7inozZICqCf8C7ci2i8s1xFJdcwDgYDVR0PAQH/BAQDAgEGMEEGCCsGAQUF + BwELBDUwMzAxBggrBgEFBQcwBYYlcnN5bmM6Ly93b21iYXRzLXItdXMuaGFjdHJu + Lm5ldC9MSVIzLzBDBggrBgEFBQcBAQQ3MDUwMwYIKwYBBQUHMAKGJ3JzeW5jOi8v + d29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvUklSLmNlcjAhBggrBgEFBQcBCAEB/wQS + MBCgDjAMMAoCAwD8FgIDAPwcMFEGCCsGAQUFBwEHAQH/BEIwQDASBAIAATAMAwQA + CgAAAwQACgMAMCoEAgACMCQDEAAgAQ24AAAAAAAAAAAKAAADEAAgAQ24AAAAAAAA + AAAKAwAwDQYJKoZIhvcNAQEFBQADggEBAEhmCa7kUuozB6aST0Gd2XStJBcR1oWI + 8mZS5WEOjnjbVvuryDEb0fLs3x2HgCHZgZ7IAOg31cNxJpc1Ff6ZYEG+m3LpkcG/ + yOMllfOVK8RQSY+nKuya2fm2J3dCOKogEjBW20HwxNd1WgFLrDaOTR9V+iROBPKs + 3ppMPp6ksPqEqDU/3N3bLHROIISlFwWHilXuTK5ZAnzncDIQnm+zUuxI/0d3v6Fp + 8VxVlNBHqzo0VpakZOkxwqo01qJRsoxVaIxeetGNQ4noPhtj6bEM4Y8xDS9f3R7o + eEHUSTnKonMemm/AB3KZnjwL7rkL2FI1ThmDRO3Z3lprbThjThJF8EU= + + + + + MIIDzDCCArSgAwIBAgIBCTANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU + 
IEVOVElUWSBMSVIxMB4XDTA3MDgwMTE0NDgyMloXDTA4MDczMTE0NDgyMlowGzEZ + MBcGA1UEAxMQVEVTVCBFTlRJVFkgSVNQMjCCASIwDQYJKoZIhvcNAQEBBQADggEP + ADCCAQoCggEBANB338Qhrxtaa6inKNdDyJttJdiNf5Er45X9kmCsFBLXI2iFSw7b + K+Y44EjbGDePQMCQWA4/CWdfjj8EdQZgkkLz5EUENZVd6SJCLPZcpn15jOEIGXw1 + nTr95/+bKbXuiUfMDYOg4XOvHwmEqAuDzHmIv3wdc9arQhtkmlwZgyud5a1MWAV2 + lXAj7qXAMcqip8gdHvLJ8j04gsJT5VSG8nyxc+Hc6YZzCKxZO74vWMFCxYAYjDoK + KjL2/ijQKFKDxjBpUZBZGZvT1MLgUmrBTlmaGOR4Llf5fytddijJycV+5UOhm2jS + Bhy+P2n5wvqeT2jPY2/bbfxnNcCxbgo37DMCAwEAAaOCARkwggEVMA8GA1UdEwEB + /wQFMAMBAf8wHQYDVR0OBBYEFHOyFhrN3NcwYA/6gZX4ovVOlfOtMB8GA1UdIwQY + MBaAFIqUF/lT8luUVFbfdlETKfZxGaizMA4GA1UdDwEB/wQEAwIBBjBBBggrBgEF + BQcBCwQ1MDMwMQYIKwYBBQUHMAWGJXJzeW5jOi8vd29tYmF0cy1yLXVzLmhhY3Ry + bi5uZXQvSVNQMi8wRAYIKwYBBQUHAQEEODA2MDQGCCsGAQUFBzAChihyc3luYzov + L3dvbWJhdHMtci11cy5oYWN0cm4ubmV0L0xJUjEuY2VyMCkGCCsGAQUFBwEHAQH/ + BBowGDAWBAIAATAQMA4DBQLAAAIsAwUAwAACZDANBgkqhkiG9w0BAQUFAAOCAQEA + CvG1rzj5fZOV1Oq/SO+NYzxOHIA9egYgQg4NUpmqSz6v17RhR0+3tPfMmzxepTs8 + ut23KieOG7RcPGvR2f/CEvedgrrPdTS81wu01qhPWJNqriN6N+Mu8XCK3fUO+t+w + PxLUWqwzrRUcpdy+CMOOGg81Eg7e77iAeJCp648AChUdBRI6HTfp9PlKd25pJ7fj + f654MpKGbTkWXllPkxC1sL4cJUcq4o+Sn1zAKkjXUwAUjp6G6s+mIWZQiZU5Pv8n + lYXvPciYf83+wTBllLGtSFyut8hk6WmiB8rC1/5jS96pJaGRSxejqd0r99GlPre+ + QgMe2TRfFuM1esod7j1M1Q== + + + MIID9jCCAt6gAwIBAgIBEDANBgkqhkiG9w0BAQUFADAaMRgwFgYDVQQDEw9URVNU + IEVOVElUWSBSSVIwHhcNMDcwODAxMTQ0ODE4WhcNMDgwNzMxMTQ0ODE4WjAbMRkw + FwYDVQQDExBURVNUIEVOVElUWSBMSVIxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A + MIIBCgKCAQEAr10c+dm71QHhWzbMUfb9hldgqp7H7E4Fr/tRXHrCWMSoV64UYum8 + tnJ9z0nISkCCSvQ+MLWUJZ5seIFXQ9aFAo3RnLXXNC/iqX0YJ7VHmkIWyJB/lizd + uJgXH3diSggALeBzDDk3ug+nWVlMfM3iXNeYNhBsiD5FmaaIL/Z/MUm6QisTecKy + 8QnZrTekQbZtRqEYBaBTB47gmLLR/Wdod2TV8/4dIjaeJloaqhiUwyx+mq++LJ1e + dSxJ1jcrBh/MY5d+7ixfZ69NYj56HwzhHgLy0gZ1rj8RvI4PE2Q4FDYdXQLsr2XV + uWj0ImYr70dbrTvyr7ZxDJRWinwBNvA6PwIDAQABo4IBRDCCAUAwDwYDVR0TAQH/ + BAUwAwEB/zAdBgNVHQ4EFgQUipQX+VPyW5RUVt92URMp9nEZqLMwHwYDVR0jBBgw + 
FoAU+7inozZICqCf8C7ci2i8s1xFJdcwDgYDVR0PAQH/BAQDAgEGMEEGCCsGAQUF + BwELBDUwMzAxBggrBgEFBQcwBYYlcnN5bmM6Ly93b21iYXRzLXItdXMuaGFjdHJu + Lm5ldC9MSVIxLzBDBggrBgEFBQcBAQQ3MDUwMwYIKwYBBQUHMAKGJ3JzeW5jOi8v + d29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvUklSLmNlcjAaBggrBgEFBQcBCAEB/wQL + MAmgBzAFAgMA/BUwOQYIKwYBBQUHAQcBAf8EKjAoMCYEAgABMCAwDgMFAMAAAgED + BQHAAAIgMA4DBQLAAAIsAwUAwAACZDANBgkqhkiG9w0BAQUFAAOCAQEAcn3dpGAj + ceSZKAuaulzTl0ty64mBPBGFjCXtebJQpeiuDjd0+SyhvpaDNUANNvkyFnQlnPcP + zUZHjrnNrAx+06yEXvYx9KnyBc2C1+DXOySbxxXR253CHZL3Gam4oWcK+z0jOgWD + KQVQ4wAnqYD+u1HxPjsMmK7x7tETckZkj0syTs9kzxqlsTSm8F8Y+ES7E+qNXyR9 + OxVgjr70vdgEp9AQftMQZ781SclWz7eLe5sXC1TuIct1sD6NssHGfCaxfFipSjEk + zeU/pZodfREUQSrlVbbb9HU0N59eHfGGKvZ0vojhuWPOrVzpPJGKTI20aQPn+VJ5 + KH3Nf1ICSa7Vxw== + + + diff --git a/ca/tests/up-down-protocol-samples/revoke.xml b/ca/tests/up-down-protocol-samples/revoke.xml new file mode 100644 index 00000000..eb4b3efb --- /dev/null +++ b/ca/tests/up-down-protocol-samples/revoke.xml @@ -0,0 +1,9 @@ + + + + diff --git a/ca/tests/up-down-protocol-samples/revoke_response.xml b/ca/tests/up-down-protocol-samples/revoke_response.xml new file mode 100644 index 00000000..9f4ebacc --- /dev/null +++ b/ca/tests/up-down-protocol-samples/revoke_response.xml @@ -0,0 +1,9 @@ + + + + diff --git a/ca/tests/xml-parse-test.py b/ca/tests/xml-parse-test.py new file mode 100644 index 00000000..42b54695 --- /dev/null +++ b/ca/tests/xml-parse-test.py @@ -0,0 +1,119 @@ +# $Id$ +# +# Copyright (C) 2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +import glob, lxml.etree, lxml.sax +import rpki.up_down, rpki.left_right, rpki.publication, rpki.relaxng + +verbose = False + +def test(fileglob, rng, sax_handler, encoding, tester = None): + files = glob.glob(fileglob) + files.sort() + for f in files: + print "" + handler = sax_handler() + elt_in = lxml.etree.parse(f).getroot() + if verbose: + print "" + print lxml.etree.tostring(elt_in, pretty_print = True, encoding = encoding, xml_declaration = True) + rng.assertValid(elt_in) + lxml.sax.saxify(elt_in, handler) + elt_out = handler.result.toXML() + if verbose: + print "" + print lxml.etree.tostring(elt_out, pretty_print = True, encoding = encoding, xml_declaration = True) + rng.assertValid(elt_out) + if (tester): + tester(elt_in, elt_out, handler.result) + if verbose: + print + +def pprint(pairs): + if verbose: + for thing, name in pairs: + if thing is not None: + print "[%s]" % name + print thing.get_POW().pprint() + +def ud_tester(elt_in, elt_out, msg): + assert isinstance(msg, rpki.up_down.message_pdu) + if isinstance(msg.payload, rpki.up_down.list_response_pdu): + for c in msg.payload.classes: + pprint([(c.certs[i].cert, ("%s certificate #%d" % (c.class_name, i))) for i in xrange(len(c.certs))] + [(c.issuer, ("%s issuer" % c.class_name))]) + +def lr_tester(elt_in, elt_out, msg): + assert isinstance(msg, rpki.left_right.msg) + for obj in msg: + if isinstance(obj, rpki.left_right.self_elt): + pprint(((obj.bpki_cert, "BPKI cert"), + (obj.bpki_glue, "BPKI glue"))) + if isinstance(obj, rpki.left_right.bsc_elt): + pprint(((obj.signing_cert, "Signing certificate"), + (obj.signing_cert_crl, "Signing certificate CRL"))) + # (obj.pkcs10_request, "PKCS #10 request") + if isinstance(obj, rpki.left_right.parent_elt): + pprint(((obj.bpki_cms_cert, "CMS certificate"), + (obj.bpki_cms_glue, "CMS glue"))) + if isinstance(obj, (rpki.left_right.child_elt, rpki.left_right.repository_elt)): + pprint(((obj.bpki_cert, "Certificate"), + (obj.bpki_glue, "Glue"))) + +def 
pp_tester(elt_in, elt_out, msg): + assert isinstance(msg, rpki.publication.msg) + for obj in msg: + if isinstance(obj, rpki.publication.client_elt): + pprint(((obj.bpki_cert, "BPKI cert"), + (obj.bpki_glue, "BPKI glue"))) + if isinstance(obj, rpki.publication.certificate_elt): + pprint(((obj.payload, "RPKI cert"),)) + if isinstance(obj, rpki.publication.crl_elt): + pprint(((obj.payload, "RPKI CRL"),)) + if isinstance(obj, rpki.publication.manifest_elt): + pprint(((obj.payload, "RPKI manifest"),)) + if isinstance(obj, rpki.publication.roa_elt): + pprint(((obj.payload, "ROA"),)) + +test(fileglob = "up-down-protocol-samples/*.xml", + rng = rpki.relaxng.up_down, + sax_handler = rpki.up_down.sax_handler, + encoding = "utf-8", + tester = ud_tester) + +test(fileglob = "left-right-protocol-samples/*.xml", + rng = rpki.relaxng.left_right, + sax_handler = rpki.left_right.sax_handler, + encoding = "us-ascii", + tester = lr_tester) + +test(fileglob = "publication-protocol-samples/*.xml", + rng = rpki.relaxng.publication, + sax_handler = rpki.publication.sax_handler, + encoding = "us-ascii", + tester = pp_tester) diff --git a/ca/tests/yamlconf.py b/ca/tests/yamlconf.py new file mode 100644 index 00000000..3c71d3cd --- /dev/null +++ b/ca/tests/yamlconf.py @@ -0,0 +1,794 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Test configuration tool, using the same YAML test description format +as smoketest.py and yamltest.py, but doing just the IRDB configuration +for a massive testbed, via direct use of the rpki.irdb library code. + +For most purposes, you don't want this, but when building a +configuration for tens or hundreds of thousands of elements, being +able to do the initial configuration stage quickly can help a lot. +""" + +# pylint: disable=W0702,W0621,W0602 + +import subprocess +import re +import os +import sys +import yaml +import time +import argparse +import rpki.resource_set +import rpki.sundial +import rpki.config +import rpki.log +import rpki.csv_utils +import rpki.x509 +import rpki.sql_schemas + +from rpki.mysql_import import MySQLdb + +section_regexp = re.compile(r"\s*\[\s*(.+?)\s*\]\s*$") +variable_regexp = re.compile(r"\s*([-a-zA-Z0-9_]+)\s*=\s*(.+?)\s*$") + +flat_publication = False +only_one_pubd = True +yaml_file = None +loopback = False +quiet = False +dns_suffix = None +mysql_rootuser = None +mysql_rootpass = None +publication_base = None +publication_root = None + +# The SQL username mismatch between rpkid/examples/rpki.conf and +# rpkid/tests/smoketest.setup.sql is completely stupid and really +# should be cleaned up at some point...but not today, at least not as +# part of writing this program. These default values are wired into +# yamltest to match smoketest.setup.sql, so wire them in here too but +# in a more obvious way. 
+ +config_overrides = { + "irdbd_sql_username" : "irdb", "irdbd_sql_password" : "fnord", + "rpkid_sql_username" : "rpki", "rpkid_sql_password" : "fnord", + "pubd_sql_username" : "pubd", "pubd_sql_password" : "fnord" } + +def cleanpath(*names): + return os.path.normpath(os.path.join(*names)) + +this_dir = os.getcwd() +test_dir = None +rpki_conf = None + +class roa_request(object): + """ + Representation of a ROA request. + """ + + def __init__(self, asn, ipv4, ipv6): + self.asn = asn + self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None + self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None + + def __eq__(self, other): + return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6 + + def __hash__(self): + v4 = tuple(self.v4) if self.v4 is not None else None + v6 = tuple(self.v6) if self.v6 is not None else None + return self.asn.__hash__() + v4.__hash__() + v6.__hash__() + + def __str__(self): + if self.v4 and self.v6: + return "%s: %s,%s" % (self.asn, self.v4, self.v6) + else: + return "%s: %s" % (self.asn, self.v4 or self.v6) + + @classmethod + def parse(cls, y): + return cls(y.get("asn"), y.get("ipv4"), y.get("ipv6")) + +class allocation_db(list): + """ + Allocation database. 
+ """ + + def __init__(self, y): + list.__init__(self) + self.root = allocation(y, self) + assert self.root.is_root + if self.root.crl_interval is None: + self.root.crl_interval = 60 * 60 + if self.root.regen_margin is None: + self.root.regen_margin = 24 * 60 * 60 + if self.root.base.valid_until is None: + self.root.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 2) + for a in self: + if a.base.valid_until is None: + a.base.valid_until = a.parent.base.valid_until + if a.crl_interval is None: + a.crl_interval = a.parent.crl_interval + if a.regen_margin is None: + a.regen_margin = a.parent.regen_margin + self.root.closure() + self.map = dict((a.name, a) for a in self) + for a in self: + if a.is_hosted: + a.hosted_by = self.map[a.hosted_by] + a.hosted_by.hosts.append(a) + assert not a.is_root and not a.hosted_by.is_hosted + + def dump(self): + for a in self: + a.dump() + + +class allocation(object): + """ + One entity in our allocation database. Every entity in the database + is assumed to hold resources. Entities that don't have the + hosted_by property run their own copies of rpkid, irdbd, and pubd. 
+ """ + + base_port = 4400 + base_engine = -1 + parent = None + crl_interval = None + regen_margin = None + engine = -1 + rpkid_port = 4404 + irdbd_port = 4403 + pubd_port = 4402 + rootd_port = 4401 + rsync_port = 873 + + @classmethod + def allocate_port(cls): + cls.base_port += 1 + return cls.base_port + + @classmethod + def allocate_engine(cls): + cls.base_engine += 1 + return cls.base_engine + + def __init__(self, y, db, parent = None): + db.append(self) + self.name = y["name"] + self.parent = parent + self.kids = [allocation(k, db, self) for k in y.get("kids", ())] + valid_until = None + if "valid_until" in y: + valid_until = rpki.sundial.datetime.from_datetime(y.get("valid_until")) + if valid_until is None and "valid_for" in y: + valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(y["valid_for"]) + self.base = rpki.resource_set.resource_bag( + asn = rpki.resource_set.resource_set_as(y.get("asn")), + v4 = rpki.resource_set.resource_set_ipv4(y.get("ipv4")), + v6 = rpki.resource_set.resource_set_ipv6(y.get("ipv6")), + valid_until = valid_until) + if "crl_interval" in y: + self.crl_interval = rpki.sundial.timedelta.parse(y["crl_interval"]).convert_to_seconds() + if "regen_margin" in y: + self.regen_margin = rpki.sundial.timedelta.parse(y["regen_margin"]).convert_to_seconds() + if "ghostbusters" in y: + self.ghostbusters = y.get("ghostbusters") + elif "ghostbuster" in y: + self.ghostbusters = [y.get("ghostbuster")] + else: + self.ghostbusters = [] + self.roa_requests = [roa_request.parse(r) for r in y.get("roa_request", ())] + for r in self.roa_requests: + if r.v4: + self.base.v4 |= r.v4.to_resource_set() + if r.v6: + self.base.v6 |= r.v6.to_resource_set() + self.hosted_by = y.get("hosted_by") + self.hosts = [] + if not self.is_hosted: + self.engine = self.allocate_engine() + if loopback and not self.is_hosted: + self.rpkid_port = self.allocate_port() + self.irdbd_port = self.allocate_port() + if loopback and self.runs_pubd: + self.pubd_port = 
self.allocate_port() + self.rsync_port = self.allocate_port() + if loopback and self.is_root: + self.rootd_port = self.allocate_port() + + def closure(self): + resources = self.base + for kid in self.kids: + resources |= kid.closure() + self.resources = resources + return resources + + @property + def hostname(self): + if loopback: + return "localhost" + elif dns_suffix: + return self.name + "." + dns_suffix.lstrip(".") + else: + return self.name + + @property + def rsync_server(self): + if loopback: + return "%s:%s" % (self.pubd.hostname, self.pubd.rsync_port) + else: + return self.pubd.hostname + + def dump(self): + if not quiet: + print str(self) + + def __str__(self): + s = self.name + ":\n" + if self.resources.asn: s += " ASNs: %s\n" % self.resources.asn + if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4 + if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6 + if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids) + if self.parent: s += " Up: %s\n" % self.parent.name + if self.is_hosted: s += " Host: %s\n" % self.hosted_by.name + if self.hosts: s += " Hosts: %s\n" % ", ".join(h.name for h in self.hosts) + for r in self.roa_requests: s += " ROA: %s\n" % r + if not self.is_hosted: s += " IPort: %s\n" % self.irdbd_port + if self.runs_pubd: s += " PPort: %s\n" % self.pubd_port + if not self.is_hosted: s += " RPort: %s\n" % self.rpkid_port + if self.runs_pubd: s += " SPort: %s\n" % self.rsync_port + if self.is_root: s += " TPort: %s\n" % self.rootd_port + return s + " Until: %s\n" % self.resources.valid_until + + @property + def is_root(self): + return self.parent is None + + @property + def is_hosted(self): + return self.hosted_by is not None + + @property + def runs_pubd(self): + return self.is_root or not (self.is_hosted or only_one_pubd) + + def path(self, *names): + return cleanpath(test_dir, self.host.name, *names) + + def csvout(self, fn): + path = self.path(fn) + if not quiet: + print "Writing", path + return 
rpki.csv_utils.csv_writer(path) + + def up_down_url(self): + return "http://%s:%d/up-down/%s/%s" % (self.parent.host.hostname, + self.parent.host.rpkid_port, + self.parent.name, + self.name) + + def dump_asns(self, fn): + with self.csvout(fn) as f: + for k in self.kids: + f.writerows((k.name, a) for a in k.resources.asn) + + def dump_prefixes(self, fn): + with self.csvout(fn) as f: + for k in self.kids: + f.writerows((k.name, p) for p in (k.resources.v4 + k.resources.v6)) + + def dump_roas(self, fn): + with self.csvout(fn) as f: + for g1, r in enumerate(self.roa_requests): + f.writerows((p, r.asn, "G%08d%08d" % (g1, g2)) + for g2, p in enumerate((r.v4 + r.v6 if r.v4 and r.v6 else r.v4 or r.v6 or ()))) + + def dump_ghostbusters(self, fn): + if self.ghostbusters: + path = self.path(fn) + if not quiet: + print "Writing", path + with open(path, "w") as f: + for i, g in enumerate(self.ghostbusters): + if i > 0: + f.write("\n") + f.write(g) + + @property + def pubd(self): + s = self + while not s.runs_pubd: + s = s.parent + return s + + @property + def client_handle(self): + path = [] + s = self + if not flat_publication: + while not s.runs_pubd: + path.append(s) + s = s.parent + path.append(s) + return ".".join(i.name for i in reversed(path)) + + @property + def host(self): + return self.hosted_by or self + + @property + def publication_base_directory(self): + if not loopback and publication_base is not None: + return publication_base + else: + return self.path("publication") + + @property + def publication_root_directory(self): + if not loopback and publication_root is not None: + return publication_root + else: + return self.path("publication.root") + + def dump_conf(self): + + r = dict( + handle = self.name, + run_rpkid = str(not self.is_hosted), + run_pubd = str(self.runs_pubd), + run_rootd = str(self.is_root), + irdbd_sql_username = "irdb", + rpkid_sql_username = "rpki", + rpkid_server_host = self.hostname, + rpkid_server_port = str(self.rpkid_port), + 
irdbd_server_host = "localhost", + irdbd_server_port = str(self.irdbd_port), + rootd_server_port = str(self.rootd_port), + pubd_sql_username = "pubd", + pubd_server_host = self.pubd.hostname, + pubd_server_port = str(self.pubd.pubd_port), + publication_rsync_server = self.rsync_server) + + if loopback: + r.update( + irdbd_sql_database = self.irdb_name, + rpkid_sql_database = "rpki%d" % self.engine, + pubd_sql_database = "pubd%d" % self.engine, + bpki_servers_directory = self.path(), + publication_base_directory = self.publication_base_directory) + + r.update(config_overrides) + + with open(self.path("rpki.conf"), "w") as f: + f.write("# Automatically generated, do not edit\n") + if not quiet: + print "Writing", f.name + + section = None + for line in open(rpki_conf): + m = section_regexp.match(line) + if m: + section = m.group(1) + m = variable_regexp.match(line) + option = m.group(1) if m and section == "myrpki" else None + if option and option in r: + line = "%s = %s\n" % (option, r[option]) + f.write(line) + + def dump_rsyncd(self): + lines = [] + if self.runs_pubd: + lines.extend(( + "# Automatically generated, do not edit", + "port = %d" % self.rsync_port, + "address = %s" % self.hostname, + "log file = rsyncd.log", + "read only = yes", + "use chroot = no", + "[rpki]", + "path = %s" % self.publication_base_directory, + "comment = RPKI test")) + if self.is_root: + assert self.runs_pubd + lines.extend(( + "[root]", + "path = %s" % self.publication_root_directory, + "comment = RPKI test root")) + if lines: + with open(self.path("rsyncd.conf"), "w") as f: + if not quiet: + print "Writing", f.name + f.writelines(line + "\n" for line in lines) + + @property + def irdb_name(self): + return "irdb%d" % self.host.engine + + @property + def irdb(self): + prior_name = self.zoo.handle + return rpki.irdb.database( + self.irdb_name, + on_entry = lambda: self.zoo.reset_identity(self.name), + on_exit = lambda: self.zoo.reset_identity(prior_name)) + + def syncdb(self): + import 
django.core.management + assert not self.is_hosted + django.core.management.call_command("syncdb", + database = self.irdb_name, + load_initial_data = False, + interactive = False, + verbosity = 0) + + def hire_zookeeper(self): + assert not self.is_hosted + self._zoo = rpki.irdb.Zookeeper( + cfg = rpki.config.parser(self.path("rpki.conf")), + logstream = None if quiet else sys.stdout) + + @property + def zoo(self): + return self.host._zoo + + def dump_root(self): + + assert self.is_root and not self.is_hosted + + root_resources = rpki.resource_set.resource_bag( + asn = rpki.resource_set.resource_set_as("0-4294967295"), + v4 = rpki.resource_set.resource_set_ipv4("0.0.0.0/0"), + v6 = rpki.resource_set.resource_set_ipv6("::/0")) + + root_key = rpki.x509.RSA.generate(quiet = True) + + root_uri = "rsync://%s/rpki/" % self.rsync_server + + root_sia = (root_uri, root_uri + "root.mft", None) + + root_cert = rpki.x509.X509.self_certify( + keypair = root_key, + subject_key = root_key.get_public(), + serial = 1, + sia = root_sia, + notAfter = rpki.sundial.now() + rpki.sundial.timedelta(days = 365), + resources = root_resources) + + with open(self.path("publication.root", "root.cer"), "wb") as f: + f.write(root_cert.get_DER()) + + with open(self.path("root.key"), "wb") as f: + f.write(root_key.get_DER()) + + with open(cleanpath(test_dir, "root.tal"), "w") as f: + f.write("rsync://%s/root/root.cer\n\n%s" % ( + self.rsync_server, root_key.get_public().get_Base64())) + + def mkdir(self, *path): + path = self.path(*path) + if not quiet: + print "Creating directory", path + os.makedirs(path) + + def dump_sql(self): + if not self.is_hosted: + with open(self.path("rpkid.sql"), "w") as f: + if not quiet: + print "Writing", f.name + f.write(rpki.sql_schemas.rpkid) + if self.runs_pubd: + with open(self.path("pubd.sql"), "w") as f: + if not quiet: + print "Writing", f.name + f.write(rpki.sql_schemas.pubd) + if not self.is_hosted: + username = config_overrides["irdbd_sql_username"] + 
password = config_overrides["irdbd_sql_password"] + cmd = ("mysqldump", "-u", username, "-p" + password, self.irdb_name) + with open(self.path("irdbd.sql"), "w") as f: + if not quiet: + print "Writing", f.name + subprocess.check_call(cmd, stdout = f) + + +def pre_django_sql_setup(needed): + + username = config_overrides["irdbd_sql_username"] + password = config_overrides["irdbd_sql_password"] + + # If we have the MySQL root password, just blow away and recreate + # the required databases. Otherwise, check for missing databases, + # then blow away all tables in the required databases. In either + # case, we assume that the Django syncdb code will populate + # databases as necessary, all we need to do here is provide empty + # databases for the Django code to fill in. + + if mysql_rootpass is not None: + if mysql_rootpass: + db = MySQLdb.connect(user = mysql_rootuser, passwd = mysql_rootpass) + else: + db = MySQLdb.connect(user = mysql_rootuser) + cur = db.cursor() + for database in needed: + try: + cur.execute("DROP DATABASE IF EXISTS %s" % database) + except: + pass + cur.execute("CREATE DATABASE %s" % database) + cur.execute("GRANT ALL ON %s.* TO %s@localhost IDENTIFIED BY %%s" % ( + database, username), (password,)) + + else: + db = MySQLdb.connect(user = username, passwd = password) + cur = db.cursor() + cur.execute("SHOW DATABASES") + existing = set(r[0] for r in cur.fetchall()) + if needed - existing: + sys.stderr.write("The following databases are missing:\n") + for database in sorted(needed - existing): + sys.stderr.write(" %s\n" % database) + sys.stderr.write("Please create them manually or put MySQL root password in my config file\n") + sys.exit("Missing databases and MySQL root password not known, can't continue") + for database in needed: + db.select_db(database) + cur.execute("SHOW TABLES") + tables = [r[0] for r in cur.fetchall()] + cur.execute("SET foreign_key_checks = 0") + for table in tables: + cur.execute("DROP TABLE %s" % table) + 
cur.execute("SET foreign_key_checks = 1") + + cur.close() + db.commit() + db.close() + +class timestamp(object): + + def __init__(self, *args): + self.count = 0 + self.start = self.tick = rpki.sundial.now() + + def __call__(self, *args): + now = rpki.sundial.now() + if not quiet: + print "[Count %s last %s total %s now %s]" % ( + self.count, now - self.tick, now - self.start, now) + self.tick = now + self.count += 1 + + +def main(): + + global flat_publication + global config_overrides + global only_one_pubd + global loopback + global dns_suffix + global mysql_rootuser + global mysql_rootpass + global yaml_file + global test_dir + global rpki_conf + global publication_base + global publication_root + global quiet + + os.environ["TZ"] = "UTC" + time.tzset() + + parser = argparse.ArgumentParser(description = "yamlconf") + parser.add_argument("-c", "--config", help = "configuration file") + parser.add_argument("--dns_suffix", + help = "DNS suffix to add to hostnames") + parser.add_argument("-l", "--loopback", action = "store_true", + help = "Configure for use with yamltest on localhost") + parser.add_argument("-f", "--flat_publication", action = "store_true", + help = "Use flat publication model") + parser.add_argument("-q", "--quiet", action = "store_true", + help = "Work more quietly") + parser.add_argument("--profile", + help = "Filename for profile output") + parser.add_argument("yaml_file", type = argparse.FileType("r"), + help = "YAML file describing network to build") + args = parser.parse_args() + + dns_suffix = args.dns_suffix + loopback = args.loopback + flat_publication = args.flat_publication + quiet = args.quiet + yaml_file = args.yaml_file + + rpki.log.init("yamlconf", use_syslog = False) + + # Allow optional config file for this tool to override default + # passwords: this is mostly so that I can show a complete working + # example without publishing my own server's passwords. 
+ + cfg = rpki.config.parser(args.config, "yamlconf", allow_missing = True) + try: + cfg.set_global_flags() + except: + pass + + # Use of "yamltest.dir" is deliberate: intent is for what we write to + # be usable with "yamltest --skip_config". + + only_one_pubd = cfg.getboolean("only_one_pubd", True) + test_dir = cfg.get("test_directory", cleanpath(this_dir, "yamltest.dir")) + rpki_conf = cfg.get("rpki_conf", cleanpath(this_dir, "..", "examples/rpki.conf")) + mysql_rootuser = cfg.get("mysql_rootuser", "root") + + try: + mysql_rootpass = cfg.get("mysql_rootpass") + except: + pass + + try: + publication_base = cfg.get("publication_base") + except: + pass + + try: + publication_root = cfg.get("publication_root") + except: + pass + + for k in ("rpkid_sql_password", "irdbd_sql_password", "pubd_sql_password", + "rpkid_sql_username", "irdbd_sql_username", "pubd_sql_username"): + if cfg.has_option(k): + config_overrides[k] = cfg.get(k) + + if args.profile: + import cProfile + prof = cProfile.Profile() + try: + prof.runcall(body) + finally: + prof.dump_stats(args.profile) + if not quiet: + print + print "Dumped profile data to %s" % args.profile + else: + body() + +def body(): + + global rpki + + ts = timestamp() + + for root, dirs, files in os.walk(test_dir, topdown = False): + for fn in files: + os.unlink(os.path.join(root, fn)) + for d in dirs: + os.rmdir(os.path.join(root, d)) + + if not quiet: + print + print "Reading YAML", yaml_file.name + + db = allocation_db(yaml.safe_load_all(yaml_file).next()) + + # Show what we loaded + + #db.dump() + + # Do pre-Django SQL setup + + pre_django_sql_setup(set(d.irdb_name for d in db if not d.is_hosted)) + + # Now ready for fun with multiple databases in Django! 
+ + # https://docs.djangoproject.com/en/1.4/topics/db/multi-db/ + # https://docs.djangoproject.com/en/1.4/topics/db/sql/ + + database_template = { + "ENGINE" : "django.db.backends.mysql", + "USER" : config_overrides["irdbd_sql_username"], + "PASSWORD" : config_overrides["irdbd_sql_password"], + "HOST" : "", + "PORT" : "", + "OPTIONS" : { "init_command": "SET storage_engine=INNODB" }} + + databases = dict((d.irdb_name, + dict(database_template, NAME = d.irdb_name)) + for d in db if not d.is_hosted) + + databases["default"] = databases[db.root.irdb_name] + + from django.conf import settings + + settings.configure( + DATABASES = databases, + DATABASE_ROUTERS = ["rpki.irdb.router.DBContextRouter"], + INSTALLED_APPS = ("rpki.irdb",)) + + import rpki.irdb + + rpki.irdb.models.ca_certificate_lifetime = rpki.sundial.timedelta(days = 3652 * 2) + rpki.irdb.models.ee_certificate_lifetime = rpki.sundial.timedelta(days = 3652) + + ts() + + for d in db: + if not quiet: + print + print "Configuring", d.name + + if not d.is_hosted: + d.mkdir() + if d.runs_pubd: + d.mkdir("publication") + if d.is_root: + d.mkdir("publication.root") + + if not d.is_hosted: + d.dump_conf() + d.dump_rsyncd() + + d.dump_asns("%s.asns.csv" % d.name) + d.dump_prefixes("%s.prefixes.csv" % d.name) + d.dump_roas("%s.roas.csv" % d.name) + d.dump_ghostbusters("%s.ghostbusters.vcard" % d.name) + + if not d.is_hosted: + if not quiet: + print "Initializing SQL" + d.syncdb() + if not quiet: + print "Hiring zookeeper" + d.hire_zookeeper() + + with d.irdb: + if not quiet: + print "Creating identity" + x = d.zoo.initialize() + + if d.is_root: + if not quiet: + print "Creating RPKI root certificate and TAL" + d.dump_root() + x = d.zoo.configure_rootd() + + else: + with d.parent.irdb: + x = d.parent.zoo.configure_child(x.file)[0] + x = d.zoo.configure_parent(x.file)[0] + + with d.pubd.irdb: + x = d.pubd.zoo.configure_publication_client(x.file, flat = flat_publication)[0] + d.zoo.configure_repository(x.file) + + if 
loopback and not d.is_hosted: + with d.irdb: + d.zoo.write_bpki_files() + + ts() + + if not loopback: + if not quiet: + print + for d in db: + d.dump_sql() + +if __name__ == "__main__": + main() diff --git a/ca/tests/yamltest-test-all.sh b/ca/tests/yamltest-test-all.sh new file mode 100644 index 00000000..8daea04e --- /dev/null +++ b/ca/tests/yamltest-test-all.sh @@ -0,0 +1,58 @@ +#!/bin/sh - +# $Id$ + +# Copyright (C) 2009-2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +set -x + +export TZ=UTC + +test -z "$STY" && exec screen -L sh $0 + +screen -X split +screen -X focus + +: ${runtime=900} + +for yaml in smoketest.*.yaml +do + settitle "$yaml: Starting" + rm -rf test rcynic-data + python sql-cleaner.py + now=$(date +%s) + finish=$(($now + $runtime)) + title="$yaml: will finish at $(date -r $finish)" + settitle "$title" + screen sh -c "settitle '$title'; exec python yamltest.py -p yamltest.pid $yaml" + date + sleep 180 + date + while test $(date +%s) -lt $finish + do + sleep 30 + date + ../../rcynic/rcynic + ../../rcynic/rcynic-text rcynic.xml + ../../utils/scan_roas/scan_roas rcynic-data/authenticated + date + echo "$title" + done + if test -r yamltest.pid + then + kill -INT $(cat yamltest.pid) + sleep 30 + fi + make backup +done diff --git a/ca/tests/yamltest.py b/ca/tests/yamltest.py new file mode 100644 index 00000000..5eb3bd2f --- /dev/null +++ b/ca/tests/yamltest.py @@ -0,0 +1,875 @@ +#!/usr/bin/env python + +""" +Test framework, using the same YAML test description format as +smoketest.py, but using the rpkic.py tool to do all the back-end +work. Reads YAML file, generates .csv and .conf files, runs daemons +and waits for one of them to exit. +""" + +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +# Much of the YAML handling code lifted from smoketest.py. +# +# Still to do: +# +# - Implement smoketest.py-style delta actions, that is, modify the +# allocation database under control of the YAML file, dump out new +# .csv files, and run rpkic.py again to feed resulting changes into +# running daemons. +# + +# pylint: disable=W0702,W0621 + +import subprocess +import re +import os +import argparse +import sys +import yaml +import signal +import time +import lxml.etree +import rpki.resource_set +import rpki.sundial +import rpki.config +import rpki.log +import rpki.csv_utils +import rpki.x509 +import rpki.relaxng + +# Nasty regular expressions for parsing config files. Sadly, while +# the Python ConfigParser supports writing config files, it does so in +# such a limited way that it's easier just to hack this ourselves. + +section_regexp = re.compile(r"\s*\[\s*(.+?)\s*\]\s*$") +variable_regexp = re.compile(r"\s*([-a-zA-Z0-9_]+)\s*=\s*(.+?)\s*$") + +def cleanpath(*names): + """ + Construct normalized pathnames. + """ + return os.path.normpath(os.path.join(*names)) + +# Pathnames for various things we need + +this_dir = os.getcwd() +test_dir = cleanpath(this_dir, "yamltest.dir") +rpkid_dir = cleanpath(this_dir, "..") + +prog_rpkic = cleanpath(rpkid_dir, "rpkic") +prog_rpkid = cleanpath(rpkid_dir, "rpkid") +prog_irdbd = cleanpath(rpkid_dir, "irdbd") +prog_pubd = cleanpath(rpkid_dir, "pubd") +prog_rootd = cleanpath(rpkid_dir, "rootd") + +class roa_request(object): + """ + Representation of a ROA request. 
+ """ + + def __init__(self, asn, ipv4, ipv6): + self.asn = asn + self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None + self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None + + def __eq__(self, other): + return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6 + + def __hash__(self): + v4 = tuple(self.v4) if self.v4 is not None else None + v6 = tuple(self.v6) if self.v6 is not None else None + return self.asn.__hash__() + v4.__hash__() + v6.__hash__() + + def __str__(self): + if self.v4 and self.v6: + return "%s: %s,%s" % (self.asn, self.v4, self.v6) + else: + return "%s: %s" % (self.asn, self.v4 or self.v6) + + @classmethod + def parse(cls, y): + """ + Parse a ROA request from YAML format. + """ + return cls(y.get("asn"), y.get("ipv4"), y.get("ipv6")) + + +class router_cert(object): + """ + Representation for a router_cert object. + """ + + _ecparams = None + + @classmethod + def ecparams(cls): + if cls._ecparams is None: + cls._ecparams = rpki.x509.KeyParams.generateEC() + return cls._ecparams + + def __init__(self, asn, router_id): + self.asn = rpki.resource_set.resource_set_as("".join(str(asn).split())) + self.router_id = router_id + self.keypair = rpki.x509.ECDSA.generate(self.ecparams()) + self.pkcs10 = rpki.x509.PKCS10.create(keypair = self.keypair) + self.gski = self.pkcs10.gSKI() + + def __eq__(self, other): + return self.asn == other.asn and self.router_id == other.router_id and self.gski == other.gski + + def __hash__(self): + v6 = tuple(self.v6) if self.v6 is not None else None + return tuple(self.asn).__hash__() + self.router_id.__hash__() + self.gski.__hash__() + + def __str__(self): + return "%s: %s: %s" % (self.asn, self.router_id, self.gski) + + @classmethod + def parse(cls, yaml): + return cls(yaml.get("asn"), yaml.get("router_id")) + +class allocation_db(list): + """ + Our allocation database. 
+ """ + + def __init__(self, yaml): + list.__init__(self) + self.root = allocation(yaml, self) + assert self.root.is_root + if self.root.crl_interval is None: + self.root.crl_interval = 60 * 60 + if self.root.regen_margin is None: + self.root.regen_margin = 24 * 60 * 60 + if self.root.base.valid_until is None: + self.root.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 2) + for a in self: + if a.base.valid_until is None: + a.base.valid_until = a.parent.base.valid_until + if a.crl_interval is None: + a.crl_interval = a.parent.crl_interval + if a.regen_margin is None: + a.regen_margin = a.parent.regen_margin + self.root.closure() + self.map = dict((a.name, a) for a in self) + for a in self: + if a.is_hosted: + a.hosted_by = self.map[a.hosted_by] + a.hosted_by.hosts.append(a) + assert not a.is_root and not a.hosted_by.is_hosted + + def dump(self): + """ + Show contents of allocation database. + """ + for a in self: + a.dump() + + +class allocation(object): + """ + One entity in our allocation database. Every entity in the database + is assumed to hold resources, so needs at least rpkic services. + Entities that don't have the hosted_by property run their own copies + of rpkid, irdbd, and pubd, so they also need myirbe services. + """ + + base_port = None + parent = None + crl_interval = None + regen_margin = None + rootd_port = None + engine = -1 + rpkid_port = -1 + irdbd_port = -1 + pubd_port = -1 + rsync_port = -1 + rootd_port = -1 + rpkic_counter = 0L + + @classmethod + def allocate_port(cls): + """ + Allocate a TCP port. + """ + cls.base_port += 1 + return cls.base_port + + base_engine = -1 + + @classmethod + def allocate_engine(cls): + """ + Allocate an engine number, mostly used to construct MySQL database + names. 
+ """ + cls.base_engine += 1 + return cls.base_engine + + def __init__(self, yaml, db, parent = None): + db.append(self) + self.name = yaml["name"] + self.parent = parent + self.kids = [allocation(k, db, self) for k in yaml.get("kids", ())] + valid_until = None + if "valid_until" in yaml: + valid_until = rpki.sundial.datetime.from_datetime(yaml.get("valid_until")) + if valid_until is None and "valid_for" in yaml: + valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(yaml["valid_for"]) + self.base = rpki.resource_set.resource_bag( + asn = rpki.resource_set.resource_set_as(yaml.get("asn")), + v4 = rpki.resource_set.resource_set_ipv4(yaml.get("ipv4")), + v6 = rpki.resource_set.resource_set_ipv6(yaml.get("ipv6")), + valid_until = valid_until) + if "crl_interval" in yaml: + self.crl_interval = rpki.sundial.timedelta.parse(yaml["crl_interval"]).convert_to_seconds() + if "regen_margin" in yaml: + self.regen_margin = rpki.sundial.timedelta.parse(yaml["regen_margin"]).convert_to_seconds() + self.roa_requests = [roa_request.parse(y) for y in yaml.get("roa_request", yaml.get("route_origin", ()))] + self.router_certs = [router_cert.parse(y) for y in yaml.get("router_cert", ())] + if "ghostbusters" in yaml: + self.ghostbusters = yaml.get("ghostbusters") + elif "ghostbuster" in yaml: + self.ghostbusters = [yaml.get("ghostbuster")] + else: + self.ghostbusters = [] + for r in self.roa_requests: + if r.v4: + self.base.v4 |= r.v4.to_resource_set() + if r.v6: + self.base.v6 |= r.v6.to_resource_set() + for r in self.router_certs: + self.base.asn |= r.asn + self.hosted_by = yaml.get("hosted_by") + self.hosts = [] + if not self.is_hosted: + self.engine = self.allocate_engine() + self.rpkid_port = self.allocate_port() + self.irdbd_port = self.allocate_port() + if self.runs_pubd: + self.pubd_port = self.allocate_port() + self.rsync_port = self.allocate_port() + if self.is_root: + self.rootd_port = self.allocate_port() + + def closure(self): + """ + Compute resource closure of 
this node and its children, to avoid a + lot of tedious (and error-prone) duplication in the YAML file. + """ + resources = self.base + for kid in self.kids: + resources |= kid.closure() + self.resources = resources + return resources + + def dump(self): + """ + Show content of this allocation node. + """ + print str(self) + + def __str__(self): + s = self.name + ":\n" + if self.resources.asn: s += " ASNs: %s\n" % self.resources.asn + if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4 + if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6 + if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids) + if self.parent: s += " Up: %s\n" % self.parent.name + if self.is_hosted: s += " Host: %s\n" % self.hosted_by.name + if self.hosts: s += " Hosts: %s\n" % ", ".join(h.name for h in self.hosts) + for r in self.roa_requests: s += " ROA: %s\n" % r + if not self.is_hosted: s += " IPort: %s\n" % self.irdbd_port + if self.runs_pubd: s += " PPort: %s\n" % self.pubd_port + if not self.is_hosted: s += " RPort: %s\n" % self.rpkid_port + if self.runs_pubd: s += " SPort: %s\n" % self.rsync_port + if self.is_root: s += " TPort: %s\n" % self.rootd_port + return s + " Until: %s\n" % self.resources.valid_until + + @property + def is_root(self): + """ + Is this the root node? + """ + return self.parent is None + + @property + def is_hosted(self): + """ + Is this entity hosted? + """ + return self.hosted_by is not None + + @property + def runs_pubd(self): + """ + Does this entity run a pubd? + """ + return self.is_root or not (self.is_hosted or only_one_pubd) + + def path(self, *names): + """ + Construct pathnames in this entity's test directory. + """ + return cleanpath(test_dir, self.host.name, *names) + + def csvout(self, fn): + """ + Open and log a CSV output file. + """ + path = self.path(fn) + print "Writing", path + return rpki.csv_utils.csv_writer(path) + + def up_down_url(self): + """ + Construct service URL for this node's parent. 
+ """ + return "http://localhost:%d/up-down/%s/%s" % (self.parent.host.rpkid_port, + self.parent.name, + self.name) + + def dump_asns(self): + """ + Write Autonomous System Numbers CSV file. + """ + fn = "%s.asns.csv" % d.name + if not args.skip_config: + f = self.csvout(fn) + for k in self.kids: + f.writerows((k.name, a) for a in k.resources.asn) + f.close() + if not args.stop_after_config: + self.run_rpkic("load_asns", fn) + + def dump_prefixes(self): + """ + Write prefixes CSV file. + """ + fn = "%s.prefixes.csv" % d.name + if not args.skip_config: + f = self.csvout(fn) + for k in self.kids: + f.writerows((k.name, p) for p in (k.resources.v4 + k.resources.v6)) + f.close() + if not args.stop_after_config: + self.run_rpkic("load_prefixes", fn) + + def dump_roas(self): + """ + Write ROA CSV file. + """ + fn = "%s.roas.csv" % d.name + if not args.skip_config: + f = self.csvout(fn) + for g1, r in enumerate(self.roa_requests): + f.writerows((p, r.asn, "G%08d%08d" % (g1, g2)) + for g2, p in enumerate((r.v4 + r.v6 if r.v4 and r.v6 else r.v4 or r.v6 or ()))) + f.close() + if not args.stop_after_config: + self.run_rpkic("load_roa_requests", fn) + + def dump_ghostbusters(self): + """ + Write Ghostbusters vCard file. + """ + if self.ghostbusters: + fn = "%s.ghostbusters.vcard" % d.name + if not args.skip_config: + path = self.path(fn) + print "Writing", path + f = open(path, "w") + for i, g in enumerate(self.ghostbusters): + if i: + f.write("\n") + f.write(g) + f.close() + if not args.stop_after_config: + self.run_rpkic("load_ghostbuster_requests", fn) + + def dump_router_certificates(self): + """ + Write EE certificates (router certificates, etc). 
+ """ + if self.router_certs: + fn = "%s.routercerts.xml" % d.name + if not args.skip_config: + path = self.path(fn) + print "Writing", path + xmlns = "{http://www.hactrn.net/uris/rpki/router-certificate/}" + xml = lxml.etree.Element(xmlns + "router_certificate_requests", version = "1") + for r in self.router_certs: + x = lxml.etree.SubElement(xml, xmlns + "router_certificate_request", + router_id = str(r.router_id), + asn = str(r.asn), + valid_until = str(self.resources.valid_until)) + x.text = r.pkcs10.get_Base64() + rpki.relaxng.router_certificate.assertValid(xml) + lxml.etree.ElementTree(xml).write(path, pretty_print = True) + if not args.stop_after_config: + self.run_rpkic("add_router_certificate_request", fn) + + @property + def pubd(self): + """ + Walk up tree until we find somebody who runs pubd. + """ + s = self + while not s.runs_pubd: + s = s.parent + return s + + @property + def client_handle(self): + """ + Work out what pubd configure_publication_client will call us. + """ + path = [] + s = self + if not args.flat_publication: + while not s.runs_pubd: + path.append(s) + s = s.parent + path.append(s) + return ".".join(i.name for i in reversed(path)) + + @property + def host(self): + return self.hosted_by or self + + def dump_conf(self): + """ + Write configuration file for OpenSSL and RPKI tools. 
+ """ + + r = dict( + handle = self.name, + run_rpkid = str(not self.is_hosted), + run_pubd = str(self.runs_pubd), + run_rootd = str(self.is_root), + irdbd_sql_database = "irdb%d" % self.engine, + irdbd_sql_username = "irdb", + rpkid_sql_database = "rpki%d" % self.engine, + rpkid_sql_username = "rpki", + rpkid_server_host = "localhost", + rpkid_server_port = str(self.rpkid_port), + irdbd_server_host = "localhost", + irdbd_server_port = str(self.irdbd_port), + rootd_server_port = str(self.rootd_port), + pubd_sql_database = "pubd%d" % self.engine, + pubd_sql_username = "pubd", + pubd_server_host = "localhost", + pubd_server_port = str(self.pubd.pubd_port), + publication_rsync_server = "localhost:%s" % self.pubd.rsync_port, + bpki_servers_directory = self.path(), + publication_base_directory = self.path("publication"), + shared_sql_password = "fnord") + + r.update(config_overrides) + + f = open(self.path("rpki.conf"), "w") + f.write("# Automatically generated, do not edit\n") + print "Writing", f.name + + section = None + for line in open(cleanpath(rpkid_dir, "examples/rpki.conf")): + m = section_regexp.match(line) + if m: + section = m.group(1) + m = variable_regexp.match(line) + option = m.group(1) if m and section == "myrpki" else None + if option and option in r: + line = "%s = %s\n" % (option, r[option]) + f.write(line) + + f.close() + + def dump_rsyncd(self): + """ + Write rsyncd configuration file. 
+ """ + + if self.runs_pubd: + f = open(self.path("rsyncd.conf"), "w") + print "Writing", f.name + f.writelines(s + "\n" for s in + ("# Automatically generated, do not edit", + "port = %d" % self.rsync_port, + "address = localhost", + "[rpki]", + "log file = rsyncd.log", + "read only = yes", + "use chroot = no", + "path = %s" % self.path("publication"), + "comment = RPKI test", + "[root]", + "log file = rsyncd_root.log", + "read only = yes", + "use chroot = no", + "path = %s" % self.path("publication.root"), + "comment = RPKI test root")) + f.close() + + @classmethod + def next_rpkic_counter(cls): + cls.rpkic_counter += 10000 + return str(cls.rpkic_counter) + + def run_rpkic(self, *argv): + """ + Run rpkic for this entity. + """ + cmd = [prog_rpkic, "-i", self.name, "-c", self.path("rpki.conf")] + if args.profile: + cmd.append("--profile") + cmd.append(self.path("rpkic.%s.prof" % rpki.sundial.now())) + cmd.extend(str(a) for a in argv if a is not None) + print 'Running "%s"' % " ".join(cmd) + env = os.environ.copy() + env["YAMLTEST_RPKIC_COUNTER"] = self.next_rpkic_counter() + subprocess.check_call(cmd, cwd = self.host.path(), env = env) + + def run_python_daemon(self, prog): + """ + Start a Python daemon and return a subprocess.Popen object + representing the running daemon. + """ + basename = os.path.splitext(os.path.basename(prog))[0] + cmd = [prog, "-d", "-c", self.path("rpki.conf")] + if args.profile and basename != "rootd": + cmd.append("--profile") + cmd.append(self.path(basename + ".prof")) + log = basename + ".log" + p = subprocess.Popen(cmd, + cwd = self.path(), + stdout = open(self.path(log), "w"), + stderr = subprocess.STDOUT) + print 'Running %s for %s: pid %d process %r' % (" ".join(cmd), self.name, p.pid, p) + return p + + def run_rpkid(self): + """ + Run rpkid. + """ + return self.run_python_daemon(prog_rpkid) + + def run_irdbd(self): + """ + Run irdbd. + """ + return self.run_python_daemon(prog_irdbd) + + def run_pubd(self): + """ + Run pubd. 
+ """ + return self.run_python_daemon(prog_pubd) + + def run_rootd(self): + """ + Run rootd. + """ + return self.run_python_daemon(prog_rootd) + + def run_rsyncd(self): + """ + Run rsyncd. + """ + p = subprocess.Popen(("rsync", "--daemon", "--no-detach", "--config", "rsyncd.conf"), + cwd = self.path()) + print "Running rsyncd for %s: pid %d process %r" % (self.name, p.pid, p) + return p + +def create_root_certificate(db_root): + + print "Creating rootd RPKI root certificate" + + root_resources = rpki.resource_set.resource_bag( + asn = rpki.resource_set.resource_set_as("0-4294967295"), + v4 = rpki.resource_set.resource_set_ipv4("0.0.0.0/0"), + v6 = rpki.resource_set.resource_set_ipv6("::/0")) + + root_key = rpki.x509.RSA.generate(quiet = True) + + root_uri = "rsync://localhost:%d/rpki/" % db_root.pubd.rsync_port + + root_sia = (root_uri, root_uri + "root.mft", None) + + root_cert = rpki.x509.X509.self_certify( + keypair = root_key, + subject_key = root_key.get_public(), + serial = 1, + sia = root_sia, + notAfter = rpki.sundial.now() + rpki.sundial.timedelta(days = 365), + resources = root_resources) + + f = open(db_root.path("publication.root/root.cer"), "wb") + f.write(root_cert.get_DER()) + f.close() + + f = open(db_root.path("root.key"), "wb") + f.write(root_key.get_DER()) + f.close() + + f = open(os.path.join(test_dir, "root.tal"), "w") + f.write("rsync://localhost:%d/root/root.cer\n\n" % db_root.pubd.rsync_port) + f.write(root_key.get_public().get_Base64()) + f.close() + + + +os.environ["TZ"] = "UTC" +time.tzset() + +parser = argparse.ArgumentParser(description = __doc__) +parser.add_argument("-c", "--config", + help = "configuration file") +parser.add_argument("-f", "--flat_publication", action = "store_true", + help = "disable hierarchical publication") +parser.add_argument("-k", "--keep_going", action = "store_true", + help = "keep going until all subprocesses exit") +parser.add_argument("-p", "--pidfile", + help = "save pid to this file") 
+parser.add_argument("--skip_config", action = "store_true", + help = "skip over configuration phase") +parser.add_argument("--stop_after_config", action = "store_true", + help = "stop after configuration phase") +parser.add_argument("--synchronize", action = "store_true", + help = "synchronize IRDB with daemons") +parser.add_argument("--profile", action = "store_true", + help = "enable profiling") +parser.add_argument("yaml_file", type = argparse.FileType("r"), + help = "YAML description of test network") +args = parser.parse_args() + +try: + + if args.pidfile is not None: + open(args.pidfile, "w").write("%s\n" % os.getpid()) + + rpki.log.init("yamltest", use_syslog = False) + + # Allow optional config file for this tool to override default + # passwords: this is mostly so that I can show a complete working + # example without publishing my own server's passwords. + + cfg = rpki.config.parser(args.config, "yamltest", allow_missing = True) + + only_one_pubd = cfg.getboolean("only_one_pubd", True) + allocation.base_port = cfg.getint("base_port", 4400) + + config_overrides = dict( + (k, cfg.get(k)) + for k in ("rpkid_sql_password", "irdbd_sql_password", "pubd_sql_password", + "rpkid_sql_username", "irdbd_sql_username", "pubd_sql_username") + if cfg.has_option(k)) + + # Start clean, maybe + + if not args.skip_config: + for root, dirs, files in os.walk(test_dir, topdown = False): + for fn in files: + os.unlink(os.path.join(root, fn)) + for d in dirs: + os.rmdir(os.path.join(root, d)) + + # Read first YAML doc in file and process as compact description of + # test layout and resource allocations. Ignore subsequent YAML docs, + # they're for smoketest.py, not this script. 
+ + db = allocation_db(yaml.safe_load_all(args.yaml_file).next()) + + # Show what we loaded + + #db.dump() + + if args.skip_config: + + print "Skipping pre-daemon configuration, assuming you already did that" + + else: + + # Set up each entity in our test, create publication directories, + # and initialize server BPKI. + + for d in db: + if not d.is_hosted: + os.makedirs(d.path()) + d.dump_conf() + if d.runs_pubd: + os.makedirs(d.path("publication")) + d.dump_rsyncd() + if d.is_root: + os.makedirs(d.path("publication.root")) + d.run_rpkic("initialize_server_bpki") + + # Initialize resource holding BPKI and generate self-descriptor + # for each entity. + + for d in db: + d.run_rpkic("create_identity", d.name) + + # Create RPKI root certificate. + + create_root_certificate(db.root) + + # Set up rootd. + + db.root.run_rpkic("configure_root") + + # From here on we need to pay attention to initialization order. We + # used to do all the pre-configure_daemons stuff before running any + # of the daemons, but that doesn't work right in hosted cases, so we + # have to interleave configuration with starting daemons, just as + # one would in the real world for this sort of thing. 
+ + progs = [] + + try: + + for d in db: + + if not d.is_hosted: + print + print "Running daemons for", d.name + if d.is_root: + progs.append(d.run_rootd()) + progs.append(d.run_irdbd()) + progs.append(d.run_rpkid()) + if d.runs_pubd: + progs.append(d.run_pubd()) + progs.append(d.run_rsyncd()) + + if args.synchronize or not args.skip_config: + + print + print "Giving daemons time to start up" + time.sleep(20) + assert all(p.poll() is None for p in progs) + + if args.skip_config: + + print + print "Skipping configure_*, you'll have to do that yourself if needed" + + else: + + for d in db: + + print + print "Configuring", d.name + print + if d.is_root: + assert not d.is_hosted + d.run_rpkic("configure_publication_client", + "--flat" if args.flat_publication else None, + d.path("%s.%s.repository-request.xml" % (d.name, d.name))) + print + d.run_rpkic("configure_repository", + d.path("%s.repository-response.xml" % d.client_handle)) + print + else: + d.parent.run_rpkic("configure_child", + "--valid_until", d.resources.valid_until, + d.path("%s.identity.xml" % d.name)) + print + d.run_rpkic("configure_parent", + d.parent.path("%s.%s.parent-response.xml" % (d.parent.name, d.name))) + print + d.pubd.run_rpkic("configure_publication_client", + "--flat" if args.flat_publication else None, + d.path("%s.%s.repository-request.xml" % (d.name, d.parent.name))) + print + d.run_rpkic("configure_repository", + d.pubd.path("%s.repository-response.xml" % d.client_handle)) + print + + print + print "Done with initial configuration" + print + + if args.synchronize: + print + print "Synchronizing" + print + for d in db: + if not d.is_hosted: + d.run_rpkic("synchronize") + + if args.synchronize or not args.skip_config: + print + print "Loading CSV files" + print + for d in db: + d.dump_asns() + d.dump_prefixes() + d.dump_roas() + d.dump_ghostbusters() + d.dump_router_certificates() + + # Wait until something terminates. 
+ + if not args.stop_after_config or args.keep_going: + print + print "Waiting for daemons to exit" + signal.signal(signal.SIGCHLD, lambda *dont_care: None) + while (any(p.poll() is None for p in progs) + if args.keep_going else + all(p.poll() is None for p in progs)): + signal.pause() + + finally: + + print + print "Shutting down" + print + + signal.signal(signal.SIGCHLD, signal.SIG_DFL) + + if args.profile: + how_long = 300 + else: + how_long = 30 + + how_often = how_long / 2 + + for i in xrange(how_long): + if i % how_often == 0: + for p in progs: + if p.poll() is None: + print "Politely nudging pid %d" % p.pid + p.terminate() + print + if all(p.poll() is not None for p in progs): + break + time.sleep(1) + + for p in progs: + if p.poll() is None: + print "Pulling the plug on pid %d" % p.pid + p.kill() + + for p in progs: + print "Program pid %d %r returned %d" % (p.pid, p, p.wait()) + +finally: + if args.pidfile is not None: + os.unlink(args.pidfile) diff --git a/ca/upgrade-scripts/upgrade-rpkid-to-0.5709.py b/ca/upgrade-scripts/upgrade-rpkid-to-0.5709.py new file mode 100644 index 00000000..aa8e3ec1 --- /dev/null +++ b/ca/upgrade-scripts/upgrade-rpkid-to-0.5709.py @@ -0,0 +1,38 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +""" +Upgrade RPKI SQL databases to schema expected by 0.5709. + +This code is evaluated in the context of rpki-sql-setup's +do_apply_upgrades() function and has access to its variables. +""" + +db.cur.execute(""" + CREATE TABLE ee_cert ( + ee_cert_id SERIAL NOT NULL, + ski BINARY(20) NOT NULL, + cert LONGBLOB NOT NULL, + published DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ee_cert_id), + CONSTRAINT ee_cert_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + CONSTRAINT ee_cert_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE + ) ENGINE=InnoDB +""") diff --git a/ext/POW.c b/ext/POW.c new file mode 100644 index 00000000..b5d9ccaf --- /dev/null +++ b/ext/POW.c @@ -0,0 +1,9253 @@ +/* + * This module started out as the core of Peter Shannon's "Python + * OpenSSL Wrappers" package, an excellent but somewhat dated package + * which I encountered while looking for some halfway sane way to cram + * RFC 3779 certificate support code into Python. + * + * At this point enough of the code has been added or rewritten that + * it's unclear (either way) whether this code properly qualifies as a + * derivative work. Given that both Peter's original code and all of + * subsequent changes to it were done under something equivalent to a + * BSD license, this may not matter very much, but the following + * attempts to give proper credit to all concerned. + * + **** + * + * Copyright (C) 2009--2013 Internet Systems Consortium ("ISC") + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH + * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + * AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, + * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE + * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THIS SOFTWARE. + * + **** + * + * Portions copyright (C) 2006--2008 American Registry for Internet + * Numbers ("ARIN") + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH + * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + * AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, + * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE + * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THIS SOFTWARE. + * + **** + * + * Portions Copyright (c) 2001, 2002, Peter Shannon + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * * Redistributions of source code must retain the above + * copyright notice, this list of conditions and the following + * disclaimer. + * + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials + * provided with the distribution. + * + * * The name of the contributors may be used to endorse or promote + * products derived from this software without specific prior + * written permission. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS + * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +/* $Id$ */ + +#define PY_SSIZE_T_CLEAN 1 +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include +#include +#include +#include + +/* + * GCC attribute to let us tell GCC not to whine about unused formal + * parameters when we're in maximal warning mode. + */ +#ifdef __GNUC__ +#define GCC_UNUSED __attribute__((unused)) +#else +define GCC_UNUSED +#endif + +/* + * Maximum size of a raw IP (v4 or v6) address, in bytes. + */ +#define RAW_IPADDR_BUFLEN 16 + +/* + * Maximum size of an ASN.1 Integer converted from a Python Long, in bytes. 
+ */ +#define MAX_ASN1_INTEGER_LEN 20 + +/* Digests */ +#define MD5_DIGEST 2 +#define SHA_DIGEST 3 +#define SHA1_DIGEST 4 +#define SHA256_DIGEST 6 +#define SHA384_DIGEST 7 +#define SHA512_DIGEST 8 + +/* Object format */ +#define SHORTNAME_FORMAT 1 +#define LONGNAME_FORMAT 2 +#define OIDNAME_FORMAT 3 + +/* AsymmetricParam EC curves */ +#define EC_P256_CURVE NID_X9_62_prime256v1 + +/* Object check functions */ +#define POW_X509_Check(op) PyObject_TypeCheck(op, &POW_X509_Type) +#define POW_X509Store_Check(op) PyObject_TypeCheck(op, &POW_X509Store_Type) +#define POW_X509StoreCTX_Check(op) PyObject_TypeCheck(op, &POW_X509StoreCTX_Type) +#define POW_CRL_Check(op) PyObject_TypeCheck(op, &POW_CRL_Type) +#define POW_Asymmetric_Check(op) PyObject_TypeCheck(op, &POW_Asymmetric_Type) +#define POW_AsymmetricParams_Check(op) PyObject_TypeCheck(op, &POW_AsymmetricParams_Type) +#define POW_Digest_Check(op) PyObject_TypeCheck(op, &POW_Digest_Type) +#define POW_CMS_Check(op) PyObject_TypeCheck(op, &POW_CMS_Type) +#define POW_IPAddress_Check(op) PyObject_TypeCheck(op, &POW_IPAddress_Type) +#define POW_ROA_Check(op) PyObject_TypeCheck(op, &POW_ROA_Type) +#define POW_Manifest_Check(op) PyObject_TypeCheck(op, &POW_Manifest_Type) +#define POW_ROA_Check(op) PyObject_TypeCheck(op, &POW_ROA_Type) + +static char pow_module__doc__ [] = + "Python interface to RFC-3779-enabled OpenSSL. This code is intended\n" + "to support the rpki.net toolset.\n" + "\n" + "This code started out life as Peter Shannon's excellent \"Python OpenSSL\n" + "Wrappers\" package. It has been extensively modified since then, to add\n" + "support for things needed for the RPKI protocols, to upgrade the code\n" + "to use modern (circa Python 2.7) classes, and to remove code not\n" + "needed for RPKI.\n" + ; + +#define LAME_DISCLAIMER_IN_ALL_CLASS_DOCUMENTATION \ + "The documentation for this class used to provide a nice example of how\n" \ + "to use the class. 
Sadly, most of what was in that example is now\n" \ + "obsolete due to recent or impending API changes. Once the new API is\n" \ + "stable, this documentation should be rewritten to provide such examples.\n" + +/* + * Handle NIDs we wish OpenSSL knew about. This is carefully (we + * hope) written to do nothing at all for any NID that OpenSSL knows + * about; the intent is just to add definitions for things OpenSSL + * doesn't know about yet. Of necessity, this is a bit gross, since + * it confounds runtime static variables with predefined macro names, + * but we try to put all the magic associated with this in one place. + */ + +#ifndef NID_rpkiManifest +static int NID_rpkiManifest; +#endif + +#ifndef NID_signedObject +static int NID_signedObject; +#endif + +static const struct { + int *nid; + const char *oid; + const char *sn; + const char *ln; +} missing_nids[] = { + +#ifndef NID_rpkiManifest + {&NID_rpkiManifest, "1.3.6.1.5.5.7.48.10", "id-ad-rpkiManifest", "RPKI Manifest"}, +#endif + +#ifndef NID_signedObject + {&NID_signedObject, "1.3.6.1.5.5.7.48.11", "id-ad-signedObject", "Signed Object"} +#endif + +}; + +/* + * IP versions. + */ + +typedef struct ipaddress_version { + unsigned version; + unsigned afi; + unsigned af; + unsigned length; +} ipaddress_version; + +static const ipaddress_version ipaddress_version_4 = { + 4, IANA_AFI_IPV4, AF_INET, 4 +}; + +static const ipaddress_version ipaddress_version_6 = { + 6, IANA_AFI_IPV6, AF_INET6, 16 +}; + +static const ipaddress_version * const ipaddress_versions[] = { + &ipaddress_version_4, &ipaddress_version_6 +}; + +/* + * Names of bits in the KeyUsage BitString (RFC 5280 4.2.1.3). 
+ */ + +static const char * const key_usage_bit_names[] = { + "digitalSignature", /* (0) */ + "nonRepudiation", /* (1) */ + "keyEncipherment", /* (2) */ + "dataEncipherment", /* (3) */ + "keyAgreement", /* (4) */ + "keyCertSign", /* (5) */ + "cRLSign", /* (6) */ + "encipherOnly", /* (7) */ + "decipherOnly", /* (8) */ + NULL +}; + +/* + * Exception objects. + */ + +static PyObject + *ErrorObject, + *OpenSSLErrorObject, + *POWErrorObject, + *NotVerifiedErrorObject; + +/* + * Constructor for customized datetime class. + */ + +static PyObject *custom_datetime; + +/* + * "ex_data" index for pointer we want to attach to X509_STORE_CTX so + * we can extract it in callbacks. + */ + +static int x509_store_ctx_ex_data_idx = -1; + +/* + * Declarations of type objects (definitions come later). + */ + +static PyTypeObject + POW_X509_Type, + POW_X509Store_Type, + POW_X509StoreCTX_Type, + POW_CRL_Type, + POW_Asymmetric_Type, + POW_AsymmetricParams_Type, + POW_Digest_Type, + POW_CMS_Type, + POW_IPAddress_Type, + POW_ROA_Type, + POW_Manifest_Type, + POW_ROA_Type, + POW_PKCS10_Type; + +/* + * Object internals. 
+ */ + +typedef struct { + PyObject_HEAD + unsigned char address[16]; + const struct ipaddress_version *type; +} ipaddress_object; + +typedef struct { + PyObject_HEAD + X509 *x509; +} x509_object; + +typedef struct { + PyObject_HEAD + X509_STORE *store; + PyObject *ctxclass; +} x509_store_object; + +typedef struct { + PyObject_HEAD + X509_STORE_CTX *ctx; + x509_store_object *store; +} x509_store_ctx_object; + +typedef struct { + PyObject_HEAD + X509_CRL *crl; +} crl_object; + +typedef struct { + PyObject_HEAD + EVP_PKEY *pkey; +} asymmetric_object; + +typedef struct { + PyObject_HEAD + EVP_PKEY *pkey; +} asymmetric_params_object; + +typedef struct { + PyObject_HEAD + EVP_MD_CTX digest_ctx; + int digest_type; +} digest_object; + +typedef struct { + PyObject_HEAD + CMS_ContentInfo *cms; +} cms_object; + +typedef struct { + cms_object cms; /* Subclass of CMS */ + ROA *roa; +} roa_object; + +typedef struct { + cms_object cms; /* Subclass of CMS */ + Manifest *manifest; +} manifest_object; + +typedef struct { + PyObject_HEAD + X509_REQ *pkcs10; + X509_EXTENSIONS *exts; +} pkcs10_object; + + + +/* + * Utility functions. + */ + +/* + * Minimal intervention debug-by-printf() hack, use only for good. + */ + +#if 0 +#define KVETCH(_msg_) write(2, _msg_ "\n", sizeof(_msg_)) +#else +#define KVETCH(_msg_) ((void) 0) +#endif + +#if 0 +#define ENTERING(_name_) KVETCH("Entering " #_name_ "()") +#else +#define ENTERING(_name_) ((void) 0) +#endif + +/* + * Error handling macros. All of macros assume that there's a cleanup + * label named "error" which these macros can use as a goto target. 
+ */ + +#define lose(_msg_) \ + do { \ + PyErr_SetString(POWErrorObject, (_msg_)); \ + goto error; \ + } while (0) + +#define lose_no_memory() \ + do { \ + PyErr_NoMemory(); \ + goto error; \ + } while (0) + +#define lose_type_error(_msg_) \ + do { \ + PyErr_SetString(PyExc_TypeError, (_msg_)); \ + goto error; \ + } while (0) + +#define lose_value_error(_msg_) \ + do { \ + PyErr_SetString(PyExc_ValueError, (_msg_)); \ + goto error; \ + } while (0) + +#define lose_openssl_error(_msg_) \ + do { \ + set_openssl_exception(OpenSSLErrorObject, (_msg_), 0); \ + goto error; \ + } while (0) + +#define lose_not_verified(_msg_) \ + do { \ + PyErr_SetString(NotVerifiedErrorObject, (_msg_)); \ + goto error; \ + } while (0) + +#define assert_no_unhandled_openssl_errors() \ + do { \ + if (ERR_peek_error()) { \ + set_openssl_exception(OpenSSLErrorObject, NULL, __LINE__); \ + goto error; \ + } \ + } while (0) + +#define POW_assert(_cond_) \ + do { \ + if (!(_cond_)) { \ + (void) PyErr_Format(POWErrorObject, \ + "Assertion %s failed at " __FILE__ ":%d", \ + #_cond_, __LINE__); \ + goto error; \ + } \ + } while (0) + +/* + * Consolidate some tedious EVP-related switch statements. + */ + +static const EVP_MD * +evp_digest_factory(int digest_type) +{ + switch (digest_type) { + case MD5_DIGEST: return EVP_md5(); + case SHA_DIGEST: return EVP_sha(); + case SHA1_DIGEST: return EVP_sha1(); + case SHA256_DIGEST: return EVP_sha256(); + case SHA384_DIGEST: return EVP_sha384(); + case SHA512_DIGEST: return EVP_sha512(); + default: return NULL; + } +} + +/* + * Raise an exception with data pulled from the OpenSSL error stack. + * Exception value is a tuple with some internal structure. + * + * If a string error message is supplied, that string is the first + * element of the exception value tuple. 
+ * + * If a non-zero line number is supplied, a string listing this as an + * unhandled exception detected at that line will be the next element + * of the exception value tuple (or the first, if no error message was + * supplied). + * + * Remainder of exception value tuple is zero or more tuples, each + * representing one error from the stack. + * + * Each error tuple contains six slots: + * - the numeric error code + * - string translation of numeric error code ("reason") + * - name of library in which error occurred + * - name of function in which error occurred + * - name of file in which error occurred + * - line number in file where error occurred + */ + +static void +set_openssl_exception(PyObject *error_class, const char *msg, const int unhandled_line) +{ + PyObject *errtuple = NULL; + PyObject *errlist = NULL; + unsigned long err; + const char *file; + int line; + + if ((errlist = PyList_New(0)) == NULL) + return; + + if (msg) { + PyObject *s = PyString_FromString(msg); + (void) PyList_Append(errlist, s); + Py_XDECREF(s); + } + + if (unhandled_line) { + PyObject *s = PyString_FromFormat("Unhandled OpenSSL error at " __FILE__ ":%d!", unhandled_line); + (void) PyList_Append(errlist, s); + Py_XDECREF(s); + } + + while ((err = ERR_get_error_line(&file, &line)) != 0) { + PyObject *t = Py_BuildValue("(issssi)", + err, + ERR_reason_error_string(err), + ERR_lib_error_string(err), + ERR_func_error_string(err), + file, + line); + (void) PyList_Append(errlist, t); + Py_XDECREF(t); + } + + if ((errtuple = PyList_AsTuple(errlist)) != NULL) + PyErr_SetObject(error_class, errtuple); + + Py_XDECREF(errtuple); + Py_XDECREF(errlist); +} + +static X509_NAME * +x509_object_helper_set_name(PyObject *dn_obj) +{ + PyObject *rdn_obj = NULL; + PyObject *pair_obj = NULL; + PyObject *type_obj = NULL; + PyObject *value_obj = NULL; + X509_NAME *name = NULL; + char *type_str, *value_str; + int asn1_type, i, j; + + if ((name = X509_NAME_new()) == NULL) + lose_no_memory(); + + for (i = 
0; i < PySequence_Size(dn_obj); i++) { + + if ((rdn_obj = PySequence_GetItem(dn_obj, i)) == NULL) + goto error; + + if (!PySequence_Check(rdn_obj) || PySequence_Size(rdn_obj) == 0) + lose_type_error("each RDN must be a sequence with at least one element"); + + for (j = 0; j < PySequence_Size(rdn_obj); j++) { + + if ((pair_obj = PySequence_GetItem(rdn_obj, j)) == NULL) + goto error; + + if (!PySequence_Check(pair_obj) || PySequence_Size(pair_obj) != 2) + lose_type_error("each name entry must be a two-element sequence"); + + if ((type_obj = PySequence_GetItem(pair_obj, 0)) == NULL || + (type_str = PyString_AsString(type_obj)) == NULL || + (value_obj = PySequence_GetItem(pair_obj, 1)) == NULL || + (value_str = PyString_AsString(value_obj)) == NULL) + goto error; + + if ((asn1_type = ASN1_PRINTABLE_type((unsigned char *) value_str, -1)) != V_ASN1_PRINTABLESTRING) + asn1_type = V_ASN1_UTF8STRING; + + if (!X509_NAME_add_entry_by_txt(name, type_str, asn1_type, + (unsigned char *) value_str, + strlen((char *) value_str), + -1, (j ? -1 : 0))) + lose("Unable to add name entry"); + + Py_XDECREF(pair_obj); + Py_XDECREF(type_obj); + Py_XDECREF(value_obj); + pair_obj = type_obj = value_obj = NULL; + } + + Py_XDECREF(rdn_obj); + rdn_obj = NULL; + } + + return name; + + error: + X509_NAME_free(name); + Py_XDECREF(rdn_obj); + Py_XDECREF(pair_obj); + Py_XDECREF(type_obj); + Py_XDECREF(value_obj); + return NULL; +} + +static PyObject * +x509_object_helper_get_name(X509_NAME *name, int format) +{ + X509_NAME_ENTRY *entry = NULL; + PyObject *result = NULL; + PyObject *rdn = NULL; + PyObject *item = NULL; + const char *oid = NULL; + char oidbuf[512]; + int i, set = -1; + + /* + * Overall theory here: multi-value RDNs are very rare in the wild. + * We should support them, so we don't throw an exception if handed + * one in a BPKI certificate, but with minimal effort. What we care + * about here is optimizing for the common case of single-valued RDNs. 
+ */ + + if ((result = PyTuple_New(X509_NAME_entry_count(name))) == NULL) + goto error; + + for (i = 0; i < X509_NAME_entry_count(name); i++) { + + if ((entry = X509_NAME_get_entry(name, i)) == NULL) + lose("Couldn't get certificate name"); + + if (entry->set < 0 || entry->set < set || entry->set > set + 1) + lose("X509_NAME->set value out of expected range"); + + switch (format) { + case SHORTNAME_FORMAT: + oid = OBJ_nid2sn(OBJ_obj2nid(entry->object)); + break; + case LONGNAME_FORMAT: + oid = OBJ_nid2ln(OBJ_obj2nid(entry->object)); + break; + case OIDNAME_FORMAT: + oid = NULL; + break; + default: + lose("Unknown name format"); + } + + if (oid == NULL) { + if (OBJ_obj2txt(oidbuf, sizeof(oidbuf), entry->object, 1) <= 0) + lose_openssl_error("Couldn't translate OID"); + oid = oidbuf; + } + + if (entry->set > set) { + + set++; + if ((item = Py_BuildValue("((ss#))", oid, ASN1_STRING_data(entry->value), + (Py_ssize_t) ASN1_STRING_length(entry->value))) == NULL) + goto error; + PyTuple_SET_ITEM(result, set, item); + item = NULL; + + } else { + + if ((rdn = PyTuple_GetItem(result, set)) == NULL) + goto error; + (void) _PyTuple_Resize(&rdn, PyTuple_Size(rdn) + 1); + PyTuple_SET_ITEM(result, set, rdn); + if (rdn == NULL) + goto error; + if ((item = Py_BuildValue("(ss#)", oid, ASN1_STRING_data(entry->value), + (Py_ssize_t) ASN1_STRING_length(entry->value))) == NULL) + goto error; + PyTuple_SetItem(rdn, PyTuple_Size(rdn) - 1, item); + rdn = item = NULL; + + } + } + + if (++set != PyTuple_Size(result)) { + if (set < 0 || set > PyTuple_Size(result)) + lose("Impossible set count for DN, something went horribly wrong"); + _PyTuple_Resize(&result, set); + } + + return result; + + error: + Py_XDECREF(item); + Py_XDECREF(result); + return NULL; +} + +static STACK_OF(X509) * +x509_helper_iterable_to_stack(PyObject *iterable) +{ + STACK_OF(X509) *stack = NULL; + PyObject *iterator = NULL; + PyObject *item = NULL; + + if ((stack = sk_X509_new_null()) == NULL) + lose_no_memory(); + + if 
(iterable != Py_None) { + + if ((iterator = PyObject_GetIter(iterable)) == NULL) + goto error; + + while ((item = PyIter_Next(iterator)) != NULL) { + + if (!POW_X509_Check(item)) + lose_type_error("Inapropriate type"); + + if (!sk_X509_push(stack, ((x509_object *) item)->x509)) + lose("Couldn't add X509 object to stack"); + + Py_XDECREF(item); + item = NULL; + } + } + + Py_XDECREF(iterator); + return stack; + + error: + Py_XDECREF(iterator); + Py_XDECREF(item); + sk_X509_free(stack); + return NULL; +} + +/* + * Pull items off an OpenSSL STACK and put them into a Python tuple. + * Assumes that handler is stealing the OpenSSL references to the + * items in the STACK, so shifts consumed frames off the stack so that + * the appropriate _pop_free() destructor can clean up on failures. + * This is OK because all current uses of this function are processing + * the result of OpenSSL xxx_get1_xxx() methods which we have to free + * in any case. + */ + +static x509_object *x509_object_new_helper(PyTypeObject *, X509 *); +static crl_object *crl_object_new_helper (PyTypeObject *, X509_CRL *); + +static PyObject * +stack_to_tuple_helper(_STACK *sk, PyObject *(*handler)(void *)) +{ + PyObject *result = NULL; + PyObject *obj = NULL; + int i; + + if ((result = PyTuple_New(sk_num(sk))) == NULL) + goto error; + + for (i = 0; sk_num(sk); i++) { + if ((obj = handler(sk_value(sk, 0))) == NULL) + goto error; + sk_shift(sk); + if (PyTuple_SetItem(result, i, obj) != 0) + goto error; + obj = NULL; + } + + return result; + + error: + + Py_XDECREF(obj); + return NULL; +} + +static PyObject * +stack_to_tuple_helper_get_x509(void *cert) +{ + x509_object *obj; + + ENTERING(stack_to_tuple_helper_get_x509); + + if ((obj = x509_object_new_helper(NULL, cert)) == NULL) + return NULL; + + return (PyObject *) obj; +} + +static PyObject * +stack_to_tuple_helper_get_crl(void *crl) +{ + crl_object *obj; + + ENTERING(stack_to_tuple_helper_get_crl); + + if ((obj = crl_object_new_helper(NULL, crl)) == 
NULL) + return NULL; + + return (PyObject *) obj; +} + +/* + * Time conversion functions. Obvious mapping into Python data types + * is datetime, or, rather, our customized rpki.sundial.datetime. + * + * Unsuprisingly, it's easiest for us to map between GeneralizedTime + * (as restricted by RFC 5280) and datetime. Conversion between + * GeneralizedTime and UTCTime is handled automatically according to + * the RFC 5280 rules for those ASN.1 types where it's required. + */ + +static PyObject * +ASN1_TIME_to_Python(ASN1_TIME *t) +{ + ASN1_GENERALIZEDTIME *g = NULL; + PyObject *result = NULL; + int year, month, day, hour, minute, second; + + if ((g = ASN1_TIME_to_generalizedtime(t, NULL)) == NULL) + lose_openssl_error("Couldn't convert ASN.1 TIME"); + + if (sscanf((char *) g->data, "%4d%2d%2d%2d%2d%2dZ", + &year, &month, &day, &hour, &minute, &second) != 6) + lose("Couldn't scan ASN.1 TIME value"); + + if (custom_datetime != NULL && custom_datetime != Py_None) + result = PyObject_CallFunction(custom_datetime, "iiiiii", + year, month, day, hour, minute, second); + else + result = PyDateTime_FromDateAndTime(year, month, day, hour, minute, second, 0); + + error: + ASN1_GENERALIZEDTIME_free(g); + return result; +} + +static ASN1_TIME * +Python_to_ASN1_TIME(PyObject *arg, const int object_requires_utctime) +{ + char buf[sizeof("20010401123456Z") + 1]; + ASN1_TIME *result = NULL; + const char *s = NULL; + int ok; + + if (PyDateTime_Check(arg)) { + if (snprintf(buf, sizeof(buf), "%4d%02d%02d%02d%02d%02dZ", + PyDateTime_GET_YEAR(arg), + PyDateTime_GET_MONTH(arg), + PyDateTime_GET_DAY(arg), + PyDateTime_DATE_GET_HOUR(arg), + PyDateTime_DATE_GET_MINUTE(arg), + PyDateTime_DATE_GET_SECOND(arg)) >= (int) sizeof(buf)) + lose("Internal error -- GeneralizedTime buffer too small"); + s = buf; + } + + if (s == NULL && (s = PyString_AsString(arg)) == NULL) + goto error; + + if (strlen(s) < 10) + lose_type_error("String is too short to parse as a valid ASN.1 TIME"); + + if ((result = 
ASN1_TIME_new()) == NULL) + lose_no_memory(); + + if (object_requires_utctime && + ((s[0] == '1' && s[1] == '9' && s[2] > '4') || + (s[0] == '2' && s[1] == '0' && s[2] < '5'))) + ok = ASN1_UTCTIME_set_string(result, s + 2); + else + ok = ASN1_GENERALIZEDTIME_set_string(result, s); + + if (ok) + return result; + + error: + ASN1_TIME_free(result); + return NULL; +} + +/* + * Extract a Python string from a memory BIO. + */ +static PyObject * +BIO_to_PyString_helper(BIO *bio) +{ + char *ptr = NULL; + Py_ssize_t len = 0; + + if ((len = BIO_get_mem_data(bio, &ptr)) == 0) + lose_openssl_error("Unable to get BIO data"); + + return Py_BuildValue("s#", ptr, len); + + error: + return NULL; +} + +static PyObject * +read_from_string_helper(PyObject *(*object_read_helper)(PyTypeObject *, BIO *), + PyTypeObject *type, + PyObject *args) +{ + PyObject *result = NULL; + char *src = NULL; + BIO *bio = NULL; + Py_ssize_t len = 0; + + if (!PyArg_ParseTuple(args, "s#", &src, &len)) + goto error; + + if ((bio = BIO_new_mem_buf(src, len)) == NULL) + lose_no_memory(); + + result = object_read_helper(type, bio); + + error: + BIO_free(bio); + return result; +} + +static PyObject * +read_from_file_helper(PyObject *(*object_read_helper)(PyTypeObject *, BIO *), + PyTypeObject *type, + PyObject *args) +{ + const char *filename = NULL; + PyObject *result = NULL; + BIO *bio = NULL; + + if (!PyArg_ParseTuple(args, "s", &filename)) + goto error; + + if ((bio = BIO_new_file(filename, "rb")) == NULL) + lose_openssl_error("Could not open file"); + + result = object_read_helper(type, bio); + + error: + BIO_free(bio); + return result; +} + +/* + * Simplify entries in method definition tables. See the "Common + * Object Structures" section of the API manual for available flags. 
+ */ +#define Define_Method(__python_name__, __c_name__, __flags__) \ + { #__python_name__, (PyCFunction) __c_name__, __flags__, __c_name__##__doc__ } + +#define Define_Class_Method(__python_name__, __c_name__, __flags__) \ + Define_Method(__python_name__, __c_name__, (__flags__) | METH_CLASS) + +/* + * Convert an ASN1_INTEGER into a Python integer or long. + */ +static PyObject * +ASN1_INTEGER_to_PyLong(ASN1_INTEGER *arg) +{ + PyObject *result = NULL; + PyObject *obj = NULL; + + if ((obj = _PyLong_FromByteArray(ASN1_STRING_data(arg), + ASN1_STRING_length(arg), + 0, 0)) != NULL) + result = PyNumber_Int(obj); + + Py_XDECREF(obj); + return result; +} + +/* + * Convert a Python long to an ASN1_INTEGER. + * This is just nasty, do not read on a full stomach. + * + * Maximum size of integer to be converted here is taken from RFC 5280 + * 4.1.2.2, which sets a maximum of 20 octets for an X.509 certificate + * serial number. + * + * In theory we could use _PyLong_NumBits() to determine the length of + * the long before converting, and raise OverflowError if it's too big. + * Hmm. + */ +static ASN1_INTEGER * +PyLong_to_ASN1_INTEGER(PyObject *arg) +{ + PyObject *obj = NULL; + ASN1_INTEGER *a = NULL; + unsigned char buf[MAX_ASN1_INTEGER_LEN]; + size_t len; + + memset(buf, 0, sizeof(buf)); + + /* + * Make sure argument is a PyLong small enough that its length (in + * bits!) doesn't overflow a size_t (which is a mis-use of size_t, + * but take that up with whoever wrote _PyLong_NumBits()...). + */ + if ((obj = PyNumber_Long(arg)) == NULL || + (len = _PyLong_NumBits(obj)) == (size_t) -1) + goto error; + + /* + * Next make sure it's a non-negative integer small enough to fit in + * our buffer. If we really thought we needed to support larger + * integers we could allocate this dynamically, but we don't, so + * it's not worth the overhead. 
+ * + * Paranoia: We can't convert len to bytes yet, because that + * requires rounding up and we don't know yet that we have enough + * headroom to do that arithmetic without overflowing a size_t. + */ + if (_PyLong_Sign(obj) < 0 || (len / 8) + 1 > sizeof(buf)) { + PyErr_SetObject(PyExc_OverflowError, obj); + goto error; + } + + /* + * Now that we know we're dealing with a sane number of bits, + * convert it to bytes. + */ + len = (len + 7) / 8; + + /* + * Extract that many bytes. + */ + if (_PyLong_AsByteArray((PyLongObject *) obj, buf, len, 0, 0) < 0) + goto error; + + /* + * We're done with the PyLong now. + */ + Py_XDECREF(obj); + obj = NULL; + + /* + * Generate the ASN1_INTEGER and return it. + */ + if ((a = ASN1_INTEGER_new()) == NULL || + (a->length < (int) len + 1 && (a->data = OPENSSL_realloc(a->data, len + 1)) == NULL)) + lose_no_memory(); + + a->type = V_ASN1_INTEGER; + a->length = len; + a->data[len] = 0; + memcpy(a->data, buf, len); + + return a; + + error: + Py_XDECREF(obj); + ASN1_INTEGER_free(a); + return NULL; +} + +/* + * Handle missing NIDs. + */ + +static int +create_missing_nids(void) +{ + int i; + + for (i = 0; i < (int) (sizeof(missing_nids) / sizeof(*missing_nids)); i++) + if ((*missing_nids[i].nid = OBJ_txt2nid(missing_nids[i].oid)) == NID_undef && + (*missing_nids[i].nid = OBJ_create(missing_nids[i].oid, + missing_nids[i].sn, + missing_nids[i].ln)) == NID_undef) + return 0; + + return 1; +} + +static PyObject * +ASN1_OBJECT_to_PyString(const ASN1_OBJECT *oid) +{ + PyObject *result = NULL; + char buf[512]; + + ENTERING(ASN1_OBJECT_to_PyString); + + if (OBJ_obj2txt(buf, sizeof(buf), oid, 1) <= 0) + lose_openssl_error("Couldn't translate OID"); + + result = PyString_FromString(buf); + + error: + return result; +} + + + +/* + * Extension functions. 
Calling sequence here is a little weird, + * because it turns out that the simplest way to avoid massive + * duplication of code between classes is to work directly with + * X509_EXTENSIONS objects. + */ + +static PyObject * +extension_get_key_usage(X509_EXTENSIONS **exts) +{ + ASN1_BIT_STRING *ext = NULL; + PyObject *result = NULL; + PyObject *token = NULL; + int bit = -1; + + ENTERING(extension_get_key_usage); + + if (!exts) + goto error; + + if ((ext = X509V3_get_d2i(*exts, NID_key_usage, NULL, NULL)) == NULL) + Py_RETURN_NONE; + + if ((result = PyFrozenSet_New(NULL)) == NULL) + goto error; + + for (bit = 0; key_usage_bit_names[bit] != NULL; bit++) { + if (ASN1_BIT_STRING_get_bit(ext, bit) && + ((token = PyString_FromString(key_usage_bit_names[bit])) == NULL || + PySet_Add(result, token) < 0)) + goto error; + Py_XDECREF(token); + token = NULL; + } + + ASN1_BIT_STRING_free(ext); + return result; + + error: + ASN1_BIT_STRING_free(ext); + Py_XDECREF(token); + Py_XDECREF(result); + return NULL; +} + +static PyObject * +extension_set_key_usage(X509_EXTENSIONS **exts, PyObject *args) +{ + ASN1_BIT_STRING *ext = NULL; + PyObject *iterable = NULL; + PyObject *critical = Py_True; + PyObject *iterator = NULL; + PyObject *item = NULL; + const char *token; + int bit = -1; + int ok = 0; + + ENTERING(extension_set_key_usage); + + if (!exts) + goto error; + + if ((ext = ASN1_BIT_STRING_new()) == NULL) + lose_no_memory(); + + if (!PyArg_ParseTuple(args, "O|O", &iterable, &critical) || + (iterator = PyObject_GetIter(iterable)) == NULL) + goto error; + + while ((item = PyIter_Next(iterator)) != NULL) { + + if ((token = PyString_AsString(item)) == NULL) + goto error; + + for (bit = 0; key_usage_bit_names[bit] != NULL; bit++) + if (!strcmp(token, key_usage_bit_names[bit])) + break; + + if (key_usage_bit_names[bit] == NULL) + lose("Unrecognized KeyUsage token"); + + if (!ASN1_BIT_STRING_set_bit(ext, bit, 1)) + lose_no_memory(); + + Py_XDECREF(item); + item = NULL; + } + + if 
(!X509V3_add1_i2d(exts, NID_key_usage, ext, + PyObject_IsTrue(critical), + X509V3_ADD_REPLACE)) + lose_openssl_error("Couldn't add KeyUsage extension to OpenSSL object"); + + ok = 1; + + error: /* Fall through */ + ASN1_BIT_STRING_free(ext); + Py_XDECREF(iterator); + Py_XDECREF(item); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +static PyObject * +extension_get_basic_constraints(X509_EXTENSIONS **exts) +{ + BASIC_CONSTRAINTS *ext = NULL; + PyObject *result = NULL; + + ENTERING(extension_get_basic_constraints); + + if (!exts) + goto error; + + if ((ext = X509V3_get_d2i(*exts, NID_basic_constraints, NULL, NULL)) == NULL) + Py_RETURN_NONE; + + if (ext->pathlen == NULL) + result = Py_BuildValue("(NO)", PyBool_FromLong(ext->ca), Py_None); + else + result = Py_BuildValue("(Nl)", PyBool_FromLong(ext->ca), ASN1_INTEGER_get(ext->pathlen)); + + error: + BASIC_CONSTRAINTS_free(ext); + return result; +} + +static PyObject * +extension_set_basic_constraints(X509_EXTENSIONS **exts, PyObject *args) +{ + BASIC_CONSTRAINTS *ext = NULL; + PyObject *is_ca = NULL; + PyObject *pathlen_obj = Py_None; + PyObject *critical = Py_True; + long pathlen = -1; + int ok = 0; + + ENTERING(extension_set_basic_constraints); + + if (!exts) + goto error; + + if (!PyArg_ParseTuple(args, "O|OO", &is_ca, &pathlen_obj, &critical)) + goto error; + + if (pathlen_obj != Py_None && (pathlen = PyInt_AsLong(pathlen_obj)) < 0) + lose_type_error("Bad pathLenConstraint value"); + + if ((ext = BASIC_CONSTRAINTS_new()) == NULL) + lose_no_memory(); + + ext->ca = PyObject_IsTrue(is_ca) ? 
0xFF : 0; + + if (pathlen_obj != Py_None && + ((ext->pathlen == NULL && (ext->pathlen = ASN1_INTEGER_new()) == NULL) || + !ASN1_INTEGER_set(ext->pathlen, pathlen))) + lose_no_memory(); + + if (!X509V3_add1_i2d(exts, NID_basic_constraints, ext, + PyObject_IsTrue(critical), X509V3_ADD_REPLACE)) + lose_openssl_error("Couldn't add BasicConstraints extension to OpenSSL object"); + + ok = 1; + + error: + BASIC_CONSTRAINTS_free(ext); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +static PyObject * +extension_get_sia(X509_EXTENSIONS **exts) +{ + AUTHORITY_INFO_ACCESS *ext = NULL; + PyObject *result = NULL; + PyObject *result_caRepository = NULL; + PyObject *result_rpkiManifest = NULL; + PyObject *result_signedObject = NULL; + int n_caRepository = 0; + int n_rpkiManifest = 0; + int n_signedObject = 0; + const char *uri; + PyObject *obj; + int i, nid; + + ENTERING(pkcs10_object_get_sia); + + if (!exts) + goto error; + + if ((ext = X509V3_get_d2i(*exts, NID_sinfo_access, NULL, NULL)) == NULL) + Py_RETURN_NONE; + + /* + * Easiest to do this in two passes, first pass just counts URIs. 
+ */ + + for (i = 0; i < sk_ACCESS_DESCRIPTION_num(ext); i++) { + ACCESS_DESCRIPTION *a = sk_ACCESS_DESCRIPTION_value(ext, i); + if (a->location->type != GEN_URI) + continue; + nid = OBJ_obj2nid(a->method); + if (nid == NID_caRepository) { + n_caRepository++; + continue; + } + if (nid == NID_rpkiManifest) { + n_rpkiManifest++; + continue; + } + if (nid == NID_signedObject) { + n_signedObject++; + continue; + } + } + + if (((result_caRepository = PyTuple_New(n_caRepository)) == NULL) || + ((result_rpkiManifest = PyTuple_New(n_rpkiManifest)) == NULL) || + ((result_signedObject = PyTuple_New(n_signedObject)) == NULL)) + goto error; + + n_caRepository = n_rpkiManifest = n_signedObject = 0; + + for (i = 0; i < sk_ACCESS_DESCRIPTION_num(ext); i++) { + ACCESS_DESCRIPTION *a = sk_ACCESS_DESCRIPTION_value(ext, i); + if (a->location->type != GEN_URI) + continue; + nid = OBJ_obj2nid(a->method); + uri = (char *) ASN1_STRING_data(a->location->d.uniformResourceIdentifier); + if (nid == NID_caRepository) { + if ((obj = PyString_FromString(uri)) == NULL) + goto error; + PyTuple_SET_ITEM(result_caRepository, n_caRepository++, obj); + continue; + } + if (nid == NID_rpkiManifest) { + if ((obj = PyString_FromString(uri)) == NULL) + goto error; + PyTuple_SET_ITEM(result_rpkiManifest, n_rpkiManifest++, obj); + continue; + } + if (nid == NID_signedObject) { + if ((obj = PyString_FromString(uri)) == NULL) + goto error; + PyTuple_SET_ITEM(result_signedObject, n_signedObject++, obj); + continue; + } + } + + result = Py_BuildValue("(OOO)", + result_caRepository, + result_rpkiManifest, + result_signedObject); + + error: + AUTHORITY_INFO_ACCESS_free(ext); + Py_XDECREF(result_caRepository); + Py_XDECREF(result_rpkiManifest); + Py_XDECREF(result_signedObject); + return result; +} + +static PyObject * +extension_set_sia(X509_EXTENSIONS **exts, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"caRepository", "rpkiManifest", "signedObject", NULL}; + AUTHORITY_INFO_ACCESS *ext = NULL; + 
PyObject *caRepository = Py_None; + PyObject *rpkiManifest = Py_None; + PyObject *signedObject = Py_None; + PyObject *iterator = NULL; + ASN1_OBJECT *oid = NULL; + PyObject **pobj = NULL; + PyObject *item = NULL; + ACCESS_DESCRIPTION *a = NULL; + int i, nid = NID_undef, ok = 0; + Py_ssize_t urilen; + char *uri; + + ENTERING(extension_set_sia); + + if (!exts) + goto error; + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOO", kwlist, + &caRepository, &rpkiManifest, &signedObject)) + goto error; + + if ((ext = AUTHORITY_INFO_ACCESS_new()) == NULL) + lose_no_memory(); + + /* + * This is going to want refactoring, because it's ugly, because we + * want to reuse code for AIA, and because it'd be nice to support a + * single URI as an abbreviation for a collection containing one URI. + */ + + for (i = 0; i < 3; i++) { + switch (i) { + case 0: pobj = &caRepository; nid = NID_caRepository; break; + case 1: pobj = &rpkiManifest; nid = NID_rpkiManifest; break; + case 2: pobj = &signedObject; nid = NID_signedObject; break; + } + + if (*pobj == Py_None) + continue; + + if ((oid = OBJ_nid2obj(nid)) == NULL) + lose_openssl_error("Couldn't find SIA accessMethod OID"); + + if ((iterator = PyObject_GetIter(*pobj)) == NULL) + goto error; + + while ((item = PyIter_Next(iterator)) != NULL) { + + if (PyString_AsStringAndSize(item, &uri, &urilen) < 0) + goto error; + + if ((a = ACCESS_DESCRIPTION_new()) == NULL || + (a->method = OBJ_dup(oid)) == NULL || + (a->location->d.uniformResourceIdentifier = ASN1_IA5STRING_new()) == NULL || + !ASN1_OCTET_STRING_set(a->location->d.uniformResourceIdentifier, (unsigned char *) uri, urilen)) + lose_no_memory(); + + a->location->type = GEN_URI; + + if (!sk_ACCESS_DESCRIPTION_push(ext, a)) + lose_no_memory(); + + a = NULL; + Py_XDECREF(item); + item = NULL; + } + + Py_XDECREF(iterator); + iterator = NULL; + } + + if (!X509V3_add1_i2d(exts, NID_sinfo_access, ext, 0, X509V3_ADD_REPLACE)) + lose_openssl_error("Couldn't add SIA extension to OpenSSL 
object"); + + ok = 1; + + error: + AUTHORITY_INFO_ACCESS_free(ext); + ACCESS_DESCRIPTION_free(a); + Py_XDECREF(item); + Py_XDECREF(iterator); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +static PyObject * +extension_get_eku(X509_EXTENSIONS **exts) +{ + EXTENDED_KEY_USAGE *ext = NULL; + PyObject *result = NULL; + PyObject *oid = NULL; + int i; + + ENTERING(extension_get_eku); + + if (!exts) + goto error; + + if ((ext = X509V3_get_d2i(*exts, NID_ext_key_usage, NULL, NULL)) == NULL) + Py_RETURN_NONE; + + if ((result = PyFrozenSet_New(NULL)) == NULL) + goto error; + + for (i = 0; i < sk_ASN1_OBJECT_num(ext); i++) { + if ((oid = ASN1_OBJECT_to_PyString(sk_ASN1_OBJECT_value(ext, i))) == NULL || + PySet_Add(result, oid) < 0) + goto error; + Py_XDECREF(oid); + oid = NULL; + } + + sk_ASN1_OBJECT_pop_free(ext, ASN1_OBJECT_free); + return result; + + error: + sk_ASN1_OBJECT_pop_free(ext, ASN1_OBJECT_free); + Py_XDECREF(oid); + Py_XDECREF(result); + return NULL; +} + +static PyObject * +extension_set_eku(X509_EXTENSIONS **exts, PyObject *args) +{ + EXTENDED_KEY_USAGE *ext = NULL; + PyObject *iterable = NULL; + PyObject *critical = Py_False; + PyObject *iterator = NULL; + PyObject *item = NULL; + ASN1_OBJECT *obj = NULL; + const char *txt; + int ok = 0; + + ENTERING(extension_set_eku); + + if (!exts) + goto error; + + if ((ext = sk_ASN1_OBJECT_new_null()) == NULL) + lose_no_memory(); + + if (!PyArg_ParseTuple(args, "O|O", &iterable, &critical) || + (iterator = PyObject_GetIter(iterable)) == NULL) + goto error; + + while ((item = PyIter_Next(iterator)) != NULL) { + + if ((txt = PyString_AsString(item)) == NULL) + goto error; + + if ((obj = OBJ_txt2obj(txt, 1)) == NULL) + lose("Couldn't parse OID"); + + if (!sk_ASN1_OBJECT_push(ext, obj)) + lose_no_memory(); + + obj = NULL; + Py_XDECREF(item); + item = NULL; + } + + if (sk_ASN1_OBJECT_num(ext) < 1) + lose("Empty ExtendedKeyUsage extension"); + + if (!X509V3_add1_i2d(exts, NID_ext_key_usage, ext, + 
PyObject_IsTrue(critical), + X509V3_ADD_REPLACE)) + lose_openssl_error("Couldn't add ExtendedKeyUsage extension to OpenSSL object"); + + ok = 1; + + error: /* Fall through */ + sk_ASN1_OBJECT_pop_free(ext, ASN1_OBJECT_free); + Py_XDECREF(item); + Py_XDECREF(iterator); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +static PyObject * +extension_get_ski(X509_EXTENSIONS **exts) +{ + ASN1_OCTET_STRING *ext = NULL; + PyObject *result = NULL; + + ENTERING(extension_get_ski); + + if (!exts) + goto error; + + if ((ext = X509V3_get_d2i(*exts, NID_subject_key_identifier, NULL, NULL)) == NULL) + Py_RETURN_NONE; + + result = Py_BuildValue("s#", ASN1_STRING_data(ext), + (Py_ssize_t) ASN1_STRING_length(ext)); + + error: /* Fall through */ + ASN1_OCTET_STRING_free(ext); + return result; +} + +static PyObject * +extension_set_ski(X509_EXTENSIONS **exts, PyObject *args) +{ + ASN1_OCTET_STRING *ext = NULL; + const unsigned char *buf = NULL; + Py_ssize_t len; + int ok = 0; + + ENTERING(extension_set_ski); + + if (!exts) + goto error; + + if (!PyArg_ParseTuple(args, "s#", &buf, &len)) + goto error; + + if ((ext = ASN1_OCTET_STRING_new()) == NULL || + !ASN1_OCTET_STRING_set(ext, buf, len)) + lose_no_memory(); + + /* + * RFC 5280 says this MUST be non-critical. 
+ */ + + if (!X509V3_add1_i2d(exts, NID_subject_key_identifier, + ext, 0, X509V3_ADD_REPLACE)) + lose_openssl_error("Couldn't add SKI extension to OpenSSL object"); + + ok = 1; + + error: + ASN1_OCTET_STRING_free(ext); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +static PyObject * +extension_get_aki(X509_EXTENSIONS **exts) +{ + AUTHORITY_KEYID *ext = NULL; + PyObject *result = NULL; + + ENTERING(extension_get_aki); + + if (!exts) + goto error; + + if ((ext = X509V3_get_d2i(*exts, NID_authority_key_identifier, NULL, NULL)) == NULL) + Py_RETURN_NONE; + + result = Py_BuildValue("s#", ASN1_STRING_data(ext->keyid), + (Py_ssize_t) ASN1_STRING_length(ext->keyid)); + + error: /* Fall through */ + AUTHORITY_KEYID_free(ext); + return result; +} + +static PyObject * +extension_set_aki(X509_EXTENSIONS **exts, PyObject *args) +{ + AUTHORITY_KEYID *ext = NULL; + const unsigned char *buf = NULL; + Py_ssize_t len; + int ok = 0; + + ENTERING(extension_set_aki); + + assert (exts); + + if (!PyArg_ParseTuple(args, "s#", &buf, &len)) + goto error; + + if ((ext = AUTHORITY_KEYID_new()) == NULL || + (ext->keyid == NULL && (ext->keyid = ASN1_OCTET_STRING_new()) == NULL) || + !ASN1_OCTET_STRING_set(ext->keyid, buf, len)) + lose_no_memory(); + + /* + * RFC 5280 says this MUST be non-critical. + */ + + if (!X509V3_add1_i2d(exts, NID_authority_key_identifier, + ext, 0, X509V3_ADD_REPLACE)) + lose_openssl_error("Couldn't add AKI extension to OpenSSL object"); + + ok = 1; + + error: + AUTHORITY_KEYID_free(ext); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + + + +/* + * IPAddress object. 
+ */ + +static PyObject * +ipaddress_object_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"initializer", "version", NULL}; + ipaddress_object *self = NULL; + PyObject *init = NULL; + PyObject *pylong = NULL; + int version = 0; + const char *s = NULL; + int v; + + ENTERING(ipaddress_object_new); + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|i", kwlist, &init, &version) || + (self = (ipaddress_object *) type->tp_alloc(type, 0)) == NULL) + goto error; + + if (POW_IPAddress_Check(init)) { + ipaddress_object *src = (ipaddress_object *) init; + memcpy(self->address, src->address, sizeof(self->address)); + self->type = src->type; + return (PyObject *) self; + } + + if ((s = PyString_AsString(init)) == NULL) + PyErr_Clear(); + else if (version == 0) + version = strchr(s, ':') ? 6 : 4; + + self->type = NULL; + + for (v = 0; v < (int) (sizeof(ipaddress_versions)/sizeof(*ipaddress_versions)); v++) + if ((unsigned) version == ipaddress_versions[v]->version) + self->type = ipaddress_versions[v]; + + if (self->type == NULL) + lose("Unknown IP version number"); + + if (s != NULL) { + if (inet_pton(self->type->af, s, self->address) <= 0) + lose("Couldn't parse IP address"); + return (PyObject *) self; + } + + if ((pylong = PyNumber_Long(init)) != NULL) { + if (_PyLong_AsByteArray((PyLongObject *) pylong, self->address, self->type->length, 0, 0) < 0) + goto error; + Py_XDECREF(pylong); + return (PyObject *) self; + } + + lose_type_error("Couldn't convert initializer to IPAddress"); + + error: + Py_XDECREF(self); + Py_XDECREF(pylong); + return NULL; +} + +static PyObject * +ipaddress_object_str(ipaddress_object *self) +{ + char addrstr[sizeof("aaaa:bbbb:cccc:dddd:eeee:ffff:255.255.255.255") + 1]; + + ENTERING(ipaddress_object_str); + + if (!inet_ntop(self->type->af, self->address, addrstr, sizeof(addrstr))) + lose("Couldn't convert IP address"); + + return PyString_FromString(addrstr); + + error: + return NULL; +} + +static PyObject * 
+ipaddress_object_repr(ipaddress_object *self) +{ + char addrstr[sizeof("aaaa:bbbb:cccc:dddd:eeee:ffff:255.255.255.255") + 1]; + + ENTERING(ipaddress_object_repr); + + if (!inet_ntop(self->type->af, self->address, addrstr, sizeof(addrstr))) + lose("Couldn't convert IP address"); + + return PyString_FromFormat("<%s object %s at %p>", + self->ob_type->tp_name, addrstr, self); + + error: + return NULL; +} + +static int +ipaddress_object_compare(PyObject *arg1, PyObject *arg2) +{ + PyObject *obj1 = PyNumber_Long(arg1); + PyObject *obj2 = PyNumber_Long(arg2); + int cmp = -1; + + ENTERING(ipaddress_object_compare); + + if (obj1 != NULL && obj2 != NULL) + cmp = PyObject_Compare(obj1, obj2); + + Py_XDECREF(obj1); + Py_XDECREF(obj2); + return cmp; +} + +static PyObject * +ipaddress_object_richcompare(PyObject *arg1, PyObject *arg2, int op) +{ + PyObject *obj1 = PyNumber_Long(arg1); + PyObject *obj2 = PyNumber_Long(arg2); + PyObject *result = NULL; + + ENTERING(ipaddress_object_richcompare); + + if (obj1 != NULL && obj2 != NULL) + result = PyObject_RichCompare(obj1, obj2, op); + + Py_XDECREF(obj1); + Py_XDECREF(obj2); + return result; +} + +static long +ipaddress_object_hash(ipaddress_object *self) +{ + unsigned long h = 0; + int i; + + ENTERING(ipaddress_object_hash); + + for (i = 0; (unsigned) i < self->type->length; i++) + h ^= self->address[i] << ((i & 3) << 3); + + return (long) h == -1 ? 
0 : (long) h; +} + +static char ipaddress_object_from_bytes__doc__[] = + "Construct an IPAddress object from a sequence of bytes.\n" + "\n" + "Argument must be a Python string of exactly 4 or 16 bytes.\n" + ; + +static PyObject * +ipaddress_object_from_bytes(PyTypeObject *type, PyObject *args) +{ + ipaddress_object *result = NULL; + char *bytes = NULL; + Py_ssize_t len; + int v; + + ENTERING(ipaddress_object_from_bytes); + + if (!PyArg_ParseTuple(args, "s#", &bytes, &len)) + goto error; + + if ((result = (ipaddress_object *) type->tp_alloc(type, 0)) == NULL) + goto error; + + result->type = NULL; + + for (v = 0; v < (int) (sizeof(ipaddress_versions)/sizeof(*ipaddress_versions)); v++) + if (len == ipaddress_versions[v]->length) + result->type = ipaddress_versions[v]; + + if (result->type == NULL) + lose("Unknown IP version number"); + + memcpy(result->address, bytes, len); + return (PyObject *) result; + + error: + Py_XDECREF(result); + return NULL; +} + +static char ipaddress_object_to_bytes__doc__[] = + "Return the binary value of this IPAddress as a Python string\n" + "of exactly 4 or 16 bytes.\n" + ; + +static PyObject * +ipaddress_object_to_bytes(ipaddress_object *self) +{ + ENTERING(ipaddress_object_from_bytes); + return PyString_FromStringAndSize((char *) self->address, self->type->length); +} + +static PyObject * +ipaddress_object_get_bits(ipaddress_object *self, GCC_UNUSED void *closure) +{ + ENTERING(ipaddress_object_get_bits); + return PyInt_FromLong(self->type->length * 8); +} + +static PyObject * +ipaddress_object_get_version(ipaddress_object *self, GCC_UNUSED void *closure) +{ + ENTERING(ipaddress_object_get_version); + return PyInt_FromLong(self->type->version); +} + +static PyObject * +ipaddress_object_number_binary_helper(binaryfunc function, PyObject *arg1, PyObject *arg2) +{ + ipaddress_object *addr = NULL; + ipaddress_object *addr1 = NULL; + ipaddress_object *addr2 = NULL; + ipaddress_object *result = NULL; + PyObject *obj1 = NULL; + PyObject 
*obj2 = NULL; + PyObject *obj3 = NULL; + PyObject *obj4 = NULL; + + if (POW_IPAddress_Check(arg1)) + addr1 = (ipaddress_object *) arg1; + + if (POW_IPAddress_Check(arg2)) + addr2 = (ipaddress_object *) arg2; + + if ((addr1 == NULL && addr2 == NULL) || + (addr1 != NULL && addr2 != NULL && addr1->type != addr2->type) || + (obj1 = PyNumber_Long(arg1)) == NULL || + (obj2 = PyNumber_Long(arg2)) == NULL) { + result = (ipaddress_object *) Py_NotImplemented; + Py_INCREF(result); + goto error; + } + + if ((obj3 = function(obj1, obj2)) == NULL) + goto error; + + if ((obj4 = PyNumber_Long(obj3)) == NULL) + lose("Couldn't convert result"); + + addr = addr1 != NULL ? addr1 : addr2; + + if ((result = (ipaddress_object *) addr->ob_type->tp_alloc(addr->ob_type, 0)) == NULL) + goto error; + + result->type = addr->type; + + if (_PyLong_AsByteArray((PyLongObject *) obj4, result->address, result->type->length, 0, 0) < 0) { + Py_XDECREF(result); + result = NULL; + } + + error: /* Fall through */ + Py_XDECREF(obj1); + Py_XDECREF(obj2); + Py_XDECREF(obj3); + Py_XDECREF(obj4); + + return (PyObject *) result; +} + +static PyObject * +ipaddress_object_number_long(PyObject *arg) +{ + ipaddress_object *addr = (ipaddress_object *) arg; + + ENTERING(ipaddress_object_number_long); + + if (!POW_IPAddress_Check(arg)) + return Py_INCREF(Py_NotImplemented), Py_NotImplemented; + + return _PyLong_FromByteArray(addr->address, addr->type->length, 0, 0); +} + +static PyObject * +ipaddress_object_number_int(PyObject *arg) +{ + ENTERING(ipaddress_object_number_int); + return ipaddress_object_number_long(arg); +} + +static PyObject * +ipaddress_object_number_add(PyObject *arg1, PyObject *arg2) +{ + ENTERING(ipaddress_object_number_add); + return ipaddress_object_number_binary_helper(PyNumber_Add, arg1, arg2); +} + +static PyObject * +ipaddress_object_number_subtract(PyObject *arg1, PyObject *arg2) +{ + ENTERING(ipaddress_object_number_subtract); + return 
ipaddress_object_number_binary_helper(PyNumber_Subtract, arg1, arg2); +} + +static PyObject * +ipaddress_object_number_lshift(PyObject *arg1, PyObject *arg2) +{ + ENTERING(ipaddress_object_number_lshift); + return ipaddress_object_number_binary_helper(PyNumber_Lshift, arg1, arg2); +} + +static PyObject * +ipaddress_object_number_rshift(PyObject *arg1, PyObject *arg2) +{ + ENTERING(ipaddress_object_number_rshift); + return ipaddress_object_number_binary_helper(PyNumber_Rshift, arg1, arg2); +} + +static PyObject * +ipaddress_object_number_and(PyObject *arg1, PyObject *arg2) +{ + ENTERING(ipaddress_object_number_and); + return ipaddress_object_number_binary_helper(PyNumber_And, arg1, arg2); +} + +static PyObject * +ipaddress_object_number_xor(PyObject *arg1, PyObject *arg2) +{ + ENTERING(ipaddress_object_number_xor); + return ipaddress_object_number_binary_helper(PyNumber_Xor, arg1, arg2); +} + +static PyObject * +ipaddress_object_number_or(PyObject *arg1, PyObject *arg2) +{ + ENTERING(ipaddress_object_number_or); + return ipaddress_object_number_binary_helper(PyNumber_Or, arg1, arg2); +} + +static int +ipaddress_object_number_nonzero(ipaddress_object *self) +{ + int i; + + ENTERING(ipaddress_object_number_nonzero); + + for (i = 0; (unsigned) i < self->type->length; i++) + if (self->address[i] != 0) + return 1; + return 0; +} + +static PyObject * +ipaddress_object_number_invert(ipaddress_object *self) +{ + ipaddress_object *result = NULL; + int i; + + ENTERING(ipaddress_object_number_invert); + + if ((result = (ipaddress_object *) self->ob_type->tp_alloc(self->ob_type, 0)) == NULL) + goto error; + + result->type = self->type; + + for (i = 0; (unsigned) i < self->type->length; i++) + result->address[i] = ~self->address[i]; + + error: /* Fall through */ + return (PyObject *) result; +} + +static char ipaddress_object_copy__doc__[] = + "" + ; + +static PyObject * +ipaddress_object_copy(ipaddress_object *self, GCC_UNUSED PyObject *args) +{ + ipaddress_object *result = 
NULL; + + ENTERING(ipaddress_object_copy); + + if ((result = (ipaddress_object *) self->ob_type->tp_alloc(self->ob_type, 0)) == NULL) + goto error; + + memcpy(result->address, self->address, sizeof(result->address)); + result->type = self->type; + + error: + return (PyObject *) result; +} + +static struct PyMethodDef ipaddress_object_methods[] = { + Define_Method(__copy__, ipaddress_object_copy, METH_VARARGS), + Define_Method(__deepcopy__, ipaddress_object_copy, METH_VARARGS), + Define_Method(toBytes, ipaddress_object_to_bytes, METH_NOARGS), + Define_Class_Method(fromBytes, ipaddress_object_from_bytes, METH_VARARGS), + {NULL} +}; + +static PyGetSetDef ipaddress_object_getsetters[] = { + {"bits", (getter) ipaddress_object_get_bits}, + {"version", (getter) ipaddress_object_get_version}, + {NULL} +}; + +static PyNumberMethods ipaddress_NumberMethods = { + ipaddress_object_number_add, /* nb_add */ + ipaddress_object_number_subtract, /* nb_subtract */ + 0, /* nb_multiply */ + 0, /* nb_divide */ + 0, /* nb_remainder */ + 0, /* nb_divmod */ + 0, /* nb_power */ + 0, /* nb_negative */ + 0, /* nb_positive */ + 0, /* nb_absolute */ + (inquiry) ipaddress_object_number_nonzero, /* nb_nonzero */ + (unaryfunc) ipaddress_object_number_invert, /* nb_invert */ + ipaddress_object_number_lshift, /* nb_lshift */ + ipaddress_object_number_rshift, /* nb_rshift */ + ipaddress_object_number_and, /* nb_and */ + ipaddress_object_number_xor, /* nb_xor */ + ipaddress_object_number_or, /* nb_or */ + 0, /* nb_coerce */ + ipaddress_object_number_int, /* nb_int */ + ipaddress_object_number_long, /* nb_long */ + 0, /* nb_float */ + 0, /* nb_oct */ + 0, /* nb_hex */ + 0, /* nb_inplace_add */ + 0, /* nb_inplace_subtract */ + 0, /* nb_inplace_multiply */ + 0, /* nb_inplace_divide */ + 0, /* nb_inplace_remainder */ + 0, /* nb_inplace_power */ + 0, /* nb_inplace_lshift */ + 0, /* nb_inplace_rshift */ + 0, /* nb_inplace_and */ + 0, /* nb_inplace_xor */ + 0, /* nb_inplace_or */ + 0, /* nb_floor_divide */ 
+ 0, /* nb_true_divide */ + 0, /* nb_inplace_floor_divide */ + 0, /* nb_inplace_true_divide */ + 0, /* nb_index */ +}; + +static PyTypeObject POW_IPAddress_Type = { + PyObject_HEAD_INIT(NULL) + 0, /* ob_size */ + "rpki.POW.IPAddress", /* tp_name */ + sizeof(ipaddress_object), /* tp_basicsize */ + 0, /* tp_itemsize */ + 0, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + ipaddress_object_compare, /* tp_compare */ + (reprfunc) ipaddress_object_repr, /* tp_repr */ + &ipaddress_NumberMethods, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + (hashfunc) ipaddress_object_hash, /* tp_hash */ + 0, /* tp_call */ + (reprfunc) ipaddress_object_str, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_CHECKTYPES, /* tp_flags */ + 0, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + ipaddress_object_richcompare, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + ipaddress_object_methods, /* tp_methods */ + 0, /* tp_members */ + ipaddress_object_getsetters, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + ipaddress_object_new, /* tp_new */ +}; + + + +/* + * X509 object. 
 */

/*
 * Wrap an existing X509 * in a new x509_object of the given type
 * (defaulting to POW_X509_Type).  Takes ownership of x on success.
 */
static x509_object *
x509_object_new_helper(PyTypeObject *type, X509 *x)
{
  x509_object *self;

  if (type == NULL)
    type = &POW_X509_Type;

  if ((self = (x509_object *) type->tp_alloc(type, 0)) == NULL)
    return NULL;

  self->x509 = x;
  return self;
}

/*
 * tp_new: allocate an x509_object holding a fresh, empty X509.
 */
static PyObject *
x509_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds)
{
  x509_object *self = NULL;
  X509 *x = NULL;

  ENTERING(x509_object_new);

  if ((x = X509_new()) == NULL)
    lose_no_memory();

  if ((self = x509_object_new_helper(type, x)) == NULL)
    goto error;

  return (PyObject *) self;

 error:
  X509_free(x);
  return NULL;
}

/*
 * tp_dealloc: release the wrapped X509 and free the Python object.
 */
static void
x509_object_dealloc(x509_object *self)
{
  ENTERING(x509_object_dealloc);
  X509_free(self->x509);
  self->ob_type->tp_free((PyObject*) self);
}

/*
 * Read one PEM-encoded certificate from a BIO into a new x509_object.
 */
static PyObject *
x509_object_pem_read_helper(PyTypeObject *type, BIO *bio)
{
  x509_object *self = NULL;

  ENTERING(x509_object_pem_read_helper);

  if ((self = (x509_object *) x509_object_new(type, NULL, NULL)) == NULL)
    goto error;

  /* PEM_read_bio_X509 replaces the empty X509 allocated by tp_new. */
  if (!PEM_read_bio_X509(bio, &self->x509, NULL, NULL))
    lose_openssl_error("Couldn't load PEM encoded certificate");

  return (PyObject *) self;

 error:

  Py_XDECREF(self);
  return NULL;
}

/*
 * Read one DER-encoded certificate from a BIO into a new x509_object.
 */
static PyObject *
x509_object_der_read_helper(PyTypeObject *type, BIO *bio)
{
  x509_object *self;

  ENTERING(x509_object_der_read_helper);

  if ((self = (x509_object *) x509_object_new(type, NULL, NULL)) == NULL)
    goto error;

  if (!d2i_X509_bio(bio, &self->x509))
    lose_openssl_error("Couldn't load DER encoded certificate");

  return (PyObject *) self;

 error:
  Py_XDECREF(self);
  return NULL;
}

static char x509_object_pem_read__doc__[] =
  "Read a PEM-encoded X.509 object from a string.\n"
  ;

static PyObject *
x509_object_pem_read(PyTypeObject *type, PyObject *args)
{
  ENTERING(x509_object_pem_read);
  return read_from_string_helper(x509_object_pem_read_helper, type, args);
}
+static char x509_object_pem_read_file__doc__[] = + "Read a PEM-encoded X.509 object from a file.\n" + ; + +static PyObject * +x509_object_pem_read_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(x509_object_pem_read_file); + return read_from_file_helper(x509_object_pem_read_helper, type, args); +} + +static char x509_object_der_read__doc__[] = + "Read a DER-encoded X.509 object from a string.\n" + ; + +static PyObject * +x509_object_der_read(PyTypeObject *type, PyObject *args) +{ + ENTERING(x509_object_der_read); + return read_from_string_helper(x509_object_der_read_helper, type, args); +} + +static char x509_object_der_read_file__doc__[] = + "Read a DER-encoded X.509 object from a file.\n" + ; + +static PyObject * +x509_object_der_read_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(x509_object_der_read_file); + return read_from_file_helper(x509_object_der_read_helper, type, args); +} + +static char x509_object_pem_write__doc__[] = + "Return the PEM encoding of this certificate, as a string.\n" + ; + +static PyObject * +x509_object_pem_write(x509_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(x509_object_pem_write); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!PEM_write_bio_X509(bio, self->x509)) + lose_openssl_error("Unable to write certificate"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static char x509_object_der_write__doc__[] = + "Return the DER encoding of this certificate, as a string.\n" + ; + +static PyObject * +x509_object_der_write(x509_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(x509_object_der_write); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!i2d_X509_bio(bio, self->x509)) + lose_openssl_error("Unable to write certificate"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static 
X509_EXTENSIONS ** +x509_object_extension_helper(x509_object *self) +{ + if (self && self->x509 && self->x509->cert_info) + return &self->x509->cert_info->extensions; + PyErr_SetString(PyExc_ValueError, "Can't find X509_EXTENSIONS in X509 object"); + return NULL; +} + +static char x509_object_get_public_key__doc__[] = + "Return the public key from this certificate object,\n" + "as an Asymmetric object.\n" + ; + +static PyObject * +x509_object_get_public_key(x509_object *self) +{ + PyTypeObject *type = &POW_Asymmetric_Type; + asymmetric_object *asym = NULL; + + ENTERING(x509_object_get_public_key); + + if ((asym = (asymmetric_object *) type->tp_alloc(type, 0)) == NULL) + goto error; + + if ((asym->pkey = X509_get_pubkey(self->x509)) == NULL) + lose_openssl_error("Couldn't extract public key from certificate"); + + return (PyObject *) asym; + + error: + Py_XDECREF(asym); + return NULL; +} + +static char x509_object_set_public_key__doc__[] = + "Set the public key of this certificate object.\n" + "\n" + "The \"key\" parameter should be an instance of the Asymmetric class,\n" + "containing a public key.\n" + ; + +static PyObject * +x509_object_set_public_key(x509_object *self, PyObject *args) +{ + asymmetric_object *asym; + + ENTERING(x509_object_set_public_key); + + if (!PyArg_ParseTuple(args, "O!", &POW_Asymmetric_Type, &asym)) + goto error; + + if (!X509_set_pubkey(self->x509, asym->pkey)) + lose_openssl_error("Couldn't set certificate's public key"); + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char x509_object_sign__doc__[] = + "Sign a certificate with a private key.\n" + "\n" + "The \"key\" parameter should be an instance of the Asymmetric class,\n" + "containing a private key.\n" + "\n" + "The optional \"digest\" parameter indicates which digest to compute and\n" + "sign, and should be one of the following:\n" + "\n" + "* MD5_DIGEST\n" + "* SHA_DIGEST\n" + "* SHA1_DIGEST\n" + "* SHA256_DIGEST\n" + "* SHA384_DIGEST\n" + "* SHA512_DIGEST\n" + "\n" + 
"The default digest algorithm is SHA-256.\n" + ; + +static PyObject * +x509_object_sign(x509_object *self, PyObject *args) +{ + asymmetric_object *asym; + int digest_type = SHA256_DIGEST; + const EVP_MD *digest_method = NULL; + + ENTERING(x509_object_sign); + + if (!PyArg_ParseTuple(args, "O!|i", &POW_Asymmetric_Type, &asym, &digest_type)) + goto error; + + if ((digest_method = evp_digest_factory(digest_type)) == NULL) + lose("Unsupported digest algorithm"); + + if (!X509_sign(self->x509, asym->pkey, digest_method)) + lose_openssl_error("Couldn't sign certificate"); + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char x509_object_get_version__doc__[] = + "Return version number of this certificate.\n" + ; + +static PyObject * +x509_object_get_version(x509_object *self) +{ + ENTERING(x509_object_get_version); + return Py_BuildValue("l", X509_get_version(self->x509)); +} + +static char x509_object_set_version__doc__[] = + "Set version number of this certificate.\n" + "\n" + "The \"version\" parameter should be an integer.\n" + ; + +static PyObject * +x509_object_set_version(x509_object *self, PyObject *args) +{ + long version = 0; + + ENTERING(x509_object_set_version); + + if (!PyArg_ParseTuple(args, "l", &version)) + goto error; + + if (!X509_set_version(self->x509, version)) + lose("Couldn't set certificate version"); + + Py_RETURN_NONE; + + error: + + return NULL; +} + +static char x509_object_get_serial__doc__[] = + "Return the serial number of this certificate.\n" + ; + +static PyObject * +x509_object_get_serial(x509_object *self) +{ + ENTERING(x509_object_get_serial); + return Py_BuildValue("N", ASN1_INTEGER_to_PyLong(X509_get_serialNumber(self->x509))); +} + +static char x509_object_set_serial__doc__[] = + "Set the serial number of this certificate.\n" + "\n" + "The \"serial\" parameter should ba an integer.\n" + ; + +static PyObject * +x509_object_set_serial(x509_object *self, PyObject *args) +{ + ASN1_INTEGER *a_serial = NULL; + PyObject *p_serial 
= NULL; + int ok = 0; + + ENTERING(x509_object_set_serial); + + if (!PyArg_ParseTuple(args, "O", &p_serial) || + (a_serial = PyLong_to_ASN1_INTEGER(p_serial)) == NULL) + goto error; + + if (!X509_set_serialNumber(self->x509, a_serial)) + lose_no_memory(); + + ok = 1; + + error: + ASN1_INTEGER_free(a_serial); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +static char x509_object_get_issuer__doc__[] = + "Return this certificate's issuer name, represented as a tuple.\n" + "\n" + "Each element of this tuple is another tuple representing one\n" + "\"Relative Distinguished Name\" (RDN), each element of which in turn\n" + "is yet another tuple representing one AttributeTypeAndValue pair.\n" + "\n" + "In practice, RDNs containing multiple attributes are rare, thus the RDN\n" + "tuples will usually be exactly one element long, but using the\n" + "tuple-of-tuples-of-tuples format lets us represent the general case.\n" + "\n" + "The AttributeTypeANdValue pairs are two-element tuples, the first\n" + "element of which is a string representing an Object Identifier (OID),\n" + "the second of which contains the attribute value.\n" + "\n" + "This method takes an optional \"format\" parameter which controls\n" + "the format in which OIDs are returned. 
Allowed values are:\n" + "\n" + " * SHORTNAME_FORMAT (the OpenSSL \"short name\" for this OID)\n" + " * LONGNAME_FORMAT (the OpenSSL \"long name\" for this OID)\n" + " * OIDNAME_FORMAT (the OID in dotted decimal numeric format)\n" + "\n" + "The default is OIDNAME_FORMAT.\n" + "\n" + "See RFC 5280 section 4.1.2.4 for details of the ASN.1 structure.\n" + ; + +static PyObject * +x509_object_get_issuer(x509_object *self, PyObject *args) +{ + PyObject *result = NULL; + int format = OIDNAME_FORMAT; + + ENTERING(x509_object_get_issuer); + + if (!PyArg_ParseTuple(args, "|i", &format)) + goto error; + + result = x509_object_helper_get_name(X509_get_issuer_name(self->x509), + format); + + error: /* Fall through */ + return result; +} + +static char x509_object_get_subject__doc__[] = + "Return this certificate's subject name, as a tuple.\n" + "\n" + "See the documentation for the \"getIssuer\" method for details on the\n" + "structure of the return value and use of the optional \"format\"\n" + "parameter.\n" + ; + +static PyObject * +x509_object_get_subject(x509_object *self, PyObject *args) +{ + PyObject *result = NULL; + int format = OIDNAME_FORMAT; + + ENTERING(x509_object_get_subject); + + if (!PyArg_ParseTuple(args, "|i", &format)) + goto error; + + result = x509_object_helper_get_name(X509_get_subject_name(self->x509), + format); + + error: /* Fall through */ + return result; +} + +static char x509_object_set_subject__doc__[] = + "Set this certificate's subject name.\n" + "\n" + "The \"name\" parameter should be in the same format as the return\n" + "value from the \"getIssuer\" method.\n" + ; + +static PyObject * +x509_object_set_subject(x509_object *self, PyObject *args) +{ + PyObject *name_sequence = NULL; + X509_NAME *name = NULL; + + ENTERING(x509_object_set_subject); + + if (!PyArg_ParseTuple(args, "O", &name_sequence)) + goto error; + + if (!PySequence_Check(name_sequence)) + lose_type_error("Inapropriate type"); + + if ((name = 
x509_object_helper_set_name(name_sequence)) == NULL) + goto error; + + if (!X509_set_subject_name(self->x509, name)) + lose("Unable to set subject name"); + + X509_NAME_free(name); + + Py_RETURN_NONE; + + error: + X509_NAME_free(name); + return NULL; +} + +static char x509_object_set_issuer__doc__[] = + "Set this certificate's issuer name.\n" + "\n" + "The \"name\" parameter should be in the same format as the return\n" + "value from the \"getIssuer\" method.\n" + ; + +static PyObject * +x509_object_set_issuer(x509_object *self, PyObject *args) +{ + PyObject *name_sequence = NULL; + X509_NAME *name = NULL; + + ENTERING(x509_object_set_issuer); + + if (!PyArg_ParseTuple(args, "O", &name_sequence)) + goto error; + + if (!PySequence_Check(name_sequence)) + lose_type_error("Inapropriate type"); + + if ((name = x509_object_helper_set_name(name_sequence)) == NULL) + goto error; + + if (!X509_set_issuer_name(self->x509, name)) + lose("Unable to set issuer name"); + + X509_NAME_free(name); + + Py_RETURN_NONE; + + error: + X509_NAME_free(name); + return NULL; +} + +static char x509_object_get_not_before__doc__[] = + "Return this certificate's \"notBefore\" value as a datetime.\n" + ; + +static PyObject * +x509_object_get_not_before (x509_object *self) +{ + ENTERING(x509_object_get_not_before); + return ASN1_TIME_to_Python(X509_get_notBefore(self->x509)); +} + +static char x509_object_get_not_after__doc__[] = + "Return this certificate's \"notAfter\" value as a datetime.\n" + ; + +static PyObject * +x509_object_get_not_after (x509_object *self) +{ + ENTERING(x509_object_get_not_after); + return ASN1_TIME_to_Python(X509_get_notAfter(self->x509)); +} + +static char x509_object_set_not_after__doc__[] = + "Set this certificate's \"notAfter\" value.\n" + "\n" + "The \"time\" parameter should be a datetime object.\n" + ; + +static PyObject * +x509_object_set_not_after (x509_object *self, PyObject *args) +{ + PyObject *o = NULL; + ASN1_TIME *t = NULL; + + 
ENTERING(x509_object_set_not_after); + + if (!PyArg_ParseTuple(args, "O", &o)) + goto error; + + if ((t = Python_to_ASN1_TIME(o, 1)) == NULL) + lose("Couldn't convert notAfter string"); + + if (!X509_set_notAfter(self->x509, t)) + lose("Couldn't set notAfter"); + + ASN1_TIME_free(t); + Py_RETURN_NONE; + + error: + ASN1_TIME_free(t); + return NULL; +} + +static char x509_object_set_not_before__doc__[] = + "Set this certificate's \"notBefore\" value.\n" + "\n" + "The \"time\" parameter should be a datetime object.\n" + ; + +static PyObject * +x509_object_set_not_before (x509_object *self, PyObject *args) +{ + PyObject *o = NULL; + ASN1_TIME *t = NULL; + + ENTERING(x509_object_set_not_before); + + if (!PyArg_ParseTuple(args, "O", &o)) + goto error; + + if ((t = Python_to_ASN1_TIME(o, 1)) == NULL) + lose("Couldn't convert notBefore string"); + + if (!X509_set_notBefore(self->x509, t)) + lose("Couldn't set notBefore"); + + ASN1_TIME_free(t); + Py_RETURN_NONE; + + error: + ASN1_TIME_free(t); + return NULL; +} + +static char x509_object_clear_extensions__doc__[] = + "Clear all extensions attached to this certificate.\n" + ; + +static PyObject * +x509_object_clear_extensions(x509_object *self) +{ + X509_EXTENSION *ext; + + ENTERING(x509_object_clear_extensions); + + while ((ext = X509_delete_ext(self->x509, 0)) != NULL) + X509_EXTENSION_free(ext); + + Py_RETURN_NONE; +} + +static char x509_object_get_ski__doc__[] = + "Return the Subject Key Identifier (SKI) value for this\n" + "certificate, or None if the certificate has no SKI extension.\n" + ; + +static PyObject * +x509_object_get_ski(x509_object *self) +{ + return extension_get_ski(x509_object_extension_helper(self)); +} + +static char x509_object_set_ski__doc__[] = + "Set the Subject Key Identifier (SKI) value for this certificate.\n" + ; + +static PyObject * +x509_object_set_ski(x509_object *self, PyObject *args) +{ + return extension_set_ski(x509_object_extension_helper(self), args); +} + +static char 
x509_object_get_aki__doc__[] = + "Return the Authority Key Identifier (AKI) keyid value for this\n" + "certificate, or None if the certificate has no AKI extension or has an\n" + "AKI extension with no keyIdentifier value.\n" + ; + +static PyObject * +x509_object_get_aki(x509_object *self) +{ + return extension_get_aki(x509_object_extension_helper(self)); +} + +static char x509_object_set_aki__doc__[] = + "Set the Authority Key Identifier (AKI) value for this certificate.\n" + "\n" + "We only support the keyIdentifier method, as that's the only form\n" + "which is legal for RPKI certificates.\n" + ; + +static PyObject * +x509_object_set_aki(x509_object *self, PyObject *args) +{ + return extension_set_aki(x509_object_extension_helper(self), args); +} + +static char x509_object_get_key_usage__doc__[] = + "Return a FrozenSet of strings representing the KeyUsage\n" + "settings for this certificate, or None if the certificate has no\n" + "KeyUsage extension. The bits have the same names as in RFC 5280.\n" + ; + +static PyObject * +x509_object_get_key_usage(x509_object *self) +{ + return extension_get_key_usage(x509_object_extension_helper(self)); +} + +static char x509_object_set_key_usage__doc__[] = + "Set the KeyUsage extension for this certificate.\n" + "\n" + "Argument \"iterable\" should be an iterable object which returns zero or more\n" + "strings naming bits to be enabled. The bits have the same names as in RFC 5280.\n" + "\n" + "Optional argument \"critical\" is a boolean indicating whether the extension\n" + "should be marked as critical or not. 
RFC 5280 4.2.1.3 says this extension SHOULD\n" + "be marked as critical when used, so the default is True.\n" + ; + +static PyObject * +x509_object_set_key_usage(x509_object *self, PyObject *args) +{ + return extension_set_key_usage(x509_object_extension_helper(self), args); +} + +static char x509_object_get_eku__doc__[] = + "Return a FrozenSet of object identifiers representing the\n" + "ExtendedKeyUsage settings for this certificate, or None if\n" + "the certificate has no ExtendedKeyUsage extension.\n" + ; + +static PyObject * +x509_object_get_eku(x509_object *self) +{ + return extension_get_eku(x509_object_extension_helper(self)); +} + +static char x509_object_set_eku__doc__[] = + "Set the ExtendedKeyUsage extension for this certificate.\n" + "\n" + "Argument \"iterable\" should be an iterable object which returns one or more\n" + "object identifiers.\n" + "\n" + "Optional argument \"critical\" is a boolean indicating whether the extension\n" + "should be marked as critical or not. RFC 6487 4.8.5 says this extension\n" + "MUST NOT be marked as non-critical when used, so the default is False.\n" + ; + +static PyObject * +x509_object_set_eku(x509_object *self, PyObject *args) +{ + return extension_set_eku(x509_object_extension_helper(self), args); +} + +static char x509_object_get_rfc3779__doc__[] = + "Return this certificate's RFC 3779 resources.\n" + "\n" + "Return value is a three-element tuple: the first element is the ASN\n" + "resources, the second is the IPv4 resources, the third is the IPv6\n" + "resources. Each of these elements in turn is either the string\n" + "\"inherit\" or a tuple representing a set of ranges of ASNs or IP\n" + "addresses.\n" + "\n" + "Each range is a two-element tuple, respectively representing the low\n" + "and high ends of the range, inclusive. 
ASN ranges are represented by\n" + "pairs of integers, IP address ranges are represented by pairs of\n" + "IPAddress objects.\n" + ; + +static PyObject * +x509_object_get_rfc3779(x509_object *self) +{ + PyObject *result = NULL; + PyObject *asn_result = NULL; + PyObject *ipv4_result = NULL; + PyObject *ipv6_result = NULL; + PyObject *range = NULL; + PyObject *range_b = NULL; + PyObject *range_e = NULL; + ASIdentifiers *asid = NULL; + IPAddrBlocks *addr = NULL; + int i, j; + + ENTERING(x509_object_get_rfc3779); + + if ((asid = X509_get_ext_d2i(self->x509, NID_sbgp_autonomousSysNum, NULL, NULL)) != NULL && + asid->asnum != NULL) { + switch (asid->asnum->type) { + + case ASIdentifierChoice_inherit: + if ((asn_result = PyString_FromString("inherit")) == NULL) + goto error; + break; + + case ASIdentifierChoice_asIdsOrRanges: + + if ((asn_result = PyTuple_New(sk_ASIdOrRange_num(asid->asnum->u.asIdsOrRanges))) == NULL) + goto error; + + for (i = 0; i < sk_ASIdOrRange_num(asid->asnum->u.asIdsOrRanges); i++) { + ASIdOrRange *aor = sk_ASIdOrRange_value(asid->asnum->u.asIdsOrRanges, i); + ASN1_INTEGER *b = NULL; + ASN1_INTEGER *e = NULL; + + switch (aor->type) { + + case ASIdOrRange_id: + b = e = aor->u.id; + break; + + case ASIdOrRange_range: + b = aor->u.range->min; + e = aor->u.range->max; + break; + + default: + lose_type_error("Unexpected asIdsOrRanges type"); + } + + if (ASN1_STRING_type(b) == V_ASN1_NEG_INTEGER || + ASN1_STRING_type(e) == V_ASN1_NEG_INTEGER) + lose_type_error("I don't believe in negative ASNs"); + + if ((range_b = ASN1_INTEGER_to_PyLong(b)) == NULL || + (range_e = ASN1_INTEGER_to_PyLong(e)) == NULL || + (range = Py_BuildValue("(NN)", range_b, range_e)) == NULL) + goto error; + + PyTuple_SET_ITEM(asn_result, i, range); + range = range_b = range_e = NULL; + } + + break; + + default: + lose_type_error("Unexpected ASIdentifierChoice type"); + } + } + + if ((addr = X509_get_ext_d2i(self->x509, NID_sbgp_ipAddrBlock, NULL, NULL)) != NULL) { + for (i = 0; i < 
sk_IPAddressFamily_num(addr); i++) { + IPAddressFamily *f = sk_IPAddressFamily_value(addr, i); + const struct ipaddress_version *ip_type = NULL; + const unsigned int afi = v3_addr_get_afi(f); + PyObject **result_obj = NULL; + int addr_len = 0; + + switch (afi) { + case IANA_AFI_IPV4: result_obj = &ipv4_result; ip_type = &ipaddress_version_4; break; + case IANA_AFI_IPV6: result_obj = &ipv6_result; ip_type = &ipaddress_version_6; break; + default: lose_type_error("Unknown AFI"); + } + + if (*result_obj != NULL) + lose_type_error("Duplicate IPAddressFamily"); + + if (f->addressFamily->length > 2) + lose_type_error("Unsupported SAFI"); + + switch (f->ipAddressChoice->type) { + + case IPAddressChoice_inherit: + if ((*result_obj = PyString_FromString("inherit")) == NULL) + goto error; + continue; + + case IPAddressChoice_addressesOrRanges: + break; + + default: + lose_type_error("Unexpected IPAddressChoice type"); + } + + if ((*result_obj = PyTuple_New(sk_IPAddressOrRange_num(f->ipAddressChoice->u.addressesOrRanges))) == NULL) + goto error; + + for (j = 0; j < sk_IPAddressOrRange_num(f->ipAddressChoice->u.addressesOrRanges); j++) { + IPAddressOrRange *aor = sk_IPAddressOrRange_value(f->ipAddressChoice->u.addressesOrRanges, j); + ipaddress_object *addr_b = NULL; + ipaddress_object *addr_e = NULL; + + if ((range_b = POW_IPAddress_Type.tp_alloc(&POW_IPAddress_Type, 0)) == NULL || + (range_e = POW_IPAddress_Type.tp_alloc(&POW_IPAddress_Type, 0)) == NULL) + goto error; + + addr_b = (ipaddress_object *) range_b; + addr_e = (ipaddress_object *) range_e; + + if ((addr_len = v3_addr_get_range(aor, afi, addr_b->address, addr_e->address, + sizeof(addr_b->address))) == 0) + lose_type_error("Couldn't unpack IP addresses from BIT STRINGs"); + + addr_b->type = addr_e->type = ip_type; + + if ((range = Py_BuildValue("(NN)", range_b, range_e)) == NULL) + goto error; + + PyTuple_SET_ITEM(*result_obj, j, range); + range = range_b = range_e = NULL; + } + } + } + + result = 
Py_BuildValue("(OOO)", + (asn_result == NULL ? Py_None : asn_result), + (ipv4_result == NULL ? Py_None : ipv4_result), + (ipv6_result == NULL ? Py_None : ipv6_result)); + + error: /* Fall through */ + ASIdentifiers_free(asid); + sk_IPAddressFamily_pop_free(addr, IPAddressFamily_free); + Py_XDECREF(range_b); + Py_XDECREF(range_e); + Py_XDECREF(range); + Py_XDECREF(asn_result); + Py_XDECREF(ipv4_result); + Py_XDECREF(ipv6_result); + + return result; +} + +static char x509_object_set_rfc3779__doc__[] = + "Set this certificate's RFC 3779 resources.\n" + "\n" + "This method takes three arguments: \"asn\", \"ipv4\", and \"ipv6\".\n" + "\n" + "Each of these arguments can be:\n" + "\n" + "* None, to omit this kind of resource;\n" + "\n" + "* The string \"inherit\", to specify RFC 3779 resource inheritance; or\n" + "\n" + "* An iterable object which returns range pairs of the appropriate type.\n" + "\n" + "Range pairs are as returned by the .getRFC3779() method.\n" + ; + +static PyObject * +x509_object_set_rfc3779(x509_object *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"asn", "ipv4", "ipv6", NULL}; + PyObject *asn_arg = Py_None; + PyObject *ipv4_arg = Py_None; + PyObject *ipv6_arg = Py_None; + PyObject *iterator = NULL; + PyObject *item = NULL; + PyObject *fast = NULL; + ASIdentifiers *asid = NULL; + IPAddrBlocks *addr = NULL; + ASN1_INTEGER *asid_b = NULL; + ASN1_INTEGER *asid_e = NULL; + ipaddress_object *addr_b = NULL; + ipaddress_object *addr_e = NULL; + int empty = 0; + + ENTERING(x509_object_set_rfc3779); + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOO", kwlist, &asn_arg, &ipv4_arg, &ipv6_arg)) + goto error; + + if (asn_arg != Py_None) { + + empty = 1; + + if ((asid = ASIdentifiers_new()) == NULL) + lose_no_memory(); + + if (PyString_Check(asn_arg)) { + + if (strcmp(PyString_AsString(asn_arg), "inherit")) + lose_type_error("ASID must be an iterable that returns range pairs, or the string \"inherit\""); + + if (!v3_asid_add_inherit(asid, 
V3_ASID_ASNUM)) + lose_no_memory(); + + empty = 0; + + } else { + + if ((iterator = PyObject_GetIter(asn_arg)) == NULL) + goto error; + + while ((item = PyIter_Next(iterator)) != NULL) { + + if ((fast = PySequence_Fast(item, "ASN range must be a sequence")) == NULL) + goto error; + + if (PySequence_Fast_GET_SIZE(fast) != 2) + lose_type_error("ASN range must be two-element sequence"); + + if ((asid_b = PyLong_to_ASN1_INTEGER(PySequence_Fast_GET_ITEM(fast, 0))) == NULL) + goto error; + + switch (PyObject_RichCompareBool(PySequence_Fast_GET_ITEM(fast, 0), + PySequence_Fast_GET_ITEM(fast, 1), Py_EQ)) { + case 0: + if ((asid_e = PyLong_to_ASN1_INTEGER(PySequence_Fast_GET_ITEM(fast, 1))) == NULL) + goto error; + break; + case 1: + break; + default: + goto error; + } + + if (!v3_asid_add_id_or_range(asid, V3_ASID_ASNUM, asid_b, asid_e)) + lose_openssl_error("Couldn't add range to ASID"); + + asid_b = asid_e = NULL; + Py_XDECREF(item); + Py_XDECREF(fast); + item = fast = NULL; + empty = 0; + } + + Py_XDECREF(iterator); + iterator = NULL; + } + + if (!empty && (!v3_asid_canonize(asid) || + !X509_add1_ext_i2d(self->x509, NID_sbgp_autonomousSysNum, + asid, 1, X509V3_ADD_REPLACE))) + lose_openssl_error("Couldn't add ASID extension to certificate"); + } + + if (ipv4_arg != Py_None || ipv6_arg != Py_None) { + int v; + + empty = 1; + + if ((addr = sk_IPAddressFamily_new_null()) == NULL) + lose_no_memory(); + + /* + * Cheap trick to let us inline all of this instead of being + * forced to use a separate function. Refactor, some day. 
+ */ + + for (v = 0; v < (int) (sizeof(ipaddress_versions)/sizeof(*ipaddress_versions)); v++) { + const struct ipaddress_version *ip_type = ipaddress_versions[v]; + PyObject **argp; + + switch (ip_type->version) { + case 4: argp = &ipv4_arg; break; + case 6: argp = &ipv6_arg; break; + default: continue; /* Never happens */ + } + + if (PyString_Check(*argp)) { + + if (strcmp(PyString_AsString(*argp), "inherit")) + lose_type_error("Argument must be an iterable that returns range pairs, or the string \"inherit\""); + + if (!v3_addr_add_inherit(addr, ip_type->afi, NULL)) + lose_no_memory(); + + empty = 0; + + } else { + + if ((iterator = PyObject_GetIter(*argp)) == NULL) + goto error; + + while ((item = PyIter_Next(iterator)) != NULL) { + + if ((fast = PySequence_Fast(item, "Address range must be a sequence")) == NULL) + goto error; + + if (PySequence_Fast_GET_SIZE(fast) != 2 || + !POW_IPAddress_Check(PySequence_Fast_GET_ITEM(fast, 0)) || + !POW_IPAddress_Check(PySequence_Fast_GET_ITEM(fast, 1))) + lose_type_error("Address range must be two-element sequence of IPAddress objects"); + + addr_b = (ipaddress_object *) PySequence_Fast_GET_ITEM(fast, 0); + addr_e = (ipaddress_object *) PySequence_Fast_GET_ITEM(fast, 1); + + if (addr_b->type != ip_type || + addr_e->type != ip_type || + memcmp(addr_b->address, addr_e->address, ip_type->length) > 0) + lose("Address range must be two-element sequence of IPAddress objects in ascending order"); + + if (!v3_addr_add_range(addr, ip_type->afi, NULL, addr_b->address, addr_e->address)) + lose_openssl_error("Couldn't add range to IPAddrBlock"); + + Py_XDECREF(item); + Py_XDECREF(fast); + item = fast = NULL; + addr_b = addr_e = NULL; + empty = 0; + } + + Py_XDECREF(iterator); + iterator = NULL; + } + } + + if (!empty && (!v3_addr_canonize(addr) || + !X509_add1_ext_i2d(self->x509, NID_sbgp_ipAddrBlock, + addr, 1, X509V3_ADD_REPLACE))) + lose_openssl_error("Couldn't add IPAddrBlock extension to certificate"); + } + + Py_RETURN_NONE; + + 
error: + ASN1_INTEGER_free(asid_b); + ASN1_INTEGER_free(asid_e); + ASIdentifiers_free(asid); + sk_IPAddressFamily_pop_free(addr, IPAddressFamily_free); + Py_XDECREF(iterator); + Py_XDECREF(item); + Py_XDECREF(fast); + return NULL; +} + +static char x509_object_get_basic_constraints__doc__[] = + "Return BasicConstraints for this certificate.\n" + "\n" + "If this certificate has no BasicConstraints extension, this method\n" + "returns None.\n" + "\n" + "Otherwise, this method returns a two-element tuple. The first element\n" + "of the tuple is a boolean representing the extension's cA value; the\n" + "second element of the tuple is either an integer representing the\n" + "pathLenConstraint value or None if there is no pathLenConstraint.\n" + ; + +static PyObject * +x509_object_get_basic_constraints(x509_object *self) +{ + return extension_get_basic_constraints(x509_object_extension_helper(self)); +} + +static char x509_object_set_basic_constraints__doc__[] = + "Set BasicConstraints for this certificate.\n" + "\n" + "First argument \"ca\" is a boolean indicating whether the certificate\n" + "is a CA certificate or not.\n" + "\n" + "Optional second argument \"pathLenConstraint\" is a non-negative integer\n" + "specifying the pathLenConstraint value for this certificate; this value\n" + "may only be set for CA certificates." + "\n" + "Optional third argument \"critical\" specifies whether the extension\n" + "should be marked as critical. 
RFC 5280 4.2.1.9 requires that CA\n" + "certificates mark this extension as critical, so the default is True.\n" + ; + +static PyObject * +x509_object_set_basic_constraints(x509_object *self, PyObject *args) +{ + return extension_set_basic_constraints(x509_object_extension_helper(self), args); +} + +static char x509_object_get_sia__doc__[] = + "Get SIA values for this certificate.\n" + "\n" + "If the certificate has no SIA extension, this method returns None.\n" + "\n" + "Otherwise, it returns a tuple containing three values:\n" + "caRepository URIs, rpkiManifest URIs, and signedObject URIs.\n" + "Each of these values is a tuple of strings, representing an ordered\n" + "sequence of URIs. Any or all of these sequences may be empty.\n" + "\n" + "Any other accessMethods are ignored, as are any non-URI\n" + "accessLocations.\n" + ; + +static PyObject * +x509_object_get_sia(x509_object *self) +{ + return extension_get_sia(x509_object_extension_helper(self)); +} + +static char x509_object_set_sia__doc__[] = + "Set SIA values for this certificate. 
Takes three arguments:\n" + "\"caRepository\", \"rpkiManifest\", and \"signedObject\".\n" + "Each of these should be an iterable which returns URIs.\n" + "\n" + "None is acceptable as an alternate way of specifying an empty\n" + "collection of URIs for a particular argument.\n" + ; + +static PyObject * +x509_object_set_sia(x509_object *self, PyObject *args, PyObject *kwds) +{ + return extension_set_sia(x509_object_extension_helper(self), args, kwds); +} + +static char x509_object_get_aia__doc__[] = + "Get this certificate's AIA values.\n" + "\n" + "If the certificate has no AIA extension, this method returns None.\n" + "\n" + "Otherwise, this returns a sequence of caIssuers URIs.\n" + "\n" + "Any other accessMethods are ignored, as are any non-URI\n" + "accessLocations.\n" + ; + +static PyObject * +x509_object_get_aia(x509_object *self) +{ + AUTHORITY_INFO_ACCESS *ext = NULL; + PyObject *result = NULL; + const char *uri; + PyObject *obj; + int i, n = 0; + + ENTERING(x509_object_get_aia); + + if ((ext = X509_get_ext_d2i(self->x509, NID_info_access, NULL, NULL)) == NULL) + Py_RETURN_NONE; + + for (i = 0; i < sk_ACCESS_DESCRIPTION_num(ext); i++) { + ACCESS_DESCRIPTION *a = sk_ACCESS_DESCRIPTION_value(ext, i); + if (a->location->type == GEN_URI && + OBJ_obj2nid(a->method) == NID_ad_ca_issuers) + n++; + } + + if (((result = PyTuple_New(n)) == NULL)) + goto error; + + n = 0; + + for (i = 0; i < sk_ACCESS_DESCRIPTION_num(ext); i++) { + ACCESS_DESCRIPTION *a = sk_ACCESS_DESCRIPTION_value(ext, i); + if (a->location->type == GEN_URI && OBJ_obj2nid(a->method) == NID_ad_ca_issuers) { + uri = (char *) ASN1_STRING_data(a->location->d.uniformResourceIdentifier); + if ((obj = PyString_FromString(uri)) == NULL) + goto error; + PyTuple_SET_ITEM(result, n++, obj); + } + } + + AUTHORITY_INFO_ACCESS_free(ext); + return result; + + error: + AUTHORITY_INFO_ACCESS_free(ext); + Py_XDECREF(result); + return NULL; +} + +static char x509_object_set_aia__doc__[] = + "Set AIA URIs for this 
certificate.\n" + "\n" + "Argument is a iterable which returns caIssuers URIs.\n" + ; + +static PyObject * +x509_object_set_aia(x509_object *self, PyObject *args) +{ + AUTHORITY_INFO_ACCESS *ext = NULL; + PyObject *caIssuers = NULL; + PyObject *iterator = NULL; + ASN1_OBJECT *oid = NULL; + PyObject *item = NULL; + ACCESS_DESCRIPTION *a = NULL; + int ok = 0; + Py_ssize_t urilen; + char *uri; + + ENTERING(x509_object_set_aia); + + if (!PyArg_ParseTuple(args, "O", &caIssuers)) + goto error; + + if ((ext = AUTHORITY_INFO_ACCESS_new()) == NULL) + lose_no_memory(); + + if ((oid = OBJ_nid2obj(NID_ad_ca_issuers)) == NULL) + lose_openssl_error("Couldn't find AIA accessMethod OID"); + + if ((iterator = PyObject_GetIter(caIssuers)) == NULL) + goto error; + + while ((item = PyIter_Next(iterator)) != NULL) { + + if (PyString_AsStringAndSize(item, &uri, &urilen) < 0) + goto error; + + if ((a = ACCESS_DESCRIPTION_new()) == NULL || + (a->method = OBJ_dup(oid)) == NULL || + (a->location->d.uniformResourceIdentifier = ASN1_IA5STRING_new()) == NULL || + !ASN1_OCTET_STRING_set(a->location->d.uniformResourceIdentifier, (unsigned char *) uri, urilen)) + lose_no_memory(); + + a->location->type = GEN_URI; + + if (!sk_ACCESS_DESCRIPTION_push(ext, a)) + lose_no_memory(); + + a = NULL; + Py_XDECREF(item); + item = NULL; + } + + Py_XDECREF(iterator); + iterator = NULL; + + if (!X509_add1_ext_i2d(self->x509, NID_info_access, ext, 0, X509V3_ADD_REPLACE)) + lose_openssl_error("Couldn't add AIA extension to certificate"); + + ok = 1; + + error: + AUTHORITY_INFO_ACCESS_free(ext); + ACCESS_DESCRIPTION_free(a); + Py_XDECREF(item); + Py_XDECREF(iterator); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +static char x509_object_get_crldp__doc__[] = + "Get CRL Distribution Point (CRLDP) values for this certificate.\n" + "\n" + "If the certificate has no CRLDP extension, this method returns None.\n" + "\n" + "Otherwise, it returns a sequence of URIs representing distributionPoint\n" + "fullName 
values found in the first Distribution Point. Other CRLDP\n" + "fields are ignored, as are subsequent Distribution Points and any non-URI\n" + "fullName values.\n" + ; + +static PyObject * +x509_object_get_crldp(x509_object *self) +{ + CRL_DIST_POINTS *ext = NULL; + DIST_POINT *dp = NULL; + PyObject *result = NULL; + const char *uri; + PyObject *obj; + int i, n = 0; + + ENTERING(x509_object_get_crldp); + + if ((ext = X509_get_ext_d2i(self->x509, NID_crl_distribution_points, NULL, NULL)) == NULL || + (dp = sk_DIST_POINT_value(ext, 0)) == NULL || + dp->distpoint == NULL || + dp->distpoint->type != 0) + Py_RETURN_NONE; + + for (i = 0; i < sk_GENERAL_NAME_num(dp->distpoint->name.fullname); i++) { + GENERAL_NAME *gn = sk_GENERAL_NAME_value(dp->distpoint->name.fullname, i); + if (gn->type == GEN_URI) + n++; + } + + if (((result = PyTuple_New(n)) == NULL)) + goto error; + + n = 0; + + for (i = 0; i < sk_GENERAL_NAME_num(dp->distpoint->name.fullname); i++) { + GENERAL_NAME *gn = sk_GENERAL_NAME_value(dp->distpoint->name.fullname, i); + if (gn->type == GEN_URI) { + uri = (char *) ASN1_STRING_data(gn->d.uniformResourceIdentifier); + if ((obj = PyString_FromString(uri)) == NULL) + goto error; + PyTuple_SET_ITEM(result, n++, obj); + } + } + + sk_DIST_POINT_pop_free(ext, DIST_POINT_free); + return result; + + error: + sk_DIST_POINT_pop_free(ext, DIST_POINT_free); + Py_XDECREF(result); + return NULL; +} + +static char x509_object_set_crldp__doc__[] = + "Set CRLDP values for this certificate.\n" + "\n" + "Argument is a iterable which returns distributionPoint fullName URIs.\n" + ; + +static PyObject * +x509_object_set_crldp(x509_object *self, PyObject *args) +{ + CRL_DIST_POINTS *ext = NULL; + PyObject *fullNames = NULL; + PyObject *iterator = NULL; + PyObject *item = NULL; + DIST_POINT *dp = NULL; + GENERAL_NAME *gn = NULL; + Py_ssize_t urilen; + char *uri; + int ok = 0; + + ENTERING(x509_object_set_crldp); + + if (!PyArg_ParseTuple(args, "O", &fullNames)) + goto error; + + if 
((ext = sk_DIST_POINT_new_null()) == NULL || + (dp = DIST_POINT_new()) == NULL || + (dp->distpoint = DIST_POINT_NAME_new()) == NULL || + (dp->distpoint->name.fullname = sk_GENERAL_NAME_new_null()) == NULL) + lose_no_memory(); + + dp->distpoint->type = 0; + + if ((iterator = PyObject_GetIter(fullNames)) == NULL) + goto error; + + while ((item = PyIter_Next(iterator)) != NULL) { + + if (PyString_AsStringAndSize(item, &uri, &urilen) < 0) + goto error; + + if ((gn = GENERAL_NAME_new()) == NULL || + (gn->d.uniformResourceIdentifier = ASN1_IA5STRING_new()) == NULL || + !ASN1_OCTET_STRING_set(gn->d.uniformResourceIdentifier, (unsigned char *) uri, urilen)) + lose_no_memory(); + + gn->type = GEN_URI; + + if (!sk_GENERAL_NAME_push(dp->distpoint->name.fullname, gn)) + lose_no_memory(); + + gn = NULL; + Py_XDECREF(item); + item = NULL; + } + + Py_XDECREF(iterator); + iterator = NULL; + + if (!sk_DIST_POINT_push(ext, dp)) + lose_no_memory(); + + dp = NULL; + + if (!X509_add1_ext_i2d(self->x509, NID_crl_distribution_points, ext, 0, X509V3_ADD_REPLACE)) + lose_openssl_error("Couldn't add CRLDP extension to certificate"); + + ok = 1; + + error: + sk_DIST_POINT_pop_free(ext, DIST_POINT_free); + DIST_POINT_free(dp); + GENERAL_NAME_free(gn); + Py_XDECREF(item); + Py_XDECREF(iterator); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +static char x509_object_get_certificate_policies__doc__[] = + "Get Certificate Policies values for this certificate.\n" + "\n" + "If this certificate has no Certificate Policies extension, this method\n" + "returns None.\n" + "\n" + "Otherwise, this method returns a sequence of Object Identifiers.\n" + "\n" + "Policy qualifiers, if any, are ignored.\n" + ; + +static PyObject * +x509_object_get_certificate_policies(x509_object *self) +{ + CERTIFICATEPOLICIES *ext = NULL; + PyObject *result = NULL; + PyObject *obj; + int i; + + ENTERING(x509_object_get_certificate_policies); + + if ((ext = X509_get_ext_d2i(self->x509, NID_certificate_policies, 
NULL, NULL)) == NULL) + Py_RETURN_NONE; + + if (((result = PyTuple_New(sk_POLICYINFO_num(ext))) == NULL)) + goto error; + + for (i = 0; i < sk_POLICYINFO_num(ext); i++) { + POLICYINFO *p = sk_POLICYINFO_value(ext, i); + + if ((obj = ASN1_OBJECT_to_PyString(p->policyid)) == NULL) + goto error; + + PyTuple_SET_ITEM(result, i, obj); + } + + sk_POLICYINFO_pop_free(ext, POLICYINFO_free); + return result; + + error: + sk_POLICYINFO_pop_free(ext, POLICYINFO_free); + Py_XDECREF(result); + return NULL; +} + +static char x509_object_set_certificate_policies__doc__[] = + "Set Certificate Policies for this certificate.\n" + "\n" + "Argument is a iterable which returns policy OIDs.\n" + "\n" + "Policy qualifier are not supported.\n" + "\n" + "The extension will be marked as critical, since there's not much point\n" + "in using this extension without making it critical.\n" + ; + +static PyObject * +x509_object_set_certificate_policies(x509_object *self, PyObject *args) +{ + CERTIFICATEPOLICIES *ext = NULL; + PyObject *policies = NULL; + PyObject *iterator = NULL; + POLICYINFO *pol = NULL; + PyObject *item = NULL; + const char *oid; + int ok = 0; + + ENTERING(x509_object_set_certificate_policies); + + if (!PyArg_ParseTuple(args, "O", &policies)) + goto error; + + if ((ext = sk_POLICYINFO_new_null()) == NULL) + lose_no_memory(); + + if ((iterator = PyObject_GetIter(policies)) == NULL) + goto error; + + while ((item = PyIter_Next(iterator)) != NULL) { + + if ((oid = PyString_AsString(item)) == NULL) + goto error; + + if ((pol = POLICYINFO_new()) == NULL) + lose_no_memory(); + + if ((pol->policyid = OBJ_txt2obj(oid, 1)) == NULL) + lose("Couldn't parse OID"); + + if (!sk_POLICYINFO_push(ext, pol)) + lose_no_memory(); + + pol = NULL; + Py_XDECREF(item); + item = NULL; + } + + Py_XDECREF(iterator); + iterator = NULL; + + if (!X509_add1_ext_i2d(self->x509, NID_certificate_policies, ext, 1, X509V3_ADD_REPLACE)) + lose_openssl_error("Couldn't add CERTIFICATE_POLICIES extension to 
certificate"); + + ok = 1; + + error: + POLICYINFO_free(pol); + sk_POLICYINFO_pop_free(ext, POLICYINFO_free); + Py_XDECREF(item); + Py_XDECREF(iterator); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +/* + * May want EKU handlers eventually, skip for now. + */ + +static char x509_object_pprint__doc__[] = + "Return a pretty-printed rendition of this certificate.\n" + ; + +static PyObject * +x509_object_pprint(x509_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(x509_object_pprint); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!X509_print(bio, self->x509)) + lose_openssl_error("Unable to pretty-print certificate"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static struct PyMethodDef x509_object_methods[] = { + Define_Method(pemWrite, x509_object_pem_write, METH_NOARGS), + Define_Method(derWrite, x509_object_der_write, METH_NOARGS), + Define_Method(sign, x509_object_sign, METH_VARARGS), + Define_Method(getPublicKey, x509_object_get_public_key, METH_NOARGS), + Define_Method(setPublicKey, x509_object_set_public_key, METH_VARARGS), + Define_Method(getVersion, x509_object_get_version, METH_NOARGS), + Define_Method(setVersion, x509_object_set_version, METH_VARARGS), + Define_Method(getSerial, x509_object_get_serial, METH_NOARGS), + Define_Method(setSerial, x509_object_set_serial, METH_VARARGS), + Define_Method(getIssuer, x509_object_get_issuer, METH_VARARGS), + Define_Method(setIssuer, x509_object_set_issuer, METH_VARARGS), + Define_Method(getSubject, x509_object_get_subject, METH_VARARGS), + Define_Method(setSubject, x509_object_set_subject, METH_VARARGS), + Define_Method(getNotBefore, x509_object_get_not_before, METH_NOARGS), + Define_Method(getNotAfter, x509_object_get_not_after, METH_NOARGS), + Define_Method(setNotAfter, x509_object_set_not_after, METH_VARARGS), + Define_Method(setNotBefore, x509_object_set_not_before, METH_VARARGS), 
+ Define_Method(clearExtensions, x509_object_clear_extensions, METH_NOARGS), + Define_Method(pprint, x509_object_pprint, METH_NOARGS), + Define_Method(getSKI, x509_object_get_ski, METH_NOARGS), + Define_Method(setSKI, x509_object_set_ski, METH_VARARGS), + Define_Method(getAKI, x509_object_get_aki, METH_NOARGS), + Define_Method(setAKI, x509_object_set_aki, METH_VARARGS), + Define_Method(getKeyUsage, x509_object_get_key_usage, METH_NOARGS), + Define_Method(setKeyUsage, x509_object_set_key_usage, METH_VARARGS), + Define_Method(getEKU, x509_object_get_eku, METH_NOARGS), + Define_Method(setEKU, x509_object_set_eku, METH_VARARGS), + Define_Method(getRFC3779, x509_object_get_rfc3779, METH_NOARGS), + Define_Method(setRFC3779, x509_object_set_rfc3779, METH_KEYWORDS), + Define_Method(getBasicConstraints, x509_object_get_basic_constraints, METH_NOARGS), + Define_Method(setBasicConstraints, x509_object_set_basic_constraints, METH_VARARGS), + Define_Method(getSIA, x509_object_get_sia, METH_NOARGS), + Define_Method(setSIA, x509_object_set_sia, METH_KEYWORDS), + Define_Method(getAIA, x509_object_get_aia, METH_NOARGS), + Define_Method(setAIA, x509_object_set_aia, METH_VARARGS), + Define_Method(getCRLDP, x509_object_get_crldp, METH_NOARGS), + Define_Method(setCRLDP, x509_object_set_crldp, METH_VARARGS), + Define_Method(getCertificatePolicies, x509_object_get_certificate_policies, METH_NOARGS), + Define_Method(setCertificatePolicies, x509_object_set_certificate_policies, METH_VARARGS), + Define_Class_Method(pemRead, x509_object_pem_read, METH_VARARGS), + Define_Class_Method(pemReadFile, x509_object_pem_read_file, METH_VARARGS), + Define_Class_Method(derRead, x509_object_der_read, METH_VARARGS), + Define_Class_Method(derReadFile, x509_object_der_read_file, METH_VARARGS), + {NULL} +}; + +static char POW_X509_Type__doc__[] = + "This class represents an X.509 certificate.\n" + "\n" + LAME_DISCLAIMER_IN_ALL_CLASS_DOCUMENTATION + ; + +static PyTypeObject POW_X509_Type = { + 
PyObject_HEAD_INIT(0) + 0, /* ob_size */ + "rpki.POW.X509", /* tp_name */ + sizeof(x509_object), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)x509_object_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + POW_X509_Type__doc__, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + x509_object_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + x509_object_new, /* tp_new */ +}; + + + +/* + * X509Store object. 
+ */ + +static PyObject * +x509_store_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds) +{ + x509_store_object *self = NULL; + + ENTERING(x509_store_object_new); + + if ((self = (x509_store_object *) type->tp_alloc(type, 0)) == NULL) + goto error; + + if ((self->store = X509_STORE_new()) == NULL) + lose_no_memory(); + + self->ctxclass = (PyObject *) &POW_X509StoreCTX_Type; + Py_XINCREF(self->ctxclass); + return (PyObject *) self; + + error: + Py_XDECREF(self); + return NULL; +} + +static void +x509_store_object_dealloc(x509_store_object *self) +{ + ENTERING(x509_store_object_dealloc); + X509_STORE_free(self->store); + Py_XDECREF(self->ctxclass); + self->ob_type->tp_free((PyObject*) self); +} + +static char x509_store_object_add_trust__doc__[] = + "Add a trusted certificate to this certificate store object.\n" + "\n" + "The \"certificate\" parameter should be an instance of the X509 class.\n" + ; + +static PyObject * +x509_store_object_add_trust(x509_store_object *self, PyObject *args) +{ + x509_object *x509 = NULL; + + ENTERING(x509_store_object_add_trust); + + if (!PyArg_ParseTuple(args, "O!", &POW_X509_Type, &x509)) + goto error; + + X509_STORE_add_cert(self->store, x509->x509); + + Py_RETURN_NONE; + + error: + + return NULL; +} + +static char x509_store_object_add_crl__doc__[] = + "Add a CRL to this certificate store object.\n" + "\n" + "The \"crl\" parameter should be an instance of the CRL class.\n" + ; + +static PyObject * +x509_store_object_add_crl(x509_store_object *self, PyObject *args) +{ + crl_object *crl = NULL; + + ENTERING(x509_store_object_add_crl); + + if (!PyArg_ParseTuple(args, "O!", &POW_CRL_Type, &crl)) + goto error; + + X509_STORE_add_crl(self->store, crl->crl); + + Py_RETURN_NONE; + + error: + + return NULL; +} + +static char x509_store_object_set_flags__doc__[] = + "Set validation flags for this X509Store.\n" + "\n" + "Argument is an integer containing bit flags to set.\n" + ; + +static PyObject * 
+x509_store_object_set_flags (x509_store_object *self, PyObject *args) +{ + unsigned long flags; + + if (!PyArg_ParseTuple(args, "k", &flags)) + goto error; + + if (!X509_VERIFY_PARAM_set_flags(self->store->param, flags)) + lose_openssl_error("X509_VERIFY_PARAM_set_flags() failed"); + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char x509_store_object_clear_flags__doc__[] = + "Clear validation flags for this X509Store.\n" + "\n" + "Argument is an integer containing bit flags to clear.\n" + ; + +static PyObject * +x509_store_object_clear_flags (x509_store_object *self, PyObject *args) +{ + unsigned long flags; + + if (!PyArg_ParseTuple(args, "k", &flags)) + goto error; + + if (!X509_VERIFY_PARAM_clear_flags(self->store->param, flags)) + lose_openssl_error("X509_VERIFY_PARAM_clear_flags() failed"); + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char x509_store_object_set_context_class__doc__[] = + "Set validation context class factory for this X509Store.\n" + "\n" + "This must be a callable object which takes one argument, an X509Store,\n" + "and returns a subclass of X509StoreCTX. The callable can be a class\n" + "object but need not be, so long as calling it returns an instance of an\n" + "appropriate class. 
The default is X509StoreCTX.\n" + ; + +static PyObject * +x509_store_object_set_context_class (x509_store_object *self, PyObject *args) +{ + PyObject *ctxclass = (PyObject *) &POW_X509StoreCTX_Type; + + if (!PyArg_ParseTuple(args, "|O", &ctxclass)) + goto error; + + if (!PyCallable_Check(ctxclass)) + lose("Context class must be callable"); + + Py_XDECREF(self->ctxclass); + self->ctxclass = ctxclass; + Py_XINCREF(self->ctxclass); + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char x509_store_object_verify__doc__[] = + "Verify an X509 certificate object using this certificate store.\n" + "\n" + "Optional second argument is an iterable that supplies untrusted certificates\n" + "to be considered when building a chain to the trust anchor.\n" + "\n" + "This method returns an instance of the store's verification context class.\n" + ; + +static PyObject * +x509_store_object_verify(x509_store_object *self, PyObject *args) +{ + x509_store_ctx_object *ctx = NULL; + STACK_OF(X509) *stack = NULL; + x509_object *x509 = NULL; + PyObject *chain = Py_None; + int ok; + + if (!PyArg_ParseTuple(args, "O!|O", &POW_X509_Type, &x509, &chain)) + goto error; + + if ((ctx = (x509_store_ctx_object *) PyObject_CallFunctionObjArgs(self->ctxclass, self, NULL)) == NULL) + goto error; + + if (!POW_X509StoreCTX_Check(ctx)) + lose_type_error("Returned context is not a X509StoreCTX"); + + if (ctx->ctx == NULL) + lose("Uninitialized X509StoreCTX"); + + if (chain != Py_None && (stack = x509_helper_iterable_to_stack(chain)) == NULL) + goto error; + + Py_XINCREF(x509); + Py_XINCREF(chain); + X509_STORE_CTX_set_cert(ctx->ctx, x509->x509); + X509_STORE_CTX_set_chain(ctx->ctx, stack); + + ok = X509_verify_cert(ctx->ctx); + + X509_STORE_CTX_set_chain(ctx->ctx, NULL); + X509_STORE_CTX_set_cert(ctx->ctx, NULL); + Py_XDECREF(chain); + Py_XDECREF(x509); + + sk_X509_free(stack); + + if (PyErr_Occurred()) + goto error; + + if (ok < 0) + lose_openssl_error("X509_verify_cert() raised an exception"); + + 
return (PyObject *) ctx; + + error: /* fall through */ + Py_XDECREF(ctx); + return NULL; +} + +static struct PyMethodDef x509_store_object_methods[] = { + Define_Method(addTrust, x509_store_object_add_trust, METH_VARARGS), + Define_Method(addCrl, x509_store_object_add_crl, METH_VARARGS), + Define_Method(setContextClass,x509_store_object_set_context_class, METH_VARARGS), + Define_Method(setFlags, x509_store_object_set_flags, METH_VARARGS), + Define_Method(clearFlags, x509_store_object_clear_flags, METH_VARARGS), + Define_Method(verify, x509_store_object_verify, METH_VARARGS), + {NULL} +}; + +static char POW_X509Store_Type__doc__[] = + "This class holds the OpenSSL certificate store objects used in CMS\n" + "and certificate verification.\n" + "\n" + LAME_DISCLAIMER_IN_ALL_CLASS_DOCUMENTATION + ; + +static PyTypeObject POW_X509Store_Type = { + PyObject_HEAD_INIT(0) + 0, /* ob_size */ + "rpki.POW.X509Store", /* tp_name */ + sizeof(x509_store_object), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)x509_store_object_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + POW_X509Store_Type__doc__, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + x509_store_object_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + x509_store_object_new, /* tp_new */ +}; + + + +/* + * X509StoreCTX object. 
+ */ + +static int +x509_store_ctx_object_verify_cb(int ok, X509_STORE_CTX *ctx) +{ + static char method_name[] = "verify_callback"; + PyObject *self = X509_STORE_CTX_get_ex_data(ctx, x509_store_ctx_ex_data_idx); + PyObject *result = NULL; + + if (self == NULL || !PyObject_HasAttrString(self, method_name)) + return ok; + + if ((result = PyObject_CallMethod(self, method_name, "i", ok)) == NULL) + return -1; + + ok = PyObject_IsTrue(result); + Py_XDECREF(result); + return ok; +} + +static PyObject * +x509_store_ctx_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds) +{ + x509_store_ctx_object *self = NULL; + + ENTERING(x509_store_ctx_object_new); + + if ((self = (x509_store_ctx_object *) type->tp_alloc(type, 0)) == NULL) + goto error; + + self->ctx = NULL; + self->store = NULL; + return (PyObject *) self; + + error: + Py_XDECREF(self); + return NULL; +} + +static int +x509_store_ctx_object_init(x509_store_ctx_object *self, PyObject *args, GCC_UNUSED PyObject *kwds) +{ + x509_store_object *store = NULL; + + if (!PyArg_ParseTuple(args, "|O!", &POW_X509Store_Type, &store)) + goto error; + + if ((self->ctx = X509_STORE_CTX_new()) == NULL) + lose_no_memory(); + + if (!X509_STORE_CTX_init(self->ctx, store ? 
store->store : NULL, NULL, NULL)) + lose_openssl_error("Couldn't initialize X509_STORE_CTX"); + + if (!X509_STORE_CTX_set_ex_data(self->ctx, x509_store_ctx_ex_data_idx, self)) + lose_openssl_error("Couldn't set X509_STORE_CTX ex_data"); + + Py_XDECREF(self->store); + self->store = store; + Py_XINCREF(self->store); + + X509_VERIFY_PARAM_set_flags(self->ctx->param, X509_V_FLAG_X509_STRICT); + X509_STORE_CTX_set_verify_cb(self->ctx, x509_store_ctx_object_verify_cb); + return 0; + + error: + return -1; +} + +static void +x509_store_ctx_object_dealloc(x509_store_ctx_object *self) +{ + ENTERING(x509_store_ctx_object_dealloc); + X509_STORE_CTX_free(self->ctx); + Py_XDECREF(self->store); + self->ob_type->tp_free((PyObject*) self); +} + +static PyObject * +x509_store_ctx_object_get_store (x509_store_ctx_object *self, GCC_UNUSED void *closure) +{ + return Py_BuildValue("O", self->store == NULL ? Py_None : (PyObject *) self->store); +} + +static char x509_store_ctx_object_get_error__doc__[] = + "Extract verification error code from this X509StoreCTX.\n" + ; + +static PyObject* +x509_store_ctx_object_get_error (x509_store_ctx_object *self) +{ + return Py_BuildValue("i", X509_STORE_CTX_get_error(self->ctx)); +} + +static char x509_store_ctx_object_get_error_string__doc__[] = + "Extract verification error code from this X509StoreCTX.\n" + ; + +static PyObject* +x509_store_ctx_object_get_error_string (x509_store_ctx_object *self) +{ + return Py_BuildValue("s", X509_verify_cert_error_string(X509_STORE_CTX_get_error(self->ctx))); +} + +static char x509_store_ctx_object_get_error_depth__doc__[] = + "Extract verification error depth from this X509StoreCTX.\n" + ; + +static PyObject* +x509_store_ctx_object_get_error_depth (x509_store_ctx_object *self) +{ + return Py_BuildValue("i", X509_STORE_CTX_get_error_depth(self->ctx)); +} + +static char x509_store_ctx_object_get_current_certificate__doc__[] = + "Extract the certificate which caused the current validation error,\n" + "or None if 
no certificate is relevant.\n" + ; + +static PyObject * +x509_store_ctx_object_get_current_certificate (x509_store_ctx_object *self) +{ + X509 *x = X509_STORE_CTX_get_current_cert(self->ctx); + x509_object *obj = NULL; + + if (x == NULL) + Py_RETURN_NONE; + + if ((x = X509_dup(x)) == NULL) + lose_no_memory(); + + if ((obj = x509_object_new_helper(NULL, x)) == NULL) + goto error; + + return (PyObject *) obj; + + error: + Py_XDECREF(obj); + X509_free(x); + return NULL; +} + +static char x509_store_ctx_object_get_chain__doc__[] = + "Extract certificate chain from X509StoreCTX. If validation\n" + "completed succesfully, this is the complete validation chain;\n" + "otherwise, the returned chain may be invalid or incomplete.\n" + ; + +static PyObject * +x509_store_ctx_object_get_chain (x509_store_ctx_object *self) +{ + STACK_OF(X509) *chain = NULL; + PyObject *result = NULL; + + if ((chain = X509_STORE_CTX_get1_chain(self->ctx)) == NULL) + lose_openssl_error("X509_STORE_CTX_get1_chain() failed"); + + result = stack_to_tuple_helper(CHECKED_PTR_OF(STACK_OF(X509), chain), + stack_to_tuple_helper_get_x509); + + error: /* fall through */ + sk_X509_pop_free(chain, X509_free); + return result; +} + +/* + * For some reason, there are no methods for the policy mechanism for + * X509_STORE, only for X509_STORE_CTX. Presumably we can whack these + * anyway using the X509_VERIFY_PARAM_*() calls, the question is + * whether there's a good reason for this omission. + * + * For the moment, I'm just going to leave the policy stuff + * unimplemented, until we figure out whether it belongs in X509Store + * or X509StoreCTX. 
+ */ + +#define IMPLEMENT_X509StoreCTX_POLICY 0 + +#if IMPLEMENT_X509StoreCTX_POLICY + +static char x509_store_ctx_object_set_policy__doc__[] = + "Set this X509StoreCTX to require a specified certificate policy.\n" + ; + +static PyObject* +x509_store_ctx_object_set_policy (x509_store_ctx_object *self, PyObject *args) +{ + ASN1_OBJECT *policy = NULL; + char *oid = NULL; + + if (!PyArg_ParseTuple(args, "s", &oid)) + goto error; + + if ((policy = OBJ_txt2obj(oid, 1)) == NULL) + lose_openssl_error("Couldn't parse OID"); + + if (!X509_VERIFY_PARAM_set_flags(self->ctx->param, X509_V_FLAG_POLICY_CHECK | X509_V_FLAG_EXPLICIT_POLICY)) + lose_openssl_error("Couldn't set policy flags"); + + if (!X509_VERIFY_PARAM_add0_policy(self->ctx->param, policy)) + lose_openssl_error("Couldn't set policy"); + + Py_RETURN_NONE; + + error: + ASN1_OBJECT_free(policy); + return NULL; +} + +#endif /* IMPLEMENT_X509StoreCTX_POLICY */ + +/* + * See (omnibus) man page for X509_STORE_CTX_get_error() for other + * query methods we might want to expose. Someday we might want to + * support X509_V_FLAG_USE_CHECK_TIME too. + */ + +static struct PyMethodDef x509_store_ctx_object_methods[] = { + Define_Method(getError, x509_store_ctx_object_get_error, METH_NOARGS), + Define_Method(getErrorString, x509_store_ctx_object_get_error_string, METH_NOARGS), + Define_Method(getErrorDepth, x509_store_ctx_object_get_error_depth, METH_NOARGS), + Define_Method(getCurrentCertificate, x509_store_ctx_object_get_current_certificate, METH_NOARGS), + Define_Method(getChain, x509_store_ctx_object_get_chain, METH_NOARGS), + +#if IMPLEMENT_X509StoreCTX_POLICY + Define_Method(setPolicy, x509_store_ctx_object_set_policy, METH_VARARGS), +#endif + {NULL} +}; + +static PyGetSetDef x509_store_ctx_object_getsetters[] = { + {"store", (getter) x509_store_ctx_object_get_store}, + {NULL} +}; + +static char POW_X509StoreCTX_Type__doc__[] = + "This class holds the state of an OpenSSL certificate verification\n" + "operation. 
Ordinarily, the user will never have cause to instantiate\n" + "this class directly, instead, an object of this class will be returned\n" + "by X509Store.verify().\n" + "\n" + "If you need to see OpenSSL's verification callbacks, you can do so\n" + "by subclassing X509StoreCTX and attaching your subclass to an X509Store\n" + "object using X509Store.setContextClass(). Your subclass should provide\n" + "a .verify_callback() method, wich should expect to receive one argument:\n" + "the integer \"ok\" value passed by OpenSSL's verification callbacks.\n" + "\n" + "The return value from your .verify_callback() method will be is interpreted\n" + "as a boolean value: anything which evaluates to True will be result in a\n" + "return value of 1 to OpenSSL, while anything which evaluates to False will\n" + "result in a return value of 0 to OpenSSL.\n" + ; + +static PyTypeObject POW_X509StoreCTX_Type = { + PyObject_HEAD_INIT(0) + 0, /* ob_size */ + "rpki.POW.X509StoreCTX", /* tp_name */ + sizeof(x509_store_ctx_object), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)x509_store_ctx_object_dealloc,/* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + POW_X509StoreCTX_Type__doc__, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + x509_store_ctx_object_methods, /* tp_methods */ + 0, /* tp_members */ + x509_store_ctx_object_getsetters, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + (initproc) x509_store_ctx_object_init, /* tp_init */ + 0, /* tp_alloc */ + 
x509_store_ctx_object_new, /* tp_new */ +}; + + + +/* + * CRL object. + */ + +static crl_object * +crl_object_new_helper(PyTypeObject *type, X509_CRL *crl) +{ + crl_object *self = NULL; + + if (type == NULL) + type = &POW_CRL_Type; + + if ((self = (crl_object *) type->tp_alloc(type, 0)) == NULL) + return NULL; + + self->crl = crl; + return self; +} + +static PyObject * +crl_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds) +{ + crl_object *self = NULL; + X509_CRL *crl = NULL; + + ENTERING(crl_object_new); + + if ((crl = X509_CRL_new()) == NULL) + lose_no_memory(); + + if ((self = crl_object_new_helper(type, crl)) == NULL) + goto error; + + return (PyObject *) self; + + error: + X509_CRL_free(crl); + return NULL; +} + +static void +crl_object_dealloc(crl_object *self) +{ + ENTERING(crl_object_dealloc); + X509_CRL_free(self->crl); + self->ob_type->tp_free((PyObject*) self); +} + +static PyObject * +crl_object_pem_read_helper(PyTypeObject *type, BIO *bio) +{ + crl_object *self; + + ENTERING(crl_object_pem_read_helper); + + if ((self = (crl_object *) crl_object_new(type, NULL, NULL)) == NULL) + goto error; + + if (!PEM_read_bio_X509_CRL(bio, &self->crl, NULL, NULL)) + lose_openssl_error("Couldn't PEM encoded load CRL"); + + return (PyObject *) self; + + error: + Py_XDECREF(self); + return NULL; +} + +static PyObject * +crl_object_der_read_helper(PyTypeObject *type, BIO *bio) +{ + crl_object *self; + + ENTERING(crl_object_der_read_helper); + + if ((self = (crl_object *) crl_object_new(type, NULL, NULL)) == NULL) + goto error; + + if (!d2i_X509_CRL_bio(bio, &self->crl)) + lose_openssl_error("Couldn't load DER encoded CRL"); + + return (PyObject *) self; + + error: + Py_XDECREF(self); + return NULL; +} + +static char crl_object_pem_read__doc__[] = + "Read a PEM-encoded CRL object from a string.\n" + ; + +static PyObject * +crl_object_pem_read(PyTypeObject *type, PyObject *args) +{ + ENTERING(crl_object_pem_read); + return 
read_from_string_helper(crl_object_pem_read_helper, type, args); +} + +static char crl_object_pem_read_file__doc__[] = + "Read a PEM-encoded CRL object from a file.\n" + ; + +static PyObject * +crl_object_pem_read_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(crl_object_pem_read_file); + return read_from_file_helper(crl_object_pem_read_helper, type, args); +} + +static char crl_object_der_read__doc__[] = + "Read a DER-encoded CRL object from a string.\n" + ; + +static PyObject * +crl_object_der_read(PyTypeObject *type, PyObject *args) +{ + ENTERING(crl_object_der_read); + return read_from_string_helper(crl_object_der_read_helper, type, args); +} + +static char crl_object_der_read_file__doc__[] = + "Read a DER-encoded CRL object from a file.\n" + ; + +static PyObject * +crl_object_der_read_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(crl_object_der_read_file); + return read_from_file_helper(crl_object_der_read_helper, type, args); +} + +static X509_EXTENSIONS ** +crl_object_extension_helper(crl_object *self) +{ + if (self && self->crl && self->crl->crl) + return &self->crl->crl->extensions; + PyErr_SetString(PyExc_ValueError, "Can't find X509_EXTENSIONS in CRL object"); + return NULL; +} + +static char crl_object_get_version__doc__[] = + "return the version number of this CRL.\n" + ; + +static PyObject * +crl_object_get_version(crl_object *self) +{ + ENTERING(crl_object_get_version); + return Py_BuildValue("l", X509_CRL_get_version(self->crl)); +} + +static char crl_object_set_version__doc__[] = + "Set the version number of this CRL.\n" + "\n" + "The \"version\" parameter should be a positive integer.\n" + ; + +static PyObject * +crl_object_set_version(crl_object *self, PyObject *args) +{ + long version = 0; + + ENTERING(crl_object_set_version); + + if (!PyArg_ParseTuple(args, "i", &version)) + goto error; + + if (!X509_CRL_set_version(self->crl, version)) + lose_no_memory(); + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char 
crl_object_get_issuer__doc__[] = + "Return issuer name of this CRL.\n" + "\n" + "See the \"getIssuer()\" method of the X509 class for more details.\n" + ; + +static PyObject * +crl_object_get_issuer(crl_object *self, PyObject *args) +{ + PyObject *result = NULL; + int format = OIDNAME_FORMAT; + + ENTERING(crl_object_get_issuer); + + if (!PyArg_ParseTuple(args, "|i", &format)) + goto error; + + result = x509_object_helper_get_name(X509_CRL_get_issuer(self->crl), format); + + error: /* Fall through */ + return result; +} + +static char crl_object_set_issuer__doc__[] = + "Set this CRL's issuer name.\n" + "\n" + "See the \"setIssuer()\" method of the X509 class for details.\n" + ; + +static PyObject * +crl_object_set_issuer(crl_object *self, PyObject *args) +{ + PyObject *name_sequence = NULL; + X509_NAME *name = NULL; + + ENTERING(crl_object_set_issuer); + + if (!PyArg_ParseTuple(args, "O", &name_sequence)) + goto error; + + if (!PySequence_Check(name_sequence)) + lose_type_error("Inapropriate type"); + + if ((name = x509_object_helper_set_name(name_sequence)) == NULL) + goto error; + + if (!X509_CRL_set_issuer_name(self->crl, name)) + lose_openssl_error("Unable to set issuer name"); + + X509_NAME_free(name); + + Py_RETURN_NONE; + + error: + X509_NAME_free(name); + return NULL; +} + +/* + * NB: OpenSSL is confused about the name of this field, probably for + * backwards compatibility with some ancient mistake. What RFC 5280 + * calls "thisUpdate", OpenSSL calls "lastUpdate". 
+ */ + +static char crl_object_set_this_update__doc__[] = + "Set this CRL's \"thisUpdate\" value.\n" + "\n" + "The \"time\" parameter should be a datetime object.\n" + ; + +static PyObject * +crl_object_set_this_update (crl_object *self, PyObject *args) +{ + PyObject *o = NULL; + ASN1_TIME *t = NULL; + + ENTERING(crl_object_set_this_update); + + if (!PyArg_ParseTuple(args, "O", &o)) + goto error; + + if ((t = Python_to_ASN1_TIME(o, 1)) == NULL) + lose("Couldn't convert thisUpdate string"); + + if (!X509_CRL_set_lastUpdate(self->crl, t)) /* sic */ + lose("Couldn't set thisUpdate"); + + ASN1_TIME_free(t); + Py_RETURN_NONE; + + error: + ASN1_TIME_free(t); + return NULL; +} + +static char crl_object_get_this_update__doc__[] = + "Return this CRL's \"thisUpdate\" value as a datetime.\n" + ; + +static PyObject * +crl_object_get_this_update (crl_object *self) +{ + ENTERING(crl_object_get_this_update); + return ASN1_TIME_to_Python(X509_CRL_get_lastUpdate(self->crl)); /* sic */ +} + +static char crl_object_set_next_update__doc__[] = + "Set this CRL's \"nextUpdate\" value.\n" + "\n" + "The \"time\" parameter should be a datetime object.\n" + ; + +static PyObject * +crl_object_set_next_update (crl_object *self, PyObject *args) +{ + PyObject *o = NULL; + ASN1_TIME *t = NULL; + + ENTERING(crl_object_set_next_update); + + if (!PyArg_ParseTuple(args, "O", &o)) + goto error; + + if ((t = Python_to_ASN1_TIME(o, 1)) == NULL) + lose("Couldn't parse nextUpdate string"); + + if (!X509_CRL_set_nextUpdate(self->crl, t)) + lose("Couldn't set nextUpdate"); + + ASN1_TIME_free(t); + Py_RETURN_NONE; + + error: + ASN1_TIME_free(t); + return NULL; +} + +static char crl_object_get_next_update__doc__[] = + "Returns this CRL's \"nextUpdate\" value as a datetime.\n" + ; + +static PyObject * +crl_object_get_next_update (crl_object *self) +{ + ENTERING(crl_object_get_next_update); + return ASN1_TIME_to_Python(X509_CRL_get_nextUpdate(self->crl)); +} + +static char crl_object_add_revocations__doc__[] = 
+ "This method adds a collection of revocations to this CRL.\n" + "\n" + "The \"iterable\" parameter should be an iterable object which returns\n" + "two-element sequences. The first element of each pair should be the\n" + "revoked serial number (an integer), the second element should be the\n" + "revocation date (a datetime object).\n" + ; + +static PyObject * +crl_object_add_revocations(crl_object *self, PyObject *args) +{ + PyObject *iterable = NULL; + PyObject *iterator = NULL; + PyObject *item = NULL; + PyObject *fast = NULL; + X509_REVOKED *revoked = NULL; + ASN1_INTEGER *serial = NULL; + ASN1_TIME *date = NULL; + int ok = 0; + + ENTERING(crl_object_add_revocations); + + if (!PyArg_ParseTuple(args, "O", &iterable) || + (iterator = PyObject_GetIter(iterable)) == NULL) + goto error; + + while ((item = PyIter_Next(iterator)) != NULL) { + + if ((fast = PySequence_Fast(item, "Revocation entry must be a sequence")) == NULL) + goto error; + + if (PySequence_Fast_GET_SIZE(fast) != 2) + lose_type_error("Revocation entry must be two-element sequence"); + + if ((serial = PyLong_to_ASN1_INTEGER(PySequence_Fast_GET_ITEM(fast, 0))) == NULL || + (date = Python_to_ASN1_TIME(PySequence_Fast_GET_ITEM(fast, 1), 1)) == NULL) + goto error; + + if ((revoked = X509_REVOKED_new()) == NULL || + !X509_REVOKED_set_serialNumber(revoked, serial) || + !X509_REVOKED_set_revocationDate(revoked, date)) + lose_no_memory(); + + ASN1_INTEGER_free(serial); + serial = NULL; + + ASN1_TIME_free(date); + date = NULL; + + if (!X509_CRL_add0_revoked(self->crl, revoked)) + lose_no_memory(); + + revoked = NULL; + Py_XDECREF(item); + Py_XDECREF(fast); + item = fast = NULL; + } + + if (!X509_CRL_sort(self->crl)) + lose_openssl_error("Couldn't sort CRL"); + + ok = 1; + + error: + Py_XDECREF(iterator); + Py_XDECREF(item); + Py_XDECREF(fast); + X509_REVOKED_free(revoked); + ASN1_INTEGER_free(serial); + ASN1_TIME_free(date); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +static char 
crl_object_get_revoked__doc__[] = + "Return a sequence of two-element tuples representing the sequence of\n" + "revoked certificates listed in this CRL.\n" + "\n" + "The first element of each pair is the serialNumber of the revoked\n" + "certificate, the second element is the revocationDate.\n" + ; + +static PyObject * +crl_object_get_revoked(crl_object *self) +{ + STACK_OF(X509_REVOKED) *revoked = NULL; + X509_REVOKED *r = NULL; + PyObject *result = NULL; + PyObject *item = NULL; + PyObject *serial = NULL; + PyObject *date = NULL; + int i; + + ENTERING(crl_object_get_revoked); + + if ((revoked = X509_CRL_get_REVOKED(self->crl)) == NULL) + lose("Inexplicable NULL revocation list pointer"); + + if ((result = PyTuple_New(sk_X509_REVOKED_num(revoked))) == NULL) + goto error; + + for (i = 0; i < sk_X509_REVOKED_num(revoked); i++) { + r = sk_X509_REVOKED_value(revoked, i); + + if ((serial = ASN1_INTEGER_to_PyLong(r->serialNumber)) == NULL || + (date = ASN1_TIME_to_Python(r->revocationDate)) == NULL || + (item = Py_BuildValue("(NN)", serial, date)) == NULL) + goto error; + + PyTuple_SET_ITEM(result, i, item); + item = serial = date = NULL; + } + + return result; + + error: + Py_XDECREF(result); + Py_XDECREF(item); + Py_XDECREF(serial); + Py_XDECREF(date); + return NULL; +} + +static char crl_object_clear_extensions__doc__[] = + "Clear all extensions attached to this CRL.\n" + ; + +static PyObject * +crl_object_clear_extensions(crl_object *self) +{ + X509_EXTENSION *ext; + + ENTERING(crl_object_clear_extensions); + + while ((ext = X509_CRL_delete_ext(self->crl, 0)) != NULL) + X509_EXTENSION_free(ext); + + Py_RETURN_NONE; +} + +static char crl_object_sign__doc__[] = + "Sign this CRL with a private key.\n" + "\n" + "The \"key\" parameter should be an instance of the Asymmetric class,\n" + "containing a private key.\n" + "\n" + "The optional \"digest\" parameter indicates which digest to compute and\n" + "sign, and should be one of the following:\n" + "\n" + "* MD5_DIGEST\n" 
+ "* SHA_DIGEST\n" + "* SHA1_DIGEST\n" + "* SHA256_DIGEST\n" + "* SHA384_DIGEST\n" + "* SHA512_DIGEST\n" + "\n" + "The default digest algorithm is SHA-256.\n" + ; + +static PyObject * +crl_object_sign(crl_object *self, PyObject *args) +{ + asymmetric_object *asym; + int digest_type = SHA256_DIGEST; + const EVP_MD *digest_method = NULL; + + ENTERING(crl_object_sign); + + if (!PyArg_ParseTuple(args, "O!|i", &POW_Asymmetric_Type, &asym, &digest_type)) + goto error; + + if ((digest_method = evp_digest_factory(digest_type)) == NULL) + lose("Unsupported digest algorithm"); + + if (!X509_CRL_sign(self->crl, asym->pkey, digest_method)) + lose_openssl_error("Couldn't sign CRL"); + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char crl_object_verify__doc__[] = + "Verify this CRL's signature.\n" + "\n" + "The check is performed using OpenSSL's X509_CRL_verify() function.\n" + "\n" + "The \"key\" parameter should be an instance of the Asymmetric class\n" + "containing the public key of the purported signer.\n" + ; + +static PyObject * +crl_object_verify(crl_object *self, PyObject *args) +{ + asymmetric_object *asym; + + ENTERING(crl_object_verify); + + if (!PyArg_ParseTuple(args, "O!", &POW_Asymmetric_Type, &asym)) + goto error; + + return PyBool_FromLong(X509_CRL_verify(self->crl, asym->pkey)); + + error: + return NULL; +} + +static char crl_object_pem_write__doc__[] = + "Return the PEM encoding of this CRL, as a string.\n" + ; + +static PyObject * +crl_object_pem_write(crl_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(crl_object_pem_write); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!PEM_write_bio_X509_CRL(bio, self->crl)) + lose_openssl_error("Unable to write CRL"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static char crl_object_der_write__doc__[] = + "Return the DER encoding of this CRL, as a string.\n" + ; + +static PyObject * 
+crl_object_der_write(crl_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(crl_object_der_write); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!i2d_X509_CRL_bio(bio, self->crl)) + lose_openssl_error("Unable to write CRL"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static char crl_object_get_aki__doc__[] = + "Return the Authority Key Identifier (AKI) keyid value for\n" + "this CRL, or None if the CRL has no AKI extension\n" + "or has an AKI extension with no keyIdentifier value.\n" + ; + +static PyObject * +crl_object_get_aki(crl_object *self) +{ + return extension_get_aki(crl_object_extension_helper(self)); +} + +static char crl_object_set_aki__doc__[] = + "Set the Authority Key Identifier (AKI) value for this\n" + "CRL. We only support the keyIdentifier method, as that's\n" + "the only form which is legal for RPKI certificates.\n" + ; + +static PyObject * +crl_object_set_aki(crl_object *self, PyObject *args) +{ + return extension_set_aki(crl_object_extension_helper(self), args); +} + +static char crl_object_get_crl_number__doc__[] = + "Return the CRL Number extension value from this CRL, an integer.\n" + ; + +static PyObject * +crl_object_get_crl_number(crl_object *self) +{ + ASN1_INTEGER *ext = X509_CRL_get_ext_d2i(self->crl, NID_crl_number, NULL, NULL); + PyObject *result = NULL; + + ENTERING(crl_object_get_crl_number); + + if (ext == NULL) + Py_RETURN_NONE; + + result = Py_BuildValue("N", ASN1_INTEGER_to_PyLong(ext)); + ASN1_INTEGER_free(ext); + return result; +} + +static char crl_object_set_crl_number__doc__[] = + "Set the CRL Number extension value in this CRL.\n" + "\n" + "The \"number\" parameter should be an integer.\n" + ; + +static PyObject * +crl_object_set_crl_number(crl_object *self, PyObject *args) +{ + ASN1_INTEGER *ext = NULL; + PyObject *crl_number = NULL; + + ENTERING(crl_object_set_crl_number); + + if 
(!PyArg_ParseTuple(args, "O", &crl_number) || + (ext = PyLong_to_ASN1_INTEGER(crl_number)) == NULL) + goto error; + + if (!X509_CRL_add1_ext_i2d(self->crl, NID_crl_number, ext, 0, X509V3_ADD_REPLACE)) + lose_openssl_error("Couldn't add CRL Number extension to CRL"); + + ASN1_INTEGER_free(ext); + Py_RETURN_NONE; + + error: + ASN1_INTEGER_free(ext); + return NULL; +} + +static char crl_object_pprint__doc__[] = + "Return a pretty-printed rendition of this CRL.\n" + ; + +static PyObject * +crl_object_pprint(crl_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(crl_object_pprint); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!X509_CRL_print(bio, self->crl)) + lose_openssl_error("Unable to pretty-print CRL"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static struct PyMethodDef crl_object_methods[] = { + Define_Method(sign, crl_object_sign, METH_VARARGS), + Define_Method(verify, crl_object_verify, METH_VARARGS), + Define_Method(getVersion, crl_object_get_version, METH_NOARGS), + Define_Method(setVersion, crl_object_set_version, METH_VARARGS), + Define_Method(getIssuer, crl_object_get_issuer, METH_VARARGS), + Define_Method(setIssuer, crl_object_set_issuer, METH_VARARGS), + Define_Method(getThisUpdate, crl_object_get_this_update, METH_NOARGS), + Define_Method(setThisUpdate, crl_object_set_this_update, METH_VARARGS), + Define_Method(getNextUpdate, crl_object_get_next_update, METH_NOARGS), + Define_Method(setNextUpdate, crl_object_set_next_update, METH_VARARGS), + Define_Method(getRevoked, crl_object_get_revoked, METH_NOARGS), + Define_Method(addRevocations, crl_object_add_revocations, METH_VARARGS), + Define_Method(clearExtensions, crl_object_clear_extensions, METH_NOARGS), + Define_Method(pemWrite, crl_object_pem_write, METH_NOARGS), + Define_Method(derWrite, crl_object_der_write, METH_NOARGS), + Define_Method(pprint, crl_object_pprint, METH_NOARGS), + 
Define_Method(getAKI, crl_object_get_aki, METH_NOARGS), + Define_Method(setAKI, crl_object_set_aki, METH_VARARGS), + Define_Method(getCRLNumber, crl_object_get_crl_number, METH_NOARGS), + Define_Method(setCRLNumber, crl_object_set_crl_number, METH_VARARGS), + Define_Class_Method(pemRead, crl_object_pem_read, METH_VARARGS), + Define_Class_Method(pemReadFile, crl_object_pem_read_file, METH_VARARGS), + Define_Class_Method(derRead, crl_object_der_read, METH_VARARGS), + Define_Class_Method(derReadFile, crl_object_der_read_file, METH_VARARGS), + {NULL} +}; + +static char POW_CRL_Type__doc__[] = + "Container for OpenSSL's X509 CRL management facilities.\n" + ; + +static PyTypeObject POW_CRL_Type = { + PyObject_HEAD_INIT(0) + 0, /* ob_size */ + "rpki.POW.CRL", /* tp_name */ + sizeof(crl_object), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)crl_object_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + POW_CRL_Type__doc__, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + crl_object_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + crl_object_new, /* tp_new */ +}; + + + +/* + * Asymmetric object. 
+ */ + +static PyObject * +asymmetric_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds) +{ + asymmetric_object *self = NULL; + + ENTERING(asymmetric_object_new); + + if ((self = (asymmetric_object *) type->tp_alloc(type, 0)) == NULL) + goto error; + + self->pkey = NULL; + + return (PyObject *) self; + + error: + + Py_XDECREF(self); + return NULL; +} + +static int +asymmetric_object_init(asymmetric_object *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {NULL}; + + ENTERING(asymmetric_object_init); + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "", kwlist)) + goto error; + + /* + * We used to take arguments to generate an RSA key, but that's + * now in the .generateRSA() class method. + */ + + return 0; + + error: + return -1; +} + +static void +asymmetric_object_dealloc(asymmetric_object *self) +{ + ENTERING(asymmetric_object_dealloc); + EVP_PKEY_free(self->pkey); + self->ob_type->tp_free((PyObject*) self); +} + +static PyObject * +asymmetric_object_pem_read_private_helper(PyTypeObject *type, BIO *bio, char *pass) +{ + asymmetric_object *self = NULL; + + ENTERING(asymmetric_object_pem_read_private_helper); + + if ((self = (asymmetric_object *) asymmetric_object_new(type, NULL, NULL)) == NULL) + goto error; + + if (!PEM_read_bio_PrivateKey(bio, &self->pkey, NULL, pass)) + lose_openssl_error("Couldn't load private key"); + + return (PyObject *) self; + + error: + Py_XDECREF(self); + return NULL; +} + +/* + * We can't use the generic read_from_*_helper() functions here + * because of the optional PEM password, so we just code the two PEM + * read cases for private keys directly. Other than the passphrase, + * code is pretty much the same as the generic functions. + * + * It turns out that OpenSSL is moving away from its old raw PKCS #1.5 + * private key format in favor of PKCS #8. This makes sense, but it + * leaves us with a minor mess to track. 
Many OpenSSL functions that + * originally expected PKCS #1.5 now also accept PKCS #8, so there's + * no tearing hurry about this, but at some point we might want to + * switch to writing PKCS #8. It looks like this would be relatively + * straightforward: see functions i2d_PKCS8PrivateKey_bio() and + * PEM_write_bio_PKCS8PrivateKey(), and note that PKCS #8 supports + * encrypted private keys in DER format, so the DER methods should + * take a passphrase argument as the PEM methods do. + */ + +static char asymmetric_object_pem_read_private__doc__[] = + "Read a PEM-encoded private key from a string.\n" + "\n" + "Optional second argument is a passphrase for the key.\n" + ; + +static PyObject * +asymmetric_object_pem_read_private(PyTypeObject *type, PyObject *args) +{ + PyObject *result = NULL; + char *pass = NULL; + char *src = NULL; + BIO *bio = NULL; + Py_ssize_t len = 0; + + ENTERING(asymmetric_object_pem_read_private); + + if (!PyArg_ParseTuple(args, "s#|s", &src, &len, &pass)) + goto error; + + if ((bio = BIO_new_mem_buf(src, len)) == NULL) + lose_no_memory(); + + result = asymmetric_object_pem_read_private_helper(type, bio, pass); + + error: + BIO_free(bio); + return result; +} + +static char asymmetric_object_pem_read_private_file__doc__[] = + "Read a PEM-encoded private key from a file.\n" + "\n" + "Optional second argument is a passphrase for the key.\n" + ; + +static PyObject * +asymmetric_object_pem_read_private_file(PyTypeObject *type, PyObject *args) +{ + const char *filename = NULL; + PyObject *result = NULL; + char *pass = NULL; + BIO *bio = NULL; + + ENTERING(asymmetric_object_pem_read_private_file); + + if (!PyArg_ParseTuple(args, "s|s", &filename, &pass)) + goto error; + + if ((bio = BIO_new_file(filename, "rb")) == NULL) + lose_openssl_error("Could not open file"); + + result = asymmetric_object_pem_read_private_helper(type, bio, pass); + + error: + BIO_free(bio); + return result; +} + +static PyObject * 
+asymmetric_object_der_read_private_helper(PyTypeObject *type, BIO *bio) +{ + asymmetric_object *self = NULL; + + ENTERING(asymmetric_object_der_read_private_helper); + + if ((self = (asymmetric_object *) asymmetric_object_new(type, NULL, NULL)) == NULL) + goto error; + + if (!d2i_PrivateKey_bio(bio, &self->pkey)) + lose_openssl_error("Couldn't load private key"); + + return (PyObject *) self; + + error: + + Py_XDECREF(self); + return NULL; +} + +static char asymmetric_object_der_read_private__doc__[] = + "Read a DER-encoded private key from a string.\n" + ; + +static PyObject * +asymmetric_object_der_read_private(PyTypeObject *type, PyObject *args) +{ + ENTERING(asymmetric_object_der_read_private); + return read_from_string_helper(asymmetric_object_der_read_private_helper, type, args); +} + +static char asymmetric_object_der_read_private_file__doc__[] = + "Read a DER-encoded private key from a file.\n" + ; + +static PyObject * +asymmetric_object_der_read_private_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(asymmetric_object_der_read_private_file); + return read_from_file_helper(asymmetric_object_der_read_private_helper, type, args); +} + +static PyObject * +asymmetric_object_pem_read_public_helper(PyTypeObject *type, BIO *bio) +{ + asymmetric_object *self = NULL; + + ENTERING(asymmetric_object_pem_read_public_helper); + + if ((self = (asymmetric_object *) asymmetric_object_new(type, NULL, NULL)) == NULL) + goto error; + + if (!PEM_read_bio_PUBKEY(bio, &self->pkey, NULL, NULL)) + lose_openssl_error("Couldn't load public key"); + + return (PyObject *) self; + + error: + Py_XDECREF(self); + return NULL; +} + +static PyObject * +asymmetric_object_der_read_public_helper(PyTypeObject *type, BIO *bio) +{ + asymmetric_object *self = NULL; + + ENTERING(asymmetric_object_der_read_public_helper); + + if ((self = (asymmetric_object *) asymmetric_object_new(type, NULL, NULL)) == NULL) + goto error; + + if (!d2i_PUBKEY_bio(bio, &self->pkey)) + 
lose_openssl_error("Couldn't load public key"); + + return (PyObject *) self; + + error: + + Py_XDECREF(self); + return NULL; +} + +static char asymmetric_object_pem_read_public__doc__[] = + "Read a PEM-encoded public key from a string.\n" + ; + +static PyObject * +asymmetric_object_pem_read_public(PyTypeObject *type, PyObject *args) +{ + ENTERING(asymmetric_object_pem_read_public); + return read_from_string_helper(asymmetric_object_pem_read_public_helper, type, args); +} + +static char asymmetric_object_pem_read_public_file__doc__[] = + "Read a PEM-encoded public key from a file.\n" + ; + +static PyObject * +asymmetric_object_pem_read_public_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(asymmetric_object_pem_read_public_file); + return read_from_file_helper(asymmetric_object_pem_read_public_helper, type, args); +} + +static char asymmetric_object_der_read_public__doc__[] = + "Read a DER-encoded public key from a string.\n" + ; + +static PyObject * +asymmetric_object_der_read_public(PyTypeObject *type, PyObject *args) +{ + ENTERING(asymmetric_object_der_read_public); + return read_from_string_helper(asymmetric_object_der_read_public_helper, type, args); +} + +static char asymmetric_object_der_read_public_file__doc__[] = + "Read a DER-encoded public key from a file.\n" + ; + +static PyObject * +asymmetric_object_der_read_public_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(asymmetric_object_der_read_public_file); + return read_from_file_helper(asymmetric_object_der_read_public_helper, type, args); +} + +static char asymmetric_object_pem_write_private__doc__[] = + "Return the PEM encoding of an \"Asymmetric\" private key.\n" + "\n" + "This method takes an optional parameter \"passphrase\" which, if\n" + "specified, will be used to encrypt the private key with AES-256-CBC.\n" + "\n" + "If you don't specify a passphrase, the key will not be encrypted.\n" + ; + +static PyObject * +asymmetric_object_pem_write_private(asymmetric_object *self, PyObject 
*args) +{ + PyObject *result = NULL; + char *passphrase = NULL; + const EVP_CIPHER *evp_method = NULL; + BIO *bio = NULL; + + ENTERING(asymmetric_object_pem_write_private); + + if (!PyArg_ParseTuple(args, "|s", &passphrase)) + goto error; + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (passphrase) + evp_method = EVP_aes_256_cbc(); + + if (!PEM_write_bio_PrivateKey(bio, self->pkey, evp_method, NULL, 0, NULL, passphrase)) + lose_openssl_error("Unable to write key"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static char asymmetric_object_pem_write_public__doc__[] = + "Return the PEM encoding of an \"Asymmetric\" public key.\n" + ; + +static PyObject * +asymmetric_object_pem_write_public(asymmetric_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(asymmetric_object_pem_write_public); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!PEM_write_bio_PUBKEY(bio, self->pkey)) + lose_openssl_error("Unable to write key"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static char asymmetric_object_der_write_private__doc__[] = + "Return the DER encoding of an \"Asymmetric\" private key.\n" + ; + +static PyObject * +asymmetric_object_der_write_private(asymmetric_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(asymmetric_object_der_write_private); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!i2d_PrivateKey_bio(bio, self->pkey)) + lose_openssl_error("Unable to write private key"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static char asymmetric_object_der_write_public__doc__[] = + "Return the DER encoding of an \"Asymmetric\" public key.\n" + ; + +static PyObject * +asymmetric_object_der_write_public(asymmetric_object *self) +{ + PyObject *result 
= NULL; + BIO *bio = NULL; + + ENTERING(asymmetric_object_der_write_public); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!i2d_PUBKEY_bio(bio, self->pkey)) + lose_openssl_error("Unable to write public key"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static char asymmetric_object_generate_rsa__doc__[] = + "Generate a new RSA keypair.\n" + "\n" + "Optional argument key_size is the desired key size, in bits;\n" + "if not specified, the default is 2048." + ; + +static PyObject * +asymmetric_object_generate_rsa(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"key_size", NULL}; + asymmetric_object *self = NULL; + EVP_PKEY_CTX *ctx = NULL; + int key_size = 2048; + int ok = 0; + + ENTERING(asymmetric_object_generate_rsa); + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|i", kwlist, &key_size)) + goto error; + + if ((self = (asymmetric_object *) asymmetric_object_new(type, NULL, NULL)) == NULL) + goto error; + + /* + * Explictly setting RSA_F4 would be tedious, as it requires messing + * about with bignums, and F4 is the default, so we leave it alone. + * In case this ever changes, the required sequence would be: + * BN_new(), BN_set_word(), EVP_PKEY_CTX_set_rsa_keygen_pubexp(), + * BN_free(). 
+ */ + + if ((ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_RSA, NULL)) == NULL || + EVP_PKEY_keygen_init(ctx) <= 0 || + EVP_PKEY_CTX_set_rsa_keygen_bits(ctx, key_size) <= 0 || + EVP_PKEY_keygen(ctx, &self->pkey) <= 0) + lose_openssl_error("Couldn't generate new RSA key"); + + ok = 1; + + error: + EVP_PKEY_CTX_free(ctx); + + if (ok) + return (PyObject *) self; + + Py_XDECREF(self); + return NULL; +} + +static char asymmetric_object_generate_from_params__doc__[] = + "Generate a new keypair using an AsymmetricParams object.\n" + ; + +static PyObject * +asymmetric_object_generate_from_params(PyTypeObject *type, PyObject *args) +{ + asymmetric_params_object *params = NULL; + asymmetric_object *self = NULL; + EVP_PKEY_CTX *ctx = NULL; + int ok = 0; + + ENTERING(asymmetric_object_generate_from_params); + + if (!PyArg_ParseTuple(args, "O!", &POW_AsymmetricParams_Type, ¶ms)) + goto error; + + if ((self = (asymmetric_object *) asymmetric_object_new(type, NULL, NULL)) == NULL) + goto error; + + if ((ctx = EVP_PKEY_CTX_new(params->pkey, NULL)) == NULL || + EVP_PKEY_keygen_init(ctx) <= 0 || + EVP_PKEY_keygen(ctx, &self->pkey) <= 0) + lose_openssl_error("Couldn't generate new key"); + + ok = 1; + + error: + EVP_PKEY_CTX_free(ctx); + + if (ok) + return (PyObject *) self; + + Py_XDECREF(self); + return NULL; +} + +static char asymmetric_object_calculate_ski__doc__[] = + "Calculate SKI value for this key.\n" + "\n" + "The SKI is the SHA-1 hash of key's SubjectPublicKey value.\n" + ; + +static PyObject * +asymmetric_object_calculate_ski(asymmetric_object *self) +{ + PyObject *result = NULL; + X509_PUBKEY *pubkey = NULL; + unsigned char digest[EVP_MAX_MD_SIZE]; + unsigned digest_length; + + ENTERING(asymmetric_object_calculate_ski); + + if (!X509_PUBKEY_set(&pubkey, self->pkey)) + lose_openssl_error("Couldn't extract public key"); + + if (!EVP_Digest(pubkey->public_key->data, pubkey->public_key->length, + digest, &digest_length, EVP_sha1(), NULL)) + lose_openssl_error("Couldn't calculate SHA-1 
digest of public key"); + + result = PyString_FromStringAndSize((char *) digest, digest_length); + + error: + X509_PUBKEY_free(pubkey); + return result; +} + +static struct PyMethodDef asymmetric_object_methods[] = { + Define_Method(pemWritePrivate, asymmetric_object_pem_write_private, METH_VARARGS), + Define_Method(pemWritePublic, asymmetric_object_pem_write_public, METH_NOARGS), + Define_Method(derWritePrivate, asymmetric_object_der_write_private, METH_NOARGS), + Define_Method(derWritePublic, asymmetric_object_der_write_public, METH_NOARGS), + Define_Method(calculateSKI, asymmetric_object_calculate_ski, METH_NOARGS), + Define_Class_Method(pemReadPublic, asymmetric_object_pem_read_public, METH_VARARGS), + Define_Class_Method(pemReadPublicFile, asymmetric_object_pem_read_public_file, METH_VARARGS), + Define_Class_Method(derReadPublic, asymmetric_object_der_read_public, METH_VARARGS), + Define_Class_Method(derReadPublicFile, asymmetric_object_der_read_public_file, METH_VARARGS), + Define_Class_Method(pemReadPrivate, asymmetric_object_pem_read_private, METH_VARARGS), + Define_Class_Method(pemReadPrivateFile, asymmetric_object_pem_read_private_file, METH_VARARGS), + Define_Class_Method(derReadPrivate, asymmetric_object_der_read_private, METH_VARARGS), + Define_Class_Method(derReadPrivateFile, asymmetric_object_der_read_private_file, METH_VARARGS), + Define_Class_Method(generateRSA, asymmetric_object_generate_rsa, METH_KEYWORDS), + Define_Class_Method(generateFromParams, asymmetric_object_generate_from_params, METH_VARARGS), + {NULL} +}; + +static char POW_Asymmetric_Type__doc__[] = + "Container for OpenSSL's EVP_PKEY asymmetric key classes.\n" + "\n" + LAME_DISCLAIMER_IN_ALL_CLASS_DOCUMENTATION + ; + +static PyTypeObject POW_Asymmetric_Type = { + PyObject_HEAD_INIT(0) + 0, /* ob_size */ + "rpki.POW.Asymmetric", /* tp_name */ + sizeof(asymmetric_object), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)asymmetric_object_dealloc, /* tp_dealloc */ + 0, /* tp_print 
*/ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + POW_Asymmetric_Type__doc__, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + asymmetric_object_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + (initproc) asymmetric_object_init, /* tp_init */ + 0, /* tp_alloc */ + asymmetric_object_new, /* tp_new */ +}; + + + +/* + * AsymmetricParams object. + */ + +static PyObject * +asymmetric_params_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds) +{ + asymmetric_params_object *self = NULL; + + ENTERING(asymmetric_params_object_new); + + if ((self = (asymmetric_params_object *) type->tp_alloc(type, 0)) == NULL) + goto error; + + self->pkey = NULL; + + return (PyObject *) self; + + error: + + Py_XDECREF(self); + return NULL; +} + +static int +asymmetric_params_object_init(asymmetric_params_object *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {NULL}; + + ENTERING(asymmetric_params_object_init); + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "", kwlist)) + goto error; + + return 0; + + error: + return -1; +} + +static void +asymmetric_params_object_dealloc(asymmetric_params_object *self) +{ + ENTERING(asymmetric_params_object_dealloc); + EVP_PKEY_free(self->pkey); + self->ob_type->tp_free((PyObject*) self); +} + +static PyObject * +asymmetric_params_object_pem_read_helper(PyTypeObject *type, BIO *bio) +{ + asymmetric_params_object *self = NULL; + + 
ENTERING(asymmetric_params_object_pem_read_helper); + + if ((self = (asymmetric_params_object *) asymmetric_params_object_new(type, NULL, NULL)) == NULL) + goto error; + + if (!PEM_read_bio_Parameters(bio, &self->pkey)) + lose_openssl_error("Couldn't load PEM encoded key parameters"); + + return (PyObject *) self; + + error: + + Py_XDECREF(self); + return NULL; +} + +static char asymmetric_params_object_pem_read__doc__[] = + "Read PEM-encoded key parameters from a string.\n" + ; + +static PyObject * +asymmetric_params_object_pem_read(PyTypeObject *type, PyObject *args) +{ + ENTERING(asymmetric_params_object_pem_read); + return read_from_string_helper(asymmetric_params_object_pem_read_helper, type, args); +} + +static char asymmetric_params_object_pem_read_file__doc__[] = + "Read PEM-encoded key parameters from a file.\n" + ; + +static PyObject * +asymmetric_params_object_pem_read_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(asymmetric_params_object_pem_read_file); + return read_from_file_helper(asymmetric_params_object_pem_read_helper, type, args); +} + +static char asymmetric_params_object_pem_write__doc__[] = + "Return the PEM encoding of this set of key parameters, as a string.\n" + ; + +static PyObject * +asymmetric_params_object_pem_write(asymmetric_params_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(asymmetric_params_object_pem_write); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (PEM_write_bio_Parameters(bio, self->pkey) <= 0) + lose_openssl_error("Unable to write key parameters"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static char asymmetric_params_object_generate_ec__doc__[] = + "Generate a new set of EC parameters.\n" + "\n" + "Optional argument curve is a numeric code representing the curve to use;\n" + "if not specified, the default is P-256." 
+ ; + +static PyObject * +asymmetric_params_object_generate_ec(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"curve", NULL}; + asymmetric_params_object *self = NULL; + EVP_PKEY_CTX *ctx = NULL; + int curve = NID_X9_62_prime256v1; + int ok = 0; + + ENTERING(asymmetric_params_object_generate_ec); + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|i", kwlist, &curve)) + goto error; + + if ((self = (asymmetric_params_object *) asymmetric_params_object_new(type, NULL, NULL)) == NULL) + goto error; + + if ((ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_EC, NULL)) == NULL || + EVP_PKEY_paramgen_init(ctx) <= 0 || + EVP_PKEY_CTX_set_ec_paramgen_curve_nid(ctx, curve) <= 0 || + EVP_PKEY_paramgen(ctx, &self->pkey) <= 0) + lose_openssl_error("Couldn't generate key parameters"); + + ok = 1; + + error: + EVP_PKEY_CTX_free(ctx); + + if (ok) + return (PyObject *) self; + + Py_XDECREF(self); + return NULL; +} + +static char asymmetric_params_object_generate_dh__doc__[] = + "Generate a new set of DH parameters.\n" + "\n" + "Optional argument prime_length is length of the DH prime parameter\n" + "to use, in bits; if not specified, the default is 2048 bits.\n" + "\n" + "Be warned that generating DH parameters with a 2048-bit prime may\n" + "take a ridiculously long time.\n" + ; + +static PyObject * +asymmetric_params_object_generate_dh(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"prime_length", NULL}; + asymmetric_params_object *self = NULL; + EVP_PKEY_CTX *ctx = NULL; + int prime_length = 2048; + int ok = 0; + + ENTERING(asymmetric_params_object_generate_dh); + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|i", kwlist, &prime_length)) + goto error; + + if ((self = (asymmetric_params_object *) asymmetric_params_object_new(type, NULL, NULL)) == NULL) + goto error; + + if ((ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_DH, NULL)) == NULL || + EVP_PKEY_paramgen_init(ctx) <= 0 || + EVP_PKEY_CTX_set_dh_paramgen_prime_len(ctx, 
prime_length) <= 0 || + EVP_PKEY_paramgen(ctx, &self->pkey) <= 0) + lose_openssl_error("Couldn't generate key parameters"); + + ok = 1; + + error: + EVP_PKEY_CTX_free(ctx); + + if (ok) + return (PyObject *) self; + + Py_XDECREF(self); + return NULL; +} + +static char asymmetric_params_object_generate_dsa__doc__[] = + "Generate a new set of DSA parameters.\n" + "\n" + "Optional argument key_length is the length of the key to generate, in bits;\n" + "if not specified, the default is 2048 bits." + ; + +static PyObject * +asymmetric_params_object_generate_dsa(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"key_length", NULL}; + asymmetric_params_object *self = NULL; + EVP_PKEY_CTX *ctx = NULL; + int key_length = 2048; + int ok = 0; + + ENTERING(asymmetric_params_object_generate_dsa); + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|i", kwlist, &key_length)) + goto error; + + if ((self = (asymmetric_params_object *) asymmetric_params_object_new(type, NULL, NULL)) == NULL) + goto error; + + if ((ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_DSA, NULL)) == NULL || + EVP_PKEY_paramgen_init(ctx) <= 0 || + EVP_PKEY_CTX_set_dsa_paramgen_bits(ctx, key_length) <= 0 || + EVP_PKEY_paramgen(ctx, &self->pkey) <= 0) + lose_openssl_error("Couldn't generate key parameters"); + + ok = 1; + + error: + EVP_PKEY_CTX_free(ctx); + + if (ok) + return (PyObject *) self; + + Py_XDECREF(self); + return NULL; +} + +static struct PyMethodDef asymmetric_params_object_methods[] = { + Define_Method(pemWrite, asymmetric_params_object_pem_write, METH_NOARGS), + Define_Class_Method(pemRead, asymmetric_params_object_pem_read, METH_VARARGS), + Define_Class_Method(pemReadFile, asymmetric_params_object_pem_read_file, METH_VARARGS), + Define_Class_Method(generateEC, asymmetric_params_object_generate_ec, METH_KEYWORDS), + Define_Class_Method(generateDH, asymmetric_params_object_generate_dh, METH_KEYWORDS), + Define_Class_Method(generateDSA, asymmetric_params_object_generate_dsa, 
METH_KEYWORDS), + {NULL} +}; + +static char POW_AsymmetricParams_Type__doc__[] = + "Container for OpenSSL's EVP_PKEY asymmetric key parameter classes.\n" + "\n" + LAME_DISCLAIMER_IN_ALL_CLASS_DOCUMENTATION + ; + +static PyTypeObject POW_AsymmetricParams_Type = { + PyObject_HEAD_INIT(0) + 0, /* ob_size */ + "rpki.POW.AsymmetricParams", /* tp_name */ + sizeof(asymmetric_params_object), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)asymmetric_params_object_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + POW_AsymmetricParams_Type__doc__, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + asymmetric_params_object_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + (initproc) asymmetric_params_object_init, /* tp_init */ + 0, /* tp_alloc */ + asymmetric_params_object_new, /* tp_new */ +}; + + + +/* + * Digest object. 
+ */ + +static PyObject * +digest_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds) +{ + digest_object *self = NULL; + + ENTERING(digest_object_new); + + if ((self = (digest_object *) type->tp_alloc(type, 0)) == NULL) + goto error; + + self->digest_type = 0; + + return (PyObject *) self; + + error: + return NULL; +} + +static int +digest_object_init(digest_object *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"digest_type", NULL}; + const EVP_MD *digest_method = NULL; + int digest_type = 0; + + ENTERING(digest_object_init); + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "i", kwlist, &digest_type)) + goto error; + + if ((digest_method = evp_digest_factory(digest_type)) == NULL) + lose("Unsupported digest algorithm"); + + self->digest_type = digest_type; + if (!EVP_DigestInit(&self->digest_ctx, digest_method)) + lose_openssl_error("Couldn't initialize digest"); + + return 0; + + error: + return -1; +} + +static void +digest_object_dealloc(digest_object *self) +{ + ENTERING(digest_object_dealloc); + EVP_MD_CTX_cleanup(&self->digest_ctx); + self->ob_type->tp_free((PyObject*) self); +} + +static char digest_object_update__doc__[] = + "Add data to this digest.\n" + "\n" + "the \"data\" parameter should be a string containing the data to be added.\n" + ; + +static PyObject * +digest_object_update(digest_object *self, PyObject *args) +{ + char *data = NULL; + Py_ssize_t len = 0; + + ENTERING(digest_object_update); + + if (!PyArg_ParseTuple(args, "s#", &data, &len)) + goto error; + + if (!EVP_DigestUpdate(&self->digest_ctx, data, len)) + lose_openssl_error("EVP_DigestUpdate() failed"); + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char digest_object_copy__doc__[] = + "Return a copy of this Digest object.\n" + ; + +static PyObject * +digest_object_copy(digest_object *self) +{ + digest_object *new = NULL; + + ENTERING(digest_object_copy); + + if ((new = (digest_object *) 
digest_object_new(&POW_Digest_Type, NULL, NULL)) == NULL) + goto error; + + new->digest_type = self->digest_type; + if (!EVP_MD_CTX_copy(&new->digest_ctx, &self->digest_ctx)) + lose_openssl_error("Couldn't copy digest"); + + return (PyObject*) new; + + error: + + Py_XDECREF(new); + return NULL; +} + +static char digest_object_digest__doc__[] = + "Return the digest of all the data which this Digest object has processed.\n" + "\n" + "This method can be called at any time and will not effect the internal\n" + "state of the Digest object.\n" + ; + +/* + * Do we really need to do this copy? Nice general operation, but does + * anything we're doing for RPKI care? + */ + +static PyObject * +digest_object_digest(digest_object *self) +{ + unsigned char digest_text[EVP_MAX_MD_SIZE]; + EVP_MD_CTX ctx; + unsigned digest_len = 0; + + ENTERING(digest_object_digest); + + if (!EVP_MD_CTX_copy(&ctx, &self->digest_ctx)) + lose_openssl_error("Couldn't copy digest"); + + EVP_DigestFinal(&ctx, digest_text, &digest_len); + + EVP_MD_CTX_cleanup(&ctx); + + return Py_BuildValue("s#", digest_text, (Py_ssize_t) digest_len); + + error: + return NULL; +} + +static struct PyMethodDef digest_object_methods[] = { + Define_Method(update, digest_object_update, METH_VARARGS), + Define_Method(digest, digest_object_digest, METH_NOARGS), + Define_Method(copy, digest_object_copy, METH_NOARGS), + {NULL} +}; + +static char POW_Digest_Type__doc__[] = + "This class provides access to the digest functionality of OpenSSL.\n" + "It emulates the digest modules in the Python Standard Library, but\n" + "does not currently support the \"hexdigest\" method.\n" + "\n" + "The constructor takes one parameter, the kind of Digest object to create.\n" + "This should be one of the following:\n" + "\n" + " * MD5_DIGEST\n" + " * SHA_DIGEST\n" + " * SHA1_DIGEST\n" + " * SHA256_DIGEST\n" + " * SHA384_DIGEST\n" + " * SHA512_DIGEST\n" + ; + +static PyTypeObject POW_Digest_Type = { + PyObject_HEAD_INIT(0) + 0, /* ob_size */ + 
"rpki.POW.Digest", /* tp_name */ + sizeof(digest_object), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)digest_object_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + POW_Digest_Type__doc__, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + digest_object_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + (initproc) digest_object_init, /* tp_init */ + 0, /* tp_alloc */ + digest_object_new, /* tp_new */ +}; + + + +/* + * CMS object. 
+ */ + +static PyObject * +cms_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds) +{ + cms_object *self; + + ENTERING(cms_object_new); + + if ((self = (cms_object *) type->tp_alloc(type, 0)) != NULL) + return (PyObject *) self; + + Py_XDECREF(self); + return NULL; +} + +static void +cms_object_dealloc(cms_object *self) +{ + ENTERING(cms_object_dealloc); + CMS_ContentInfo_free(self->cms); + self->ob_type->tp_free((PyObject*) self); +} + +static PyObject * +cms_object_pem_read_helper(PyTypeObject *type, BIO *bio) +{ + cms_object *self; + + ENTERING(cms_object_pem_read_helper); + + if ((self = (cms_object *) type->tp_new(type, NULL, NULL)) == NULL) + goto error; + + if (!PEM_read_bio_CMS(bio, &self->cms, NULL, NULL)) + lose_openssl_error("Couldn't load PEM encoded CMS message"); + + return (PyObject *) self; + + error: + Py_XDECREF(self); + return NULL; +} + +static PyObject * +cms_object_der_read_helper(PyTypeObject *type, BIO *bio) +{ + cms_object *self; + + ENTERING(cms_object_der_read_helper); + + if ((self = (cms_object *) type->tp_new(type, NULL, NULL)) == NULL) + goto error; + + if (!d2i_CMS_bio(bio, &self->cms)) + lose_openssl_error("Couldn't load DER encoded CMS message"); + + return (PyObject *) self; + + error: + Py_XDECREF(self); + return NULL; +} + +static char cms_object_pem_read__doc__[] = + "Read a PEM-encoded CMS object from a string.\n" + ; + +static PyObject * +cms_object_pem_read(PyTypeObject *type, PyObject *args) +{ + ENTERING(cms_object_pem_read); + return read_from_string_helper(cms_object_pem_read_helper, type, args); +} + +static char cms_object_pem_read_file__doc__[] = + "Read a PEM-encoded CMS object from a file.\n" + ; + +static PyObject * +cms_object_pem_read_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(cms_object_pem_read_file); + return read_from_file_helper(cms_object_pem_read_helper, type, args); +} + +static char cms_object_der_read__doc__[] = + "Read a DER-encoded CMS object from a 
string.\n" + ; + +static PyObject * +cms_object_der_read(PyTypeObject *type, PyObject *args) +{ + ENTERING(cms_object_der_read); + return read_from_string_helper(cms_object_der_read_helper, type, args); +} + +static char cms_object_der_read_file__doc__[] = + "Read a DER-encoded CMS object from a file.\n" + ; + +static PyObject * +cms_object_der_read_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(cms_object_der_read_file); + return read_from_file_helper(cms_object_der_read_helper, type, args); +} + +static char cms_object_pem_write__doc__[] = + "Return the DER encoding of this CMS message.\n" + ; + +static PyObject * +cms_object_pem_write(cms_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(cms_object_pem_write); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!PEM_write_bio_CMS(bio, self->cms)) + lose_openssl_error("Unable to write CMS object"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static char cms_object_der_write__doc__[] = + "Return the DER encoding of this CMS message.\n" + ; + +static PyObject * +cms_object_der_write(cms_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(cms_object_der_write); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!i2d_CMS_bio(bio, self->cms)) + lose_openssl_error("Unable to write CMS object"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static int +cms_object_sign_helper(cms_object *self, + BIO *bio, + x509_object *signcert, + asymmetric_object *signkey, + PyObject *x509_iterable, + PyObject *crl_iterable, + char *oid, + unsigned flags) +{ + STACK_OF(X509) *x509_stack = NULL; + ASN1_OBJECT *econtent_type = NULL; + CMS_ContentInfo *cms = NULL; + PyObject *iterator = NULL; + PyObject *item = NULL; + int ok = 0; + + ENTERING(cms_object_sign_helper); + + 
assert_no_unhandled_openssl_errors(); + + flags &= CMS_NOCERTS | CMS_NOATTR; + flags |= CMS_BINARY | CMS_NOSMIMECAP | CMS_PARTIAL | CMS_USE_KEYID; + + if ((x509_stack = x509_helper_iterable_to_stack(x509_iterable)) == NULL) + goto error; + + assert_no_unhandled_openssl_errors(); + + if (oid && (econtent_type = OBJ_txt2obj(oid, 1)) == NULL) + lose_openssl_error("Couldn't parse OID"); + + assert_no_unhandled_openssl_errors(); + + if ((cms = CMS_sign(NULL, NULL, x509_stack, bio, flags)) == NULL) + lose_openssl_error("Couldn't create CMS message"); + + assert_no_unhandled_openssl_errors(); + + if (econtent_type) + CMS_set1_eContentType(cms, econtent_type); + + assert_no_unhandled_openssl_errors(); + + if (!CMS_add1_signer(cms, signcert->x509, signkey->pkey, EVP_sha256(), flags)) + lose_openssl_error("Couldn't sign CMS message"); + + assert_no_unhandled_openssl_errors(); + + if (crl_iterable != Py_None) { + + if ((iterator = PyObject_GetIter(crl_iterable)) == NULL) + goto error; + + while ((item = PyIter_Next(iterator)) != NULL) { + + if (!POW_CRL_Check(item)) + lose_type_error("Inappropriate type"); + + if (!CMS_add1_crl(cms, ((crl_object *) item)->crl)) + lose_openssl_error("Couldn't add CRL to CMS"); + + assert_no_unhandled_openssl_errors(); + + Py_XDECREF(item); + item = NULL; + } + } + + if (!CMS_final(cms, bio, NULL, flags)) + lose_openssl_error("Couldn't finalize CMS signatures"); + + assert_no_unhandled_openssl_errors(); + + CMS_ContentInfo_free(self->cms); + self->cms = cms; + cms = NULL; + + ok = 1; + + error: /* fall through */ + CMS_ContentInfo_free(cms); + sk_X509_free(x509_stack); + ASN1_OBJECT_free(econtent_type); + Py_XDECREF(iterator); + Py_XDECREF(item); + + return ok; +} + +static char cms_object_sign__doc__[] = + "Sign this CMS message message with a private key.\n" + "\n" + "The \"signcert\" parameter should be the certificate against which the\n" + "message will eventually be verified, an X509 object.\n" + "\n" + "The \"key\" parameter should be 
the private key with which to sign the\n" + "message, an Asymmetric object.\n" + "\n" + "The \"data\" parameter should be the message to be signed, a string.\n" + "\n" + "The optional \"certs\" parameter should be an iterable supplying X509 objects\n" + "to be included in the signed message.\n" + "\n" + "The optional \"crls\" parameter should be an iterable supplying CRL objects\n" + "to be included in the signed message.\n" + "\n" + "The optional \"eContentType\" parameter should be an Object Identifier\n" + "to use as the eContentType value in the signed message.\n" + "\n" + "The optional \"flags\" parameters should be an integer holding a bitmask,\n" + "and can include the following flags:\n" + "\n" + " * CMS_NOCERTS\n" + " * CMS_NOATTR\n" + ; + +static PyObject * +cms_object_sign(cms_object *self, PyObject *args) +{ + asymmetric_object *signkey = NULL; + x509_object *signcert = NULL; + PyObject *x509_iterable = Py_None; + PyObject *crl_iterable = Py_None; + char *buf = NULL, *oid = NULL; + Py_ssize_t len; + unsigned flags = 0; + BIO *bio = NULL; + int ok = 0; + + ENTERING(cms_object_sign); + + if (!PyArg_ParseTuple(args, "O!O!s#|OOsI", + &POW_X509_Type, &signcert, + &POW_Asymmetric_Type, &signkey, + &buf, &len, + &x509_iterable, + &crl_iterable, + &oid, + &flags)) + goto error; + + assert_no_unhandled_openssl_errors(); + + if ((bio = BIO_new_mem_buf(buf, len)) == NULL) + lose_no_memory(); + + assert_no_unhandled_openssl_errors(); + + if (!cms_object_sign_helper(self, bio, signcert, signkey, + x509_iterable, crl_iterable, oid, flags)) + lose_openssl_error("Couldn't sign CMS object"); + + assert_no_unhandled_openssl_errors(); + + ok = 1; + + error: + BIO_free(bio); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +static BIO * +cms_object_verify_helper(cms_object *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"store", "certs", "flags", NULL}; + x509_store_object *store = NULL; + PyObject *certs_iterable = Py_None; + STACK_OF(X509) 
*certs_stack = NULL; + unsigned flags = 0, ok = 0; + BIO *bio = NULL; + + ENTERING(cms_object_verify_helper); + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "O!|OI", kwlist, + &POW_X509Store_Type, &store, &certs_iterable, &flags)) + goto error; + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + assert_no_unhandled_openssl_errors(); + + flags &= (CMS_NOINTERN | CMS_NOCRL | CMS_NO_SIGNER_CERT_VERIFY | + CMS_NO_ATTR_VERIFY | CMS_NO_CONTENT_VERIFY); + + if (certs_iterable != Py_None && + (certs_stack = x509_helper_iterable_to_stack(certs_iterable)) == NULL) + goto error; + + assert_no_unhandled_openssl_errors(); + + if (CMS_verify(self->cms, certs_stack, store->store, NULL, bio, flags) <= 0) + lose_openssl_error("Couldn't verify CMS message"); + + assert_no_unhandled_openssl_errors(); + + ok = 1; + + error: /* fall through */ + sk_X509_free(certs_stack); + + if (ok) + return bio; + + BIO_free(bio); + return NULL; +} + +static char cms_object_verify__doc__[] = + "Verify this CMS message against a trusted certificate store.\n" + "\n" + "The \"store\" parameter is an X509Store object, the trusted certificate\n" + "store to use in verification.\n" + "\n" + "The optional \"certs\" parameter is a set of certificates to search\n" + "for the signer's certificate.\n" + "\n" + "The optional \"flags\" parameter is an integer of bit flags,\n" + "containing zero or more of the following:\n" + "\n" + " * CMS_NOINTERN\n" + " * CMS_NOCRL\n" + " * CMS_NO_SIGNER_CERT_VERIFY\n" + " * CMS_NO_ATTR_VERIFY\n" + " * CMS_NO_CONTENT_VERIFY\n" + ; + +static PyObject * +cms_object_verify(cms_object *self, PyObject *args, PyObject *kwds) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(cms_object_verify); + + if ((bio = cms_object_verify_helper(self, args, kwds)) != NULL) + result = BIO_to_PyString_helper(bio); + + BIO_free(bio); + return result; +} + +static char cms_object_eContentType__doc__[] = + "Return the eContentType OID of this CMS message.\n" + ; + 
+static PyObject * +cms_object_eContentType(cms_object *self) +{ + const ASN1_OBJECT *oid = NULL; + PyObject *result = NULL; + + ENTERING(cms_object_eContentType); + + if ((oid = CMS_get0_eContentType(self->cms)) == NULL) + lose_openssl_error("Couldn't extract eContentType from CMS message"); + + assert_no_unhandled_openssl_errors(); + + result = ASN1_OBJECT_to_PyString(oid); + + error: + return result; +} + +static char cms_object_signingTime__doc__[] = + "Return the signingTime of this CMS message.\n" + ; + +static PyObject * +cms_object_signingTime(cms_object *self) +{ + PyObject *result = NULL; + STACK_OF(CMS_SignerInfo) *sis = NULL; + CMS_SignerInfo *si = NULL; + X509_ATTRIBUTE *xa = NULL; + ASN1_TYPE *so = NULL; + int i; + + ENTERING(cms_object_signingTime); + + if ((sis = CMS_get0_SignerInfos(self->cms)) == NULL) + lose_openssl_error("Couldn't extract signerInfos from CMS message[1]"); + + if (sk_CMS_SignerInfo_num(sis) != 1) + lose_openssl_error("Couldn't extract signerInfos from CMS message[2]"); + + si = sk_CMS_SignerInfo_value(sis, 0); + + if ((i = CMS_signed_get_attr_by_NID(si, NID_pkcs9_signingTime, -1)) < 0) + lose_openssl_error("Couldn't extract signerInfos from CMS message[3]"); + + if ((xa = CMS_signed_get_attr(si, i)) == NULL) + lose_openssl_error("Couldn't extract signerInfos from CMS message[4]"); + + if (xa->single) + lose("Couldn't extract signerInfos from CMS message[5]"); + + if (sk_ASN1_TYPE_num(xa->value.set) != 1) + lose("Couldn't extract signerInfos from CMS message[6]"); + + if ((so = sk_ASN1_TYPE_value(xa->value.set, 0)) == NULL) + lose("Couldn't extract signerInfos from CMS message[7]"); + + switch (so->type) { + case V_ASN1_UTCTIME: + result = ASN1_TIME_to_Python(so->value.utctime); + break; + case V_ASN1_GENERALIZEDTIME: + result = ASN1_TIME_to_Python(so->value.generalizedtime); + break; + default: + lose("Couldn't extract signerInfos from CMS message[8]"); + } + + error: + return result; +} + +static char cms_object_pprint__doc__[] 
= + "Return a pretty-printed representation of this CMS message.\n" + ; + +static PyObject * +cms_object_pprint(cms_object *self) +{ + BIO *bio = NULL; + PyObject *result = NULL; + + ENTERING(cms_object_pprint); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!CMS_ContentInfo_print_ctx(bio, self->cms, 0, NULL)) + lose_openssl_error("Unable to pretty-print CMS object"); + + result = BIO_to_PyString_helper(bio); + + error: + BIO_free(bio); + return result; +} + +static char cms_object_certs__doc__[] = + "Return any certificates embedded in this CMS message, as a\n" + "tuple of X509 objects. This tuple will be empty if the message\n" + "wrapper contains no certificates.\n" + ; + +static PyObject * +cms_object_certs(cms_object *self) +{ + STACK_OF(X509) *certs = NULL; + PyObject *result = NULL; + + ENTERING(cms_object_certs); + + if ((certs = CMS_get1_certs(self->cms)) != NULL) + result = stack_to_tuple_helper(CHECKED_PTR_OF(STACK_OF(X509), certs), + stack_to_tuple_helper_get_x509); + else if (!ERR_peek_error()) + result = Py_BuildValue("()"); + else + lose_openssl_error("Couldn't extract certs from CMS message"); + + error: /* fall through */ + sk_X509_pop_free(certs, X509_free); + return result; +} + +static char cms_object_crls__doc__[] = + "Return any CRLs embedded in this CMS message, as a tuple of\n" + "CRL objects. 
This tuple will be empty if the message contains no CRLs.\n" + ; + +static PyObject * +cms_object_crls(cms_object *self) +{ + STACK_OF(X509_CRL) *crls = NULL; + PyObject *result = NULL; + + ENTERING(cms_object_crls); + + if ((crls = CMS_get1_crls(self->cms)) != NULL) + result = stack_to_tuple_helper(CHECKED_PTR_OF(STACK_OF(X509_CRL), crls), + stack_to_tuple_helper_get_crl); + else if (!ERR_peek_error()) + result = Py_BuildValue("()"); + else + lose_openssl_error("Couldn't extract CRLs from CMS message"); + + error: /* fall through */ + sk_X509_CRL_pop_free(crls, X509_CRL_free); + return result; +} + +static struct PyMethodDef cms_object_methods[] = { + Define_Method(pemWrite, cms_object_pem_write, METH_NOARGS), + Define_Method(derWrite, cms_object_der_write, METH_NOARGS), + Define_Method(sign, cms_object_sign, METH_VARARGS), + Define_Method(verify, cms_object_verify, METH_KEYWORDS), + Define_Method(eContentType, cms_object_eContentType, METH_NOARGS), + Define_Method(signingTime, cms_object_signingTime, METH_NOARGS), + Define_Method(pprint, cms_object_pprint, METH_NOARGS), + Define_Method(certs, cms_object_certs, METH_NOARGS), + Define_Method(crls, cms_object_crls, METH_NOARGS), + Define_Class_Method(pemRead, cms_object_pem_read, METH_VARARGS), + Define_Class_Method(pemReadFile, cms_object_pem_read_file, METH_VARARGS), + Define_Class_Method(derRead, cms_object_der_read, METH_VARARGS), + Define_Class_Method(derReadFile, cms_object_der_read_file, METH_VARARGS), + {NULL} +}; + +static char POW_CMS_Type__doc__[] = + "Wrapper for OpenSSL's CMS class. 
At present this only handes signed\n" + "objects, as those are the only kind of CMS objects used in RPKI.\n" + ; + +static PyTypeObject POW_CMS_Type = { + PyObject_HEAD_INIT(0) + 0, /* ob_size */ + "rpki.POW.CMS", /* tp_name */ + sizeof(cms_object), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)cms_object_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + POW_CMS_Type__doc__, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + cms_object_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + cms_object_new, /* tp_new */ +}; + + + +/* + * Manifest object. + */ + +static PyObject * +manifest_object_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + manifest_object *self = NULL; + + ENTERING(manifest_object_new); + + if ((self = (manifest_object *) cms_object_new(type, args, kwds)) != NULL && + (self->manifest = Manifest_new()) != NULL) + return (PyObject *) self; + + Py_XDECREF(self); + return NULL; +} + +static void +manifest_object_dealloc(manifest_object *self) +{ + ENTERING(manifest_object_dealloc); + Manifest_free(self->manifest); + cms_object_dealloc(&self->cms); +} + +static char manifest_object_verify__doc__[] = + "Verify this manifest. 
See the CMS class's .verify() method for details.\n" + ; + +static PyObject * +manifest_object_verify(manifest_object *self, PyObject *args, PyObject *kwds) +{ + BIO *bio = NULL; + int ok = 0; + + ENTERING(manifest_object_verify); + + if ((bio = cms_object_verify_helper(&self->cms, args, kwds)) == NULL) + goto error; + + if (!ASN1_item_d2i_bio(ASN1_ITEM_rptr(Manifest), bio, &self->manifest)) + lose_openssl_error("Couldn't decode manifest"); + + ok = 1; + + error: + BIO_free(bio); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +static PyObject * +manifest_object_der_read_helper(PyTypeObject *type, BIO *bio) +{ + manifest_object *self; + + ENTERING(manifest_object_der_read_helper); + + if ((self = (manifest_object *) cms_object_der_read_helper(type, bio)) != NULL) + self->manifest = NULL; + + return (PyObject *) self; +} + +static char manifest_object_der_read__doc__[] = + "Read a DER-encoded manifest object from a string.\n" + ; + +static PyObject * +manifest_object_der_read(PyTypeObject *type, PyObject *args) +{ + ENTERING(manifest_object_der_read); + return read_from_string_helper(manifest_object_der_read_helper, type, args); +} + +static char manifest_object_der_read_file__doc__[] = + "Read a DER-encoded manifest object from a file.\n" + ; + +static PyObject * +manifest_object_der_read_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(manifest_object_der_read_file); + return read_from_file_helper(manifest_object_der_read_helper, type, args); +} + +static PyObject * +manifest_object_pem_read_helper(PyTypeObject *type, BIO *bio) +{ + manifest_object *self; + + ENTERING(manifest_object_pem_read_helper); + + if ((self = (manifest_object *) cms_object_pem_read_helper(type, bio)) != NULL) + self->manifest = NULL; + + return (PyObject *) self; +} + +static char manifest_object_pem_read__doc__[] = + "Read a PEM-encoded manifest object from a string.\n" + ; + +static PyObject * +manifest_object_pem_read(PyTypeObject *type, PyObject *args) +{ + 
ENTERING(manifest_object_pem_read); + return read_from_string_helper(manifest_object_pem_read_helper, type, args); +} + +static char manifest_object_pem_read_file__doc__[] = + "Read a PEM-encoded manifest object from a file.\n" + ; + +static PyObject * +manifest_object_pem_read_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(manifest_object_pem_read_file); + return read_from_file_helper(manifest_object_pem_read_helper, type, args); +} + +static char manifest_object_get_version__doc__[] = + "Return the version number of this manifest.\n" + ; + +static PyObject * +manifest_object_get_version(manifest_object *self) +{ + ENTERING(manifest_object_get_version); + + if (self->manifest == NULL) + lose_not_verified("Can't report version of unverified manifest"); + + if (self->manifest->version) + return Py_BuildValue("N", ASN1_INTEGER_to_PyLong(self->manifest->version)); + else + return PyInt_FromLong(0); + + error: + return NULL; +} + +static char manifest_object_set_version__doc__[] = + "Set the version number of this manifest.\n" + "\n" + "The \"version\" parameter should be a non-negative integer.\n" + "\n" + "As of this writing, zero is both the default and the only defined version.\n" + "Attempting to set any version number other than zero will fail, as we\n" + "don't understand how to write other versions, by definition.\n" + ; + +static PyObject * +manifest_object_set_version(manifest_object *self, PyObject *args) +{ + int version = 0; + + ENTERING(manifest_object_set_version); + + if (!PyArg_ParseTuple(args, "|i", &version)) + goto error; + + if (version != 0) + lose("RFC 6486 only defines RPKI manifest version zero"); + + if (self->manifest == NULL) + lose_not_verified("Can't set version of unverified manifest"); + + ASN1_INTEGER_free(self->manifest->version); + self->manifest->version = NULL; + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char manifest_object_get_manifest_number__doc__[] = + "Return the manifestNumber of this manifest.\n" + ; + 
static PyObject *
manifest_object_get_manifest_number(manifest_object *self)
{
  ENTERING(manifest_object_get_manifest_number);

  if (self->manifest == NULL)
    lose_not_verified("Can't get manifestNumber of unverified manifest");

  /* "N" steals the reference returned by ASN1_INTEGER_to_PyLong(). */
  return Py_BuildValue("N", ASN1_INTEGER_to_PyLong(self->manifest->manifestNumber));

 error:
  return NULL;
}

static char manifest_object_set_manifest_number__doc__[] =
  "Set the manifestNumber of this manifest.\n"
  "\n"
  "The \"manifestNumber\" parameter should be a non-negative integer.\n"
  ;

static PyObject *
manifest_object_set_manifest_number(manifest_object *self, PyObject *args)
{
  PyObject *manifestNumber = NULL;
  PyObject *zero = NULL;
  int ok = 0;

  ENTERING(manifest_object_set_manifest_number);

  if (!PyArg_ParseTuple(args, "O", &manifestNumber))
    goto error;

  if ((zero = PyInt_FromLong(0)) == NULL)
    goto error;

  /* Reject negative values: -1 is comparison error, 0 means "not >= 0". */
  switch (PyObject_RichCompareBool(manifestNumber, zero, Py_GE)) {
  case -1:
    goto error;
  case 0:
    lose("Negative manifest number is not allowed");
  }

  if (self->manifest == NULL)
    lose_not_verified("Can't set manifestNumber of unverified manifest");

  ASN1_INTEGER_free(self->manifest->manifestNumber);

  if ((self->manifest->manifestNumber = PyLong_to_ASN1_INTEGER(manifestNumber)) == NULL)
    goto error;

  ok = 1;

 error:
  Py_XDECREF(zero);

  if (ok)
    Py_RETURN_NONE;
  else
    return NULL;
}

static char manifest_object_set_this_update__doc__[] =
  "Set this manifest's \"thisUpdate\" value.\n"
  "\n"
  "The \"time\" parameter should be a datetime object.\n"
  ;

static PyObject *
manifest_object_set_this_update (manifest_object *self, PyObject *args)
{
  ASN1_TIME *t = NULL;
  PyObject *o = NULL;

  ENTERING(manifest_object_set_this_update);

  if (!PyArg_ParseTuple(args, "O", &o))
    goto error;

  if (self->manifest == NULL)
    lose_not_verified("Can't set thisUpdate value of unverified manifest");

  if ((t = Python_to_ASN1_TIME(o, 0)) == NULL)
    lose("Couldn't convert thisUpdate string");

  /* Replace the old value; ownership of t passes to the manifest. */
  ASN1_TIME_free(self->manifest->thisUpdate);
  self->manifest->thisUpdate = t;
  Py_RETURN_NONE;

 error:
  ASN1_TIME_free(t);
  return NULL;
}

static char manifest_object_get_this_update__doc__[] =
  "Return this manifest's \"thisUpdate\" value as a datetime.\n"
  ;

static PyObject *
manifest_object_get_this_update (manifest_object *self)
{
  ENTERING(manifest_object_get_this_update);

  if (self->manifest == NULL)
    lose_not_verified("Can't get thisUpdate value of unverified manifest");

  return ASN1_TIME_to_Python(self->manifest->thisUpdate);

 error:
  return NULL;
}

static char manifest_object_set_next_update__doc__[] =
  "Set this manifest's \"nextUpdate\" value.\n"
  "\n"
  "The \"time\" parameter should be a datetime object.\n"
  ;

static PyObject *
manifest_object_set_next_update (manifest_object *self, PyObject *args)
{
  ASN1_TIME *t = NULL;
  PyObject *o = NULL;

  ENTERING(manifest_object_set_next_update);

  if (!PyArg_ParseTuple(args, "O", &o))
    goto error;

  if (self->manifest == NULL)
    lose_not_verified("Can't set nextUpdate value of unverified manifest");

  if ((t = Python_to_ASN1_TIME(o, 0)) == NULL)
    lose("Couldn't parse nextUpdate string");

  ASN1_TIME_free(self->manifest->nextUpdate);
  self->manifest->nextUpdate = t;
  Py_RETURN_NONE;

 error:
  ASN1_TIME_free(t);
  return NULL;
}

static char manifest_object_get_next_update__doc__[] =
  "Return this manifest's \"nextUpdate\" value as a datetime.\n"
  ;

static PyObject *
manifest_object_get_next_update (manifest_object *self)
{
  ENTERING(manifest_object_get_next_update);

  if (self->manifest == NULL)
    lose_not_verified("Can't extract nextUpdate value of unverified manifest");

  return ASN1_TIME_to_Python(self->manifest->nextUpdate);

 error:
  return NULL;
}

static char manifest_object_get_algorithm__doc__[] =
  "Return this manifest's fileHashAlg OID.\n"
  ;

static PyObject *
manifest_object_get_algorithm(manifest_object *self)
{
  PyObject *result = NULL;

  ENTERING(manifest_object_get_algorithm);

  if (self->manifest == NULL)
    lose_not_verified("Can't extract algorithm OID of unverified manifest");

  result = ASN1_OBJECT_to_PyString(self->manifest->fileHashAlg);

 error:
  return result;
}

static char manifest_object_set_algorithm__doc__[] =
  "Set this manifest's fileHashAlg OID.\n"
  ;

static PyObject *
manifest_object_set_algorithm(manifest_object *self, PyObject *args)
{
  ASN1_OBJECT *oid = NULL;
  const char *s = NULL;

  ENTERING(manifest_object_set_algorithm);

  if (!PyArg_ParseTuple(args, "s", &s))
    goto error;

  if (self->manifest == NULL)
    lose_not_verified("Can't set algorithm OID for unverified manifest");

  if ((oid = OBJ_txt2obj(s, 1)) == NULL)
    lose_no_memory();

  /* Ownership of oid passes to the manifest on success. */
  ASN1_OBJECT_free(self->manifest->fileHashAlg);
  self->manifest->fileHashAlg = oid;
  Py_RETURN_NONE;

 error:
  ASN1_OBJECT_free(oid);
  return NULL;
}

static char manifest_object_add_files__doc__[] =
  "Add a collection of pairs to this manifest.\n"
  "\n"
  "The \"iterable\" parameter should be an iterable object supplying\n"
  "returning two-element sequences; the first element of each sequence\n"
  "should be the filename (a text string), the second element should be the\n"
  "hash (a binary string).\n"
  ;

static PyObject *
manifest_object_add_files(manifest_object *self, PyObject *args)
{
  PyObject *iterable = NULL;
  PyObject *iterator = NULL;
  PyObject *item = NULL;
  PyObject *fast = NULL;
  FileAndHash *fah = NULL;
  char *file = NULL;
  char *hash = NULL;
  Py_ssize_t filelen, hashlen;
  int ok = 0;

  ENTERING(manifest_object_add_files);

  if (self->manifest == NULL)
    lose_not_verified("Can't add files to unverified manifest");

  if (!PyArg_ParseTuple(args, "O", &iterable) ||
      (iterator = PyObject_GetIter(iterable)) == NULL)
    goto error;

  while ((item = PyIter_Next(iterator)) != NULL) {

    if ((fast = PySequence_Fast(item, "FileAndHash entry must be a sequence")) == NULL)
      goto error;

    if (PySequence_Fast_GET_SIZE(fast) != 2)
      lose_type_error("FileAndHash entry must be two-element sequence");

    if (PyString_AsStringAndSize(PySequence_Fast_GET_ITEM(fast, 0), &file, &filelen) < 0 ||
        PyString_AsStringAndSize(PySequence_Fast_GET_ITEM(fast, 1), &hash, &hashlen) < 0)
      goto error;

    /* Once pushed onto fileList, fah is owned by the manifest. */
    if ((fah = FileAndHash_new()) == NULL ||
        !ASN1_OCTET_STRING_set(fah->file, (unsigned char *) file, filelen) ||
        !ASN1_BIT_STRING_set(fah->hash, (unsigned char *) hash, hashlen) ||
        !sk_FileAndHash_push(self->manifest->fileList, fah))
      lose_no_memory();

    /* Hash is a whole number of octets: clear the unused-bits count. */
    fah->hash->flags &= ~7;
    fah->hash->flags |= ASN1_STRING_FLAG_BITS_LEFT;

    fah = NULL;
    Py_XDECREF(item);
    Py_XDECREF(fast);
    item = fast = NULL;
  }

  ok = 1;

 error:
  Py_XDECREF(iterator);
  Py_XDECREF(item);
  Py_XDECREF(fast);
  FileAndHash_free(fah);

  if (ok)
    Py_RETURN_NONE;
  else
    return NULL;
}

static char manifest_object_get_files__doc__[] =
  "Return a tuple of pairs representing the contents of\n"
  "this manifest.\n"
  ;

static PyObject *
manifest_object_get_files(manifest_object *self)
{
  PyObject *result = NULL;
  PyObject *item = NULL;
  int i;

  ENTERING(manifest_object_get_files);

  if (self->manifest == NULL)
    lose_not_verified("Can't get files from unverified manifest");

  if (self->manifest->fileList == NULL)
    lose("Inexplicable NULL manifest fileList pointer");

  if ((result = PyTuple_New(sk_FileAndHash_num(self->manifest->fileList))) == NULL)
    goto error;

  for (i = 0; i < sk_FileAndHash_num(self->manifest->fileList); i++) {
    FileAndHash *fah = sk_FileAndHash_value(self->manifest->fileList, i);

    item = Py_BuildValue("(s#s#)",
                         ASN1_STRING_data(fah->file),
                         (Py_ssize_t) ASN1_STRING_length(fah->file),
                         ASN1_STRING_data(fah->hash),
                         (Py_ssize_t) ASN1_STRING_length(fah->hash));
    if (item == NULL)
      goto error;

    /* PyTuple_SET_ITEM steals the reference to item. */
    PyTuple_SET_ITEM(result, i, item);
    item = NULL;
  }

  return result;

 error:
  Py_XDECREF(result);
  Py_XDECREF(item);
  return NULL;
}

static char manifest_object_sign__doc__[] =
  "Sign this manifest.  See the CMS class's .sign() method for details.\n"
  ;

static PyObject *
manifest_object_sign(manifest_object *self, PyObject *args)
{
  asymmetric_object *signkey = NULL;
  x509_object *signcert = NULL;
  PyObject *x509_iterable = Py_None;
  PyObject *crl_iterable = Py_None;
  char *oid = NULL;
  unsigned flags = 0;
  BIO *bio = NULL;
  int ok = 0;

  ENTERING(manifest_object_sign);

  if (!PyArg_ParseTuple(args, "O!O!|OOsI",
                        &POW_X509_Type, &signcert,
                        &POW_Asymmetric_Type, &signkey,
                        &x509_iterable,
                        &crl_iterable,
                        &oid,
                        &flags))
    goto error;

  if ((bio = BIO_new(BIO_s_mem())) == NULL)
    lose_no_memory();

  assert_no_unhandled_openssl_errors();

  /* DER-encode the manifest payload, then wrap it via the shared CMS code. */
  if (!ASN1_item_i2d_bio(ASN1_ITEM_rptr(Manifest), bio, self->manifest))
    lose_openssl_error("Couldn't encode manifest");

  assert_no_unhandled_openssl_errors();

  if (!cms_object_sign_helper(&self->cms, bio, signcert, signkey,
                              x509_iterable, crl_iterable, oid, flags))
    lose_openssl_error("Couldn't sign manifest");

  assert_no_unhandled_openssl_errors();

  ok = 1;

 error:
  BIO_free(bio);

  if (ok)
    Py_RETURN_NONE;
  else
    return NULL;
}

static struct PyMethodDef manifest_object_methods[] = {
  Define_Method(getVersion,             manifest_object_get_version,            METH_NOARGS),
  Define_Method(setVersion,             manifest_object_set_version,            METH_VARARGS),
  Define_Method(getManifestNumber,      manifest_object_get_manifest_number,    METH_NOARGS),
  Define_Method(setManifestNumber,      manifest_object_set_manifest_number,    METH_VARARGS),
  Define_Method(getThisUpdate,          manifest_object_get_this_update,        METH_NOARGS),
  Define_Method(setThisUpdate,          manifest_object_set_this_update,        METH_VARARGS),
  Define_Method(getNextUpdate,          manifest_object_get_next_update,        METH_NOARGS),
  Define_Method(setNextUpdate,          manifest_object_set_next_update,        METH_VARARGS),
  Define_Method(getAlgorithm,           manifest_object_get_algorithm,          METH_NOARGS),
  Define_Method(setAlgorithm,           manifest_object_set_algorithm,          METH_VARARGS),
  Define_Method(getFiles,               manifest_object_get_files,              METH_NOARGS),
  Define_Method(addFiles,               manifest_object_add_files,              METH_VARARGS),
  Define_Method(sign,                   manifest_object_sign,                   METH_VARARGS),
  Define_Method(verify,                 manifest_object_verify,                 METH_KEYWORDS),
  Define_Class_Method(pemRead,          manifest_object_pem_read,               METH_VARARGS),
  Define_Class_Method(pemReadFile,      manifest_object_pem_read_file,          METH_VARARGS),
  Define_Class_Method(derRead,          manifest_object_der_read,               METH_VARARGS),
  Define_Class_Method(derReadFile,      manifest_object_der_read_file,          METH_VARARGS),
  {NULL}
};

static char POW_Manifest_Type__doc__[] =
  "This class provides access to RPKI manifest payload.\n"
  "Most methods are inherited from or share code with the CMS class.\n"
  ;

static PyTypeObject POW_Manifest_Type = {
  PyObject_HEAD_INIT(0)
  0,                                            /* ob_size */
  "rpki.POW.Manifest",                          /* tp_name */
  sizeof(manifest_object),                      /* tp_basicsize */
  0,                                            /* tp_itemsize */
  (destructor)manifest_object_dealloc,          /* tp_dealloc */
  0,                                            /* tp_print */
  0,                                            /* tp_getattr */
  0,                                            /* tp_setattr */
  0,                                            /* tp_compare */
  0,                                            /* tp_repr */
  0,                                            /* tp_as_number */
  0,                                            /* tp_as_sequence */
  0,                                            /* tp_as_mapping */
  0,                                            /* tp_hash */
  0,                                            /* tp_call */
  0,                                            /* tp_str */
  0,                                            /* tp_getattro */
  0,                                            /* tp_setattro */
  0,                                            /* tp_as_buffer */
  Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,     /* tp_flags */
  POW_Manifest_Type__doc__,                     /* tp_doc */
  0,                                            /* tp_traverse */
  0,                                            /* tp_clear */
  0,                                            /* tp_richcompare */
  0,                                            /* tp_weaklistoffset */
  0,                                            /* tp_iter */
  0,                                            /* tp_iternext */
  manifest_object_methods,                      /* tp_methods */
  0,                                            /* tp_members */
  0,                                            /* tp_getset */
  &POW_CMS_Type,                                /* tp_base */
  0,                                            /* tp_dict */
  0,                                            /* tp_descr_get */
  0,                                            /* tp_descr_set */
  0,                                            /* tp_dictoffset */
  0,                                            /* tp_init */
  0,                                            /* tp_alloc */
  manifest_object_new,                          /* tp_new */
};



/*
 * ROA object.
 */

static PyObject *
roa_object_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
  roa_object *self = NULL;

  ENTERING(roa_object_new);

  if ((self = (roa_object *) cms_object_new(type, args, kwds)) != NULL &&
      (self->roa = ROA_new()) != NULL)
    return (PyObject *) self;

  Py_XDECREF(self);
  return NULL;
}

static void
roa_object_dealloc(roa_object *self)
{
  ENTERING(roa_object_dealloc);
  ROA_free(self->roa);
  cms_object_dealloc(&self->cms);
}

static char roa_object_verify__doc__[] =
  "Verify this ROA.  See CMS.verify() for details.\n"
  ;

static PyObject *
roa_object_verify(roa_object *self, PyObject *args, PyObject *kwds)
{
  BIO *bio = NULL;
  int ok = 0;

  ENTERING(roa_object_verify);

  if ((bio = cms_object_verify_helper(&self->cms, args, kwds)) == NULL)
    goto error;

  /* Decode the verified CMS payload as a ROA. */
  if (!ASN1_item_d2i_bio(ASN1_ITEM_rptr(ROA), bio, &self->roa))
    lose_openssl_error("Couldn't decode ROA");

  ok = 1;

 error:
  BIO_free(bio);

  if (ok)
    Py_RETURN_NONE;
  else
    return NULL;
}

static PyObject *
roa_object_pem_read_helper(PyTypeObject *type, BIO *bio)
{
  roa_object *self;

  ENTERING(roa_object_pem_read_helper);

  /* Payload stays NULL (thus "unverified") until .verify() decodes it. */
  if ((self = (roa_object *) cms_object_pem_read_helper(type, bio)) != NULL)
    self->roa = NULL;

  return (PyObject *) self;
}

static PyObject *
roa_object_der_read_helper(PyTypeObject *type, BIO *bio)
{
  roa_object *self;

  ENTERING(roa_object_der_read_helper);

  if ((self = (roa_object *) cms_object_der_read_helper(type, bio)) != NULL)
    self->roa = NULL;

  return (PyObject *) self;
}

static char roa_object_pem_read__doc__[] =
  "Read a PEM-encoded ROA object from a string.\n"
  ;

static PyObject *
roa_object_pem_read(PyTypeObject *type, PyObject *args)
{
  ENTERING(roa_object_pem_read);
  return read_from_string_helper(roa_object_pem_read_helper, type, args);
}

static char roa_object_pem_read_file__doc__[] =
  "Read a PEM-encoded ROA object from a file.\n"
  ;

static PyObject *
roa_object_pem_read_file(PyTypeObject *type, PyObject *args)
{
  ENTERING(roa_object_pem_read_file);
  return read_from_file_helper(roa_object_pem_read_helper, type, args);
}

static char roa_object_der_read__doc__[] =
  "Read a DER-encoded ROA object from a string.\n"
  ;

static PyObject *
roa_object_der_read(PyTypeObject *type, PyObject *args)
{
  ENTERING(roa_object_der_read);
  return read_from_string_helper(roa_object_der_read_helper, type, args);
}

static char roa_object_der_read_file__doc__[] =
  "Read a DER-encoded ROA object from a file.\n"
  ;

static PyObject *
roa_object_der_read_file(PyTypeObject *type, PyObject *args)
{
  ENTERING(roa_object_der_read_file);
  return read_from_file_helper(roa_object_der_read_helper, type, args);
}

static char roa_object_get_version__doc__[] =
  "Return the version number of this ROA.\n"
  ;

static PyObject *
roa_object_get_version(roa_object *self)
{
  ENTERING(roa_object_get_version);

  if (self->roa == NULL)
    lose_not_verified("Can't get version of unverified ROA");

  /* An absent (NULL) version field means the DEFAULT value, zero. */
  if (self->roa->version)
    return Py_BuildValue("N", ASN1_INTEGER_to_PyLong(self->roa->version));
  else
    return PyInt_FromLong(0);

 error:
  return NULL;
}

static char roa_object_set_version__doc__[] =
  "Set the version number of this ROA.\n"
  "\n"
  "The \"version\" parameter should be a non-negative integer.\n"
  "\n"
  "As of this writing, zero is both the default and the only defined version.\n"
  "Attempting to set any version number other than zero will fail, as we\n"
  "don't understand how to write other versions, by definition.\n"
  ;

static PyObject *
roa_object_set_version(roa_object *self, PyObject *args)
{
  int version = 0;

  ENTERING(roa_object_set_version);

  if (self->roa == NULL)
    lose_not_verified("Can't set version of unverified ROA");

  if (!PyArg_ParseTuple(args, "|i", &version))
    goto error;

  if (version != 0)
    lose("RFC 6482 only defines ROA version zero");

  /* Version zero is the DEFAULT, encoded by omitting the field. */
  ASN1_INTEGER_free(self->roa->version);
  self->roa->version = NULL;

  Py_RETURN_NONE;

 error:
  return NULL;
}

static char roa_object_get_asid__doc__[] =
  "Return the Autonomous System ID of this ROA.\n"
  ;

static PyObject *
roa_object_get_asid(roa_object *self)
{
  ENTERING(roa_object_get_asid);

  if (self->roa == NULL)
    lose_not_verified("Can't get ASN of unverified ROA");

  return Py_BuildValue("N", ASN1_INTEGER_to_PyLong(self->roa->asID));

 error:
  return NULL;
}

static char roa_object_set_asid__doc__[] =
  "Sets the Autonomous System ID of this ROA.\n"
  "\n"
  "The \"asID\" parameter should be a non-negative integer.\n"
  ;

static PyObject *
roa_object_set_asid(roa_object *self, PyObject *args)
{
  PyObject *asID = NULL;
  PyObject *zero = NULL;
  int ok = 0;

  ENTERING(roa_object_set_asid);

  if (self->roa == NULL)
    lose_not_verified("Can't set ASN of unverified ROA");

  if (!PyArg_ParseTuple(args, "O", &asID))
    goto error;

  if ((zero = PyInt_FromLong(0)) == NULL)
    goto error;

  /* Reject negative values: -1 is comparison error, 0 means "not >= 0". */
  switch (PyObject_RichCompareBool(asID, zero, Py_GE)) {
  case -1:
    goto error;
  case 0:
    lose("Negative asID is not allowed");
  }

  ASN1_INTEGER_free(self->roa->asID);

  if ((self->roa->asID = PyLong_to_ASN1_INTEGER(asID)) == NULL)
    goto error;

  ok = 1;

 error:
  Py_XDECREF(zero);

  if (ok)
    Py_RETURN_NONE;
  else
    return NULL;
}

static char roa_object_get_prefixes__doc__[] =
  "Return this ROA's prefix list.  This is a two-element\n"
  "tuple: the first element is the IPv4 prefix set, the second is the\n"
  "IPv6 prefix set.\n"
  "\n"
  "Each prefix set is either None, if there are no prefixes for this IP\n"
  "version, or a sequence of three-element tuple representing ROA prefix\n"
  "entries.\n"
  "\n"
  "Each ROA prefix entry consists of the prefix itself (an IPAddress),\n"
  "the prefix length (an integer), and the maxPrefixLen value, which is\n"
  "either an integer or None depending on whether the maxPrefixLen value\n"
  "is set for this prefix.\n"
  ;

static PyObject *
roa_object_get_prefixes(roa_object *self)
{
  PyObject *result = NULL;
  PyObject *ipv4_result = NULL;
  PyObject *ipv6_result = NULL;
  PyObject *item = NULL;
  ipaddress_object *addr = NULL;
  int i, j;

  ENTERING(roa_object_get_prefixes);

  if (self->roa == NULL)
    lose_not_verified("Can't get prefixes from unverified ROA");

  for (i = 0; i < sk_ROAIPAddressFamily_num(self->roa->ipAddrBlocks); i++) {
    ROAIPAddressFamily *fam = sk_ROAIPAddressFamily_value(self->roa->ipAddrBlocks, i);
    /* AFI is the first two octets of the addressFamily OCTET STRING, big-endian. */
    const unsigned afi = (fam->addressFamily->data[0] << 8) | (fam->addressFamily->data[1]);
    const ipaddress_version *ip_type = NULL;
    PyObject **resultp = NULL;

    switch (afi) {
    case IANA_AFI_IPV4: resultp = &ipv4_result; ip_type = &ipaddress_version_4; break;
    case IANA_AFI_IPV6: resultp = &ipv6_result; ip_type = &ipaddress_version_6; break;
    default:            lose_type_error("Unknown AFI");
    }

    /* A third addressFamily octet would be a SAFI, which we don't support. */
    if (fam->addressFamily->length > 2)
      lose_type_error("Unsupported SAFI");

    if (*resultp != NULL)
      lose_type_error("Duplicate ROAIPAddressFamily");

    if ((*resultp = PyTuple_New(sk_ROAIPAddress_num(fam->addresses))) == NULL)
      goto error;

    for (j = 0; j < sk_ROAIPAddress_num(fam->addresses); j++) {
      ROAIPAddress *a = sk_ROAIPAddress_value(fam->addresses, j);
      /* Prefix length = total bits in the BIT STRING minus the unused-bits count. */
      unsigned prefixlen = ((a->IPAddress)->length * 8 - ((a->IPAddress)->flags & 7));

      if ((addr = (ipaddress_object *) POW_IPAddress_Type.tp_alloc(&POW_IPAddress_Type, 0)) == NULL)
        goto error;

      addr->type = ip_type;

      memset(addr->address, 0, sizeof(addr->address));

      if ((unsigned) a->IPAddress->length > addr->type->length)
        lose("ROAIPAddress BIT STRING too long for AFI");

      if (a->IPAddress->length > 0) {
        memcpy(addr->address, a->IPAddress->data, a->IPAddress->length);

        /* Zero any unused low-order bits in the final octet. */
        if ((a->IPAddress->flags & 7) != 0) {
          unsigned char mask = 0xFF >> (8 - (a->IPAddress->flags & 7));
          addr->address[a->IPAddress->length - 1] &= ~mask;
        }
      }

      /* "N" steals the reference to addr on success. */
      if (a->maxLength == NULL)
        item = Py_BuildValue("(NIO)", addr, prefixlen, Py_None);
      else
        item = Py_BuildValue("(NIl)", addr, prefixlen, ASN1_INTEGER_get(a->maxLength));

      if (item == NULL)
        goto error;

      PyTuple_SET_ITEM(*resultp, j, item);
      item = NULL;
      addr = NULL;
    }
  }

  result = Py_BuildValue("(OO)",
                         (ipv4_result == NULL ? Py_None : ipv4_result),
                         (ipv6_result == NULL ? Py_None : ipv6_result));

 error:                          /* Fall through */
  Py_XDECREF(addr);
  Py_XDECREF(item);
  Py_XDECREF(ipv4_result);
  Py_XDECREF(ipv6_result);

  return result;
}

static char roa_object_set_prefixes__doc__[] =
  "Set this ROA's prefix list.\n"
  "\n"
  "This method takes two arguments, \"ipv4\" and \"ipv6\".  Each of these\n"
  "is either None, if no prefixes should be set for this IP version, or\n"
  "an iterable object returning ROA prefix entries in the same format as\n"
  "returned by the .getPrefixes() method.
The maxPrefixLen value may be\n" + "omitted (that is, the ROA prefix entry tuple may be of length two\n" + "rather than of length three); this will be taken as equivalent to\n" + "specifying a maxPrefixLen value of None.\n" + ; + +static PyObject * +roa_object_set_prefixes(roa_object *self, PyObject *args, PyObject *kwds) +{ + static char *kwlist[] = {"ipv4", "ipv6", NULL}; + STACK_OF(ROAIPAddressFamily) *prefixes = NULL; + ROAIPAddressFamily *fam = NULL; + ROAIPAddress *a = NULL; + PyObject *ipv4_arg = Py_None; + PyObject *ipv6_arg = Py_None; + PyObject *iterator = NULL; + PyObject *item = NULL; + PyObject *fast = NULL; + int ok = 0; + int v; + + ENTERING(roa_object_set_prefixes); + + if (self->roa == NULL) + lose_not_verified("Can't set prefixes of unverified ROA"); + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OO", kwlist, &ipv4_arg, &ipv6_arg)) + goto error; + + if ((prefixes = sk_ROAIPAddressFamily_new_null()) == NULL) + lose_no_memory(); + + for (v = 0; v < (int) (sizeof(ipaddress_versions)/sizeof(*ipaddress_versions)); v++) { + const struct ipaddress_version *ip_type = ipaddress_versions[v]; + unsigned char afibuf[2]; + PyObject **argp; + + switch (ip_type->version) { + case 4: argp = &ipv4_arg; break; + case 6: argp = &ipv6_arg; break; + default: continue; + } + + if (*argp == Py_None) + continue; + + afibuf[0] = (ip_type->afi >> 8) & 0xFF; + afibuf[1] = (ip_type->afi ) & 0xFF; + + if ((iterator = PyObject_GetIter(*argp)) == NULL) + goto error; + + while ((item = PyIter_Next(iterator)) != NULL) { + unsigned prefixlen, maxprefixlen, bitlen, bytelen; + ipaddress_object *addr = NULL; + PyObject *maxlenobj = Py_None; + + if ((fast = PySequence_Fast(item, "ROA prefix must be a sequence")) == NULL) + goto error; + + switch (PySequence_Fast_GET_SIZE(fast)) { + case 3: + maxlenobj = PySequence_Fast_GET_ITEM(fast, 2); + /* Fall through */ + case 2: + if (!POW_IPAddress_Check(PySequence_Fast_GET_ITEM(fast, 0))) + lose_type_error("First element of ROA prefix must 
be an IPAddress object"); + addr = (ipaddress_object *) PySequence_Fast_GET_ITEM(fast, 0); + prefixlen = (unsigned) PyInt_AsLong(PySequence_Fast_GET_ITEM(fast, 1)); + if (PyErr_Occurred()) + goto error; + break; + default: + lose_type_error("ROA prefix must be a two- or three-element sequence"); + } + + if (maxlenobj == Py_None) { + maxprefixlen = prefixlen; + } else { + maxprefixlen = (unsigned) PyInt_AsLong(maxlenobj); + if (PyErr_Occurred()) + goto error; + } + + if (addr->type != ip_type) + lose_type_error("Bad ROA prefix"); + + if (prefixlen > addr->type->length * 8) + lose("Bad prefix length"); + + if (maxprefixlen > addr->type->length * 8 || maxprefixlen < prefixlen) + lose("Bad maxLength value"); + + bytelen = (prefixlen + 7) / 8; + bitlen = prefixlen % 8; + + if ((a = ROAIPAddress_new()) == NULL || + (a->IPAddress == NULL && (a->IPAddress = ASN1_BIT_STRING_new()) == NULL) || + !ASN1_BIT_STRING_set(a->IPAddress, addr->address, bytelen)) + lose_no_memory(); + + a->IPAddress->flags &= ~7; + a->IPAddress->flags |= ASN1_STRING_FLAG_BITS_LEFT; + if (bitlen > 0) { + a->IPAddress->data[bytelen - 1] &= ~(0xFF >> bitlen); + a->IPAddress->flags |= 8 - bitlen; + } + + if (prefixlen != maxprefixlen && + ((a->maxLength = ASN1_INTEGER_new()) == NULL || + !ASN1_INTEGER_set(a->maxLength, maxprefixlen))) + lose_no_memory(); + + if (fam == NULL && + ((fam = ROAIPAddressFamily_new()) == NULL || + !sk_ROAIPAddressFamily_push(prefixes, fam) || + !ASN1_OCTET_STRING_set(fam->addressFamily, afibuf, sizeof(afibuf)))) + lose_no_memory(); + + if (!sk_ROAIPAddress_push(fam->addresses, a)) + lose_no_memory(); + + a = NULL; + Py_XDECREF(item); + Py_XDECREF(fast); + item = fast = NULL; + } + + fam = NULL; + Py_XDECREF(iterator); + iterator = NULL; + } + + sk_ROAIPAddressFamily_pop_free(self->roa->ipAddrBlocks, ROAIPAddressFamily_free); + self->roa->ipAddrBlocks = prefixes; + prefixes = NULL; + + ok = 1; + + error: + sk_ROAIPAddressFamily_pop_free(prefixes, ROAIPAddressFamily_free); + 
ROAIPAddressFamily_free(fam); + ROAIPAddress_free(a); + Py_XDECREF(iterator); + Py_XDECREF(item); + Py_XDECREF(fast); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +static char roa_object_sign__doc__[] = + "Sign this ROA. See CMS.sign() for details.\n" + ; + +static PyObject * +roa_object_sign(roa_object *self, PyObject *args) +{ + asymmetric_object *signkey = NULL; + x509_object *signcert = NULL; + PyObject *x509_iterable = Py_None; + PyObject *crl_iterable = Py_None; + char *oid = NULL; + unsigned flags = 0; + BIO *bio = NULL; + int ok = 0; + + ENTERING(roa_object_sign); + + if (!PyArg_ParseTuple(args, "O!O!|OOsI", + &POW_X509_Type, &signcert, + &POW_Asymmetric_Type, &signkey, + &x509_iterable, + &crl_iterable, + &oid, + &flags)) + goto error; + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + assert_no_unhandled_openssl_errors(); + + if (!ASN1_item_i2d_bio(ASN1_ITEM_rptr(ROA), bio, self->roa)) + lose_openssl_error("Couldn't encode ROA"); + + assert_no_unhandled_openssl_errors(); + + if (!cms_object_sign_helper(&self->cms, bio, signcert, signkey, + x509_iterable, crl_iterable, oid, flags)) + lose_openssl_error("Couldn't sign ROA"); + + assert_no_unhandled_openssl_errors(); + + ok = 1; + + error: + BIO_free(bio); + + if (ok) + Py_RETURN_NONE; + else + return NULL; +} + +static struct PyMethodDef roa_object_methods[] = { + Define_Method(getVersion, roa_object_get_version, METH_NOARGS), + Define_Method(setVersion, roa_object_set_version, METH_VARARGS), + Define_Method(getASID, roa_object_get_asid, METH_NOARGS), + Define_Method(setASID, roa_object_set_asid, METH_VARARGS), + Define_Method(getPrefixes, roa_object_get_prefixes, METH_NOARGS), + Define_Method(setPrefixes, roa_object_set_prefixes, METH_KEYWORDS), + Define_Method(sign, roa_object_sign, METH_VARARGS), + Define_Method(verify, roa_object_verify, METH_KEYWORDS), + Define_Class_Method(pemRead, roa_object_pem_read, METH_VARARGS), + Define_Class_Method(pemReadFile, 
roa_object_pem_read_file, METH_VARARGS), + Define_Class_Method(derRead, roa_object_der_read, METH_VARARGS), + Define_Class_Method(derReadFile, roa_object_der_read_file, METH_VARARGS), + {NULL} +}; + +static char POW_ROA_Type__doc__[] = + "This class provides access to RPKI ROA payload.\n" + "Most methods are inherited from or share code with the CMS class.\n" + ; + +static PyTypeObject POW_ROA_Type = { + PyObject_HEAD_INIT(0) + 0, /* ob_size */ + "rpki.POW.ROA", /* tp_name */ + sizeof(roa_object), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)roa_object_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + POW_ROA_Type__doc__, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + roa_object_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + &POW_CMS_Type, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + roa_object_new, /* tp_new */ +}; + + + +/* + * PKCS10 object. 
+ */ + +static PyObject * +pkcs10_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds) +{ + pkcs10_object *self; + + ENTERING(pkcs10_object_new); + + if ((self = (pkcs10_object *) type->tp_alloc(type, 0)) != NULL && + (self->pkcs10 = X509_REQ_new()) != NULL && + (self->exts = sk_X509_EXTENSION_new_null()) != NULL) + return (PyObject *) self; + + Py_XDECREF(self); + return NULL; +} + +static void +pkcs10_object_dealloc(pkcs10_object *self) +{ + ENTERING(pkcs10_object_dealloc); + X509_REQ_free(self->pkcs10); + sk_X509_EXTENSION_pop_free(self->exts, X509_EXTENSION_free); + self->ob_type->tp_free((PyObject*) self); +} + +static PyObject * +pkcs10_object_pem_read_helper(PyTypeObject *type, BIO *bio) +{ + pkcs10_object *self = NULL; + + ENTERING(pkcs10_object_pem_read_helper); + + assert_no_unhandled_openssl_errors(); + + if ((self = (pkcs10_object *) pkcs10_object_new(type, NULL, NULL)) == NULL) + goto error; + + assert_no_unhandled_openssl_errors(); + + if (!PEM_read_bio_X509_REQ(bio, &self->pkcs10, NULL, NULL)) + lose_openssl_error("Couldn't load PEM encoded PKCS#10 request"); + + sk_X509_EXTENSION_pop_free(self->exts, X509_EXTENSION_free); + self->exts = X509_REQ_get_extensions(self->pkcs10); + + assert_no_unhandled_openssl_errors(); + + return (PyObject *) self; + + error: + + Py_XDECREF(self); + return NULL; +} + +static PyObject * +pkcs10_object_der_read_helper(PyTypeObject *type, BIO *bio) +{ + pkcs10_object *self = NULL; + + ENTERING(pkcs10_object_der_read_helper); + + assert_no_unhandled_openssl_errors(); + + if ((self = (pkcs10_object *) pkcs10_object_new(type, NULL, NULL)) == NULL) + goto error; + + assert_no_unhandled_openssl_errors(); + + if (!d2i_X509_REQ_bio(bio, &self->pkcs10)) + lose_openssl_error("Couldn't load DER encoded PKCS#10 request"); + + sk_X509_EXTENSION_pop_free(self->exts, X509_EXTENSION_free); + self->exts = X509_REQ_get_extensions(self->pkcs10); + + assert_no_unhandled_openssl_errors(); + + return (PyObject 
*) self; + + error: + Py_XDECREF(self); + return NULL; +} + +static char pkcs10_object_pem_read__doc__[] = + "Read a PEM-encoded PKCS#10 object from a string.\n" + ; + +static PyObject * +pkcs10_object_pem_read(PyTypeObject *type, PyObject *args) +{ + ENTERING(pkcs10_object_pem_read); + return read_from_string_helper(pkcs10_object_pem_read_helper, type, args); +} + +static char pkcs10_object_pem_read_file__doc__[] = + "Read a PEM-encoded PKCS#10 object from a file.\n" + ; + +static PyObject * +pkcs10_object_pem_read_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(pkcs10_object_pem_read_file); + return read_from_file_helper(pkcs10_object_pem_read_helper, type, args); +} + +static char pkcs10_object_der_read__doc__[] = + "Read a DER-encoded PKCS#10 object from a string.\n" + ; + +static PyObject * +pkcs10_object_der_read(PyTypeObject *type, PyObject *args) +{ + ENTERING(pkcs10_object_der_read); + return read_from_string_helper(pkcs10_object_der_read_helper, type, args); +} + +static char pkcs10_object_der_read_file__doc__[] = + "Read a DER-encoded PKCS#10 object from a file.\n" + ; + +static PyObject * +pkcs10_object_der_read_file(PyTypeObject *type, PyObject *args) +{ + ENTERING(pkcs10_object_der_read_file); + return read_from_file_helper(pkcs10_object_der_read_helper, type, args); +} + +static char pkcs10_object_pem_write__doc__[] = + "Returns the PEM encoding of this PKCS#10 object.\n" + ; + +static PyObject * +pkcs10_object_pem_write(pkcs10_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(pkcs10_object_pem_write); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!PEM_write_bio_X509_REQ(bio, self->pkcs10)) + lose_openssl_error("Unable to write PKCS#10 request"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static char pkcs10_object_der_write__doc__[] = + "Return the DER encoding of this PKCS#10 object.\n" + ; + +static PyObject * 
+pkcs10_object_der_write(pkcs10_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(pkcs10_object_der_write); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!i2d_X509_REQ_bio(bio, self->pkcs10)) + lose_openssl_error("Unable to write PKCS#10 request"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static X509_EXTENSIONS ** +pkcs10_object_extension_helper(pkcs10_object *self) +{ + return &self->exts; +} + +static char pkcs10_object_get_public_key__doc__[] = + "Return the public key from this PKCS#10 request, as an Asymmetric\n" + "object.\n" + ; + +static PyObject * +pkcs10_object_get_public_key(pkcs10_object *self) +{ + PyTypeObject *type = &POW_Asymmetric_Type; + asymmetric_object *asym = NULL; + + ENTERING(pkcs10_object_get_public_key); + + if ((asym = (asymmetric_object *) type->tp_alloc(type, 0)) == NULL) + goto error; + + if ((asym->pkey = X509_REQ_get_pubkey(self->pkcs10)) == NULL) + lose_openssl_error("Couldn't extract public key from PKCS#10 request"); + + return (PyObject *) asym; + + error: + Py_XDECREF(asym); + return NULL; +} + +static char pkcs10_object_set_public_key__doc__[] = + "Set the public key for this PKCS#10 request.\n" + "\n" + "The \"key\" parameter should be an instance of the Asymmetric class,\n" + "containing a public key.\n" + ; + +static PyObject * +pkcs10_object_set_public_key(pkcs10_object *self, PyObject *args) +{ + asymmetric_object *asym; + + ENTERING(pkcs10_object_set_public_key); + + if (!PyArg_ParseTuple(args, "O!", &POW_Asymmetric_Type, &asym)) + goto error; + + if (!X509_REQ_set_pubkey(self->pkcs10, asym->pkey)) + lose_openssl_error("Couldn't set certificate's PKCS#10 request"); + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char pkcs10_object_sign__doc__[] = + "Sign a PKCS#10 request with a private key.\n" + "\n" + "The \"key\" parameter should be an instance of the Asymmetric class,\n" + "containing 
a private key.\n" + "\n" + "The optional \"digest\" parameter indicates which digest to compute and\n" + "sign, and should be one of the following:\n" + "\n" + "* MD5_DIGEST\n" + "* SHA_DIGEST\n" + "* SHA1_DIGEST\n" + "* SHA256_DIGEST\n" + "* SHA384_DIGEST\n" + "* SHA512_DIGEST\n" + "\n" + "The default digest algorithm is SHA-256.\n" + ; + +static PyObject * +pkcs10_object_sign(pkcs10_object *self, PyObject *args) +{ + asymmetric_object *asym; + int loc, digest_type = SHA256_DIGEST; + const EVP_MD *digest_method = NULL; + + ENTERING(pkcs10_object_sign); + + if (!PyArg_ParseTuple(args, "O!|i", &POW_Asymmetric_Type, &asym, &digest_type)) + goto error; + + if ((digest_method = evp_digest_factory(digest_type)) == NULL) + lose("Unsupported digest algorithm"); + + while ((loc = X509_REQ_get_attr_by_NID(self->pkcs10, NID_ext_req, -1)) >= 0) + X509_ATTRIBUTE_free(X509_REQ_delete_attr(self->pkcs10, loc)); + + if (sk_X509_EXTENSION_num(self->exts) > 0 && + !X509_REQ_add_extensions(self->pkcs10, self->exts)) + lose_openssl_error("Couldn't add extensions block to PKCS#10 request"); + + if (!X509_REQ_sign(self->pkcs10, asym->pkey, digest_method)) + lose_openssl_error("Couldn't sign PKCS#10 request"); + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char pkcs10_object_verify__doc__[] = + "Verify a PKCS#10 request.\n" + "\n" + "This calls OpenSSL's X509_REQ_verify() method to check the request's\n" + "self-signature.\n" + ; + +static PyObject * +pkcs10_object_verify(pkcs10_object *self) +{ + EVP_PKEY *pkey = NULL; + int status; + + ENTERING(pkcs10_object_verify); + + if ((pkey = X509_REQ_get_pubkey(self->pkcs10)) == NULL) + lose_openssl_error("Couldn't extract public key from PKCS#10 for verification"); + + if ((status = X509_REQ_verify(self->pkcs10, pkey)) < 0) + lose_openssl_error("Couldn't verify PKCS#10 signature"); + + EVP_PKEY_free(pkey); + return PyBool_FromLong(status); + + error: + EVP_PKEY_free(pkey); + return NULL; +} + +static char 
pkcs10_object_get_version__doc__[] = + "Return the version number of this PKCS#10 request.\n" + ; + +static PyObject * +pkcs10_object_get_version(pkcs10_object *self) +{ + ENTERING(pkcs10_object_get_version); + return Py_BuildValue("l", X509_REQ_get_version(self->pkcs10)); +} + +static char pkcs10_object_set_version__doc__[] = + "Set the version number of this PKCS#10 request.\n" + "\n" + "The \"version\" parameter should be an integer, but the only defined\n" + "value is zero, so this field is optional and defaults to zero.\n" +; + +static PyObject * +pkcs10_object_set_version(pkcs10_object *self, PyObject *args) +{ + long version = 0; + + ENTERING(pkcs10_object_set_version); + + if (!PyArg_ParseTuple(args, "|l", &version)) + goto error; + + if (version != 0) + lose("RFC 6487 6.1.1 forbids non-zero values for this field"); + + if (!X509_REQ_set_version(self->pkcs10, version)) + lose("Couldn't set certificate version"); + + Py_RETURN_NONE; + + error: + + return NULL; +} + +static char pkcs10_object_get_subject__doc__[] = + "Return this PKCS #10 request's subject name.\n" + "\n" + "See the X509.getIssuer() method for details of the return value and\n" + "use of the optional \"format\" parameter.\n" + ; + +static PyObject * +pkcs10_object_get_subject(pkcs10_object *self, PyObject *args) +{ + PyObject *result = NULL; + int format = OIDNAME_FORMAT; + + ENTERING(pkcs10_object_get_subject); + + if (!PyArg_ParseTuple(args, "|i", &format)) + goto error; + + result = x509_object_helper_get_name(X509_REQ_get_subject_name(self->pkcs10), + format); + + error: /* Fall through */ + return result; +} + +static char pkcs10_object_set_subject__doc__[] = + "Set this PKCS#10 request's subject name.\n" + "\n" + "The \"name\" parameter should be in the same format as the return\n" + "value from the \"getSubject\" method.\n" + ; + +static PyObject * +pkcs10_object_set_subject(pkcs10_object *self, PyObject *args) +{ + PyObject *name_sequence = NULL; + X509_NAME *name = NULL; + + 
ENTERING(pkcs10_object_set_subject); + + if (!PyArg_ParseTuple(args, "O", &name_sequence)) + goto error; + + if (!PySequence_Check(name_sequence)) + lose_type_error("Inapropriate type"); + + if ((name = x509_object_helper_set_name(name_sequence)) == NULL) + goto error; + + if (!X509_REQ_set_subject_name(self->pkcs10, name)) + lose("Unable to set subject name"); + + X509_NAME_free(name); + + Py_RETURN_NONE; + + error: + X509_NAME_free(name); + return NULL; +} + +static char pkcs10_object_get_key_usage__doc__[] = + "Return a FrozenSet of strings representing the KeyUsage settings for\n" + "this PKCS#10 request, or None if the request has no KeyUsage\n" + "extension. The bits have the same names as in RFC 5280.\n" + ; + +static PyObject * +pkcs10_object_get_key_usage(pkcs10_object *self) +{ + return extension_get_key_usage(pkcs10_object_extension_helper(self)); +} + +static char pkcs10_object_set_key_usage__doc__[] = + "Set the KeyUsage extension for this PKCS#10 request.\n" + "\n" + "Argument \"iterable\" should be an iterable object which returns zero or more\n" + "strings naming bits to be enabled. The bits have the same names as in RFC 5280.\n" + "\n" + "Optional argument \"critical\" is a boolean indicating whether the extension\n" + "should be marked as critical or not. 
RFC 5280 4.2.1.3 says this extension SHOULD\n" + "be marked as critical when used, so the default is True.\n" + ; + +static PyObject * +pkcs10_object_set_key_usage(pkcs10_object *self, PyObject *args) +{ + return extension_set_key_usage(pkcs10_object_extension_helper(self), args); +} + +static char pkcs10_object_get_eku__doc__[] = + "Return a FrozenSet of object identifiers representing the\n" + "ExtendedKeyUsage settings for this PKCS #10 requst, or None if\n" + "the request has no ExtendedKeyUsage extension.\n" + ; + +static PyObject * +pkcs10_object_get_eku(pkcs10_object *self) +{ + return extension_get_eku(pkcs10_object_extension_helper(self)); +} + +static char pkcs10_object_set_eku__doc__[] = + "Set the ExtendedKeyUsage extension for this PKCS #10 request.\n" + "\n" + "Argument \"iterable\" should be an iterable object which returns one or more\n" + "object identifiers.\n" + "\n" + "Optional argument \"critical\" is a boolean indicating whether the extension\n" + "should be marked as critical or not. RFC 6487 4.8.5 says this extension\n" + "MUST NOT be marked as non-critical when used, so the default is False.\n" + ; + +static PyObject * +pkcs10_object_set_eku(pkcs10_object *self, PyObject *args) +{ + return extension_set_eku(pkcs10_object_extension_helper(self), args); +} + +static char pkcs10_object_get_basic_constraints__doc__[] = + "Return BasicConstraints value for this PKCS#10 request.\n" + "\n" + "If this request has no BasicConstraints extension, this method returns\n" + "None.\n" + "\n" + "Otherwise, this method returns a two-element tuple. 
The first element\n" + "of the tuple is a boolean representing the extension's cA value; the\n" + "second element of the tuple is either an integer representing\n" + "thepathLenConstraint value or None if there is no pathLenConstraint.\n" + ; + +static PyObject * +pkcs10_object_get_basic_constraints(pkcs10_object *self) +{ + return extension_get_basic_constraints(pkcs10_object_extension_helper(self)); +} + +static char pkcs10_object_set_basic_constraints__doc__[] = + "Set BasicConstraints value for this PKCS#10 request.\n" + "\n" + "First argument \"ca\" is a boolean indicating whether the request\n" + "is for a CA certificate or not.\n" + "\n" + "Optional second argument \"pathLenConstraint\" is None or a\n" + "non-negative integer specifying the pathLenConstraint value for this\n" + "certificate. Per RFC 5280, this value may only be set to an integer\n" + "value for CA certificates." + "\n" + "Optional third argument \"critical\" specifies whether the extension\n" + "should be marked as critical. 
RFC 5280 4.2.1.9 requires that CA\n" + "certificates mark this extension as critical, so the default is True.\n" + ; + +static PyObject * +pkcs10_object_set_basic_constraints(pkcs10_object *self, PyObject *args) +{ + return extension_set_basic_constraints(pkcs10_object_extension_helper(self), args); +} + +static char pkcs10_object_get_sia__doc__[] = + "Return the SIA values for this PKCS#10 request.\n" + "\n" + "If this request has no SIA extension, this method returns None.\n" + "\n" + "Otherwise, this returns a tuple containing three sequences:\n" + "caRepository URIs, rpkiManifest URIs, and signedObject URIs.\n" + "Any other accessMethods are ignored, as are any non-URI\n" + "accessLocations.\n" + ; + +static PyObject * +pkcs10_object_get_sia(pkcs10_object *self) +{ + return extension_get_sia(pkcs10_object_extension_helper(self)); +} + +static char pkcs10_object_set_sia__doc__[] = + "Set SIA values for this PKCS#10 request.\n" + "\n" + "Takes three arguments: caRepository, rpkiManifest, and signedObject.\n" + "\n" + "Each of these should be an iterable which returns URIs.\n" + "\n" + "None is acceptable as an alternate way of specifying an empty\n" + "collection of URIs for a particular argument.\n" + ; + +static PyObject * +pkcs10_object_set_sia(pkcs10_object *self, PyObject *args, PyObject *kwds) +{ + return extension_set_sia(pkcs10_object_extension_helper(self), args, kwds); +} + +static char pkcs10_object_get_signature_algorithm__doc__[] = + "Return this PKCS #10 reqeuest's signature algorithm OID.\n" + ; + +static PyObject * +pkcs10_object_get_signature_algorithm(pkcs10_object *self) +{ + ASN1_OBJECT *oid = NULL; + + ENTERING(pkcs10_object_get_signature_algorithm); + + X509_ALGOR_get0(&oid, NULL, NULL, self->pkcs10->sig_alg); + + return ASN1_OBJECT_to_PyString(oid); +} + +static char pkcs10_object_get_extension_oids__doc__[] = + "Return the set of extension OIDs used in this request. 
This is mostly\n" + "useful for enforcing restrictions on what extensions are allowed to be\n" + "present, eg, to conform with the RPKI profile.\n" + ; + +static PyObject * +pkcs10_object_get_extension_oids(pkcs10_object *self) +{ + PyObject *result = NULL; + PyObject *oid = NULL; + int i; + + ENTERING(pkcs10_object_get_extension_oids); + + if ((result = PyFrozenSet_New(NULL)) == NULL) + goto error; + + for (i = 0; i < sk_X509_EXTENSION_num(self->exts); i++) { + X509_EXTENSION *ext = sk_X509_EXTENSION_value(self->exts, i); + if ((oid = ASN1_OBJECT_to_PyString(ext->object)) == NULL || + PySet_Add(result, oid) < 0) + goto error; + Py_XDECREF(oid); + oid = NULL; + } + + return result; + + error: + Py_XDECREF(result); + Py_XDECREF(oid); + return NULL; +} + +/* + * May want EKU handlers eventually, skip for now. + */ + +static char pkcs10_object_pprint__doc__[] = + "Return a pretty-printed rendition of this PKCS#10 request.\n" + ; + +static PyObject * +pkcs10_object_pprint(pkcs10_object *self) +{ + PyObject *result = NULL; + BIO *bio = NULL; + + ENTERING(pkcs10_object_pprint); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) + lose_no_memory(); + + if (!X509_REQ_print(bio, self->pkcs10)) + lose_openssl_error("Unable to pretty-print PKCS#10 request"); + + result = BIO_to_PyString_helper(bio); + + error: /* Fall through */ + BIO_free(bio); + return result; +} + +static struct PyMethodDef pkcs10_object_methods[] = { + Define_Method(pemWrite, pkcs10_object_pem_write, METH_NOARGS), + Define_Method(derWrite, pkcs10_object_der_write, METH_NOARGS), + Define_Method(sign, pkcs10_object_sign, METH_VARARGS), + Define_Method(verify, pkcs10_object_verify, METH_NOARGS), + Define_Method(getPublicKey, pkcs10_object_get_public_key, METH_NOARGS), + Define_Method(setPublicKey, pkcs10_object_set_public_key, METH_VARARGS), + Define_Method(getVersion, pkcs10_object_get_version, METH_NOARGS), + Define_Method(setVersion, pkcs10_object_set_version, METH_VARARGS), + Define_Method(getSubject, 
pkcs10_object_get_subject, METH_VARARGS), + Define_Method(setSubject, pkcs10_object_set_subject, METH_VARARGS), + Define_Method(pprint, pkcs10_object_pprint, METH_NOARGS), + Define_Method(getKeyUsage, pkcs10_object_get_key_usage, METH_NOARGS), + Define_Method(setKeyUsage, pkcs10_object_set_key_usage, METH_VARARGS), + Define_Method(getEKU, pkcs10_object_get_eku, METH_NOARGS), + Define_Method(setEKU, pkcs10_object_set_eku, METH_VARARGS), + Define_Method(getBasicConstraints, pkcs10_object_get_basic_constraints, METH_NOARGS), + Define_Method(setBasicConstraints, pkcs10_object_set_basic_constraints, METH_VARARGS), + Define_Method(getSIA, pkcs10_object_get_sia, METH_NOARGS), + Define_Method(setSIA, pkcs10_object_set_sia, METH_KEYWORDS), + Define_Method(getSignatureAlgorithm, pkcs10_object_get_signature_algorithm, METH_NOARGS), + Define_Method(getExtensionOIDs, pkcs10_object_get_extension_oids, METH_NOARGS), + Define_Class_Method(pemRead, pkcs10_object_pem_read, METH_VARARGS), + Define_Class_Method(pemReadFile, pkcs10_object_pem_read_file, METH_VARARGS), + Define_Class_Method(derRead, pkcs10_object_der_read, METH_VARARGS), + Define_Class_Method(derReadFile, pkcs10_object_der_read_file, METH_VARARGS), + {NULL} +}; + +static char POW_PKCS10_Type__doc__[] = + "This class represents a PKCS#10 request.\n" + "\n" + LAME_DISCLAIMER_IN_ALL_CLASS_DOCUMENTATION + ; + +static PyTypeObject POW_PKCS10_Type = { + PyObject_HEAD_INIT(0) + 0, /* ob_size */ + "rpki.POW.PKCS10", /* tp_name */ + sizeof(pkcs10_object), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)pkcs10_object_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + 
POW_PKCS10_Type__doc__, /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + pkcs10_object_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + pkcs10_object_new, /* tp_new */ +}; + + + +/* + * Module functions. + */ + +static char pow_module_add_object__doc__[] = + "Add new a new object identifier to OpenSSL's internal database.\n" + "\n" + "The \"oid\" should be an ASN.1 object identifer, represented as a string\n" + "in dotted-decimal format.\n" + "\n" + "The \"shortName\" parameter should be the OpenSSL \"short name\" to use.\n" + "\n" + "The \"longName\" parameter should be the OpenSSL \"long name\" to use.\n" + ; + +static PyObject * +pow_module_add_object(GCC_UNUSED PyObject *self, PyObject *args) +{ + char *oid = NULL, *sn = NULL, *ln = NULL; + + ENTERING(pow_module_add_object); + + if (!PyArg_ParseTuple(args, "sss", &oid, &sn, &ln)) + goto error; + + if (!OBJ_create(oid, sn, ln)) + lose_openssl_error("Unable to add object"); + + Py_RETURN_NONE; + + error: + + return NULL; +} + +static char pow_module_get_error__doc__[] = + "Pop one error off OpenSSL's global error stack and returns it as a string.\n" + "\n" + "Returns None if the error stack is empty.\n" + ; + +static PyObject * +pow_module_get_error(GCC_UNUSED PyObject *self) +{ + unsigned long error = ERR_get_error(); + char buf[256]; + + ENTERING(pow_module_get_error); + + if (!error) + Py_RETURN_NONE; + + ERR_error_string_n(error, buf, sizeof(buf)); + return Py_BuildValue("s", buf); +} + +static char pow_module_clear_error__doc__[] = + "Remove all errors from OpenSSL's global error stack.\n" + ; + +static PyObject * +pow_module_clear_error(GCC_UNUSED PyObject *self) +{ + ENTERING(pow_module_clear_error); + ERR_clear_error(); + 
Py_RETURN_NONE; +} + +static char pow_module_seed__doc__[] = + "Add data to OpenSSL's pseudo-random number generator state.\n" + "\n" + "The \"data\" parameter is the seed to add. Entropy of the data is\n" + "assumed to be equal to the length of the data.\n" + ; + +static PyObject * +pow_module_seed(GCC_UNUSED PyObject *self, PyObject *args) +{ + char *data = NULL; + Py_ssize_t datalen = 0; + + ENTERING(pow_module_seed); + + if (!PyArg_ParseTuple(args, "s#", &data, &datalen)) + goto error; + + RAND_seed(data, datalen); + + Py_RETURN_NONE; + + error: + + return NULL; +} + +static char pow_module_add__doc__[] = + "Add data to OpenSSL's pseudo-random number generator state.\n" + "\n" + "The \"data\" parameter is the data to add.\n" + "\n" + "The \"entropy\" parameter should be an estimate of the number of\n" + "random bytes in the data parameter.\n" + ; + +static PyObject * +pow_module_add(GCC_UNUSED PyObject *self, PyObject *args) +{ + char *data = NULL; + Py_ssize_t datalen = 0; + double entropy = 0; + + ENTERING(pow_module_add); + + if (!PyArg_ParseTuple(args, "s#d", &data, &datalen, &entropy)) + goto error; + + RAND_add(data, datalen, entropy); + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char pow_module_write_random_file__doc__[] = + "Write the current state of OpenSSL's pseduo-random number generator to\n" + "a file.\n" + "\n" + "The \"filename\" parameter is the name of the file to write.\n" + ; + +static PyObject * +pow_module_write_random_file(GCC_UNUSED PyObject *self, PyObject *args) +{ + char *filename = NULL; + + ENTERING(pow_module_write_random_file); + + if (!PyArg_ParseTuple(args, "s", &filename)) + goto error; + + if (RAND_write_file(filename) == -1) + lose("Couldn't write random file"); + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char pow_module_read_random_file__doc__[] = + "Restore the state of OpenSSLs pseudo-random number generator from\n" + "data previously saved to a file.\n" + "\n" + "The \"filename\" parameter is 
the name of the file to read.\n" + ; + +static PyObject * +pow_module_read_random_file(GCC_UNUSED PyObject *self, PyObject *args) +{ + char *file = NULL; + int len = -1; + + ENTERING(pow_module_read_random_file); + + if (!PyArg_ParseTuple(args, "s|i", &file, &len)) + goto error; + + if (!RAND_load_file(file, len)) + lose("Couldn't load random file"); + + Py_RETURN_NONE; + + error: + return NULL; +} + +static char pow_module_custom_datetime__doc__[] = + "Set constructor callback for customized datetime class.\n" + ; + +static PyObject * +pow_module_custom_datetime(GCC_UNUSED PyObject *self, PyObject *args) +{ + PyObject *cb = NULL; + + ENTERING(pow_module_custom_datetime); + + if (!PyArg_ParseTuple(args, "O", &cb)) + goto error; + + Py_XINCREF(cb); + Py_XDECREF(custom_datetime); + custom_datetime = cb; + + Py_RETURN_NONE; + + error: + return NULL; +} + + +static struct PyMethodDef pow_module_methods[] = { + Define_Method(getError, pow_module_get_error, METH_NOARGS), + Define_Method(clearError, pow_module_clear_error, METH_NOARGS), + Define_Method(seed, pow_module_seed, METH_VARARGS), + Define_Method(add, pow_module_add, METH_VARARGS), + Define_Method(readRandomFile, pow_module_read_random_file, METH_VARARGS), + Define_Method(writeRandomFile, pow_module_write_random_file, METH_VARARGS), + Define_Method(addObject, pow_module_add_object, METH_VARARGS), + Define_Method(customDatetime, pow_module_custom_datetime, METH_VARARGS), + {NULL} +}; + + + +/* + * Module initialization. + */ + +void +init_POW(void) +{ + PyObject *m = Py_InitModule3("_POW", pow_module_methods, pow_module__doc__); + int OpenSSL_ok = 1; + + /* + * Python encourages us to use these functions instead of the ones + * in libc, and OpenSSL allows us to do this. The result seems to + * work, and, in theory, gives Python's memory allocator a better + * idea of how much memory we're really using. Not sure why it + * cares, but let's try to be nice about it. 
+ * + * Note that this must be done BEFORE anything in OpenSSL uses + * dynamic memory, and that this will probably fail in horrible ways + * without the build-time code (-Bsymbolic, etc) which isolates our + * copy of the OpenSSL code from any system shared libraries. + * Enough other things already fail in horrible ways without that + * isolation that adding one more doesn't make much difference, but + * if you tinker with the build script and start seeing nasty + * memory-related issues, this might be the cause. + */ + CRYPTO_set_mem_functions(PyMem_Malloc, PyMem_Realloc, PyMem_Free); + + /* + * Import the DateTime API + */ + + PyDateTime_IMPORT; + +#define Define_Class(__type__) \ + do { \ + char *__name__ = strrchr(__type__.tp_name, '.'); \ + if (PyType_Ready(&__type__) == 0 && __name__ != NULL) { \ + Py_INCREF(&__type__); \ + PyModule_AddObject(m, __name__+1, (PyObject *) &__type__); \ + } \ + } while (0) + + Define_Class(POW_X509_Type); + Define_Class(POW_X509Store_Type); + Define_Class(POW_X509StoreCTX_Type); + Define_Class(POW_CRL_Type); + Define_Class(POW_Asymmetric_Type); + Define_Class(POW_AsymmetricParams_Type); + Define_Class(POW_Digest_Type); + Define_Class(POW_CMS_Type); + Define_Class(POW_IPAddress_Type); + Define_Class(POW_Manifest_Type); + Define_Class(POW_ROA_Type); + Define_Class(POW_PKCS10_Type); + +#undef Define_Class + +#define Define_Exception(__name__, __parent__) \ + PyModule_AddObject(m, #__name__, ((__name__##Object) \ + = PyErr_NewException("rpki.POW." 
#__name__, __parent__, NULL))) + + Define_Exception(Error, NULL); + Define_Exception(OpenSSLError, ErrorObject); + Define_Exception(POWError, ErrorObject); + Define_Exception(NotVerifiedError, ErrorObject); + +#undef Define_Exception + +#define Define_Integer_Constant(__name__) \ + PyModule_AddIntConstant(m, #__name__, __name__) + + /* Object format types */ + Define_Integer_Constant(LONGNAME_FORMAT); + Define_Integer_Constant(SHORTNAME_FORMAT); + Define_Integer_Constant(OIDNAME_FORMAT); + + /* Message digests */ + Define_Integer_Constant(MD5_DIGEST); + Define_Integer_Constant(SHA_DIGEST); + Define_Integer_Constant(SHA1_DIGEST); + Define_Integer_Constant(SHA256_DIGEST); + Define_Integer_Constant(SHA384_DIGEST); + Define_Integer_Constant(SHA512_DIGEST); + + /* CMS flags */ + Define_Integer_Constant(CMS_NOCERTS); + Define_Integer_Constant(CMS_NOATTR); + Define_Integer_Constant(CMS_NOINTERN); + Define_Integer_Constant(CMS_NOCRL); + Define_Integer_Constant(CMS_NO_SIGNER_CERT_VERIFY); + Define_Integer_Constant(CMS_NO_ATTR_VERIFY); + Define_Integer_Constant(CMS_NO_CONTENT_VERIFY); + + /* X509 validation flags */ + Define_Integer_Constant(X509_V_FLAG_CB_ISSUER_CHECK); + Define_Integer_Constant(X509_V_FLAG_USE_CHECK_TIME); + Define_Integer_Constant(X509_V_FLAG_CRL_CHECK); + Define_Integer_Constant(X509_V_FLAG_CRL_CHECK_ALL); + Define_Integer_Constant(X509_V_FLAG_IGNORE_CRITICAL); + Define_Integer_Constant(X509_V_FLAG_X509_STRICT); + Define_Integer_Constant(X509_V_FLAG_ALLOW_PROXY_CERTS); + Define_Integer_Constant(X509_V_FLAG_POLICY_CHECK); + Define_Integer_Constant(X509_V_FLAG_EXPLICIT_POLICY); + Define_Integer_Constant(X509_V_FLAG_INHIBIT_ANY); + Define_Integer_Constant(X509_V_FLAG_INHIBIT_MAP); + Define_Integer_Constant(X509_V_FLAG_NOTIFY_POLICY); + Define_Integer_Constant(X509_V_FLAG_CHECK_SS_SIGNATURE); + + /* X509 validation error codes */ + Define_Integer_Constant(X509_V_OK); + Define_Integer_Constant(X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT); + 
Define_Integer_Constant(X509_V_ERR_UNABLE_TO_GET_CRL); + Define_Integer_Constant(X509_V_ERR_UNABLE_TO_DECRYPT_CERT_SIGNATURE); + Define_Integer_Constant(X509_V_ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE); + Define_Integer_Constant(X509_V_ERR_UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY); + Define_Integer_Constant(X509_V_ERR_CERT_SIGNATURE_FAILURE); + Define_Integer_Constant(X509_V_ERR_CRL_SIGNATURE_FAILURE); + Define_Integer_Constant(X509_V_ERR_CERT_NOT_YET_VALID); + Define_Integer_Constant(X509_V_ERR_CERT_HAS_EXPIRED); + Define_Integer_Constant(X509_V_ERR_CRL_NOT_YET_VALID); + Define_Integer_Constant(X509_V_ERR_CRL_HAS_EXPIRED); + Define_Integer_Constant(X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD); + Define_Integer_Constant(X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD); + Define_Integer_Constant(X509_V_ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD); + Define_Integer_Constant(X509_V_ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD); + Define_Integer_Constant(X509_V_ERR_OUT_OF_MEM); + Define_Integer_Constant(X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT); + Define_Integer_Constant(X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN); + Define_Integer_Constant(X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY); + Define_Integer_Constant(X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE); + Define_Integer_Constant(X509_V_ERR_CERT_CHAIN_TOO_LONG); + Define_Integer_Constant(X509_V_ERR_CERT_REVOKED); + Define_Integer_Constant(X509_V_ERR_INVALID_CA); + Define_Integer_Constant(X509_V_ERR_PATH_LENGTH_EXCEEDED); + Define_Integer_Constant(X509_V_ERR_INVALID_PURPOSE); + Define_Integer_Constant(X509_V_ERR_CERT_UNTRUSTED); + Define_Integer_Constant(X509_V_ERR_CERT_REJECTED); + Define_Integer_Constant(X509_V_ERR_SUBJECT_ISSUER_MISMATCH); + Define_Integer_Constant(X509_V_ERR_AKID_SKID_MISMATCH); + Define_Integer_Constant(X509_V_ERR_AKID_ISSUER_SERIAL_MISMATCH); + Define_Integer_Constant(X509_V_ERR_KEYUSAGE_NO_CERTSIGN); + Define_Integer_Constant(X509_V_ERR_UNABLE_TO_GET_CRL_ISSUER); + Define_Integer_Constant(X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION); + 
Define_Integer_Constant(X509_V_ERR_KEYUSAGE_NO_CRL_SIGN); + Define_Integer_Constant(X509_V_ERR_UNHANDLED_CRITICAL_CRL_EXTENSION); + Define_Integer_Constant(X509_V_ERR_INVALID_NON_CA); + Define_Integer_Constant(X509_V_ERR_PROXY_PATH_LENGTH_EXCEEDED); + Define_Integer_Constant(X509_V_ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE); + Define_Integer_Constant(X509_V_ERR_PROXY_CERTIFICATES_NOT_ALLOWED); + Define_Integer_Constant(X509_V_ERR_INVALID_EXTENSION); + Define_Integer_Constant(X509_V_ERR_INVALID_POLICY_EXTENSION); + Define_Integer_Constant(X509_V_ERR_NO_EXPLICIT_POLICY); + Define_Integer_Constant(X509_V_ERR_UNNESTED_RESOURCE); + Define_Integer_Constant(X509_V_ERR_APPLICATION_VERIFICATION); + + /* AsymmetricParam EC curve codes */ + Define_Integer_Constant(EC_P256_CURVE); + +#undef Define_Integer_Constant + + /* + * Initialise library. + * + * We shouldn't need any of the SSL code or error strings anymore. + * + * If we cared deeply about avoiding references to symmetric cipher + * algorithms and digest algorithms we're not using, we could + * replace the call to OpenSSL_add_all_algorithms() with calls to + * add just the specific algorithms we use rather than all of them. + * For now, don't worry about it. 
+ */ + + OpenSSL_add_all_algorithms(); + ERR_load_crypto_strings(); + + OpenSSL_ok &= create_missing_nids(); + + x509_store_ctx_ex_data_idx = X509_STORE_CTX_get_ex_new_index(0, "x590_store_ctx_object for verify callback", + NULL, NULL, NULL); + + if (PyErr_Occurred() || !OpenSSL_ok) + Py_FatalError("Can't initialize module POW"); +} + +/* + * Local Variables: + * indent-tabs-mode: nil + * End: + */ diff --git a/potpourri/analyze-rcynic-history.py b/potpourri/analyze-rcynic-history.py new file mode 100644 index 00000000..b72d0741 --- /dev/null +++ b/potpourri/analyze-rcynic-history.py @@ -0,0 +1,290 @@ +# $Id$ +# +# Copyright (C) 2011-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Parse traffic data out of rynic XML output, whack it a bit, print some +summaries and run gnuplot to draw some pictures. +""" + +plot_all_hosts = False + +window_hours = 72 + +import mailbox +import sys +import urlparse +import os +import datetime +import subprocess +import shelve + +from xml.etree.cElementTree import (ElementTree as ElementTree, + fromstring as ElementTreeFromString) + +def parse_utc(s): + return datetime.datetime.strptime(s, "%Y-%m-%dT%H:%M:%SZ") + +class Rsync_History(object): + """ + An Rsync_History object represents one rsync connection. 
+ """ + + def __init__(self, elt): + self.error = elt.get("error") + self.uri = elt.text.strip() + self.hostname = urlparse.urlparse(self.uri).hostname or None + self.elapsed = parse_utc(elt.get("finished")) - parse_utc(elt.get("started")) + +class Host(object): + """ + A host object represents all the data collected for one host. Note + that it (usually) contains a list of all the sessions in which this + host appears. + """ + + def __init__(self, hostname, session_id): + self.hostname = hostname + self.session_id = session_id + self.elapsed = datetime.timedelta(0) + self.connection_count = 0 + self.dead_connections = 0 + self.uris = set() + self.total_connection_time = datetime.timedelta(0) + + def add_rsync_history(self, h): + self.connection_count += 1 + self.elapsed += h.elapsed + self.dead_connections += int(h.error is not None) + self.total_connection_time += h.elapsed + + def add_uri(self, u): + self.uris.add(u) + + def finalize(self): + self.object_count = len(self.uris) + del self.uris + + @property + def failed(self): + return 1 if self.dead_connections else 0 + + @property + def seconds_per_object(self): + if self.failed: + return None + else: + return float(self.elapsed.days * 24 * 60 * 60 + + self.elapsed.seconds + + self.elapsed.microseconds / 10**6) / float(self.object_count) + + @property + def objects_per_connection(self): + if self.failed: + return None + else: + return float(self.object_count) / float(self.connection_count) + + @property + def average_connection_time(self): + return float(self.total_connection_time.days * 24 * 60 * 60 + + self.total_connection_time.seconds + + self.total_connection_time.microseconds / 10**6) / float(self.connection_count) + + class Format(object): + + def __init__(self, attr, title, fmt, ylabel = ""): + self.attr = attr + self.title = title + self.width = len(title) - int("%" in fmt) + self.fmt = "%%%d%s" % (self.width, fmt) + self.oops = "*" * self.width + self.ylabel = ylabel + + def __call__(self, obj): + 
try: + value = getattr(obj, self.attr) + return None if value is None else self.fmt % value + except ZeroDivisionError: + return self.oops + + format = (Format("connection_count", "Connections", "d", "Connections To Repository (Per Session)"), + Format("object_count", "Objects", "d", "Objects In Repository (Distinct URIs Per Session)"), + Format("objects_per_connection", "Objects/Connection", ".3f", "Objects In Repository / Connections To Repository"), + Format("seconds_per_object", "Seconds/Object", ".3f", "Seconds To Transfer / Object (Average Per Session)"), + Format("failure_rate_running", "Failure Rate", ".3f%%", "Sessions With Failed Connections Within Last %d Hours" % window_hours), + Format("average_connection_time", "Average Connection", ".3f", "Seconds / Connection (Average Per Session)"), + Format("hostname", "Hostname", "s")) + + format_dict = dict((fmt.attr, fmt) for fmt in format) + + def format_field(self, name): + result = self.format_dict[name](self) + return None if result is None else result.strip() + +class Session(dict): + """ + A session corresponds to one XML file. This is a dictionary of Host + objects, keyed by hostname. 
+ """ + + def __init__(self, session_id, msg_key): + self.session_id = session_id + self.msg_key = msg_key + self.date = parse_utc(session_id) + self.calculated_failure_history = False + + @property + def hostnames(self): + return set(self.iterkeys()) + + def get_plot_row(self, name, hostnames): + return (self.session_id,) + tuple(self[h].format_field(name) if h in self else "" for h in hostnames) + + def add_rsync_history(self, h): + if h.hostname not in self: + self[h.hostname] = Host(h.hostname, self.session_id) + self[h.hostname].add_rsync_history(h) + + def add_uri(self, u): + h = urlparse.urlparse(u).hostname + if h and h in self: + self[h].add_uri(u) + + def finalize(self): + for h in self.itervalues(): + h.finalize() + + def calculate_failure_history(self, sessions): + start = self.date - datetime.timedelta(hours = window_hours) + sessions = tuple(s for s in sessions if s.date <= self.date and s.date > start) + for hostname, h in self.iteritems(): + i = n = 0 + for s in sessions: + if hostname in s: + i += s[hostname].failed + n += 1 + h.failure_rate_running = float(100 * i) / n + self.calculated_failure_history = True + +def plotter(f, hostnames, field, logscale = False): + plotlines = sorted(session.get_plot_row(field, hostnames) for session in sessions) + title = Host.format_dict[field].title + ylabel = Host.format_dict[field].ylabel + n = len(hostnames) + 1 + assert all(n == len(plotline) for plotline in plotlines) + if "%%" in Host.format_dict[field].fmt: + f.write('set format y "%.0f%%"\n') + else: + f.write('set format y\n') + if logscale: + f.write("set logscale y\n") + else: + f.write("unset logscale y\n") + f.write(""" + set xdata time + set timefmt '%Y-%m-%dT%H:%M:%SZ' + #set format x '%m/%d' + #set format x '%b%d' + #set format x '%Y-%m-%d' + set format x '%Y-%m' + #set title '""" + title + """' + set ylabel '""" + ylabel + """' + plot""" + ",".join(" '-' using 1:2 with linespoints pointinterval 500 title '%s'" % h for h in hostnames) + "\n") + 
for i in xrange(1, n): + for plotline in plotlines: + if plotline[i] is not None: + f.write("%s %s\n" % (plotline[0], plotline[i].rstrip("%"))) + f.write("e\n") + +def plot_hosts(hostnames, fields): + for field in fields: + for logscale in (False, True): + gnuplot = subprocess.Popen(("gnuplot",), stdin = subprocess.PIPE) + gnuplot.stdin.write("set terminal pdf\n") + gnuplot.stdin.write("set output '%s/%s-%s.pdf'\n" % (outdir, field, "log" if logscale else "linear")) + plotter(gnuplot.stdin, hostnames, field, logscale = logscale) + gnuplot.stdin.close() + gnuplot.wait() + +outdir = "images" + +if not os.path.exists(outdir): + os.makedirs(outdir) + +mb = mailbox.Maildir("/u/sra/rpki/rcynic-xml", factory = None, create = False) + +if sys.platform == "darwin": # Sigh + shelf = shelve.open("rcynic-xml", "c") +else: + shelf = shelve.open("rcynic-xml.db", "c") + +sessions = [] + +latest = None +parsed = 0 + +for i, key in enumerate(mb.iterkeys(), 1): + sys.stderr.write("\r%s %d/%d/%d..." % ("|\\-/"[i & 3], parsed, i, len(mb))) + + if key in shelf: + session = shelf[key] + + else: + assert not mb[key].is_multipart() + input = ElementTreeFromString(mb[key].get_payload()) + date = input.get("date") + sys.stderr.write("%s..." 
% date) + session = Session(date, key) + for elt in input.findall("rsync_history"): + session.add_rsync_history(Rsync_History(elt)) + for elt in input.findall("validation_status"): + if elt.get("generation") == "current": + session.add_uri(elt.text.strip()) + session.finalize() + shelf[key] = session + parsed += 1 + + sessions.append(session) + if latest is None or session.session_id > latest.session_id: + latest = session + +sys.stderr.write("\n") + +shelf.sync() + +for session in sessions: + if not getattr(session, "calculated_failure_history", False): + session.calculate_failure_history(sessions) + shelf[session.msg_key] = session + +if plot_all_hosts: + hostnames = sorted(reduce(lambda x, y: x | y, + (s.hostnames for s in sessions), + set())) + +else: + hostnames = ("rpki.apnic.net", "rpki.ripe.net", "localcert.ripe.net", + "repository.lacnic.net", "rpki.afrinic.net", "rpki.arin.net", + "arin.rpki.net", "repo0.rpki.net", "rgnet.rpki.net") + +plot_hosts(hostnames, [fmt.attr for fmt in Host.format if fmt.attr != "hostname"]) + +if latest is not None: + f = open("rcynic.xml", "wb") + f.write(mb[latest.msg_key].get_payload()) + f.close() + +shelf.close() diff --git a/potpourri/analyze-transition.py b/potpourri/analyze-transition.py new file mode 100644 index 00000000..e2125dfb --- /dev/null +++ b/potpourri/analyze-transition.py @@ -0,0 +1,88 @@ +# $Id$ +# +# Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Compare rcynic.xml files, tell the user what became invalid, and why. +""" + +import sys + +try: + from lxml.etree import ElementTree +except ImportError: + from xml.etree.ElementTree import ElementTree + +class Object(object): + + def __init__(self, session, uri): + self.session = session + self.uri = uri + self.labels = [] + + def add(self, label): + self.labels.append(label) + + def __cmp__(self, other): + return cmp(self.labels, other.labels) + + @property + def accepted(self): + return "object_accepted" in self.labels + +class Session(dict): + + def __init__(self, name): + self.name = name + tree = ElementTree(file = name) + labels = tuple((elt.tag.strip(), elt.text.strip()) for elt in tree.find("labels")) + self.labels = tuple(pair[0] for pair in labels) + self.descrs = dict(labels) + self.date = tree.getroot().get("date") + for elt in tree.findall("validation_status"): + status = elt.get("status") + uri = elt.text.strip() + if status.startswith("rsync_transfer_") or elt.get("generation") != "current": + continue + if uri not in self: + self[uri] = Object(self, uri) + self[uri].add(status) + +skip_labels = frozenset(("object_accepted", "object_rejected")) + +old_db = new_db = None + +for arg in sys.argv[1:]: + + old_db = new_db + new_db = Session(arg) + if old_db is None: + continue + + old_uris = frozenset(old_db) + new_uris = frozenset(new_db) + + for uri in sorted(old_uris - new_uris): + print new_db.date, uri, "dropped" + + for uri in sorted(old_uris & new_uris): + old = old_db[uri] + new = new_db[uri] + if old.accepted and not new.accepted: + print new_db.date, uri, "invalid" + labels = frozenset(new.labels) - 
frozenset(old.labels) - skip_labels + for label in new.labels: + if label in labels: + print " ", new_db.descrs[label] diff --git a/potpourri/apnic-to-csv.py b/potpourri/apnic-to-csv.py new file mode 100644 index 00000000..62293a51 --- /dev/null +++ b/potpourri/apnic-to-csv.py @@ -0,0 +1,55 @@ +# $Id$ +# +# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Parse APNIC "Extended Allocation and Assignment" reports and write +out (just) the RPKI-relevant fields in myrpki-format CSV syntax. 
+""" + +from rpki.csv_utils import csv_writer +from rpki.ipaddrs import v4addr + +asns = csv_writer("asns.csv") +prefixes = csv_writer("prefixes.csv") + +for line in open("delegated-apnic-extended-latest"): + + line = line.rstrip() + + if not line.startswith("apnic|") or line.endswith("|summary"): + continue + + try: + registry, cc, rectype, start, value, date, status, opaque_id = line.split("|") + except ValueError: + continue + + if not opaque_id: + continue + + assert registry == "apnic" + + if rectype == "asn": + asns.writerow((opaque_id, "%s-%s" % (start, int(start) + int(value) - 1))) + + elif rectype == "ipv4": + prefixes.writerow((opaque_id, "%s-%s" % (start, v4addr(v4addr(start) + long(value) - 1)))) + + elif rectype == "ipv6": + prefixes.writerow((opaque_id, "%s/%s" % (start, value))) + +asns.close() +prefixes.close() diff --git a/potpourri/application-x-rpki-mailcap-handler.sh b/potpourri/application-x-rpki-mailcap-handler.sh new file mode 100755 index 00000000..891987d9 --- /dev/null +++ b/potpourri/application-x-rpki-mailcap-handler.sh @@ -0,0 +1,53 @@ +#!/bin/sh - +# $Id$ +# +# Copyright (C) 2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +# Given the Maildir dump format, one can use Mutt as a viewer with two +# tweaks: +# +# 1) Add to ~/.muttrc +# +# auto_view application/x-rpki +# +# 2) Add to ~/.mailcap +# +# application/x-rpki; /path/to/this/script.sh ; copiousoutput +# +# "copiousoutput" is required by mutt to enable auto_view (inline +# display) behavior. +# +# This script could do fancier things (pretty XML formatting, +# verification checks of the CMS, etcetera) if anybody cared. +# For the moment the main use for this script is debugging. + +# We have to jump through some hoops to figure out where our OpenSSL +# binary is. If you have already installed an OpenSSL binary that +# understands CMS, feel free to use that instead. + +#exec 2>&1; set -x + +: ${AWK=/usr/bin/awk} +: ${OPENSSL=$(/usr/bin/dirname $0)/../openssl/openssl/apps/openssl} +: ${SPLITBASE64=$(/usr/bin/dirname $0)/splitbase64.xsl} +: ${XMLINDENT=/usr/local/bin/xmlindent} +: ${XMLLINT=/usr/local/bin/xmllint} +: ${XSLTPROC=/usr/local/bin/xsltproc} + +# This produces prettier output, but also hangs sometimes, apparently some xmlindent bug dealing with really long XML attributes +#OPENSSL_CONF=/dev/null $OPENSSL cms -verify -nosigs -noverify -inform DER 2>/dev/null | $XSLTPROC $SPLITBASE64 - | $XMLINDENT -i 2 | $AWK NF + +# So we do this instead +OPENSSL_CONF=/dev/null $OPENSSL cms -verify -nosigs -noverify -inform DER 2>/dev/null | $XSLTPROC $SPLITBASE64 - | $XMLLINT -format - diff --git a/potpourri/arin-to-csv.py b/potpourri/arin-to-csv.py new file mode 100644 index 00000000..a4e7ffc3 --- /dev/null +++ b/potpourri/arin-to-csv.py @@ -0,0 +1,114 @@ +# $Id$ +# +# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Parse an ARIN database research dump and write out (just) the +RPKI-relevant fields in myrpki-format CSV syntax. + +NB: The input data for this script comes from ARIN under an agreement +that allows research use but forbids redistribution, so if you think +you need a copy of the data, please talk to ARIN about it, not us. + +Input format used to be RPSL WHOIS dump, but ARIN recently went Java, +so we have to parse a 3.5GB XML "document". Credit to Liza Daly for +explaining the incantations needed to convince lxml to do this nicely, +see: http://www.ibm.com/developerworks/xml/library/x-hiperfparse/ +""" + +import sys +import lxml.etree + +from rpki.csv_utils import csv_writer + +def ns(tag): + return "{http://www.arin.net/bulkwhois/core/v1}" + tag + +tag_asn = ns("asn") +tag_net = ns("net") +tag_org = ns("org") +tag_poc = ns("poc") +tag_orgHandle = ns("orgHandle") +tag_netBlock = ns("netBlock") +tag_type = ns("type") +tag_startAddress = ns("startAddress") +tag_endAddress = ns("endAddress") +tag_startAsNumber = ns("startAsNumber") +tag_endAsNumber = ns("endAsNumber") + +def find(node, tag): + return node.findtext(tag).strip() + +def do_asn(node): + asns.writerow((find(node, tag_orgHandle), + "%s-%s" % (find(node, tag_startAsNumber), + find(node, tag_endAsNumber)))) + +erx_table = { + "AF" : "afrinic", + "AP" : "apnic", + "AR" : "arin", + "AV" : "arin", + "FX" : "afrinic", + "LN" : "lacnic", + "LX" : "lacnic", + "PV" : "apnic", + "PX" : "apnic", + "RN" : "ripe", + "RV" : 
"ripe", + "RX" : "ripe" } + +def do_net(node): + handle = find(node, tag_orgHandle) + for netblock in node.iter(tag_netBlock): + tag = find(netblock, tag_type) + startAddress = find(netblock, tag_startAddress) + endAddress = find(netblock, tag_endAddress) + if not startAddress.endswith(".000") and not startAddress.endswith(":0000"): + continue + if not endAddress.endswith(".255") and not endAddress.endswith(":FFFF"): + continue + if tag in ("DS", "DA", "IU"): + prefixes.writerow((handle, "%s-%s" % (startAddress, endAddress))) + elif tag in erx_table: + erx.writerow((erx_table[tag], "%s-%s" % (startAddress, endAddress))) + +dispatch = { tag_asn : do_asn, tag_net : do_net } + +asns = csv_writer("asns.csv") +prefixes = csv_writer("prefixes.csv") +erx = csv_writer("erx.csv") + +root = None + +for event, node in lxml.etree.iterparse(sys.stdin): + + if root is None: + root = node + while root.getparent() is not None: + root = root.getparent() + + if node.getparent() is root: + + if node.tag in dispatch: + dispatch[node.tag](node) + + node.clear() + while node.getprevious() is not None: + del node.getparent()[0] + +asns.close() +prefixes.close() +erx.close() diff --git a/potpourri/cross_certify.py b/potpourri/cross_certify.py new file mode 100644 index 00000000..fab7743b --- /dev/null +++ b/potpourri/cross_certify.py @@ -0,0 +1,74 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
"""
Cross-certification tool to issue a new certificate based on an old
one that was issued by somebody else.  The point of the exercise is to
end up with a valid certificate in our own BPKI which has the same
subject name and subject public key as the one we're replacing.
"""

import os
import sys
import time
import argparse
import rpki.x509
import rpki.sundial

# All validity computations are done in UTC.
os.environ["TZ"] = "UTC"
time.tzset()

parser = argparse.ArgumentParser(description = __doc__)
parser.add_argument("-i", "--in", required = True, dest = "input",
                    type = lambda s: rpki.x509.X509(Auto_file = s),
                    help = "input certificate")
parser.add_argument("-c", "--ca", required = True,
                    type = lambda s: rpki.x509.X509(Auto_file = s),
                    help = "issuing certificate")
parser.add_argument("-k", "--key", required = True,
                    type = lambda s: rpki.x509.RSA(Auto_file = s),
                    help = "private key of issuing certificate")
parser.add_argument("-s", "--serial", required = True,
                    help = "serial number file")
parser.add_argument("-o", "--out",
                    help = "output filename")
parser.add_argument("-l", "--lifetime",
                    type = rpki.sundial.timedelta, default = "30d",
                    help = "lifetime of generated certificate")
args = parser.parse_args()

now = rpki.sundial.now()
notAfter = now + args.lifetime

# The serial file holds the next serial number in hex on its first line.
# A missing file starts the sequence at 1.  Fix: an *empty* file used to
# crash with an uncaught IndexError from splitlines()[0]; treat it the
# same as a missing file.
try:
    with open(args.serial, "r") as f:
        serial = int(f.read().splitlines()[0], 16)
except (IOError, IndexError):
    serial = 1

cert = args.ca.cross_certify(args.key, args.input, serial, notAfter, now)

# Record the serial we just burned before emitting the certificate.
with open(args.serial, "w") as f:
    f.write("%02x\n" % (serial + 1))

if args.out is None:
    sys.stdout.write(cert.get_PEM())
else:
    with open(args.out, "w") as f:
        f.write(cert.get_PEM())
+""" + +import sys +from rpki.resource_set import resource_set_as, resource_set_ipv4, resource_set_ipv6 +from rpki.csv_utils import csv_reader + +asn = resource_set_as() +ipv4 = resource_set_ipv4() +ipv6 = resource_set_ipv6() + +for datum in sys.argv[1:]: + if datum.replace("-", "").isdigit(): + t = asn + else: + t = ipv6 if ":" in datum else ipv4 + if "-" not in datum and "/" not in datum: + datum = datum + "-" + datum + try: + t.append(t.parse_str(datum)) + except: + print "Error attempting to parse", datum + raise + +#print "Looking for: ASNs %s IPv4 %s IPv6 %s" % (asn, ipv4, ipv6) + +def matches(set1, datum): + set2 = set1.__class__(datum) + if set1.intersection(set2): + return set2 + else: + return False + +if asn: + for h, a in csv_reader("asns.csv", columns = 2): + m = matches(asn, a) + if m: + print h, m + +if ipv4 or ipv6: + for h, a in csv_reader("prefixes.csv", columns = 2): + t = ipv6 if ":" in a else ipv4 + m = t and matches(t, a) + if m: + print h, m diff --git a/potpourri/expand-roa-prefixes.py b/potpourri/expand-roa-prefixes.py new file mode 100644 index 00000000..ae34ea0a --- /dev/null +++ b/potpourri/expand-roa-prefixes.py @@ -0,0 +1,79 @@ +# $Id$ +# +# Copyright (C) 2011 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +""" +I got tired of trying to explain in English how the maxLength macro +hack works in ROAs, so this is an attempt to explain it as code. + +Given one or more ROA prefix sets on the command line, this script +prints out the expansion as a list of prefixes. +""" + +import sys +import rpki.resource_set +import rpki.ipaddrs + +class NotAPrefix(Exception): + """ + Address is not a proper prefix. + """ + +class address_range(object): + """ + Iterator for rpki.ipaddrs address objects. + """ + + def __init__(self, start, stop, step): + self.addr = start + self.stop = stop + self.step = step + self.type = type(start) + + def __iter__(self): + while self.addr < self.stop: + yield self.addr + self.addr = self.type(self.addr + self.step) + +def main(argv): + + prefix_sets = [] + for arg in argv: + if ":" in arg: + prefix_sets.extend(rpki.resource_set.roa_prefix_set_ipv6(arg)) + else: + prefix_sets.extend(rpki.resource_set.roa_prefix_set_ipv4(arg)) + + for prefix_set in prefix_sets: + sys.stdout.write("%s expands to:\n" % prefix_set) + + prefix_type = prefix_set.range_type.datum_type + prefix_min = prefix_set.prefix + prefix_max = prefix_set.prefix + (1L << (prefix_type.bits - prefix_set.prefixlen)) + + for prefixlen in xrange(prefix_set.prefixlen, prefix_set.max_prefixlen + 1): + + step = (1L << (prefix_type.bits - prefixlen)) + mask = step - 1 + + for addr in address_range(prefix_min, prefix_max, step): + if (addr & mask) != 0: + raise NotAPrefix, "%s is not a /%d prefix" % (addr, prefixlen) + sys.stdout.write(" %s/%d\n" % (addr, prefixlen)) + + sys.stdout.write("\n") + +if __name__ == "__main__": + main(sys.argv[1:] if len(sys.argv) > 1 else ["18.0.0.0/8-24"]) diff --git a/potpourri/extract-key.py b/potpourri/extract-key.py new file mode 100644 index 00000000..b85c3d55 --- /dev/null +++ b/potpourri/extract-key.py @@ -0,0 +1,64 @@ +# $Id$ + +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2008 American Registry for Internet Numbers ("ARIN") +# 
+# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Extract a private key from rpkid's database. + +This is a debugging tool. rpkid goes to some trouble not to expose +private keys, which is correct for normal operation, but for debugging +it is occasionally useful to be able to extract the private key from +MySQL. This script is just a convenience, it doesn't enable anything +that couldn't be done via the mysql command line tool. + +While we're at this we also extract the corresponding certificate. 
+""" + +import os +import time +import argparse +import sys +import MySQLdb +import rpki.x509 + +os.environ["TZ"] = "UTC" +time.tzset() + +parser = argparse.ArgumentParser(description = __doc__) +parser.add_argument("-s", "--self", required = True, help = "self handle") +parser.add_argument("-b", "--bsc", required = True, help = "BSC handle") +parser.add_argument("-u", "--user", required = True, help = "MySQL user name") +parser.add_argument("-d", "--db", required = True, help = "MySQL database name") +parser.add_argument("-p", "--password", required = True, help = "MySQL password") +args = parser.parse_args() + +cur = MySQLdb.connect(user = args.user, db = args.db, passwd = args.password).cursor() + +cur.execute( + """ + SELECT bsc.private_key_id, bsc.signing_cert + FROM bsc, self + WHERE self.self_handle = %s AND self.self_id = bsc.self_id AND bsc_handle = %s + """, + (args.self, args.bsc)) + +key, cer = cur.fetchone() + +print rpki.x509.RSA(DER = key).get_PEM() + +if cer: + print rpki.x509.X509(DER = cer).get_PEM() diff --git a/potpourri/fakerootd.py b/potpourri/fakerootd.py new file mode 100644 index 00000000..6275a2a9 --- /dev/null +++ b/potpourri/fakerootd.py @@ -0,0 +1,50 @@ +# $Id$ +# +# Copyright (C) 2011 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +""" +Hack to fake a catatonic rootd, for testing. +""" + +import sys +import socket +import datetime +import signal + +port = int(sys.argv[1]) if len(sys.argv) > 1 else 4405 +limit = int(sys.argv[2]) if len(sys.argv) > 2 else 5 + +print "Listening on port", port + +s4 = socket.socket(socket.AF_INET, socket.SOCK_STREAM) +s4.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) +s4.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) +s4.bind(('', port)) +s4.listen(limit) + +s6 = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) +s6.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) +s6.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) +s6.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) +s6.bind(('::1', port)) +s6.listen(limit) + +print "Going to sleep at", datetime.datetime.utcnow() + +try: + signal.pause() +except KeyboardInterrupt: + sys.exit(0) + diff --git a/potpourri/find-roa-expiration.py b/potpourri/find-roa-expiration.py new file mode 100644 index 00000000..1401dc42 --- /dev/null +++ b/potpourri/find-roa-expiration.py @@ -0,0 +1,61 @@ +# $Id$ +# +# Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +""" +Look for ROAs for particular prefixes, like find_roa, then, for each +ROA we find, dig out the expiration times of all the certificates +involved in the authorization chain, all the way back to the root. +""" + +import sys +import subprocess +import rpki.POW + +def filename_to_uri(filename): + if not filename.startswith(sys.argv[1]): + raise ValueError + return "rsync://" + filename[len(sys.argv[1]):].lstrip("/") + +def uri_to_filename(uri): + if not uri.startswith("rsync://"): + raise ValueError + return sys.argv[1] + "/" + uri[len("rsync://"):] + +def get_aia(x): + for i in xrange(x.countExtensions()): + ext = x.getExtension(i) + if ext[0] == "authorityInfoAccess": + return ext[2][ext[2].index("rsync://"):] + return None + +for line in subprocess.check_output(["find_roa"] + sys.argv[1:]).splitlines(): + + words = line.split() + fn = words.pop() + del words[-1] + print " ".join(words) + + x = rpki.POW.CMS.derReadFile(fn).certs()[0] + uri = get_aia(x) + print x.getNotAfter(), filename_to_uri(fn) + + while uri: + fn = uri_to_filename(uri) + x = rpki.POW.X509.derReadFile(fn) + print x.getNotAfter(), uri + uri = get_aia(x) + + print diff --git a/potpourri/format-application-x-rpki.py b/potpourri/format-application-x-rpki.py new file mode 100644 index 00000000..184103f9 --- /dev/null +++ b/potpourri/format-application-x-rpki.py @@ -0,0 +1,132 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Take the basic application/x-rpki messages that rpkid and friends +log and translate them into a text version that's easier to search, +without losing any of the original data. We use MH for the output +format because nmh makes a handy viewer. +""" + +import email.mime +import email.mime.application +import email.mime.text +import email.mime.multipart +import email.utils +import email.encoders +import mailbox +import rpki.POW +import lxml.etree +import argparse +import sys +import base64 + +parser = argparse.ArgumentParser(description = __doc__) +parser.add_argument("-i", "--input", required = True, + help = "input Maildir") +parser.add_argument("-m", "--mark", action = "store_true", + help = "mark seen messages") +parser.add_argument("-k", "--kill", action = "store_true", + help = "kill seen messages") +parser.add_argument("-o", "--output", required = True, + help = "output MH folder") +parser.add_argument("-t", "--tag", + default = "{http://www.apnic.net/specs/rescerts/up-down/}message", + help = "XML namespace tag for an input message") +parser.add_argument("-u", "--unseen", action = "store_true", + help = "only process unseen messages") +args = parser.parse_args() + +def pprint_cert(b64): + return rpki.POW.X509.derRead(base64.b64decode(b64)).pprint() + +def up_down(): + msg["X-RPKI-Up-Down-Type"] = xml.get("type") + msg["X-RPKI-Up-Down-Sender"] = xml.get("sender") + msg["X-RPKI-Up-Down-Recipient"] = xml.get("recipient") + msg["Subject"] = "Up-down %s %s => %s" % (xml.get("type"), xml.get("sender"), xml.get("recipient")) + for x in xml: + if x.tag.endswith("class"): + for y in x: + if y.tag.endswith("certificate") or 
y.tag.endswith("issuer"): + msg.attach(email.mime.text.MIMEText(pprint_cert(y.text))) + +def left_right(): + msg["X-RPKI-Left-Right-Type"] = xml.get("type") + msg["Subject"] = "Left-right %s" % xml.get("type") + +def publication(): + msg["X-RPKI-Left-Right-Type"] = xml.get("type") + msg["Subject"] = "Publication %s" % xml.get("type") + +dispatch = { "{http://www.apnic.net/specs/rescerts/up-down/}message" : up_down, + "{http://www.hactrn.net/uris/rpki/left-right-spec/}msg" : left_right, + "{http://www.hactrn.net/uris/rpki/publication-spec/}msg" : publication } + +def fix_headers(): + if "X-RPKI-PID" in srcmsg or "X-RPKI-Object" in srcmsg: + msg["X-RPKI-PID"] = srcmsg["X-RPKI-PID"] + msg["X-RPKI-Object"] = srcmsg["X-RPKI-Object"] + else: + words = srcmsg["Subject"].split() + msg["X-RPKI-PID"] = words[1] + msg["X-RPKI-Object"] = " ".join(words[4:]) + +destination = None +source = None +try: + destination = mailbox.MH(args.output, factory = None, create = True) + source = mailbox.Maildir(args.input, factory = None) + + for srckey, srcmsg in source.iteritems(): + if args.unseen and "S" in srcmsg.get_flags(): + continue + assert not srcmsg.is_multipart() and srcmsg.get_content_type() == "application/x-rpki" + payload = srcmsg.get_payload(decode = True) + cms = rpki.POW.CMS.derRead(payload) + txt = cms.verify(rpki.POW.X509Store(), None, rpki.POW.CMS_NOCRL | rpki.POW.CMS_NO_SIGNER_CERT_VERIFY | rpki.POW.CMS_NO_ATTR_VERIFY | rpki.POW.CMS_NO_CONTENT_VERIFY) + xml = lxml.etree.fromstring(txt) + tag = xml.tag + if args.tag and tag != args.tag: + continue + msg = email.mime.multipart.MIMEMultipart("related") + msg["X-RPKI-Tag"] = tag + for i in ("Date", "Message-ID", "X-RPKI-Timestamp"): + msg[i] = srcmsg[i] + fix_headers() + if tag in dispatch: + dispatch[tag]() + if "Subject" not in msg: + msg["Subject"] = srcmsg["Subject"] + msg.attach(email.mime.text.MIMEText(txt)) + msg.attach(email.mime.application.MIMEApplication(payload, "x-rpki")) + msg.epilogue = "\n" # Force trailing 
#!/usr/bin/awk -f

# Use gnuplot to graph interesting data from gc_summary lines in rpkid logs.
#
# Environment knobs: TARGET (object type to graph, default "tuple"),
# OUTTYPE (gnuplot terminal, default "png"), OUTNAME (output file
# basename; empty means interactive display).

BEGIN {
  target  = ENVIRON["TARGET"]  ? ENVIRON["TARGET"]  : "tuple";
  outtype = ENVIRON["OUTTYPE"] ? ENVIRON["OUTTYPE"] : "png";
  outname = ENVIRON["OUTNAME"] ? ENVIRON["OUTNAME"] : "";
  print "set xdata time";
  print "set timefmt '%Y-%m-%dT%H:%M:%S'";
  #print "set format x '%d%b'";
  print "set format x '%T'";
  print "set key right bottom";
  if (outname) {
    print "set terminal", outtype;
    print "set output '" outname "." outtype "'";
    print "set term png size 1024,1024";
  }
  # One dataset per input file when several are given, otherwise a
  # single generically-titled dataset.
  if (ARGC <= 2) {
    print "plot '-' using 1:2 with linespoints title 'rpkid use of", target, "objects'";
  } else {
    plotcmd = "plot '-' using 1:2 with linespoints title '" ARGV[1] "'";
    for (i = 2; i < ARGC; i++)
      plotcmd = plotcmd ", '-' using 1:2 with linespoints title '" ARGV[i] "'";
    print plotcmd;
  }
}

# Terminate the previous inline dataset when the input file changes.
FILENAME != filename && filename {
  print "e";
}

FILENAME != filename {
  print "#", FILENAME
  filename = FILENAME;
  proc = "";
}

# Blank line separates runs from different rpkid processes.
$6 == target && proc != $3 && proc {
  print "";
}

$6 == target && proc != $3 {
  proc = $3;
}

# Emit one "timestamp count" point per matching log line.
$6 == target {
  print "#", $0;
  print $1 "T" $2, $5;
}

END {
  print "e";
  if (!outname)
    print "pause mouse any";
}
import sys, os, time

class datapoint(object):
    """
    One gc_summary log record, plus class-level machinery that collects
    every record and emits a gnuplot script plotting the interesting ones.

    Class configuration comes from the environment: OUTTYPE (gnuplot
    terminal, default png), OUTNAME (output basename, empty = interactive),
    TIMEFMT (x-axis label format), PRETEND_EVERYTHING_CHANGED (plot all
    series), THRESHOLD (minimum count spread for a series to be plotted).
    """

    outtype = os.getenv("OUTTYPE", "png")
    outname = os.getenv("OUTNAME", "")
    timefmt = os.getenv("TIMEFMT", "%T")
    pretend = os.getenv("PRETEND_EVERYTHING_CHANGED", False)
    threshold = int(os.getenv("THRESHOLD", "100"))

    # Shared across all instances: every record seen, and the distinct
    # input filenames in first-seen order.
    raw = []
    filenames = []

    def __init__(self, filename, timestamp, process, count, typesig, line):
        # Each construction registers the record in cls.raw as a side effect.
        self.filename = filename
        self.timestamp = timestamp
        self.process = process
        self.count = count
        self.typesig = typesig
        self.line = line
        # Series key: one plotted line per (file, object-type signature).
        self.key = "%s %s" % (filename, typesig)
        self.raw.append(self)
        if filename not in self.filenames:
            self.filenames.append(filename)

    def __cmp__(self, other):
        # Sort by series key, then chronologically within a series.
        c = cmp(self.key, other.key)
        return c if c else cmp(self.timestamp, other.timestamp)

    @classmethod
    def plot(cls):
        """
        Write a complete gnuplot script for the collected records to stdout.
        """

        # A series is "interesting" if its count varies by more than
        # threshold (or unconditionally, under PRETEND_EVERYTHING_CHANGED).
        print "# [%s] Looking for interesting records" % time.strftime("%T")
        changed = {}
        for i in cls.raw:
            if i.key not in changed:
                changed[i.key] = set()
            changed[i.key].add(i.count)
        if cls.pretend:
            changed = set(changed.iterkeys())
        else:
            changed = set(k for k, v in changed.iteritems() if max(v) - min(v) > cls.threshold)

        if not changed:
            print "# [%s] Apparently nothing worth reporting" % time.strftime("%T")
            print "print 'Nothing to plot'"
            return

        print "# [%s] Header" % time.strftime("%T")
        print "set xdata time"
        print "set timefmt '%Y-%m-%dT%H:%M:%S'"
        print "set format x '%s'" % cls.timefmt
        print "set key right bottom"
        if cls.outname:
            print "set terminal", cls.outtype
            print "set output '%s.%s'" % (cls.outname, cls.outtype)
            print "set term png size 1024,1024"
        print "plot", ", ".join("'-' using 1:2 with linespoints title '%s'" % i for i in changed)

        print "# [%s] Sorting" % time.strftime("%T")
        cls.raw.sort()

        # Emit inline datasets: "e" closes a dataset when the series key
        # changes, a blank line breaks the line when the process changes
        # within a series.  Order matters here -- records must already be
        # sorted by (key, timestamp).
        key = None
        proc = None
        for i in cls.raw:
            if i.key not in changed:
                continue
            if key is not None and i.key != key:
                print "e"
            elif proc is not None and i.process != proc:
                print ""
            key = i.key
            proc = i.process
            print "#", i.key, i.line
            print i.timestamp, i.count
        print "e"
        if not cls.outname:
            print "pause mouse any"

# Scan the named log files for gc_summary lines; expected layout is
# "date time process ... count (typesig)" in whitespace-separated fields.
for filename in sys.argv[1:]:
    print "# [%s] Reading %s" % (time.strftime("%T"), filename)
    for line in open(filename):
        if "gc_summary:" in line:
            word = line.split(None, 6)
            if word[4].isdigit() and word[5].startswith("(") and word[5].endswith(")"):
                datapoint(filename = filename,
                          timestamp = word[0] + "T" + word[1],
                          process = word[2],
                          count = int(word[4]),
                          typesig = word[5],
                          line = line.strip())

print "# [%s] Plotting" % time.strftime("%T")
datapoint.plot()
+""" + +import sys +import lxml.etree +from rpki.csv_utils import csv_writer + +def iterate_xml(filename, tag): + return lxml.etree.parse(filename).getroot().getiterator(tag) + +def ns(tag): + return "{http://www.iana.org/assignments}" + tag + +tag_description = ns("description") +tag_designation = ns("designation") +tag_record = ns("record") +tag_number = ns("number") +tag_prefix = ns("prefix") + +asns = csv_writer("asns.csv") +prefixes = csv_writer("prefixes.csv") + +for record in iterate_xml("as-numbers.xml", tag_record): + if record.findtext(tag_description) == "Assigned by RIPE NCC": + asns.writerow(("RIPE", record.findtext(tag_number))) + +for record in iterate_xml("ipv4-address-space.xml", tag_record): + if record.findtext(tag_designation) in ("RIPE NCC", "Administered by RIPE NCC"): + prefix = record.findtext(tag_prefix) + p, l = prefix.split("/") + assert l == "8", "Violated /8 assumption: %r" % prefix + prefixes.writerow(("RIPE", "%d.0.0.0/8" % int(p))) + +for record in iterate_xml("ipv6-unicast-address-assignments.xml", tag_record): + if record.findtext(tag_description) == "RIPE NCC": + prefixes.writerow(("RIPE", record.findtext(tag_prefix))) + +asns.close() +prefixes.close() diff --git a/potpourri/gski.py b/potpourri/gski.py new file mode 100644 index 00000000..083a59c8 --- /dev/null +++ b/potpourri/gski.py @@ -0,0 +1,21 @@ +# $Id$ + +# Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +import rpki.x509, sys + +for file in sys.argv[1:]: + cert = rpki.x509.X509(Auto_file = file) + print cert.gSKI(), cert.hSKI(), file diff --git a/potpourri/guess-roas-from-routeviews.py b/potpourri/guess-roas-from-routeviews.py new file mode 100644 index 00000000..d8fb9c4c --- /dev/null +++ b/potpourri/guess-roas-from-routeviews.py @@ -0,0 +1,63 @@ +# $Id$ +# +# Copyright (C) 2009 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Pull RFC 3779 resources from a cert, attempt to mine routeviews (via +DNS, using the dnspython toolkit) for what the ROAs might look like +for the addresses found in the cert. + +This doesn't handle IPv6, because neither, apparently, does the +routeviews DNS interface. Oh well. + +NB: this is wild-assed guessing at best. Even if the routeviews data +were signed, which it is not, you have no particular reason to believe +it. Do not use output of this script production. Sanity check. +Beware of dog. 
import sys
import dns.resolver
import rpki.x509
from rpki.ipaddrs import v4addr
from rpki.resource_set import roa_prefix_ipv4, resource_set_ipv4, resource_range_ipv4

# Candidate ROA lines ("prefix\tasn\tfilename"), deduplicated.
roas = []

for filename in sys.argv[1:]:
    # RFC 3779 IPv4 resources claimed by this certificate.
    resources = rpki.x509.X509(Auto_file = filename).get_3779resources().v4

    # Chew through the resource set, subtracting covered space each
    # round until nothing remains.  Order matters: each query targets
    # the lowest still-uncovered address.
    while resources:
        # Reverse the octets to form the routeviews DNS query name
        # (a.b.c.d -> d.c.b.a.asn.routeviews.org).
        labels = str(resources[0].min).split(".")
        labels.reverse()

        try:
            for answer in dns.resolver.query(".".join(labels) + ".asn.routeviews.org", "txt"):
                # NOTE(review): assumes the TXT record carries exactly
                # (origin ASN, prefix, prefix length) in that order --
                # confirm against the routeviews DNS interface.
                asn, prefix, prefixlen = answer.strings
                roa_prefix = roa_prefix_ipv4(v4addr(prefix), long(prefixlen))
                roa = "%s\t%s\t%s" % (roa_prefix, long(asn), filename)
                if roa not in roas:
                    roas.append(roa)
                # Remove the space this candidate ROA covers.
                resources = resources.difference(resource_set_ipv4([roa_prefix.to_resource_range()]))

        except dns.resolver.NXDOMAIN:
            # No routeviews data for this address: skip ahead 256
            # addresses and try again.
            resources = resources.difference(resource_set_ipv4([resource_range_ipv4(resources[0].min, v4addr(resources[0].min + 256))]))

roas.sort()

for roa in roas:
    print roa
"""
Parse IANA XML data.
"""

import sys
import lxml.etree
from rpki.csv_utils import csv_reader, csv_writer
from rpki.resource_set import resource_bag

def iterate_xml(filename, tag):
    """Yield every element with the given tag from an IANA registry file."""
    return lxml.etree.parse(filename).getroot().getiterator(tag)

def ns(tag):
    """Qualify tag with the IANA assignments XML namespace."""
    return "{http://www.iana.org/assignments}" + tag

tag_description = ns("description")
tag_designation = ns("designation")
tag_record = ns("record")
tag_number = ns("number")
tag_prefix = ns("prefix")
tag_status = ns("status")

# Map each registry spelling ("ARIN", "Assigned by ARIN", "Administered
# by ARIN") to one lowercase handle, and give each RIR -- plus a
# catch-all "legacy" bucket -- an empty resource bag.
handles = {}
rirs = { "legacy" : resource_bag() }

for rir in ("AfriNIC", "APNIC", "ARIN", "LACNIC", "RIPE NCC"):
    handle = rir.split()[0].lower()
    handles[rir] = handles["Assigned by %s" % rir] = handles["Administered by %s" % rir] = handle
    rirs[handle] = resource_bag()

asns = csv_writer("asns.csv")
prefixes = csv_writer("prefixes.csv")

for record in iterate_xml("as-numbers.xml", tag_record):
    description = record.findtext(tag_description)
    if description in handles:
        asns.writerow((handles[description], record.findtext(tag_number)))

# IPv4: the top-level registry hands out /8s only; anything else means
# our parsing assumption broke.
for record in iterate_xml("ipv4-address-space.xml", tag_record):
    designation = record.findtext(tag_designation)
    if record.findtext(tag_status) != "RESERVED":
        prefix, prefixlen = [int(i) for i in record.findtext(tag_prefix).split("/")]
        if prefixlen != 8:
            raise ValueError("%s violated /8 assumption" % record.findtext(tag_prefix))
        rirs[handles.get(designation, "legacy")] |= resource_bag.from_str("%d.0.0.0/8" % prefix)

for record in iterate_xml("ipv6-unicast-address-assignments.xml", tag_record):
    description = record.findtext(tag_description)
    # Fix: was re-calling record.findtext(tag_description) here instead
    # of using the value already bound to description.
    if description in handles:
        rirs[handles[description]] |= resource_bag.from_str(record.findtext(tag_prefix))

# Early-registration (ERX) space overrides the top-level registry:
# subtract it from every RIR, then add each piece back to whichever RIR
# erx.csv says now administers it.
erx = list(csv_reader("erx.csv"))
assert all(r in rirs for r, p in erx)

erx_overrides = resource_bag.from_str(",".join(p for r, p in erx), allow_overlap = True)

for rir in rirs:
    if rir != "legacy":
        rirs[rir] -= erx_overrides
        rirs[rir] |= resource_bag.from_str(",".join(p for r, p in erx if r == rir), allow_overlap = True)

for rir, bag in rirs.iteritems():
    for p in bag.v4:
        prefixes.writerow((rir, p))
    for p in bag.v6:
        prefixes.writerow((rir, p))

asns.close()
prefixes.close()
+""" + +import rpki.POW.pkix, rpki.oids + +need_header = True + +for oid, name in rpki.oids.oid2name.items(): + try: + rpki.POW.pkix.oid2obj(oid) + except: + o = rpki.POW.pkix.Oid() + o.set(oid) + if need_header: + print + print "# Local additions" + need_header = False + print + print "OID =", " ".join(("%02X" % ord(c)) for c in o.toString()) + print "Comment = RPKI project" + print "Description =", name, "(" + " ".join((str(i) for i in oid)) + ")" diff --git a/potpourri/object-dates.py b/potpourri/object-dates.py new file mode 100644 index 00000000..b99441d6 --- /dev/null +++ b/potpourri/object-dates.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# $Id$ + +# Extract notBefore, notAfter, thisUpdate and nextUpdate dates from +# RPKI objects. + +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +import sys +import os.path +import rpki.POW + +extract_flags = (rpki.POW.CMS_NOCRL | + rpki.POW.CMS_NO_SIGNER_CERT_VERIFY | + rpki.POW.CMS_NO_ATTR_VERIFY | + rpki.POW.CMS_NO_CONTENT_VERIFY) + +def get_mft(fn): + cms = rpki.POW.Manifest.derReadFile(fn) + cms.verify(rpki.POW.X509Store(), None, extract_flags) + return cms, cms.certs()[0] + +def get_roa(fn): + return None, rpki.POW.CMS.derReadFile(fn).certs()[0] + +def get_gbr(fn): + return None, rpki.POW.CMS.derReadFile(fn).certs()[0] + +def get_crl(fn): + return rpki.POW.CRL.derReadFile(fn), None + +def get_cer(fn): + return None, rpki.POW.X509.derReadFile(fn) + +dispatch = dict(mft = get_mft, + roa = get_roa, + gbr = get_gbr, + crl = get_crl, + cer = get_cer) + +for fn in sys.argv[1:]: + obj, cer = dispatch[os.path.splitext(fn)[1][1:]](fn) + print fn + if cer is not None: + print "notBefore: ", cer.getNotBefore() + if obj is not None: + print "thisUpdate:", obj.getThisUpdate() + print "nextUpdate:", obj.getNextUpdate() + if cer is not None: + print "notAfter: ", cer.getNotAfter() + print diff --git a/potpourri/pcap-to-xml.sh b/potpourri/pcap-to-xml.sh new file mode 100644 index 00000000..73c30880 --- /dev/null +++ b/potpourri/pcap-to-xml.sh @@ -0,0 +1,36 @@ +#!/bin/sh - +# $Id$ +# +# Copyright (C) 2011 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +# Zeroeth cut at a packet decoder for RPKI up-down / left-right / +# publication traffic captured off the wire. Needs work, not suitable +# for general use, depends on a bunch of external programs that I +# happen to have installed...but has been useful to me. + +for p in *.pcap +do + tcptrace -e $p + for i in *.dat + do + j=${i%_contents.dat} + sed '1,/^ $/d' $i >$j.der + openssl cms -verify -noverify -inform DER -in $j.der | xmlindent > $j.xml + k=$(dumpasn1 -a $j.der 2>/dev/null | awk 'BEGIN {FS = "[ \t/:]+"} /signingTime/ {nr = NR + 2} NR == nr {print $6 "-" $5 "-" $4 "T" $7 ":" $8 ":" $9 "Z"}') + mv $j.der $k.$j.der + mv $j.xml $k.$j.xml + rm $i + done +done diff --git a/potpourri/print-profile.py b/potpourri/print-profile.py new file mode 100644 index 00000000..081d2602 --- /dev/null +++ b/potpourri/print-profile.py @@ -0,0 +1,20 @@ +# $Id$ +# +# Copyright (C) 2010 Internet Systems Consortium, Inc. ("ISC") +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +import pstats, glob + +for f in glob.iglob("*.prof"): + pstats.Stats(f).sort_stats("time").print_stats(50) diff --git a/potpourri/rcynic-diff.py b/potpourri/rcynic-diff.py new file mode 100644 index 00000000..327a7b71 --- /dev/null +++ b/potpourri/rcynic-diff.py @@ -0,0 +1,114 @@ +# $Id$ +# +# Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Diff a series of rcynic.xml files, sort of. 
+""" + +import sys + +try: + from lxml.etree import ElementTree +except ImportError: + from xml.etree.ElementTree import ElementTree + +show_backup_generation = False +show_rsync_transfer = False + +class Object(object): + + def __init__(self, session, uri, generation): + self.session = session + self.uri = uri + self.generation = generation + self.labels = [] + + def add(self, label): + self.labels.append(label) + + def __cmp__(self, other): + return cmp(self.labels, other.labels) + +def show(old = None, new = None): + assert old is not None or new is not None + assert old is None or new is None or old.uri == new.uri + if old is None: + obj = new + labels = ["+" + label for label in new.labels] + elif new is None: + obj = old + labels = ["-" + label for label in old.labels] + else: + obj = new + labels = [] + for label in new.session.labels: + if label in new.labels and label in old.labels: + labels.append(label) + elif label in new.labels: + labels.append("+" + label) + elif label in old.labels: + labels.append("-" + label) + labels = " ".join(labels) + if show_backup_generation: + print " ", obj.uri, obj.generation, labels + else: + print " ", obj.uri, labels + +class Session(dict): + + def __init__(self, name): + self.name = name + tree = ElementTree(file = name) + self.labels = [elt.tag.strip() for elt in tree.find("labels")] + for elt in tree.findall("validation_status"): + generation = elt.get("generation") + status = elt.get("status") + uri = elt.text.strip() + if not show_rsync_transfer and status.startswith("rsync_transfer_"): + continue + if show_backup_generation: + key = (uri, generation) + elif generation == "backup": + continue + else: + key = uri + if key not in self: + self[key] = Object(self, uri, generation) + self[key].add(status) + +old_db = new_db = None + +for arg in sys.argv[1:]: + + old_db = new_db + new_db = Session(arg) + + if old_db is None: + continue + + only_old = set(old_db) - set(new_db) + only_new = set(new_db) - set(old_db) + 
changed = set(key for key in (set(old_db) & set(new_db)) if old_db[key] != new_db[key]) + + if only_old or changed or only_new: + print "Comparing", old_db.name, "with", new_db.name + for key in sorted(only_old): + show(old = old_db[key]) + for key in sorted(changed): + show(old = old_db[key], new = new_db[key]) + for key in sorted(only_new): + show(new = new_db[key]) + print diff --git a/potpourri/rcynic-lta b/potpourri/rcynic-lta new file mode 100755 index 00000000..4c55db92 --- /dev/null +++ b/potpourri/rcynic-lta @@ -0,0 +1,1055 @@ +#!/usr/local/bin/python + +# $Id$ + +# Copyright (C) 2013 Dragon Research Labs ("DRL") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +######################################################################## +# +# DANGER WILL ROBINSON +# +# This is a PROTOTYPE of a local trust anchor mechanism. At the +# moment, it DOES NOT WORK by any sane standard of measurement. It +# produces output, but there is no particular reason to believe said +# output is useful, and fairly good reason to believe that it is not. +# +# With luck, this may eventually mutate into something useful. For +# now, just leave it alone unless you really know what you are doing, +# in which case, on your head be it. 
+# +# YOU HAVE BEEN WARNED +# +######################################################################## + +import os +import sys +import yaml +import glob +import time +import shutil +import base64 +import socket +import sqlite3 +import weakref +import rpki.POW +import rpki.x509 +import rpki.sundial +import rpki.resource_set + +# Teach SQLite3 about our data types. + +sqlite3.register_adapter(rpki.POW.IPAddress, + lambda x: buffer("_" + x.toBytes())) + +sqlite3.register_converter("RangeVal", + lambda s: long(s) if s.isdigit() else rpki.POW.IPAddress.fromBytes(s[1:])) + +sqlite3.register_adapter(rpki.x509.X501DN, str) + + +class main(object): + + tal_directory = None + constraints = None + rcynic_input = None + rcynic_output = None + tals = None + keyfile = None + + ltakey = None + ltacer = None + + ltauri = "rsync://localhost/lta" + ltasia = ltauri + "/" + ltaaia = ltauri + ".cer" + ltamft = ltauri + "/lta.mft" + ltacrl = ltauri + "/lta.crl" + + cer_delta = rpki.sundial.timedelta(days = 7) + crl_delta = rpki.sundial.timedelta(hours = 1) + + all_mentioned_resources = rpki.resource_set.resource_bag() + + + def __init__(self): + print "Parsing YAML" + self.parse_yaml() + print + print "Parsing TALs" + self.parse_tals() + print + print "Creating DB" + self.rpdb = RPDB(self.db_name) + print + print "Creating CA" + self.create_ca() + print + print "Loading DB" + self.rpdb.load(self.rcynic_input) + print + print "Processing adds and drops" + self.process_add_drop() + print + print "Processing deletions" + self.process_constraint_deletions() + print + print "Re-parenting TAs" + self.re_parent_tas() + print + print "Generating CRL and manifest" + self.generate_crl_and_manifest() + print + print "Committing final changes to DB" + self.rpdb.commit() + print + print "Dumping para-objects" + self.rpdb.dump_paras(self.rcynic_output) + print + print "Closing DB" + self.rpdb.close() + + + def create_ca(self): + self.serial = Serial() + self.ltakey = rpki.x509.RSA.generate(quiet = 
True) + cer = OutgoingX509.self_certify( + cn = "%s LTA Root Certificate" % socket.getfqdn(), + keypair = self.ltakey, + subject_key = self.ltakey.get_RSApublic(), + serial = self.serial(), + sia = (self.ltasia, self.ltamft, None), + notAfter = rpki.sundial.now() + self.cer_delta, + resources = rpki.resource_set.resource_bag.from_str("0-4294967295,0.0.0.0/0,::/0")) + subject_id = self.rpdb.find_keyname(cer.getSubject(), cer.get_SKI()) + self.rpdb.cur.execute("INSERT INTO outgoing (der, fn2, subject, issuer, uri, key) " + "VALUES (?, 'cer', ?, ?, ?, ?)", + (buffer(cer.get_DER()), subject_id, subject_id, self.ltaaia, + buffer(self.ltakey.get_DER()))) + self.ltacer = self.rpdb.find_outgoing_by_id(self.rpdb.cur.lastrowid) + + + def parse_yaml(self, fn = "rcynic-lta.yaml"): + y = yaml.safe_load(open(fn, "r")) + self.db_name = y["db-name"] + self.tal_directory = y["tal-directory"] + self.rcynic_input = y["rcynic-input"] + self.rcynic_output = y["rcynic-output"] + self.keyfile = y["keyfile"] + self.constraints = [Constraint(yc) for yc in y["constraints"]] + + + def parse_tals(self): + self.tals = {} + for fn in glob.iglob(os.path.join(self.tal_directory, "*.tal")): + with open(fn, "r") as f: + uri = f.readline().strip() + key = rpki.POW.Asymmetric.derReadPublic(base64.b64decode(f.read())) + self.tals[uri] = key + + + @staticmethod + def show_candidates(constraint, candidates): + print + print "Constraint:", repr(constraint) + print "Resources: ", constraint.mentioned_resources + for i, candidate in enumerate(candidates): + print " Candidate #%d id %d depth %d name %s uri %s" % ( + i, candidate.rowid, + candidate.depth, + candidate.subject_name, + candidate.uri) + if constraint.mentioned_resources <= candidate.resources: + print " Matched" + #print " Constraint resources:", constraint.mentioned_resources + #print " Candidate resources: ", candidate.resources + break + else: + print " No match" + + + def process_add_drop(self): + # + # We probably need to create the output 
root before running this, + # otherwise there's a chance that an "add" constraint will yield + # no viable candidate parent. Not likely to happen with current + # test setup where several of our roots claim 0/0. + # + for constraint in self.constraints: + candidates = self.rpdb.find_by_resource_bag(constraint.mentioned_resources) + candidates.sort(reverse = True, key = lambda candidate: candidate.depth) + #self.show_candidates(constraint, candidates) + constraint.drop(candidates) + constraint.add(candidates) + + + def process_constraint_deletions(self): + for obj in self.rpdb.find_by_resource_bag(self.all_mentioned_resources): + self.add_para(obj, obj.resources - self.all_mentioned_resources) + + + def re_parent_tas(self): + for uri, key in self.tals.iteritems(): + for ta in self.rpdb.find_by_ski_or_uri(key.calculateSKI(), uri): + if ta.para_obj is None: + self.add_para(ta, ta.resources - self.all_mentioned_resources) + + + def add_para(self, obj, resources): + return self.rpdb.add_para( + obj = obj, + resources = resources, + serial = self.serial, + ltacer = self.ltacer, + ltasia = self.ltasia, + ltaaia = self.ltaaia, + ltamft = self.ltamft, + ltacrl = self.ltacrl, + ltakey = self.ltakey) + + + def generate_crl_and_manifest(self): + thisUpdate = rpki.sundial.now() + nextUpdate = thisUpdate + self.crl_delta + serial = self.serial() + issuer = self.ltacer.getSubject() + aki = buffer(self.ltacer.get_SKI()) + + crl = OutgoingCRL.generate( + keypair = self.ltakey, + issuer = self.ltacer, + serial = serial, + thisUpdate = thisUpdate, + nextUpdate = nextUpdate, + revokedCertificates = ()) + + issuer_id = self.rpdb.find_keyname(issuer, aki) + + self.rpdb.cur.execute("INSERT INTO outgoing (der, fn2, subject, issuer, uri) " + "VALUES (?, 'crl', NULL, ?, ?)", + (buffer(crl.get_DER()), issuer_id, self.ltacrl)) + crl = self.rpdb.find_outgoing_by_id(self.rpdb.cur.lastrowid) + + key = rpki.x509.RSA.generate(quiet = True) + + cer = self.ltacer.issue( + keypair = self.ltakey, + 
subject_key = key.get_RSApublic(), + serial = serial, + sia = (None, None, self.ltamft), + aia = self.ltaaia, + crldp = self.ltacrl, + resources = rpki.resource_set.resource_bag.from_inheritance(), + notAfter = self.ltacer.getNotAfter(), + is_ca = False) + + # Temporary kludge, need more general solution but that requires + # more refactoring than I feel like doing this late in the day. + # + names_and_objs = [(uri, OutgoingObject.create(fn2 = fn2, der = der, uri = uri, + rpdb = None, rowid = None, + subject_id = None, issuer_id = None)) + for fn2, der, uri in + self.rpdb.cur.execute("SELECT fn2, der, uri FROM outgoing WHERE issuer = ?", + (self.ltacer.rowid,))] + + mft = OutgoingSignedManifest.build( + serial = serial, + thisUpdate = thisUpdate, + nextUpdate = nextUpdate, + names_and_objs = names_and_objs, + keypair = key, + certs = cer) + + subject_id = self.rpdb.find_keyname(cer.getSubject(), cer.get_SKI()) + + self.rpdb.cur.execute("INSERT INTO outgoing (der, fn2, subject, issuer, uri, key) " + "VALUES (?, 'mft', ?, ?, ?, ?)", + (buffer(mft.get_DER()), subject_id, issuer_id, self.ltamft, buffer(key.get_DER()))) + + + @staticmethod + def parse_xki(s): + """ + Parse text form of an SKI or AKI. We accept two encodings: + colon-delimited hexadecimal, and URL-safe Base64. The former is + what OpenSSL prints in its text representation of SKI and AKI + extensions; the latter is the g(SKI) value that some RPKI CA engines + (including rpkid) use when constructing filenames. + + In either case, we check that the decoded result contains the right + number of octets to be a SHA-1 hash. 
+ """ + + if ":" in s: + b = "".join(chr(int(c, 16)) for c in s.split(":")) + else: + b = base64.urlsafe_b64decode(s + ("=" * (4 - len(s) % 4))) + if len(b) != 20: + raise RuntimeError("Bad length for SHA1 xKI value: %r" % s) + return b + + + +class Serial(object): + + def __init__(self): + self.value = long(time.time()) << 32 + + def __call__(self): + self.value += 1 + return self.value + + +class ConstrainedObject(object): + # I keep expecting the classes derived from this to have some common + # methods, but so far it hasn't happened. Clean up eventually if not. + pass + +class ConstrainedROA(ConstrainedObject): + + def __init__(self, constraint, y): + self.constraint = constraint + self.asn = long(y["asn"]) if y is not None else None + self.maxlen = long(y["maxlen"]) if y is not None and "maxlen" in y else None + + def drop(self, candidates): + for candidate in candidates: + if isinstance(candidate, IncomingROA) and \ + self.constraint.mentioned_resources == candidate.resources and \ + (self.asn is None or self.asn == candidate.get_POW().getASID()): + print "Dropping ROA %r" % candidate + candidate.disposition = "delete" + + def add(self, candidates): + assert self.asn is not None + for candidate in candidates: + if isinstance(candidate, IncomingX509) and self.constraint.mentioned_resources <= candidate.resources: + print "Should add ROA %s %s\nunder candidate %s (depth %s resources %s)" % ( + self.asn, self.constraint.prefixes, candidate.subject_name, candidate.depth, candidate.resources) + break + +class ConstrainedGBR(ConstrainedObject): + + def __init__(self, constraint, y): + self.constraint = constraint + self.vcard = y + + def drop(self, candidates): + for candidate in candidates: + if isinstance(candidate, IncomingX509) and self.constraint.mentioned_resources == candidate.resources: + print "Dropping GBRs directly under %r" % candidate + for gbr in candidate.find_children("gbr"): + print "Dropping GBR %r" % gbr + gbr.disposition = "delete" + + def 
add(self, candidates): + assert self.vcard is not None + for candidate in candidates: + if isinstance(candidate, IncomingX509) and self.constraint.mentioned_resources <= candidate.resources: + print "Should add GBR\n%s\nunder candidate %s (depth %s resources %s)" % ( + "\n".join((" " * 4) + line for line in self.vcard.splitlines()), + candidate.subject_name, candidate.depth, candidate.resources) + break + +class ConstrainedRTR(ConstrainedObject): + + def __init__(self, constraint, y): + self.constraint = constraint + self.key = y["key"] if y is not None else None + self.subject = y["subject"] if y is not None else None + + def add(self, candidates): + raise NotImplementedError + + def drop(self, candidates): + for candidate in candidates: + if isinstance(candidate, IncomingX509) and not candidate.is_ca and \ + self.constraint.mentioned_resources == candidate.resources and \ + (self.subject is None or candidate.getSubject() == self.subject): + print "Dropping RTR certificate %r" % candidate + candidate.disposition = "delete" + +class Constraint(object): + + dispatch = dict(roa = ConstrainedROA, + gbr = ConstrainedGBR, + rtr = ConstrainedRTR) + + def __init__(self, y): + self.y = y # Mostly for debugging. I think. 
+ self.prefixes = rpki.resource_set.resource_bag.from_str(str(y.get("prefix", ""))) + self.asns = rpki.resource_set.resource_bag.from_str(str(y.get("asn", ""))) + self.init_drops(y.get("drop", ())) + self.init_adds( y.get("add", ())) + + def init_drops(self, drops): + if drops == "all": + self.drops = tuple(d(self, None) for d in self.dispatch.itervalues()) + else: + dd = [] + for d in (drops if isinstance(drops, (list, tuple)) else [drops]): + if isinstance(d, str): + dd.append(self.dispatch[d[:-1]](self, None)) + elif isinstance(d, dict) and len(d) == 1: + dd.append(self.dispatch[d.keys()[0]](self, d.values()[0])) + else: + raise ValueError("Unexpected drop clause " + repr(drops)) + self.drops = tuple(dd) + + def init_adds(self, adds): + if not all(isinstance(a, dict) and len(a) == 1 for a in adds): + raise ValueError("Expected list of single-entry mappings, got " + repr(adds)) + self.adds = tuple(self.dispatch[a.keys()[0]](self, a.values()[0]) for a in adds) + + def drop(self, candidates): + for d in self.drops: + d.drop(candidates) + + def add(self, candidates): + for a in self.adds: + a.add(candidates) + + def __repr__(self): + return "<%s:%s %r>" % (self.__class__.__module__, self.__class__.__name__, self.y) + + @property + def mentioned_resources(self): + return self.prefixes | self.asns + + +class BaseObject(object): + """ + Mixin to add some SQL-related methods to classes derived from + rpki.x509.DER_object. 
+ """ + + _rpdb = None + _rowid = None + _fn2 = None + _fn2map = None + _uri = None + _subject_id = None + _issuer_id = None + + @property + def rowid(self): + return self._rowid + + @property + def para_resources(self): + return self.resources if self.para_obj is None else self.para_obj.resources + + @property + def fn2(self): + return self._fn2 + + @property + def uri(self): + return self._uri + + @classmethod + def setfn2map(cls, **map): + cls._fn2map = map + for k, v in map.iteritems(): + v._fn2 = k + + @classmethod + def create(cls, rpdb, rowid, fn2, der, uri, subject_id, issuer_id): + self = cls._fn2map[fn2]() + if der is not None: + self.set(DER = der) + self._rpdb = rpdb + self._rowid = rowid + self._uri = uri + self._subject_id = subject_id + self._issuer_id = issuer_id + return self + + @property + def subject_id(self): + return self._subject_id + + @property + def subject_name(self): + return self._rpdb.find_keyname_by_id(self._subject_id)[0] + + @property + def issuer_id(self): + return self._issuer_id + + @property + def issuer_name(self): + return self._rpdb.find_keyname_by_id(self._subject_id)[0] + + +class IncomingObject(BaseObject): + + _depth = None + _is_ca = False + _disposition = None + + @property + def para_obj(self): + if getattr(self, "_para_id", None) is None: + self._rpdb.cur.execute("SELECT replacement FROM incoming WHERE id = ?", (self.rowid,)) + self._para_id = self._rpdb.cur.fetchone()[0] + return self._rpdb.find_outgoing_by_id(self._para_id) + + @para_obj.setter + def para_obj(self, value): + if value is None: + self._rpdb.cur.execute("DELETE FROM outgoing WHERE id IN (SELECT replacement FROM incoming WHERE id = ?)", + (self.rowid,)) + try: + del self._para_id + except AttributeError: + pass + else: + assert isinstance(value.rowid, int) + self._rpdb.cur.execute("UPDATE incoming SET replacement = ? 
WHERE id = ?", (value.rowid, self.rowid)) + self._para_id = value.rowid + + @property + def disposition(self): + if self._disposition is None: + self._disposition = self._rpdb.cur.execute("SELECT disposition FROM incoming " + "WHERE id = ?", (self.rowid,)).fetchone()[0] + return self._disposition + + @disposition.setter + def disposition(self, value): + self._rpdb.cur.execute("UPDATE incoming SET disposition = ? WHERE id = ?", (value, self.rowid)) + self._disposition = value + + @classmethod + def fromFile(cls, fn): + return cls._fn2map[os.path.splitext(fn)[1][1:]](DER_file = fn) + + @classmethod + def create(cls, rpdb, rowid, fn2, der, uri, subject_id, issuer_id, depth = None, is_ca = False): + assert der is not None + self = super(IncomingObject, cls).create(rpdb, rowid, fn2, der, uri, subject_id, issuer_id) + self._depth = depth + self._is_ca = is_ca + return self + + @property + def depth(self): + return self._depth + + @property + def is_ca(self): + return self._is_ca + + @property + def issuer(self): + if self._issuer_id is None or self._issuer_id == self._subject_id: + return None + return self._rpdb.find_incoming_by_id(self._issuer_id) + + +class OutgoingObject(BaseObject): + + @property + def orig_obj(self): + if getattr(self, "_orig_id", None) is None: + self._rpdb.cur.execute("SELECT id FROM incoming WHERE replacement = ?", (self.rowid,)) + r = self._rpdb.cur.fetchone() + self._orig_id = None if r is None else r[0] + return self._rpdb.find_incoming_by_id(self._orig_id) + + +class BaseX509(rpki.x509.X509): + + @property + def resources(self): + r = self.get_3779resources() + r.valid_until = None + return r + + def find_children(self, fn2 = None): + return self._rpdb._find_results(fn2, "WHERE issuer = ?", [self.subject_id]) + + +class BaseCRL(rpki.x509.CRL): + + @property + def resources(self): + return None + + +class CommonCMS(object): + + @property + def resources(self): + r = rpki.x509.X509(POW = self.get_POW().certs()[0]).get_3779resources() + 
r.valid_until = None + return r + + +class BaseSignedManifest (rpki.x509.SignedManifest, CommonCMS): pass +class BaseROA (rpki.x509.ROA, CommonCMS): pass +class BaseGhostbuster (rpki.x509.Ghostbuster, CommonCMS): pass + +class IncomingX509 (BaseX509, IncomingObject): pass +class IncomingCRL (BaseCRL, IncomingObject): pass +class IncomingSignedManifest (BaseSignedManifest, IncomingObject): pass +class IncomingROA (BaseROA, IncomingObject): pass +class IncomingGhostbuster (BaseGhostbuster, IncomingObject): pass + +class OutgoingX509 (BaseX509, OutgoingObject): pass +class OutgoingCRL (BaseCRL, OutgoingObject): pass +class OutgoingSignedManifest (BaseSignedManifest, OutgoingObject): pass +class OutgoingROA (BaseROA, OutgoingObject): pass +class OutgoingGhostbuster (BaseGhostbuster, OutgoingObject): pass + +IncomingObject.setfn2map(cer = IncomingX509, + crl = IncomingCRL, + mft = IncomingSignedManifest, + roa = IncomingROA, + gbr = IncomingGhostbuster) + +OutgoingObject.setfn2map(cer = OutgoingX509, + crl = OutgoingCRL, + mft = OutgoingSignedManifest, + roa = OutgoingROA, + gbr = OutgoingGhostbuster) + + +class RPDB(object): + """ + Relying party database. 
+ """ + + def __init__(self, db_name): + + try: + os.unlink(db_name) + except: + pass + + self.db = sqlite3.connect(db_name, detect_types = sqlite3.PARSE_DECLTYPES) + self.db.text_factory = str + self.cur = self.db.cursor() + + self.incoming_cache = weakref.WeakValueDictionary() + self.outgoing_cache = weakref.WeakValueDictionary() + + self.cur.executescript(''' + PRAGMA foreign_keys = on; + + CREATE TABLE keyname ( + id INTEGER PRIMARY KEY NOT NULL, + name TEXT NOT NULL, + keyid BLOB NOT NULL, + UNIQUE (name, keyid)); + + CREATE TABLE incoming ( + id INTEGER PRIMARY KEY NOT NULL, + der BLOB NOT NULL, + fn2 TEXT NOT NULL + CHECK (fn2 IN ('cer', 'crl', 'mft', 'roa', 'gbr')), + uri TEXT NOT NULL, + depth INTEGER, + is_ca BOOLEAN NOT NULL DEFAULT 0, + disposition TEXT NOT NULL + DEFAULT 'keep' + CHECK (disposition IN ('keep', 'delete', 'replace')), + subject INTEGER + REFERENCES keyname(id) + ON DELETE RESTRICT + ON UPDATE RESTRICT, + issuer INTEGER NOT NULL + REFERENCES keyname(id) + ON DELETE RESTRICT + ON UPDATE RESTRICT, + replacement INTEGER + REFERENCES outgoing(id) + ON DELETE SET NULL + ON UPDATE SET NULL, + UNIQUE (der), + UNIQUE (subject, issuer), + CHECK ((subject IS NULL) == (fn2 == 'crl'))); + + CREATE TABLE outgoing ( + id INTEGER PRIMARY KEY NOT NULL, + der BLOB, + key BLOB, + fn2 TEXT NOT NULL + CHECK (fn2 IN ('cer', 'crl', 'mft', 'roa', 'gbr')), + uri TEXT NOT NULL, + subject INTEGER + REFERENCES keyname(id) + ON DELETE RESTRICT + ON UPDATE RESTRICT, + issuer INTEGER NOT NULL + REFERENCES keyname(id) + ON DELETE RESTRICT + ON UPDATE RESTRICT, + UNIQUE (subject, issuer), + CHECK ((key IS NULL) == (fn2 == 'crl')), + CHECK ((subject IS NULL) == (fn2 == 'crl'))); + + CREATE TABLE range ( + id INTEGER NOT NULL + REFERENCES incoming(id) + ON DELETE CASCADE + ON UPDATE CASCADE, + min RangeVal NOT NULL, + max RangeVal NOT NULL, + UNIQUE (id, min, max)); + + ''') + + + def load(self, rcynic_input, spinner = 100): + + start = rpki.sundial.now() + nobj = 0 + + 
for root, dirs, files in os.walk(rcynic_input): + for fn in files: + fn = os.path.join(root, fn) + + try: + obj = IncomingObject.fromFile(fn) + except: + if spinner: + sys.stderr.write("\r") + sys.stderr.write("Couldn't read %s, skipping\n" % fn) + continue + + if spinner and nobj % spinner == 0: + sys.stderr.write("\r%s %d %s..." % ("|\\-/"[(nobj/spinner) & 3], nobj, rpki.sundial.now() - start)) + + nobj += 1 + + if obj.fn2 == "crl": + ski = None + aki = buffer(obj.get_AKI()) + cer = None + bag = None + issuer = obj.getIssuer() + subject = None + is_ca = False + + else: + if obj.fn2 == "cer": + cer = obj + else: + cer = rpki.x509.X509(POW = obj.get_POW().certs()[0]) + issuer = cer.getIssuer() + subject = cer.getSubject() + ski = buffer(cer.get_SKI()) + aki = cer.get_AKI() + if aki is None: + assert subject == issuer + aki = ski + else: + aki = buffer(aki) + bag = cer.get_3779resources() + is_ca = cer.is_CA() + + der = buffer(obj.get_DER()) + uri = "rsync://" + fn[len(rcynic_input) + 1:] + + self.cur.execute("SELECT id FROM incoming WHERE der = ?", (der,)) + r = self.cur.fetchone() + + if r is not None: + rowid = r[0] + + else: + subject_id = None if ski is None else self.find_keyname(subject, ski) + issuer_id = self.find_keyname(issuer, aki) + + self.cur.execute("INSERT INTO incoming (der, fn2, subject, issuer, uri, is_ca) " + "VALUES (?, ?, ?, ?, ?, ?)", + (der, obj.fn2, subject_id, issuer_id, uri, is_ca)) + rowid = self.cur.lastrowid + + if bag is not None: + for rset in (bag.asn, bag.v4, bag.v6): + if rset is not None: + self.cur.executemany("REPLACE INTO range (id, min, max) VALUES (?, ?, ?)", + ((rowid, i.min, i.max) for i in rset)) + + if spinner: + sys.stderr.write("\r= %d objects in %s.\n" % (nobj, rpki.sundial.now() - start)) + + self.cur.execute("UPDATE incoming SET depth = 0 WHERE subject = issuer") + + for depth in xrange(1, 500): + + self.cur.execute("SELECT COUNT(*) FROM incoming WHERE depth IS NULL") + if self.cur.fetchone()[0] == 0: + break + + if 
spinner: + sys.stderr.write("\rSetting depth %d..." % depth) + + self.cur.execute(""" + UPDATE incoming SET depth = ? + WHERE depth IS NULL + AND issuer IN (SELECT subject FROM incoming WHERE depth = ?) + """, + (depth, depth - 1)) + + else: + if spinner: + sys.stderr.write("\rSetting depth %d is absurd, giving up, " % depth) + + if spinner: + sys.stderr.write("\nCommitting...") + + self.db.commit() + + if spinner: + sys.stderr.write("done.\n") + + + def add_para(self, obj, resources, serial, ltacer, ltasia, ltaaia, ltamft, ltacrl, ltakey): + + assert isinstance(obj, IncomingX509) + + if obj.para_obj is not None: + resources &= obj.para_obj.resources + + obj.para_obj = None + + if not resources: + return + + pow = obj.get_POW() + + x = rpki.POW.X509() + + x.setVersion( pow.getVersion()) + x.setSubject( pow.getSubject()) + x.setNotBefore( pow.getNotBefore()) + x.setNotAfter( pow.getNotAfter()) + x.setPublicKey( pow.getPublicKey()) + x.setSKI( pow.getSKI()) + x.setBasicConstraints( pow.getBasicConstraints()) + x.setKeyUsage( pow.getKeyUsage()) + x.setCertificatePolicies( pow.getCertificatePolicies()) + x.setSIA( *pow.getSIA()) + + x.setIssuer( ltacer.get_POW().getIssuer()) + x.setAKI( ltacer.get_POW().getSKI()) + x.setAIA( (ltaaia,)) + x.setCRLDP( (ltacrl,)) + + x.setSerial( serial()) + x.setRFC3779( + asn = ((r.min, r.max) for r in resources.asn), + ipv4 = ((r.min, r.max) for r in resources.v4), + ipv6 = ((r.min, r.max) for r in resources.v6)) + + x.sign(ltakey.get_POW(), rpki.POW.SHA256_DIGEST) + cer = OutgoingX509(POW = x) + + ski = buffer(cer.get_SKI()) + aki = buffer(cer.get_AKI()) + bag = cer.get_3779resources() + issuer = cer.getIssuer() + subject = cer.getSubject() + der = buffer(cer.get_DER()) + uri = ltasia + cer.gSKI() + ".cer" + + # This will want to change when we start generating replacement keys for everything. + # This should really be a keypair, not just a public key, same comment. 
+ # + key = buffer(pow.getPublicKey().derWritePublic()) + + subject_id = self.find_keyname(subject, ski) + issuer_id = self.find_keyname(issuer, aki) + + self.cur.execute("INSERT INTO outgoing (der, fn2, subject, issuer, uri, key) " + "VALUES (?, 'cer', ?, ?, ?, ?)", + (der, subject_id, issuer_id, uri, key)) + rowid = self.cur.lastrowid + self.cur.execute("UPDATE incoming SET replacement = ? WHERE id = ?", + (rowid, obj.rowid)) + + # Fix up _orig_id and _para_id here? Maybe later. + + #self.db.commit() + + + def dump_paras(self, rcynic_output): + shutil.rmtree(rcynic_output, ignore_errors = True) + rsync = "rsync://" + for der, uri in self.cur.execute("SELECT der, uri FROM outgoing"): + assert uri.startswith(rsync) + fn = os.path.join(rcynic_output, uri[len(rsync):]) + dn = os.path.dirname(fn) + if not os.path.exists(dn): + os.makedirs(dn) + with open(fn, "wb") as f: + #print ">> Writing", f.name + f.write(der) + + + def find_keyname(self, name, keyid): + keys = (name, buffer(keyid)) + self.cur.execute("SELECT id FROM keyname WHERE name = ? 
AND keyid = ?", keys) + result = self.cur.fetchone() + if result is None: + self.cur.execute("INSERT INTO keyname (name, keyid) VALUES (?, ?)", keys) + result = self.cur.lastrowid + else: + result = result[0] + return result + + + def find_keyname_by_id(self, rowid): + self.cur.execute("SELECT name, keyid FROM keyname WHERE id = ?", (rowid,)) + result = self.cur.fetchone() + return (None, None) if result is None else result + + + def find_incoming_by_id(self, rowid): + if rowid is None: + return None + if rowid in self.incoming_cache: + return self.incoming_cache[rowid] + r = self._find_results(None, "WHERE id = ?", [rowid]) + assert len(r) < 2 + return r[0] if r else None + + + def find_outgoing_by_id(self, rowid): + if rowid is None: + return None + if rowid in self.outgoing_cache: + return self.outgoing_cache[rowid] + self.cur.execute("SELECT fn2, der, key, uri, subject, issuer FROM outgoing WHERE id = ?", (rowid,)) + r = self.cur.fetchone() + if r is None: + return None + fn2, der, key, uri, subject_id, issuer_id = r + obj = OutgoingObject.create(rpdb = self, rowid = rowid, fn2 = fn2, der = der, uri = uri, + subject_id = subject_id, issuer_id = issuer_id) + self.outgoing_cache[rowid] = obj + return obj + + + def find_by_ski_or_uri(self, ski, uri): + if not ski and not uri: + return [] + j = "" + w = [] + a = [] + if ski: + j = "JOIN keyname ON incoming.subject = keyname.id" + w.append("keyname.keyid = ?") + a.append(buffer(ski)) + if uri: + w.append("incoming.uri = ?") + a.append(uri) + return self._find_results(None, "%s WHERE %s" % (j, " AND ".join(w)), a) + + + # It's easiest to understand overlap conditions by understanding + # non-overlap then inverting and applying De Morgan's law. + # Ranges A and B do not overlap if: A.min > B.max or B.min > A.max; + # therefore A and B do overlap if: A.min <= B.max and B.min <= A.max.
+ + def find_by_range(self, range_min, range_max = None, fn2 = None): + if range_max is None: + range_max = range_min + if isinstance(range_min, (str, unicode)): + range_min = long(range_min) if range_min.isdigit() else rpki.POW.IPAddress(range_min) + if isinstance(range_max, (str, unicode)): + range_max = long(range_max) if range_max.isdigit() else rpki.POW.IPAddress(range_max) + assert isinstance(range_min, (int, long, rpki.POW.IPAddress)) + assert isinstance(range_max, (int, long, rpki.POW.IPAddress)) + return self._find_results(fn2, + "JOIN range ON incoming.id = range.id " + "WHERE ? <= range.max AND ? >= range.min", + [range_min, range_max]) + + + def find_by_resource_bag(self, bag, fn2 = None): + assert bag.asn or bag.v4 or bag.v6 + qset = [] + aset = [] + for rset in (bag.asn, bag.v4, bag.v6): + if rset: + for r in rset: + qset.append("(? <= max AND ? >= min)") + aset.append(r.min) + aset.append(r.max) + return self._find_results( + fn2, + """ + JOIN range ON incoming.id = range.id + WHERE + """ + (" OR ".join(qset)), + aset) + + + def _find_results(self, fn2, query, args = None): + if args is None: + args = [] + if fn2 is not None: + query += " AND fn2 = ?" 
+ args.append(fn2) + results = [] + for rowid, fn2, der, uri, subject_id, issuer_id, depth, is_ca in self.cur.execute( + ''' + SELECT DISTINCT + incoming.id, incoming.fn2, + incoming.der, incoming.uri, + incoming.subject, incoming.issuer, + incoming.depth, incoming.is_ca + FROM incoming + ''' + query, args): + if rowid in self.incoming_cache: + obj = self.incoming_cache[rowid] + assert obj.rowid == rowid + else: + obj = IncomingObject.create(rpdb = self, rowid = rowid, fn2 = fn2, der = der, uri = uri, + subject_id = subject_id, issuer_id = issuer_id, depth = depth, + is_ca = is_ca) + self.incoming_cache[rowid] = obj + results.append(obj) + return results + + + def commit(self): + self.db.commit() + + + def close(self): + self.commit() + self.cur.close() + self.db.close() + +if __name__ == "__main__": + #profile = None + profile = "rcynic-lta.prof" + if profile: + import cProfile + prof = cProfile.Profile() + try: + prof.runcall(main) + finally: + prof.dump_stats(profile) + sys.stderr.write("Dumped profile data to %s\n" % profile) + else: + main() + diff --git a/potpourri/rcynic-lta.yaml b/potpourri/rcynic-lta.yaml new file mode 100644 index 00000000..ab17a56c --- /dev/null +++ b/potpourri/rcynic-lta.yaml @@ -0,0 +1,69 @@ +db-name: + /u/sra/rpki/subvert-rpki.hactrn.net/trunk/scripts/rcynic-lta.db + +rcynic-input: + /u/sra/rpki/subvert-rpki.hactrn.net/trunk/rcynic/rcynic-data/authenticated + +rcynic-output: + /u/sra/rpki/subvert-rpki.hactrn.net/trunk/rcynic/rcynic-data/lta-unauthenticated + +tal-directory: + /u/sra/rpki/subvert-rpki.hactrn.net/trunk/rcynic/sample-trust-anchors + +keyfile: + /u/sra/rpki/subvert-rpki.hactrn.net/trunk/scripts/rcynic-lta.key + +common: + + - &VCARD-1 | + BEGIN:VCARD + VERSION:4.0 + FN:R0 + ORG:Organizational Entity + ADR;TYPE=WORK:;;42 Twisty Passage;Deep Cavern;WA;98666;U.S.A. 
+ TEL;TYPE=VOICE,TEXT,WORK;VALUE=uri:tel:+1-666-555-1212 + TEL;TYPE=FAX,WORK;VALUE=uri:tel:+1-666-555-1213 + EMAIL:human@example.com + END:VCARD + - &GBR-1 { gbr: *VCARD-1 } + + - &VCARD-2 | + BEGIN:VCARD + VERSION:4.0 + ORG:Epilogue Technology Corporation + EMAIL:sra@epilogue.com + END:VCARD + - &GBR-2 { gbr: *VCARD-2 } + + - &ROA-666 { roa: { asn: 666 } } + +constraints: + + # Need something for a drop test, sorry Randy + - prefix: 147.28.224.0/19 + drop: roas + + - prefix: 10.0.0.0/8 + add: [ { roa: { asn: 666, maxlen: 16 }}, *GBR-1 ] + + - prefix: 192.168.0.0/16 + drop: all + + - asn: 666 + add: [ *GBR-1 ] + + - prefix: 128.224.0.0/16 + drop: all + add: [ *GBR-2 ] + + - prefix: 128.224.1.0/24 + add: [ *GBR-2, *ROA-666 ] + + - prefix: 128.224.2.0/24 + add: [ *GBR-2, *ROA-666 ] + + - prefix: 149.20.0.0/16 + add: [ *ROA-666 ] + + - prefix: 2001:4f8:3:d::/64 + add: [ *ROA-666 ] diff --git a/potpourri/repo0-testbed-daily b/potpourri/repo0-testbed-daily new file mode 100755 index 00000000..576464c4 --- /dev/null +++ b/potpourri/repo0-testbed-daily @@ -0,0 +1,19 @@ +#!/bin/sh - +# $Id: daily 602 2013-06-02 18:00:25Z sra $ + +home=/home/sra/rpki.testbed + +exec >>$home/logs/daily.log 2>&1 + +set -x + +date + +cd $home + +/usr/local/sbin/rpkic update_bpki + +/usr/local/bin/svn update +/usr/local/bin/svn add --force . +/usr/local/bin/svn ci --message 'Daily auto update' +/usr/local/bin/svn update diff --git a/potpourri/repo0-testbed-monthly b/potpourri/repo0-testbed-monthly new file mode 100755 index 00000000..b7ada110 --- /dev/null +++ b/potpourri/repo0-testbed-monthly @@ -0,0 +1,22 @@ +#!/bin/sh - +# $Id: monthly 602 2013-06-02 18:00:25Z sra $ + +home=/home/sra/rpki.testbed + +exec >>$home/logs/monthly.log 2>&1 + +set -x + +date + +cd $home + +for identity in iana afrinic apnic arin lacnic legacy ripe +do + /usr/local/sbin/rpkic -i $identity renew_all_children +done + +/usr/local/bin/svn update +/usr/local/bin/svn add --force . 
+/usr/local/bin/svn ci --message 'Monthly auto update' +/usr/local/bin/svn update diff --git a/potpourri/repo0-testbed-weekly b/potpourri/repo0-testbed-weekly new file mode 100755 index 00000000..6f1f8ead --- /dev/null +++ b/potpourri/repo0-testbed-weekly @@ -0,0 +1,96 @@ +#!/bin/sh - +# $Id: weekly 756 2013-11-21 22:54:28Z sra $ +# +# Run weekly periodic IR back-end tasks. + +home=/home/sra/rpki.testbed + +top=/home/sra/subvert-rpki.hactrn.net/trunk + +exec >>$home/logs/weekly.log 2>&1 +set -x +date + +export OPENSSL_CONF=/dev/null +for openssl in $top/openssl/openssl/apps/openssl /usr/local/bin/openssl +do + test -x $openssl && break +done + +## Download all input files. See the fetch script for the current +## list of files, but for a long time now it's been: +## +## http://www.iana.org/assignments/as-numbers/as-numbers.xml +## http://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.xml +## http://www.iana.org/assignments/ipv6-unicast-address-assignments/ipv6-unicast-address-assignments.xml +## ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.aut-num.gz +## ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inetnum.gz +## ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inet6num.gz +## ftp://ftp.ripe.net/pub/stats/ripencc/membership/alloclist.txt +## ftp://ftp.apnic.net/public/stats/apnic/delegated-apnic-extended-latest +## +## Along with an ARIN bulkwhois dump which we get under a research NDA +## and retrieve via a mechanism that I'm not going to describe here. + +/bin/sh -x $home/scripts/fetch + +## Process ARIN data first -- we need erx.csv, which comes from ARIN. + +cd $home/arin +/usr/local/bin/unzip -p arin_db.zip arin_db.xml | +/usr/local/bin/python $top/scripts/arin-to-csv.py +/usr/local/bin/python $top/scripts/translate-handles.py asns.csv prefixes.csv + +## Process IANA data, now that we have erx.csv. + +cd $home/iana +/usr/local/bin/python $top/scripts/iana-to-csv.py + +## Process APNIC data. 
+ +cd $home/apnic +/usr/local/bin/python $top/scripts/apnic-to-csv.py +/usr/local/bin/python $top/scripts/translate-handles.py asns.csv prefixes.csv + +## Process RIPE data. RIPE's database is a horror, the less said +## about it the better. +## +## Somewhere along the line we seem to have stopped even trying to +## generate the ASN database for RIPE, not sure why. I've restored it +## here, guess we'll find out if there was a reason why we disabled it. + +cd $home/ripe +/usr/local/bin/python $top/scripts/ripe-asns-to-csv.py +/usr/bin/awk -f $top/scripts/ripe-prefixes-to-csv.awk alloclist.txt | +/bin/cat extra-prefixes.csv - | +/usr/bin/sort -uo prefixes.csv +/usr/local/bin/python $top/scripts/translate-handles.py asns.csv prefixes.csv +/usr/bin/sort -uo prefixes.csv prefixes.csv + +## Not yet doing anything for AfriNIC, LACNIC, or Legacy. + +## Generate root certificate. This is a lot simpler now that we're pretending to be the One True Root. + +cd $home/root +$openssl req -new -x509 -days 90 -set_serial $(/bin/date -u +%s) -config root.conf -out root.cer -key root.key -outform DER +/bin/cp -fp root.cer root.cer.dup && +/bin/mv -f root.cer.dup /home/pubd/publication/root.cer + +## Whack all the files into subversion. + +cd $home +/usr/local/bin/svn update +/usr/local/bin/svn add --force . +/usr/local/bin/svn ci --message 'Weekly auto update' +/usr/local/bin/svn update + +## Feed all the new data into the IRDB. 
+ +for entity in iana afrinic apnic arin lacnic legacy ripe +do + for resource in asns prefixes + do + /bin/test -r $entity/$resource.csv && + /usr/local/sbin/rpkic --identity $entity load_$resource $entity/$resource.csv + done +done diff --git a/potpourri/ripe-asns-to-csv.py b/potpourri/ripe-asns-to-csv.py new file mode 100644 index 00000000..50251ce8 --- /dev/null +++ b/potpourri/ripe-asns-to-csv.py @@ -0,0 +1,108 @@ +# $Id$ +# +# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Parse a WHOIS research dump and write out (just) the RPKI-relevant +fields in myrpki-format CSV syntax. + +NB: The input data for this script is publicly available via FTP, but +you'll have to fetch the data from RIPE yourself, and be sure to see +the terms and conditions referenced by the data file header comments. 
+""" + +import gzip +from rpki.csv_utils import csv_writer + +class Handle(dict): + + want_tags = () + + debug = False + + def set(self, tag, val): + if tag in self.want_tags: + self[tag] = "".join(val.split(" ")) + + def check(self): + for tag in self.want_tags: + if not tag in self: + return False + if self.debug: + self.log() + return True + + def __repr__(self): + return "<%s %s>" % (self.__class__.__name__, + " ".join("%s:%s" % (tag, self.get(tag, "?")) + for tag in self.want_tags)) + + def log(self): + print repr(self) + + def finish(self, ctx): + self.check() + +class aut_num(Handle): + want_tags = ("aut-num", "mnt-by", "as-name") + + def set(self, tag, val): + if tag == "aut-num" and val.startswith("AS"): + val = val[2:] + Handle.set(self, tag, val) + + def finish(self, ctx): + if self.check(): + ctx.asns.writerow((self["mnt-by"], self["aut-num"])) + +class main(object): + + types = dict((x.want_tags[0], x) for x in (aut_num,)) + + + def finish_statement(self, done): + if self.statement: + tag, sep, val = self.statement.partition(":") + assert sep, "Couldn't find separator in %r" % self.statement + tag = tag.strip().lower() + val = val.strip().upper() + if self.cur is None: + self.cur = self.types[tag]() if tag in self.types else False + if self.cur is not False: + self.cur.set(tag, val) + if done and self.cur: + self.cur.finish(self) + self.cur = None + + filenames = ("ripe.db.aut-num.gz",) + + def __init__(self): + self.asns = csv_writer("asns.csv") + for fn in self.filenames: + f = gzip.open(fn) + self.statement = "" + self.cur = None + for line in f: + line = line.expandtabs().partition("#")[0].rstrip("\n") + if line and not line[0].isalpha(): + self.statement += line[1:] if line[0] == "+" else line + else: + self.finish_statement(not line) + self.statement = line + self.finish_statement(True) + f.close() + self.asns.close() + +main() diff --git a/potpourri/ripe-prefixes-to-csv.awk b/potpourri/ripe-prefixes-to-csv.awk new file mode 100644 index 
00000000..37327484 --- /dev/null +++ b/potpourri/ripe-prefixes-to-csv.awk @@ -0,0 +1,37 @@ +#!/usr/bin/awk -f +# $Id$ + +# ftp -pa ftp://ftp.ripe.net/pub/stats/ripencc/membership/alloclist.txt + +function done() { + for (i = 1; i <= n_allocs; i++) + print handle "\t" alloc[i]; + n_allocs = 0; +} + +/^[a-z]/ { + done(); + handle = $0; + nr = NR; +} + +NR == nr + 1 { + name = $0; +} + +NR > nr + 2 && NF > 1 && $2 !~ /:/ { + split($2, a, "/"); + len = a[2]; + split(a[1], a, /[.]/); + for (i = length(a); i < 4; i++) + a[i+1] = 0; + alloc[++n_allocs] = sprintf("%d.%d.%d.%d/%d", a[1], a[2], a[3], a[4], len); +} + +NR > nr + 2 && NF > 1 && $2 ~ /:/ { + alloc[++n_allocs] = $2; +} + +END { + done(); +} diff --git a/potpourri/ripe-to-csv.awk b/potpourri/ripe-to-csv.awk new file mode 100644 index 00000000..5325574f --- /dev/null +++ b/potpourri/ripe-to-csv.awk @@ -0,0 +1,124 @@ +#!/usr/bin/awk -f + +# Parse a WHOIS research dump and write out (just) the RPKI-relevant +# fields in myrpki-format CSV syntax. +# +# Unfortunately, unlike the ARIN and APNIC databases, the RIPE database +# doesn't really have any useful concept of an organizational handle. +# More precisely, while it has handles out the wazoo, none of them are +# useful as a reliable grouping mechanism for tracking which set of +# resources are held by a particular organization. So, instead of being +# able to track all of an organization's resources with a single handle +# as we can in the ARIN and APNIC databases, the best we can do with the +# RIPE database is to track individual resources, each with its own +# resource handle. Well, for prefixes -- ASN entries behave more like +# in the ARIN and APNIC databases. +# +# This is an AWK script rather than a Python script because it is a +# fairly simple stream parser that has to process a ridiculous amount +# of text. AWK turns out to be significantly faster for this. 
+# +# NB: The input data for this script is publicly available via FTP, but +# you'll have to fetch the data from RIPE yourself, and be sure to see +# the terms and conditions referenced by the data file header comments. +# +# $Id$ +# +# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +# On input, ":" is the most useful delimiter +# On output, we want tab-delimited text. +BEGIN { + FS = "[ \t]*:"; + OFS = "\t"; +} + +# Clean up trailing whitespace. +{ + sub(/[ \t]+$/, ""); +} + +# Continuation line: strip comment, if any, then append value, if any, +# to what we had from previous line(s). +/^[^A-Z]/ { + sub(/[ \t]*#.*$/, ""); + if (NF) + val = val $0; + next; +} + +# Anything other than line continuation terminates the previous line, +# so if we were working on a line, we're done with it now, process it. +key { + do_line(); +} + +# Non-empty line and we have no tag, this must be start of a new block. +NF && !tag { + tag = $1; +} + +# One of the tags we care about, clean up and save the data. +/^(AS-NAME|AUT-NUM|INET6NUM|INETNUM|MNT-BY|NETNAME|STATUS):/ { + key = $1; + sub(/^[^ \t]+:/, ""); + sub(/[ \t]*#.*$/, ""); + val = $0; +} + +# Blank line and we have something, process it. 
+!NF && tag { + do_block(); +} + +# End of file, process final data, if any. +END { + do_line(); + do_block(); +} + +# Handle one line, after line icky RPSL continuation. +function do_line() { + gsub(/[ \t]/, "", val); + if (key && val) + tags[key] = val; + key = ""; + val = ""; +} + +# Dispatch to handle known block types, then clean up so we can start +# a new block. +function do_block() { + if (tag == "INETNUM" || tag == "INET6NUM") + do_prefix(); + else if (tag == "AUT-NUM") + do_asn(); + delete tags; + tag = ""; +} + +# Handle an AUT-NUM block: extract the ASN, use MNT-BY as the handle. +function do_asn() { + sub(/^AS/, "", tags[tag]); + if (tags["MNT-BY"] && tags[tag]) + print tags["MNT-BY"], tags[tag] >"asns.csv"; +} + +# Handle an INETNUM or INET6NUM block: check for the status values we +# care about, use NETNAME as the handle. +function do_prefix() { + if (tags["STATUS"] ~ /^ASSIGNED(P[AI])$/ && tags["NETNAME"] && tags[tag]) + print tags["NETNAME"], tags[tag] >"prefixes.csv"; +} diff --git a/potpourri/ripe-to-csv.py b/potpourri/ripe-to-csv.py new file mode 100644 index 00000000..b864345b --- /dev/null +++ b/potpourri/ripe-to-csv.py @@ -0,0 +1,138 @@ +# $Id$ +# +# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +""" +Parse a WHOIS research dump and write out (just) the RPKI-relevant +fields in myrpki-format CSV syntax. + +Unfortunately, unlike the ARIN and APNIC databases, the RIPE database +doesn't really have any useful concept of an organizational handle. +More precisely, while it has handles out the wazoo, none of them are +useful as a reliable grouping mechanism for tracking which set of +resources are held by a particular organization. So, instead of being +able to track all of an organization's resources with a single handle +as we can in the ARIN and APNIC databases, the best we can do with the +RIPE database is to track individual resources, each with its own +resource handle. Well, for prefixes -- ASN entries behave more like +in the ARIN and APNIC databases. + +Feh. + +NB: The input data for this script is publicly available via FTP, but +you'll have to fetch the data from RIPE yourself, and be sure to see +the terms and conditions referenced by the data file header comments. +""" + +import gzip +from rpki.csv_utils import csv_writer + +class Handle(dict): + + want_tags = () + + want_status = ("ASSIGNED", "ASSIGNEDPA", "ASSIGNEDPI") + + debug = False + + def set(self, tag, val): + if tag in self.want_tags: + self[tag] = "".join(val.split(" ")) + + def check(self): + for tag in self.want_tags: + if not tag in self: + return False + if self.debug: + self.log() + return True + + def __repr__(self): + return "<%s %s>" % (self.__class__.__name__, + " ".join("%s:%s" % (tag, self.get(tag, "?")) + for tag in self.want_tags)) + + def log(self): + print repr(self) + + def finish(self, ctx): + self.check() + +class aut_num(Handle): + want_tags = ("aut-num", "mnt-by") # "as-name" + + def set(self, tag, val): + if tag == "aut-num" and val.startswith("AS"): + val = val[2:] + Handle.set(self, tag, val) + + def finish(self, ctx): + if self.check(): + ctx.asns.writerow((self["mnt-by"], self["aut-num"])) + +class inetnum(Handle): + want_tags = ("inetnum", "netname", "status") 
# "mnt-by" + + def finish(self, ctx): + if self.check() and self["status"] in self.want_status: + ctx.prefixes.writerow((self["netname"], self["inetnum"])) + +class inet6num(Handle): + want_tags = ("inet6num", "netname", "status") # "mnt-by" + + def finish(self, ctx): + if self.check() and self["status"] in self.want_status: + ctx.prefixes.writerow((self["netname"], self["inet6num"])) + +class main(object): + + types = dict((x.want_tags[0], x) for x in (aut_num, inetnum, inet6num)) + + def finish_statement(self, done): + if self.statement: + tag, sep, val = self.statement.partition(":") + assert sep, "Couldn't find separator in %r" % self.statement + tag = tag.strip().lower() + val = val.strip().upper() + if self.cur is None: + self.cur = self.types[tag]() if tag in self.types else False + if self.cur is not False: + self.cur.set(tag, val) + if done and self.cur: + self.cur.finish(self) + self.cur = None + + filenames = ("ripe.db.aut-num.gz", "ripe.db.inet6num.gz", "ripe.db.inetnum.gz") + + def __init__(self): + self.asns = csv_writer("asns.csv") + self.prefixes = csv_writer("prefixes.csv") + for fn in self.filenames: + f = gzip.open(fn) + self.statement = "" + self.cur = None + for line in f: + line = line.expandtabs().partition("#")[0].rstrip("\n") + if line and not line[0].isalpha(): + self.statement += line[1:] if line[0] == "+" else line + else: + self.finish_statement(not line) + self.statement = line + self.finish_statement(True) + f.close() + self.asns.close() + self.prefixes.close() + +main() diff --git a/potpourri/roa-to-irr.py b/potpourri/roa-to-irr.py new file mode 100644 index 00000000..01b2aac8 --- /dev/null +++ b/potpourri/roa-to-irr.py @@ -0,0 +1,159 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright 
notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Generate IRR route and route6 objects from ROAs. +""" + +import os +import socket +import sys +import argparse +import errno +import time +import rpki.x509 + +args = None + +class route(object): + """ + Interesting parts of a route object. + """ + + def __init__(self, label, uri, asnum, date, prefix, prefixlen, max_prefixlen): + self.label = label + self.uri = uri + self.asn = asnum + self.date = date + self.prefix = prefix + self.prefixlen = prefixlen + self.max_prefixlen = self.prefixlen if max_prefixlen is None else max_prefixlen + + def __cmp__(self, other): + result = cmp(self.asn, other.asn) + if result == 0: + result = cmp(self.prefix, other.prefix) + if result == 0: + result = cmp(self.prefixlen, other.prefixlen) + if result == 0: + result = cmp(self.max_prefixlen, other.max_prefixlen) + if result == 0: + result = cmp(self.date, other.date) + return result + + def __str__(self): + lines = ( + "%-14s%s/%s" % (self.label, self.prefix, self.prefixlen), + "descr: %s/%s-%s" % (self.prefix, self.prefixlen, self.max_prefixlen), + "origin: AS%d" % self.asn, + "notify: %s" % args.notify, + "mnt-by: %s" % args.mnt_by, + "changed: %s %s" % (args.changed_by, self.date), + "source: %s" % args.source, + "override: %s" % args.password if args.password is not None else None, + "") + return "\n".join(line for line in lines if line is not None) + + def write(self, output_directory): + name = 
"%s-%s-%s-AS%d-%s" % (self.prefix, self.prefixlen, self.max_prefixlen, self.asn, self.date) + with open(os.path.join(output_directory, name), "w") as f: + f.write(str(self)) + + +class route_list(list): + """ + A list of route objects. + """ + + def __init__(self, rcynic_dir): + for root, dirs, files in os.walk(rcynic_dir): + for f in files: + if f.endswith(".roa"): + path = os.path.join(root, f) + uri = "rsync://" + path[len(rcynic_dir):].lstrip("/") + roa = rpki.x509.ROA(DER_file = path) + roa.extract() + assert roa.get_POW().getVersion() == 0, "ROA version is %d, expected 0" % roa.get_POW().getVersion() + asnum = roa.get_POW().getASID() + notBefore = roa.get_POW().certs()[0].getNotBefore().strftime("%Y%m%d") + v4, v6 = roa.get_POW().getPrefixes() + if v4 is not None: + for prefix, prefixlen, max_prefixlen in v4: + self.append(route("route:", uri, asnum, notBefore, prefix, prefixlen, max_prefixlen)) + if v6 is not None: + for prefix, prefixlen, max_prefixlen in v6: + self.append(route("route6:", uri, asnum, notBefore, prefix, prefixlen, max_prefixlen)) + self.sort() + for i in xrange(len(self) - 2, -1, -1): + if self[i] == self[i + 1]: + del self[i + 1] + +def email_header(f): + if args.email: + f.write("\n".join(( + "From %s" % args.email_from, + "Date: %s" % time.strftime("%d %b %Y %T %z"), + "From: %s" % args.email_from, + "Subject: Fake email header to make irr_rpsl_submit happy", + "Message-Id: <%s.%s@%s>" % (os.getpid(), time.time(), socket.gethostname()), + "", ""))) + +def main(): + + global args + whoami = "%s@%s" % (os.getlogin(), socket.gethostname()) + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("-c", "--changed_by", default = whoami, + help = "override \"changed:\" value") + parser.add_argument("-f", "--from", dest = "email_from", default = whoami, + help = "override \"from:\" header when using --email") + parser.add_argument("-m", "--mnt_by", default = "MAINT-RPKI", + help = "override \"mnt-by:\" value") + 
parser.add_argument("-n", "--notify", default = whoami, + help = "override \"notify:\" value") + parser.add_argument("-p", "--password", + help = "specify \"override:\" password") + parser.add_argument("-s", "--source", default = "RPKI", + help = "override \"source:\" value") + group = parser.add_mutually_exclusive_group() + group.add_argument("-e", "--email", action = "store_true", + help = "generate fake RFC 822 header suitable for piping to irr_rpsl_submit") + group.add_argument("-o", "--output", + help = "write route and route6 objects to directory OUTPUT, one object per file") + parser.add_argument("authenticated_directory", + help = "directory tree containing authenticated rcynic output") + args = parser.parse_args() + + if not os.path.isdir(args.authenticated_directory): + sys.exit("\"%s\" is not a directory" % args.authenticated_directory) + + routes = route_list(args.authenticated_directory) + + if args.output: + if not os.path.isdir(args.output): + os.makedirs(args.output) + for r in routes: + r.write(args.output) + else: + email_header(sys.stdout) + for r in routes: + sys.stdout.write("%s\n" % r) + +if __name__ == "__main__": + main() diff --git a/potpourri/rp-sqlite b/potpourri/rp-sqlite new file mode 100755 index 00000000..ee43096d --- /dev/null +++ b/potpourri/rp-sqlite @@ -0,0 +1,425 @@ +#!/usr/local/bin/python + +# $Id$ + +# Copyright (C) 2013 Dragon Research Labs ("DRL") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +# Preliminary script to work out what's involved in building an +# SQLite3 database of RP objects. We haven't bothered with this until +# now in rcynic, because we mostly just walk the filesystem tree, but +# LTA and some of the ideas Tim is playing with require a lot of +# lookups based on things that are not the URIs we use as filenames, +# so some kind of indexing may become necessary. Given the complexity +# of building any kind of real index over RFC 3779 resources, +# otherwise fine lightweight tools like the Python shelve library +# probably won't cut it here, and I don't want to add a dependency on +# MySQL on the RP side (yet?), so let's see what we can do with SQLite3. 
+ +import os +import sys +import yaml +import base64 +import sqlite3 +import weakref +import rpki.POW +import rpki.x509 +import rpki.resource_set + +sqlite3.register_adapter(rpki.POW.IPAddress, + lambda x: buffer("_" + x.toBytes())) + +sqlite3.register_converter("RangeVal", + lambda s: long(s) if s.isdigit() else rpki.POW.IPAddress.fromBytes(s[1:])) + + +def main(): + rpdb = RPDB() + rpdb.load() + test(rpdb) + rpdb.close() + +def test(rpdb): + fn2s = [None] + rpdb.fn2map.keys() + + print + print "Testing YAML parsing" + parse_yaml(rpdb) + + print + print "Looking for certificates without AKI" + for r in rpdb.find_by_aki(None, "cer"): + print r, r.uris + print + print "Testing range functions" + for fn2 in fn2s: + if fn2 is not None: + print + print "Restricting search to type", fn2 + print + print "Looking for range that should include adrilankha and psg again" + for r in rpdb.find_by_range("147.28.0.19", "147.28.0.62", fn2): + print r, r.uris + print + print "Looking for range that should include adrilankha" + for r in rpdb.find_by_range("147.28.0.19", "147.28.0.19", fn2): + print r, r.uris + print + print "Looking for range that should include ASN 3130" + for r in rpdb.find_by_range(3130, 3130, fn2): + print r, r.uris + print + print "Moving on to resource sets" + for fn2 in fn2s: + if fn2 is not None: + print + print "Restricting search to type", fn2 + for expr in ("147.28.0.19-147.28.0.62", + "3130", + "2001:418:1::19/128", + "147.28.0.19-147.28.0.62,198.180.150.50/32", + "3130,147.28.0.19-147.28.0.62,198.180.150.50/32", + "2001:418:1::62/128,198.180.150.50/32,2001:418:8006::50/128", + "147.28.0.19-147.28.0.62,2001:418:1::19/128,2001:418:1::62/128,198.180.150.50/32,2001:418:8006::50/128"): + print + print "Trying", expr + for r in rpdb.find_by_resource_bag(rpki.resource_set.resource_bag.from_str(expr), fn2): + print r, r.uris + + +def parse_xki(s): + """ + Parse text form of an SKI or AKI. 
We accept two encodings: + colon-delimited hexadecimal, and URL-safe Base64. The former is + what OpenSSL prints in its text representation of SKI and AKI + extensions; the latter is the g(SKI) value that some RPKI CA engines + (including rpkid) use when constructing filenames. + + In either case, we check that the decoded result contains the right + number of octets to be a SHA-1 hash. + """ + + if ":" in s: + b = "".join(chr(int(c, 16)) for c in s.split(":")) + else: + b = base64.urlsafe_b64decode(s + ("=" * (4 - len(s) % 4))) + if len(b) != 20: + raise RuntimeError("Bad length for SHA1 xKI value: %r" % s) + return b + + +def parse_yaml(rpdb, fn = "rp-sqlite.yaml"): + yy = yaml.safe_load(open(fn, "r")) + for y in yy: + + ski = None + uri = None + obj = set() + + print + + if "ski" in y: + ski = parse_xki(y["ski"]) + obj.update(rpdb.find_by_ski(ski)) + if "uri" in y: + uri = y["uri"] + obj.update(rpdb.find_by_uri(uri)) + if len(obj) == 1: + obj = obj.pop() + else: + raise RuntimeError("Constraint entry must name a unique object using SKI, URI, or both (%r, %r, %r)" % ( + ski, uri, obj)) + + print "URI:", uri + print "SKI:", " ".join("%02X" % ord(c) for c in ski), "(" + y["ski"] + ")" + + new_resources = old_resources = obj.get_3779resources() + + if "set" in y: + new_resources = rpki.resource_set.resource_bag.from_str(y["set"]) + + if "add" in y: + new_resources = new_resources | rpki.resource_set.resource_bag.from_str(y["add"]) + + if "sub" in y: + new_resources = new_resources - rpki.resource_set.resource_bag.from_str(y["sub"]) + + if new_resources == old_resources: + print "No resource change, skipping" + continue + + print "Old:", old_resources + print "New:", new_resources + print "Add:", new_resources - old_resources + print "Sub:", old_resources - new_resources + + # See draft-ietf-sidr-ltamgmt-08.txt for real processing details, but overview: + # + # - Process constraints file as above to determine list of target + # certificates (2.1). 
May need to add more fields to YAML hash + # for things like CP, CRLDP, etc, although I'm not entirely sure + # yet which of those it really makes sense to tweak via + # constraints. + # + # - Use resources from selected target certificates to determine + # which additional certificates we need to reissue to remove those + # resources (2.2, "perforation"). In theory we already have SQL + # that will just locate all of these for us. + # + # - Figure out which trust anchors to process (2.3, TA + # re-parenting); we can look in SQL for NULL AKI, but that's just + # a hint, we either have to verify that rcynic accepted those TAs + # or we have to look at the TALs. Looking at TALs is probably + # easier. + # + # At some point we probably need to parse the constraints file into + # Constraints objects or something like that, except that we may + # really need something more general that will accomodate + # perforation and TA reparenting as well. Figure out and refactor + # as we go along, most likely. + + +class RPDB(object): + """ + Relying party database. + + For now just wire in the database name and rcynic root, fix this + later if overall approach seems usable. Might even end up just + being an in-memory SQL database, who knows? 
+ """ + + fn2map = dict(cer = rpki.x509.X509, + crl = rpki.x509.CRL, + mft = rpki.x509.SignedManifest, + roa = rpki.x509.ROA, + gbr = rpki.x509.Ghostbuster) + + def __init__(self, db_name = "rp-sqlite.db", delete_old_db = True): + + if delete_old_db: + try: + os.unlink(db_name) + except: + pass + + exists = os.path.exists(db_name) + + self.db = sqlite3.connect(db_name, detect_types = sqlite3.PARSE_DECLTYPES) + self.db.text_factory = str + self.cur = self.db.cursor() + + self.cache = weakref.WeakValueDictionary() + + if exists: + return + + self.cur.executescript(''' + PRAGMA foreign_keys = on; + + CREATE TABLE object ( + id INTEGER PRIMARY KEY NOT NULL, + der BLOB NOT NULL, + fn2 TEXT NOT NULL, + ski BLOB, + aki BLOB, + inherits BOOLEAN NOT NULL, + UNIQUE (der)); + + CREATE TABLE uri ( + id INTEGER NOT NULL, + uri TEXT NOT NULL, + UNIQUE (uri), + FOREIGN KEY (id) REFERENCES object(id) + ON DELETE CASCADE + ON UPDATE CASCADE); + + CREATE INDEX uri_index ON uri(id); + + CREATE TABLE range ( + id INTEGER NOT NULL, + min RangeVal NOT NULL, + max RangeVal NOT NULL, + UNIQUE (id, min, max), + FOREIGN KEY (id) REFERENCES object(id) + ON DELETE CASCADE + ON UPDATE CASCADE); + + CREATE INDEX range_index ON range(min, max); + ''') + + def load(self, + rcynic_root = os.path.expanduser("~/rpki/subvert-rpki.hactrn.net/trunk/" + "rcynic/rcynic-data/unauthenticated"), + spinner = 100): + + nobj = 0 + + for root, dirs, files in os.walk(rcynic_root): + for fn in files: + fn = os.path.join(root, fn) + fn2 = os.path.splitext(fn)[1][1:] + + try: + obj = self.fn2map[fn2](DER_file = fn) + except: + continue + + if spinner and nobj % spinner == 0: + sys.stderr.write("\r%s %d..." 
% ("|\\-/"[(nobj/spinner) & 3], nobj)) + + nobj += 1 + + inherits = False + + if fn2 == "crl": + ski = None + aki = buffer(obj.get_AKI()) + cer = None + bag = None + + else: + if fn2 == "cer": + cer = obj + else: + cer = rpki.x509.X509(POW = obj.get_POW().certs()[0]) + ski = buffer(cer.get_SKI()) + try: + aki = buffer(cer.get_AKI()) + except: + aki = None + bag = cer.get_3779resources() + inherits = bag.asn.inherit or bag.v4.inherit or bag.v6.inherit + + der = buffer(obj.get_DER()) + uri = "rsync://" + fn[len(rcynic_root) + 1:] + + try: + self.cur.execute("INSERT INTO object (der, fn2, ski, aki, inherits) VALUES (?, ?, ?, ?, ?)", + (der, fn2, ski, aki, inherits)) + rowid = self.cur.lastrowid + + except sqlite3.IntegrityError: + self.cur.execute("SELECT id FROM object WHERE der = ? AND fn2 = ?", (der, fn2)) + rows = self.cur.fetchall() + rowid = rows[0][0] + assert len(rows) == 1 + + else: + if bag is not None: + for rset in (bag.asn, bag.v4, bag.v6): + if rset is not None: + self.cur.executemany("REPLACE INTO range (id, min, max) VALUES (?, ?, ?)", + ((rowid, i.min, i.max) for i in rset)) + + self.cur.execute("INSERT INTO uri (id, uri) VALUES (?, ?)", + (rowid, uri)) + + if spinner: + sys.stderr.write("\r= %d objects, committing..." % nobj) + + self.db.commit() + + if spinner: + sys.stderr.write("done.\n") + + + def find_by_ski(self, ski, fn2 = None): + if ski is None: + return self._find_results(fn2, "SELECT id, fn2, der FROM object WHERE ski IS NULL") + else: + return self._find_results(fn2, "SELECT id, fn2, der FROM object WHERE ski = ?", [buffer(ski)]) + + def find_by_aki(self, aki, fn2 = None): + if aki is None: + return self._find_results(fn2, "SELECT id, fn2, der FROM object WHERE aki IS NULL") + else: + return self._find_results(fn2, "SELECT id, fn2, der FROM object WHERE aki = ?", [buffer(aki)]) + + def find_by_uri(self, uri): + return self._find_results(None, "SELECT object.id, fn2, der FROM object, uri WHERE uri.uri = ? 
AND object.id = uri.id", [uri]) + + + # It's easiest to understand overlap conditions by understanding + # non-overlap then inverting and and applying De Morgan's law. Ranges + # A and B do not overlap if either A.min > B.max or A.max < B.min; + # therefore they do overlap if A.min <= B.max and A.max >= B.min. + + def find_by_range(self, range_min, range_max = None, fn2 = None): + if range_max is None: + range_max = range_min + if isinstance(range_min, (str, unicode)): + range_min = long(range_min) if range_min.isdigit() else rpki.POW.IPAddress(range_min) + if isinstance(range_max, (str, unicode)): + range_max = long(range_max) if range_max.isdigit() else rpki.POW.IPAddress(range_max) + assert isinstance(range_min, (int, long, rpki.POW.IPAddress)) + assert isinstance(range_max, (int, long, rpki.POW.IPAddress)) + return self._find_results( + fn2, + """ + SELECT object.id, fn2, der FROM object, range + WHERE ? <= max AND ? >= min AND object.id = range.id + """, + [range_min, range_max]) + + + def find_by_resource_bag(self, bag, fn2 = None): + assert bag.asn or bag.v4 or bag.v6 + qset = [] + aset = [] + for rset in (bag.asn, bag.v4, bag.v6): + if rset: + for r in rset: + qset.append("(? <= max AND ? >= min)") + aset.append(r.min) + aset.append(r.max) + return self._find_results( + fn2, + """ + SELECT object.id, fn2, der FROM object, range + WHERE object.id = range.id AND (%s) + """ % (" OR ".join(qset)), + aset) + + + def _find_results(self, fn2, query, args = None): + if args is None: + args = [] + if fn2 is not None: + assert fn2 in self.fn2map + query += " AND fn2 = ?" 
+ args.append(fn2) + query += " GROUP BY object.id" + results = [] + self.cur.execute(query, args) + selections = self.cur.fetchall() + for rowid, fn2, der in selections: + if rowid in self.cache: + obj = self.cache[rowid] + else: + obj = self.fn2map[fn2](DER = der) + self.cur.execute("SELECT uri FROM uri WHERE id = ?", (rowid,)) + obj.uris = [u[0] for u in self.cur.fetchall()] + obj.uri = obj.uris[0] if len(obj.uris) == 1 else None + self.cache[rowid] = obj + results.append(obj) + return results + + + def close(self): + self.cur.close() + self.db.close() + + +if __name__ == "__main__": + main() diff --git a/potpourri/rp-sqlite.yaml b/potpourri/rp-sqlite.yaml new file mode 100644 index 00000000..4c0a0b8d --- /dev/null +++ b/potpourri/rp-sqlite.yaml @@ -0,0 +1,53 @@ +- ski: B8:14:5D:13:53:7D:AE:6E:E2:E3:95:84:A8:99:EB:7D:1A:7D:E5:DF + uri: rsync://repo0.rpki.net/rpki/root.cer + add: 10.0.0.44/32 + +- ski: A2:B3:2A:99:20:07:7A:E9:A4:9F:3F:02:F2:32:F9:3D:54:F8:7E:78 + uri: rsync://repo0.rpki.net/rpki/root/iana.cer + sub: 10.0.0.0/8 + +- ski: 3NYgwt_HYV91MeCGLWdUL4uq65M + uri: rsync://repo0.rpki.net/rpki/root/iana/5/3NYgwt_HYV91MeCGLWdUL4uq65M.cer + add: 10.0.0.0/8 + +- ski: 3YFTaQOUkPQfhckX_ikYzy0mR9s + uri: rsync://repo0.rpki.net/rpki/root/iana/5/3YFTaQOUkPQfhckX_ikYzy0mR9s.cer + +- ski: 7ew1d5WFCSfhd8lnpmjwOohS_DQ + uri: rsync://repo0.rpki.net/rpki/root/iana/5/7ew1d5WFCSfhd8lnpmjwOohS_DQ.cer + +- ski: PWxDsvUgDdeloE3eQfceV8vbIyg + uri: rsync://repo0.rpki.net/rpki/root/iana/5/PWxDsvUgDdeloE3eQfceV8vbIyg.cer + +- ski: WnOFfpqobEKxzmvddJue3PXsEIQ + uri: rsync://repo0.rpki.net/rpki/root/iana/5/WnOFfpqobEKxzmvddJue3PXsEIQ.cer + +- ski: m6TQTKwKYyVva-Yq__I-Zz25Vns + uri: rsync://repo0.rpki.net/rpki/root/iana/5/m6TQTKwKYyVva-Yq__I-Zz25Vns.cer + +- ski: 8U5wQ47aZuuOXYPGX5BPvlcTfNg + uri: rsync://repo0.rpki.net/rpki/root/iana/ripe/8/8U5wQ47aZuuOXYPGX5BPvlcTfNg.cer + +- ski: bccxGl4Xl4ur3nd1fwQ-1QIwtNA + uri: 
rsync://repo0.rpki.net/rpki/root/iana/ripe/8/bccxGl4Xl4ur3nd1fwQ-1QIwtNA.cer + +- ski: zbXifbEkZNmOVhYZTCZFfLPxhjM + uri: rsync://repo0.rpki.net/rpki/root/iana/ripe/8/zbXifbEkZNmOVhYZTCZFfLPxhjM.cer + +- ski: XYjTToHopYme-hlwgUyUyYRD_VQ + uri: rsync://repo0.rpki.net/rpki/root/iana/arin/6/XYjTToHopYme-hlwgUyUyYRD_VQ.cer + +- ski: _3I3i3uVmnliCinb2STR2MaxuU8 + uri: rsync://repo0.rpki.net/rpki/root/iana/arin/6/_3I3i3uVmnliCinb2STR2MaxuU8.cer + +- ski: qdtoqOMCNSOdYuNNC7ya3dUaPl4 + uri: rsync://repo0.rpki.net/rpki/root/iana/arin/6/qdtoqOMCNSOdYuNNC7ya3dUaPl4.cer + +- ski: yq4x9MteoM0DQYTh6NLbbmf--QY + uri: rsync://repo0.rpki.net/rpki/root/iana/arin/6/yq4x9MteoM0DQYTh6NLbbmf--QY.cer + +- ski: j2TDGclJnZ7mXpZCQJS0cfkOL34 + uri: rsync://repo0.rpki.net/rpki/root/iana/apnic/7/j2TDGclJnZ7mXpZCQJS0cfkOL34.cer + +- ski: yRk89OOx4yyJHJ6Z1JLLnk0_oDc + uri: rsync://repo0.rpki.net/rpki/root/iana/apnic/7/yRk89OOx4yyJHJ6Z1JLLnk0_oDc.cer diff --git a/potpourri/rpki b/potpourri/rpki new file mode 120000 index 00000000..168548eb --- /dev/null +++ b/potpourri/rpki @@ -0,0 +1 @@ +../rpkid/rpki \ No newline at end of file diff --git a/potpourri/rpkidemo b/potpourri/rpkidemo new file mode 100755 index 00000000..fdb4e1bb --- /dev/null +++ b/potpourri/rpkidemo @@ -0,0 +1,495 @@ +#!/usr/bin/env python + +""" +Hosted GUI client startup script, for workshops, etc. + +As of when this is run, we assume that the tarball (contents TBD and +perhaps changing from one workshop to another) have been unpacked, +that we are on some Unix-like machine, and that we are executing in +a Python interpreter. We have to check anything else we care about. + +In what we hope is the most common case, this script should be run +with no options. 
+ +$Id$ + +Copyright (C) 2010 Internet Systems Consortium ("ISC") + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +""" + +# Check Python version before doing anything else + +import sys + +python_version = sys.version_info[:2] + +have_ssl_module = python_version >= (2, 6) + +if python_version == (2, 5): + print """ + WARNING WARNING WARNING + + You are running Python version 2.5, which does not include + real SSL support. This means that sessions created by this + script will be vulnerable to monkey-in-the-middle attacks. + + Python 2.6 does not have this problem. + """ + while True: + answer = raw_input("Are you SURE you want to proceed? 
(yes/NO) ").strip().lower() + if answer in ("", "n", "no"): + sys.exit("You have chosen wisely") + elif answer in ("y", "yes"): + print "You have been warned" + break + else: + print 'Please answer "yes" or "no"' + +elif have_ssl_module: + try: + import ssl + except ImportError: + sys.exit("You're running Python 2.6+, but I can't find the ssl module, so you have no SSL support at all, argh!") + +else: + sys.exit("Sorry, this script requires Python 2.6+, I seem to be running in %s" % sys.version) + +# Ok, it's safe to import the other stuff we need now + +import os, subprocess, webbrowser, urllib2, getpass, re, errno, time, email.utils, httplib, socket, getopt, urllib, cookielib +import tempfile +from xml.etree.ElementTree import fromstring as ElementFromString + +def save(filename, data): + """ + Save data to a file. + """ + + tempname = "%s.%d.tmp" % (filename, os.getpid()) + f = open(tempname, "w") + f.write(data) + f.close() + os.rename(tempname, filename) + +def save_error(err): + """ + Save the data from the file-like object "f" into a temporary file + and open a web browser to view the result. + """ + + with tempfile.NamedTemporaryFile(prefix = "rpkidemo-error", suffix = ".html", delete = False) as tmpf: + tmpf.write(err.read()) + + # Save filename for use outside the with statement. This ensures + # the file is properly flushed prior to invoking the web browser. + fname = tmpf.name + + sys.stderr.write("errors saved in %s\n" % fname) + webbrowser.open("file://" + fname) + +class CSV_File(object): + """ + A CSV file that's being maintained by the GUI but being monitored, + downloaded, and used here. + """ + + def __init__(self, filename, url): + self.filename = filename + self.url = url + try: + self.timestamp = os.stat(filename).st_mtime + except: + self.store(0, "") + + def last_modified(self): + """ + Return CSV file timestamp formatted for use with HTTP. 
+ """ + return email.utils.formatdate(self.timestamp, False, True) + + def store(self, timestamp, data): + """ + Save CSV file, and record new timestamp. + """ + save(self.filename, data) + self.timestamp = timestamp + os.utime(self.filename, (time.time(), timestamp)) + + +class AbstractHTTPSConnection(httplib.HTTPSConnection): + """ + Customization of httplib.HTTPSConnection to enable certificate + validation. + + This is an abstract class; subclass must set trust_anchor to the + filename of a anchor file in the format that the ssl module + expects. + """ + + trust_anchor = None + + def connect(self): + assert self.trust_anchor is not None + sock = socket.create_connection((self.host, self.port), self.timeout) + if getattr(self, "_tunnel_host", None): + self.sock = sock + self._tunnel() + self.sock = ssl.wrap_socket(sock, + keyfile = self.key_file, + certfile = self.cert_file, + cert_reqs = ssl.CERT_REQUIRED, + ssl_version = ssl.PROTOCOL_TLSv1, + ca_certs = self.trust_anchor) + + +class main(object): + """ + Main program. + """ + + # Environmental parameters + + top = os.path.realpath(os.path.join((sys.path[0] or "."), "..")) + cwd = os.getcwd() + + # Parameters that we might want to get from a config file. + # Just wire them all in for the moment. + + base_url = "https://demo.rpki.net/" + myrpki_url = base_url + "rpki/" + auth_url = myrpki_url + "demo/login" + example_myrpki_cfg = "%s/rpkid/examples/rpki.conf" % top + working_dir = "%s/rpkidemo-data" % cwd + myrpki_py = "%s/rpkid/myrpki.py" % top + user_agent = "RPKIDemo" + delay = 15 + trust_anchor = "%s/scripts/rpkidemo.pem" % top + + openssl = None + + def setup_openssl(self): + """ + Find a usable version of OpenSSL, or build one if we must. 
+ """ + + def scrape(*args): + return subprocess.Popen(args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT).communicate()[0] + + def usable_openssl(f): + return f is not None and os.path.exists(f) and "-ss_cert" in scrape(f, "ca", "-?") and "Usage cms" in scrape(f, "cms", "-?") + + for d in os.environ["PATH"].split(":"): + f = os.path.join(d, "openssl") + if usable_openssl(f): + self.openssl = f + break + + if self.openssl is None: + print "Couldn't find usable openssl on path, attempting to build one" + subprocess.check_call(("./configure",), cwd = self.top) + subprocess.check_call(("make",), cwd = os.path.join(self.top, "openssl")) + self.openssl = os.path.join(self.top, "openssl", "openssl", "apps", "openssl") + print "Done building openssl" + print + + if usable_openssl(self.openssl): + print "Using", self.openssl + else: + sys.exit("Could not find or build usable version of openssl, giving up") + + @staticmethod + def setup_utc(): + """ + This script thinks in UTC. + """ + + os.environ["TZ"] = "UTC" + time.tzset() + + def setup_username(self): + """ + Get username and password for web interface, construct urllib2 + "opener" tailored for our use, perform an initial GET (ignoring + result, other than exceptions) to test the username and password. 
+ """ + + print "I need to know your username and password on the Django GUI server to proceed" + + while True: + + try: + self.username = raw_input("Username: ") + self.password = getpass.getpass() + + handlers = [] + + self.cookiejar = cookielib.CookieJar() + handlers.append(urllib2.HTTPCookieProcessor(self.cookiejar)) + + if have_ssl_module: + + class HTTPSConnection(AbstractHTTPSConnection): + trust_anchor = self.trust_anchor + + class HTTPSHandler(urllib2.HTTPSHandler): + def https_open(self, req): + return self.do_open(HTTPSConnection, req) + + handlers.append(HTTPSHandler) + + self.opener = urllib2.build_opener(*handlers) + + # Test login credentials + resp = self.opener.open(self.auth_url) # GET + + r = self.opener.open(urllib2.Request( + url = self.auth_url, + data = urllib.urlencode({ "username" : self.username, + "password" : self.password, + "csrfmiddlewaretoken" : self.csrftoken() }), + headers = { "Referer" : self.auth_url, + "User-Agent" : self.user_agent})) # POST + return + + except urllib2.URLError, e: + print "Could not log in to server: %s" % e + print "Please try again" + save_error(e) + + def csrftoken(self): + """ + Pull Django's CSFR token from cookie database. + + Django's login form requires the "csrfmiddlewaretoken." It turns out + this is the same value as the "csrftoken" cookie, so we don't need + to bother parsing the form. + """ + + return [c.value for c in self.cookiejar if c.name == "csrftoken"][0] + + def setup_working_directory(self): + """ + Create working directory and move to it. 
+ """ + + try: + print "Creating", self.working_dir + os.mkdir(self.working_dir) + except OSError, e: + if e.errno != errno.EEXIST: + raise + print self.working_dir, "already exists, reusing it" + os.chdir(self.working_dir) + + def setup_config_file(self): + """ + Generate rpki.conf + """ + + if os.path.exists("rpki.conf"): + print "You already have a rpki.conf file, so I will use it" + return + + print "Generating rpki.conf" + section_regexp = re.compile("\s*\[\s*(.+?)\s*\]\s*$") + variable_regexp = re.compile("\s*([-a-zA-Z0-9_]+)\s*=\s*(.+?)\s*$") + f = open("rpki.conf", "w") + f.write("# Automatically generated, do not edit\n") + section = None + for line in open(self.example_myrpki_cfg): + m = section_regexp.match(line) + if m: + section = m.group(1) + m = variable_regexp.match(line) + option = m.group(1) if m and section == "myrpki" else None + value = m.group(2) if option else None + if option == "handle": + line = "handle = %s\n" % self.username + if option == "openssl": + line = "openssl = %s\n" % self.openssl + if option in ("run_rpkid", "run_pubd", "run_rootd") and value != "false": + line = "%s = false\n" % option + f.write(line) + f.close() + + def myrpki(self, *cmd): + """ + Run a myrpki command. + """ + return subprocess.check_call((sys.executable, self.myrpki_py) + cmd) + + def upload(self, url, filename): + """ + Upload filename to URL, return result. + """ + + url = "%s%s/%s" % (self.myrpki_url, url, self.username) + data = open(filename).read() + print "Uploading", filename, "to", url + post_data = urllib.urlencode({ + "content" : data, + "csrfmiddlewaretoken" : self.csrftoken() }) # POST + try: + return self.opener.open(urllib2.Request(url, post_data, { + "User-Agent" : self.user_agent, + "Referer" : url})) + except urllib2.HTTPError, e: + sys.stderr.write("Problem uploading to URL %s\n" % url) + save_error(e) + raise + + def update(self): + """ + Run configure_resources, upload result, download updated result. 
+ """ + + self.myrpki("configure_resources") + r = self.upload("demo/myrpki-xml", "myrpki.xml") + save("myrpki.xml", r.read()) + + def setup_csv_files(self): + """ + Create CSV file objects and synchronize timestamps. + """ + + self.csv_files = [ + CSV_File("asns.csv", "demo/down/asns/%s" % self.username), + CSV_File("prefixes.csv", "demo/down/prefixes/%s" % self.username), + CSV_File("roas.csv", "demo/down/roas/%s" % self.username) ] + + def upload_for_response(self, url, path): + """ + Upload an XML file to the requested URL and wait for for the server + to signal that a response is ready. + """ + + self.upload(url, path) + + print """ + Waiting for response to upload. This may require action by a human + being on the server side, so it may take a while, please be patient. + """ + + while True: + try: + return self.opener.open(urllib2.Request( + "%s%s/%s" % (self.myrpki_url, url, self.username), + None, + { "User-Agent" : self.user_agent })) + except urllib2.HTTPError, e: + # Portal GUI uses response code 503 to signal "not ready" + if e.code != 503: + sys.stderr.write("Problem getting response from %s: %s\n" % (url, e)) + save_error(e) + raise + time.sleep(self.delay) + + def setup_parent(self): + """ + Upload the user's identity.xml and wait for the portal gui to send + back the parent.xml response. + """ + + r = self.upload_for_response("demo/parent-request", "entitydb/identity.xml") + parent_data = r.read() + save("parent.xml", parent_data) + self.myrpki("configure_parent", "parent.xml") + + # Extract the parent_handle from the xml response and save it for use by + # setup_repository() + self.parent_handle = ElementFromString(parent_data).get("parent_handle") + + def setup_repository(self): + """ + Upload the repository referral to the portal-gui and wait the + response from the repository operator. 
+ """ + + r = self.upload_for_response("demo/repository-request", "entitydb/repositories/%s.xml" % self.parent_handle) + save("repository.xml", r.read()) + self.myrpki("configure_repository", "repository.xml") + + def poll(self, csv_file): + """ + Poll for new version of a CSV file, save if changed, return + boolean indicating whether file has changed. + """ + + try: + url = self.myrpki_url + csv_file.url + r = self.opener.open(urllib2.Request(url, None, { + "If-Modified-Since" : csv_file.last_modified(), + "User-Agent" : self.user_agent })) + timestamp = time.mktime(r.info().getdate("Last-Modified")) + csv_file.store(timestamp, r.read()) + return True + except urllib2.HTTPError, e: + if e.code == 304: # 304 == "Not Modified" + return False + else: + sys.stderr.write("Problem polling URL %s\n" % url) + save_error(e) + raise + + def poll_loop(self): + """ + Loop forever, polling for updates. + """ + + while True: + changed = False + for csv_file in self.csv_files: + if self.poll(csv_file): + changed = True + if changed: + self.update() + time.sleep(self.delay) + + def getopt(self): + """ + Parse options. 
+ """ + opts, argv = getopt.getopt(sys.argv[1:], "hi?", ["help"]) + for o, a in opts: + if o in ("-h", "--help", "-?"): + print __doc__ + sys.exit(0) + if argv: + sys.exit("Unexpected arguments %r" % (argv,)) + + def __init__(self): + self.getopt() + self.setup_utc() + self.setup_openssl() + self.setup_username() + self.setup_working_directory() + self.setup_config_file() + self.setup_csv_files() + self.myrpki("initialize") + self.setup_parent() + self.setup_repository() + self.update() + self.update() + + webbrowser.open(self.myrpki_url) + + self.poll_loop() + +main() + +# Local Variables: +# mode:python +# End: + +# vim:sw=2 ts=8 expandtab diff --git a/potpourri/rpkidemo.pem b/potpourri/rpkidemo.pem new file mode 100644 index 00000000..f96fdb9e --- /dev/null +++ b/potpourri/rpkidemo.pem @@ -0,0 +1,23 @@ +-----BEGIN CERTIFICATE----- +MIIDxjCCAy+gAwIBAgIJALc3/mkoVFOMMA0GCSqGSIb3DQEBBQUAMIGaMR4wHAYD +VQQDExVSR25ldC9QU0duZXQgU2VjdXJpdHkxCzAJBgNVBAYTAlVTMQswCQYDVQQI +EwJXQTEaMBgGA1UEBxMRQmFpbmJyaWRnZSBJc2xhbmQxEzARBgNVBAoTClJHbmV0 +LCBMTEMxDzANBgNVBAsTBlBTR25ldDEcMBoGCSqGSIb3DQEJARYNcmFuZHlAcHNn +LmNvbTAeFw0xMTAyMjcwNDMxMzhaFw0yMTAyMjQwNDMxMzhaMIGaMR4wHAYDVQQD +ExVSR25ldC9QU0duZXQgU2VjdXJpdHkxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJX +QTEaMBgGA1UEBxMRQmFpbmJyaWRnZSBJc2xhbmQxEzARBgNVBAoTClJHbmV0LCBM +TEMxDzANBgNVBAsTBlBTR25ldDEcMBoGCSqGSIb3DQEJARYNcmFuZHlAcHNnLmNv +bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAw/lD46076mS4vDHI943dlNPh +KN1EY0wnx7nDga7jh4x8U5gt+MdvdH+kRABR7KVjkaUYKcd+DlAQeNLYXXkXOECz +yN90PgADWucPij6GJn7o9xwNsh2yAhICzqc19RgiKPVJL9V5xWBwKgzkpGG7dcUm +97TjZwhj8B8xcHjVbJ8CAwEAAaOCARAwggEMMAwGA1UdEwQFMAMBAf8wCwYDVR0P +BAQDAgEGMB0GA1UdDgQWBBRUmkatFo7oAUl5SJqUCfAC0LpkgDCBzwYDVR0jBIHH +MIHEgBRUmkatFo7oAUl5SJqUCfAC0LpkgKGBoKSBnTCBmjEeMBwGA1UEAxMVUkdu +ZXQvUFNHbmV0IFNlY3VyaXR5MQswCQYDVQQGEwJVUzELMAkGA1UECBMCV0ExGjAY +BgNVBAcTEUJhaW5icmlkZ2UgSXNsYW5kMRMwEQYDVQQKEwpSR25ldCwgTExDMQ8w +DQYDVQQLEwZQU0duZXQxHDAaBgkqhkiG9w0BCQEWDXJhbmR5QHBzZy5jb22CCQC3 
+N/5pKFRTjDANBgkqhkiG9w0BAQUFAAOBgQBHBN06mk++v2fb3DnDiwt0Xqna4oNH +LNN69VaKLHhQ8J0shO4386E9ejLTutWO5VCdRim3a7WuneYSIncFBY76ddipWmuK +chitDDRUn/ccx4pkPoiHBGqthMqSbNGVsamAMOAJzzPyGXdur46wpzz6DtWObsQg +2/Q6evShgNYmtg== +-----END CERTIFICATE----- diff --git a/potpourri/rpkigui-flatten-roas.py b/potpourri/rpkigui-flatten-roas.py new file mode 100644 index 00000000..e21c368b --- /dev/null +++ b/potpourri/rpkigui-flatten-roas.py @@ -0,0 +1,37 @@ +from rpki.gui.script_util import setup +setup() + +from django.db import transaction +from django.db.models import Count +from rpki.gui.app.models import ROARequest +from rpki.irdb.zookeeper import Zookeeper + +handles = set() + + +@transaction.commit_on_success +def flatten(): + for roa in ROARequest.objects.annotate(num_prefixes=Count('prefixes')).filter(num_prefixes__gt=1): + print 'splitting roa for AS%d' % roa.asn + for pfx in roa.prefixes.all(): + # create new roa objects for each prefix + newroa = ROARequest.objects.create( + issuer=roa.issuer, + asn=roa.asn) + newroa.prefixes.create( + version=pfx.version, + prefix=pfx.prefix, + prefixlen=pfx.prefixlen, + max_prefixlen=pfx.max_prefixlen + ) + roa.delete() + handles.add(roa.issuer.handle) + +flatten() + +if handles: + # poke rpkid to run the cron job for each handle that had a roa change + z = Zookeeper() + for h in handles: + z.reset_identity(h) + z.run_rpkid_now() diff --git a/potpourri/rpkigui-reset-demo.py b/potpourri/rpkigui-reset-demo.py new file mode 100644 index 00000000..0a3a1537 --- /dev/null +++ b/potpourri/rpkigui-reset-demo.py @@ -0,0 +1,48 @@ +# Copyright (C) 2012, 2013, 2014 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +This script is used to reset all of the labuser* accounts on demo.rpki.net back +to a state suitable for a new workshop. It removes all ROAs and Ghostbuster +issued by the labuser accounts. + +""" + +__version__ = '$Id$' + +from rpki.gui.script_util import setup +setup() + +import sys + +from rpki.gui.app.models import Conf +from rpki.irdb.models import ROARequest, GhostbusterRequest +from rpki.gui.app.glue import list_received_resources + +for n in xrange(1, 33): + username = 'labuser%02d' % n + print 'removing objects for ' + username + for cls in (ROARequest, GhostbusterRequest): + cls.objects.filter(issuer__handle=username).delete() + conf = Conf.objects.get(handle=username) + conf.clear_alerts() + print '... updating resource certificate cache' + list_received_resources(sys.stdout, conf) + + # Remove delegated resources (see https://trac.rpki.net/ticket/544) + # Note that we do not remove the parent-child relationship, just the + # resources. + for child in conf.children(): + child.asns.delete() + child.address_ranges.delete() diff --git a/potpourri/rpkigui-sync-users.py b/potpourri/rpkigui-sync-users.py new file mode 100644 index 00000000..9c636e95 --- /dev/null +++ b/potpourri/rpkigui-sync-users.py @@ -0,0 +1,32 @@ +# Copyright (C) 2013 SPARTA, Inc. 
a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +""" +Ensure that a web login exists for labuser* resource holder +""" +from rpki.gui.script_util import setup +setup() + +from django.contrib.auth.models import User +from rpki.gui.app.models import Conf, ConfACL + +# mysql> select * from irdb_resourceholderca left outer join auth_user on irdb_resourceholderca.handle = auth_user.username where username=NULL; + +for conf in Conf.objects.filter(handle__startswith='labuser'): + if not User.objects.filter(username=conf.handle).exists(): + print 'creating matching user for ' + conf.handle + user = User.objects.create_user(conf.handle, password='fnord') + ConfACL.objects.create(conf=conf, user=user) diff --git a/potpourri/rrd-rcynic-history.py b/potpourri/rrd-rcynic-history.py new file mode 100644 index 00000000..8a0d50a8 --- /dev/null +++ b/potpourri/rrd-rcynic-history.py @@ -0,0 +1,201 @@ +# $Id$ +# +# Copyright (C) 2011-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Parse traffic data out of rynic XML output, whack it a bit, and stuff +it into one or more RRDs (Round Robin Databases -- see rrdtool). + +Haven't decided yet whether to draw the resulting pictures here or +elsewhere. + +This is an initial adaptation of analyze-rcynic-history.py, which uses +gnuplot and a shelve database. It's also my first attempt to do +anything with rrdtool, so no doubt I'll get half of it wrong. +""" + +import mailbox +import sys +import urlparse +import os +import time + +from xml.etree.cElementTree import (ElementTree as ElementTree, + fromstring as ElementTreeFromString) + +os.putenv("TZ", "UTC") +time.tzset() + +def parse_utc(s): + return int(time.mktime(time.strptime(s, "%Y-%m-%dT%H:%M:%SZ"))) + +class Host(object): + """ + A host object represents all the data collected for one host for a given session. 
+ """ + + def __init__(self, hostname, timestamp): + self.hostname = hostname + self.timestamp = timestamp + self.elapsed = 0 + self.connections = 0 + self.failures = 0 + self.uris = set() + + def add_connection(self, elt): + self.elapsed += parse_utc(elt.get("finished")) - parse_utc(elt.get("started")) + self.connections += 1 + if elt.get("error") is not None: + self.failures += 1 + + def add_object_uri(self, u): + self.uris.add(u) + + @property + def failed(self): + return 1 if self.failures > 0 else 0 + + @property + def objects(self): + return len(self.uris) + + field_table = (("timestamp", None, None, None), + ("connections", "GAUGE", "Connections", "FF0000"), + ("objects", "GAUGE", "Objects", "00FF00"), + ("elapsed", "GAUGE", "Fetch Time", "0000FF"), + ("failed", "ABSOLUTE", "Failed", "00FFFF")) + + @property + def field_values(self): + return tuple(str(getattr(self, field[0])) for field in self.field_table) + + @classmethod + def field_ds_specifiers(cls, heartbeat = 24 * 60 * 60, minimum = 0, maximum = "U"): + return ["DS:%s:%s:%s:%s:%s" % (field[0], field[1], heartbeat, minimum, maximum) + for field in cls.field_table if field[1] is not None] + + @classmethod + def field_graph_specifiers(cls, hostname): + result = [] + for field in cls.field_table: + if field[1] is not None: + result.append("DEF:%s=%s.rrd:%s:AVERAGE" % (field[0], hostname, field[0])) + result.append("'LINE1:%s#%s:%s'" % (field[0], field[3], field[2])) + return result + + def save(self, rrdtable): + rrdtable.add(self.hostname, self.field_values) + +class Session(dict): + """ + A session corresponds to one XML file. This is a dictionary of Host + objects, keyed by hostname. 
+ """ + + def __init__(self, timestamp): + dict.__init__(self) + self.timestamp = timestamp + + @property + def hostnames(self): + return set(self.iterkeys()) + + def add_connection(self, elt): + hostname = urlparse.urlparse(elt.text.strip()).hostname + if hostname not in self: + self[hostname] = Host(hostname, self.timestamp) + self[hostname].add_connection(elt) + + def add_object_uri(self, u): + h = urlparse.urlparse(u).hostname + if h and h in self: + self[h].add_object_uri(u) + + def save(self, rrdtable): + for h in self.itervalues(): + h.save(rrdtable) + +class RRDTable(dict): + """ + Final data we're going to be sending to rrdtool. We need to buffer + it until we're done because we have to sort it. Might be easier + just to sort the maildir, then again it might be easier to get rid + of the maildir too once we're dealing with current data. We'll see. + """ + + def __init__(self, rrdtool = sys.stdout): + dict.__init__(self) + self.rrdtool = rrdtool + + def add(self, hostname, data): + if hostname not in self: + self[hostname] = [] + self[hostname].append(data) + + def sort(self): + for data in self.itervalues(): + data.sort() + + @property + def oldest(self): + return min(min(datum[0] for datum in data) for data in self.itervalues()) + + rras = tuple("RRA:AVERAGE:0.5:%s:9600" % steps for steps in (1, 4, 24)) + + def create(self): + start = self.oldest + ds_list = Host.field_ds_specifiers() + ds_list.extend(self.rras) + for hostname in self: + if not os.path.exists("%s.rrd" % hostname): + self.rrdtool("create %s.rrd --start %s --step 3600 %s\n" % (hostname, start, " ".join(ds_list))) + + def update(self): + for hostname, data in self.iteritems(): + for datum in data: + self.rrdtool("update %s.rrd %s\n" % (hostname, ":".join(str(d) for d in datum))) + + def graph(self): + for hostname in self: + self.rrdtool("graph %s.png --start -90d %s\n" % (hostname, " ".join(Host.field_graph_specifiers(hostname)))) + +mb = mailbox.Maildir("/u/sra/rpki/rcynic-xml", factory = 
None, create = False) + +rrdtable = RRDTable() + +for i, key in enumerate(mb.iterkeys(), 1): + sys.stderr.write("\r%s %d/%d..." % ("|\\-/"[i & 3], i, len(mb))) + + assert not mb[key].is_multipart() + input = ElementTreeFromString(mb[key].get_payload()) + date = input.get("date") + sys.stderr.write("%s..." % date) + session = Session(parse_utc(date)) + for elt in input.findall("rsync_history"): + session.add_connection(elt) + for elt in input.findall("validation_status"): + if elt.get("generation") == "current": + session.add_object_uri(elt.text.strip()) + session.save(rrdtable) + + # XXX + #if i > 4: break + +sys.stderr.write("\n") + +rrdtable.create() +rrdtable.sort() +rrdtable.update() +rrdtable.graph() diff --git a/potpourri/setup-rootd.sh b/potpourri/setup-rootd.sh new file mode 100644 index 00000000..41a271b8 --- /dev/null +++ b/potpourri/setup-rootd.sh @@ -0,0 +1,36 @@ +#!/bin/sh - +# +# $Id$ +# +# Copyright (C) 2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +# Setting up rootd requires cross-certifying rpkid's resource-holding +# BPKI trust anchor under the BPKI trust anchor that rootd uses. This +# script handles that, albiet in a very ugly way. +# +# Filenames are wired in, you might need to change these if you've +# done something more complicated. 
+ +export RANDFILE=.OpenSSL.whines.unless.I.set.this +export BPKI_DIRECTORY=`pwd`/bpki/servers + +openssl=../openssl/openssl/apps/openssl + +$openssl ca -notext -batch -config rpki.conf \ + -ss_cert bpki/resources/ca.cer \ + -out $BPKI_DIRECTORY/child.cer \ + -extensions ca_x509_ext_xcert0 + +$openssl x509 -noout -text -in $BPKI_DIRECTORY/child.cer diff --git a/potpourri/show-cms-ee-certs.sh b/potpourri/show-cms-ee-certs.sh new file mode 100644 index 00000000..4f5168c7 --- /dev/null +++ b/potpourri/show-cms-ee-certs.sh @@ -0,0 +1,25 @@ +#!/bin/sh - +# +# $Id$ +# +# Copyright (C) 2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +openssl=../openssl/openssl/apps/openssl + +for i in "$@" +do + $openssl cms -verify -noverify -out /dev/null -inform DER -certsout /dev/stdout -in $1 | + $openssl x509 -noout -text -certopt no_sigdump,no_pubkey +done diff --git a/potpourri/show-tracking-data.py b/potpourri/show-tracking-data.py new file mode 100644 index 00000000..07e0a144 --- /dev/null +++ b/potpourri/show-tracking-data.py @@ -0,0 +1,39 @@ +# $Id$ +# +# Copyright (C) 2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Show tracking data for a bunch of objects retrieved by rcynic. + +This script takes one required argument, which is the name of a +directory tree containing the validated outpt of an rcynic run. If +you follow the default naming scheme this will be +/some/where/rcynic-data/authenticated. 
+""" + +import os +import sys +import rpki.x509 +import rpki.sundial + +rcynic_dir = sys.argv[1] + +for root, dirs, files in os.walk(rcynic_dir): + for f in files: + path = os.path.join(root, f) + date = rpki.sundial.datetime.utcfromtimestamp(os.stat(path).st_mtime) + uri = "rsync://" + path[len(rcynic_dir):].lstrip("/") + obj = rpki.x509.uri_dispatch(uri)(DER_file = path) + print date, obj.tracking_data(uri) diff --git a/potpourri/signed-object-dates.py b/potpourri/signed-object-dates.py new file mode 100644 index 00000000..fefd9448 --- /dev/null +++ b/potpourri/signed-object-dates.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python +# $Id$ + +# Extract notBefore, and notAfter values from an RPKI signed object; +# if the object is a manifest, also extract thisUpdate and nextUpdate. + +# Copyright (C) 2013 Dragon Research Labs ("DRL") +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +import sys +import rpki.POW + +extract_flags = (rpki.POW.CMS_NOCRL | + rpki.POW.CMS_NO_SIGNER_CERT_VERIFY | + rpki.POW.CMS_NO_ATTR_VERIFY | + rpki.POW.CMS_NO_CONTENT_VERIFY) + +for fn in sys.argv[1:]: + cls = rpki.POW.Manifest if fn.endswith(".mft") else rpki.POW.CMS + cms = cls.derReadFile(fn) + cer = cms.certs()[0] + print fn + print " notBefore: ", cer.getNotBefore() + if fn.endswith(".mft"): + cms.verify(rpki.POW.X509Store(), None, extract_flags) + print " thisUpdate:", cms.getThisUpdate() + print " nextUpdate:", cms.getNextUpdate() + print " notAfter: ", cer.getNotAfter() + print diff --git a/potpourri/splitbase64.xsl b/potpourri/splitbase64.xsl new file mode 100644 index 00000000..2b2d2fb1 --- /dev/null +++ b/potpourri/splitbase64.xsl @@ -0,0 +1,66 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/potpourri/testbed-rootcert.py b/potpourri/testbed-rootcert.py new file mode 100644 index 00000000..0716be2f --- /dev/null +++ b/potpourri/testbed-rootcert.py @@ -0,0 +1,66 @@ +# $Id$ +# +# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Generate config for a test RPKI root certificate for resources +specified in asns.csv and prefixes.csv. 
+ +This script is separate from arin-to-csv.py so that we can convert on +the fly rather than having to pull the entire database into memory. +""" + +import sys +from rpki.csv_utils import csv_reader + +if len(sys.argv) not in (2, 4): + sys.exit("Usage: %s holder [asns.csv prefixes.csv]" % sys.argv[0]) + +print '''\ +[req] +default_bits = 2048 +default_md = sha256 +distinguished_name = req_dn +prompt = no +encrypt_key = no + +[req_dn] +CN = Pseudo-%(HOLDER)s testbed root RPKI certificate + +[x509v3_extensions] +basicConstraints = critical,CA:true +subjectKeyIdentifier = hash +keyUsage = critical,keyCertSign,cRLSign +subjectInfoAccess = 1.3.6.1.5.5.7.48.5;URI:rsync://%(holder)s.rpki.net/rpki/%(holder)s/,1.3.6.1.5.5.7.48.10;URI:rsync://%(holder)s.rpki.net/rpki/%(holder)s/root.mft +certificatePolicies = critical,1.3.6.1.5.5.7.14.2 +sbgp-autonomousSysNum = critical,@rfc3779_asns +sbgp-ipAddrBlock = critical,@rfc3997_addrs + +[rfc3779_asns] +''' % { "holder" : sys.argv[1].lower(), + "HOLDER" : sys.argv[1].upper() } + +for i, asn in enumerate(asn for handle, asn in csv_reader(sys.argv[2] if len(sys.argv) > 2 else "asns.csv", columns = 2)): + print "AS.%d = %s" % (i, asn) + +print '''\ + +[rfc3997_addrs] + +''' + +for i, prefix in enumerate(prefix for handle, prefix in csv_reader(sys.argv[3] if len(sys.argv) > 2 else "prefixes.csv", columns = 2)): + v = 6 if ":" in prefix else 4 + print "IPv%d.%d = %s" % (v, i, prefix) diff --git a/potpourri/translate-handles.py b/potpourri/translate-handles.py new file mode 100644 index 00000000..49848277 --- /dev/null +++ b/potpourri/translate-handles.py @@ -0,0 +1,49 @@ +# $Id$ +# +# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Translate handles from the ones provided in a database dump into the +ones we use in our testbed. This has been broken out into a separate +program for two reasons: + +- Conversion of some of the RIR data is a very slow process, and it's + both annoying and unnecessary to run it every time we add a new + participant to the testbed. + +- This handle translation business now has fingers into half a dozen + scripts, so it needs refactoring in any case, either as a common + library function or as a separate script. + +This program takes a list of .CSV files on its command line, and +rewrites them as needed after performing the translation. 
+""" + +import os +import sys +from rpki.csv_utils import csv_reader, csv_writer + +translations = dict((src, dst) for src, dst in csv_reader("translations.csv", columns = 2)) + +for filename in sys.argv[1:]: + + f = csv_writer(filename) + + for cols in csv_reader(filename): + if cols[0] in translations: + cols[0] = translations[cols[0]] + f.writerow(cols) + + f.close() diff --git a/potpourri/upgrade-add-ghostbusters.py b/potpourri/upgrade-add-ghostbusters.py new file mode 100644 index 00000000..a8c8a92b --- /dev/null +++ b/potpourri/upgrade-add-ghostbusters.py @@ -0,0 +1,73 @@ +# $Id$ +# +# Copyright (C) 2009--2011 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Add SQL tables needed for Ghostbusters support. 
+Most of the code here lifted from rpki-sql-setup.py +""" + +import getopt, sys, rpki.config, warnings + +from rpki.mysql_import import MySQLdb + +def fix(name, *statements): + db = MySQLdb.connect(db = cfg.get("sql-database", section = name), + user = cfg.get("sql-username", section = name), + passwd = cfg.get("sql-password", section = name)) + cur = db.cursor() + for statement in statements: + cur.execute(statement) + db.commit() + db.close() + +cfg_file = None + +opts, argv = getopt.getopt(sys.argv[1:], "c:h?", ["config=", "help"]) +for o, a in opts: + if o in ("-h", "--help", "-?"): + print __doc__ + sys.exit(0) + if o in ("-c", "--config"): + cfg_file = a + +cfg = rpki.config.parser(cfg_file, "myrpki") + +fix("irdbd", """ + CREATE TABLE ghostbuster_request ( + ghostbuster_request_id SERIAL NOT NULL, + self_handle VARCHAR(40) NOT NULL, + parent_handle VARCHAR(40), + vcard LONGBLOB NOT NULL, + PRIMARY KEY (ghostbuster_request_id) + ) ENGINE=InnoDB; +""") + +fix("rpkid", """ + CREATE TABLE ghostbuster ( + ghostbuster_id SERIAL NOT NULL, + vcard LONGBLOB NOT NULL, + cert LONGBLOB NOT NULL, + ghostbuster LONGBLOB NOT NULL, + published DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ghostbuster_id), + CONSTRAINT ghostbuster_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + CONSTRAINT ghostbuster_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE + ) ENGINE=InnoDB; +""") diff --git a/potpourri/verify-bpki.sh b/potpourri/verify-bpki.sh new file mode 100755 index 00000000..0e36d796 --- /dev/null +++ b/potpourri/verify-bpki.sh @@ -0,0 +1,43 @@ +#!/bin/sh - +# $Id$ +# +# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +# Tests of generated BPKI certificates. Kind of cheesy, but does test +# the basic stuff. + +exec 2>&1 + +for bpki in bpki/* +do + crls=$(find $bpki -name '*.crl') + + # Check that CRLs verify properly + for crl in $crls + do + echo -n "$crl: " + openssl crl -CAfile $bpki/ca.cer -noout -in $crl + done + + # Check that issued certificates verify properly + cat $bpki/ca.cer $crls | openssl verify -crl_check -CAfile /dev/stdin $(find $bpki -name '*.cer' ! -name 'ca.cer' ! -name '*.cacert.cer') + +done + +# Check that cross-certified BSC certificates verify properly +if test -d bpki/servers +then + cat bpki/servers/xcert.*.cer | openssl verify -verbose -CAfile bpki/servers/ca.cer -untrusted /dev/stdin bpki/resources/bsc.*.cer +fi diff --git a/potpourri/whack-ripe-asns.py b/potpourri/whack-ripe-asns.py new file mode 100644 index 00000000..9c702271 --- /dev/null +++ b/potpourri/whack-ripe-asns.py @@ -0,0 +1,83 @@ +# $Id$ +# +# Copyright (C) 2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Fix problems in asns.csv generated from RIPE's database. + +RIPE's database contains inconsistancies, overlaps, and format errors +that make it impossible to feed the output of ripe-to-csv.awk directly +into testbed-rootcert.py without OpenSSL rejecting the resulting +root.conf. This script takes a brute force approach to fixing this: +it converts all ASNs to range form, runs the resulting file through +the unix sort program to put the data into canonical order, then reads +it back, merging overlaps, and writing the result in a form acceptable +to testbed-rootcert.py. + +Since we're doing all this anyway, the script also merges adjacent +blocks. + +Ordinarily, it would be dangerous to have the same program act as both +the source and sink of a pipeline, particularly for such a large data +set, as the chance of deadlock would approach 100%, but in this case +we know that the sort program must consume and buffer (somehow) all of +its input before writing a single line of output, so a single script +can safely act as a filter both before and after sort. 
+""" + +import sys, subprocess + +sorter = subprocess.Popen(("sort", "-T.", "-n"), + stdin = subprocess.PIPE, + stdout = subprocess.PIPE) + +for line in sys.stdin: + handle, asn = line.split() + + if "-" in asn: + range_min, range_max = asn.split("-") + else: + range_min, range_max = asn, asn + + sorter.stdin.write("%d %d\n" % (long(range_min), long(range_max))) + +sorter.stdin.close() + +prev_min = None +prev_max = None + +def show(): + if prev_min and prev_max: + sys.stdout.write("x\t%s-%s\n" % (prev_min, prev_max)) + +for line in sorter.stdout: + this_min, this_max = line.split() + this_min = long(this_min) + this_max = long(this_max) + + if prev_min and prev_max and prev_max + 1 >= this_min: + prev_min = min(prev_min, this_min) + prev_max = max(prev_max, this_max) + + else: + show() + prev_min = this_min + prev_max = this_max + +show() + +sorter.stdout.close() + +sys.exit(sorter.wait()) diff --git a/potpourri/whack-ripe-prefixes.py b/potpourri/whack-ripe-prefixes.py new file mode 100644 index 00000000..52ea3f18 --- /dev/null +++ b/potpourri/whack-ripe-prefixes.py @@ -0,0 +1,101 @@ +# $Id$ +# +# Copyright (C) 2010 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Fix problems in prefixes.csv generated from RIPE's database. 
+RIPE's database contains inconsistencies, overlaps, and format errors
+""" + +import sys +import subprocess +import rpki.ipaddrs + +sorter = subprocess.Popen(("sort", "-T.", "-n"), + stdin = subprocess.PIPE, + stdout = subprocess.PIPE) + +for line in sys.stdin: + handle, prefix = line.split() + + if "-" in prefix: + range_min, range_max = prefix.split("-") + range_min = rpki.ipaddrs.parse(range_min) + range_max = rpki.ipaddrs.parse(range_max) + + else: + address, length = prefix.split("/") + address = rpki.ipaddrs.parse(address) + mask = (1L << (address.bits - int(length))) - 1 + range_min = address & ~mask + range_max = address | mask + + sorter.stdin.write("%d %d\n" % (long(range_min), long(range_max))) + +sorter.stdin.close() + +prev_min = None +prev_max = None + +def address(number): + if number > 0xffffffff: + return rpki.ipaddrs.v6addr(number) + else: + return rpki.ipaddrs.v4addr(number) + +def show(): + if prev_min and prev_max: + sys.stdout.write("x\t%s-%s\n" % (address(prev_min), address(prev_max))) + +for line in sorter.stdout: + this_min, this_max = line.split() + this_min = long(this_min) + this_max = long(this_max) + + if prev_min and prev_max and prev_max + 1 >= this_min: + prev_min = min(prev_min, this_min) + prev_max = max(prev_max, this_max) + + else: + show() + prev_min = this_min + prev_max = this_max + +show() + +sorter.stdout.close() + +sys.exit(sorter.wait()) diff --git a/potpourri/x509-dot.py b/potpourri/x509-dot.py new file mode 100644 index 00000000..42e1543a --- /dev/null +++ b/potpourri/x509-dot.py @@ -0,0 +1,170 @@ +# $Id$ + +""" +Generate .dot description of a certificate tree. + +Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + +Portions copyright (C) 2008 American Registry for Internet Numbers ("ARIN") + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+""" + +import rpki.POW, sys, glob, os + +class x509(object): + + ski = None + aki = None + + show_file = False + show_ski = False + show_aki = False + show_issuer = True + show_subject = True + + cn_only = True + + subjects = {} + + def __init__(self, filename): + + while filename.startswith("./"): + filename = filename[2:] + + self.filename = filename + + f = open(filename, "rb") + text = f.read() + f.close() + + if "-----BEGIN" in text: + self.pow = rpki.POW.X509.pemRead(text) + else: + self.pow = rpki.POW.X509.derRead(text) + + + try: + self.ski = ":".join(["%02X" % ord(i) for i in self.pow.getSKI()]) + except: + pass + + try: + self.aki = ":".join(["%02X" % ord(i) for i in self.pow.getAKI()]) + except: + pass + + self.subject = self.canonize(self.pow.getSubject()) + self.issuer = self.canonize(self.pow.getIssuer()) + + if self.subject in self.subjects: + self.subjects[self.subject].append(self) + else: + self.subjects[self.subject] = [self] + + def canonize(self, name): + + # Probably should just use rpki.x509.X501DN class here. 
+ + try: + if self.cn_only and name[0][0][0] == "2.5.4.3": + return name[0][0][1] + except: + pass + + return name + + def set_node(self, node): + + self.node = node + + def dot(self): + + label = [] + + if self.show_issuer: + label.append(("Issuer", self.issuer)) + + if self.show_subject: + label.append(("Subject", self.subject)) + + if self.show_file: + label.append(("File", self.filename)) + + if self.show_aki: + label.append(("AKI", self.aki)) + + if self.show_ski: + label.append(("SKI", self.ski)) + + print "#", repr(label) + + if len(label) > 1: + print '%s [shape = record, label = "{%s}"];' % (self.node, "|".join("{%s|%s}" % (x, y) for x, y in label if y is not None)) + else: + print '%s [label = "%s"];' % (self.node, label[0][1]) + + for issuer in self.subjects.get(self.issuer, ()): + + if issuer is self: + print "# Issuer is self" + issuer = None + + if issuer is not None and self.aki is not None and self.ski is not None and self.aki == self.ski: + print "# Self-signed" + issuer = None + + if issuer is not None and self.aki is not None and issuer.ski is not None and self.aki != issuer.ski: + print "# AKI does not match issuer SKI" + issuer = None + + if issuer is not None: + print "%s -> %s;" % (issuer.node, self.node) + + print + +certs = [] + +for topdir in sys.argv[1:] or ["."]: + for dirpath, dirnames, filenames in os.walk(topdir): + certs += [x509(dirpath + "/" + filename) for filename in filenames if filename.endswith(".cer")] + +for i, cert in enumerate(certs): + cert.set_node("cert_%d" % i) + +print """\ +digraph certificates { + +rotate = 90; +#size = "11,8.5"; +splines = true; +ratio = fill; + +""" + +for cert in certs: + cert.dot() + +print "}" diff --git a/rcynic/Doxyfile b/rcynic/Doxyfile deleted file mode 100644 index aa183715..00000000 --- a/rcynic/Doxyfile +++ /dev/null @@ -1,1679 +0,0 @@ -# Doxyfile 1.7.3 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project. 
-# -# All text after a hash (#) is considered a comment and will be ignored. -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" "). - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = rcynic - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = 1.0 - -# Using the PROJECT_BRIEF tag one can provide an optional one line description for a project that appears at the top of each page and should give viewer a quick idea about the purpose of the project. Keep the description short. - -PROJECT_BRIEF = - -# With the PROJECT_LOGO tag one can specify an logo or icon that is -# included in the documentation. The maximum height of the logo should not -# exceed 55 pixels and the maximum width should not exceed 200 pixels. -# Doxygen will copy the logo to the output directory. - -PROJECT_LOGO = - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. 
-# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = doc - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrillic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. 
- -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. 
- -STRIP_FROM_PATH = - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful if your file system -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = NO - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. 
- -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = YES - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 8 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = YES - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it -# parses. 
With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this -# tag. The format is ext=language, where ext is a file extension, and language -# is one of the parsers supported by doxygen: IDL, Java, Javascript, CSharp, C, -# C++, D, PHP, Objective-C, Python, Fortran, VHDL, C, C++. For instance to make -# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C -# (default is Fortran), use: inc=Fortran f=C. Note that for custom extensions -# you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also makes the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = NO - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. -# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. 
- -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penalty. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. 
Note that the value works on -# a logarithmic scale so increasing the size by one will roughly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = YES - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = YES - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespaces are hidden. 
- -EXTRACT_ANON_NSPACES = YES - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = NO - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. 
- -CASE_SENSE_NAMES = YES - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = NO - -# If the FORCE_LOCAL_INCLUDES tag is set to YES then Doxygen -# will list include files with double quotes in the documentation -# rather than with sharp brackets. - -FORCE_LOCAL_INCLUDES = NO - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen -# will sort the (brief and detailed) documentation of class members so that -# constructors and destructors are listed first. If set to NO (the default) -# the constructors will appear in the respective orders defined by -# SORT_MEMBER_DOCS and SORT_BRIEF_DOCS. -# This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO -# and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO. - -SORT_MEMBERS_CTORS_1ST = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. 
If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = NO - -# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper type resolution of all parameters of a function it will reject a -# match between the prototype and the implementation of a member function even if there is only one candidate or it is obvious which candidate to choose by doing a simple string match. By disabling STRICT_PROTO_MATCHING doxygen -# will still accept a match between prototype and implementation in such cases. - -STRICT_PROTO_MATCHING = NO - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = YES - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = YES - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. 
- -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or macro consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and macros in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = NO - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. -# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. 
See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed -# by doxygen. The layout file controls the global structure of the generated -# output files in an output format independent way. The create the layout file -# that represents doxygen's defaults, run doxygen with the -l option. -# You can optionally specify a file name after the option, if omitted -# DoxygenLayout.xml will be used as the name of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = YES - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# The WARN_NO_PARAMDOC option can be enabled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. 
- -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. 
If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.d *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh -# *.hxx *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.dox *.py -# *.f90 *.f *.for *.vhd *.vhdl - -FILE_PATTERNS = *.[ch] - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = NO - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix file system feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). 
- -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty or if -# non of the patterns match the file name, INPUT_FILTER is applied. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). 
- -FILTER_SOURCE_FILES = NO - -# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file -# pattern. A pattern will override the setting for FILTER_PATTERN (if any) -# and it is also possible to disable source filtering for a specific pattern -# using *.ext= (so without naming a filter). This option only has effect when -# FILTER_SOURCE_FILES is enabled. - -FILTER_SOURCE_PATTERNS = - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = NO - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. 
- -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = YES - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. 
- -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = html - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. -# Doxygen will adjust the colors in the stylesheet and background images -# according to this color. Hue is specified as an angle on a colorwheel, -# see http://en.wikipedia.org/wiki/Hue for more information. -# For instance the value 0 represents red, 60 is yellow, 120 is green, -# 180 is cyan, 240 is blue, 300 purple, and 360 is red again. -# The allowed range is 0 to 359. - -HTML_COLORSTYLE_HUE = 220 - -# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of -# the colors in the HTML output. For a value of 0 the output will use -# grayscales only. 
A value of 255 will produce the most vivid colors. - -HTML_COLORSTYLE_SAT = 100 - -# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to -# the luminance component of the colors in the HTML output. Values below -# 100 gradually make the output lighter, whereas values above 100 make -# the output darker. The value divided by 100 is the actual gamma applied, -# so 80 represents a gamma of 0.8, The value 220 represents a gamma of 2.2, -# and 100 does not change the gamma. - -HTML_COLORSTYLE_GAMMA = 80 - -# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML -# page will contain the date and time when the page was generated. Setting -# this to NO can help when comparing the output of multiple runs. - -HTML_TIMESTAMP = YES - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = NO - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html -# for more information. 
- -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# When GENERATE_PUBLISHER_ID tag specifies a string that should uniquely identify -# the documentation publisher. This should be a reverse domain-name style -# string, e.g. com.mycompany.MyDocSet.documentation. - -DOCSET_PUBLISHER_ID = org.doxygen.Publisher - -# The GENERATE_PUBLISHER_NAME tag identifies the documentation publisher. - -DOCSET_PUBLISHER_NAME = Publisher - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = NO - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. 
- -HHC_LOCATION = - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = NO - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = NO - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = NO - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and -# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated -# that can be used as input for Qt's qhelpgenerator to generate a -# Qt Compressed Help (.qch) of the generated HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to -# add. 
For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the -# custom filter to add. For more information please see -# -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this -# project's -# filter section matches. -# -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files -# will be generated, which together with the HTML files, form an Eclipse help -# plugin. To install this plugin and make it available under the help contents -# menu in Eclipse, the contents of the directory containing the HTML and XML -# files needs to be copied into the plugins directory of eclipse. The name of -# the directory within the plugins directory should be the same as -# the ECLIPSE_DOC_ID value. After copying Eclipse needs to be restarted before -# the help appears. - -GENERATE_ECLIPSEHELP = NO - -# A unique identifier for the eclipse help plugin. When installing the plugin -# the directory name containing the HTML and XML files should also have -# this name. - -ECLIPSE_DOC_ID = org.doxygen.Project - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [0,1..20]) -# that doxygen will group on one line in the generated HTML documentation. 
-# Note that a value of 0 will completely suppress the enum values from appearing in the overview section. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to YES, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser). -# Windows users are probably better off using the HTML help feature. - -GENERATE_TREEVIEW = NO - -# By enabling USE_INLINE_TREES, doxygen will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list. - -USE_INLINE_TREES = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open -# links to external symbols imported via tag files in a separate window. - -EXT_LINKS_IN_WINDOW = NO - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -# Use the FORMULA_TRANPARENT tag to determine whether or not the images -# generated for formulas are transparent PNGs. Transparent PNGs are -# not supported properly for IE 6.0, but are supported on all modern browsers. -# Note that when changing this option you need to delete any form_*.png files -# in the HTML output before the changes have effect. 
- -FORMULA_TRANSPARENT = YES - -# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax -# (see http://www.mathjax.org) which uses client side Javascript for the -# rendering instead of using prerendered bitmaps. Use this if you do not -# have LaTeX installed or if you want to formulas look prettier in the HTML -# output. When enabled you also need to install MathJax separately and -# configure the path to it using the MATHJAX_RELPATH option. - -USE_MATHJAX = NO - -# When MathJax is enabled you need to specify the location relative to the -# HTML output directory using the MATHJAX_RELPATH option. The destination -# directory should contain the MathJax.js script. For instance, if the mathjax -# directory is located at the same level as the HTML output directory, then -# MATHJAX_RELPATH should be ../mathjax. The default value points to the mathjax.org site, so you can quickly see the result without installing -# MathJax, but it is strongly recommended to install a local copy of MathJax -# before deployment. - -MATHJAX_RELPATH = http://www.mathjax.org/mathjax - -# When the SEARCHENGINE tag is enabled doxygen will generate a search box -# for the HTML output. The underlying search engine uses javascript -# and DHTML and should work on any modern browser. Note that when using -# HTML help (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets -# (GENERATE_DOCSET) there is already a search function so this one should -# typically be disabled. For large projects the javascript based search engine -# can be slow, then enabling SERVER_BASED_SEARCH may provide a better solution. - -SEARCHENGINE = YES - -# When the SERVER_BASED_SEARCH tag is enabled the search engine will be -# implemented using a PHP enabled web server instead of at the web client -# using Javascript. Doxygen will generate the search PHP script and index -# file to put on the web server. 
The advantage of the server -# based approach is that it scales better to large projects and allows -# full text search. The disadvantages are that it is more difficult to setup -# and does not have live searching capabilities. - -SERVER_BASED_SEARCH = NO - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = YES - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. -# Note that when enabling USE_PDFLATEX this option is only used for -# generating bitmaps for formulas in the HTML output, but not in the -# Makefile that is written to the output directory. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = YES - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = letter - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. 
- -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = YES - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = YES - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = YES - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = YES - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include -# source code with syntax highlighting in the LaTeX output. -# Note that which sources are shown also depends on other settings -# such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. 
- -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. 
- -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. 
- -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. 
- -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. 
To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = DOXYGEN_GETS_HOPELESSLY_CONFUSED_BY_THIS_SECTION - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition that overrules the definition found in the source code. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all references to function-like macros -# that are alone on a line, have an all uppercase name, and do not end with a -# semicolon, because these will confuse the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. 
- -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate an inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option also works with HAVE_DOT disabled, but it is recommended to -# install and use dot, since it yields more powerful graphs. - -CLASS_DIAGRAMS = YES - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. 
- -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = YES - -# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is -# allowed to run in parallel. When set to 0 (the default) doxygen will -# base this on the number of processors available in the system. You can set it -# explicitly to a value larger than 0 to get control over the balance -# between CPU load and processing speed. - -DOT_NUM_THREADS = 0 - -# By default doxygen will write a font called Helvetica to the output -# directory and reference it in all dot files that doxygen generates. -# When you want a differently looking font you can specify the font name -# using DOT_FONTNAME. You need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. 
- -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = YES - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = NO - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = NO - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = YES - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. 
Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will generate a graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, svg, gif or svg. -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The MSCFILE_DIRS tag can be used to specify one or more directories that -# contain msc files that are included in the documentation (see the -# \mscfile command). - -MSCFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. 
Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 0 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = NO - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. 
- -DOT_CLEANUP = YES diff --git a/rcynic/Makefile.in b/rcynic/Makefile.in deleted file mode 100644 index 06f7d9cd..00000000 --- a/rcynic/Makefile.in +++ /dev/null @@ -1,237 +0,0 @@ -# $Id$ - -NAME = rcynic - -BIN = ${NAME} -SRC = ${NAME}.c -OBJ = ${NAME}.o - -GEN = defstack.h - -OBJS = ${OBJ} bio_f_linebreak.o - -CFLAGS = @CFLAGS@ -Wall -Wshadow -Wmissing-prototypes -Wmissing-declarations -Werror-implicit-function-declaration -LDFLAGS = @LDFLAGS@ @LD_STATIC_FLAG@ -LIBS = @LIBS@ - -AWK = @AWK@ -SORT = @SORT@ -PYTHON = @PYTHON@ -RRDTOOL = @RRDTOOL@ -INSTALL = @INSTALL@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -sysconfdir = @sysconfdir@ - -abs_builddir = @abs_builddir@ -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ -srcdir = @srcdir@ - -RCYNIC_BIN_RCYNIC = @RCYNIC_BIN_RCYNIC@ -RCYNIC_CONF_DATA = @RCYNIC_CONF_DATA@ -RCYNIC_CONF_FILE = @RCYNIC_CONF_FILE@ -RCYNIC_CONF_RSYNC = @RCYNIC_CONF_RSYNC@ -RCYNIC_CONF_TA_DIR = @RCYNIC_CONF_TA_DIR@ -RCYNIC_CRON_USER = @RCYNIC_CRON_USER@ -RCYNIC_DATA_DIR = ${RCYNIC_DIR}/data -RCYNIC_DIR = @RCYNIC_DIR@ -RCYNIC_DIRS = ${RCYNIC_TA_DIR} ${RCYNIC_JAIL_DIRS} ${RCYNIC_DATA_DIR} ${RPKIRTR_DIR} ${RPKIRTR_DIR}/sockets -RCYNIC_GECOS = RPKI Validation System -RCYNIC_GROUP = rcynic -RCYNIC_HTML_DIR = @RCYNIC_HTML_DIR@ -RCYNIC_INSTALL_TARGETS = @RCYNIC_INSTALL_TARGETS@ -RCYNIC_JAIL_DIRS = @RCYNIC_JAIL_DIRS@ -RCYNIC_STATIC_RSYNC = @RCYNIC_STATIC_RSYNC@ -RCYNIC_TA_DIR = @RCYNIC_TA_DIR@ -RCYNIC_USER = rcynic -RPKIRTR_DIR = ${RCYNIC_DIR}/rpki-rtr -RPKIRTR_GECOS = RPKI router server -RPKIRTR_GROUP = rpkirtr -RPKIRTR_MODE = 775 -RPKIRTR_USER = rpkirtr - -SCRIPTS = rcynic-text rcynic-html rcynic-svn validation_status rcynic-cron - 
-all: ${BIN} ${SCRIPTS} ${RCYNIC_STATIC_RSYNC} - -clean: - if test -r static-rsync/Makefile; then cd static-rsync; ${MAKE} $@; fi - rm -f ${BIN} ${OBJS} ${SCRIPTS} - -${OBJ}: ${SRC} ${GEN} - -${BIN}: ${OBJS} - ${CC} ${CFLAGS} -o $@ ${OBJS} ${LDFLAGS} ${LIBS} - -${GEN}: ${SRC} - ${PYTHON} ${abs_top_srcdir}/buildtools/defstack.py ${SRC} >$@.tmp - mv $@.tmp $@ - -COMPILE_PYTHON = \ - AC_PYTHON_INTERPRETER='${PYTHON}' \ - AC_RRDTOOL_BINARY='${RRDTOOL}' \ - ${PYTHON} ${abs_top_srcdir}/buildtools/make-rcynic-script.py <$? >$@; \ - chmod 755 $@ - -COMPILE_PYTHON_CRON = \ - AC_PYTHON_INTERPRETER='${PYTHON}' \ - AC_RCYNIC_USER='${RCYNIC_USER}' \ - AC_RCYNIC_DIR='${RCYNIC_DIR}' \ - AC_bindir='${bindir}' \ - AC_sbindir='${sbindir}' \ - AC_sysconfdir='${sysconfdir}' \ - AC_libexecdir='${libexecdir}' \ - AC_RCYNIC_HTML_DIR='${RCYNIC_HTML_DIR}' \ - ${PYTHON} ${abs_top_srcdir}/buildtools/make-rcynic-script.py <$? >$@; \ - chmod 755 $@ - -rcynic-text: rcynic-text.py - ${COMPILE_PYTHON} - -rcynic-html: rcynic-html.py - ${COMPILE_PYTHON} - -rcynic-svn: rcynic-svn.py - ${COMPILE_PYTHON} - -validation_status: validation_status.py - ${COMPILE_PYTHON} - -rcynic-cron: rcynic-cron.py - ${COMPILE_PYTHON_CRON} - -tags: TAGS - -TAGS: ${SRC} ${GEN} - etags ${SRC} ${GEN} - -test: ${BIN} - if test -r rcynic.conf; \ - then \ - ./${BIN} -j 0 && \ - test -r rcynic.xml && \ - echo && \ - ./rcynic-text rcynic.xml; \ - else \ - echo No rcynic.conf, skipping test; \ - fi - -uninstall deinstall: - @echo Sorry, automated deinstallation of rcynic is not implemented yet - -distclean: clean - if test -r static-rsync/Makefile; then cd static-rsync; ${MAKE} $@; fi - rm -f Makefile - -static-rsync/rsync: - @echo "Building static rsync for use in chroot jail" - cd static-rsync; ${MAKE} all - -install: all ${RCYNIC_INSTALL_TARGETS} - -install-always: \ - install-directories install-scripts install-rcynic install-rcynic-conf - -install-postconf: \ - install-user-and-group install-directory-ownership 
install-crontab - -install-jailed: \ - install-static-rsync install-shared-libraries install-rc-scripts - -install-directories: ${RCYNIC_DIRS} - -${RCYNIC_DIRS} ${DESTDIR}${bindir} ${DESTDIR}${sysconfdir}: - ${INSTALL} -v -d $@ - -install-directory-ownership: ${RCYNIC_DATA_DIR} ${RPKIRTR_DIR} ${RPKIRTR_DIR}/sockets - chown ${RCYNIC_USER}:${RCYNIC_GROUP} ${RCYNIC_DATA_DIR} ${RPKIRTR_DIR} - chown ${RPKIRTR_USER}:${RCYNIC_GROUP} ${RPKIRTR_DIR}/sockets - chmod ${RPKIRTR_MODE} ${RPKIRTR_DIR}/sockets - -install-rcynic-conf: ${RCYNIC_CONF_FILE} - -${RCYNIC_CONF_FILE}: - @echo - @echo Found no ${RCYNIC_CONF_FILE}, creating basic config and installing default trust anchor locators. - @echo You might want to edit this. - @echo - ${INSTALL} -v -d ${RCYNIC_TA_DIR} - ${INSTALL} -v -p -m 444 sample-trust-anchors/*.tal ${RCYNIC_TA_DIR} - @echo > $@.tmp '# Basic rcynic configuration file with default trust anchors.' - @echo >>$@.tmp '# See documentation for details.' - @echo >>$@.tmp '' - @echo >>$@.tmp '[rcynic]' - @echo >>$@.tmp 'rsync-program = ${RCYNIC_CONF_RSYNC}' - @echo >>$@.tmp 'authenticated = ${RCYNIC_CONF_DATA}/authenticated' - @echo >>$@.tmp 'unauthenticated = ${RCYNIC_CONF_DATA}/unauthenticated' - @echo >>$@.tmp 'xml-summary = ${RCYNIC_CONF_DATA}/rcynic.xml' - @echo >>$@.tmp 'jitter = 600' - @echo >>$@.tmp 'max-parallel-fetches = 8' - @echo >>$@.tmp 'use-syslog = true' - @echo >>$@.tmp 'log-level = log_usage_err' - @echo >>$@.tmp 'trust-anchor-directory = ${RCYNIC_CONF_TA_DIR}' - @chmod 444 $@.tmp - @mv -f $@.tmp $@ - -install-rcynic: ${RCYNIC_BIN_RCYNIC} - -${RCYNIC_BIN_RCYNIC}: ${BIN} - ${INSTALL} -p -m 555 ${BIN} $@ - -install-static-rsync: ${RCYNIC_DIR}/bin/rsync - -${RCYNIC_DIR}/bin/rsync: static-rsync/rsync - ${INSTALL} -p -m 555 static-rsync/rsync $@ - -install-scripts: \ - ${DESTDIR}${bindir} \ - ${DESTDIR}${bindir}/rcynic-text \ - ${DESTDIR}${bindir}/rcynic-html \ - ${DESTDIR}${bindir}/rcynic-svn \ - ${DESTDIR}${bindir}/rcynic-cron \ - 
${DESTDIR}${bindir}/validation_status - -${DESTDIR}${bindir}/rcynic-text: rcynic-text - ${INSTALL} -p -m 555 rcynic-text $@ - -${DESTDIR}${bindir}/rcynic-html: rcynic-html - ${INSTALL} -p -m 555 rcynic-html $@ - -${DESTDIR}${bindir}/rcynic-svn: rcynic-svn - ${INSTALL} -p -m 555 rcynic-svn $@ - -${DESTDIR}${bindir}/rcynic-cron: rcynic-cron - ${INSTALL} -p -m 555 rcynic-cron $@ - -${DESTDIR}${bindir}/validation_status: validation_status - ${INSTALL} -p -m 555 validation_status $@ - -.FORCE: - -install-crontab: .FORCE - @if test "X`/usr/bin/crontab -l -u ${RCYNIC_CRON_USER} 2>/dev/null`" != "X"; \ - then \ - echo "${RCYNIC_CRON_USER} already has a crontab, leaving it alone"; \ - else \ - echo "Setting up ${RCYNIC_CRON_USER}'s crontab to run rcynic-cron script"; \ - ${AWK} -v t=`hexdump -n 2 -e '"%u\n"' /dev/random` '\ - BEGIN {printf "MAILTO=root\n%u * * * *\texec ${bindir}/rcynic-cron\n", t % 60}' | \ - /usr/bin/crontab -u ${RCYNIC_CRON_USER} -; \ - fi - -# Platform-specific rules below here. - -@RCYNIC_MAKE_RULES@ diff --git a/rcynic/README b/rcynic/README deleted file mode 100644 index ecc92ac5..00000000 --- a/rcynic/README +++ /dev/null @@ -1,13 +0,0 @@ -$Id$ - -"Cynical rsync" -- fetch and validate RPKI certificates. - -This is the primary RPKI relying party validation tool. - -See: - -- The primary documentation at http://trac.rpki.net/ - -- The PDF manual in ../doc/manual.pdf, or - -- The flat text page ../doc/doc.RPKI.RP.rcynic diff --git a/rcynic/bio_f_linebreak.c b/rcynic/bio_f_linebreak.c deleted file mode 100644 index c2d9fb4d..00000000 --- a/rcynic/bio_f_linebreak.c +++ /dev/null @@ -1,268 +0,0 @@ -/* $Id$ */ - -/** @file bio_f_linebreak.c - * - * This implements a trivial filter BIO (see OpenSSL manual) which - * does one rather silly thing: on read, it inserts line break into - * the input stream at regular intervals. - * - * You might reasonably ask why anyone would want such a thing. 
The - * answer is that OpenSSL's Base64 filter BIO has two input modes, - * neither of which is really useful for reading generalized Base64 - * input. In one mode, it requires line breaks at most every 79 - * characters; in the other mode, it requires that there to be no - * whitespace of any kind at all. These modes work for the things - * that OpenSSL itself does with Base64 decoding, but fail miserably - * when used to read free-form Base64 text. - * - * The real solution would be to rewrite OpenSSL's Base64 filter to - * support a third mode in which it accepts generalized Base64 text, - * but that's been suggested before and nothing has been done about - * it, probably because OpenSSL's Base64 implementation is completely - * line-oriented and rather nasty. - * - * So this filter is a stop-gap to let us get the job done. Since it - * uses a (relatively) well-defined OpenSSL internal API, it should be - * reasonably stable. - * - * 98% of the code in this module is derived from "null filter" BIO - * that ships with OpenSSL (BIO_TYPE_NULL_FILTER), so I consider this - * to be a derivative work, thus am leaving it under OpenSSL's license. - */ - -/* Original crypto/bio/bf_null.c code was: - * - * Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) - * All rights reserved. - * - * This package is an SSL implementation written - * by Eric Young (eay@cryptsoft.com). - * The implementation was written so as to conform with Netscapes SSL. - * - * This library is free for commercial and non-commercial use as long as - * the following conditions are aheared to. The following conditions - * apply to all code found in this distribution, be it the RC4, RSA, - * lhash, DES, etc., code; not just the SSL code. The SSL documentation - * included with this distribution is covered by the same copyright terms - * except that the holder is Tim Hudson (tjh@cryptsoft.com). 
- * - * Copyright remains Eric Young's, and as such any Copyright notices in - * the code are not to be removed. - * If this package is used in a product, Eric Young should be given attribution - * as the author of the parts of the library used. - * This can be in the form of a textual message at program startup or - * in documentation (online or textual) provided with the package. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. Redistributions of source code must retain the copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. All advertising materials mentioning features or use of this software - * must display the following acknowledgement: - * "This product includes cryptographic software written by - * Eric Young (eay@cryptsoft.com)" - * The word 'cryptographic' can be left out if the rouines from the library - * being used are not cryptographic related :-). - * 4. If you include any Windows specific code (or a derivative thereof) from - * the apps directory (application code) you must include an acknowledgement: - * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" - * - * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS - * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY - * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF - * SUCH DAMAGE. - * - * The licence and distribution terms for any publically available version or - * derivative of this code cannot be changed. i.e. this code cannot simply be - * copied and put under another distribution licence - * [including the GNU Public Licence.] - */ - -#include -#include -#include -#include - -#include "bio_f_linebreak.h" - -#ifndef BIO_TYPE_LINEBREAK_FILTER -#define BIO_TYPE_LINEBREAK_FILTER (99 | BIO_TYPE_FILTER) -#endif - -#ifndef LINEBREAK_MAX_LINE -#define LINEBREAK_MAX_LINE 72 /* We break anything longer than this */ -#endif - -static int linebreak_new(BIO *b) -{ - b->init = 1; - b->ptr = NULL; - b->flags = 0; - b->num = 0; - return 1; -} - -static int linebreak_free(BIO *b) -{ - return b != NULL; -} - -static int linebreak_read(BIO *b, char *out, int outl) -{ - int ret = 0, want, n, i; - - if (out == NULL || b->next_bio == NULL || outl <= 0) - return 0; - - while (outl > 0) { - - if (b->num >= LINEBREAK_MAX_LINE) { - b->num = 0; - *out++ = '\n'; - outl--; - ret++; - continue; - } - - want = LINEBREAK_MAX_LINE - b->num; - if (want > outl) - want = outl; - - n = BIO_read(b->next_bio, out, want); - - BIO_clear_retry_flags(b); - BIO_copy_next_retry(b); - - if (n > 0) { - for (i = n - 1; i >= 0; i--) - if (out[i] == '\n') - break; - if (i >= 0) - b->num = n - i - 1; - else - b->num += n; - out += n; - outl -= n; - ret += n; - continue; - } - - if (ret == 0) - ret = n; - break; - } - - return ret; -} - -static int 
linebreak_write(BIO *b, const char *in, int inl) -{ - int ret = 0; - - if (in == NULL || inl <= 0 || b->next_bio == NULL) - return 0; - - ret = BIO_write(b->next_bio, in, inl); - - BIO_clear_retry_flags(b); - BIO_copy_next_retry(b); - - return ret; -} - -static long linebreak_ctrl(BIO *b, int cmd, long num, void *ptr) -{ - long ret; - - if (b->next_bio == NULL) - return 0; - - switch (cmd) { - - case BIO_C_DO_STATE_MACHINE: - BIO_clear_retry_flags(b); - ret = BIO_ctrl(b->next_bio, cmd, num, ptr); - BIO_copy_next_retry(b); - return ret; - - case BIO_CTRL_DUP: - return 0; - - default: - return BIO_ctrl(b->next_bio, cmd, num, ptr); - - } -} - -static long linebreak_callback_ctrl(BIO *b, int cmd, bio_info_cb *cb) -{ - if (b->next_bio == NULL) - return 0; - else - return BIO_callback_ctrl(b->next_bio, cmd, cb); -} - -static int linebreak_puts(BIO *b, const char *str) -{ - if (b->next_bio == NULL) - return 0; - else - return BIO_puts(b->next_bio, str); -} - -static BIO_METHOD methods_linebreak = { - BIO_TYPE_LINEBREAK_FILTER, - "Linebreak filter", - linebreak_write, - linebreak_read, - linebreak_puts, - NULL, /* No linebreak_gets() */ - linebreak_ctrl, - linebreak_new, - linebreak_free, - linebreak_callback_ctrl, -}; - -BIO_METHOD *BIO_f_linebreak(void) -{ - return &methods_linebreak; -} - - -#ifdef __BIO_F_LINEBREAK_UNIT_TEST__ - -int main (int argc, char *argv[]) -{ - BIO *ich = BIO_new_fd(0, 1); - BIO *och = BIO_new_fd(1, 1); - BIO *fch = BIO_new(BIO_f_linebreak()); - char buffer[4098]; - int n; - - if (ich == NULL || och == NULL || fch == NULL) - return 1; - - BIO_push(fch, ich); - ich = fch; - fch = NULL; - - while ((n = BIO_read(ich, buffer, sizeof(buffer))) > 0) - BIO_write(och, buffer, n); - - BIO_free_all(ich); - BIO_free_all(och); - return 0; -} - -#endif diff --git a/rcynic/bio_f_linebreak.h b/rcynic/bio_f_linebreak.h deleted file mode 100644 index b5becfa6..00000000 --- a/rcynic/bio_f_linebreak.h +++ /dev/null @@ -1,10 +0,0 @@ -/* $Id$ */ - -#ifndef 
__BIO_F_LINEBREAK__ -#define __BIO_F_LINEBREAK__ - -#include - -BIO_METHOD *BIO_f_linebreak(void); - -#endif /* __BIO_F_LINEBREAK__ */ diff --git a/rcynic/defstack.h b/rcynic/defstack.h deleted file mode 100644 index 370c8129..00000000 --- a/rcynic/defstack.h +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Automatically generated, do not edit. - * Generator $Id: defstack.py 4878 2012-11-15 22:13:53Z sra $ - */ - -#ifndef __RCYNIC_C__DEFSTACK_H__ -#define __RCYNIC_C__DEFSTACK_H__ - -/* - * Safestack macros for validation_status_t. - */ -#define sk_validation_status_t_new(st) SKM_sk_new(validation_status_t, (st)) -#define sk_validation_status_t_new_null() SKM_sk_new_null(validation_status_t) -#define sk_validation_status_t_free(st) SKM_sk_free(validation_status_t, (st)) -#define sk_validation_status_t_num(st) SKM_sk_num(validation_status_t, (st)) -#define sk_validation_status_t_value(st, i) SKM_sk_value(validation_status_t, (st), (i)) -#define sk_validation_status_t_set(st, i, val) SKM_sk_set(validation_status_t, (st), (i), (val)) -#define sk_validation_status_t_zero(st) SKM_sk_zero(validation_status_t, (st)) -#define sk_validation_status_t_push(st, val) SKM_sk_push(validation_status_t, (st), (val)) -#define sk_validation_status_t_unshift(st, val) SKM_sk_unshift(validation_status_t, (st), (val)) -#define sk_validation_status_t_find(st, val) SKM_sk_find(validation_status_t, (st), (val)) -#define sk_validation_status_t_find_ex(st, val) SKM_sk_find_ex(validation_status_t, (st), (val)) -#define sk_validation_status_t_delete(st, i) SKM_sk_delete(validation_status_t, (st), (i)) -#define sk_validation_status_t_delete_ptr(st, ptr) SKM_sk_delete_ptr(validation_status_t, (st), (ptr)) -#define sk_validation_status_t_insert(st, val, i) SKM_sk_insert(validation_status_t, (st), (val), (i)) -#define sk_validation_status_t_set_cmp_func(st, cmp) SKM_sk_set_cmp_func(validation_status_t, (st), (cmp)) -#define sk_validation_status_t_dup(st) SKM_sk_dup(validation_status_t, st) -#define 
sk_validation_status_t_pop_free(st, free_func) SKM_sk_pop_free(validation_status_t, (st), (free_func)) -#define sk_validation_status_t_shift(st) SKM_sk_shift(validation_status_t, (st)) -#define sk_validation_status_t_pop(st) SKM_sk_pop(validation_status_t, (st)) -#define sk_validation_status_t_sort(st) SKM_sk_sort(validation_status_t, (st)) -#define sk_validation_status_t_is_sorted(st) SKM_sk_is_sorted(validation_status_t, (st)) - -/* - * Safestack macros for walk_ctx_t. - */ -#define sk_walk_ctx_t_new(st) SKM_sk_new(walk_ctx_t, (st)) -#define sk_walk_ctx_t_new_null() SKM_sk_new_null(walk_ctx_t) -#define sk_walk_ctx_t_free(st) SKM_sk_free(walk_ctx_t, (st)) -#define sk_walk_ctx_t_num(st) SKM_sk_num(walk_ctx_t, (st)) -#define sk_walk_ctx_t_value(st, i) SKM_sk_value(walk_ctx_t, (st), (i)) -#define sk_walk_ctx_t_set(st, i, val) SKM_sk_set(walk_ctx_t, (st), (i), (val)) -#define sk_walk_ctx_t_zero(st) SKM_sk_zero(walk_ctx_t, (st)) -#define sk_walk_ctx_t_push(st, val) SKM_sk_push(walk_ctx_t, (st), (val)) -#define sk_walk_ctx_t_unshift(st, val) SKM_sk_unshift(walk_ctx_t, (st), (val)) -#define sk_walk_ctx_t_find(st, val) SKM_sk_find(walk_ctx_t, (st), (val)) -#define sk_walk_ctx_t_find_ex(st, val) SKM_sk_find_ex(walk_ctx_t, (st), (val)) -#define sk_walk_ctx_t_delete(st, i) SKM_sk_delete(walk_ctx_t, (st), (i)) -#define sk_walk_ctx_t_delete_ptr(st, ptr) SKM_sk_delete_ptr(walk_ctx_t, (st), (ptr)) -#define sk_walk_ctx_t_insert(st, val, i) SKM_sk_insert(walk_ctx_t, (st), (val), (i)) -#define sk_walk_ctx_t_set_cmp_func(st, cmp) SKM_sk_set_cmp_func(walk_ctx_t, (st), (cmp)) -#define sk_walk_ctx_t_dup(st) SKM_sk_dup(walk_ctx_t, st) -#define sk_walk_ctx_t_pop_free(st, free_func) SKM_sk_pop_free(walk_ctx_t, (st), (free_func)) -#define sk_walk_ctx_t_shift(st) SKM_sk_shift(walk_ctx_t, (st)) -#define sk_walk_ctx_t_pop(st) SKM_sk_pop(walk_ctx_t, (st)) -#define sk_walk_ctx_t_sort(st) SKM_sk_sort(walk_ctx_t, (st)) -#define sk_walk_ctx_t_is_sorted(st) SKM_sk_is_sorted(walk_ctx_t, (st)) - -/* 
- * Safestack macros for rsync_ctx_t. - */ -#define sk_rsync_ctx_t_new(st) SKM_sk_new(rsync_ctx_t, (st)) -#define sk_rsync_ctx_t_new_null() SKM_sk_new_null(rsync_ctx_t) -#define sk_rsync_ctx_t_free(st) SKM_sk_free(rsync_ctx_t, (st)) -#define sk_rsync_ctx_t_num(st) SKM_sk_num(rsync_ctx_t, (st)) -#define sk_rsync_ctx_t_value(st, i) SKM_sk_value(rsync_ctx_t, (st), (i)) -#define sk_rsync_ctx_t_set(st, i, val) SKM_sk_set(rsync_ctx_t, (st), (i), (val)) -#define sk_rsync_ctx_t_zero(st) SKM_sk_zero(rsync_ctx_t, (st)) -#define sk_rsync_ctx_t_push(st, val) SKM_sk_push(rsync_ctx_t, (st), (val)) -#define sk_rsync_ctx_t_unshift(st, val) SKM_sk_unshift(rsync_ctx_t, (st), (val)) -#define sk_rsync_ctx_t_find(st, val) SKM_sk_find(rsync_ctx_t, (st), (val)) -#define sk_rsync_ctx_t_find_ex(st, val) SKM_sk_find_ex(rsync_ctx_t, (st), (val)) -#define sk_rsync_ctx_t_delete(st, i) SKM_sk_delete(rsync_ctx_t, (st), (i)) -#define sk_rsync_ctx_t_delete_ptr(st, ptr) SKM_sk_delete_ptr(rsync_ctx_t, (st), (ptr)) -#define sk_rsync_ctx_t_insert(st, val, i) SKM_sk_insert(rsync_ctx_t, (st), (val), (i)) -#define sk_rsync_ctx_t_set_cmp_func(st, cmp) SKM_sk_set_cmp_func(rsync_ctx_t, (st), (cmp)) -#define sk_rsync_ctx_t_dup(st) SKM_sk_dup(rsync_ctx_t, st) -#define sk_rsync_ctx_t_pop_free(st, free_func) SKM_sk_pop_free(rsync_ctx_t, (st), (free_func)) -#define sk_rsync_ctx_t_shift(st) SKM_sk_shift(rsync_ctx_t, (st)) -#define sk_rsync_ctx_t_pop(st) SKM_sk_pop(rsync_ctx_t, (st)) -#define sk_rsync_ctx_t_sort(st) SKM_sk_sort(rsync_ctx_t, (st)) -#define sk_rsync_ctx_t_is_sorted(st) SKM_sk_is_sorted(rsync_ctx_t, (st)) - -/* - * Safestack macros for rsync_history_t. 
- */ -#define sk_rsync_history_t_new(st) SKM_sk_new(rsync_history_t, (st)) -#define sk_rsync_history_t_new_null() SKM_sk_new_null(rsync_history_t) -#define sk_rsync_history_t_free(st) SKM_sk_free(rsync_history_t, (st)) -#define sk_rsync_history_t_num(st) SKM_sk_num(rsync_history_t, (st)) -#define sk_rsync_history_t_value(st, i) SKM_sk_value(rsync_history_t, (st), (i)) -#define sk_rsync_history_t_set(st, i, val) SKM_sk_set(rsync_history_t, (st), (i), (val)) -#define sk_rsync_history_t_zero(st) SKM_sk_zero(rsync_history_t, (st)) -#define sk_rsync_history_t_push(st, val) SKM_sk_push(rsync_history_t, (st), (val)) -#define sk_rsync_history_t_unshift(st, val) SKM_sk_unshift(rsync_history_t, (st), (val)) -#define sk_rsync_history_t_find(st, val) SKM_sk_find(rsync_history_t, (st), (val)) -#define sk_rsync_history_t_find_ex(st, val) SKM_sk_find_ex(rsync_history_t, (st), (val)) -#define sk_rsync_history_t_delete(st, i) SKM_sk_delete(rsync_history_t, (st), (i)) -#define sk_rsync_history_t_delete_ptr(st, ptr) SKM_sk_delete_ptr(rsync_history_t, (st), (ptr)) -#define sk_rsync_history_t_insert(st, val, i) SKM_sk_insert(rsync_history_t, (st), (val), (i)) -#define sk_rsync_history_t_set_cmp_func(st, cmp) SKM_sk_set_cmp_func(rsync_history_t, (st), (cmp)) -#define sk_rsync_history_t_dup(st) SKM_sk_dup(rsync_history_t, st) -#define sk_rsync_history_t_pop_free(st, free_func) SKM_sk_pop_free(rsync_history_t, (st), (free_func)) -#define sk_rsync_history_t_shift(st) SKM_sk_shift(rsync_history_t, (st)) -#define sk_rsync_history_t_pop(st) SKM_sk_pop(rsync_history_t, (st)) -#define sk_rsync_history_t_sort(st) SKM_sk_sort(rsync_history_t, (st)) -#define sk_rsync_history_t_is_sorted(st) SKM_sk_is_sorted(rsync_history_t, (st)) - -/* - * Safestack macros for task_t. 
- */ -#define sk_task_t_new(st) SKM_sk_new(task_t, (st)) -#define sk_task_t_new_null() SKM_sk_new_null(task_t) -#define sk_task_t_free(st) SKM_sk_free(task_t, (st)) -#define sk_task_t_num(st) SKM_sk_num(task_t, (st)) -#define sk_task_t_value(st, i) SKM_sk_value(task_t, (st), (i)) -#define sk_task_t_set(st, i, val) SKM_sk_set(task_t, (st), (i), (val)) -#define sk_task_t_zero(st) SKM_sk_zero(task_t, (st)) -#define sk_task_t_push(st, val) SKM_sk_push(task_t, (st), (val)) -#define sk_task_t_unshift(st, val) SKM_sk_unshift(task_t, (st), (val)) -#define sk_task_t_find(st, val) SKM_sk_find(task_t, (st), (val)) -#define sk_task_t_find_ex(st, val) SKM_sk_find_ex(task_t, (st), (val)) -#define sk_task_t_delete(st, i) SKM_sk_delete(task_t, (st), (i)) -#define sk_task_t_delete_ptr(st, ptr) SKM_sk_delete_ptr(task_t, (st), (ptr)) -#define sk_task_t_insert(st, val, i) SKM_sk_insert(task_t, (st), (val), (i)) -#define sk_task_t_set_cmp_func(st, cmp) SKM_sk_set_cmp_func(task_t, (st), (cmp)) -#define sk_task_t_dup(st) SKM_sk_dup(task_t, st) -#define sk_task_t_pop_free(st, free_func) SKM_sk_pop_free(task_t, (st), (free_func)) -#define sk_task_t_shift(st) SKM_sk_shift(task_t, (st)) -#define sk_task_t_pop(st) SKM_sk_pop(task_t, (st)) -#define sk_task_t_sort(st) SKM_sk_sort(task_t, (st)) -#define sk_task_t_is_sorted(st) SKM_sk_is_sorted(task_t, (st)) - -#endif /* __RCYNIC_C__DEFSTACK_H__ */ diff --git a/rcynic/make-tal.sh b/rcynic/make-tal.sh deleted file mode 100755 index 854a76b9..00000000 --- a/rcynic/make-tal.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh - -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium, Inc. ("ISC") -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -# Generate a trust anchor locator (TAL) given the rsync URI for a -# self-signed RFC 3779 certificate. -# -# Usage: make-tal.sh uri [local_copy_of_certificate] -# -# The optional second parameter is the name of a local copy of the -# certificate to be checked against the copy retrieved from the URI; -# if present, this should be a local X.509 file in DER format. - -case "$1" in rsync://*) :;; *) echo 1>&2 "\"$1\" is not a rsync URI"; exit 1;; esac - -tmpfile="make-tal.tmp.$$" -trap "rm -f $tmpfile" 0 1 2 15 - -rsync "$1" "$tmpfile" || exit - -if test -n "$2" -then - diff -q "$tmpfile" "$2" || exit -fi - -echo "$1" -echo -openssl x509 -inform DER -in "$tmpfile" -pubkey -noout | -awk '!/-----(BEGIN|END)/' diff --git a/rcynic/rc-scripts/darwin/RCynic b/rcynic/rc-scripts/darwin/RCynic deleted file mode 100755 index d486a3c3..00000000 --- a/rcynic/rc-scripts/darwin/RCynic +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh - -# -# $Id$ -# -. /etc/rc.common - -name="rcynic" -start_cmd="rcynic_start" -stop_cmd="rcynic_stop" - -: ${rcynic_dir="/var/rcynic"} - -StartService() -{ - /sbin/umount "${rcynic_dir}/dev" 2>/dev/null - - if ! /sbin/mount_devfs devfs "${rcynic_dir}/dev"; then - echo "Mounting devfs on ${rcynic_dir}/dev failed..." - exit 1 - fi - - for i in /etc/localtime /etc/resolv.conf; do - j="${rcynic_dir}${i}" - if /bin/test -r "$i" && ! 
/usr/bin/cmp -s "$i" "$j"; then - /usr/bin/install -m 444 -o root -g wheel -p "$i" "$j" - fi - done - - /bin/ln -f /var/run/mDNSResponder "${rcynic_dir}/var/run/mDNSResponder" -} - -StopService() -{ - /sbin/umount "${rcynic_dir}/dev" 2>/dev/null -} - -RestartService() -{ - StartService -} - -RunService "$1" diff --git a/rcynic/rc-scripts/darwin/StartupParameters.plist b/rcynic/rc-scripts/darwin/StartupParameters.plist deleted file mode 100644 index ca46b676..00000000 --- a/rcynic/rc-scripts/darwin/StartupParameters.plist +++ /dev/null @@ -1,19 +0,0 @@ - - - - - Description - RCynic Setup - OrderPreference - None - Provides - - RCynic - - Uses - - Network - Resolver - - - diff --git a/rcynic/rc-scripts/freebsd/rc.d.rcynic b/rcynic/rc-scripts/freebsd/rc.d.rcynic deleted file mode 100755 index 9b7aa545..00000000 --- a/rcynic/rc-scripts/freebsd/rc.d.rcynic +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/sh - -# -# $Id$ -# -# PROVIDE: rcynic -# REQUIRE: DAEMON -# KEYWORD: nojail - -. /etc/rc.subr - -name="rcynic" -start_cmd="rcynic_start" -stop_cmd="rcynic_stop" - -: ${rcynic_dir="/var/rcynic"} - -rcynic_start() -{ - /sbin/umount "${rcynic_dir}/dev" 2>/dev/null - - if ! /sbin/mount -t devfs dev "${rcynic_dir}/dev"; then - echo "Mounting devfs on ${rcynic_dir}/dev failed..." - exit 1 - fi - - /sbin/devfs -m "${rcynic_dir}/dev" rule apply hide - /sbin/devfs -m "${rcynic_dir}/dev" rule apply path null unhide - /sbin/devfs -m "${rcynic_dir}/dev" rule apply path random unhide - - for i in /etc/localtime /etc/resolv.conf; do - j="${rcynic_dir}${i}" - if /bin/test -r "$i" && ! 
/usr/bin/cmp -s "$i" "$j"; then - /usr/bin/install -m 444 -o root -g wheel -p "$i" "$j" - fi - done -} - -rcynic_stop() -{ - /sbin/umount "${rcynic_dir}/dev" 2>/dev/null -} - -load_rc_config $name -run_rc_command "$1" diff --git a/rcynic/rcynic-cron.py b/rcynic/rcynic-cron.py deleted file mode 100644 index fbe1ebeb..00000000 --- a/rcynic/rcynic-cron.py +++ /dev/null @@ -1,106 +0,0 @@ -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2013 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR -# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Cron job for rcynic and rtr-origin in stock configuration. - -Locking code here works like FreeBSD's lockf(1) utility given -k and --t 0 options, which is both the sanest and simplest combination for -our purposes. In theory this is portable to any Unix-like system. 
-""" - -import os -import sys -import pwd -import fcntl -import errno -import argparse - -def run(*cmd, **kwargs): - chroot_this = kwargs.pop("chroot_this", False) - cwd = kwargs.pop("cwd", None) - pid = os.fork() - if pid == 0: - if chroot_this: - os.chdir(ac_rcynic_dir) - elif cwd is not None: - os.chdir(cwd) - if we_are_root: - os.initgroups(pw.pw_name, pw.pw_gid) - if chroot_this: - os.chroot(ac_rcynic_dir) - if we_are_root: - os.setgid(pw.pw_gid) - os.setuid(pw.pw_uid) - os.closerange(3, os.sysconf("SC_OPEN_MAX")) - os.execvp(cmd[0], cmd) - os._exit(1) - else: - status = os.waitpid(pid, 0)[1] - if status == 0: - return - elif os.WIFSIGNALED(status): - sys.exit("Process %s exited with signal %s" % (" ".join(cmd), os.WTERMSIG(status))) - elif os.WIFEXITED(status): - sys.exit("Program %s exited with status %s" % (" ".join(cmd), os.WEXITSTATUS(status))) - else: - sys.exit("Program %s exited for unknown reason %s" % (" ".join(cmd), status)) - -parser = argparse.ArgumentParser(description = __doc__) -parser.add_argument("--chroot", action = "store_true", help = "run chrooted") -args = parser.parse_args() - -we_are_root = os.getuid() == 0 - -if args.chroot and not we_are_root: - sys.exit("Only root can --chroot") - -try: - pw = pwd.getpwnam(ac_rcynic_user) -except KeyError: - sys.exit("Could not find passwd entry for user %s" % ac_rcynic_user) - -try: - lock = os.open(os.path.join(ac_rcynic_dir, "data/lock"), os.O_RDONLY | os.O_CREAT | os.O_NONBLOCK, 0666) - fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB) - if we_are_root: - os.fchown(lock, pw.pw_uid, pw.pw_gid) -except (IOError, OSError), e: - if e.errno == errno.EAGAIN: - sys.exit(0) # Another instance of this script is already running, exit silently - else: - sys.exit("Error %r opening lock %r" % (e.strerror, os.path.join(ac_rcynic_dir, "data/lock"))) - -if args.chroot: - run("/bin/rcynic", "-c", "/etc/rcynic.conf", chroot_this = True) -else: - run(os.path.join(ac_bindir, "rcynic"), "-c", 
os.path.join(ac_sysconfdir, "rcynic.conf")) - -run(os.path.join(ac_bindir, "rtr-origin"), - "--cronjob", - os.path.join(ac_rcynic_dir, "data/authenticated"), - cwd = os.path.join(ac_rcynic_dir, "rpki-rtr")) - -prog = os.path.join(ac_libexecdir, "rpkigui-rcynic") -if os.path.exists(prog): - run(prog) - -if ac_rcynic_html_dir and os.path.exists(os.path.dirname(ac_rcynic_html_dir)): - run(os.path.join(ac_bindir, "rcynic-html"), - os.path.join(ac_rcynic_dir, "data/rcynic.xml"), - ac_rcynic_html_dir) diff --git a/rcynic/rcynic-html.py b/rcynic/rcynic-html.py deleted file mode 100644 index 58e65dde..00000000 --- a/rcynic/rcynic-html.py +++ /dev/null @@ -1,658 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR -# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Render rcynic's XML output to basic (X)HTML with some rrdtool graphics. 
-""" - -import sys -import urlparse -import os -import argparse -import time -import subprocess -import copy - -try: - from lxml.etree import (ElementTree, Element, SubElement, Comment) -except ImportError: - from xml.etree.ElementTree import (ElementTree, Element, SubElement, Comment) - -session = None -args = None - -def parse_options(): - - global args - - try: - default_rrdtool_binary = ac_rrdtool_binary - except NameError: - default_rrdtool_binary = "rrdtool" - - parser = argparse.ArgumentParser(description = __doc__) - parser.add_argument("--refresh", type = int, default = 1800, - help = "refresh interval for generated HTML") - parser.add_argument("--hide-problems", action = "store_true", - help = "don't generate \"problems\" page") - parser.add_argument("--hide-graphs", action = "store_true", - help = "don't generate graphs") - parser.add_argument("--hide-object-counts", action = "store_true", - help = "don't display object counts") - parser.add_argument("--dont-update-rrds", action = "store_true", - help = "don't add new data to RRD databases") - parser.add_argument("--png-height", type = int, default = 190, - help = "height of PNG images") - parser.add_argument("--png-width", type = int, default = 1350, - help = "width of PNG images") - parser.add_argument("--svg-height", type = int, default = 600, - help = "height of SVG images") - parser.add_argument("--svg-width", type = int, default = 1200, - help = "width of SVG images") - parser.add_argument("--eps-height", type = int, default = 0, - help = "height of EPS images") - parser.add_argument("--eps-width", type = int, default = 0, - help = "width of EPS images") - parser.add_argument("--rrdtool-binary", default = default_rrdtool_binary, - help = "location of rrdtool binary") - parser.add_argument("input_file", type = argparse.FileType("r"), - help = "XML input file") - parser.add_argument("output_directory", - help = "output directory") - args = parser.parse_args() - - -def parse_utc(s): - return 
int(time.mktime(time.strptime(s, "%Y-%m-%dT%H:%M:%SZ"))) - -class Label(object): - - moods = ["bad", "warn", "good"] - - def __init__(self, elt): - self.code = elt.tag - self.mood = elt.get("kind") - self.text = elt.text.strip() - self.count = 0 - - def get_count(self): - return self.count - - @property - def sort_key(self): - try: - return self.moods.index(self.mood) - except ValueError: - return len(self.moods) - -class Validation_Status(object): - - def __init__(self, elt, label_map): - self.uri = elt.text.strip() - self.timestamp = elt.get("timestamp") - self.generation = elt.get("generation") - self.hostname = urlparse.urlparse(self.uri).hostname or "[None]" - self.fn2 = os.path.splitext(self.uri)[1] or None if self.generation else None - self.label = label_map[elt.get("status")] - - def sort_key(self): - return (self.label.sort_key, self.timestamp, self.hostname, self.fn2, self.generation) - - @property - def code(self): - return self.label.code - - @property - def mood(self): - return self.label.mood - - @property - def accepted(self): - return self.label.code == "object_accepted" - - @property - def rejected(self): - return self.label.code == "object_rejected" - - @property - def is_current(self): - return self.generation == "current" - - @property - def is_backup(self): - return self.generation == "backup" - - @property - def is_problem(self): - return self.label.mood != "good" - - @property - def is_connection_problem(self): - return self.label.mood != "good" and self.label.code.startswith("rsync_transfer_") - - @property - def is_object_problem(self): - return self.label.mood != "good" and not self.label.code.startswith("rsync_transfer_") - - @property - def is_connection_detail(self): - return self.label.code.startswith("rsync_transfer_") - - @property - def is_object_detail(self): - return not self.label.code.startswith("rsync_transfer_") - -class Problem_Mixin(object): - - @property - def connection_problems(self): - result = [v for v in 
self.validation_status if v.is_connection_problem] - result.sort(key = Validation_Status.sort_key) - return result - - @property - def object_problems(self): - result = [v for v in self.validation_status if v.is_object_problem] - result.sort(key = Validation_Status.sort_key) - return result - -class Host(Problem_Mixin): - - def __init__(self, hostname, timestamp): - self.hostname = hostname - self.timestamp = timestamp - self.elapsed = 0 - self.connections = 0 - self.failures = 0 - self.uris = set() - self.graph = None - self.counters = {} - self.totals = {} - self.validation_status = [] - - def add_connection(self, elt): - self.elapsed += parse_utc(elt.get("finished")) - parse_utc(elt.get("started")) - self.connections += 1 - if elt.get("error") is not None: - self.failures += 1 - - def add_validation_status(self, v): - self.validation_status.append(v) - if v.generation == "current": - self.uris.add(v.uri) - self.counters[(v.fn2, v.generation, v.label)] = self.get_counter(v.fn2, v.generation, v.label) + 1 - self.totals[v.label] = self.get_total(v.label) + 1 - v.label.count += 1 - - def get_counter(self, fn2, generation, label): - return self.counters.get((fn2, generation, label), 0) - - def get_total(self, label): - return self.totals.get(label, 0) - - @property - def failed(self): - return 1 if self.failures > 0 else 0 - - @property - def objects(self): - return len(self.uris) - - field_table = (("connections", "GAUGE"), - ("objects", "GAUGE"), - ("elapsed", "GAUGE"), - ("failed", "ABSOLUTE")) - - rras = tuple("RRA:AVERAGE:0.5:%s:9600" % steps - for steps in (1, 4, 24)) - - @classmethod - def field_ds_specifiers(cls, heartbeat = 24 * 60 * 60, minimum = 0, maximum = "U"): - return ["DS:%s:%s:%s:%s:%s" % (field[0], field[1], heartbeat, minimum, maximum) - for field in cls.field_table] - - @property - def field_values(self): - return tuple(str(getattr(self, field[0])) for field in self.field_table) - - @classmethod - def field_defs(cls, filebase): - return 
["DEF:%s=%s.rrd:%s:AVERAGE" % (field[0], filebase, field[0]) - for field in cls.field_table] - - graph_opts = ( - "--vertical-label", "Sync time (seconds)", - "--right-axis-label", "Objects (count)", - "--lower-limit", "0", - "--right-axis", "1:0", - "--full-size-mode" ) - - graph_cmds = ( - - # Split elapsed into separate data sets, so we can color - # differently to indicate how succesful transfer was. Intent is - # that exactly one of these be defined for every value in elapsed. - - "CDEF:success=failed,UNKN,elapsed,IF", - "CDEF:failure=connections,1,EQ,failed,*,elapsed,UNKN,IF", - "CDEF:partial=connections,1,NE,failed,*,elapsed,UNKN,IF", - - # Show connection timing first, as color-coded semi-transparent - # areas with opaque borders. Intent is to make the colors stand - # out, since they're a major health indicator. Transparency is - # handled via an alpha channel (fourth octet of color code). We - # draw this stuff first so that later lines can overwrite it. - - "AREA:success#00FF0080:Sync time (success)", - "AREA:partial#FFA50080:Sync time (partial failure)", - "AREA:failure#FF000080:Sync time (total failure)", - - "LINE1:success#00FF00", # Green - "LINE1:partial#FFA500", # Orange - "LINE1:failure#FF0000", # Red - - # Now show object counts, as a simple black line. - - "LINE1:objects#000000:Objects", # Black - - # Add averages over period to chart legend. 
- - "VDEF:avg_elapsed=elapsed,AVERAGE", - "VDEF:avg_connections=connections,AVERAGE", - "VDEF:avg_objects=objects,AVERAGE", - "COMMENT:\j", - "GPRINT:avg_elapsed:Average sync time (seconds)\: %5.2lf", - "GPRINT:avg_connections:Average connection count\: %5.2lf", - "GPRINT:avg_objects:Average object count\: %5.2lf" ) - - graph_periods = (("week", "-1w"), - ("month", "-31d"), - ("year", "-1y")) - - def rrd_run(self, cmd): - try: - cmd = [str(i) for i in cmd] - cmd.insert(0, args.rrdtool_binary) - subprocess.check_call(cmd, stdout = open("/dev/null", "w")) - except OSError, e: - sys.exit("Problem running %s, perhaps you need to set --rrdtool-binary? (%s)" % (args.rrdtool_binary, e)) - except subprocess.CalledProcessError, e: - sys.exit("Failure running %s: %s" % (args.rrdtool_binary, e)) - - def rrd_update(self): - filename = os.path.join(args.output_directory, self.hostname) + ".rrd" - if not os.path.exists(filename): - cmd = ["create", filename, "--start", self.timestamp - 1, "--step", "3600"] - cmd.extend(self.field_ds_specifiers()) - cmd.extend(self.rras) - self.rrd_run(cmd) - self.rrd_run(["update", filename, - "%s:%s" % (self.timestamp, ":".join(str(v) for v in self.field_values))]) - - def rrd_graph(self, html): - filebase = os.path.join(args.output_directory, self.hostname) - formats = [format for format in ("png", "svg", "eps") - if getattr(args, format + "_width") and getattr(args, format + "_height")] - for period, start in self.graph_periods: - for format in formats: - cmds = [ "graph", "%s_%s.%s" % (filebase, period, format), - "--title", "%s last %s" % (self.hostname, period), - "--start", start, - "--width", getattr(args, format + "_width"), - "--height", getattr(args, format + "_height"), - "--imgformat", format.upper() ] - cmds.extend(self.graph_opts) - cmds.extend(self.field_defs(filebase)) - cmds.extend(self.graph_cmds) - self.rrd_run(cmds) - img = Element("img", src = "%s_%s.png" % (self.hostname, period), - width = str(args.png_width), - height = 
str(args.png_height)) - if self.graph is None: - self.graph = copy.copy(img) - html.BodyElement("h2").text = "%s over last %s" % (self.hostname, period) - html.BodyElement("a", href = "%s_%s_svg.html" % (self.hostname, period)).append(img) - html.BodyElement("br") - svg_html = HTML("%s over last %s" % (self.hostname, period), - "%s_%s_svg" % (self.hostname, period)) - svg_html.BodyElement("img", src = "%s_%s.svg" % (self.hostname, period)) - svg_html.close() - - -class Session(Problem_Mixin): - - def __init__(self): - self.hosts = {} - - self.root = ElementTree(file = args.input_file).getroot() - - self.rcynic_version = self.root.get("rcynic-version") - self.rcynic_date = self.root.get("date") - self.timestamp = parse_utc(self.rcynic_date) - - self.labels = [Label(elt) for elt in self.root.find("labels")] - self.load_validation_status() - - for elt in self.root.findall("rsync_history"): - self.get_host(urlparse.urlparse(elt.text.strip()).hostname).add_connection(elt) - - generations = set() - fn2s = set() - - for v in self.validation_status: - self.get_host(v.hostname).add_validation_status(v) - generations.add(v.generation) - fn2s.add(v.fn2) - - self.labels = [l for l in self.labels if l.count > 0] - - self.hostnames = sorted(self.hosts) - self.generations = sorted(generations) - self.fn2s = sorted(fn2s) - - def load_validation_status(self): - label_map = dict((label.code, label) for label in self.labels) - full_validation_status = [Validation_Status(elt, label_map) - for elt in self.root.findall("validation_status")] - accepted_current = set(v.uri for v in full_validation_status - if v.is_current and v.accepted) - self.validation_status = [v for v in full_validation_status - if not v.is_backup - or v.uri not in accepted_current] - - def get_host(self, hostname): - if hostname not in self.hosts: - self.hosts[hostname] = Host(hostname, self.timestamp) - return self.hosts[hostname] - - def get_sum(self, fn2, generation, label): - return sum(h.get_counter(fn2, 
generation, label) - for h in self.hosts.itervalues()) - - def rrd_update(self): - if not args.dont_update_rrds: - for h in self.hosts.itervalues(): - h.rrd_update() - -css = ''' - th, td { - text-align: center; padding: 4px; - } - - td.uri { - text-align: left; - } - - thead tr th, tfoot tr td { - font-weight: bold; - } - - .good { - background-color: #77ff77; - } - - .warn { - background-color: yellow; - } - - .bad { - background-color: #ff5500; - } - - body { - font-family: arial, helvetica, serif; - } - - /* Make background-color inherit like color does. */ - #nav { - background-color: inherit; - } - - #nav, #nav ul { - float: left; - width: 100%; - list-style: none; - line-height: 1; - font-weight: normal; - padding: 0; - border-color: black; - border-style: solid; - border-width: 1px 0; - margin: 0 0 1em 0; - } - - #nav a, #nav span { - display: block; - background-color: white; - color: black; - text-decoration: none; - padding: 0.25em 0.75em; - } - - #nav li { - float: left; - padding: 0; - } - - /* Use
    to set submenu width. */ - #nav li ul { - position: absolute; - display: none; - height: auto; - border-width: 1px; - margin: 0; - } - - #nav li li { - width: 100%; - } - - /* Display submenu when hovering. */ - #nav li:hover ul { - display: block; - } - - /* Reverse video when hovering. */ - #nav a:hover, #nav span:hover { - color: white; - background-color: black; - } -''' - -class HTML(object): - - def __init__(self, title, filebase): - - self.filename = os.path.join(args.output_directory, filebase + ".html") - - self.html = Element("html") - self.html.append(Comment(" Generators:\n" + - " " + session.rcynic_version + "\n" + - " $Id$\n")) - self.head = SubElement(self.html, "head") - self.body = SubElement(self.html, "body") - - title += " " + session.rcynic_date - SubElement(self.head, "title").text = title - SubElement(self.body, "h1").text = title - SubElement(self.head, "style", type = "text/css").text = css - - if args.refresh: - SubElement(self.head, "meta", { "http-equiv" : "Refresh", "content" : str(args.refresh) }) - - hostwidth = max(len(hostname) for hostname in session.hostnames) - - toc = SubElement(self.body, "ul", id = "nav") - SubElement(SubElement(toc, "li"), "a", href = "index.html").text = "Overview" - li = SubElement(toc, "li") - SubElement(li, "span").text = "Repositories" - ul = SubElement(li, "ul", style = "width: %sem" % hostwidth) - for hostname in session.hostnames: - SubElement(SubElement(ul, "li"), "a", href = "%s.html" % hostname).text = hostname - SubElement(SubElement(toc, "li"), "a", href = "problems.html").text = "Problems" - li = SubElement(toc, "li") - SubElement(li, "span").text = "All Details" - ul = SubElement(li, "ul", style = "width: 15em") - SubElement(SubElement(ul, "li"), "a", href = "connections.html").text = "All Connections" - SubElement(SubElement(ul, "li"), "a", href = "objects.html").text = "All Objects" - SubElement(self.body, "br") - - def close(self): - ElementTree(element = self.html).write(self.filename) 
- - def BodyElement(self, tag, **attrib): - return SubElement(self.body, tag, **attrib) - - def counter_table(self, data_func, total_func): - table = self.BodyElement("table", rules = "all", border = "1") - thead = SubElement(table, "thead") - tfoot = SubElement(table, "tfoot") - tbody = SubElement(table, "tbody") - tr = SubElement(thead, "tr") - SubElement(tr, "th") - for label in session.labels: - SubElement(tr, "th").text = label.text - for fn2 in session.fn2s: - for generation in session.generations: - counters = [data_func(fn2, generation, label) for label in session.labels] - if sum(counters) > 0: - tr = SubElement(tbody, "tr") - SubElement(tr, "td").text = ((generation or "") + " " + (fn2 or "")).strip() - for label, count in zip(session.labels, counters): - td = SubElement(tr, "td") - if count > 0: - td.set("class", label.mood) - td.text = str(count) - tr = SubElement(tfoot, "tr") - SubElement(tr, "td").text = "Total" - counters = [total_func(label) for label in session.labels] - for label, count in zip(session.labels, counters): - td = SubElement(tr, "td") - if count > 0: - td.set("class", label.mood) - td.text = str(count) - return table - - def object_count_table(self, session): - table = self.BodyElement("table", rules = "all", border = "1") - thead = SubElement(table, "thead") - tbody = SubElement(table, "tbody") - tfoot = SubElement(table, "tfoot") - fn2s = [fn2 for fn2 in session.fn2s if fn2 is not None] - total = dict((fn2, 0) for fn2 in fn2s) - for hostname in session.hostnames: - tr = SubElement(tbody, "tr") - SubElement(tr, "td").text = hostname - for fn2 in fn2s: - td = SubElement(tr, "td") - count = sum(uri.endswith(fn2) for uri in session.hosts[hostname].uris) - total[fn2] += count - if count > 0: - td.text = str(count) - trhead = SubElement(thead, "tr") - trfoot = SubElement(tfoot, "tr") - SubElement(trhead, "th").text = "Repository" - SubElement(trfoot, "td").text = "Total" - for fn2 in fn2s: - SubElement(trhead, "th").text = fn2 - 
SubElement(trfoot, "td").text = str(total[fn2]) - return table - - def detail_table(self, records): - if records: - table = self.BodyElement("table", rules = "all", border = "1") - thead = SubElement(table, "thead") - tbody = SubElement(table, "tbody") - tr = SubElement(thead, "tr") - SubElement(tr, "th").text = "Timestamp" - SubElement(tr, "th").text = "Generation" - SubElement(tr, "th").text = "Status" - SubElement(tr, "th").text = "URI" - for v in records: - tr = SubElement(tbody, "tr", { "class" : v.mood }) - SubElement(tr, "td").text = v.timestamp - SubElement(tr, "td").text = v.generation - SubElement(tr, "td").text = v.label.text - SubElement(tr, "td", { "class" : "uri"}).text = v.uri - return table - else: - self.BodyElement("p").text = "None found" - return None - -def main(): - - global session - - os.putenv("TZ", "UTC") - time.tzset() - - parse_options() - - session = Session() - session.rrd_update() - - for hostname in session.hostnames: - html = HTML("Repository details for %s" % hostname, hostname) - html.counter_table(session.hosts[hostname].get_counter, session.hosts[hostname].get_total) - if not args.hide_graphs: - session.hosts[hostname].rrd_graph(html) - if not args.hide_problems: - html.BodyElement("h2").text = "Connection Problems" - html.detail_table(session.hosts[hostname].connection_problems) - html.BodyElement("h2").text = "Object Problems" - html.detail_table(session.hosts[hostname].object_problems) - html.close() - - html = HTML("rcynic summary", "index") - html.BodyElement("h2").text = "Grand totals for all repositories" - html.counter_table(session.get_sum, Label.get_count) - if not args.hide_object_counts: - html.BodyElement("br") - html.BodyElement("hr") - html.BodyElement("br") - html.BodyElement("h2").text = "Current total object counts (distinct URIs)" - html.object_count_table(session) - for hostname in session.hostnames: - html.BodyElement("br") - html.BodyElement("hr") - html.BodyElement("br") - html.BodyElement("h2").text = 
"Overview for repository %s" % hostname - html.counter_table(session.hosts[hostname].get_counter, session.hosts[hostname].get_total) - if not args.hide_graphs: - html.BodyElement("br") - html.BodyElement("a", href = "%s.html" % hostname).append(session.hosts[hostname].graph) - html.close() - - html = HTML("Problems", "problems") - html.BodyElement("h2").text = "Connection Problems" - html.detail_table(session.connection_problems) - html.BodyElement("h2").text = "Object Problems" - html.detail_table(session.object_problems) - html.close() - - html = HTML("All connections", "connections") - html.detail_table([v for v in session.validation_status if v.is_connection_detail]) - html.close() - - html = HTML("All objects", "objects") - html.detail_table([v for v in session.validation_status if v.is_object_detail]) - html.close() - - -if __name__ == "__main__": - main() diff --git a/rcynic/rcynic-svn.py b/rcynic/rcynic-svn.py deleted file mode 100644 index fd0df500..00000000 --- a/rcynic/rcynic-svn.py +++ /dev/null @@ -1,190 +0,0 @@ -# $Id$ -# -# Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Archive rcynic output in a Subversion repository. 
-""" - -import subprocess -import argparse -import datetime -import fcntl -import glob -import os - -try: - from lxml.etree import ElementTree -except ImportError: - from xml.etree.ElementTree import ElementTree - - -mime_types = ( - ("html", "application/xhtml+xml"), - ("cer", "application/pkix-cert"), - ("crl", "application/pkix-crl"), - ("mft", "application/rpki-manifest"), - ("mnf", "application/rpki-manifest"), - ("roa", "application/rpki-roa"), - ("gbr", "application/rpki-ghostbusters")) - - -def run(*cmd, **kwargs): - """ - Run a program, displaying timing data when appropriate. - """ - - t = datetime.datetime.utcnow() - subprocess.check_call(cmd, **kwargs) - if args.show_timing: - now = datetime.datetime.utcnow() - print now, (now - t), " ".join(cmd) - - -def runxml(*cmd): - """ - - Run a program which produces XML output, displaying timing data when - appropriate and returning an ElementTree constructed from the - program's output. - """ - t = datetime.datetime.utcnow() - p = subprocess.Popen(cmd, stdout = subprocess.PIPE) - x = ElementTree(file = p.stdout) - s = p.wait() - if s: - raise subprocess.CalledProcessError(s, cmd[0]) - if args.show_timing: - now = datetime.datetime.utcnow() - print now, (now - t), " ".join(cmd) - return x - - -# Main program. - -parser = argparse.ArgumentParser(description = __doc__) - -parser.add_argument("--show_timing", action = "store_true", help = \ - """ - Show timing data on programs we run. - """) - -parser.add_argument("--verbatim", action = "store_true", help = \ - """ - Whether to archive rcynic's data output exactly as - rcynic writes it or map it into a directory - structure which makes more sense when used with - Subversion. True means archive exactly as rcynic - writes it, interpreting file and directory names - as rsync would, transient directories and all. 
- False means map the current authenticated/ tree in - rcynic's output to a stable authenticated/ subtree - in the subversion repository, with file and - directory names from the command line shorted to - their last component. - """) - -parser.add_argument("--lockfile", default = "rcynic-svn.lock", help = \ - """ - Lock file to to prevent multiple copies of this - program (eg, running under cron) from stepping on - each other while modifying the working directory. - """) - -parser.add_argument("files_to_archive", nargs = "*", help = \ - """ - Files to archive using Subversion. If omitted, we - assume that some other process has already - modified the Subversion working directory. - """) - -parser.add_argument("working_directory", help = \ - """ - Subversion working directory to use (must already - exist). - """) - -args = parser.parse_args() - -if args.show_timing: - t0 = datetime.datetime.utcnow() - print t0, "Starting" - -# Lock out other instances of this program. We may want some more -# sophsiticated approach when combining this with other programs, but -# this should minimize the risk of multiple copies of this program -# trying to modify the same subversion working directory at the same -# time and messing each other up. We leave the lock file in place -# because doing so removes a potential race condition. - -lock = os.open("cronjob.lock", os.O_RDONLY | os.O_CREAT | os.O_NONBLOCK, 0666) -fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB) - -# Make sure working tree is up to date. - -run("svn", "update", "--quiet", args.working_directory) - -# Copy rcynic's output as appropriate. 
- -if args.files_to_archive: - - if args.verbatim: - cmd = ["rsync", "--archive", "--quiet", "--delete"] - cmd.extend(args.files_to_archive) - cmd.append(args.working_directory) - run(*cmd) - - else: - for src in args.files_to_archive: - cmd = ["rsync", "--archive", "--quiet", "--delete", "--copy-links"] - cmd.append(src.rstrip("/")) - cmd.append(args.working_directory.rstrip("/") + "/") - run(*cmd) - -# Ask Subversion to add any new files, trying hard to get the MIME -# types right. - -cmd = ["svn", "add", "--quiet", "--force", "--auto-props"] - -for fn2, mime_type in mime_types: - cmd.append("--config-option") - cmd.append("config:auto-props:*.%s=svn:mime-type=%s" % (fn2, mime_type)) - -cmd.append(".") - -run(*cmd, cwd = args.working_directory) - -# Parse XML version of Subversion's status output to figure out what -# files have been deleted, and tell Subversion that we deleted them -# intentionally. - -missing = sorted(entry.get("path") - for entry in runxml("svn", "status", "--xml", args.working_directory).find("target").findall("entry") - if entry.find("wc-status").get("item") == "missing") -deleted = [] - -for path in missing: - if not any(path.startswith(r) for r in deleted): - run("svn", "delete", "--quiet", path) - deleted.append(path + "/") - -# Commit our changes and update the working tree. - -run("svn", "commit", "--quiet", "--message", "Auto update.", args.working_directory) -run("svn", "update", "--quiet", args.working_directory) - -if args.show_timing: - now = datetime.datetime.utcnow() - print now, now - t0, "total runtime" diff --git a/rcynic/rcynic-text.py b/rcynic/rcynic-text.py deleted file mode 100644 index a8e56dac..00000000 --- a/rcynic/rcynic-text.py +++ /dev/null @@ -1,118 +0,0 @@ -# $Id$ -# -# Copyright (C) 2012 Internet Systems Consortium, Inc. 
("ISC") -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Flat text summary of rcynic.xml. -""" - -import sys -import urlparse -import textwrap - -try: - from lxml.etree import ElementTree -except ImportError: - from xml.etree.ElementTree import ElementTree - -class Label(object): - - def __init__(self, elt): - self.tag = elt.tag - self.width = max(len(s) for s in elt.text.split()) - self.lines = textwrap.wrap(elt.text.strip(), width = self.width) - self.counter = 0 - - def line(self, n): - try: - return " " + self.lines[n].center(self.width) + " " - except IndexError: - return " " * (self.width + 2) - - def add(self): - self.counter += 1 - - @property - def total(self): - return " " + str(self.counter).rjust(self.width) + " " - - @property - def visible(self): - return self.counter > 0 - -class Host(object): - - def __init__(self): - self.counters = {} - - def add(self, label): - self.counters[label] = self.counters.get(label, 0) + 1 - label.add() - - def total(self, label): - if label in self.counters: - return " " + str(self.counters[label]).rjust(label.width) + " " - else: - return " " * (label.width + 2) - -class Session(object): - - def __init__(self, labels): - self.hosts = {} - self.labels = labels - self.map = dict((label.tag, label) for label in labels) - - def add(self, elt): - label = 
self.map[elt.get("status")] - hostname = urlparse.urlparse(elt.text.strip()).hostname - if hostname not in self.hosts: - self.hosts[hostname] = Host() - self.hosts[hostname].add(label) - - def show(self): - visible = [label for label in self.labels if label.visible] - hostnames = sorted(hostname for hostname in self.hosts if hostname is not None) - hostwidth = max(len(hostname) for hostname in hostnames + ["Hostname"]) - separator = "+-%s-+-%s-+" % ( - "-" * hostwidth, - "-+-".join("-" * label.width for label in visible)) - print separator - for i in xrange(max(len(label.lines) for label in visible)): - print "| %s |%s|" % ( - ("Hostname" if i == 0 else "").ljust(hostwidth), - "|".join(label.line(i) for label in visible)) - print separator - for hostname in hostnames: - print "| %s |%s|" % ( - hostname.ljust(hostwidth), - "|".join(self.hosts[hostname].total(label) for label in visible)) - if hostnames: - print separator - print "| %s |%s|" % ( - "Total".ljust(hostwidth), - "|".join(label.total for label in visible)) - print separator - - -def main(): - for filename in ([sys.stdin] if len(sys.argv) < 2 else sys.argv[1:]): - etree = ElementTree(file = filename) - session = Session([Label(elt) for elt in etree.find("labels")]) - for elt in etree.findall("validation_status"): - session.add(elt) - session.show() - -if __name__ == "__main__": - main() diff --git a/rcynic/rcynic.c b/rcynic/rcynic.c deleted file mode 100644 index dea9c48f..00000000 --- a/rcynic/rcynic.c +++ /dev/null @@ -1,6070 +0,0 @@ -/* - * Copyright (C) 2013--2014 Dragon Research Labs ("DRL") - * Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") - * Portions copyright (C) 2006--2008 American Registry for Internet Numbers ("ARIN") - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notices and this permission notice appear in all copies. 
- * - * THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL - * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, - * ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR - * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS - * OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, - * NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION - * WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - */ - -/* $Id$ */ - -/** - * @mainpage - * - * "Cynical rsync": Recursively walk RPKI tree using rsync to pull - * data from remote sites, validating certificates and CRLs as we go. - * - * Doxygen doesn't quite know what to make of a one-file C program, - * and ends up putting most of the interesting data @link rcynic.c - * here. @endlink - */ - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#define SYSLOG_NAMES /* defines CODE prioritynames[], facilitynames[] */ -#include - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include - -#include "bio_f_linebreak.h" - -#include "defstack.h" - -#if !defined(FILENAME_MAX) && defined(PATH_MAX) && PATH_MAX > 1024 -#define FILENAME_MAX PATH_MAX -#elif !defined(FILENAME_MAX) -#define FILENAME_MAX 1024 -#endif - -#define SCHEME_RSYNC ("rsync://") -#define SIZEOF_RSYNC (sizeof(SCHEME_RSYNC) - 1) - -/** - * Maximum length of a hostname. - */ -#ifndef HOSTNAME_MAX -#define HOSTNAME_MAX 256 -#endif - -/** - * Maximum length of an URI. - */ -#define URI_MAX (SIZEOF_RSYNC + HOSTNAME_MAX + 1 + FILENAME_MAX) - -/** - * Maximum number of times we try to kill an inferior process before - * giving up. 
- */ -#define KILL_MAX 10 - -/** - * Version number of XML summary output. - */ -#define XML_SUMMARY_VERSION 1 - -/** - * How much buffer space do we need for a raw address? - */ -#define ADDR_RAW_BUF_LEN 16 - -/** - * How many bytes is a SHA256 digest? - */ -#define HASH_SHA256_LEN 32 - -/** - * Logging levels. Same general idea as syslog(), but our own - * catagories based on what makes sense for this program. Default - * mappings to syslog() priorities are here because it's the easiest - * way to make sure that we assign a syslog level to each of ours. - */ - -#define LOG_LEVELS \ - QQ(log_sys_err, LOG_ERR) /* Error from OS or library */ \ - QQ(log_usage_err, LOG_ERR) /* Bad usage (local error) */ \ - QQ(log_data_err, LOG_NOTICE) /* Bad data, no biscuit */ \ - QQ(log_telemetry, LOG_INFO) /* Normal progress chatter */ \ - QQ(log_verbose, LOG_INFO) /* Extra chatter */ \ - QQ(log_debug, LOG_DEBUG) /* Only useful when debugging */ - -#define QQ(x,y) x , -typedef enum log_level { LOG_LEVELS LOG_LEVEL_T_MAX } log_level_t; -#undef QQ - -#define QQ(x,y) { #x , x }, -static const struct { - const char *name; - log_level_t value; -} log_levels[] = { - LOG_LEVELS -}; -#undef QQ - -/** - * MIB counters derived from OpenSSL. Long list of validation failure - * codes from OpenSSL (crypto/x509/x509_vfy.h). 
- */ - -#define MIB_COUNTERS_FROM_OPENSSL \ - QV(X509_V_ERR_UNABLE_TO_GET_CRL) \ - QV(X509_V_ERR_UNABLE_TO_DECRYPT_CERT_SIGNATURE) \ - QV(X509_V_ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE) \ - QV(X509_V_ERR_UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY) \ - QV(X509_V_ERR_CERT_SIGNATURE_FAILURE) \ - QV(X509_V_ERR_CRL_SIGNATURE_FAILURE) \ - QV(X509_V_ERR_CERT_NOT_YET_VALID) \ - QV(X509_V_ERR_CERT_HAS_EXPIRED) \ - QV(X509_V_ERR_CRL_NOT_YET_VALID) \ - QV(X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD) \ - QV(X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD) \ - QV(X509_V_ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD) \ - QV(X509_V_ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD) \ - QV(X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT) \ - QV(X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN) \ - QV(X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY) \ - QV(X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE) \ - QV(X509_V_ERR_CERT_CHAIN_TOO_LONG) \ - QV(X509_V_ERR_CERT_REVOKED) \ - QV(X509_V_ERR_INVALID_CA) \ - QV(X509_V_ERR_PATH_LENGTH_EXCEEDED) \ - QV(X509_V_ERR_INVALID_PURPOSE) \ - QV(X509_V_ERR_CERT_UNTRUSTED) \ - QV(X509_V_ERR_CERT_REJECTED) \ - QV(X509_V_ERR_AKID_SKID_MISMATCH) \ - QV(X509_V_ERR_AKID_ISSUER_SERIAL_MISMATCH) \ - QV(X509_V_ERR_KEYUSAGE_NO_CERTSIGN) \ - QV(X509_V_ERR_UNABLE_TO_GET_CRL_ISSUER) \ - QV(X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION) \ - QV(X509_V_ERR_KEYUSAGE_NO_CRL_SIGN) \ - QV(X509_V_ERR_UNHANDLED_CRITICAL_CRL_EXTENSION) \ - QV(X509_V_ERR_INVALID_NON_CA) \ - QV(X509_V_ERR_PROXY_PATH_LENGTH_EXCEEDED) \ - QV(X509_V_ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE) \ - QV(X509_V_ERR_PROXY_CERTIFICATES_NOT_ALLOWED) \ - QV(X509_V_ERR_INVALID_EXTENSION) \ - QV(X509_V_ERR_INVALID_POLICY_EXTENSION) \ - QV(X509_V_ERR_NO_EXPLICIT_POLICY) \ - QV(X509_V_ERR_UNNESTED_RESOURCE) - -/** - * MIB counters specific to rcynic. 
- */ - -#define MIB_COUNTERS \ - MIB_COUNTERS_FROM_OPENSSL \ - QB(aia_extension_missing, "AIA extension missing") \ - QB(aia_extension_forbidden, "AIA extension forbidden") \ - QB(aia_uri_missing, "AIA URI missing") \ - QB(aki_extension_issuer_mismatch, "AKI extension issuer mismatch") \ - QB(aki_extension_missing, "AKI extension missing") \ - QB(aki_extension_wrong_format, "AKI extension is wrong format") \ - QB(bad_asidentifiers, "Bad ASIdentifiers extension") \ - QB(bad_certificate_policy, "Bad certificate policy") \ - QB(bad_cms_econtenttype, "Bad CMS eContentType") \ - QB(bad_cms_si_contenttype, "Bad CMS SI ContentType") \ - QB(bad_cms_signer, "Bad CMS signer") \ - QB(bad_cms_signer_infos, "Bad CMS signerInfos") \ - QB(bad_crl, "Bad CRL") \ - QB(bad_ipaddrblocks, "Bad IPAddrBlocks extension") \ - QB(bad_key_usage, "Bad keyUsage") \ - QB(bad_manifest_digest_length, "Bad manifest digest length") \ - QB(bad_public_key, "Bad public key") \ - QB(bad_roa_asID, "Bad ROA asID") \ - QB(bad_certificate_serial_number, "Bad certificate serialNumber") \ - QB(bad_manifest_number, "Bad manifestNumber") \ - QB(certificate_bad_signature, "Bad certificate signature") \ - QB(certificate_failed_validation, "Certificate failed validation") \ - QB(cms_econtent_decode_error, "CMS eContent decode error") \ - QB(cms_includes_crls, "CMS includes CRLs") \ - QB(cms_signer_missing, "CMS signer missing") \ - QB(cms_ski_mismatch, "CMS SKI mismatch") \ - QB(cms_validation_failure, "CMS validation failure") \ - QB(crl_issuer_name_mismatch, "CRL issuer name mismatch") \ - QB(crl_not_in_manifest, "CRL not listed in manifest") \ - QB(crl_not_yet_valid, "CRL not yet valid") \ - QB(crl_number_extension_missing, "CRL number extension missing") \ - QB(crl_number_is_negative, "CRL number is negative") \ - QB(crl_number_out_of_range, "CRL number out of range") \ - QB(crldp_doesnt_match_issuer_sia, "CRLDP doesn't match issuer's SIA") \ - QB(crldp_uri_missing, "CRLDP URI missing") \ - 
QB(disallowed_x509v3_extension, "Disallowed X.509v3 extension") \ - QB(duplicate_name_in_manifest, "Duplicate name in manifest") \ - QB(inappropriate_eku_extension, "Inappropriate EKU extension") \ - QB(malformed_aia_extension, "Malformed AIA extension") \ - QB(malformed_sia_extension, "Malformed SIA extension") \ - QB(malformed_basic_constraints, "Malformed basicConstraints") \ - QB(malformed_trust_anchor, "Malformed trust anchor") \ - QB(malformed_cadirectory_uri, "Malformed caDirectory URI") \ - QB(malformed_crldp_extension, "Malformed CRDLP extension") \ - QB(malformed_crldp_uri, "Malformed CRDLP URI") \ - QB(malformed_roa_addressfamily, "Malformed ROA addressFamily") \ - QB(malformed_tal_uri, "Malformed TAL URI") \ - QB(manifest_carepository_mismatch, "Manifest caRepository mismatch") \ - QB(manifest_interval_overruns_cert, "Manifest interval overruns certificate") \ - QB(manifest_lists_missing_object, "Manifest lists missing object") \ - QB(manifest_not_yet_valid, "Manifest not yet valid") \ - QB(missing_resources, "Missing resources") \ - QB(nonconformant_asn1_time_value, "Nonconformant ASN.1 time value") \ - QB(nonconformant_public_key_algorithm,"Nonconformant public key algorithm")\ - QB(nonconformant_signature_algorithm, "Nonconformant signature algorithm")\ - QB(nonconformant_digest_algorithm, "Nonconformant digest algorithm") \ - QB(nonconformant_certificate_uid, "Nonconformant certificate UID") \ - QB(object_rejected, "Object rejected") \ - QB(rfc3779_inheritance_required, "RFC 3779 inheritance required") \ - QB(roa_contains_bad_afi_value, "ROA contains bad AFI value") \ - QB(roa_max_prefixlen_too_short, "ROA maxPrefixlen too short") \ - QB(roa_resource_not_in_ee, "ROA resource not in EE") \ - QB(roa_resources_malformed, "ROA resources malformed") \ - QB(rsync_transfer_failed, "rsync transfer failed") \ - QB(rsync_transfer_timed_out, "rsync transfer timed out") \ - QB(safi_not_allowed, "SAFI not allowed") \ - QB(sia_cadirectory_uri_missing, "SIA 
caDirectory URI missing") \ - QB(sia_extension_missing, "SIA extension missing") \ - QB(sia_manifest_uri_missing, "SIA manifest URI missing") \ - QB(ski_extension_missing, "SKI extension missing") \ - QB(ski_public_key_mismatch, "SKI public key mismatch") \ - QB(trust_anchor_key_mismatch, "Trust anchor key mismatch") \ - QB(trust_anchor_with_crldp, "Trust anchor can't have CRLDP") \ - QB(unknown_afi, "Unknown AFI") \ - QB(unknown_openssl_verify_error, "Unknown OpenSSL verify error") \ - QB(unreadable_trust_anchor, "Unreadable trust anchor") \ - QB(unreadable_trust_anchor_locator, "Unreadable trust anchor locator") \ - QB(wrong_object_version, "Wrong object version") \ - QW(aia_doesnt_match_issuer, "AIA doesn't match issuer") \ - QW(backup_thisupdate_newer_than_current, "Backup thisUpdate newer than current") \ - QW(backup_number_higher_than_current, "Backup number higher than current") \ - QW(bad_thisupdate, "Bad CRL thisUpdate") \ - QW(bad_cms_si_signed_attributes, "Bad CMS SI signed attributes") \ - QW(bad_signed_object_uri, "Bad signedObject URI") \ - QW(crldp_names_newer_crl, "CRLDP names newer CRL") \ - QW(digest_mismatch, "Digest mismatch") \ - QW(ee_certificate_with_1024_bit_key, "EE certificate with 1024 bit key") \ - QW(issuer_uses_multiple_crldp_values, "Issuer uses multiple CRLDP values")\ - QW(multiple_rsync_uris_in_extension, "Multiple rsync URIs in extension") \ - QW(nonconformant_issuer_name, "Nonconformant X.509 issuer name") \ - QW(nonconformant_subject_name, "Nonconformant X.509 subject name") \ - QW(policy_qualifier_cps, "Policy Qualifier CPS") \ - QW(rsync_partial_transfer, "rsync partial transfer") \ - QW(rsync_transfer_skipped, "rsync transfer skipped") \ - QW(sia_extension_missing_from_ee, "SIA extension missing from EE") \ - QW(skipped_because_not_in_manifest, "Skipped because not in manifest") \ - QW(stale_crl_or_manifest, "Stale CRL or manifest") \ - QW(tainted_by_stale_crl, "Tainted by stale CRL") \ - QW(tainted_by_stale_manifest, 
"Tainted by stale manifest") \ - QW(tainted_by_not_being_in_manifest, "Tainted by not being in manifest") \ - QW(trust_anchor_not_self_signed, "Trust anchor not self-signed") \ - QW(trust_anchor_skipped, "Trust anchor skipped") \ - QW(unknown_object_type_skipped, "Unknown object type skipped") \ - QW(uri_too_long, "URI too long") \ - QW(wrong_cms_si_signature_algorithm, "Wrong CMS SI signature algorithm") \ - QW(wrong_cms_si_digest_algorithm, "Wrong CMS SI digest algorithm") \ - QG(non_rsync_uri_in_extension, "Non-rsync URI in extension") \ - QG(object_accepted, "Object accepted") \ - QG(rechecking_object, "Rechecking object") \ - QG(rsync_transfer_succeeded, "rsync transfer succeeded") \ - QG(validation_ok, "OK") - -#define QV(x) QB(mib_openssl_##x, 0) - -static const char - mib_counter_kind_good[] = "good", - mib_counter_kind_warn[] = "warn", - mib_counter_kind_bad[] = "bad"; - -#define QG(x,y) mib_counter_kind_good , -#define QW(x,y) mib_counter_kind_warn , -#define QB(x,y) mib_counter_kind_bad , -static const char * const mib_counter_kind[] = { MIB_COUNTERS NULL }; -#undef QB -#undef QW -#undef QG - -#define QG(x,y) QQ(x,y) -#define QW(x,y) QQ(x,y) -#define QB(x,y) QQ(x,y) - -#define QQ(x,y) x , -typedef enum mib_counter { MIB_COUNTERS MIB_COUNTER_T_MAX } mib_counter_t; -#undef QQ - -#define QQ(x,y) y , -static const char * const mib_counter_desc[] = { MIB_COUNTERS NULL }; -#undef QQ - -#define QQ(x,y) #x , -static const char * const mib_counter_label[] = { MIB_COUNTERS NULL }; -#undef QQ - -#undef QV - -#define QQ(x,y) 0 , -#define QV(x) x , -static const long mib_counter_openssl[] = { MIB_COUNTERS 0 }; -#undef QV -#undef QQ - -/** - * Object sources. We always try to get fresh copies of objects using - * rsync, but if that fails we try using backup copies from what - * worked the last time we were run. This means that a URI - * potentially represents two different objects, so we need to - * distinguish them for tracking purposes in our validation log. 
- */ - -#define OBJECT_GENERATIONS \ - QQ(null) \ - QQ(current) \ - QQ(backup) - -#define QQ(x) object_generation_##x , -typedef enum object_generation { OBJECT_GENERATIONS OBJECT_GENERATION_MAX } object_generation_t; -#undef QQ - -#define QQ(x) #x , -static const char * const object_generation_label[] = { OBJECT_GENERATIONS NULL }; -#undef QQ - -/** - * Type-safe string wrapper for URIs. - */ -typedef struct { char s[URI_MAX]; } uri_t; - -/** - * Type-safe string wrapper for filename paths. - */ -typedef struct { char s[FILENAME_MAX]; } path_t; - -/** - * Type-safe wrapper for hash buffers. - */ -typedef struct { unsigned char h[EVP_MAX_MD_SIZE]; } hashbuf_t; - -/** - * Type-safe wrapper for timestamp strings. - */ -typedef struct { char s[sizeof("2001-01-01T00:00:00Z") + 1]; } timestamp_t; - -/** - * Per-URI validation status object. - * uri must be first element. - */ -typedef struct validation_status { - uri_t uri; - object_generation_t generation; - time_t timestamp; - unsigned char events[(MIB_COUNTER_T_MAX + 7) / 8]; - short balance; - struct validation_status *left_child; - struct validation_status *right_child; -} validation_status_t; - -DECLARE_STACK_OF(validation_status_t) - -/** - * Structure to hold data parsed out of a certificate. - */ -typedef struct certinfo { - int ca, ta; - object_generation_t generation; - uri_t uri, sia, aia, crldp, manifest, signedobject; -} certinfo_t; - -typedef struct rcynic_ctx rcynic_ctx_t; - -/** - * States that a walk_ctx_t can be in. - */ -typedef enum { - walk_state_initial, /**< Initial state */ - walk_state_rsync, /**< rsyncing certinfo.sia */ - walk_state_ready, /**< Ready to traverse outputs */ - walk_state_current, /**< prefix = rc->unauthenticated */ - walk_state_backup, /**< prefix = rc->old_authenticated */ - walk_state_done /**< Done walking this cert's outputs */ -} walk_state_t; - -/** - * Context for certificate tree walks. 
This includes all the stuff - * that we would keep as automatic variables on the call stack if we - * didn't have to use callbacks to support multiple rsync processes. - */ -typedef struct walk_ctx { - unsigned refcount; - certinfo_t certinfo; - X509 *cert; - Manifest *manifest; - object_generation_t manifest_generation; - STACK_OF(OPENSSL_STRING) *filenames; - int manifest_iteration, filename_iteration, stale_manifest; - walk_state_t state; - uri_t crldp; - STACK_OF(X509) *certs; - STACK_OF(X509_CRL) *crls; -} walk_ctx_t; - -DECLARE_STACK_OF(walk_ctx_t) - -/** - * Return codes from rsync functions. - */ -typedef enum { - rsync_status_done, /* Request completed */ - rsync_status_failed, /* Request failed */ - rsync_status_timed_out, /* Request timed out */ - rsync_status_pending, /* Request in progress */ - rsync_status_skipped /* Request not attempted */ -} rsync_status_t; - -/** - * States for asynchronous rsync. - * "initial" must be first. - */ - -#define RSYNC_STATES \ - QQ(initial) \ - QQ(running) \ - QQ(conflict_wait) \ - QQ(retry_wait) \ - QQ(closed) \ - QQ(terminating) - -#define QQ(x) rsync_state_##x, -typedef enum { RSYNC_STATES RSYNC_STATE_T_MAX } rsync_state_t; -#undef QQ - -#define QQ(x) #x , -static const char * const rsync_state_label[] = { RSYNC_STATES NULL }; -#undef QQ - -/** - * Context for asyncronous rsync. - */ -typedef struct rsync_ctx { - uri_t uri; - void (*handler)(rcynic_ctx_t *, const struct rsync_ctx *, const rsync_status_t, const uri_t *, void *); - void *cookie; - rsync_state_t state; - enum { - rsync_problem_none, /* Must be first */ - rsync_problem_timed_out, - rsync_problem_refused - } problem; - unsigned tries; - pid_t pid; - int fd; - time_t started, deadline; - char buffer[URI_MAX * 4]; - size_t buflen; -} rsync_ctx_t; - -DECLARE_STACK_OF(rsync_ctx_t) - -/** - * Record of rsync attempts. 
- */ -typedef struct rsync_history { - uri_t uri; - time_t started, finished; - rsync_status_t status; - int final_slash; -} rsync_history_t; - -DECLARE_STACK_OF(rsync_history_t) - -/** - * Deferred task. - */ -typedef struct task { - void (*handler)(rcynic_ctx_t *, void *); - void *cookie; -} task_t; - -DECLARE_STACK_OF(task_t) - -/** - * Trust anchor locator (TAL) fetch context. - */ -typedef struct tal_ctx { - uri_t uri; - path_t path; - EVP_PKEY *pkey; -} tal_ctx_t; - -/** - * Extended context for verify callbacks. This is a wrapper around - * OpenSSL's X509_STORE_CTX, and the embedded X509_STORE_CTX @em must be - * the first element of this structure in order for the evil cast to - * do the right thing. This is ugly but safe, as the C language - * promises us that the address of the first element of a structure is - * the same as the address of the structure. - */ -typedef struct rcynic_x509_store_ctx { - X509_STORE_CTX ctx; /* Must be first */ - rcynic_ctx_t *rc; - const certinfo_t *subject; -} rcynic_x509_store_ctx_t; - -/** - * Program context that would otherwise be a mess of global variables. 
- */ -struct rcynic_ctx { - path_t authenticated, old_authenticated, new_authenticated, unauthenticated; - char *jane, *rsync_program; - STACK_OF(validation_status_t) *validation_status; - STACK_OF(rsync_history_t) *rsync_history; - STACK_OF(rsync_ctx_t) *rsync_queue; - STACK_OF(task_t) *task_queue; - int use_syslog, allow_stale_crl, allow_stale_manifest, use_links; - int require_crl_in_manifest, rsync_timeout, priority[LOG_LEVEL_T_MAX]; - int allow_non_self_signed_trust_anchor, allow_object_not_in_manifest; - int max_parallel_fetches, max_retries, retry_wait_min, run_rsync; - int allow_digest_mismatch, allow_crl_digest_mismatch; - int allow_nonconformant_name, allow_ee_without_signedObject; - int allow_1024_bit_ee_key, allow_wrong_cms_si_attributes; - int rsync_early; - unsigned max_select_time; - validation_status_t *validation_status_in_waiting; - validation_status_t *validation_status_root; - log_level_t log_level; - X509_STORE *x509_store; -}; - - - -/* - * Handle NIDs we wish OpenSSL knew about. This is carefully (we - * hope) written to do nothing at all for any NID that OpenSSL knows - * about; the intent is just to add definitions for things OpenSSL - * doesn't know about yet. Of necessity, this is a bit gross, since - * it confounds runtime static variables with predefined macro names, - * but we try to put all the magic associated with this in one place. - * - * In the long run it might be cleaner to generate this with a trivial - * script and put the result in a shared .h file, but this will do for - * the moment. 
- */ - -#ifndef NID_ad_rpkiManifest -static int NID_ad_rpkiManifest; -#endif - -#ifndef NID_ad_signedObject -static int NID_ad_signedObject; -#endif - -#ifndef NID_ct_ROA -static int NID_ct_ROA; -#endif - -#ifndef NID_ct_rpkiManifest -static int NID_ct_rpkiManifest; -#endif - -#ifndef NID_ct_rpkiGhostbusters -static int NID_ct_rpkiGhostbusters; -#endif - -#ifndef NID_cp_ipAddr_asNumber -static int NID_cp_ipAddr_asNumber; -#endif - -#ifndef NID_id_kp_bgpsec_router -static int NID_id_kp_bgpsec_router; -#endif - -/** - * Missing NIDs, if any. - */ -static const struct { - int *nid; - const char *oid; - const char *sn; - const char *ln; -} missing_nids[] = { - -#ifndef NID_ad_rpkiManifest - {&NID_ad_rpkiManifest, "1.3.6.1.5.5.7.48.10", "id-ad-rpkiManifest", "RPKI Manifest"}, -#endif - -#ifndef NID_ad_signedObject - {&NID_ad_signedObject, "1.3.6.1.5.5.7.48.11", "id-ad-signedObject", "Signed Object"}, -#endif - -#ifndef NID_ct_ROA - {&NID_ct_ROA, "1.2.840.113549.1.9.16.1.24", "id-ct-routeOriginAttestation", "ROA eContent"}, -#endif - -#ifndef NID_ct_rpkiManifest - {&NID_ct_rpkiManifest, "1.2.840.113549.1.9.16.1.26", "id-ct-rpkiManifest", "RPKI Manifest eContent"}, -#endif - -#ifndef NID_ct_rpkiGhostbusters - {&NID_ct_rpkiGhostbusters, "1.2.840.113549.1.9.16.1.35", "id-ct-rpkiGhostbusters", "RPKI Ghostbusters eContent"}, -#endif - -#ifndef NID_cp_ipAddr_asNumber - {&NID_cp_ipAddr_asNumber, "1.3.6.1.5.5.7.14.2", "id-cp-ipAddr-asNumber", "RPKI Certificate Policy"}, -#endif - -#ifndef NID_id_kp_bgpsec_router - {&NID_id_kp_bgpsec_router, "1.3.6.1.5.5.7.3.30", "id-kp-bgpsec-router", "BGPSEC Router Certificate"}, -#endif - -}; - - - -/** - * Subversion ID data. - */ -static const char svn_id[] = "$Id$"; - -/** - * Suffix we use temporarily during the symlink shuffle. Could be - * almost anything, but we want to do the length check early, before - * we waste a lot of work we'll just have to throw away, so we just - * wire in something short and obvious. 
- */ -static const char authenticated_symlink_suffix[] = ".new"; - -/** - * Constants for comparisions. We can't build these at compile time, - * so they can't be const, but treat them as if they were once - * allocated. - * - * We probably need both a better scheme for naming NID_ replacements - * and a more comprehensive rewrite of how we handle OIDs OpenSSL - * doesn't know about, so that we neither conflict with defined - * symbols nor duplicate effort nor explode if and when OpenSSL adds - * new OIDs (with or without the names we would have used). - */ - -static const ASN1_INTEGER *asn1_zero, *asn1_four_octets, *asn1_twenty_octets; -static int NID_binary_signing_time; - - - -/** - * Handle missing NIDs. - */ -static int -create_missing_nids(void) -{ - int i; - - for (i = 0; i < (int) (sizeof(missing_nids) / sizeof(*missing_nids)); i++) - if ((*missing_nids[i].nid = OBJ_txt2nid(missing_nids[i].oid)) == NID_undef && - (*missing_nids[i].nid = OBJ_create(missing_nids[i].oid, - missing_nids[i].sn, - missing_nids[i].ln)) == NID_undef) - return 0; - - return 1; -} - - - -/** - * Type-safe wrapper around free() to keep safestack macros happy. - */ -static void OPENSSL_STRING_free(OPENSSL_STRING s) -{ - if (s) - free(s); -} - -/** - * Wrapper around an idiom we use with OPENSSL_STRING stacks. There's - * a bug in the current sk_OPENSSL_STRING_delete() macro that casts - * the return value to the wrong type, so we cast it to something - * innocuous here and avoid using that macro elsewhere. - */ -static void sk_OPENSSL_STRING_remove(STACK_OF(OPENSSL_STRING) *sk, const char *str) -{ - OPENSSL_STRING_free((void *) sk_OPENSSL_STRING_delete(sk, sk_OPENSSL_STRING_find(sk, str))); -} - -/** - * Allocate a new validation_status_t object. - */ -static validation_status_t *validation_status_t_new(void) -{ - validation_status_t *v = malloc(sizeof(*v)); - if (v) - memset(v, 0, sizeof(*v)); - return v; -} - -/** - * Type-safe wrapper around free() to keep safestack macros happy. 
- */ -static void validation_status_t_free(validation_status_t *v) -{ - if (v) - free(v); -} - - - -/** - * Allocate a new rsync_history_t object. - */ -static rsync_history_t *rsync_history_t_new(void) -{ - rsync_history_t *h = malloc(sizeof(*h)); - if (h) - memset(h, 0, sizeof(*h)); - return h; -} - -/** - * Type-safe wrapper around free() to keep safestack macros happy. - */ -static void rsync_history_t_free(rsync_history_t *h) -{ - if (h) - free(h); -} - -/** - * Compare two rsync_history_t objects. - */ -static int rsync_history_cmp(const rsync_history_t * const *a, const rsync_history_t * const *b) -{ - return strcmp((*a)->uri.s, (*b)->uri.s); -} - - - -/** - * Convert a time_t to a printable string in UTC format. - */ -static const char *time_to_string(timestamp_t *ts, const time_t *t) -{ - time_t now; - size_t n; - - assert(ts != NULL); - - if (t == NULL) { - now = time(0); - t = &now; - } - - n = strftime(ts->s, sizeof(ts->s), "%Y-%m-%dT%H:%M:%SZ", gmtime(t)); - assert(n > 0); - - return ts->s; -} - -/* - * GCC attributes to help catch format string errors. - */ - -#ifdef __GNUC__ - -static void logmsg(const rcynic_ctx_t *rc, - const log_level_t level, - const char *fmt, ...) - __attribute__ ((format (printf, 3, 4))); -#endif - -/** - * Logging. - */ -static void vlogmsg(const rcynic_ctx_t *rc, - const log_level_t level, - const char *fmt, - va_list ap) -{ - assert(rc && fmt); - - if (rc->log_level < level) - return; - - if (rc->use_syslog) { - vsyslog(rc->priority[level], fmt, ap); - } else { - char ts[sizeof("00:00:00")+1]; - time_t t = time(0); - strftime(ts, sizeof(ts), "%H:%M:%S", localtime(&t)); - fprintf(stderr, "%s: ", ts); - if (rc->jane) - fprintf(stderr, "%s: ", rc->jane); - vfprintf(stderr, fmt, ap); - putc('\n', stderr); - } -} - -/** - * Logging. - */ -static void logmsg(const rcynic_ctx_t *rc, - const log_level_t level, - const char *fmt, ...) 
-{ - va_list ap; - va_start(ap, fmt); - vlogmsg(rc, level, fmt, ap); - va_end(ap); -} - -/** - * Print OpenSSL library errors. - */ -static void log_openssl_errors(const rcynic_ctx_t *rc) -{ - const char *data, *file; - unsigned long code; - char error[256]; - int flags, line; - - if (!rc->log_level < log_verbose) - return; - - while ((code = ERR_get_error_line_data(&file, &line, &data, &flags))) { - ERR_error_string_n(code, error, sizeof(error)); - if (data && (flags & ERR_TXT_STRING)) - logmsg(rc, log_sys_err, "OpenSSL error %s:%d: %s: %s", file, line, error, data); - else - logmsg(rc, log_sys_err, "OpenSSL error %s:%d: %s", file, line, error); - } -} - -/** - * Configure logging. - */ -static int configure_logmsg(rcynic_ctx_t *rc, const char *name) -{ - int i; - - assert(rc && name); - - for (i = 0; i < sizeof(log_levels)/sizeof(*log_levels); i++) { - if (!strcmp(name, log_levels[i].name)) { - rc->log_level = log_levels[i].value; - return 1; - } - } - - logmsg(rc, log_usage_err, "Bad log level %s", name); - return 0; -} - -/** - * Configure syslog. - */ -static int configure_syslog(const rcynic_ctx_t *rc, - int *result, - const CODE *table, - const char *name) -{ - assert(result && table && name); - - while (table->c_name && strcmp(table->c_name, name)) - table++; - - if (table->c_name) { - *result = table->c_val; - return 1; - } else { - logmsg(rc, log_usage_err, "Bad syslog code %s", name); - return 0; - } -} - -/** - * Configure boolean variable. - */ -static int configure_boolean(const rcynic_ctx_t *rc, - int *result, - const char *val) -{ - assert(rc && result && val); - - switch (*val) { - case 'y': case 'Y': case 't': case 'T': case '1': - *result = 1; - return 1; - case 'n': case 'N': case 'f': case 'F': case '0': - *result = 0; - return 1; - default: - logmsg(rc, log_usage_err, "Bad boolean value %s", val); - return 0; - } -} - -/** - * Configure integer variable. 
- */ -static int configure_integer(const rcynic_ctx_t *rc, - int *result, - const char *val) -{ - long res; - char *p; - - assert(rc && result && val); - - res = strtol(val, &p, 10); - - if (*val != '\0' && *p == '\0') { - *result = (int) res; - return 1; - } else { - logmsg(rc, log_usage_err, "Bad integer value %s", val); - return 0; - } -} - -/** - * Configure unsigned integer variable. - */ -static int configure_unsigned_integer(const rcynic_ctx_t *rc, - unsigned *result, - const char *val) -{ - unsigned long res; - char *p; - - assert(rc && result && val); - - res = strtoul(val, &p, 10); - - if (*val != '\0' && *p == '\0') { - *result = (unsigned) res; - return 1; - } else { - logmsg(rc, log_usage_err, "Bad integer value %s", val); - return 0; - } -} - - - -/** - * Make a directory if it doesn't already exist. - */ -static int mkdir_maybe(const rcynic_ctx_t *rc, const path_t *name) -{ - path_t path; - char *s; - - assert(name != NULL); - if (strlen(name->s) >= sizeof(path.s)) { - logmsg(rc, log_data_err, "Pathname %s too long", name->s); - return 0; - } - strcpy(path.s, name->s); - s = path.s[0] == '/' ? path.s + 1 : path.s; - if ((s = strrchr(s, '/')) == NULL) - return 1; - *s = '\0'; - if (!mkdir_maybe(rc, &path)) { - logmsg(rc, log_sys_err, "Failed to make directory %s", path.s); - return 0; - } - if (!access(path.s, F_OK)) - return 1; - logmsg(rc, log_verbose, "Creating directory %s", path.s); - return mkdir(path.s, 0777) == 0; -} - -/** - * strdup() a string and push it onto a stack. - */ -static int sk_OPENSSL_STRING_push_strdup(STACK_OF(OPENSSL_STRING) *sk, const char *str) -{ - OPENSSL_STRING s = strdup(str); - - if (s && sk_OPENSSL_STRING_push(sk, s)) - return 1; - if (s) - free(s); - return 0; -} - -/** - * Compare two URI strings, for OpenSSL STACK operations. - */ - -static int uri_cmp(const char * const *a, const char * const *b) -{ - return strcmp(*a, *b); -} - -/** - * Is string an rsync URI? 
 */
static int is_rsync(const char *uri)
{
  return uri && !strncmp(uri, SCHEME_RSYNC, SIZEOF_RSYNC);
}

/**
 * Convert an rsync URI to a filename, checking for evil character
 * sequences.  NB: This routine can't call mib_increment(), because
 * mib_increment() calls it, so errors detected here only go into
 * the log, not the MIB.
 *
 * On success, path receives prefix (if non-NULL) concatenated with
 * the URI's path portion; on failure, path is the empty string and
 * zero is returned.
 */
static int uri_to_filename(const rcynic_ctx_t *rc,
			   const uri_t *uri,
			   path_t *path,
			   const path_t *prefix)
{
  const char *u;
  size_t n;

  path->s[0] = '\0';

  if (!is_rsync(uri->s)) {
    logmsg(rc, log_telemetry, "%s is not an rsync URI, not converting to filename", uri->s);
    return 0;
  }

  u = uri->s + SIZEOF_RSYNC;
  n = strlen(u);

  /* Reject absolute paths, leading dots, and any "/../" traversal. */
  if (u[0] == '/' || u[0] == '.' || strstr(u, "/../") ||
      (n >= 3 && !strcmp(u + n - 3, "/.."))) {
    logmsg(rc, log_data_err, "Dangerous URI %s, not converting to filename", uri->s);
    return 0;
  }

  if (prefix)
    n += strlen(prefix->s);

  if (n >= sizeof(path->s)) {
    logmsg(rc, log_data_err, "URI %s too long, not converting to filename", uri->s);
    return 0;
  }

  if (prefix) {
    strcpy(path->s, prefix->s);
    strcat(path->s, u);
  } else {
    strcpy(path->s, u);
  }

  return 1;
}

/**
 * Compare filename fields of two FileAndHash structures.
 */
static int FileAndHash_name_cmp(const FileAndHash * const *a, const FileAndHash * const *b)
{
  return strcmp((char *) (*a)->file->data, (char *) (*b)->file->data);
}

/**
 * Get value of code in a validation_status_t.  The events field is a
 * bitmap indexed by mib_counter_t value.
 */
static int validation_status_get_code(const validation_status_t *v,
				      const mib_counter_t code)
{
  assert(v && code < MIB_COUNTER_T_MAX);
  return (v->events[code / 8] & (1 << (code % 8))) != 0;
}

/**
 * Set value of code in a validation_status_t.
 */
static void validation_status_set_code(validation_status_t *v,
				       const mib_counter_t code,
				       int value)
{
  assert(v && code < MIB_COUNTER_T_MAX);
  if (value)
    v->events[code / 8] |=  (1 << (code % 8));
  else
    v->events[code / 8] &= ~(1 << (code % 8));
}

/**
 * validation_status object comparison, for AVL tree rather than
 * OpenSSL stacks.
 *
 * Orders by generation first, then by URI.  NOTE(review): the
 * generation comparison is node-minus-key while the string comparison
 * is key-minus-node; this is consistent between sprout and find below,
 * which is what matters for tree correctness.
 */
static int
validation_status_cmp(const validation_status_t *node,
		      const uri_t *uri,
		      const object_generation_t generation)
{
  int cmp = ((int) node->generation) - ((int) generation);
  if (cmp)
    return cmp;
  else
    return strcmp(uri->s, node->uri.s);
}

/**
 * validation_status AVL tree insertion.  Adapted from code written by
 * Paul Vixie and explicitly placed in the public domain using examples
 * from the book: "Algorithms & Data Structures," Niklaus Wirth,
 * Prentice-Hall, 1986, ISBN 0-13-022005-1.  Thanks, Paul!
 *
 * Returns the node now in the tree for new_node's key: new_node
 * itself if it was inserted, or the pre-existing node with the same
 * key if one was already present.
 */
static validation_status_t *
validation_status_sprout(validation_status_t **node,
			 int *needs_balancing,
			 validation_status_t *new_node)
{
#ifdef AVL_DEBUG
#define AVL_MSG(msg) sprintf(stderr, "AVL_DEBUG: '%s'\n", msg)
#else
#define AVL_MSG(msg)
#endif

  validation_status_t *p1, *p2, *result;
  int cmp;

  /*
   * Are we grounded?  If so, add the node "here" and set the
   * rebalance flag, then exit.
   */
  if (*node == NULL) {
    AVL_MSG("Grounded, adding new node");
    new_node->left_child = NULL;
    new_node->right_child = NULL;
    new_node->balance = 0;
    *node = new_node;
    *needs_balancing = 1;
    return *node;
  }

  /*
   * Compare the data.
   */
  cmp = validation_status_cmp(*node, &new_node->uri, new_node->generation);

  /*
   * If LESS, prepare to move to the left.
   */
  if (cmp < 0) {

    AVL_MSG("LESS. sprouting left.");
    result = validation_status_sprout(&(*node)->left_child, needs_balancing, new_node);

    if (*needs_balancing) {
      AVL_MSG("LESS: left branch has grown longer");

      switch ((*node)->balance) {

      case 1:
	/*
	 * Right branch WAS longer; balance is ok now.
	 */
	AVL_MSG("LESS: case 1.. balance restored implicitly");
	(*node)->balance = 0;
	*needs_balancing = 0;
	break;

      case 0:
	/*
	 * Balance WAS okay; now left branch longer.
	 */
	AVL_MSG("LESS: case 0.. balance bad but still ok");
	(*node)->balance = -1;
	break;

      case -1:
	/*
	 * Left branch was already too long.  Rebalance.
	 */
	AVL_MSG("LESS: case -1: rebalancing");
	p1 = (*node)->left_child;

	if (p1->balance == -1) {
	  /* Single right rotation (LL case). */
	  AVL_MSG("LESS: single LL");
	  (*node)->left_child = p1->right_child;
	  p1->right_child = *node;
	  (*node)->balance = 0;
	  *node = p1;
	}

	else {
	  /* Double rotation (LR case). */
	  AVL_MSG("LESS: double LR");

	  p2 = p1->right_child;
	  p1->right_child = p2->left_child;
	  p2->left_child = p1;

	  (*node)->left_child = p2->right_child;
	  p2->right_child = *node;

	  if (p2->balance == -1)
	    (*node)->balance = 1;
	  else
	    (*node)->balance = 0;

	  if (p2->balance == 1)
	    p1->balance = -1;
	  else
	    p1->balance = 0;
	  *node = p2;
	}

	(*node)->balance = 0;
	*needs_balancing = 0;
      }
    }
    return result;
  }

  /*
   * If MORE, prepare to move to the right.
   */
  if (cmp > 0) {

    AVL_MSG("MORE: sprouting to the right");
    result = validation_status_sprout(&(*node)->right_child, needs_balancing, new_node);

    if (*needs_balancing) {
      AVL_MSG("MORE: right branch has grown longer");

      switch ((*node)->balance) {

      case -1:AVL_MSG("MORE: balance was off, fixed implicitly");
	(*node)->balance = 0;
	*needs_balancing = 0;
	break;

      case 0: AVL_MSG("MORE: balance was okay, now off but ok");
	(*node)->balance = 1;
	break;

      case 1: AVL_MSG("MORE: balance was off, need to rebalance");
	p1 = (*node)->right_child;

	if (p1->balance == 1) {
	  /* Single left rotation (RR case). */
	  AVL_MSG("MORE: single RR");
	  (*node)->right_child = p1->left_child;
	  p1->left_child = *node;
	  (*node)->balance = 0;
	  *node = p1;
	}

	else {
	  /* Double rotation (RL case). */
	  AVL_MSG("MORE: double RL");

	  p2 = p1->left_child;
	  p1->left_child = p2->right_child;
	  p2->right_child = p1;

	  (*node)->right_child = p2->left_child;
	  p2->left_child = *node;

	  if (p2->balance == 1)
	    (*node)->balance = -1;
	  else
	    (*node)->balance = 0;

	  if (p2->balance == -1)
	    p1->balance = 1;
	  else
	    p1->balance = 0;

	  *node = p2;
	} /*else*/
	(*node)->balance = 0;
	*needs_balancing = 0;
      }
    }
    return result;
  }

  /*
   * Neither more nor less, found existing node matching key, return it.
   */
  AVL_MSG("I found it!");
  *needs_balancing = 0;
  return *node;

#undef AVL_MSG
}

/**
 * Add a validation status entry to internal log.
 */
static void log_validation_status(rcynic_ctx_t *rc,
				  const uri_t *uri,
				  const mib_counter_t code,
				  const object_generation_t generation)
{
  validation_status_t *v = NULL;
  int needs_balancing = 0;

  assert(rc && uri && code < MIB_COUNTER_T_MAX && generation < OBJECT_GENERATION_MAX);

  if (!rc->validation_status)
    return;

  /* Don't record skipped rsync transfers when rsync is disabled. */
  if (code == rsync_transfer_skipped && !rc->run_rsync)
    return;

  /*
   * validation_status_in_waiting is a pre-allocated spare entry, so
   * that the AVL insertion below can be attempted without allocating
   * on every call.
   */
  if (rc->validation_status_in_waiting == NULL &&
      (rc->validation_status_in_waiting = validation_status_t_new()) == NULL) {
    logmsg(rc, log_sys_err, "Couldn't allocate validation status entry for %s", uri->s);
    return;
  }

  v = rc->validation_status_in_waiting;
  memset(v, 0, sizeof(*v));
  v->uri = *uri;
  v->generation = generation;

  /*
   * If sprout() returns our spare, it was inserted and is no longer
   * spare; otherwise an entry for this key already existed and v now
   * points at it.
   */
  v = validation_status_sprout(&rc->validation_status_root, &needs_balancing, v);
  if (v == rc->validation_status_in_waiting)
    rc->validation_status_in_waiting = NULL;

  /* Newly-inserted entries also go on the flat stack for iteration. */
  if (rc->validation_status_in_waiting == NULL &&
      !sk_validation_status_t_push(rc->validation_status, v)) {
    logmsg(rc, log_sys_err, "Couldn't store validation status entry for %s", uri->s);
    return;
  }

  v->timestamp = time(0);

  /* Each code is recorded (and logged) at most once per entry. */
  if (validation_status_get_code(v, code))
    return;

  validation_status_set_code(v, code, 1);

  logmsg(rc, log_verbose, "Recording \"%s\" for %s%s%s",
	 (mib_counter_desc[code]
	  ? mib_counter_desc[code]
	  : X509_verify_cert_error_string(mib_counter_openssl[code])),
	 (generation != object_generation_null ? object_generation_label[generation] : ""),
	 (generation != object_generation_null ? " " : ""),
	 uri->s);
}

/**
 * Copy or link a file, as the case may be.
 */
static int cp_ln(const rcynic_ctx_t *rc, const path_t *source, const path_t *target)
{
  struct stat statbuf;
  struct utimbuf utimebuf;
  FILE *in = NULL, *out = NULL;
  int c, ok = 0;

  /* Hard-link mode: cheap, but requires same filesystem. */
  if (rc->use_links) {
    (void) unlink(target->s);
    ok = link(source->s, target->s) == 0;
    if (!ok)
      logmsg(rc, log_sys_err, "Couldn't link %s to %s: %s",
	     source->s, target->s, strerror(errno));
    return ok;
  }

  /* Copy mode: byte-by-byte stdio copy. */
  if ((in = fopen(source->s, "rb")) == NULL ||
      (out = fopen(target->s, "wb")) == NULL)
    goto done;

  while ((c = getc(in)) != EOF)
    if (putc(c, out) == EOF)
      goto done;

  ok = 1;

 done:
  /* fclose() failures (e.g. write errors on flush) also count. */
  ok &= !(in  != NULL && fclose(in)  == EOF);
  ok &= !(out != NULL && fclose(out) == EOF);

  if (!ok) {
    logmsg(rc, log_sys_err, "Couldn't copy %s to %s: %s",
	   source->s, target->s, strerror(errno));
    return ok;
  }

  /*
   * Preserve the file modification time to allow for detection of
   * changed objects in the authenticated directory.  Failure to reset
   * the times is not optimal, but is also not critical, thus no
   * failure return.
   */
  if (stat(source->s, &statbuf) < 0 ||
      (utimebuf.actime = statbuf.st_atime,
       utimebuf.modtime = statbuf.st_mtime,
       utime(target->s, &utimebuf) < 0))
    logmsg(rc, log_sys_err, "Couldn't copy inode timestamp from %s to %s: %s",
	   source->s, target->s, strerror(errno));

  return ok;
}

/**
 * Install an object.
 */
static int install_object(rcynic_ctx_t *rc,
			  const uri_t *uri,
			  const path_t *source,
			  const object_generation_t generation)
{
  path_t target;

  if (!uri_to_filename(rc, uri, &target, &rc->new_authenticated)) {
    logmsg(rc, log_data_err, "Couldn't generate installation name for %s", uri->s);
    return 0;
  }

  if (!mkdir_maybe(rc, &target)) {
    logmsg(rc, log_sys_err, "Couldn't create directory for %s", target.s);
    return 0;
  }

  if (!cp_ln(rc, source, &target))
    return 0;
  log_validation_status(rc, uri, object_accepted, generation);
  return 1;
}

/**
 * AVL tree lookup for validation status objects.  Returns NULL if no
 * entry matches the (uri, generation) key.
 */
static validation_status_t *
validation_status_find(validation_status_t *node,
		       const uri_t *uri,
		       const object_generation_t generation)
{
  int cmp;

  /* Branch direction must mirror validation_status_sprout(). */
  while (node != NULL && (cmp = validation_status_cmp(node, uri, generation)) != 0)
    node = cmp < 0 ? node->left_child : node->right_child;

  return node;
}

/**
 * Check whether we have a validation status entry corresponding to a
 * given filename.  This is intended for use during pruning the
 * unauthenticated tree, so it only checks the current generation.
 */
static int
validation_status_find_filename(const rcynic_ctx_t *rc,
				const char *filename)
{
  uri_t uri;

  /* Reconstruct the rsync:// URI this local filename maps to. */
  if (strlen(filename) + SIZEOF_RSYNC >= sizeof(uri.s))
    return 0;

  strcpy(uri.s, SCHEME_RSYNC);
  strcat(uri.s, filename);

  return validation_status_find(rc->validation_status_root, &uri, object_generation_current) != NULL;
}

/**
 * Figure out whether we already have a good copy of an object.  This
 * is a little more complicated than it sounds, because we might have
 * failed the current generation and accepted the backup due to having
 * followed the old CA certificate chain first during a key rollover.
 * So if this check is of the current object and we have not already
 * accepted the current object for this URI, we need to recheck.
 *
 * We also handle logging when we decide that we do need to check, so
 * that the caller doesn't need to concern itself with why we thought
 * the check was necessary.
 */
static int skip_checking_this_object(rcynic_ctx_t *rc,
				     const uri_t *uri,
				     const object_generation_t generation)
{
  validation_status_t *v = NULL;
  path_t path;

  assert(rc && uri && rc->validation_status);

  if (!uri_to_filename(rc, uri, &path, &rc->new_authenticated))
    return 1;

  /* Not yet installed in the new authenticated tree: must check. */
  if (access(path.s, R_OK)) {
    logmsg(rc, log_telemetry, "Checking %s", uri->s);
    return 0;
  }

  if (generation != object_generation_current)
    return 1;

  /* Installed copy might be the backup generation; see note above. */
  v = validation_status_find(rc->validation_status_root, uri, generation);

  if (v != NULL && validation_status_get_code(v, object_accepted))
    return 1;

  log_validation_status(rc, uri, rechecking_object, generation);
  logmsg(rc, log_telemetry, "Rechecking %s", uri->s);
  return 0;
}



/**
 * Check str for a suffix.
 */
static int endswith(const char *str, const char *suffix)
{
  size_t len_str, len_suffix;
  assert(str != NULL && suffix != NULL);
  len_str = strlen(str);
  len_suffix = strlen(suffix);
  return len_str >= len_suffix && !strcmp(str + len_str - len_suffix, suffix);
}

/**
 * Check str for a prefix.
 */
static int startswith(const char *str, const char *prefix)
{
  size_t len_str, len_prefix;
  assert(str != NULL && prefix != NULL);
  len_str = strlen(str);
  len_prefix = strlen(prefix);
  return len_str >= len_prefix && !strncmp(str, prefix, len_prefix);
}

/**
 * Convert a filename to a file:// URI, for logging.
 */
static void filename_to_uri(uri_t *uri,
			    const char *fn)
{
  assert(sizeof("file://") < sizeof(uri->s));
  strcpy(uri->s, "file://");
  /* Relative filename: prepend the current working directory. */
  if (*fn != '/') {
    if (getcwd(uri->s + strlen(uri->s), sizeof(uri->s) - strlen(uri->s)) == NULL ||
	(!endswith(uri->s, "/") && strlen(uri->s) >= sizeof(uri->s) - 1))
      uri->s[0] = '\0';
    else
      strcat(uri->s, "/");
  }
  /* On any overflow, the result is the empty string, not a truncation. */
  if (uri->s[0] != '\0' && strlen(uri->s) + strlen(fn) < sizeof(uri->s))
    strcat(uri->s, fn);
  else
    uri->s[0] = '\0';
}

/**
 * Set a directory name, adding or stripping trailing slash as needed.
 */
static int set_directory(const rcynic_ctx_t *rc, path_t *out, const char *in, const int want_slash)
{
  int has_slash, need_slash;
  size_t n;

  assert(rc && in && out);

  n = strlen(in);

  if (n == 0) {
    logmsg(rc, log_usage_err, "Empty path");
    return 0;
  }

  has_slash = in[n - 1] == '/';

  need_slash = want_slash && !has_slash;

  if (n + need_slash + 1 > sizeof(out->s)) {
    logmsg(rc, log_usage_err, "Path \"%s\" too long", in);
    return 0;
  }

  strcpy(out->s, in);
  if (need_slash)
    strcat(out->s, "/");
  else if (has_slash && !want_slash)
    out->s[n - 1] = '\0';

  return 1;
}

/**
 * Test whether a filesystem path points to a directory.
 * Uses lstat(), so a symlink to a directory does NOT count.
 */
static int is_directory(const path_t *name)
{
  struct stat st;

  assert(name);
  return lstat(name->s, &st) == 0 && S_ISDIR(st.st_mode);
}

/**
 * Remove a directory tree, like rm -rf.
 */
static int rm_rf(const path_t *name)
{
  path_t path;
  struct dirent *d;
  DIR *dir;
  int ret = 0;

  assert(name);

  /* Non-directories (including symlinks) are just unlinked. */
  if (!is_directory(name))
    return unlink(name->s) == 0;

  if ((dir = opendir(name->s)) == NULL)
    return 0;

  while ((d = readdir(dir)) != NULL) {
    if (!strcmp(d->d_name, ".") || !strcmp(d->d_name, ".."))
      continue;
    if (snprintf(path.s, sizeof(path.s), "%s/%s", name->s, d->d_name) >= sizeof(path.s))
      goto done;
    /* Try unlink first; fall back to recursion for subdirectories. */
    if (unlink(path.s) == 0)
      continue;
    else if (rm_rf(&path))
      continue;
    else
      goto done;
  }

  ret = rmdir(name->s) == 0;

 done:
  closedir(dir);
  return ret;
}

/**
 * Construct names for the directories not directly settable by the
 * user.
 *
 * This function also checks for an old-style rc->authenticated
 * directory, to simplify upgrade from older versions of rcynic.
 */
static int construct_directory_names(rcynic_ctx_t *rc)
{
  struct stat st;
  ssize_t n;
  path_t p;
  time_t t = time(0);

  p = rc->authenticated;

  n = strlen(p.s);

  if (n + sizeof(authenticated_symlink_suffix) >= sizeof(p.s)) {
    logmsg(rc, log_usage_err, "Symlink name would be too long");
    return 0;
  }

  /* New authenticated tree gets a UTC timestamp suffix. */
  if (strftime(p.s + n, sizeof(p.s) - n - 1, ".%Y-%m-%dT%H:%M:%SZ", gmtime(&t)) == 0) {
    logmsg(rc, log_usage_err, "Generated path with timestamp would be too long");
    return 0;
  }

  if (!set_directory(rc, &rc->new_authenticated, p.s, 1))
    return 0;

  if (!set_directory(rc, &rc->old_authenticated, rc->authenticated.s, 1))
    return 0;

  /*
   * Upgrade case: an old-style plain directory (not a symlink) at
   * rc->authenticated gets shoved aside to "<name>.old".
   */
  if (lstat(rc->authenticated.s, &st) == 0 && S_ISDIR((st.st_mode)) &&
      strlen(rc->authenticated.s) + sizeof(".old") < sizeof(p.s)) {
    p = rc->authenticated;
    strcat(p.s, ".old");
    rm_rf(&p);
    (void) rename(rc->authenticated.s, p.s);
  }

  if (lstat(rc->authenticated.s, &st) == 0 && S_ISDIR(st.st_mode)) {
    logmsg(rc, log_usage_err,
	   "Existing %s directory is in the way, please remove it",
	   rc->authenticated.s);
    return 0;
  }

  return 1;
}

/**
 * Do final
 * symlink shuffle and cleanup of output directories.
 */
static int finalize_directories(const rcynic_ctx_t *rc)
{
  path_t path, real_old, real_new;
  const char *dir;
  glob_t g;
  int i;

  if (!realpath(rc->old_authenticated.s, real_old.s))
    real_old.s[0] = '\0';

  if (!realpath(rc->new_authenticated.s, real_new.s))
    real_new.s[0] = '\0';

  assert(real_new.s[0] && real_new.s[strlen(real_new.s) - 1] != '/');

  /* Symlink target is just the last component of the new tree. */
  if ((dir = strrchr(real_new.s, '/')) == NULL)
    dir = real_new.s;
  else
    dir++;

  path = rc->authenticated;

  if (strlen(path.s) + sizeof(authenticated_symlink_suffix) >= sizeof(path.s))
    return 0;
  strcat(path.s, authenticated_symlink_suffix);

  /*
   * Create the new symlink under a temporary name, then rename() it
   * into place so the switch is atomic.
   */
  (void) unlink(path.s);

  if (symlink(dir, path.s) < 0) {
    logmsg(rc, log_sys_err, "Couldn't link %s to %s: %s",
	   path.s, dir, strerror(errno));
    return 0;
  }

  if (rename(path.s, rc->authenticated.s) < 0) {
    logmsg(rc, log_sys_err, "Couldn't rename %s to %s: %s",
	   path.s, rc->authenticated.s, strerror(errno));
    return 0;
  }

  /* Keep a "<name>.old" symlink pointing at the previous tree. */
  if (real_old.s[0] && strlen(rc->authenticated.s) + sizeof(".old") < sizeof(path.s)) {
    assert(real_old.s[strlen(real_old.s) - 1] != '/');

    path = rc->authenticated;
    strcat(path.s, ".old");

    (void) unlink(path.s);

    if ((dir = strrchr(real_old.s, '/')) == NULL)
      dir = real_old.s;
    else
      dir++;

    (void) symlink(dir, path.s);
  }

  /* Clean up any stale timestamped trees other than old and new. */
  path = rc->authenticated;
  assert(strlen(path.s) + sizeof(".*") < sizeof(path.s));
  strcat(path.s, ".*");

  memset(&g, 0, sizeof(g));

  if (real_new.s[0] && glob(path.s, 0, 0, &g) == 0) {
    for (i = 0; i < g.gl_pathc; i++)
      if (realpath(g.gl_pathv[i], path.s) &&
	  strcmp(path.s, real_old.s) &&
	  strcmp(path.s, real_new.s))
	rm_rf(&path);
    globfree(&g);
  }

  return 1;
}



/**
 * Test whether a pair of URIs "conflict", that is, whether attempting
 * to rsync both of them at the same time in parallel might cause
 * unpredictable behavior.  Might need a better name for this test.
 *
 * Returns non-zero iff the two URIs "conflict".
 */
static int conflicting_uris(const uri_t *a, const uri_t *b)
{
  size_t len_a, len_b;

  assert(a && is_rsync(a->s) && b && is_rsync(b->s));

  len_a = strlen(a->s);
  len_b = strlen(b->s);

  assert(len_a < sizeof(a->s) && len_b < sizeof(b->s));

  /* Conflict iff one URI is a prefix of the other. */
  return !strncmp(a->s, b->s, len_a < len_b ? len_a : len_b);
}



/**
 * Read non-directory filenames from a directory, so we can check to
 * see what's missing from a manifest.
 *
 * Returns a sorted stack of strdup()ed names, or NULL on failure.
 * Caller frees with sk_OPENSSL_STRING_pop_free().
 */
static STACK_OF(OPENSSL_STRING) *directory_filenames(const rcynic_ctx_t *rc,
						     const walk_state_t state,
						     const uri_t *uri)
{
  STACK_OF(OPENSSL_STRING) *result = NULL;
  path_t dpath, fpath;
  const path_t *prefix = NULL;
  DIR *dir = NULL;
  struct dirent *d;
  int ok = 0;

  assert(rc && uri);

  /* Which local tree to scan depends on which pass we're in. */
  switch (state) {
  case walk_state_current:
    prefix = &rc->unauthenticated;
    break;
  case walk_state_backup:
    prefix = &rc->old_authenticated;
    break;
  default:
    goto done;
  }

  if (!uri_to_filename(rc, uri, &dpath, prefix) ||
      (dir = opendir(dpath.s)) == NULL ||
      (result = sk_OPENSSL_STRING_new(uri_cmp)) == NULL)
    goto done;

  while ((d = readdir(dir)) != NULL)
    if (snprintf(fpath.s, sizeof(fpath.s), "%s/%s", dpath.s, d->d_name) >= sizeof(fpath.s)) {
      logmsg(rc, log_data_err, "Local path name %s/%s too long", dpath.s, d->d_name);
      goto done;
    }
    else if (!is_directory(&fpath) && !sk_OPENSSL_STRING_push_strdup(result, d->d_name)) {
      logmsg(rc, log_sys_err, "sk_OPENSSL_STRING_push_strdup() failed, probably memory exhaustion");
      goto done;
    }

  ok = 1;

 done:
  if (dir != NULL)
    closedir(dir);

  if (ok)
    return result;

  sk_OPENSSL_STRING_pop_free(result, OPENSSL_STRING_free);
  return NULL;
}



/**
 * Increment walk context reference count.
 */
static void walk_ctx_attach(walk_ctx_t *w)
{
  if (w != NULL) {
    w->refcount++;
    assert(w->refcount != 0);	/* Catch refcount overflow. */
  }
}

/**
 * Decrement walk context reference count; freeing the context if the
 * reference count is now zero.
 */
static void walk_ctx_detach(walk_ctx_t *w)
{
  if (w != NULL && --(w->refcount) == 0) {
    assert(w->refcount == 0);
    X509_free(w->cert);
    Manifest_free(w->manifest);
    sk_X509_free(w->certs);
    sk_X509_CRL_pop_free(w->crls, X509_CRL_free);
    sk_OPENSSL_STRING_pop_free(w->filenames, OPENSSL_STRING_free);
    free(w);
  }
}

/**
 * Return top context of a walk context stack.
 */
static walk_ctx_t *walk_ctx_stack_head(STACK_OF(walk_ctx_t) *wsk)
{
  return sk_walk_ctx_t_value(wsk, sk_walk_ctx_t_num(wsk) - 1);
}

/**
 * Whether we're done iterating over a walk context.  Think of this as
 * the thing you call (negated) in the second clause of a conceptual
 * "for" loop.
 */
static int walk_ctx_loop_done(STACK_OF(walk_ctx_t) *wsk)
{
  walk_ctx_t *w = walk_ctx_stack_head(wsk);
  return wsk == NULL || w == NULL || w->state >= walk_state_done;
}

/**
 * Walk context iterator.  Think of this as the thing you call in the
 * third clause of a conceptual "for" loop: this reinitializes as
 * necessary for the next pass through the loop.
 *
 * General idea here is that we have several state variables in a walk
 * context which collectively define the current pass, product URI,
 * etc, and we want to be able to iterate through this sequence via
 * the event system.  So this function steps to the next state.
 *
 * Conceptually, w->manifest->fileList and w->filenames form a single
 * array with index w->manifest_iteration + w->filename_iteration.
 * Beware of fencepost errors, I've gotten this wrong once already.
 * Slightly odd coding here is to make it easier to check this.
- */ -static void walk_ctx_loop_next(const rcynic_ctx_t *rc, STACK_OF(walk_ctx_t) *wsk) -{ - walk_ctx_t *w = walk_ctx_stack_head(wsk); - int n_manifest, n_filenames; - - assert(rc && wsk && w); - - assert(w->manifest_iteration >= 0 && w->filename_iteration >= 0); - - n_manifest = w->manifest ? sk_FileAndHash_num(w->manifest->fileList) : 0; - n_filenames = w->filenames ? sk_OPENSSL_STRING_num(w->filenames) : 0; - - if (w->manifest_iteration + w->filename_iteration < n_manifest + n_filenames) { - if (w->manifest_iteration < n_manifest) - w->manifest_iteration++; - else - w->filename_iteration++; - } - - assert(w->manifest_iteration <= n_manifest && w->filename_iteration <= n_filenames); - - if (w->manifest_iteration + w->filename_iteration < n_manifest + n_filenames) - return; - - while (!walk_ctx_loop_done(wsk)) { - w->state++; - w->manifest_iteration = 0; - w->filename_iteration = 0; - sk_OPENSSL_STRING_pop_free(w->filenames, OPENSSL_STRING_free); - w->filenames = directory_filenames(rc, w->state, &w->certinfo.sia); - if (w->manifest != NULL || w->filenames != NULL) - return; - } -} - -static int check_manifest(rcynic_ctx_t *rc, STACK_OF(walk_ctx_t) *wsk); - -/** - * Loop initializer for walk context. Think of this as the thing you - * call in the first clause of a conceptual "for" loop. - */ -static void walk_ctx_loop_init(rcynic_ctx_t *rc, STACK_OF(walk_ctx_t) *wsk) -{ - walk_ctx_t *w = walk_ctx_stack_head(wsk); - - assert(rc && wsk && w && w->state == walk_state_ready); - - if (!w->manifest && !check_manifest(rc, wsk)) { - /* - * Simple failure to find a manifest doesn't get here. This is - * for manifest failures that cause us to reject all of this - * certificate's products due to policy knob settings. 
- */ - w->state = walk_state_done; - return; - } - - if (!w->manifest) - logmsg(rc, log_telemetry, "Couldn't get manifest %s, blundering onward", w->certinfo.manifest.s); - - w->manifest_iteration = 0; - w->filename_iteration = 0; - w->state++; - assert(w->state == walk_state_current); - - assert(w->filenames == NULL); - w->filenames = directory_filenames(rc, w->state, &w->certinfo.sia); - - w->stale_manifest = w->manifest != NULL && X509_cmp_current_time(w->manifest->nextUpdate) < 0; - - while (!walk_ctx_loop_done(wsk) && - (w->manifest == NULL || w->manifest_iteration >= sk_FileAndHash_num(w->manifest->fileList)) && - (w->filenames == NULL || w->filename_iteration >= sk_OPENSSL_STRING_num(w->filenames))) - walk_ctx_loop_next(rc, wsk); -} - -/** - * Extract URI and hash values from walk context. - */ -static int walk_ctx_loop_this(const rcynic_ctx_t *rc, - STACK_OF(walk_ctx_t) *wsk, - uri_t *uri, - const unsigned char **hash, - size_t *hashlen) -{ - const walk_ctx_t *w = walk_ctx_stack_head(wsk); - const char *name = NULL; - FileAndHash *fah = NULL; - - assert(rc && wsk && w && uri && hash && hashlen); - - if (w->manifest != NULL && w->manifest_iteration < sk_FileAndHash_num(w->manifest->fileList)) { - fah = sk_FileAndHash_value(w->manifest->fileList, w->manifest_iteration); - name = (const char *) fah->file->data; - } else if (w->filenames != NULL && w->filename_iteration < sk_OPENSSL_STRING_num(w->filenames)) { - name = sk_OPENSSL_STRING_value(w->filenames, w->filename_iteration); - } - - if (name == NULL) { - logmsg(rc, log_sys_err, "Can't find a URI in walk context, this shouldn't happen: state %d, manifest_iteration %d, filename_iteration %d", - (int) w->state, w->manifest_iteration, w->filename_iteration); - return 0; - } - - if (strlen(w->certinfo.sia.s) + strlen(name) >= sizeof(uri->s)) { - logmsg(rc, log_data_err, "URI %s%s too long, skipping", w->certinfo.sia.s, uri->s); - return 0; - } - - strcpy(uri->s, w->certinfo.sia.s); - strcat(uri->s, name); - - 
if (fah != NULL) { - sk_OPENSSL_STRING_remove(w->filenames, name); - *hash = fah->hash->data; - *hashlen = fah->hash->length; - } else { - *hash = NULL; - *hashlen = 0; - } - - return 1; -} - -/** - * Create a new walk context stack. - */ -static STACK_OF(walk_ctx_t) *walk_ctx_stack_new(void) -{ - return sk_walk_ctx_t_new_null(); -} - -/** - * Push a walk context onto a walk context stack, return the new context. - */ -static walk_ctx_t *walk_ctx_stack_push(STACK_OF(walk_ctx_t) *wsk, - X509 *x, - const certinfo_t *certinfo) -{ - walk_ctx_t *w; - - if (x == NULL || - (certinfo == NULL) != (sk_walk_ctx_t_num(wsk) == 0) || - (w = malloc(sizeof(*w))) == NULL) - return NULL; - - memset(w, 0, sizeof(*w)); - w->cert = x; - if (certinfo != NULL) - w->certinfo = *certinfo; - else - memset(&w->certinfo, 0, sizeof(w->certinfo)); - - if (!sk_walk_ctx_t_push(wsk, w)) { - free(w); - return NULL; - } - - walk_ctx_attach(w); - return w; -} - -/** - * Pop and discard a walk context from a walk context stack. - */ -static void walk_ctx_stack_pop(STACK_OF(walk_ctx_t) *wsk) -{ - walk_ctx_detach(sk_walk_ctx_t_pop(wsk)); -} - -/** - * Clone a stack of walk contexts. - */ -static STACK_OF(walk_ctx_t) *walk_ctx_stack_clone(STACK_OF(walk_ctx_t) *old_wsk) -{ - STACK_OF(walk_ctx_t) *new_wsk; - int i; - if (old_wsk == NULL || (new_wsk = sk_walk_ctx_t_dup(old_wsk)) == NULL) - return NULL; - for (i = 0; i < sk_walk_ctx_t_num(new_wsk); i++) - walk_ctx_attach(sk_walk_ctx_t_value(new_wsk, i)); - return new_wsk; -} - -/** - * Extract certificate stack from walk context stack. Returns a newly - * created STACK_OF(X509) pointing to the existing cert objects. - * - * NB: This is a shallow copy, so use sk_X509_free() to free it, not - * sk_X509_pop_free(). 
 */
static STACK_OF(X509) *walk_ctx_stack_certs(const rcynic_ctx_t *rc,
					    STACK_OF(walk_ctx_t) *wsk)
{
  STACK_OF(X509) *xsk = sk_X509_new_null();
  walk_ctx_t *w;
  int i;

  assert(rc);

  for (i = 0; i < sk_walk_ctx_t_num(wsk); i++)
    if ((w = sk_walk_ctx_t_value(wsk, i)) == NULL ||
	(w->cert != NULL && !sk_X509_push(xsk, w->cert)))
      goto fail;

  return xsk;

 fail:
  logmsg(rc, log_sys_err, "Couldn't clone walk_ctx_stack, memory exhausted?");
  sk_X509_free(xsk);
  return NULL;
}

/**
 * Free a walk context stack, decrementing reference counts of each
 * frame on it.
 */
static void walk_ctx_stack_free(STACK_OF(walk_ctx_t) *wsk)
{
  sk_walk_ctx_t_pop_free(wsk, walk_ctx_detach);
}



static int rsync_count_running(const rcynic_ctx_t *);

/**
 * Add a task to the task queue.  Returns non-zero on success.
 */
static int task_add(const rcynic_ctx_t *rc,
		    void (*handler)(rcynic_ctx_t *, void *),
		    void *cookie)
{
  task_t *t = malloc(sizeof(*t));

  assert(rc && rc->task_queue && handler);

  assert(rsync_count_running(rc) <= rc->max_parallel_fetches);

  if (!t)
    return 0;

  t->handler = handler;
  t->cookie = cookie;

  if (sk_task_t_push(rc->task_queue, t))
    return 1;

  free(t);
  return 0;
}

/**
 * Run tasks until queue is empty.  Tasks are consumed FIFO (shift
 * from the front) and freed after their handler returns.
 */
static void task_run_q(rcynic_ctx_t *rc)
{
  task_t *t;
  assert(rc && rc->task_queue);
  while ((t = sk_task_t_shift(rc->task_queue)) != NULL) {
    t->handler(rc, t->cookie);
    free(t);
  }
}



/**
 * Check cache of whether we've already fetched a particular URI.
 */
static rsync_history_t *rsync_history_uri(const rcynic_ctx_t *rc,
					  const uri_t *uri)
{
  rsync_history_t h;
  char *s;
  int i;

  assert(rc && uri && rc->rsync_history);

  if (!is_rsync(uri->s))
    return NULL;

  h.uri = *uri;

  /* Strip trailing slashes before searching. */
  while ((s = strrchr(h.uri.s, '/')) != NULL && s[1] == '\0')
    *s = '\0';

  /*
   * Walk up the URI one path component at a time: a fetch of a
   * parent directory covers this URI too.  Stop once we'd cut into
   * the "rsync://" scheme prefix.
   */
  while ((i = sk_rsync_history_t_find(rc->rsync_history, &h)) < 0) {
    if ((s = strrchr(h.uri.s, '/')) == NULL ||
	(s - h.uri.s) < SIZEOF_RSYNC)
      return NULL;
    *s = '\0';
  }

  return sk_rsync_history_t_value(rc->rsync_history, i);
}

/**
 * Record that we've already attempted to synchronize a particular
 * rsync URI.
 */
static void rsync_history_add(const rcynic_ctx_t *rc,
			      const rsync_ctx_t *ctx,
			      const rsync_status_t status)
{
  int final_slash = 0;
  rsync_history_t *h;
  uri_t uri;
  size_t n;
  char *s;

  assert(rc && ctx && rc->rsync_history && is_rsync(ctx->uri.s));

  uri = ctx->uri;

  while ((s = strrchr(uri.s, '/')) != NULL && s[1] == '\0') {
    final_slash = 1;
    *s = '\0';
  }

  /*
   * On failure, record the whole host (truncate the URI to its first
   * path component) so we don't retry other URIs on a dead server.
   */
  if (status != rsync_status_done) {

    n = SIZEOF_RSYNC + strcspn(uri.s + SIZEOF_RSYNC, "/");
    assert(n < sizeof(uri.s));
    uri.s[n] = '\0';
    final_slash = 1;

    /* Already recorded a failure for this host: nothing to do. */
    if ((h = rsync_history_uri(rc, &uri)) != NULL) {
      assert(h->status != rsync_status_done);
      return;
    }
  }

  if ((h = rsync_history_t_new()) != NULL) {
    h->uri = uri;
    h->status = status;
    h->started = ctx->started;
    h->finished = time(0);
    h->final_slash = final_slash;
  }

  if (h == NULL || !sk_rsync_history_t_push(rc->rsync_history, h)) {
    rsync_history_t_free(h);
    logmsg(rc, log_sys_err,
	   "Couldn't add %s to rsync_history, blundering onwards", uri.s);
  }
}



/**
 * Return count of how many rsync contexts are in running.
 */
static int rsync_count_running(const rcynic_ctx_t *rc)
{
  const rsync_ctx_t *ctx;
  int i, n = 0;

  assert(rc && rc->rsync_queue);

  for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i) {
    switch (ctx->state) {
    case rsync_state_running:
    case rsync_state_closed:
    case rsync_state_terminating:
      n++;
      /* Deliberate fall-through: counted states continue like the rest. */
    default:
      continue;
    }
  }

  return n;
}

/**
 * Test whether an rsync context conflicts with anything that's
 * currently runable.
 */
static int rsync_conflicts(const rcynic_ctx_t *rc,
			   const rsync_ctx_t *ctx)
{
  const rsync_ctx_t *c;
  int i;

  assert(rc && ctx && rc->rsync_queue);

  for (i = 0; (c = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i)
    if (c != ctx &&
	(c->state == rsync_state_initial ||
	 c->state == rsync_state_running) &&
	conflicting_uris(&c->uri, &ctx->uri))
      return 1;

  return 0;
}

/**
 * Test whether a rsync context is runable at this time.
 */
static int rsync_runable(const rcynic_ctx_t *rc,
			 const rsync_ctx_t *ctx)
{
  assert(rc && ctx);

  switch (ctx->state) {

  case rsync_state_initial:
  case rsync_state_running:
    return 1;

  case rsync_state_retry_wait:
    return ctx->deadline <= time(0);

  case rsync_state_closed:
  case rsync_state_terminating:
    return 0;

  case rsync_state_conflict_wait:
    return !rsync_conflicts(rc, ctx);

  default:
    break;
  }

  return 0;
}

/**
 * Return count of runable rsync contexts.
 */
static int rsync_count_runable(const rcynic_ctx_t *rc)
{
  const rsync_ctx_t *ctx;
  int i, n = 0;

  assert(rc && rc->rsync_queue);

  for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i)
    if (rsync_runable(rc, ctx))
      n++;

  return n;
}

/**
 * Call rsync context handler, if one is set.
- */ -static void rsync_call_handler(rcynic_ctx_t *rc, - rsync_ctx_t *ctx, - const rsync_status_t status) -{ - if (!ctx) - return; - - switch (status) { - - case rsync_status_pending: - case rsync_status_done: - break; - - case rsync_status_failed: - log_validation_status(rc, &ctx->uri, rsync_transfer_failed, object_generation_null); - break; - - case rsync_status_timed_out: - log_validation_status(rc, &ctx->uri, rsync_transfer_timed_out, object_generation_null); - break; - - case rsync_status_skipped: - log_validation_status(rc, &ctx->uri, rsync_transfer_skipped, object_generation_null); - break; - } - - if (ctx->handler) - ctx->handler(rc, ctx, status, &ctx->uri, ctx->cookie); -} - -/** - * Run an rsync process. - */ -static void rsync_run(rcynic_ctx_t *rc, - rsync_ctx_t *ctx) -{ - static const char * const rsync_cmd[] = { - "rsync", "--update", "--times", "--copy-links", "--itemize-changes" - }; - static const char * const rsync_tree_args[] = { - "--recursive", "--delete" - }; - - const char *argv[10]; - path_t path; - int i, argc = 0, flags, pipe_fds[2]; - - pipe_fds[0] = pipe_fds[1] = -1; - - assert(rc && ctx && ctx->pid == 0 && ctx->state != rsync_state_running && rsync_runable(rc, ctx)); - - if (rsync_history_uri(rc, &ctx->uri)) { - logmsg(rc, log_verbose, "Late rsync cache hit for %s", ctx->uri.s); - rsync_call_handler(rc, ctx, rsync_status_done); - (void) sk_rsync_ctx_t_delete_ptr(rc->rsync_queue, ctx); - free(ctx); - return; - } - - assert(rsync_count_running(rc) < rc->max_parallel_fetches); - - logmsg(rc, log_telemetry, "Fetching %s", ctx->uri.s); - - memset(argv, 0, sizeof(argv)); - - for (i = 0; i < sizeof(rsync_cmd)/sizeof(*rsync_cmd); i++) { - assert(argc < sizeof(argv)/sizeof(*argv)); - argv[argc++] = rsync_cmd[i]; - } - if (endswith(ctx->uri.s, "/")) { - for (i = 0; i < sizeof(rsync_tree_args)/sizeof(*rsync_tree_args); i++) { - assert(argc < sizeof(argv)/sizeof(*argv)); - argv[argc++] = rsync_tree_args[i]; - } - } - - if (rc->rsync_program) - 
argv[0] = rc->rsync_program; - - if (!uri_to_filename(rc, &ctx->uri, &path, &rc->unauthenticated)) { - logmsg(rc, log_data_err, "Couldn't extract filename from URI: %s", ctx->uri.s); - goto lose; - } - - assert(argc < sizeof(argv)/sizeof(*argv)); - argv[argc++] = ctx->uri.s; - - assert(argc < sizeof(argv)/sizeof(*argv)); - argv[argc++] = path.s; - - if (!mkdir_maybe(rc, &path)) { - logmsg(rc, log_sys_err, "Couldn't make target directory: %s", path.s); - goto lose; - } - - for (i = 0; i < argc; i++) - logmsg(rc, log_debug, "rsync argv[%d]: %s", i, argv[i]); - - if (pipe(pipe_fds) < 0) { - logmsg(rc, log_sys_err, "pipe() failed: %s", strerror(errno)); - goto lose; - } - - switch ((ctx->pid = vfork())) { - - case -1: - logmsg(rc, log_sys_err, "vfork() failed: %s", strerror(errno)); - goto lose; - - case 0: - /* - * Child - */ -#define whine(msg) ((void) write(2, msg, sizeof(msg) - 1)) - if (close(pipe_fds[0]) < 0) - whine("close(pipe_fds[0]) failed\n"); - else if (dup2(pipe_fds[1], 1) < 0) - whine("dup2(pipe_fds[1], 1) failed\n"); - else if (dup2(pipe_fds[1], 2) < 0) - whine("dup2(pipe_fds[1], 2) failed\n"); - else if (close(pipe_fds[1]) < 0) - whine("close(pipe_fds[1]) failed\n"); - else if (execvp(argv[0], (char * const *) argv) < 0) - whine("execvp(argv[0], (char * const *) argv) failed\n"); - whine("last system error: "); - write(2, strerror(errno), strlen(strerror(errno))); - whine("\n"); - _exit(1); -#undef whine - - default: - /* - * Parent - */ - ctx->fd = pipe_fds[0]; - if ((flags = fcntl(ctx->fd, F_GETFL, 0)) == -1 || - fcntl(ctx->fd, F_SETFL, flags | O_NONBLOCK) == -1) { - logmsg(rc, log_sys_err, "fcntl(ctx->fd, F_[GS]ETFL, O_NONBLOCK) failed: %s", - strerror(errno)); - goto lose; - } - (void) close(pipe_fds[1]); - ctx->state = rsync_state_running; - ctx->problem = rsync_problem_none; - if (!ctx->started) - ctx->started = time(0); - if (rc->rsync_timeout) - ctx->deadline = time(0) + rc->rsync_timeout; - logmsg(rc, log_verbose, "Subprocess %u started, queued 
%d, runable %d, running %d, max %d, URI %s", - (unsigned) ctx->pid, sk_rsync_ctx_t_num(rc->rsync_queue), rsync_count_runable(rc), rsync_count_running(rc), rc->max_parallel_fetches, ctx->uri.s); - rsync_call_handler(rc, ctx, rsync_status_pending); - return; - - } - - lose: - if (pipe_fds[0] != -1) - (void) close(pipe_fds[0]); - if (pipe_fds[1] != -1) - (void) close(pipe_fds[1]); - if (rc->rsync_queue && ctx) - (void) sk_rsync_ctx_t_delete_ptr(rc->rsync_queue, ctx); - rsync_call_handler(rc, ctx, rsync_status_failed); - if (ctx->pid > 0) { - (void) kill(ctx->pid, SIGKILL); - ctx->pid = 0; - } -} - -/** - * Process one line of rsync's output. This is a separate function - * primarily to centralize scraping for magic error strings. - */ -static void do_one_rsync_log_line(const rcynic_ctx_t *rc, - rsync_ctx_t *ctx) -{ - unsigned u; - char *s; - - /* - * Send line to our log unless it's empty. - */ - if (ctx->buffer[strspn(ctx->buffer, " \t\n\r")] != '\0') - logmsg(rc, log_telemetry, "rsync[%u]: %s", ctx->pid, ctx->buffer); - - /* - * Check for magic error strings - */ - if ((s = strstr(ctx->buffer, "@ERROR: max connections")) != NULL) { - ctx->problem = rsync_problem_refused; - if (sscanf(s, "@ERROR: max connections (%u) reached -- try again later", &u) == 1) - logmsg(rc, log_verbose, "Subprocess %u reported limit of %u for %s", ctx->pid, u, ctx->uri.s); - } -} - -/** - * Construct select() arguments. 
- */ -static int rsync_construct_select(const rcynic_ctx_t *rc, - const time_t now, - fd_set *rfds, - struct timeval *tv) -{ - rsync_ctx_t *ctx; - time_t when = 0; - int i, n = 0; - - assert(rc && rc->rsync_queue && rfds && tv && rc->max_select_time >= 0); - - FD_ZERO(rfds); - - for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i) { - -#if 0 - logmsg(rc, log_debug, "+++ ctx[%d] pid %d fd %d state %s started %lu deadline %lu", - i, ctx->pid, ctx->fd, rsync_state_label[ctx->state], - (unsigned long) ctx->started, (unsigned long) ctx->deadline); -#endif - - switch (ctx->state) { - - case rsync_state_running: - assert(ctx->fd >= 0); - FD_SET(ctx->fd, rfds); - if (ctx->fd > n) - n = ctx->fd; - if (!rc->rsync_timeout) - continue; - /* Fall through */ - - case rsync_state_retry_wait: - if (when == 0 || ctx->deadline < when) - when = ctx->deadline; - /* Fall through */ - - default: - continue; - } - } - - if (!when) - tv->tv_sec = rc->max_select_time; - else if (when < now) - tv->tv_sec = 0; - else if (when < now + rc->max_select_time) - tv->tv_sec = when - now; - else - tv->tv_sec = rc->max_select_time; - tv->tv_usec = 0; - return n; -} - -/** - * Convert rsync_status_t to mib_counter_t. - * - * Maybe some day this will go away and we won't be carrying - * essentially the same information in two different databases, but - * for now I'll settle for cleaning up the duplicate code logic. - */ -static mib_counter_t rsync_status_to_mib_counter(rsync_status_t status) -{ - switch (status) { - case rsync_status_done: return rsync_transfer_succeeded; - case rsync_status_timed_out: return rsync_transfer_timed_out; - case rsync_status_failed: return rsync_transfer_failed; - case rsync_status_skipped: return rsync_transfer_skipped; - default: - /* - * Keep GCC from whining about untested cases. 
- */ - assert(status == rsync_status_done || - status == rsync_status_timed_out || - status == rsync_status_failed || - status == rsync_status_skipped); - return rsync_transfer_failed; - } -} - -/** - * Manager for queue of rsync tasks in progress. - * - * General plan here is to process one completed child, or output - * accumulated from children, or block if there is absolutely nothing - * to do, on the theory that caller had nothing to do either or would - * not have called us. Once we've done something allegedly useful, we - * return, because this is not the event loop; if and when the event - * loop has nothing more important to do, we'll be called again. - * - * So this is the only place where the program blocks waiting for - * children, but we only do it when we know there's nothing else - * useful that we could be doing while we wait. - */ -static void rsync_mgr(rcynic_ctx_t *rc) -{ - rsync_status_t rsync_status; - int i, n, pid_status = -1; - rsync_ctx_t *ctx = NULL; - time_t now = time(0); - struct timeval tv; - fd_set rfds; - pid_t pid; - char *s; - - assert(rc && rc->rsync_queue); - - /* - * Check for exited subprocesses. - */ - - while ((pid = waitpid(-1, &pid_status, WNOHANG)) > 0) { - - /* - * Child exited, handle it. 
- */ - - logmsg(rc, log_verbose, "Subprocess %u exited with status %d", - (unsigned) pid, WEXITSTATUS(pid_status)); - - for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i) - if (ctx->pid == pid) - break; - if (ctx == NULL) { - assert(i == sk_rsync_ctx_t_num(rc->rsync_queue)); - logmsg(rc, log_sys_err, "Couldn't find rsync context for pid %d", pid); - continue; - } - - close(ctx->fd); - ctx->fd = -1; - - if (ctx->buflen > 0) { - assert(ctx->buflen < sizeof(ctx->buffer)); - ctx->buffer[ctx->buflen] = '\0'; - do_one_rsync_log_line(rc, ctx); - ctx->buflen = 0; - } - - switch (WEXITSTATUS(pid_status)) { - - case 0: - rsync_status = rsync_status_done; - break; - - case 5: /* "Error starting client-server protocol" */ - /* - * Handle remote rsyncd refusing to talk to us because we've - * exceeded its connection limit. Back off for a short - * interval, then retry. - */ - if (ctx->problem == rsync_problem_refused && ctx->tries < rc->max_retries) { - unsigned char r; - if (!RAND_bytes(&r, sizeof(r))) - r = 60; - ctx->deadline = time(0) + rc->retry_wait_min + r; - ctx->state = rsync_state_retry_wait; - ctx->problem = rsync_problem_none; - ctx->pid = 0; - ctx->tries++; - logmsg(rc, log_telemetry, "Scheduling retry for %s", ctx->uri.s); - continue; - } - goto failure; - - case 23: /* "Partial transfer due to error" */ - /* - * This appears to be a catch-all for "something bad happened - * trying to do what you asked me to do". In the cases I've - * seen to date, this is things like "the directory you - * requested isn't there" or "NFS exploded when I tried to touch - * the directory". These aren't network layer failures, so we - * (probably) shouldn't give up on the repository host. 
- */ - rsync_status = rsync_status_done; - log_validation_status(rc, &ctx->uri, rsync_partial_transfer, object_generation_null); - break; - - default: - failure: - rsync_status = rsync_status_failed; - logmsg(rc, log_data_err, "rsync %u exited with status %d fetching %s", - (unsigned) pid, WEXITSTATUS(pid_status), ctx->uri.s); - break; - } - - if (rc->rsync_timeout && now >= ctx->deadline) - rsync_status = rsync_status_timed_out; - log_validation_status(rc, &ctx->uri, - rsync_status_to_mib_counter(rsync_status), - object_generation_null); - rsync_history_add(rc, ctx, rsync_status); - rsync_call_handler(rc, ctx, rsync_status); - (void) sk_rsync_ctx_t_delete_ptr(rc->rsync_queue, ctx); - free(ctx); - ctx = NULL; - } - - if (pid == -1 && errno != EINTR && errno != ECHILD) - logmsg(rc, log_sys_err, "waitpid() returned error: %s", strerror(errno)); - - assert(rsync_count_running(rc) <= rc->max_parallel_fetches); - - /* - * Look for rsync contexts that have become runable. Odd loop - * structure is because rsync_run() might decide to remove the - * specified rsync task from the queue instead of running it. - */ - for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; i++) { - n = sk_rsync_ctx_t_num(rc->rsync_queue); - if (ctx->state != rsync_state_running && - rsync_runable(rc, ctx) && - rsync_count_running(rc) < rc->max_parallel_fetches) - rsync_run(rc, ctx); - if (n > sk_rsync_ctx_t_num(rc->rsync_queue)) - i--; - } - - assert(rsync_count_running(rc) <= rc->max_parallel_fetches); - - /* - * Check for log text from subprocesses. 
- */ - - n = rsync_construct_select(rc, now, &rfds, &tv); - - if (n > 0 && tv.tv_sec) - logmsg(rc, log_verbose, "Waiting up to %u seconds for rsync, queued %d, runable %d, running %d, max %d", - (unsigned) tv.tv_sec, sk_rsync_ctx_t_num(rc->rsync_queue), rsync_count_runable(rc), - rsync_count_running(rc), rc->max_parallel_fetches); - - if (n > 0) { -#if 0 - logmsg(rc, log_debug, "++ select(%d, %u)", n, tv.tv_sec); -#endif - n = select(n + 1, &rfds, NULL, NULL, &tv); - } - - if (n > 0) { - - for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i) { - if (ctx->fd <= 0 || !FD_ISSET(ctx->fd, &rfds)) - continue; - - assert(ctx->buflen < sizeof(ctx->buffer) - 1); - - while ((n = read(ctx->fd, ctx->buffer + ctx->buflen, sizeof(ctx->buffer) - 1 - ctx->buflen)) > 0) { - ctx->buflen += n; - assert(ctx->buflen < sizeof(ctx->buffer)); - ctx->buffer[ctx->buflen] = '\0'; - - while ((s = strchr(ctx->buffer, '\n')) != NULL) { - *s++ = '\0'; - do_one_rsync_log_line(rc, ctx); - assert(s > ctx->buffer && s < ctx->buffer + sizeof(ctx->buffer)); - ctx->buflen -= s - ctx->buffer; - assert(ctx->buflen < sizeof(ctx->buffer)); - if (ctx->buflen > 0) - memmove(ctx->buffer, s, ctx->buflen); - ctx->buffer[ctx->buflen] = '\0'; - } - - if (ctx->buflen == sizeof(ctx->buffer) - 1) { - ctx->buffer[sizeof(ctx->buffer) - 1] = '\0'; - do_one_rsync_log_line(rc, ctx); - ctx->buflen = 0; - } - } - - if (n == 0) { - (void) close(ctx->fd); - ctx->fd = -1; - ctx->state = rsync_state_closed; - } - } - } - - assert(rsync_count_running(rc) <= rc->max_parallel_fetches); - - /* - * Deal with children that have been running too long. - */ - if (rc->rsync_timeout) { - for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i) { - int sig; - if (ctx->pid <= 0 || now < ctx->deadline) - continue; - sig = ctx->tries++ < KILL_MAX ? 
SIGTERM : SIGKILL; - if (ctx->state != rsync_state_terminating) { - ctx->problem = rsync_problem_timed_out; - ctx->state = rsync_state_terminating; - ctx->tries = 0; - logmsg(rc, log_telemetry, "Subprocess %u is taking too long fetching %s, whacking it", (unsigned) ctx->pid, ctx->uri.s); - rsync_history_add(rc, ctx, rsync_status_timed_out); - } else if (sig == SIGTERM) { - logmsg(rc, log_verbose, "Whacking subprocess %u again", (unsigned) ctx->pid); - } else { - logmsg(rc, log_verbose, "Whacking subprocess %u with big hammer", (unsigned) ctx->pid); - } - (void) kill(ctx->pid, sig); - ctx->deadline = now + 1; - } - } -} - -/** - * Set up rsync context and attempt to start it. - */ -static void rsync_init(rcynic_ctx_t *rc, - const uri_t *uri, - void *cookie, - void (*handler)(rcynic_ctx_t *, const rsync_ctx_t *, const rsync_status_t, const uri_t *, void *)) -{ - rsync_ctx_t *ctx = NULL; - - assert(rc && uri && strlen(uri->s) > SIZEOF_RSYNC); - - if (!rc->run_rsync) { - logmsg(rc, log_verbose, "rsync disabled, skipping %s", uri->s); - if (handler) - handler(rc, NULL, rsync_status_skipped, uri, cookie); - return; - } - - if (rsync_history_uri(rc, uri)) { - logmsg(rc, log_verbose, "rsync cache hit for %s", uri->s); - if (handler) - handler(rc, NULL, rsync_status_done, uri, cookie); - return; - } - - if ((ctx = malloc(sizeof(*ctx))) == NULL) { - logmsg(rc, log_sys_err, "malloc(rsync_ctxt_t) failed"); - if (handler) - handler(rc, NULL, rsync_status_failed, uri, cookie); - return; - } - - memset(ctx, 0, sizeof(*ctx)); - ctx->uri = *uri; - ctx->handler = handler; - ctx->cookie = cookie; - ctx->fd = -1; - - if (!sk_rsync_ctx_t_push(rc->rsync_queue, ctx)) { - logmsg(rc, log_sys_err, "Couldn't push rsync state object onto queue, punting %s", ctx->uri.s); - rsync_call_handler(rc, ctx, rsync_status_failed); - free(ctx); - return; - } - - if (rsync_conflicts(rc, ctx)) { - logmsg(rc, log_debug, "New rsync context %s is feeling conflicted", ctx->uri.s); - ctx->state = 
rsync_state_conflict_wait; - } -} - -/** - * rsync a trust anchor. - */ -static void rsync_ta(rcynic_ctx_t *rc, - const uri_t *uri, - tal_ctx_t *tctx, - void (*handler)(rcynic_ctx_t *, const rsync_ctx_t *, - const rsync_status_t, const uri_t *, void *)) -{ - assert(endswith(uri->s, ".cer")); - rsync_init(rc, uri, tctx, handler); -} - -/** - * rsync an entire subtree, generally rooted at a SIA collection. - */ -static void rsync_tree(rcynic_ctx_t *rc, - const uri_t *uri, - STACK_OF(walk_ctx_t) *wsk, - void (*handler)(rcynic_ctx_t *, const rsync_ctx_t *, - const rsync_status_t, const uri_t *, void *)) -{ - assert(endswith(uri->s, "/")); - rsync_init(rc, uri, wsk, handler); -} - - - -/** - * Clean up old stuff from previous rsync runs. --delete doesn't help - * if the URI changes and we never visit the old URI again. - */ -static int prune_unauthenticated(const rcynic_ctx_t *rc, - const path_t *name, - const size_t baselen) -{ - path_t path; - struct dirent *d; - DIR *dir; - const char *slash; - - assert(rc && name && baselen > 0 && strlen(name->s) >= baselen); - - if (!is_directory(name)) { - logmsg(rc, log_usage_err, "prune: %s is not a directory", name->s); - return 0; - } - - if ((dir = opendir(name->s)) == NULL) { - logmsg(rc, log_sys_err, "prune: opendir() failed on %s: %s", name->s, strerror(errno)); - return 0; - } - - slash = endswith(name->s, "/") ? 
"" : "/"; - - while ((d = readdir(dir)) != NULL) { - if (!strcmp(d->d_name, ".") || !strcmp(d->d_name, "..")) - continue; - - if (snprintf(path.s, sizeof(path.s), "%s%s%s", name->s, slash, d->d_name) >= sizeof(path.s)) { - logmsg(rc, log_debug, "prune: %s%s%s too long", name->s, slash, d->d_name); - goto done; - } - - if (validation_status_find_filename(rc, path.s + baselen)) { - logmsg(rc, log_debug, "prune: cache hit %s", path.s); - continue; - } - - if (unlink(path.s) == 0) { - logmsg(rc, log_debug, "prune: removed %s", path.s); - continue; - } - - if (prune_unauthenticated(rc, &path, baselen)) - continue; - - logmsg(rc, log_sys_err, "prune: removing %s failed: %s", path.s, strerror(errno)); - goto done; - } - - if (rmdir(name->s) == 0) - logmsg(rc, log_debug, "prune: removed %s", name->s); - else if (errno != ENOTEMPTY) - logmsg(rc, log_sys_err, "prune: couldn't remove %s: %s", name->s, strerror(errno)); - - done: - closedir(dir); - return !d; -} - - - -/** - * Read a DER object using a BIO pipeline that hashes the file content - * as we read it. Returns the internal form of the parsed DER object, - * sets the hash buffer (if specified) as a side effect. The default - * hash algorithm is SHA-256. - */ -static void *read_file_with_hash(const path_t *filename, - const ASN1_ITEM *it, - const EVP_MD *md, - hashbuf_t *hash) -{ - void *result = NULL; - BIO *b; - - if ((b = BIO_new_file(filename->s, "rb")) == NULL) - goto error; - - if (hash != NULL) { - BIO *b2 = BIO_new(BIO_f_md()); - if (b2 == NULL) - goto error; - if (md == NULL) - md = EVP_sha256(); - if (!BIO_set_md(b2, md)) { - BIO_free(b2); - goto error; - } - BIO_push(b2, b); - b = b2; - } - - if ((result = ASN1_item_d2i_bio(it, b, NULL)) == NULL) - goto error; - - if (hash != NULL) { - memset(hash, 0, sizeof(*hash)); - BIO_gets(b, (char *) hash, sizeof(hash->h)); - } - - error: - BIO_free_all(b); - return result; -} - -/** - * Read and hash a certificate. 
- */ -static X509 *read_cert(const path_t *filename, hashbuf_t *hash) -{ - return read_file_with_hash(filename, ASN1_ITEM_rptr(X509), NULL, hash); -} - -/** - * Read and hash a CRL. - */ -static X509_CRL *read_crl(const path_t *filename, hashbuf_t *hash) -{ - return read_file_with_hash(filename, ASN1_ITEM_rptr(X509_CRL), NULL, hash); -} - -/** - * Read and hash a CMS message. - */ -static CMS_ContentInfo *read_cms(const path_t *filename, hashbuf_t *hash) -{ - return read_file_with_hash(filename, ASN1_ITEM_rptr(CMS_ContentInfo), NULL, hash); -} - - - -/** - * Extract CRLDP data from a certificate. Stops looking after finding - * the first rsync URI. - */ -static int extract_crldp_uri(rcynic_ctx_t *rc, - const uri_t *uri, - const object_generation_t generation, - const STACK_OF(DIST_POINT) *crldp, - uri_t *result) -{ - DIST_POINT *d; - int i; - - assert(rc && uri && crldp && result); - - if (sk_DIST_POINT_num(crldp) != 1) - goto bad; - - d = sk_DIST_POINT_value(crldp, 0); - - if (d->reasons || d->CRLissuer || !d->distpoint || d->distpoint->type != 0) - goto bad; - - for (i = 0; i < sk_GENERAL_NAME_num(d->distpoint->name.fullname); i++) { - GENERAL_NAME *n = sk_GENERAL_NAME_value(d->distpoint->name.fullname, i); - if (n == NULL || n->type != GEN_URI) - goto bad; - if (!is_rsync((char *) n->d.uniformResourceIdentifier->data)) - log_validation_status(rc, uri, non_rsync_uri_in_extension, generation); - else if (sizeof(result->s) <= n->d.uniformResourceIdentifier->length) - log_validation_status(rc, uri, uri_too_long, generation); - else if (result->s[0]) - log_validation_status(rc, uri, multiple_rsync_uris_in_extension, generation); - else - strcpy(result->s, (char *) n->d.uniformResourceIdentifier->data); - } - - return result->s[0]; - - bad: - log_validation_status(rc, uri, malformed_crldp_extension, generation); - return 0; -} - -/** - * Extract SIA or AIA data from a certificate. 
- */ -static int extract_access_uri(rcynic_ctx_t *rc, - const uri_t *uri, - const object_generation_t generation, - const AUTHORITY_INFO_ACCESS *xia, - const int nid, - uri_t *result, - int *count) -{ - int i; - - assert(rc && uri && xia && result && count); - - for (i = 0; i < sk_ACCESS_DESCRIPTION_num(xia); i++) { - ACCESS_DESCRIPTION *a = sk_ACCESS_DESCRIPTION_value(xia, i); - if (a == NULL || a->location->type != GEN_URI) - return 0; - if (OBJ_obj2nid(a->method) != nid) - continue; - ++*count; - if (!is_rsync((char *) a->location->d.uniformResourceIdentifier->data)) - log_validation_status(rc, uri, non_rsync_uri_in_extension, generation); - else if (sizeof(result->s) <= a->location->d.uniformResourceIdentifier->length) - log_validation_status(rc, uri, uri_too_long, generation); - else if (result->s[0]) - log_validation_status(rc, uri, multiple_rsync_uris_in_extension, generation); - else - strcpy(result->s, (char *) a->location->d.uniformResourceIdentifier->data); - } - return 1; -} - - - -/** - * Check to see whether an AKI extension is present, is of the right - * form, and matches the issuer. - */ -static int check_aki(rcynic_ctx_t *rc, - const uri_t *uri, - const X509 *issuer, - const AUTHORITY_KEYID *aki, - const object_generation_t generation) -{ - assert(rc && uri && issuer && issuer->skid); - - if (aki == NULL) { - log_validation_status(rc, uri, aki_extension_missing, generation); - return 0; - } - - if (!aki->keyid || aki->serial || aki->issuer) { - log_validation_status(rc, uri, aki_extension_wrong_format, generation); - return 0; - } - - if (ASN1_OCTET_STRING_cmp(aki->keyid, issuer->skid)) { - log_validation_status(rc, uri, aki_extension_issuer_mismatch, generation); - return 0; - } - - return 1; -} - - - -/** - * Check whether a Distinguished Name conforms to the rescert profile. 
- * The profile is very restrictive: it only allows one mandatory - * CommonName field and one optional SerialNumber field, both of which - * must be of type PrintableString. - */ -static int check_allowed_dn(X509_NAME *dn) -{ - X509_NAME_ENTRY *ne; - ASN1_STRING *s; - int loc; - - if (dn == NULL) - return 0; - - switch (X509_NAME_entry_count(dn)) { - - case 2: - if ((loc = X509_NAME_get_index_by_NID(dn, NID_serialNumber, -1)) < 0 || - (ne = X509_NAME_get_entry(dn, loc)) == NULL || - (s = X509_NAME_ENTRY_get_data(ne)) == NULL || - ASN1_STRING_type(s) != V_ASN1_PRINTABLESTRING) - return 0; - - /* Fall through */ - - case 1: - if ((loc = X509_NAME_get_index_by_NID(dn, NID_commonName, -1)) < 0 || - (ne = X509_NAME_get_entry(dn, loc)) == NULL || - (s = X509_NAME_ENTRY_get_data(ne)) == NULL || - ASN1_STRING_type(s) != V_ASN1_PRINTABLESTRING) - return 0; - - return 1; - - default: - return 0; - } -} - -/** - * Check whether an ASN.1 TIME value conforms to RFC 5280 4.1.2.5. - */ -static int check_allowed_time_encoding(ASN1_TIME *t) -{ - switch (t->type) { - - case V_ASN1_UTCTIME: - return t->length == sizeof("yymmddHHMMSSZ") - 1; - - case V_ASN1_GENERALIZEDTIME: - return (t->length == sizeof("yyyymmddHHMMSSZ") - 1 && - strcmp("205", (char *) t->data) <= 0); - - } - return 0; -} - -/** - * Compare ASN1_TIME values. - */ -static int asn1_time_cmp(ASN1_TIME *t1, ASN1_TIME *t2) -{ - ASN1_GENERALIZEDTIME *g1 = ASN1_TIME_to_generalizedtime(t1, NULL); - ASN1_GENERALIZEDTIME *g2 = ASN1_TIME_to_generalizedtime(t2, NULL); - - int cmp = ASN1_STRING_cmp(g1, g2); - - ASN1_GENERALIZEDTIME_free(g1); - ASN1_GENERALIZEDTIME_free(g2); - - return cmp; -} - - - -/** - * Attempt to read and check one CRL from disk. 
- */ - -static X509_CRL *check_crl_1(rcynic_ctx_t *rc, - const uri_t *uri, - path_t *path, - const path_t *prefix, - X509 *issuer, - const object_generation_t generation) -{ - STACK_OF(X509_REVOKED) *revoked; - X509_CRL *crl = NULL; - EVP_PKEY *pkey; - int i, ret; - - assert(uri && path && issuer); - - if (!uri_to_filename(rc, uri, path, prefix) || - (crl = read_crl(path, NULL)) == NULL) - goto punt; - - if (X509_CRL_get_version(crl) != 1) { - log_validation_status(rc, uri, wrong_object_version, generation); - goto punt; - } - - if (!crl->crl || !crl->crl->sig_alg || !crl->crl->sig_alg->algorithm || - OBJ_obj2nid(crl->crl->sig_alg->algorithm) != NID_sha256WithRSAEncryption) { - log_validation_status(rc, uri, nonconformant_signature_algorithm, generation); - goto punt; - } - - if (!check_allowed_time_encoding(X509_CRL_get_lastUpdate(crl)) || - !check_allowed_time_encoding(X509_CRL_get_nextUpdate(crl))) { - log_validation_status(rc, uri, nonconformant_asn1_time_value, generation); - goto punt; - } - - if (X509_cmp_current_time(X509_CRL_get_lastUpdate(crl)) > 0) { - log_validation_status(rc, uri, crl_not_yet_valid, generation); - goto punt; - } - - if (X509_cmp_current_time(X509_CRL_get_nextUpdate(crl)) < 0) { - log_validation_status(rc, uri, stale_crl_or_manifest, generation); - if (!rc->allow_stale_crl) - goto punt; - } - - if (!check_aki(rc, uri, issuer, crl->akid, generation)) - goto punt; - - if (crl->crl_number == NULL) { - log_validation_status(rc, uri, crl_number_extension_missing, generation); - goto punt; - } - - if (ASN1_INTEGER_cmp(crl->crl_number, asn1_zero) < 0) { - log_validation_status(rc, uri, crl_number_is_negative, generation); - goto punt; - } - - if (ASN1_INTEGER_cmp(crl->crl_number, asn1_twenty_octets) > 0) { - log_validation_status(rc, uri, crl_number_out_of_range, generation); - goto punt; - } - - if (X509_CRL_get_ext_count(crl) != 2) { - log_validation_status(rc, uri, disallowed_x509v3_extension, generation); - goto punt; - } - - if 
(X509_NAME_cmp(X509_CRL_get_issuer(crl), X509_get_subject_name(issuer))) { - log_validation_status(rc, uri, crl_issuer_name_mismatch, generation); - goto punt; - } - - if (!check_allowed_dn(X509_CRL_get_issuer(crl))) { - log_validation_status(rc, uri, nonconformant_issuer_name, generation); - if (!rc->allow_nonconformant_name) - goto punt; - } - - if ((revoked = X509_CRL_get_REVOKED(crl)) != NULL) { - for (i = sk_X509_REVOKED_num(revoked) - 1; i >= 0; --i) { - if (X509_REVOKED_get_ext_count(sk_X509_REVOKED_value(revoked, i)) > 0) { - log_validation_status(rc, uri, disallowed_x509v3_extension, generation); - goto punt; - } - } - } - - if ((pkey = X509_get_pubkey(issuer)) == NULL) - goto punt; - ret = X509_CRL_verify(crl, pkey); - EVP_PKEY_free(pkey); - - if (ret > 0) - return crl; - - punt: - X509_CRL_free(crl); - return NULL; -} - -/** - * Check whether we already have a particular CRL, attempt to fetch it - * and check issuer's signature if we don't. - * - * General plan here is to do basic checks on both current and backup - * generation CRLs, then, if both generations pass all of our other - * tests, pick the generation with the highest CRL number, to protect - * against replay attacks. 
- */ -static X509_CRL *check_crl(rcynic_ctx_t *rc, - const uri_t *uri, - X509 *issuer) -{ - X509_CRL *old_crl, *new_crl, *result = NULL; - path_t old_path, new_path; - - if (uri_to_filename(rc, uri, &new_path, &rc->new_authenticated) && - (new_crl = read_crl(&new_path, NULL)) != NULL) - return new_crl; - - logmsg(rc, log_telemetry, "Checking CRL %s", uri->s); - - new_crl = check_crl_1(rc, uri, &new_path, &rc->unauthenticated, - issuer, object_generation_current); - - old_crl = check_crl_1(rc, uri, &old_path, &rc->old_authenticated, - issuer, object_generation_backup); - - if (!new_crl) - result = old_crl; - - else if (!old_crl) - result = new_crl; - - else { - ASN1_GENERALIZEDTIME *g_old = ASN1_TIME_to_generalizedtime(X509_CRL_get_lastUpdate(old_crl), NULL); - ASN1_GENERALIZEDTIME *g_new = ASN1_TIME_to_generalizedtime(X509_CRL_get_lastUpdate(new_crl), NULL); - int num_cmp = ASN1_INTEGER_cmp(old_crl->crl_number, new_crl->crl_number); - int date_cmp = (!g_old || !g_new) ? 0 : ASN1_STRING_cmp(g_old, g_new); - - if (!g_old) - log_validation_status(rc, uri, bad_thisupdate, object_generation_backup); - if (!g_new) - log_validation_status(rc, uri, bad_thisupdate, object_generation_current); - if (num_cmp > 0) - log_validation_status(rc, uri, backup_number_higher_than_current, object_generation_current); - if (g_old && g_new && date_cmp > 0) - log_validation_status(rc, uri, backup_thisupdate_newer_than_current, object_generation_current); - - if (num_cmp > 0 && (!g_old || !g_new || date_cmp > 0)) - result = old_crl; - else - result = new_crl; - - ASN1_GENERALIZEDTIME_free(g_old); - ASN1_GENERALIZEDTIME_free(g_new); - } - - if (result && result == new_crl) - install_object(rc, uri, &new_path, object_generation_current); - else if (!access(new_path.s, F_OK)) - log_validation_status(rc, uri, object_rejected, object_generation_current); - - if (result && result == old_crl) - install_object(rc, uri, &old_path, object_generation_backup); - else if (!result && !access(old_path.s, 
F_OK)) - log_validation_status(rc, uri, object_rejected, object_generation_backup); - - if (result != new_crl) - X509_CRL_free(new_crl); - - if (result != old_crl) - X509_CRL_free(old_crl); - - return result; -} - - -/** - * Check digest of a CRL we've already accepted. - */ -static int check_crl_digest(const rcynic_ctx_t *rc, - const uri_t *uri, - const unsigned char *hash, - const size_t hashlen) -{ - X509_CRL *crl = NULL; - hashbuf_t hashbuf; - path_t path; - int result; - - assert(rc && uri && hash); - - if (!uri_to_filename(rc, uri, &path, &rc->new_authenticated) || - (crl = read_crl(&path, &hashbuf)) == NULL) - return 0; - - result = hashlen <= sizeof(hashbuf.h) && !memcmp(hashbuf.h, hash, hashlen); - - X509_CRL_free(crl); - - return result; -} - - - -/** - * Validation callback function for use with x509_verify_cert(). - */ -static int check_x509_cb(int ok, X509_STORE_CTX *ctx) -{ - rcynic_x509_store_ctx_t *rctx = (rcynic_x509_store_ctx_t *) ctx; - mib_counter_t code; - - assert(rctx != NULL); - - switch (ctx->error) { - case X509_V_OK: - return ok; - - case X509_V_ERR_SUBJECT_ISSUER_MISMATCH: - /* - * Informational events, not really errors. ctx->check_issued() - * is called in many places where failure to find an issuer is not - * a failure for the calling function. Just leave these alone. - */ - return ok; - - case X509_V_ERR_CRL_HAS_EXPIRED: - /* - * This isn't really an error, exactly. CRLs don't really - * "expire". What OpenSSL really means by this error is just - * "it's now later than the issuer said it intended to publish a - * new CRL". Whether we treat this as an error or not is - * configurable, see the allow_stale_crl parameter. - * - * Deciding whether to allow stale CRLs is check_crl_1()'s job, - * not ours. By the time this callback occurs, we've already - * accepted the CRL; this callback is just notifying us that the - * object being checked is tainted by a stale CRL. So we mark the - * object as tainted and carry on. 
- */ - log_validation_status(rctx->rc, &rctx->subject->uri, tainted_by_stale_crl, rctx->subject->generation); - ok = 1; - return ok; - - case X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT: - /* - * This is another error that's only an error in the strange world - * of OpenSSL, but a more serious one. By default, OpenSSL - * expects all trust anchors to be self-signed. This is not a - * PKIX requirement, it's just an OpenSSL thing, but one violates - * it at one's peril, because the only way to convince OpenSSL to - * allow a non-self-signed trust anchor is to intercept this - * "error" in the verify callback handler. - * - * So this program supports non-self-signed trust anchors, but be - * warned that enabling this feature may cause this program's - * output not to work with other OpenSSL-based applications. - */ - if (rctx->rc->allow_non_self_signed_trust_anchor) - ok = 1; - log_validation_status(rctx->rc, &rctx->subject->uri, trust_anchor_not_self_signed, rctx->subject->generation); - return ok; - - /* - * Select correct MIB counter for every known OpenSSL verify errors - * except the ones we handle explicitly above, then fall through to - * common handling for all of these. - */ -#define QV(x) \ - case x: \ - code = mib_openssl_##x; \ - break; - MIB_COUNTERS_FROM_OPENSSL; -#undef QV - - default: - code = unknown_openssl_verify_error; - break; - } - - log_validation_status(rctx->rc, &rctx->subject->uri, code, rctx->subject->generation); - return ok; -} - -/** - * Check crypto aspects of a certificate, policy OID, RFC 3779 path - * validation, and conformance to the RPKI certificate profile. 
- */ -static int check_x509(rcynic_ctx_t *rc, - STACK_OF(walk_ctx_t) *wsk, - const uri_t *uri, - X509 *x, - certinfo_t *certinfo, - const object_generation_t generation) -{ - walk_ctx_t *w = walk_ctx_stack_head(wsk); - rcynic_x509_store_ctx_t rctx; - EVP_PKEY *issuer_pkey = NULL, *subject_pkey = NULL; - unsigned long flags = (X509_V_FLAG_POLICY_CHECK | X509_V_FLAG_EXPLICIT_POLICY | X509_V_FLAG_X509_STRICT); - AUTHORITY_INFO_ACCESS *sia = NULL, *aia = NULL; - STACK_OF(POLICYINFO) *policies = NULL; - ASN1_BIT_STRING *ski_pubkey = NULL; - STACK_OF(DIST_POINT) *crldp = NULL; - EXTENDED_KEY_USAGE *eku = NULL; - BASIC_CONSTRAINTS *bc = NULL; - hashbuf_t ski_hashbuf; - unsigned ski_hashlen, afi; - int i, ok, crit, loc, ex_count, routercert = 0, ret = 0; - - assert(rc && wsk && w && uri && x && w->cert); - - /* - * Cleanup logic will explode if rctx.ctx hasn't been initialized, - * so we need to do this before running any test that can fail. - */ - if (!X509_STORE_CTX_init(&rctx.ctx, rc->x509_store, x, NULL)) - return 0; - - /* - * certinfo == NULL means x is a self-signed trust anchor. - */ - if (certinfo == NULL) - certinfo = &w->certinfo; - - memset(certinfo, 0, sizeof(*certinfo)); - - certinfo->uri = *uri; - certinfo->generation = generation; - - if (ASN1_INTEGER_cmp(X509_get_serialNumber(x), asn1_zero) <= 0 || - ASN1_INTEGER_cmp(X509_get_serialNumber(x), asn1_twenty_octets) > 0) { - log_validation_status(rc, uri, bad_certificate_serial_number, generation); - goto done; - } - - if (!check_allowed_time_encoding(X509_get_notBefore(x)) || - !check_allowed_time_encoding(X509_get_notAfter(x))) { - log_validation_status(rc, uri, nonconformant_asn1_time_value, generation); - goto done; - } - - /* - * Apparently nothing ever looks at these fields, so there are no - * API functions for them. We wouldn't bother either if they - * weren't forbidden by the RPKI certificate profile. 
- */ - if (!x->cert_info || x->cert_info->issuerUID || x->cert_info->subjectUID) { - log_validation_status(rc, uri, nonconformant_certificate_uid, generation); - goto done; - } - - /* - * Keep track of allowed extensions we've seen. Once we've - * processed all the ones we expect, anything left is an error. - */ - ex_count = X509_get_ext_count(x); - - /* - * We don't use X509_check_ca() to set certinfo->ca anymore, because - * it's not paranoid enough to enforce the RPKI certificate profile, - * but we still call it because we need it (or something) to invoke - * x509v3_cache_extensions() for us. - */ - (void) X509_check_ca(x); - - if ((bc = X509_get_ext_d2i(x, NID_basic_constraints, &crit, NULL)) != NULL) { - ex_count--; - if (!crit || bc->ca <= 0 || bc->pathlen != NULL) { - log_validation_status(rc, uri, malformed_basic_constraints, generation); - goto done; - } - } - - certinfo->ca = bc != NULL; - - if (certinfo == &w->certinfo) { - certinfo->ta = 1; - if (!certinfo->ca) { - log_validation_status(rc, uri, malformed_trust_anchor, generation); - goto done; - } - } - - if ((aia = X509_get_ext_d2i(x, NID_info_access, NULL, NULL)) != NULL) { - int n_caIssuers = 0; - ex_count--; - if (!extract_access_uri(rc, uri, generation, aia, NID_ad_ca_issuers, - &certinfo->aia, &n_caIssuers) || - !certinfo->aia.s[0] || - sk_ACCESS_DESCRIPTION_num(aia) != n_caIssuers) { - log_validation_status(rc, uri, malformed_aia_extension, generation); - goto done; - } - } - - if (certinfo->ta && aia) { - log_validation_status(rc, uri, aia_extension_forbidden, generation); - goto done; - } - - if (!certinfo->ta && !aia) { - log_validation_status(rc, uri, aia_extension_missing, generation); - goto done; - } - - if ((eku = X509_get_ext_d2i(x, NID_ext_key_usage, &crit, NULL)) != NULL) { - ex_count--; - if (crit || certinfo->ca || !endswith(uri->s, ".cer") || sk_ASN1_OBJECT_num(eku) == 0) { - log_validation_status(rc, uri, inappropriate_eku_extension, generation); - goto done; - } - for (i = 0; i 
< sk_ASN1_OBJECT_num(eku); i++) - routercert |= OBJ_obj2nid(sk_ASN1_OBJECT_value(eku, i)) == NID_id_kp_bgpsec_router; - } - - if ((sia = X509_get_ext_d2i(x, NID_sinfo_access, NULL, NULL)) != NULL) { - int got_caDirectory, got_rpkiManifest, got_signedObject; - int n_caDirectory = 0, n_rpkiManifest = 0, n_signedObject = 0; - ex_count--; - ok = (extract_access_uri(rc, uri, generation, sia, NID_caRepository, - &certinfo->sia, &n_caDirectory) && - extract_access_uri(rc, uri, generation, sia, NID_ad_rpkiManifest, - &certinfo->manifest, &n_rpkiManifest) && - extract_access_uri(rc, uri, generation, sia, NID_ad_signedObject, - &certinfo->signedobject, &n_signedObject)); - got_caDirectory = certinfo->sia.s[0] != '\0'; - got_rpkiManifest = certinfo->manifest.s[0] != '\0'; - got_signedObject = certinfo->signedobject.s[0] != '\0'; - ok &= sk_ACCESS_DESCRIPTION_num(sia) == n_caDirectory + n_rpkiManifest + n_signedObject; - if (certinfo->ca) - ok &= got_caDirectory && got_rpkiManifest && !got_signedObject; - else if (rc->allow_ee_without_signedObject) - ok &= !got_caDirectory && !got_rpkiManifest; - else - ok &= !got_caDirectory && !got_rpkiManifest && got_signedObject; - if (!ok) { - log_validation_status(rc, uri, malformed_sia_extension, generation); - goto done; - } - } else if (certinfo->ca || !rc->allow_ee_without_signedObject) { - log_validation_status(rc, uri, sia_extension_missing, generation); - goto done; - } else if (!routercert) { - log_validation_status(rc, uri, sia_extension_missing_from_ee, generation); - } - - if (certinfo->signedobject.s[0] && strcmp(uri->s, certinfo->signedobject.s)) - log_validation_status(rc, uri, bad_signed_object_uri, generation); - - if ((crldp = X509_get_ext_d2i(x, NID_crl_distribution_points, NULL, NULL)) != NULL) { - ex_count--; - if (!extract_crldp_uri(rc, uri, generation, crldp, &certinfo->crldp)) - goto done; - } - - rctx.rc = rc; - rctx.subject = certinfo; - - if (w->certs == NULL && (w->certs = walk_ctx_stack_certs(rc, wsk)) == 
NULL) - goto done; - - if (X509_get_version(x) != 2) { - log_validation_status(rc, uri, wrong_object_version, generation); - goto done; - } - - if (!x->cert_info || !x->cert_info->signature || !x->cert_info->signature->algorithm || - OBJ_obj2nid(x->cert_info->signature->algorithm) != NID_sha256WithRSAEncryption) { - log_validation_status(rc, uri, nonconformant_signature_algorithm, generation); - goto done; - } - - if (certinfo->sia.s[0] && certinfo->sia.s[strlen(certinfo->sia.s) - 1] != '/') { - log_validation_status(rc, uri, malformed_cadirectory_uri, generation); - goto done; - } - - if (!w->certinfo.ta && strcmp(w->certinfo.uri.s, certinfo->aia.s)) - log_validation_status(rc, uri, aia_doesnt_match_issuer, generation); - - if (certinfo->ca && !certinfo->sia.s[0]) { - log_validation_status(rc, uri, sia_cadirectory_uri_missing, generation); - goto done; - } - - if (certinfo->ca && !certinfo->manifest.s[0]) { - log_validation_status(rc, uri, sia_manifest_uri_missing, generation); - goto done; - } - - if (certinfo->ca && !startswith(certinfo->manifest.s, certinfo->sia.s)) { - log_validation_status(rc, uri, manifest_carepository_mismatch, generation); - goto done; - } - - if (x->skid) { - ex_count--; - } else { - log_validation_status(rc, uri, ski_extension_missing, generation); - goto done; - } - - if (!check_allowed_dn(X509_get_subject_name(x))) { - log_validation_status(rc, uri, nonconformant_subject_name, generation); - if (!rc->allow_nonconformant_name) - goto done; - } - - if (!check_allowed_dn(X509_get_issuer_name(x))) { - log_validation_status(rc, uri, nonconformant_issuer_name, generation); - if (!rc->allow_nonconformant_name) - goto done; - } - - if ((policies = X509_get_ext_d2i(x, NID_certificate_policies, &crit, NULL)) != NULL) { - POLICYQUALINFO *qualifier = NULL; - POLICYINFO *policy = NULL; - ex_count--; - if (!crit || sk_POLICYINFO_num(policies) != 1 || - (policy = sk_POLICYINFO_value(policies, 0)) == NULL || - OBJ_obj2nid(policy->policyid) != 
NID_cp_ipAddr_asNumber || - sk_POLICYQUALINFO_num(policy->qualifiers) > 1 || - (sk_POLICYQUALINFO_num(policy->qualifiers) == 1 && - ((qualifier = sk_POLICYQUALINFO_value(policy->qualifiers, 0)) == NULL || - OBJ_obj2nid(qualifier->pqualid) != NID_id_qt_cps))) { - log_validation_status(rc, uri, bad_certificate_policy, generation); - goto done; - } - if (qualifier) - log_validation_status(rc, uri, policy_qualifier_cps, generation); - } - - if (!X509_EXTENSION_get_critical(X509_get_ext(x, X509_get_ext_by_NID(x, NID_key_usage, -1))) || - (x->ex_flags & EXFLAG_KUSAGE) == 0 || - x->ex_kusage != (certinfo->ca ? KU_KEY_CERT_SIGN | KU_CRL_SIGN : KU_DIGITAL_SIGNATURE)) { - log_validation_status(rc, uri, bad_key_usage, generation); - goto done; - } - ex_count--; - - if (x->rfc3779_addr) { - ex_count--; - if (routercert || - (loc = X509_get_ext_by_NID(x, NID_sbgp_ipAddrBlock, -1)) < 0 || - !X509_EXTENSION_get_critical(X509_get_ext(x, loc)) || - !v3_addr_is_canonical(x->rfc3779_addr) || - sk_IPAddressFamily_num(x->rfc3779_addr) == 0) { - log_validation_status(rc, uri, bad_ipaddrblocks, generation); - goto done; - } - for (i = 0; i < sk_IPAddressFamily_num(x->rfc3779_addr); i++) { - IPAddressFamily *f = sk_IPAddressFamily_value(x->rfc3779_addr, i); - afi = v3_addr_get_afi(f); - if (afi != IANA_AFI_IPV4 && afi != IANA_AFI_IPV6) { - log_validation_status(rc, uri, unknown_afi, generation); - goto done; - } - if (f->addressFamily->length != 2) { - log_validation_status(rc, uri, safi_not_allowed, generation); - goto done; - } - } - } - - if (x->rfc3779_asid) { - ex_count--; - if ((loc = X509_get_ext_by_NID(x, NID_sbgp_autonomousSysNum, -1)) < 0 || - !X509_EXTENSION_get_critical(X509_get_ext(x, loc)) || - !v3_asid_is_canonical(x->rfc3779_asid) || - x->rfc3779_asid->asnum == NULL || - x->rfc3779_asid->rdi != NULL || - (routercert && x->rfc3779_asid->asnum->type == ASIdentifierChoice_inherit)) { - log_validation_status(rc, uri, bad_asidentifiers, generation); - goto done; - } - } - - if 
(!x->rfc3779_addr && !x->rfc3779_asid) { - log_validation_status(rc, uri, missing_resources, generation); - goto done; - } - - subject_pkey = X509_get_pubkey(x); - ok = subject_pkey != NULL; - if (ok) { - ASN1_OBJECT *algorithm; - - (void) X509_PUBKEY_get0_param(&algorithm, NULL, NULL, NULL, X509_get_X509_PUBKEY(x)); - - switch (OBJ_obj2nid(algorithm)) { - - case NID_rsaEncryption: - ok = (EVP_PKEY_type(subject_pkey->type) == EVP_PKEY_RSA && - BN_get_word(subject_pkey->pkey.rsa->e) == 65537); - if (!ok) - break; - if (!certinfo->ca && rc->allow_1024_bit_ee_key && - BN_num_bits(subject_pkey->pkey.rsa->n) == 1024) - log_validation_status(rc, uri, ee_certificate_with_1024_bit_key, generation); - else - ok = BN_num_bits(subject_pkey->pkey.rsa->n) == 2048; - break; - - case NID_X9_62_id_ecPublicKey: - ok = !certinfo->ca && routercert; - break; - - default: - ok = 0; - } - } - if (!ok) { - log_validation_status(rc, uri, bad_public_key, generation); - goto done; - } - - if (x->skid == NULL || - (ski_pubkey = X509_get0_pubkey_bitstr(x)) == NULL || - !EVP_Digest(ski_pubkey->data, ski_pubkey->length, - ski_hashbuf.h, &ski_hashlen, EVP_sha1(), NULL) || - ski_hashlen != 20 || - ski_hashlen != x->skid->length || - memcmp(ski_hashbuf.h, x->skid->data, ski_hashlen)) { - log_validation_status(rc, uri, ski_public_key_mismatch, generation); - goto done; - } - - if (x->akid) { - ex_count--; - if (!check_aki(rc, uri, w->cert, x->akid, generation)) - goto done; - } - - if (!x->akid && !certinfo->ta) { - log_validation_status(rc, uri, aki_extension_missing, generation); - goto done; - } - - if ((issuer_pkey = X509_get_pubkey(w->cert)) == NULL || X509_verify(x, issuer_pkey) <= 0) { - log_validation_status(rc, uri, certificate_bad_signature, generation); - goto done; - } - - if (certinfo->ta) { - - if (certinfo->crldp.s[0]) { - log_validation_status(rc, uri, trust_anchor_with_crldp, generation); - goto done; - } - - } else { - - if (!certinfo->crldp.s[0]) { - log_validation_status(rc, 
uri, crldp_uri_missing, generation); - goto done; - } - - if (!certinfo->ca && !startswith(certinfo->crldp.s, w->certinfo.sia.s)) { - log_validation_status(rc, uri, crldp_doesnt_match_issuer_sia, generation); - goto done; - } - - if (w->crls == NULL && ((w->crls = sk_X509_CRL_new_null()) == NULL || - !sk_X509_CRL_push(w->crls, NULL))) { - logmsg(rc, log_sys_err, "Internal allocation error setting up CRL for validation"); - goto done; - } - - assert(sk_X509_CRL_num(w->crls) == 1); - assert((w->crldp.s[0] == '\0') == (sk_X509_CRL_value(w->crls, 0) == NULL)); - - if (strcmp(w->crldp.s, certinfo->crldp.s)) { - X509_CRL *old_crl = sk_X509_CRL_value(w->crls, 0); - X509_CRL *new_crl = check_crl(rc, &certinfo->crldp, w->cert); - - if (w->crldp.s[0]) - log_validation_status(rc, uri, issuer_uses_multiple_crldp_values, generation); - - if (new_crl == NULL) { - log_validation_status(rc, uri, bad_crl, generation); - goto done; - } - - if (old_crl && new_crl && ASN1_INTEGER_cmp(old_crl->crl_number, new_crl->crl_number) < 0) { - log_validation_status(rc, uri, crldp_names_newer_crl, generation); - X509_CRL_free(old_crl); - old_crl = NULL; - } - - if (old_crl == NULL) { - sk_X509_CRL_set(w->crls, 0, new_crl); - w->crldp = certinfo->crldp; - } else { - X509_CRL_free(new_crl); - } - } - - assert(sk_X509_CRL_value(w->crls, 0)); - flags |= X509_V_FLAG_CRL_CHECK; - X509_STORE_CTX_set0_crls(&rctx.ctx, w->crls); - } - - if (ex_count > 0) { - log_validation_status(rc, uri, disallowed_x509v3_extension, generation); - goto done; - } - - assert(w->certs != NULL); - X509_STORE_CTX_trusted_stack(&rctx.ctx, w->certs); - X509_STORE_CTX_set_verify_cb(&rctx.ctx, check_x509_cb); - - X509_VERIFY_PARAM_set_flags(rctx.ctx.param, flags); - - X509_VERIFY_PARAM_add0_policy(rctx.ctx.param, OBJ_nid2obj(NID_cp_ipAddr_asNumber)); - - if (X509_verify_cert(&rctx.ctx) <= 0) { - log_validation_status(rc, uri, certificate_failed_validation, generation); - goto done; - } - - ret = 1; - - done: - 
X509_STORE_CTX_cleanup(&rctx.ctx); - EVP_PKEY_free(issuer_pkey); - EVP_PKEY_free(subject_pkey); - BASIC_CONSTRAINTS_free(bc); - sk_ACCESS_DESCRIPTION_pop_free(sia, ACCESS_DESCRIPTION_free); - sk_ACCESS_DESCRIPTION_pop_free(aia, ACCESS_DESCRIPTION_free); - sk_DIST_POINT_pop_free(crldp, DIST_POINT_free); - sk_POLICYINFO_pop_free(policies, POLICYINFO_free); - sk_ASN1_OBJECT_pop_free(eku, ASN1_OBJECT_free); - - return ret; -} - -/** - * Extract one datum from a CMS_SignerInfo. - */ -static void *extract_si_datum(CMS_SignerInfo *si, - int *n, - const int optional, - const int nid, - const int asn1_type) -{ - int i = CMS_signed_get_attr_by_NID(si, nid, -1); - void *result = NULL; - X509_ATTRIBUTE *a; - - assert(si && n); - - if (i < 0 && optional) - return NULL; - - if (i >= 0 && - CMS_signed_get_attr_by_NID(si, nid, i) < 0 && - (a = CMS_signed_get_attr(si, i)) != NULL && - X509_ATTRIBUTE_count(a) == 1 && - (result = X509_ATTRIBUTE_get0_data(a, 0, asn1_type, NULL)) != NULL) - --*n; - else - *n = -1; - - return result; -} - -/** - * Check a signed CMS object. 
- */ -static int check_cms(rcynic_ctx_t *rc, - STACK_OF(walk_ctx_t) *wsk, - const uri_t *uri, - path_t *path, - const path_t *prefix, - CMS_ContentInfo **pcms, - X509 **px, - certinfo_t *certinfo, - BIO *bio, - const unsigned char *hash, - const size_t hashlen, - const int expected_eContentType_nid, - const int require_inheritance, - const object_generation_t generation) -{ - STACK_OF(CMS_SignerInfo) *signer_infos = NULL; - CMS_ContentInfo *cms = NULL; - CMS_SignerInfo *si = NULL; - ASN1_OCTET_STRING *sid = NULL; - X509_NAME *si_issuer = NULL; - ASN1_INTEGER *si_serial = NULL; - STACK_OF(X509_CRL) *crls = NULL; - STACK_OF(X509) *certs = NULL; - X509_ALGOR *signature_alg = NULL, *digest_alg = NULL; - ASN1_OBJECT *oid = NULL; - hashbuf_t hashbuf; - X509 *x = NULL; - certinfo_t certinfo_; - int i, result = 0; - - assert(rc && wsk && uri && path && prefix); - - if (!certinfo) - certinfo = &certinfo_; - - if (!uri_to_filename(rc, uri, path, prefix)) - goto error; - - if (hash) - cms = read_cms(path, &hashbuf); - else - cms = read_cms(path, NULL); - - if (!cms) - goto error; - - if (hash && (hashlen > sizeof(hashbuf.h) || - memcmp(hashbuf.h, hash, hashlen))) { - log_validation_status(rc, uri, digest_mismatch, generation); - if (!rc->allow_digest_mismatch) - goto error; - } - - if (OBJ_obj2nid(CMS_get0_eContentType(cms)) != expected_eContentType_nid) { - log_validation_status(rc, uri, bad_cms_econtenttype, generation); - goto error; - } - - if (CMS_verify(cms, NULL, NULL, NULL, bio, CMS_NO_SIGNER_CERT_VERIFY) <= 0) { - log_validation_status(rc, uri, cms_validation_failure, generation); - goto error; - } - - if ((crls = CMS_get1_crls(cms)) != NULL) { - log_validation_status(rc, uri, cms_includes_crls, generation); - goto error; - } - - if ((signer_infos = CMS_get0_SignerInfos(cms)) == NULL || - sk_CMS_SignerInfo_num(signer_infos) != 1 || - (si = sk_CMS_SignerInfo_value(signer_infos, 0)) == NULL || - !CMS_SignerInfo_get0_signer_id(si, &sid, &si_issuer, &si_serial) || - sid 
== NULL || si_issuer != NULL || si_serial != NULL || - CMS_unsigned_get_attr_count(si) != -1) { - log_validation_status(rc, uri, bad_cms_signer_infos, generation); - goto error; - } - - CMS_SignerInfo_get0_algs(si, NULL, &x, &digest_alg, &signature_alg); - - if (x == NULL) { - log_validation_status(rc, uri, cms_signer_missing, generation); - goto error; - } - - if ((certs = CMS_get1_certs(cms)) == NULL || - sk_X509_num(certs) != 1 || - X509_cmp(x, sk_X509_value(certs, 0))) { - log_validation_status(rc, uri, bad_cms_signer, generation); - goto error; - } - - X509_ALGOR_get0(&oid, NULL, NULL, signature_alg); - i = OBJ_obj2nid(oid); - if (i != NID_sha256WithRSAEncryption && i != NID_rsaEncryption) { - log_validation_status(rc, uri, wrong_cms_si_signature_algorithm, generation); - goto error; - } - - X509_ALGOR_get0(&oid, NULL, NULL, digest_alg); - if (OBJ_obj2nid(oid) != NID_sha256) { - log_validation_status(rc, uri, wrong_cms_si_digest_algorithm, generation); - goto error; - } - - i = CMS_signed_get_attr_count(si); - - (void) extract_si_datum(si, &i, 1, NID_pkcs9_signingTime, V_ASN1_UTCTIME); - (void) extract_si_datum(si, &i, 1, NID_binary_signing_time, V_ASN1_INTEGER); - oid = extract_si_datum(si, &i, 0, NID_pkcs9_contentType, V_ASN1_OBJECT); - (void) extract_si_datum(si, &i, 0, NID_pkcs9_messageDigest, V_ASN1_OCTET_STRING); - - if (i != 0) { - log_validation_status(rc, uri, bad_cms_si_signed_attributes, generation); - if (!rc->allow_wrong_cms_si_attributes) - goto error; - } - - if (OBJ_obj2nid(oid) != expected_eContentType_nid) { - log_validation_status(rc, uri, bad_cms_si_contenttype, generation); - goto error; - } - - if (CMS_SignerInfo_cert_cmp(si, x)) { - log_validation_status(rc, uri, cms_ski_mismatch, generation); - goto error; - } - - if (!check_x509(rc, wsk, uri, x, certinfo, generation)) - goto error; - - if (require_inheritance && x->rfc3779_addr) { - for (i = 0; i < sk_IPAddressFamily_num(x->rfc3779_addr); i++) { - IPAddressFamily *f = 
sk_IPAddressFamily_value(x->rfc3779_addr, i); - if (f->ipAddressChoice->type != IPAddressChoice_inherit) { - log_validation_status(rc, uri, rfc3779_inheritance_required, generation); - goto error; - } - } - } - - if (require_inheritance && x->rfc3779_asid && x->rfc3779_asid->asnum && - x->rfc3779_asid->asnum->type != ASIdentifierChoice_inherit) { - log_validation_status(rc, uri, rfc3779_inheritance_required, generation); - goto error; - } - - if (pcms) { - *pcms = cms; - cms = NULL; - } - - if (px) - *px = x; - - result = 1; - - error: - CMS_ContentInfo_free(cms); - sk_X509_CRL_pop_free(crls, X509_CRL_free); - sk_X509_pop_free(certs, X509_free); - - return result; -} - - - -/** - * Load certificate, check against manifest, then run it through all - * the check_x509() tests. - */ -static X509 *check_cert_1(rcynic_ctx_t *rc, - STACK_OF(walk_ctx_t) *wsk, - const uri_t *uri, - path_t *path, - const path_t *prefix, - certinfo_t *certinfo, - const unsigned char *hash, - const size_t hashlen, - object_generation_t generation) -{ - hashbuf_t hashbuf; - X509 *x = NULL; - - assert(uri && path && wsk && certinfo); - - if (!uri_to_filename(rc, uri, path, prefix)) - return NULL; - - if (access(path->s, R_OK)) - return NULL; - - if (hash) - x = read_cert(path, &hashbuf); - else - x = read_cert(path, NULL); - - if (!x) { - logmsg(rc, log_sys_err, "Can't read certificate %s", path->s); - goto punt; - } - - if (hash && (hashlen > sizeof(hashbuf.h) || - memcmp(hashbuf.h, hash, hashlen))) { - log_validation_status(rc, uri, digest_mismatch, generation); - if (!rc->allow_digest_mismatch) - goto punt; - } - - if (check_x509(rc, wsk, uri, x, certinfo, generation)) - return x; - - punt: - X509_free(x); - return NULL; -} - -/** - * Try to find a good copy of a certificate either in fresh data or in - * backup data from a previous run of this program. 
- */ -static X509 *check_cert(rcynic_ctx_t *rc, - STACK_OF(walk_ctx_t) *wsk, - uri_t *uri, - certinfo_t *certinfo, - const unsigned char *hash, - const size_t hashlen) -{ - walk_ctx_t *w = walk_ctx_stack_head(wsk); - object_generation_t generation; - const path_t *prefix = NULL; - path_t path; - X509 *x; - - assert(rc && uri && wsk && w && certinfo); - - switch (w->state) { - case walk_state_current: - prefix = &rc->unauthenticated; - generation = object_generation_current; - break; - case walk_state_backup: - prefix = &rc->old_authenticated; - generation = object_generation_backup; - break; - default: - return NULL; - } - - if (skip_checking_this_object(rc, uri, generation)) - return NULL; - - if ((x = check_cert_1(rc, wsk, uri, &path, prefix, certinfo, - hash, hashlen, generation)) != NULL) - install_object(rc, uri, &path, generation); - else if (!access(path.s, F_OK)) - log_validation_status(rc, uri, object_rejected, generation); - else if (hash && generation == w->manifest_generation) - log_validation_status(rc, uri, manifest_lists_missing_object, generation); - - return x; -} - - - -/** - * Read and check one manifest from disk. 
- */ -static Manifest *check_manifest_1(rcynic_ctx_t *rc, - STACK_OF(walk_ctx_t) *wsk, - const uri_t *uri, - path_t *path, - const path_t *prefix, - certinfo_t *certinfo, - const object_generation_t generation) -{ - STACK_OF(FileAndHash) *sorted_fileList = NULL; - Manifest *manifest = NULL, *result = NULL; - CMS_ContentInfo *cms = NULL; - FileAndHash *fah = NULL, *fah2 = NULL; - BIO *bio = NULL; - X509 *x; - int i; - - assert(rc && wsk && uri && path && prefix); - - if ((bio = BIO_new(BIO_s_mem())) == NULL) { - logmsg(rc, log_sys_err, "Couldn't allocate BIO for manifest %s", uri->s); - goto done; - } - - if (!check_cms(rc, wsk, uri, path, prefix, &cms, &x, certinfo, bio, NULL, 0, - NID_ct_rpkiManifest, 1, generation)) - goto done; - - if ((manifest = ASN1_item_d2i_bio(ASN1_ITEM_rptr(Manifest), bio, NULL)) == NULL) { - log_validation_status(rc, uri, cms_econtent_decode_error, generation); - goto done; - } - - if (manifest->version) { - log_validation_status(rc, uri, wrong_object_version, generation); - goto done; - } - - if (X509_cmp_current_time(manifest->thisUpdate) > 0) { - log_validation_status(rc, uri, manifest_not_yet_valid, generation); - goto done; - } - - if (X509_cmp_current_time(manifest->nextUpdate) < 0) { - log_validation_status(rc, uri, stale_crl_or_manifest, generation); - if (!rc->allow_stale_manifest) - goto done; - } - - if (asn1_time_cmp(manifest->thisUpdate, X509_get_notBefore(x)) < 0 || - asn1_time_cmp(manifest->nextUpdate, X509_get_notAfter(x)) > 0) { - log_validation_status(rc, uri, manifest_interval_overruns_cert, generation); - goto done; - } - - if (ASN1_INTEGER_cmp(manifest->manifestNumber, asn1_zero) < 0 || - ASN1_INTEGER_cmp(manifest->manifestNumber, asn1_twenty_octets) > 0) { - log_validation_status(rc, uri, bad_manifest_number, generation); - goto done; - } - - if (OBJ_obj2nid(manifest->fileHashAlg) != NID_sha256) { - log_validation_status(rc, uri, nonconformant_digest_algorithm, generation); - goto done; - } - - if ((sorted_fileList = 
sk_FileAndHash_dup(manifest->fileList)) == NULL) { - logmsg(rc, log_sys_err, "Couldn't allocate shallow copy of fileList for manifest %s", uri->s); - goto done; - } - - (void) sk_FileAndHash_set_cmp_func(sorted_fileList, FileAndHash_name_cmp); - sk_FileAndHash_sort(sorted_fileList); - - for (i = 0; (fah = sk_FileAndHash_value(sorted_fileList, i)) != NULL && (fah2 = sk_FileAndHash_value(sorted_fileList, i + 1)) != NULL; i++) { - if (!strcmp((char *) fah->file->data, (char *) fah2->file->data)) { - log_validation_status(rc, uri, duplicate_name_in_manifest, generation); - goto done; - } - } - - for (i = 0; (fah = sk_FileAndHash_value(manifest->fileList, i)) != NULL; i++) { - if (fah->hash->length != HASH_SHA256_LEN || - (fah->hash->flags & (ASN1_STRING_FLAG_BITS_LEFT | 7)) > ASN1_STRING_FLAG_BITS_LEFT) { - log_validation_status(rc, uri, bad_manifest_digest_length, generation); - goto done; - } - } - - result = manifest; - manifest = NULL; - - done: - BIO_free(bio); - Manifest_free(manifest); - CMS_ContentInfo_free(cms); - sk_FileAndHash_free(sorted_fileList); - return result; -} - -/** - * Check whether we already have a particular manifest, attempt to fetch it - * and check issuer's signature if we don't. - * - * General plan here is to do basic checks on both current and backup - * generation manifests, then, if both generations pass all of our - * other tests, pick the generation with the highest manifest number, - * to protect against replay attacks. - * - * Once we've picked the manifest we're going to use, we need to check - * it against the CRL we've chosen. Not much we can do if they don't - * match besides whine about it, but we do need to whine in this case. 
- */ -static int check_manifest(rcynic_ctx_t *rc, - STACK_OF(walk_ctx_t) *wsk) -{ - walk_ctx_t *w = walk_ctx_stack_head(wsk); - Manifest *old_manifest, *new_manifest, *result = NULL; - certinfo_t old_certinfo, new_certinfo; - const uri_t *uri, *crldp = NULL; - object_generation_t generation = object_generation_null; - path_t old_path, new_path; - FileAndHash *fah = NULL; - const char *crl_tail; - int i, ok = 1; - - assert(rc && wsk && w && !w->manifest); - - uri = &w->certinfo.manifest; - - logmsg(rc, log_telemetry, "Checking manifest %s", uri->s); - - new_manifest = check_manifest_1(rc, wsk, uri, &new_path, - &rc->unauthenticated, &new_certinfo, - object_generation_current); - - old_manifest = check_manifest_1(rc, wsk, uri, &old_path, - &rc->old_authenticated, &old_certinfo, - object_generation_backup); - - if (!new_manifest) - result = old_manifest; - - else if (!old_manifest) - result = new_manifest; - - else { - int num_cmp = ASN1_INTEGER_cmp(old_manifest->manifestNumber, new_manifest->manifestNumber); - int date_cmp = ASN1_STRING_cmp(old_manifest->thisUpdate, new_manifest->thisUpdate); - - if (num_cmp > 0) - log_validation_status(rc, uri, backup_number_higher_than_current, object_generation_current); - if (date_cmp > 0) - log_validation_status(rc, uri, backup_thisupdate_newer_than_current, object_generation_current); - - if (num_cmp > 0 && date_cmp > 0) - result = old_manifest; - else - result = new_manifest; - } - - if (result && result == new_manifest) { - generation = object_generation_current; - install_object(rc, uri, &new_path, generation); - crldp = &new_certinfo.crldp; - } - - if (result && result == old_manifest) { - generation = object_generation_backup; - install_object(rc, uri, &old_path, generation); - crldp = &old_certinfo.crldp; - } - - if (result) { - crl_tail = strrchr(crldp->s, '/'); - assert(crl_tail != NULL); - crl_tail++; - - for (i = 0; (fah = sk_FileAndHash_value(result->fileList, i)) != NULL; i++) - if (!strcmp((char *) fah->file->data, 
crl_tail)) - break; - - if (!fah) { - log_validation_status(rc, uri, crl_not_in_manifest, generation); - if (rc->require_crl_in_manifest) - ok = 0; - } - - else if (!check_crl_digest(rc, crldp, fah->hash->data, fah->hash->length)) { - log_validation_status(rc, uri, digest_mismatch, generation); - if (!rc->allow_crl_digest_mismatch) - ok = 0; - } - } - - if ((!result || result != new_manifest) && !access(new_path.s, F_OK)) - log_validation_status(rc, uri, object_rejected, object_generation_current); - - if (!result && !access(old_path.s, F_OK)) - log_validation_status(rc, uri, object_rejected, object_generation_backup); - - if (result != new_manifest) - Manifest_free(new_manifest); - - if (result != old_manifest) - Manifest_free(old_manifest); - - w->manifest = result; - if (crldp) - w->crldp = *crldp; - w->manifest_generation = generation; - - return ok; -} - - - -/** - * Mark CRL or manifest that we're rechecking so XML report makes more sense. - */ -static void rsync_needed_mark_recheck(rcynic_ctx_t *rc, - const uri_t *uri) -{ - validation_status_t *v = NULL; - - if (uri->s[0] != '\0') - v = validation_status_find(rc->validation_status_root, - uri, object_generation_current); - - if (v) { - validation_status_set_code(v, stale_crl_or_manifest, 0); - log_validation_status(rc, uri, rechecking_object, - object_generation_current); - } -} - -/** - * Check whether we need to rsync a particular tree. This depends on - * the setting of rc->rsync_early, whether we have a valid manifest on - * file, and whether that manifest is stale yet. 
- */ -static int rsync_needed(rcynic_ctx_t *rc, - STACK_OF(walk_ctx_t) *wsk) -{ - walk_ctx_t *w = walk_ctx_stack_head(wsk); - int needed; - - assert(rc && wsk && w); - - needed = (rc->rsync_early || - !check_manifest(rc, wsk) || - w->manifest == NULL || - X509_cmp_current_time(w->manifest->nextUpdate) < 0); - - if (needed && w->manifest != NULL) { - rsync_needed_mark_recheck(rc, &w->certinfo.manifest); - rsync_needed_mark_recheck(rc, &w->certinfo.crldp); - Manifest_free(w->manifest); - w->manifest = NULL; - } - - return needed; -} - - - -/** - * Extract a ROA prefix from the ASN.1 bitstring encoding. - */ -static int extract_roa_prefix(const ROAIPAddress *ra, - const unsigned afi, - unsigned char *addr, - unsigned *prefixlen, - unsigned *max_prefixlen) -{ - unsigned length; - long maxlen; - - assert(ra && addr && prefixlen && max_prefixlen); - - maxlen = ASN1_INTEGER_get(ra->maxLength); - - switch (afi) { - case IANA_AFI_IPV4: length = 4; break; - case IANA_AFI_IPV6: length = 16; break; - default: return 0; - } - - if (ra->IPAddress->length < 0 || ra->IPAddress->length > length || - maxlen < 0 || maxlen > (long) length * 8) - return 0; - - if (ra->IPAddress->length > 0) { - memcpy(addr, ra->IPAddress->data, ra->IPAddress->length); - if ((ra->IPAddress->flags & 7) != 0) { - unsigned char mask = 0xFF >> (8 - (ra->IPAddress->flags & 7)); - addr[ra->IPAddress->length - 1] &= ~mask; - } - } - - memset(addr + ra->IPAddress->length, 0, length - ra->IPAddress->length); - *prefixlen = (ra->IPAddress->length * 8) - (ra->IPAddress->flags & 7); - *max_prefixlen = ra->maxLength ? (unsigned) maxlen : *prefixlen; - - return 1; -} - -/** - * Read and check one ROA from disk. 
 */
static int check_roa_1(rcynic_ctx_t *rc,
                       STACK_OF(walk_ctx_t) *wsk,
                       const uri_t *uri,
                       path_t *path,
                       const path_t *prefix,
                       const unsigned char *hash,
                       const size_t hashlen,
                       const object_generation_t generation)
{
  STACK_OF(IPAddressFamily) *roa_resources = NULL, *ee_resources = NULL;
  unsigned char addrbuf[ADDR_RAW_BUF_LEN];
  CMS_ContentInfo *cms = NULL;
  BIO *bio = NULL;
  ROA *roa = NULL;
  X509 *x = NULL;
  int i, j, result = 0;
  unsigned afi, *safi = NULL, safi_, prefixlen, max_prefixlen;
  ROAIPAddressFamily *rf;
  ROAIPAddress *ra;

  assert(rc && wsk && uri && path && prefix);

  if ((bio = BIO_new(BIO_s_mem())) == NULL) {
    logmsg(rc, log_sys_err, "Couldn't allocate BIO for ROA %s", uri->s);
    goto error;
  }

  /*
   * NOTE(review): in this reconstruction the hash/hashlen parameters are
   * accepted but not forwarded to check_cms() -- confirm against the
   * original file whether these literal NULL, 0 arguments are intended.
   */
  if (!check_cms(rc, wsk, uri, path, prefix, &cms, &x, NULL, bio, NULL, 0,
                 NID_ct_ROA, 0, generation))
    goto error;

  if (!(roa = ASN1_item_d2i_bio(ASN1_ITEM_rptr(ROA), bio, NULL))) {
    log_validation_status(rc, uri, cms_econtent_decode_error, generation);
    goto error;
  }

  /* ROA version must be v0 (field absent or zero). */
  if (roa->version) {
    log_validation_status(rc, uri, wrong_object_version, generation);
    goto error;
  }

  /* asID must fit in four octets (0 .. 0xFFFFFFFF). */
  if (ASN1_INTEGER_cmp(roa->asID, asn1_zero) < 0 ||
      ASN1_INTEGER_cmp(roa->asID, asn1_four_octets) > 0) {
    log_validation_status(rc, uri, bad_roa_asID, generation);
    goto error;
  }

  ee_resources = X509_get_ext_d2i(x, NID_sbgp_ipAddrBlock, NULL, NULL);

  /*
   * Extract prefixes from ROA and convert them into a resource set.
   */

  if (!(roa_resources = sk_IPAddressFamily_new_null()))
    goto error;

  for (i = 0; i < sk_ROAIPAddressFamily_num(roa->ipAddrBlocks); i++) {
    rf = sk_ROAIPAddressFamily_value(roa->ipAddrBlocks, i);
    /* addressFamily is 2 octets of AFI plus an optional SAFI octet. */
    if (!rf || !rf->addressFamily || rf->addressFamily->length < 2 || rf->addressFamily->length > 3) {
      log_validation_status(rc, uri, malformed_roa_addressfamily, generation);
      goto error;
    }
    afi = (rf->addressFamily->data[0] << 8) | (rf->addressFamily->data[1]);
    if (rf->addressFamily->length == 3)
      *(safi = &safi_) = rf->addressFamily->data[2];
    for (j = 0; j < sk_ROAIPAddress_num(rf->addresses); j++) {
      ra = sk_ROAIPAddress_value(rf->addresses, j);
      if (!ra ||
          !extract_roa_prefix(ra, afi, addrbuf, &prefixlen, &max_prefixlen) ||
          !v3_addr_add_prefix(roa_resources, afi, safi, addrbuf, prefixlen)) {
        log_validation_status(rc, uri, roa_resources_malformed, generation);
        goto error;
      }
      if (max_prefixlen < prefixlen) {
        log_validation_status(rc, uri, roa_max_prefixlen_too_short, generation);
        goto error;
      }
    }
  }

  /*
   * ROAs can include nested prefixes, so direct translation to
   * resource sets could include overlapping ranges, which is illegal.
   * So we have to remove nested stuff before whacking into canonical
   * form.  Fortunately, this is relatively easy, since we know these
   * are just prefixes, not ranges: in a list of prefixes sorted by
   * the RFC 3779 rules, the first element of a set of nested prefixes
   * will always be the least specific.
   */

  for (i = 0; i < sk_IPAddressFamily_num(roa_resources); i++) {
    IPAddressFamily *f = sk_IPAddressFamily_value(roa_resources, i);

    if ((afi = v3_addr_get_afi(f)) == 0) {
      log_validation_status(rc, uri, roa_contains_bad_afi_value, generation);
      goto error;
    }

    if (f->ipAddressChoice->type == IPAddressChoice_addressesOrRanges) {
      IPAddressOrRanges *aors = f->ipAddressChoice->u.addressesOrRanges;

      sk_IPAddressOrRange_sort(aors);

      for (j = 0; j < sk_IPAddressOrRange_num(aors) - 1; j++) {
        IPAddressOrRange *a = sk_IPAddressOrRange_value(aors, j);
        IPAddressOrRange *b = sk_IPAddressOrRange_value(aors, j + 1);
        unsigned char a_min[ADDR_RAW_BUF_LEN], a_max[ADDR_RAW_BUF_LEN];
        unsigned char b_min[ADDR_RAW_BUF_LEN], b_max[ADDR_RAW_BUF_LEN];
        int length;

        if ((length = v3_addr_get_range(a, afi, a_min, a_max, ADDR_RAW_BUF_LEN)) == 0 ||
            (length = v3_addr_get_range(b, afi, b_min, b_max, ADDR_RAW_BUF_LEN)) == 0) {
          log_validation_status(rc, uri, roa_resources_malformed, generation);
          goto error;
        }

        /*
         * After sorting, if a's upper bound covers b's, then b is
         * nested inside a: drop b and re-examine this position.
         */
        if (memcmp(a_max, b_max, length) >= 0) {
          (void) sk_IPAddressOrRange_delete(aors, j + 1);
          IPAddressOrRange_free(b);
          --j;
        }
      }
    }
  }

  if (!v3_addr_canonize(roa_resources)) {
    log_validation_status(rc, uri, roa_resources_malformed, generation);
    goto error;
  }

  /* ROA prefixes must be covered by the EE certificate's resources. */
  if (!v3_addr_subset(roa_resources, ee_resources)) {
    log_validation_status(rc, uri, roa_resource_not_in_ee, generation);
    goto error;
  }

  result = 1;

 error:
  /* Single cleanup path: all of these tolerate NULL. */
  BIO_free(bio);
  ROA_free(roa);
  CMS_ContentInfo_free(cms);
  sk_IPAddressFamily_pop_free(roa_resources, IPAddressFamily_free);
  sk_IPAddressFamily_pop_free(ee_resources, IPAddressFamily_free);

  return result;
}

/**
 * Check whether we already have a particular ROA, attempt to fetch it
 * and check issuer's signature if we don't.
 */
static void check_roa(rcynic_ctx_t *rc,
                      STACK_OF(walk_ctx_t) *wsk,
                      const uri_t *uri,
                      const unsigned char *hash,
                      const size_t hashlen)
{
  walk_ctx_t *w = walk_ctx_stack_head(wsk);
  path_t path;

  assert(rc && wsk && w && uri);

  /* Already accepted in this run?  Then nothing to do. */
  if (uri_to_filename(rc, uri, &path, &rc->new_authenticated) &&
      !access(path.s, F_OK))
    return;

  logmsg(rc, log_telemetry, "Checking ROA %s", uri->s);

  /* Try the freshly fetched (unauthenticated) copy first ... */
  if (check_roa_1(rc, wsk, uri, &path, &rc->unauthenticated,
                  hash, hashlen, object_generation_current)) {
    install_object(rc, uri, &path, object_generation_current);
    return;
  }

  if (!access(path.s, F_OK))
    log_validation_status(rc, uri, object_rejected, object_generation_current);
  else if (hash)
    log_validation_status(rc, uri, manifest_lists_missing_object, object_generation_current);

  /* ... then fall back to the previous run's authenticated copy. */
  if (check_roa_1(rc, wsk, uri, &path, &rc->old_authenticated,
                  hash, hashlen, object_generation_backup)) {
    install_object(rc, uri, &path, object_generation_backup);
    return;
  }

  if (!access(path.s, F_OK))
    log_validation_status(rc, uri, object_rejected, object_generation_backup);
  else if (hash && w->manifest_generation == object_generation_backup)
    log_validation_status(rc, uri, manifest_lists_missing_object, object_generation_backup);
}



/**
 * Read and check one Ghostbuster record from disk.
 */
static int check_ghostbuster_1(rcynic_ctx_t *rc,
                               STACK_OF(walk_ctx_t) *wsk,
                               const uri_t *uri,
                               path_t *path,
                               const path_t *prefix,
                               const unsigned char *hash,
                               const size_t hashlen,
                               const object_generation_t generation)
{
  CMS_ContentInfo *cms = NULL;
  BIO *bio = NULL;               /* Deliberately stays NULL, see #if 0 below */
  X509 *x;
  int result = 0;

  assert(rc && wsk && uri && path && prefix);

#if 0
  /*
   * May want this later if we're going to inspect the VCard.  For now,
   * just leave this NULL and the right thing should happen.
   */
  if ((bio = BIO_new(BIO_s_mem())) == NULL) {
    logmsg(rc, log_sys_err, "Couldn't allocate BIO for Ghostbuster record %s", uri->s);
    goto error;
  }
#endif

  if (!check_cms(rc, wsk, uri, path, prefix, &cms, &x, NULL, bio, NULL, 0,
                 NID_ct_rpkiGhostbusters, 1, generation))
    goto error;

#if 0
  /*
   * Here is where we would read the VCard from the bio returned by
   * CMS_verify() so that we could check the VCard.
   */
#endif

  result = 1;

 error:
  BIO_free(bio);
  CMS_ContentInfo_free(cms);

  return result;
}

/**
 * Check whether we already have a particular Ghostbuster record,
 * attempt to fetch it and check issuer's signature if we don't.
 */
static void check_ghostbuster(rcynic_ctx_t *rc,
                              STACK_OF(walk_ctx_t) *wsk,
                              const uri_t *uri,
                              const unsigned char *hash,
                              const size_t hashlen)
{
  walk_ctx_t *w = walk_ctx_stack_head(wsk);
  path_t path;

  assert(rc && wsk && w && uri);

  /* Same current-then-backup dance as check_roa(). */
  if (uri_to_filename(rc, uri, &path, &rc->new_authenticated) &&
      !access(path.s, F_OK))
    return;

  logmsg(rc, log_telemetry, "Checking Ghostbuster record %s", uri->s);

  if (check_ghostbuster_1(rc, wsk, uri, &path, &rc->unauthenticated,
                          hash, hashlen, object_generation_current)) {
    install_object(rc, uri, &path, object_generation_current);
    return;
  }

  if (!access(path.s, F_OK))
    log_validation_status(rc, uri, object_rejected, object_generation_current);
  else if (hash)
    log_validation_status(rc, uri, manifest_lists_missing_object, object_generation_current);

  if (check_ghostbuster_1(rc, wsk, uri, &path, &rc->old_authenticated,
                          hash, hashlen, object_generation_backup)) {
    install_object(rc, uri, &path, object_generation_backup);
    return;
  }

  if (!access(path.s, F_OK))
    log_validation_status(rc, uri, object_rejected, object_generation_backup);
  else if (hash && w->manifest_generation == object_generation_backup)
    log_validation_status(rc, uri, manifest_lists_missing_object, object_generation_backup);
}



static void walk_cert(rcynic_ctx_t *, void *);

/**
 * rsync callback for fetching SIA tree.
 */
static void rsync_sia_callback(rcynic_ctx_t *rc,
                               const rsync_ctx_t *ctx,
                               const rsync_status_t status,
                               const uri_t *uri,
                               void *cookie)
{
  STACK_OF(walk_ctx_t) *wsk = cookie;
  walk_ctx_t *w = walk_ctx_stack_head(wsk);

  assert(rc && wsk);

  /* Transfer finished (success or failure): advance and reschedule. */
  if (status != rsync_status_pending) {
    w->state++;
    task_add(rc, walk_cert, wsk);
    return;
  }

  /*
   * Transfer still pending: if we have spare fetch slots, fork a clone
   * of the stack (minus the waiting frame) so the walk can proceed
   * elsewhere in parallel.
   */
  if (rsync_count_runable(rc) >= rc->max_parallel_fetches)
    return;

  if ((wsk = walk_ctx_stack_clone(wsk)) == NULL) {
    logmsg(rc, log_sys_err,
           "walk_ctx_stack_clone() failed, probably memory exhaustion, blundering onwards without forking stack");
    return;
  }

  walk_ctx_stack_pop(wsk);
  task_add(rc, walk_cert, wsk);
}

/**
 * Recursive walk of certificate hierarchy (core of the program).
 *
 * Walk all products of the current certificate, starting with the
 * ones named in the manifest and continuing with any that we find in
 * the publication directory but which are not named in the manifest.
 *
 * Dispatch to correct checking code for the object named by URI,
 * based on the filename extension in the uri.  CRLs are a special
 * case because we've already checked them by the time we get here, so
 * we just ignore them.  Other objects are either certificates or
 * CMS-signed objects of one kind or another.
 */
static void walk_cert(rcynic_ctx_t *rc, void *cookie)
{
  STACK_OF(walk_ctx_t) *wsk = cookie;   /* Ownership passes to us */
  const unsigned char *hash = NULL;
  object_generation_t generation;
  size_t hashlen;
  walk_ctx_t *w;
  uri_t uri;

  assert(rc && wsk);

  /*
   * Explicit-stack state machine: each frame is one certificate; we
   * loop until the stack drains or we suspend awaiting an rsync.
   */
  while ((w = walk_ctx_stack_head(wsk)) != NULL) {

    /* Map walk state to object generation for logging purposes. */
    switch (w->state) {
    case walk_state_current:
      generation = object_generation_current;
      break;
    case walk_state_backup:
      generation = object_generation_backup;
      break;
    default:
      generation = object_generation_null;
      break;
    }

    switch (w->state) {

    case walk_state_initial:

      /* Not a CA or no SIA collection: nothing beneath us to walk. */
      if (!w->certinfo.sia.s[0] || !w->certinfo.ca) {
        w->state = walk_state_done;
        continue;
      }

      if (!w->certinfo.manifest.s[0]) {
        log_validation_status(rc, &w->certinfo.uri, sia_manifest_uri_missing, w->certinfo.generation);
        w->state = walk_state_done;
        continue;
      }

      w->state++;
      continue;

    case walk_state_rsync:

      if (rsync_needed(rc, wsk)) {
        /* Suspend: rsync_sia_callback() will resume this walk. */
        rsync_tree(rc, &w->certinfo.sia, wsk, rsync_sia_callback);
        return;
      }
      log_validation_status(rc, &w->certinfo.sia, rsync_transfer_skipped, object_generation_null);
      w->state++;
      continue;

    case walk_state_ready:

      walk_ctx_loop_init(rc, wsk);      /* sets w->state */
      continue;

    case walk_state_current:
    case walk_state_backup:

      if (!walk_ctx_loop_this(rc, wsk, &uri, &hash, &hashlen)) {
        walk_ctx_loop_next(rc, wsk);
        continue;
      }

      if (endswith(uri.s, ".crl") || endswith(uri.s, ".mft") || endswith(uri.s, ".mnf")) {
        walk_ctx_loop_next(rc, wsk);
        continue;               /* CRLs and manifests checked elsewhere */
      }

      if (hash == NULL && !rc->allow_object_not_in_manifest) {
        log_validation_status(rc, &uri, skipped_because_not_in_manifest, generation);
        walk_ctx_loop_next(rc, wsk);
        continue;
      }

      if (hash == NULL)
        log_validation_status(rc, &uri, tainted_by_not_being_in_manifest, generation);
      else if (w->stale_manifest)
        log_validation_status(rc, &uri, tainted_by_stale_manifest, generation);

      if (endswith(uri.s, ".roa")) {
        check_roa(rc, wsk, &uri, hash, hashlen);
        walk_ctx_loop_next(rc, wsk);
        continue;
      }

      if (endswith(uri.s, ".gbr")) {
        check_ghostbuster(rc, wsk, &uri, hash, hashlen);
        walk_ctx_loop_next(rc, wsk);
        continue;
      }

      if (endswith(uri.s, ".cer")) {
        certinfo_t certinfo;
        X509 *x = check_cert(rc, wsk, &uri, &certinfo, hash, hashlen);
        /*
         * On successful push we descend into the child; the loop
         * advances here only when the push did not happen.
         */
        if (!walk_ctx_stack_push(wsk, x, &certinfo))
          walk_ctx_loop_next(rc, wsk);
        continue;
      }

      log_validation_status(rc, &uri, unknown_object_type_skipped, object_generation_null);
      walk_ctx_loop_next(rc, wsk);
      continue;

    case walk_state_done:

      walk_ctx_stack_pop(wsk);  /* Resume our issuer's state */
      continue;

    }
  }

  assert(walk_ctx_stack_head(wsk) == NULL);
  walk_ctx_stack_free(wsk);
}

/**
 * Check a trust anchor.  Yes, we trust it, by definition, but it
 * still needs to conform to the certificate profile, the
 * self-signature must be correct, etcetera.
 *
 * Ownership of the TA certificate object passes to this function when
 * called (ie, freeing "x" is our responsibility).
- */ -static int check_ta(rcynic_ctx_t *rc, X509 *x, const uri_t *uri, - const path_t *path1, const path_t *path2, - const object_generation_t generation) -{ - STACK_OF(walk_ctx_t) *wsk = NULL; - walk_ctx_t *w = NULL; - - assert(rc && x && uri && path1 && path2); - - if (x == NULL) - return 1; - - if ((wsk = walk_ctx_stack_new()) == NULL) { - logmsg(rc, log_sys_err, "Couldn't allocate walk context stack"); - X509_free(x); - return 0; - } - - if ((w = walk_ctx_stack_push(wsk, x, NULL)) == NULL) { - logmsg(rc, log_sys_err, "Couldn't push walk context stack"); - walk_ctx_stack_free(wsk); - X509_free(x); - return 0; - } - - if (!check_x509(rc, wsk, uri, x, NULL, generation)) { - log_validation_status(rc, uri, object_rejected, generation); - walk_ctx_stack_free(wsk); - return 1; - } - - logmsg(rc, log_telemetry, "Copying trust anchor %s to %s", path1->s, path2->s); - - if (!mkdir_maybe(rc, path2) || !cp_ln(rc, path1, path2)) { - walk_ctx_stack_free(wsk); - return 0; - } - - log_validation_status(rc, uri, object_accepted, generation); - task_add(rc, walk_cert, wsk); - return 1; -} - - - -/** - * Check a trust anchor read from a local file. 
 */
static int check_ta_cer(rcynic_ctx_t *rc,
                        const char *fn)
{
  path_t path1, path2;
  unsigned long hash;
  X509 *x = NULL;
  uri_t uri;
  int i;

  assert(rc && fn);

  logmsg(rc, log_telemetry, "Processing trust anchor from file %s", fn);

  if (strlen(fn) >= sizeof(path1.s)) {
    logmsg(rc, log_usage_err, "Trust anchor path name too long %s", fn);
    return 0;
  }
  strcpy(path1.s, fn);          /* Length checked above */
  filename_to_uri(&uri, path1.s);

  if ((x = read_cert(&path1, NULL)) == NULL) {
    logmsg(rc, log_usage_err, "Couldn't read trust anchor from file %s", fn);
    log_validation_status(rc, &uri, unreadable_trust_anchor, object_generation_null);
    goto lose;
  }

  hash = X509_subject_name_hash(x);

  /*
   * Find an unused <subject-hash>.<n>.cer name in the new
   * authenticated tree for this trust anchor's copy.
   */
  for (i = 0; i < INT_MAX; i++) {
    if (snprintf(path2.s, sizeof(path2.s), "%s%lx.%d.cer",
                 rc->new_authenticated.s, hash, i) >= sizeof(path2.s)) {
      logmsg(rc, log_sys_err,
             "Couldn't construct path name for trust anchor %s", path1.s);
      goto lose;
    }
    if (access(path2.s, F_OK))
      break;                    /* Name is free */
  }
  if (i == INT_MAX) {
    logmsg(rc, log_sys_err, "Couldn't find a free name for trust anchor %s", path1.s);
    goto lose;
  }

  /* check_ta() takes ownership of x from here on. */
  return check_ta(rc, x, &uri, &path1, &path2, object_generation_null);

 lose:
  log_validation_status(rc, &uri, trust_anchor_skipped, object_generation_null);
  X509_free(x);
  return 0;
}



/**
 * Allocate a new tal_ctx_t.
 */
static tal_ctx_t *tal_ctx_t_new(void)
{
  tal_ctx_t *tctx = malloc(sizeof(*tctx));
  if (tctx)
    memset(tctx, 0, sizeof(*tctx));     /* Zero all fields (calloc-equivalent) */
  return tctx;
}

/**
 * Free a tal_ctx_t.
 */
static void tal_ctx_t_free(tal_ctx_t *tctx)
{
  if (tctx) {
    EVP_PKEY_free(tctx->pkey);
    free(tctx);
  }
}

/**
 * Read a trust anchor from disk and compare with known public key.
 *
 * NB: EVP_PKEY_cmp() returns 1 for match, not 0 like every other
 *     xyz_cmp() function in the entire OpenSSL library.  Go figure.
 */
static int check_ta_tal_callback_1(rcynic_ctx_t *rc,
                                   const tal_ctx_t *tctx,
                                   object_generation_t generation)
{
  const path_t *prefix = NULL;
  EVP_PKEY *pkey = NULL;
  X509 *x = NULL;
  path_t path;
  int ret = 0;

  /* Pick the tree matching the requested generation. */
  switch (generation) {
  case object_generation_current:
    prefix = &rc->unauthenticated;
    break;
  case object_generation_backup:
    prefix = &rc->old_authenticated;
    break;
  default:
    goto done;
  }

  if (!uri_to_filename(rc, &tctx->uri, &path, prefix)) {
    log_validation_status(rc, &tctx->uri, unreadable_trust_anchor_locator, generation);
    goto done;
  }

  if ((x = read_cert(&path, NULL)) == NULL || (pkey = X509_get_pubkey(x)) == NULL) {
    log_validation_status(rc, &tctx->uri, unreadable_trust_anchor, generation);
    goto done;
  }

  /* Note: EVP_PKEY_cmp() returns 1 on match (see comment above). */
  if (EVP_PKEY_cmp(tctx->pkey, pkey) != 1) {
    log_validation_status(rc, &tctx->uri, trust_anchor_key_mismatch, generation);
    goto done;
  }

  ret = check_ta(rc, x, &tctx->uri, &path, &tctx->path, generation);
  x = NULL;                     /* Ownership passed to check_ta() */

 done:
  if (!ret)
    log_validation_status(rc, &tctx->uri, object_rejected, generation);
  EVP_PKEY_free(pkey);
  X509_free(x);
  return ret;
}

/**
 * rsync callback for fetching a TAL.
 */
static void rsync_tal_callback(rcynic_ctx_t *rc,
                               const rsync_ctx_t *ctx,
                               const rsync_status_t status,
                               const uri_t *uri,
                               void *cookie)
{
  tal_ctx_t *tctx = cookie;

  assert(rc && tctx);

  if (status == rsync_status_pending)
    return;                     /* Not done yet, wait for the next call */

  /* Try the fresh copy first, fall back to last run's copy. */
  if (!check_ta_tal_callback_1(rc, tctx, object_generation_current) &&
      !check_ta_tal_callback_1(rc, tctx, object_generation_backup))
    log_validation_status(rc, &tctx->uri, trust_anchor_skipped, object_generation_null);

  tal_ctx_t_free(tctx);
}

/**
 * Check a trust anchor read from a trust anchor locator (TAL).
 */
static int check_ta_tal(rcynic_ctx_t *rc,
                        const char *fn)
{
  tal_ctx_t *tctx = NULL;
  BIO *bio = NULL;
  int ret = 1;                  /* TAL problems are logged, not fatal */

  assert(rc && fn);

  logmsg(rc, log_telemetry, "Processing trust anchor locator from file %s", fn);

  if ((tctx = tal_ctx_t_new()) == NULL) {
    logmsg(rc, log_sys_err, "malloc(tal_ctxt_t) failed");
    goto done;
  }

  bio = BIO_new_file(fn, "r");

  if (!bio)
    logmsg(rc, log_usage_err, "Couldn't open trust anchor locator file %s", fn);

  /* First line of the TAL is the rsync URI of the TA certificate. */
  if (!bio || BIO_gets(bio, tctx->uri.s, sizeof(tctx->uri.s)) <= 0) {
    uri_t furi;
    filename_to_uri(&furi, fn);
    log_validation_status(rc, &furi, unreadable_trust_anchor_locator, object_generation_null);
    goto done;
  }

  /* Strip trailing whitespace/newline from the URI. */
  tctx->uri.s[strcspn(tctx->uri.s, " \t\r\n")] = '\0';

  if (!uri_to_filename(rc, &tctx->uri, &tctx->path, &rc->new_authenticated)) {
    log_validation_status(rc, &tctx->uri, unreadable_trust_anchor_locator, object_generation_null);
    goto done;
  }

  if (!endswith(tctx->uri.s, ".cer")) {
    log_validation_status(rc, &tctx->uri, malformed_tal_uri, object_generation_null);
    goto done;
  }

  /*
   * Remainder of the TAL is the base64 DER public key; stack linebreak
   * and base64 filter BIOs on top of the file BIO to decode it.
   * NOTE(review): if BIO_new() fails here, BIO_push(NULL, bio) behavior
   * and the fate of the original chain depend on OpenSSL semantics --
   * the !tctx->pkey check below catches the failure, but confirm the
   * chain isn't leaked in that path.
   */
  bio = BIO_push(BIO_new(BIO_f_linebreak()), bio);
  bio = BIO_push(BIO_new(BIO_f_base64()), bio);
  if (bio)
    tctx->pkey = d2i_PUBKEY_bio(bio, NULL);
  if (!tctx->pkey) {
    log_validation_status(rc, &tctx->uri, unreadable_trust_anchor_locator, object_generation_null);
    goto done;
  }

  logmsg(rc, log_telemetry, "Processing trust anchor from URI %s", tctx->uri.s);

  rsync_ta(rc, &tctx->uri, tctx, rsync_tal_callback);
  tctx = NULL;                  /* Control has passed */

 done:
  tal_ctx_t_free(tctx);
  BIO_free_all(bio);
  return ret;
}

/**
 * Check a directory of trust anchors and trust anchor locators.
- */ -static int check_ta_dir(rcynic_ctx_t *rc, - const char *dn) -{ - DIR *dir = NULL; - struct dirent *d; - path_t path; - int is_cer, is_tal; - - assert(rc && dn); - - if ((dir = opendir(dn)) == NULL) { - logmsg(rc, log_sys_err, "Couldn't open trust anchor directory %s: %s", - dn, strerror(errno)); - return 0; - } - - while ((d = readdir(dir)) != NULL) { - if (snprintf(path.s, sizeof(path.s), "%s/%s", dn, d->d_name) >= sizeof(path.s)) { - logmsg(rc, log_data_err, "Pathname %s/%s too long", dn, d->d_name); - break; - } - is_cer = endswith(path.s, ".cer"); - is_tal = endswith(path.s, ".tal"); - if (is_cer && !check_ta_cer(rc, path.s)) - break; - if (is_tal && !check_ta_tal(rc, path.s)) - break; - if (!is_cer && !is_tal) - logmsg(rc, log_verbose, "Skipping non-trust-anchor %s", path.s); - } - - if (dir != NULL) - closedir(dir); - - return !d;; -} - - - -/** - * Write detailed log of what we've done as an XML file. - */ -static int write_xml_file(const rcynic_ctx_t *rc, - const char *xmlfile) -{ - int i, j, use_stdout, ok; - char hostname[HOSTNAME_MAX]; - mib_counter_t code; - timestamp_t ts; - FILE *f = NULL; - path_t xmltemp; - - if (xmlfile == NULL) - return 1; - - use_stdout = !strcmp(xmlfile, "-"); - - logmsg(rc, log_telemetry, "Writing XML summary to %s", - (use_stdout ? 
"standard output" : xmlfile)); - - if (use_stdout) { - f = stdout; - ok = 1; - } else if (snprintf(xmltemp.s, sizeof(xmltemp.s), "%s.%u.tmp", xmlfile, (unsigned) getpid()) >= sizeof(xmltemp.s)) { - logmsg(rc, log_usage_err, "Filename \"%s\" is too long, not writing XML", xmlfile); - return 0; - } else { - ok = (f = fopen(xmltemp.s, "w")) != NULL; - } - - ok &= gethostname(hostname, sizeof(hostname)) == 0; - - if (ok) - ok &= fprintf(f, "\n" - "\n" - " \n", - time_to_string(&ts, NULL), - svn_id, XML_SUMMARY_VERSION, hostname) != EOF; - - for (j = 0; ok && j < MIB_COUNTER_T_MAX; ++j) - ok &= fprintf(f, " <%s kind=\"%s\">%s\n", - mib_counter_label[j], mib_counter_kind[j], - (mib_counter_desc[j] - ? mib_counter_desc[j] - : X509_verify_cert_error_string(mib_counter_openssl[j])), - mib_counter_label[j]) != EOF; - - if (ok) - ok &= fprintf(f, " \n") != EOF; - - for (i = 0; ok && i < sk_validation_status_t_num(rc->validation_status); i++) { - validation_status_t *v = sk_validation_status_t_value(rc->validation_status, i); - assert(v); - - (void) time_to_string(&ts, &v->timestamp); - - for (code = (mib_counter_t) 0; ok && code < MIB_COUNTER_T_MAX; code++) { - if (validation_status_get_code(v, code)) { - if (ok) - ok &= fprintf(f, " generation == object_generation_current || - v->generation == object_generation_backup)) - ok &= fprintf(f, " generation=\"%s\"", - object_generation_label[v->generation]) != EOF; - if (ok) - ok &= fprintf(f, ">%s\n", v->uri.s) != EOF; - } - } - } - - for (i = 0; ok && i < sk_rsync_history_t_num(rc->rsync_history); i++) { - rsync_history_t *h = sk_rsync_history_t_value(rc->rsync_history, i); - assert(h); - - if (ok) - ok &= fprintf(f, " started) - ok &= fprintf(f, " started=\"%s\"", - time_to_string(&ts, &h->started)) != EOF; - if (ok && h->finished) - ok &= fprintf(f, " finished=\"%s\"", - time_to_string(&ts, &h->finished)) != EOF; - if (ok && h->status != rsync_status_done) - ok &= fprintf(f, " error=\"%u\"", (unsigned) h->status) != EOF; - if 
(ok) - ok &= fprintf(f, ">%s%s\n", - h->uri.s, (h->final_slash ? "/" : "")) != EOF; - } - - if (ok) - ok &= fprintf(f, "\n") != EOF; - - if (f && !use_stdout) - ok &= fclose(f) != EOF; - - if (ok && !use_stdout) - ok &= rename(xmltemp.s, xmlfile) == 0; - - if (!ok) - logmsg(rc, log_sys_err, "Couldn't write XML summary to %s: %s", - (use_stdout ? "standard output" : xmlfile), strerror(errno)); - - if (!ok && !use_stdout) - (void) unlink(xmltemp.s); - - return ok; -} - - - -/** - * Long options. - */ -#define OPTIONS \ - QA('a', "authenticated", "root of authenticated data tree") \ - QA('c', "config", "override default name of config file") \ - QF('h', "help", "print this help message") \ - QA('j', "jitter", "set jitter value") \ - QA('l', "log-level", "set log level") \ - QA('u', "unauthenticated", "root of unauthenticated data tree") \ - QF('e', "use-stderr", "log to syslog") \ - QF('s', "use-syslog", "log to stderr") \ - QF('V', "version", "print program version") \ - QA('x', "xml-file", "set XML output file location") - -const static struct option longopts[] = { - { "authenticated", required_argument, NULL, 'a' }, - { "config", required_argument, NULL, 'c' }, - { "help", no_argument, NULL, 'h' }, - { "jitter", required_argument, NULL, 'j' }, - { "log-level", required_argument, NULL, 'l' }, - { "unauthenticated", required_argument, NULL, 'u' }, - { "use-stderr", no_argument, NULL, 'e' }, - { "use-syslog", no_argument, NULL, 's' }, - { "version", no_argument, NULL, 'V' }, - { "xml-file", required_argument, NULL, 'x' }, - { NULL } -}; - -/** - * Wrapper around printf() to take arguments like logmsg(). - * If C had closures, usage() would use them instead of this silliness. - */ -static void logmsg_printf(const rcynic_ctx_t *rc, - const log_level_t level, - const char *fmt, ...) -{ - va_list ap; - va_start(ap, fmt); - vprintf(fmt, ap); - putchar('\n'); - va_end(ap); -} - -/** - * Log usage message, either to stdout (for --help) or via logmsg(). 
- */ -static void usage (const rcynic_ctx_t *rc, const char *jane) -{ - void (*log)(const rcynic_ctx_t *, const log_level_t, const char *, ...) = rc ? logmsg : logmsg_printf; - char left[80]; - - if (rc && !jane) - jane = rc->jane; - - log(rc, log_usage_err, "usage: %s [options]", jane); - log(rc, log_usage_err, "options:"); - -#define QF(_s_, _l_, _d_) \ - (void) snprintf(left, sizeof(left), "-%c --%-32s", _s_, _l_); \ - log(rc, log_usage_err, " %s%s", left, _d_); - -#define QA(_s_, _l_, _d_) \ - (void) snprintf(left, sizeof(left), "-%c ARG --%-32s", _s_, _l_ " ARG"); \ - log(rc, log_usage_err, " %s%s", left, _d_); - - OPTIONS; - -#undef QA -#undef QF -} - -/** - * Main program. Parse command line, read config file, iterate over - * trust anchors found via config file and do a tree walk for each - * trust anchor. - */ -int main(int argc, char *argv[]) -{ - int opt_jitter = 0, use_syslog = 0, use_stderr = 0, syslog_facility = 0; - int opt_syslog = 0, opt_stderr = 0, opt_level = 0, prune = 1; - int opt_auth = 0, opt_unauth = 0, keep_lockfile = 0; - char *lockfile = NULL, *xmlfile = NULL; - char *cfg_file = "rcynic.conf"; - int c, i, ret = 1, jitter = 600, lockfd = -1; - STACK_OF(CONF_VALUE) *cfg_section = NULL; - CONF *cfg_handle = NULL; - time_t start = 0, finish; - rcynic_ctx_t rc; - unsigned delay; - long eline = 0; - path_t ta_dir; - -#define QF(_s_, _l_, _d_) _s_, -#define QA(_s_, _l_, _d_) _s_, ':', - - const static char short_opts[] = { OPTIONS '\0' }; - -#undef QA -#undef QF - -#define QF(_s_, _l_, _d_) { _l_, no_argument, NULL, _s_ }, -#define QA(_s_, _l_, _d_) { _l_, required_argument, NULL, _s_ }, - - static struct option long_opts[] = { OPTIONS { NULL } }; - -#undef QA -#undef QF - - memset(&rc, 0, sizeof(rc)); - - if ((rc.jane = strrchr(argv[0], '/')) == NULL) - rc.jane = argv[0]; - else - rc.jane++; - - rc.log_level = log_data_err; - rc.allow_stale_crl = 1; - rc.allow_stale_manifest = 1; - rc.allow_digest_mismatch = 1; - rc.allow_crl_digest_mismatch = 
1; - rc.allow_nonconformant_name = 1; - rc.allow_ee_without_signedObject = 1; - rc.allow_1024_bit_ee_key = 1; - rc.allow_wrong_cms_si_attributes = 1; - rc.max_parallel_fetches = 1; - rc.max_retries = 3; - rc.retry_wait_min = 30; - rc.run_rsync = 1; - rc.rsync_timeout = 300; - rc.max_select_time = 30; - rc.rsync_early = 1; - -#define QQ(x,y) rc.priority[x] = y; - LOG_LEVELS; -#undef QQ - - if (!set_directory(&rc, &rc.authenticated, "rcynic-data/authenticated", 0) || - !set_directory(&rc, &rc.unauthenticated, "rcynic-data/unauthenticated/", 1)) - goto done; - - OpenSSL_add_all_algorithms(); - ERR_load_crypto_strings(); - - if (!create_missing_nids()) { - logmsg(&rc, log_sys_err, "Couldn't initialize missing OIDs!"); - goto done; - } - - memset(&ta_dir, 0, sizeof(ta_dir)); - - opterr = 0; - - while ((c = getopt_long(argc, argv, short_opts, long_opts, NULL)) > 0) { - switch (c) { - case 'a': - opt_auth = 1; - if (!set_directory(&rc, &rc.authenticated, optarg, 0)) - goto done; - break; - case 'c': - cfg_file = optarg; - break; - case 'l': - opt_level = 1; - if (!configure_logmsg(&rc, optarg)) - goto done; - break; - case 's': - use_syslog = opt_syslog = 1; - break; - case 'e': - use_stderr = opt_stderr = 1; - break; - case 'h': - usage(NULL, rc.jane); - ret = 0; - goto done; - case 'j': - if (!configure_integer(&rc, &jitter, optarg)) - goto done; - opt_jitter = 1; - break; - case 'u': - opt_unauth = 1; - if (!set_directory(&rc, &rc.unauthenticated, optarg, 1)) - goto done; - break; - case 'V': - puts(svn_id); - ret = 0; - goto done; - case 'x': - xmlfile = strdup(optarg); - break; - default: - usage(&rc, NULL); - goto done; - } - } - - if (!(asn1_zero = s2i_ASN1_INTEGER(NULL, "0x0")) || - !(asn1_four_octets = s2i_ASN1_INTEGER(NULL, "0xFFFFFFFF")) || - !(asn1_twenty_octets = s2i_ASN1_INTEGER(NULL, "0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF")) || - !(NID_binary_signing_time = OBJ_create("1.2.840.113549.1.9.16.2.46", - "id-aa-binarySigningTime", - 
"id-aa-binarySigningTime"))) { - logmsg(&rc, log_sys_err, "Couldn't initialize ASN.1 constants!"); - goto done; - } - - if ((cfg_handle = NCONF_new(NULL)) == NULL) { - logmsg(&rc, log_sys_err, "Couldn't create CONF opbject"); - goto done; - } - - if (NCONF_load(cfg_handle, cfg_file, &eline) <= 0) { - if (eline <= 0) - logmsg(&rc, log_usage_err, "Couldn't load config file %s", cfg_file); - else - logmsg(&rc, log_usage_err, "Error on line %ld of config file %s", eline, cfg_file); - goto done; - } - - if (CONF_modules_load(cfg_handle, NULL, 0) <= 0) { - logmsg(&rc, log_sys_err, "Couldn't configure OpenSSL"); - goto done; - } - - if ((cfg_section = NCONF_get_section(cfg_handle, "rcynic")) == NULL) { - logmsg(&rc, log_usage_err, "Couldn't load rcynic section from config file"); - goto done; - } - - for (i = 0; i < sk_CONF_VALUE_num(cfg_section); i++) { - CONF_VALUE *val = sk_CONF_VALUE_value(cfg_section, i); - - assert(val && val->name && val->value); - - if (!opt_auth && - !name_cmp(val->name, "authenticated") && - !set_directory(&rc, &rc.authenticated, val->value, 0)) - goto done; - - else if (!opt_unauth && - !name_cmp(val->name, "unauthenticated") && - !set_directory(&rc, &rc.unauthenticated, val->value, 1)) - goto done; - - else if (!name_cmp(val->name, "trust-anchor-directory") && - !set_directory(&rc, &ta_dir, val->value, 0)) - goto done; - - else if (!name_cmp(val->name, "rsync-timeout") && - !configure_integer(&rc, &rc.rsync_timeout, val->value)) - goto done; - - else if (!name_cmp(val->name, "max-parallel-fetches") && - !configure_integer(&rc, &rc.max_parallel_fetches, val->value)) - goto done; - - else if (!name_cmp(val->name, "max-select-time") && - !configure_unsigned_integer(&rc, &rc.max_select_time, val->value)) - goto done; - - else if (!name_cmp(val->name, "rsync-program")) - rc.rsync_program = strdup(val->value); - - else if (!name_cmp(val->name, "lockfile")) - lockfile = strdup(val->value); - - else if (!name_cmp(val->name, "keep-lockfile") && - 
!configure_boolean(&rc, &keep_lockfile, val->value)) - goto done; - - else if (!opt_jitter && - !name_cmp(val->name, "jitter") && - !configure_integer(&rc, &jitter, val->value)) - goto done; - - else if (!opt_level && - !name_cmp(val->name, "log-level") && - !configure_logmsg(&rc, val->value)) - goto done; - - else if (!opt_syslog && - !name_cmp(val->name, "use-syslog") && - !configure_boolean(&rc, &use_syslog, val->value)) - goto done; - - else if (!opt_stderr && - !name_cmp(val->name, "use-stderr") && - !configure_boolean(&rc, &use_stderr, val->value)) - goto done; - - else if (!name_cmp(val->name, "syslog-facility") && - !configure_syslog(&rc, &syslog_facility, - facilitynames, val->value)) - goto done; - - else if (!xmlfile && - (!name_cmp(val->name, "xml-file") || - !name_cmp(val->name, "xml-summary"))) - xmlfile = strdup(val->value); - - else if (!name_cmp(val->name, "allow-stale-crl") && - !configure_boolean(&rc, &rc.allow_stale_crl, val->value)) - goto done; - - else if (!name_cmp(val->name, "allow-stale-manifest") && - !configure_boolean(&rc, &rc.allow_stale_manifest, val->value)) - goto done; - - else if (!name_cmp(val->name, "allow-non-self-signed-trust-anchor") && - !configure_boolean(&rc, &rc.allow_non_self_signed_trust_anchor, val->value)) - goto done; - - else if (!name_cmp(val->name, "require-crl-in-manifest") && - !configure_boolean(&rc, &rc.require_crl_in_manifest, val->value)) - goto done; - - else if (!name_cmp(val->name, "allow-object-not-in-manifest") && - !configure_boolean(&rc, &rc.allow_object_not_in_manifest, val->value)) - goto done; - - else if (!name_cmp(val->name, "allow-digest-mismatch") && - !configure_boolean(&rc, &rc.allow_digest_mismatch, val->value)) - goto done; - - else if (!name_cmp(val->name, "allow-crl-digest-mismatch") && - !configure_boolean(&rc, &rc.allow_crl_digest_mismatch, val->value)) - goto done; - - else if (!name_cmp(val->name, "use-links") && - !configure_boolean(&rc, &rc.use_links, val->value)) - goto done; - - 
else if (!name_cmp(val->name, "prune") && - !configure_boolean(&rc, &prune, val->value)) - goto done; - - else if (!name_cmp(val->name, "run-rsync") && - !configure_boolean(&rc, &rc.run_rsync, val->value)) - goto done; - - else if (!name_cmp(val->name, "allow-nonconformant-name") && - !configure_boolean(&rc, &rc.allow_nonconformant_name, val->value)) - goto done; - - else if (!name_cmp(val->name, "allow-ee-without-signedObject") && - !configure_boolean(&rc, &rc.allow_ee_without_signedObject, val->value)) - goto done; - - else if (!name_cmp(val->name, "allow-1024-bit-ee-key") && - !configure_boolean(&rc, &rc.allow_1024_bit_ee_key, val->value)) - goto done; - - else if (!name_cmp(val->name, "allow-wrong-cms-si-attributes") && - !configure_boolean(&rc, &rc.allow_wrong_cms_si_attributes, val->value)) - goto done; - - else if (!name_cmp(val->name, "rsync-early") && - !configure_boolean(&rc, &rc.rsync_early, val->value)) - goto done; - - /* - * Ugly, but the easiest way to handle all these strings. 
- */ - -#define QQ(x,y) \ - else if (!name_cmp(val->name, "syslog-priority-" #x) && \ - !configure_syslog(&rc, &rc.priority[x], \ - prioritynames, val->value)) \ - goto done; - - LOG_LEVELS; /* the semicolon is for emacs */ - -#undef QQ - - } - - if ((rc.rsync_history = sk_rsync_history_t_new(rsync_history_cmp)) == NULL) { - logmsg(&rc, log_sys_err, "Couldn't allocate rsync_history stack"); - goto done; - } - - if ((rc.validation_status = sk_validation_status_t_new_null()) == NULL) { - logmsg(&rc, log_sys_err, "Couldn't allocate validation_status stack"); - goto done; - } - - if ((rc.x509_store = X509_STORE_new()) == NULL) { - logmsg(&rc, log_sys_err, "Couldn't allocate X509_STORE"); - goto done; - } - - if ((rc.rsync_queue = sk_rsync_ctx_t_new_null()) == NULL) { - logmsg(&rc, log_sys_err, "Couldn't allocate rsync_queue"); - goto done; - } - - if ((rc.task_queue = sk_task_t_new_null()) == NULL) { - logmsg(&rc, log_sys_err, "Couldn't allocate task_queue"); - goto done; - } - - rc.use_syslog = use_syslog; - - if (use_syslog) - openlog(rc.jane, - LOG_PID | (use_stderr ? LOG_PERROR : 0), - (syslog_facility ? 
syslog_facility : LOG_LOCAL0)); - - if (jitter > 0) { - if (RAND_bytes((unsigned char *) &delay, sizeof(delay)) <= 0) { - logmsg(&rc, log_sys_err, "Couldn't read random bytes"); - goto done; - } - delay %= jitter; - logmsg(&rc, log_telemetry, "Delaying %u seconds before startup", delay); - while (delay > 0) - delay = sleep(delay); - } - - if (lockfile && - ((lockfd = open(lockfile, O_RDWR|O_CREAT|O_NONBLOCK, 0666)) < 0 || - lockf(lockfd, F_TLOCK, 0) < 0)) { - if (lockfd >= 0 && errno == EAGAIN) - logmsg(&rc, log_telemetry, "Lock %s held by another process", lockfile); - else - logmsg(&rc, log_sys_err, "Problem locking %s: %s", lockfile, strerror(errno)); - lockfd = -1; - goto done; - } - - start = time(0); - logmsg(&rc, log_telemetry, "Starting"); - - if (!construct_directory_names(&rc)) - goto done; - - if (!access(rc.new_authenticated.s, F_OK)) { - logmsg(&rc, log_sys_err, - "Timestamped output directory %s already exists! Clock went backwards?", - rc.new_authenticated.s); - goto done; - } - - if (!mkdir_maybe(&rc, &rc.new_authenticated)) { - logmsg(&rc, log_sys_err, "Couldn't prepare directory %s: %s", - rc.new_authenticated.s, strerror(errno)); - goto done; - } - - for (i = 0; i < sk_CONF_VALUE_num(cfg_section); i++) { - CONF_VALUE *val = sk_CONF_VALUE_value(cfg_section, i); - - assert(val && val->name && val->value); - - if (!name_cmp(val->name, "trust-anchor-uri-with-key") || - !name_cmp(val->name, "indirect-trust-anchor")) { - logmsg(&rc, log_usage_err, - "Directive \"%s\" is obsolete -- please use \"trust-anchor-locator\" instead", - val->name); - goto done; - } - - if ((!name_cmp(val->name, "trust-anchor") && !check_ta_cer(&rc, val->value)) || - (!name_cmp(val->name, "trust-anchor-locator") && !check_ta_tal(&rc, val->value))) - goto done; - } - - if (*ta_dir.s != '\0' && !check_ta_dir(&rc, ta_dir.s)) - goto done; - - while (sk_task_t_num(rc.task_queue) > 0 || sk_rsync_ctx_t_num(rc.rsync_queue) > 0) { - task_run_q(&rc); - rsync_mgr(&rc); - } - - logmsg(&rc, 
log_telemetry, "Event loop done, beginning final output and cleanup"); - - if (!finalize_directories(&rc)) - goto done; - - if (prune && rc.run_rsync && - !prune_unauthenticated(&rc, &rc.unauthenticated, - strlen(rc.unauthenticated.s))) { - logmsg(&rc, log_sys_err, "Trouble pruning old unauthenticated data"); - goto done; - } - - if (!write_xml_file(&rc, xmlfile)) - goto done; - - ret = 0; - - done: - log_openssl_errors(&rc); - - /* - * Do NOT free cfg_section, NCONF_free() takes care of that - */ - sk_validation_status_t_pop_free(rc.validation_status, validation_status_t_free); - sk_rsync_history_t_pop_free(rc.rsync_history, rsync_history_t_free); - validation_status_t_free(rc.validation_status_in_waiting); - X509_STORE_free(rc.x509_store); - NCONF_free(cfg_handle); - CONF_modules_free(); - EVP_cleanup(); - ERR_free_strings(); - if (rc.rsync_program) - free(rc.rsync_program); - if (lockfile && lockfd >= 0 && !keep_lockfile) - unlink(lockfile); - if (lockfile) - free(lockfile); - if (xmlfile) - free(xmlfile); - - if (start) { - finish = time(0); - logmsg(&rc, log_telemetry, - "Finished, elapsed time %u:%02u:%02u", - (unsigned) ((finish - start) / 3600), - (unsigned) ((finish - start) / 60 % 60), - (unsigned) ((finish - start) % 60)); - } - - return ret; -} diff --git a/rcynic/rcynic.xsl b/rcynic/rcynic.xsl deleted file mode 100644 index 487cd2f6..00000000 --- a/rcynic/rcynic.xsl +++ /dev/null @@ -1,312 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - Generators - - $Id$ - - - rcynic summary - - - - - <xsl:value-of select="$title"/> - - - - - - - -

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1 - - - 0 - - - - - - - - - - - - -
    -

    Grand Totals

    - - - - - - - - - - - - - - - - - -
    - -
    Total
    -
    - - -
    -

    Summaries by Repository Host

    - - - -
    -

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    - -
    - - - - - -
    Total - - - - - -
    -
    - - - -
    -

    Problems

    - - - - - - - - - - - - - - - - - - - -
    StatusURI
    -
    -
    - - - -
    -

    Validation Status

    - - - - - - - - - - - - - - - - - - - - - -
    TimestampGenerationStatusURI
    -
    - - - -
    - -
    - - diff --git a/rcynic/rpki-torrent.py b/rcynic/rpki-torrent.py deleted file mode 100644 index 9b97f298..00000000 --- a/rcynic/rpki-torrent.py +++ /dev/null @@ -1,721 +0,0 @@ -#!/usr/local/bin/python - -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR -# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -import urllib2 -import httplib -import socket -import ssl -import urlparse -import zipfile -import sys -import os -import email.utils -import base64 -import hashlib -import subprocess -import syslog -import traceback -import ConfigParser -import stat -import time -import errno -import fcntl -import argparse -import smtplib -import email.mime.text - -import transmissionrpc - -tr_env_vars = ("TR_TORRENT_DIR", "TR_TORRENT_ID", "TR_TORRENT_NAME") - -class WrongServer(Exception): - "Hostname not in X.509v3 subjectAltName extension." - -class UnexpectedRedirect(Exception): - "Unexpected HTTP redirect." - -class WrongMode(Exception): - "Wrong operation for mode." - -class BadFormat(Exception): - "Zip file does not match our expectations." - -class InconsistentEnvironment(Exception): - "Environment variables received from Transmission aren't consistent." 
- -class TorrentNotReady(Exception): - "Torrent is not ready for checking." - -class TorrentDoesNotMatchManifest(Exception): - "Retrieved torrent does not match manifest." - -class TorrentNameDoesNotMatchURL(Exception): - "Torrent name doesn't uniquely match a URL." - -class CouldNotFindTorrents(Exception): - "Could not find torrent(s) with given name(s)." - -class UseTheSourceLuke(Exception): - "Use The Source, Luke." - -def main(): - try: - syslog_flags = syslog.LOG_PID; - if os.isatty(sys.stderr.fileno()): - syslog_flags |= syslog.LOG_PERROR - syslog.openlog("rpki-torrent", syslog_flags) - - parser = argparse.ArgumentParser(description = __doc__) - parser.add_argument("-c", "--config", - help = "configuration file") - args = parser.parse_args() - - global cfg - cfg = MyConfigParser() - cfg.read(args.config or - [os.path.join(dn, fn) - for fn in ("rcynic.conf", "rpki.conf") - for dn in ("/var/rcynic/etc", "/usr/local/etc", "/etc")]) - - if cfg.act_as_generator: - if len(argv) == 1 and argv[0] == "generate": - generator_main() - elif len(argv) == 1 and argv[0] == "mirror": - mirror_main() - else: - raise UseTheSourceLuke - - else: - if len(argv) == 0 and all(v in os.environ for v in tr_env_vars): - torrent_completion_main() - elif len(argv) == 1 and argv[0] == "poll": - poll_main() - else: - raise UseTheSourceLuke - - except Exception, e: - for line in traceback.format_exc().splitlines(): - syslog.syslog(line) - sys.exit(1) - - -def generator_main(): - import paramiko - - class SFTPClient(paramiko.SFTPClient): - def atomic_rename(self, oldpath, newpath): - oldpath = self._adjust_cwd(oldpath) - newpath = self._adjust_cwd(newpath) - self._log(paramiko.common.DEBUG, 'atomic_rename(%r, %r)' % (oldpath, newpath)) - self._request(paramiko.sftp.CMD_EXTENDED, "posix-rename@openssh.com", oldpath, newpath) - - z = ZipFile(url = cfg.generate_url, dir = cfg.zip_dir) - client = TransmissionClient() - - client.remove_torrents(z.torrent_name) - - download_dir = 
client.get_session().download_dir - torrent_dir = os.path.join(download_dir, z.torrent_name) - torrent_file = os.path.join(cfg.zip_dir, z.torrent_name + ".torrent") - - - syslog.syslog("Synchronizing local data from %s to %s" % (cfg.unauthenticated, torrent_dir)) - subprocess.check_call((cfg.rsync_prog, "--archive", "--delete", - os.path.normpath(cfg.unauthenticated) + "/", - os.path.normpath(torrent_dir) + "/")) - - syslog.syslog("Creating %s" % torrent_file) - try: - os.unlink(torrent_file) - except OSError, e: - if e.errno != errno.ENOENT: - raise - ignore_output_for_now = subprocess.check_output( - (cfg.mktorrent_prog, - "-a", cfg.tracker_url, - "-c", "RPKI unauthenticated data snapshot generated by rpki-torrent", - "-o", torrent_file, - torrent_dir)) - - syslog.syslog("Generating manifest") - manifest = create_manifest(download_dir, z.torrent_name) - - syslog.syslog("Loading %s with unlimited seeding" % torrent_file) - f = open(torrent_file, "rb") - client.add(base64.b64encode(f.read())) - f.close() - client.unlimited_seeding(z.torrent_name) - - syslog.syslog("Creating upload connection") - ssh = paramiko.Transport((cfg.sftp_host, cfg.sftp_port)) - try: - hostkeys = paramiko.util.load_host_keys(cfg.sftp_hostkey_file)[cfg.sftp_host]["ssh-rsa"] - except ConfigParser.Error: - hostkeys = None - ssh.connect( - username = cfg.sftp_user, - hostkey = hostkeys, - pkey = paramiko.RSAKey.from_private_key_file(cfg.sftp_private_key_file)) - sftp = SFTPClient.from_transport(ssh) - - zip_filename = os.path.join("data", os.path.basename(z.filename)) - zip_tempname = zip_filename + ".new" - - syslog.syslog("Creating %s" % zip_tempname) - f = sftp.open(zip_tempname, "wb") - z.set_output_stream(f) - - syslog.syslog("Writing %s to zip" % torrent_file) - z.write( - torrent_file, - arcname = os.path.basename(torrent_file), - compress_type = zipfile.ZIP_DEFLATED) - - manifest_name = z.torrent_name + ".manifest" - - syslog.syslog("Writing %s to zip" % manifest_name) - zi = 
zipfile.ZipInfo(manifest_name, time.gmtime()[:6]) - zi.external_attr = (stat.S_IFREG | 0644) << 16 - zi.internal_attr = 1 # Text, not binary - z.writestr(zi, - "".join("%s %s\n" % (v, k) for k, v in manifest.iteritems()), - zipfile.ZIP_DEFLATED) - - syslog.syslog("Closing %s and renaming to %s" % (zip_tempname, zip_filename)) - z.close() - f.close() - sftp.atomic_rename(zip_tempname, zip_filename) - - syslog.syslog("Closing upload connection") - ssh.close() - -def mirror_main(): - client = TransmissionClient() - torrent_names = [] - - for zip_url in cfg.zip_urls: - if zip_url != cfg.generate_url: - z = ZipFile(url = zip_url, dir = cfg.zip_dir, ta = cfg.zip_ta) - if z.fetch(): - client.remove_torrents(z.torrent_name) - syslog.syslog("Mirroring torrent %s" % z.torrent_name) - client.add(z.get_torrent()) - torrent_names.append(z.torrent_name) - - if torrent_names: - client.unlimited_seeding(*torrent_names) - - -def poll_main(): - for zip_url in cfg.zip_urls: - - z = ZipFile(url = zip_url, dir = cfg.zip_dir, ta = cfg.zip_ta) - client = TransmissionClient() - - if z.fetch(): - client.remove_torrents(z.torrent_name) - syslog.syslog("Adding torrent %s" % z.torrent_name) - client.add(z.get_torrent()) - - elif cfg.run_rcynic_anyway: - run_rcynic(client, z) - - -def torrent_completion_main(): - torrent_name = os.getenv("TR_TORRENT_NAME") - torrent_id = int(os.getenv("TR_TORRENT_ID")) - - z = ZipFile(url = cfg.find_url(torrent_name), dir = cfg.zip_dir, ta = cfg.zip_ta) - client = TransmissionClient() - torrent = client.info([torrent_id]).popitem()[1] - - if torrent.name != torrent_name: - raise InconsistentEnvironment("Torrent name %s does not match ID %d" % (torrent_name, torrent_id)) - - if z.torrent_name != torrent_name: - raise InconsistentEnvironment("Torrent name %s does not match torrent name in zip file %s" % (torrent_name, z.torrent_name)) - - if torrent is None or torrent.progress != 100: - raise TorrentNotReady("Torrent %s not ready for checking, how did I get 
here?" % torrent_name) - - log_email("Download complete %s" % z.url) - - run_rcynic(client, z) - - -def run_rcynic(client, z): - """ - Run rcynic and any post-processing we might want. - """ - - if cfg.lockfile is not None: - syslog.syslog("Acquiring lock %s" % cfg.lockfile) - lock = os.open(cfg.lockfile, os.O_WRONLY | os.O_CREAT, 0600) - fcntl.flock(lock, fcntl.LOCK_EX) - else: - lock = None - - syslog.syslog("Checking manifest against disk") - - download_dir = client.get_session().download_dir - - manifest_from_disk = create_manifest(download_dir, z.torrent_name) - manifest_from_zip = z.get_manifest() - - excess_files = set(manifest_from_disk) - set(manifest_from_zip) - for fn in excess_files: - del manifest_from_disk[fn] - - if manifest_from_disk != manifest_from_zip: - raise TorrentDoesNotMatchManifest("Manifest for torrent %s does not match what we got" % - z.torrent_name) - - if excess_files: - syslog.syslog("Cleaning up excess files") - for fn in excess_files: - os.unlink(os.path.join(download_dir, fn)) - - syslog.syslog("Running rcynic") - log_email("Starting rcynic %s" % z.url) - subprocess.check_call((cfg.rcynic_prog, - "-c", cfg.rcynic_conf, - "-u", os.path.join(client.get_session().download_dir, z.torrent_name))) - log_email("Completed rcynic %s" % z.url) - - for cmd in cfg.post_rcynic_commands: - syslog.syslog("Running post-rcynic command: %s" % cmd) - subprocess.check_call(cmd, shell = True) - - if lock is not None: - syslog.syslog("Releasing lock %s" % cfg.lockfile) - os.close(lock) - -# See http://www.minstrel.org.uk/papers/sftp/ for details on how to -# set up safe upload-only SFTP directories on the server. In -# particular http://www.minstrel.org.uk/papers/sftp/builtin/ is likely -# to be the right path. - - -class ZipFile(object): - """ - Augmented version of standard python zipfile.ZipFile class, with - some extra methods and specialized capabilities. 
- - All methods of the standard zipfile.ZipFile class are supported, but - the constructor arguments are different, and opening the zip file - itself is deferred until a call which requires this, since the file - may first need to be fetched via HTTPS. - """ - - def __init__(self, url, dir, ta = None, verbose = True): - self.url = url - self.dir = dir - self.ta = ta - self.verbose = verbose - self.filename = os.path.join(dir, os.path.basename(url)) - self.changed = False - self.zf = None - self.peercert = None - self.torrent_name, zip_ext = os.path.splitext(os.path.basename(url)) - if zip_ext != ".zip": - raise BadFormat - - - def __getattr__(self, name): - if self.zf is None: - self.zf = zipfile.ZipFile(self.filename) - return getattr(self.zf, name) - - - def build_opener(self): - """ - Voodoo to create a urllib2.OpenerDirector object with TLS - certificate checking enabled and a hook to set self.peercert so - our caller can check the subjectAltName field. - - You probably don't want to look at this if you can avoid it. - """ - - assert self.ta is not None - - # Yes, we're constructing one-off classes. Look away, look away. - - class HTTPSConnection(httplib.HTTPSConnection): - zip = self - def connect(self): - sock = socket.create_connection((self.host, self.port), self.timeout) - if getattr(self, "_tunnel_host", None): - self.sock = sock - self._tunnel() - self.sock = ssl.wrap_socket(sock, - keyfile = self.key_file, - certfile = self.cert_file, - cert_reqs = ssl.CERT_REQUIRED, - ssl_version = ssl.PROTOCOL_TLSv1, - ca_certs = self.zip.ta) - self.zip.peercert = self.sock.getpeercert() - - class HTTPSHandler(urllib2.HTTPSHandler): - def https_open(self, req): - return self.do_open(HTTPSConnection, req) - - return urllib2.build_opener(HTTPSHandler) - - - def check_subjectAltNames(self): - """ - Check self.peercert against URL to make sure we were talking to - the right HTTPS server. 
- """ - - hostname = urlparse.urlparse(self.url).hostname - subjectAltNames = set(i[1] - for i in self.peercert.get("subjectAltName", ()) - if i[0] == "DNS") - if hostname not in subjectAltNames: - raise WrongServer - - - def download_file(self, r, bufsize = 4096): - """ - Downloaded file to disk. - """ - - tempname = self.filename + ".new" - f = open(tempname, "wb") - n = int(r.info()["Content-Length"]) - for i in xrange(0, n - bufsize, bufsize): - f.write(r.read(bufsize)) - f.write(r.read()) - f.close() - mtime = email.utils.mktime_tz(email.utils.parsedate_tz(r.info()["Last-Modified"])) - os.utime(tempname, (mtime, mtime)) - os.rename(tempname, self.filename) - - - def set_output_stream(self, stream): - """ - Set up this zip file for writing to a network stream. - """ - - assert self.zf is None - self.zf = zipfile.ZipFile(stream, "w") - - - def fetch(self): - """ - Fetch zip file from URL given to constructor. - """ - - headers = { "User-Agent" : "rpki-torrent" } - try: - headers["If-Modified-Since"] = email.utils.formatdate( - os.path.getmtime(self.filename), False, True) - except OSError: - pass - - syslog.syslog("Checking %s..." 
% self.url) - try: - r = self.build_opener().open(urllib2.Request(self.url, None, headers)) - syslog.syslog("%s has changed, starting download" % self.url) - self.changed = True - log_email("Downloading %s" % self.url) - except urllib2.HTTPError, e: - if e.code == 304: - syslog.syslog("%s has not changed" % self.url) - elif e.code == 404: - syslog.syslog("%s does not exist" % self.url) - else: - raise - r = None - - self.check_subjectAltNames() - - if r is not None and r.geturl() != self.url: - raise UnexpectedRedirect - - if r is not None: - self.download_file(r) - r.close() - - return self.changed - - - def check_format(self): - """ - Make sure that format of zip file matches our preconceptions: it - should contain two files, one of which is the .torrent file, the - other is the manifest, with names derived from the torrent name - inferred from the URL. - """ - - if set(self.namelist()) != set((self.torrent_name + ".torrent", self.torrent_name + ".manifest")): - raise BadFormat - - - def get_torrent(self): - """ - Extract torrent file from zip file, encoded in Base64 because - that's what the transmisionrpc library says it wants. - """ - - self.check_format() - return base64.b64encode(self.read(self.torrent_name + ".torrent")) - - - def get_manifest(self): - """ - Extract manifest from zip file, as a dictionary. - - For the moment we're fixing up the internal file names from the - format that the existing shell-script prototype uses, but this - should go away once this program both generates and checks the - manifests. - """ - - self.check_format() - result = {} - for line in self.open(self.torrent_name + ".manifest"): - h, fn = line.split() - # - # Fixup for earlier manifest format, this should go away - if not fn.startswith(self.torrent_name): - fn = os.path.normpath(os.path.join(self.torrent_name, fn)) - # - result[fn] = h - return result - - -def create_manifest(topdir, torrent_name): - """ - Generate a manifest, expressed as a dictionary. 
- """ - - result = {} - topdir = os.path.abspath(topdir) - for dirpath, dirnames, filenames in os.walk(os.path.join(topdir, torrent_name)): - for filename in filenames: - filename = os.path.join(dirpath, filename) - f = open(filename, "rb") - result[os.path.relpath(filename, topdir)] = hashlib.sha256(f.read()).hexdigest() - f.close() - return result - - -def log_email(msg, subj = None): - try: - if not msg.endswith("\n"): - msg += "\n" - if subj is None: - subj = msg.partition("\n")[0] - m = email.mime.text.MIMEText(msg) - m["Date"] = time.strftime("%d %b %Y %H:%M:%S +0000", time.gmtime()) - m["From"] = cfg.log_email - m["To"] = cfg.log_email - m["Subject"] = subj - s = smtplib.SMTP("localhost") - s.sendmail(cfg.log_email, [cfg.log_email], m.as_string()) - s.quit() - except ConfigParser.Error: - pass - - -class TransmissionClient(transmissionrpc.client.Client): - """ - Extension of transmissionrpc.client.Client. - """ - - def __init__(self, **kwargs): - kwargs.setdefault("address", "127.0.0.1") - kwargs.setdefault("user", cfg.transmission_username) - kwargs.setdefault("password", cfg.transmission_password) - transmissionrpc.client.Client.__init__(self, **kwargs) - - - def find_torrents(self, *names): - """ - Find torrents with given name(s), return id(s). - """ - - result = [i for i, t in self.list().iteritems() if t.name in names] - if not result: - raise CouldNotFindTorrents - return result - - - def remove_torrents(self, *names): - """ - Remove any torrents with the given name(s). - """ - - try: - ids = self.find_torrents(*names) - except CouldNotFindTorrents: - pass - else: - syslog.syslog("Removing torrent%s %s (%s)" % ( - "" if len(ids) == 1 else "s", - ", ".join(names), - ", ".join("#%s" % i for i in ids))) - self.remove(ids) - - def unlimited_seeding(self, *names): - """ - Set unlimited seeding for specified torrents. 
- """ - - # Apparently seedRatioMode = 2 means "no limit" - try: - self.change(self.find_torrents(*names), seedRatioMode = 2) - except CouldNotFindTorrents: - syslog.syslog("Couldn't tweak seedRatioMode, blundering onwards") - - -class MyConfigParser(ConfigParser.RawConfigParser): - - rpki_torrent_section = "rpki-torrent" - - @property - def zip_dir(self): - return self.get(self.rpki_torrent_section, "zip_dir") - - @property - def zip_ta(self): - return self.get(self.rpki_torrent_section, "zip_ta") - - @property - def rcynic_prog(self): - return self.get(self.rpki_torrent_section, "rcynic_prog") - - @property - def rcynic_conf(self): - return self.get(self.rpki_torrent_section, "rcynic_conf") - - @property - def run_rcynic_anyway(self): - return self.getboolean(self.rpki_torrent_section, "run_rcynic_anyway") - - @property - def generate_url(self): - return self.get(self.rpki_torrent_section, "generate_url") - - @property - def act_as_generator(self): - try: - return self.get(self.rpki_torrent_section, "generate_url") != "" - except ConfigParser.Error: - return False - - @property - def rsync_prog(self): - return self.get(self.rpki_torrent_section, "rsync_prog") - - @property - def mktorrent_prog(self): - return self.get(self.rpki_torrent_section, "mktorrent_prog") - - @property - def tracker_url(self): - return self.get(self.rpki_torrent_section, "tracker_url") - - @property - def sftp_host(self): - return self.get(self.rpki_torrent_section, "sftp_host") - - @property - def sftp_port(self): - try: - return self.getint(self.rpki_torrent_section, "sftp_port") - except ConfigParser.Error: - return 22 - - @property - def sftp_user(self): - return self.get(self.rpki_torrent_section, "sftp_user") - - @property - def sftp_hostkey_file(self): - return self.get(self.rpki_torrent_section, "sftp_hostkey_file") - - @property - def sftp_private_key_file(self): - return self.get(self.rpki_torrent_section, "sftp_private_key_file") - - @property - def lockfile(self): - try: - 
return self.get(self.rpki_torrent_section, "lockfile") - except ConfigParser.Error: - return None - - @property - def unauthenticated(self): - try: - return self.get(self.rpki_torrent_section, "unauthenticated") - except ConfigParser.Error: - return self.get("rcynic", "unauthenticated") - - @property - def log_email(self): - return self.get(self.rpki_torrent_section, "log_email") - - @property - def transmission_username(self): - try: - return self.get(self.rpki_torrent_section, "transmission_username") - except ConfigParser.Error: - return None - - @property - def transmission_password(self): - try: - return self.get(self.rpki_torrent_section, "transmission_password") - except ConfigParser.Error: - return None - - def multioption_iter(self, name, getter = None): - if getter is None: - getter = self.get - if self.has_option(self.rpki_torrent_section, name): - yield getter(self.rpki_torrent_section, name) - name += "." - names = [i for i in self.options(self.rpki_torrent_section) if i.startswith(name) and i[len(name):].isdigit()] - names.sort(key = lambda s: int(s[len(name):])) - for name in names: - yield getter(self.rpki_torrent_section, name) - - @property - def zip_urls(self): - return self.multioption_iter("zip_url") - - @property - def post_rcynic_commands(self): - return self.multioption_iter("post_rcynic_command") - - def find_url(self, torrent_name): - urls = [u for u in self.zip_urls - if os.path.splitext(os.path.basename(u))[0] == torrent_name] - if len(urls) != 1: - raise TorrentNameDoesNotMatchURL("Can't find URL matching torrent name %s" % torrent_name) - return urls[0] - - -if __name__ == "__main__": - main() diff --git a/rcynic/rules.darwin.mk b/rcynic/rules.darwin.mk deleted file mode 100644 index d37b0e75..00000000 --- a/rcynic/rules.darwin.mk +++ /dev/null @@ -1,108 +0,0 @@ -# $Id$ - -install-user-and-group: .FORCE - @if /usr/bin/dscl . 
-read "/Groups/${RCYNIC_GROUP}" >/dev/null 2>&1; \ - then \ - echo "You already have a group \"${RCYNIC_GROUP}\", so I will use it."; \ - elif gid="$$(/usr/bin/dscl . -list /Groups PrimaryGroupID | /usr/bin/awk 'BEGIN {gid = 501} $$2 >= gid {gid = 1 + $$2} END {print gid}')" && \ - /usr/bin/dscl . -create "/Groups/${RCYNIC_GROUP}" && \ - /usr/bin/dscl . -create "/Groups/${RCYNIC_GROUP}" RealName "${RCYNIC_GECOS}" && \ - /usr/bin/dscl . -create "/Groups/${RCYNIC_GROUP}" PrimaryGroupID "$$gid" && \ - /usr/bin/dscl . -create "/Groups/${RCYNIC_GROUP}" GeneratedUID "$$(/usr/bin/uuidgen)" && \ - /usr/bin/dscl . -create "/Groups/${RCYNIC_GROUP}" Password "*"; \ - then \ - echo "Added group \"${RCYNIC_GROUP}\"."; \ - else \ - echo "Adding group \"${RCYNIC_GROUP}\" failed..."; \ - echo "Please create it, then try again."; \ - exit 1; \ - fi; \ - if /usr/bin/dscl . -read "/Users/${RCYNIC_USER}" >/dev/null 2>&1; \ - then \ - echo "You already have a user \"${RCYNIC_USER}\", so I will use it."; \ - elif uid="$$(/usr/bin/dscl . -list /Users UniqueID | /usr/bin/awk 'BEGIN {uid = 501} $$2 >= uid {uid = 1 + $$2} END {print uid}')" && \ - /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" && \ - /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" UserShell "/usr/bin/false" && \ - /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" RealName "${RCYNIC_GECOS}" && \ - /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" UniqueID "$$uid" && \ - /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" PrimaryGroupID "$$gid" && \ - /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" NFSHomeDirectory "/var/empty" && \ - /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" GeneratedUID "$$(/usr/bin/uuidgen)" && \ - /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" Password "*"; \ - then \ - echo "Added user \"${RCYNIC_USER}\"."; \ - else \ - echo "Adding user \"${RCYNIC_USER}\" failed..."; \ - echo "Please create it, then try again."; \ - exit 1; \ - fi - @if /usr/bin/dscl . 
-read "/Groups/${RPKIRTR_GROUP}" >/dev/null 2>&1; \ - then \ - echo "You already have a group \"${RPKIRTR_GROUP}\", so I will use it."; \ - elif gid="$$(/usr/bin/dscl . -list /Groups PrimaryGroupID | /usr/bin/awk 'BEGIN {gid = 501} $$2 >= gid {gid = 1 + $$2} END {print gid}')" && \ - /usr/bin/dscl . -create "/Groups/${RPKIRTR_GROUP}" && \ - /usr/bin/dscl . -create "/Groups/${RPKIRTR_GROUP}" RealName "${RPKIRTR_GECOS}" && \ - /usr/bin/dscl . -create "/Groups/${RPKIRTR_GROUP}" PrimaryGroupID "$$gid" && \ - /usr/bin/dscl . -create "/Groups/${RPKIRTR_GROUP}" GeneratedUID "$$(/usr/bin/uuidgen)" && \ - /usr/bin/dscl . -create "/Groups/${RPKIRTR_GROUP}" Password "*"; \ - then \ - echo "Added group \"${RPKIRTR_GROUP}\"."; \ - else \ - echo "Adding group \"${RPKIRTR_GROUP}\" failed..."; \ - echo "Please create it, then try again."; \ - exit 1; \ - fi; \ - if /usr/bin/dscl . -read "/Users/${RPKIRTR_USER}" >/dev/null 2>&1; \ - then \ - echo "You already have a user \"${RPKIRTR_USER}\", so I will use it."; \ - elif uid="$$(/usr/bin/dscl . -list /Users UniqueID | /usr/bin/awk 'BEGIN {uid = 501} $$2 >= uid {uid = 1 + $$2} END {print uid}')" && \ - /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" && \ - /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" UserShell "/usr/bin/false" && \ - /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" RealName "${RPKIRTR_GECOS}" && \ - /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" UniqueID "$$uid" && \ - /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" PrimaryGroupID "$$gid" && \ - /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" NFSHomeDirectory "/var/empty" && \ - /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" GeneratedUID "$$(/usr/bin/uuidgen)" && \ - /usr/bin/dscl . 
-create "/Users/${RPKIRTR_USER}" Password "*"; \ - then \ - echo "Added user \"${RPKIRTR_USER}\"."; \ - else \ - echo "Adding user \"${RPKIRTR_USER}\" failed..."; \ - echo "Please create it, then try again."; \ - exit 1; \ - fi - - -install-shared-libraries: .FORCE - @echo "Copying required shared libraries" - @shared_libraries="${RCYNIC_DIR}/bin/rcynic ${RCYNIC_DIR}/bin/rsync"; \ - while true; \ - do \ - closure="$$(/usr/bin/otool -L $${shared_libraries} | /usr/bin/awk '/:$$/ {next} {print $$1}' | /usr/bin/sort -u)"; \ - if test "x$$shared_libraries" = "x$$closure"; - then \ - break; \ - else \ - shared_libraries="$$closure"; \ - fi; \ - done; \ - for shared in /usr/lib/dyld $$shared_libraries; \ - do \ - if /bin/test -r "${RCYNIC_DIR}/$${shared}"; \ - then \ - echo "You already have a \"${RCYNIC_DIR}/$${shared}\", so I will use it"; \ - elif /usr/bin/install -m 555 -o root -g wheel -p "$${shared}" "${RCYNIC_DIR}/$${shared}"; \ - then \ - echo "Copied $${shared} into ${RCYNIC_DIR}"; \ - else \ - echo "Unable to copy $${shared} into ${RCYNIC_DIR}"; \ - exit 1; \ - fi; \ - done - -install-rc-scripts: - ${INSTALL} -o root -g wheel -d ${DESTDIR}/Library/StartupItems/RCynic - ${INSTALL} -o root -g wheel -m 555 \ - rc-scripts/darwin/RCynic \ - rc-scripts/darwin/StartupParameters.plist \ - ${DESTDIR}/Library/Startup/RCynic diff --git a/rcynic/rules.freebsd.mk b/rcynic/rules.freebsd.mk deleted file mode 100644 index 5233386e..00000000 --- a/rcynic/rules.freebsd.mk +++ /dev/null @@ -1,56 +0,0 @@ -# $Id$ - -install-user-and-group: .FORCE - @if /usr/sbin/pw groupshow "${RCYNIC_GROUP}" 2>/dev/null; \ - then \ - echo "You already have a group \"${RCYNIC_GROUP}\", so I will use it."; \ - elif /usr/sbin/pw groupadd ${RCYNIC_GROUP}; \ - then \ - echo "Added group \"${RCYNIC_GROUP}\"."; \ - else \ - echo "Adding group \"${RCYNIC_GROUP}\" failed..."; \ - echo "Please create it, then try again."; \ - exit 1; \ - fi - @if /usr/sbin/pw groupshow "${RPKIRTR_GROUP}" 2>/dev/null; \ - 
then \ - echo "You already have a group \"${RPKIRTR_GROUP}\", so I will use it."; \ - elif /usr/sbin/pw groupadd ${RPKIRTR_GROUP}; \ - then \ - echo "Added group \"${RPKIRTR_GROUP}\"."; \ - else \ - echo "Adding group \"${RPKIRTR_GROUP}\" failed..."; \ - echo "Please create it, then try again."; \ - exit 1; \ - fi - @if /usr/sbin/pw usershow "${RCYNIC_USER}" 2>/dev/null; \ - then \ - echo "You already have a user \"${RCYNIC_USER}\", so I will use it."; \ - elif /usr/sbin/pw useradd ${RCYNIC_USER} -g ${RCYNIC_GROUP} -h - -d /nonexistant -s /usr/sbin/nologin -c "${RCYNIC_GECOS}" -G "${RPKIRTR_GROUP}"; \ - then \ - echo "Added user \"${RCYNIC_USER}\"."; \ - else \ - echo "Adding user \"${RCYNIC_USER}\" failed..."; \ - echo "Please create it, then try again."; \ - exit 1; \ - fi - @if /usr/sbin/pw usershow "${RPKIRTR_USER}" 2>/dev/null; \ - then \ - echo "You already have a user \"${RPKIRTR_USER}\", so I will use it."; \ - elif /usr/sbin/pw useradd ${RPKIRTR_USER} -g ${RPKIRTR_GROUP} -h - -d /nonexistant -s /usr/sbin/nologin -c "${RPKIRTR_GECOS}"; \ - then \ - echo "Added user \"${RPKIRTR_USER}\"."; \ - else \ - echo "Adding user \"${RPKIRTR_USER}\" failed..."; \ - echo "Please create it, then try again."; \ - exit 1; \ - fi - - -# We use static compilation on FreeBSD, so no need for shared libraries - -install-shared-libraries: - @true - -install-rc-scripts: - ${INSTALL} -m 555 -o root -g wheel -p rc-scripts/freebsd/rc.d.rcynic ${DESTDIR}/usr/local/etc/rc.d/rcynic diff --git a/rcynic/rules.linux.mk b/rcynic/rules.linux.mk deleted file mode 100644 index 6a962cef..00000000 --- a/rcynic/rules.linux.mk +++ /dev/null @@ -1,92 +0,0 @@ -# $Id$ - -install-user-and-group: .FORCE - @if getent group ${RCYNIC_GROUP} >/dev/null; \ - then \ - echo "You already have a group \"${RCYNIC_GROUP}\", so I will use it."; \ - elif /usr/sbin/groupadd ${RCYNIC_GROUP}; \ - then \ - echo "Added group \"${RCYNIC_GROUP}\"."; \ - else \ - echo "Adding group \"${RCYNIC_GROUP}\" failed..."; \ - echo 
"Please create it, then try again."; \ - exit 1; \ - fi - @nogroup='-N'; \ - if test -f /etc/redhat-release; then read vendor release version < /etc/redhat-release; if test $$vendor = CentOS; then nogroup='-n'; fi; fi; \ - if getent passwd ${RCYNIC_USER} >/dev/null; \ - then \ - echo "You already have a user \"${RCYNIC_USER}\", so I will use it."; \ - elif /usr/sbin/useradd -g ${RCYNIC_GROUP} -M $$nogroup -d "${RCYNIC_DIR}" -s /sbin/nologin -c "${RCYNIC_GECOS}" ${RCYNIC_USER}; \ - then \ - echo "Added user \"${RCYNIC_USER}\"."; \ - else \ - echo "Adding user \"${RCYNIC_USER}\" failed..."; \ - echo "Please create it, then try again."; \ - exit 1; \ - fi - @if getent group ${RPKIRTR_GROUP} >/dev/null; \ - then \ - echo "You already have a group \"${RPKIRTR_GROUP}\", so I will use it."; \ - elif /usr/sbin/groupadd ${RPKIRTR_GROUP}; \ - then \ - echo "Added group \"${RPKIRTR_GROUP}\"."; \ - else \ - echo "Adding group \"${RPKIRTR_GROUP}\" failed..."; \ - echo "Please create it, then try again."; \ - exit 1; \ - fi - @nogroup='-N'; \ - if test -f /etc/redhat-release; then read vendor release version < /etc/redhat-release; if test $$vendor = CentOS; then nogroup='-n'; fi; fi; \ - if getent passwd ${RPKIRTR_USER} >/dev/null; \ - then \ - echo "You already have a user \"${RPKIRTR_USER}\", so I will use it."; \ - elif /usr/sbin/useradd -g ${RPKIRTR_GROUP} -M $$nogroup -d "${RPKIRTR_DIR}" -s /sbin/nologin -c "${RPKIRTR_GECOS}" ${RPKIRTR_USER}; \ - then \ - echo "Added user \"${RPKIRTR_USER}\"."; \ - else \ - echo "Adding user \"${RPKIRTR_USER}\" failed..."; \ - echo "Please create it, then try again."; \ - exit 1; \ - fi - usermod -a -G ${RPKIRTR_GROUP} ${RCYNIC_USER} - -install-shared-libraries: .FORCE - @echo "Copying required shared libraries" - @if test -d /lib64; then libdir=/lib64; else libdir=/lib; fi; \ - shared_libraries="${RCYNIC_DIR}/bin/rcynic ${RCYNIC_DIR}/bin/rsync $$(/usr/bin/find $${libdir} -name 'libnss*.so*' -print)"; \ - while true; \ - do \ - 
closure="$$(/usr/bin/ldd $${shared_libraries} | \ - ${AWK} ' \ - { sub(/:$/, "") } \ - $$0 == "${RCYNIC_DIR}/bin/rcynic" { next } \ - $$0 == "${RCYNIC_DIR}/bin/rsync" { next } \ - $$1 ~ /\/ld-linux\.so/ { next } \ - { for (i = 1; i <= NF; i++) if ($$i ~ /^\//) print $$i } \ - ' | \ - ${SORT} -u)"; \ - if test "X$$shared_libraries" = "X$$closure"; \ - then \ - break; \ - else \ - shared_libraries="$$closure"; \ - fi; \ - done; \ - if test -f $${libdir}/libresolv.so.2; \ - then \ - shared_libraries="$${shared_libraries} $${libdir}/libresolv.so.2"; - fi; \ - for shared in $${libdir}/*ld*.so* $$shared_libraries; \ - do \ - if test ! -r "${RCYNIC_DIR}/$${shared}"; \ - then \ - ${INSTALL} -m 555 -d `dirname "${RCYNIC_DIR}$${shared}"` && \ - ${INSTALL} -m 555 -p "$${shared}" "${RCYNIC_DIR}$${shared}"; \ - fi; \ - done - -# No devfs, so no rc script - -install-rc-scripts: - @true diff --git a/rcynic/rules.unknown.mk b/rcynic/rules.unknown.mk deleted file mode 100644 index 6ce3ea18..00000000 --- a/rcynic/rules.unknown.mk +++ /dev/null @@ -1,4 +0,0 @@ -# $Id$ - -install-user-and-group install-shared-libraries install-rc-scripts: .FORCE - @echo "Don't know how to make $@ on this platform"; exit 1 diff --git a/rcynic/sample-rcynic.conf b/rcynic/sample-rcynic.conf deleted file mode 100644 index 6fa49b9c..00000000 --- a/rcynic/sample-rcynic.conf +++ /dev/null @@ -1,39 +0,0 @@ -# $Id$ -# -# Sample rcynic configuration file for jailed environment. 
- -[rcynic] -rsync-program = /bin/rsync -authenticated = /data/authenticated -unauthenticated = /data/unauthenticated -lockfile = /data/lock -xml-summary = /data/rcynic.xml -jitter = 600 -use-syslog = true -log-level = log_usage_err - -# You need to specify some trust anchors here, eg: - -#trust-anchor.1 = /etc/trust-anchors/ta-1.cer -#trust-anchor.2 = /etc/trust-anchors/ta-2.cer - -# or, using the "Trust Anchor Locator" form: - -#trust-anchor-locator.1 = /etc/trust-anchors/ta-1.tal -#trust-anchor-locator.2 = /etc/trust-anchors/ta-2.tal - -# The choice between these two formats depends largely on the policies -# of the entity generating the corresponding trust anchor, ie, will -# probably be made for you by the generating entity. - -# If you already keep all your TAs and TALs in a single directory -# anyway, you can also use the trust-anchor-directory directive to -# name that directory and let rcynic track whatever TAs and TALs you -# have there at the moment: - -#trust-anchor-directory = /etc/trust-anchors - -# Note that the order in which rcynic will read TAs and TALs from such -# a directory is undefined. In general this is not a problem, but if -# you really care about this for some reason, stick to the other -# directives. diff --git a/rcynic/sample-trust-anchors/README b/rcynic/sample-trust-anchors/README deleted file mode 100644 index 485d0485..00000000 --- a/rcynic/sample-trust-anchors/README +++ /dev/null @@ -1,26 +0,0 @@ -$Id$ - -These are SAMPLE trust anchors for testing rcynic. Any resemblance -between these files and real trust anchors living, dead, or wandering -the night in ghostly torment is purely coincidental. - -Remember: it's only a trust anchor if a replying party says it is. -You are the relying party, so it's your call. - -The .cer files (if any) are self-signed X.509 trust anchors. - -The .tal files are "trust anchor locators", see RFC 6490. - -See the rcynic documentation for further details. 
- -== - -afrinic.tal AfriNIC -apnic.tal APNIC -arin.tal ARIN testbed -bbn-testbed.tal.disabled Only useful for certain tests -lacnic.tal LACNIC -ripe-ncc-root.tal RIPE NCC -ripe-pilot.tal A RIPE testbed -rpki.net-testbed.tal rpki.net testbed -testbed-apnicrpki.tal An APNIC testbed diff --git a/rcynic/sample-trust-anchors/afrinic.tal b/rcynic/sample-trust-anchors/afrinic.tal deleted file mode 100644 index 0f202c73..00000000 --- a/rcynic/sample-trust-anchors/afrinic.tal +++ /dev/null @@ -1,8 +0,0 @@ -rsync://rpki.afrinic.net/repository/AfriNIC.cer -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxsAqAhWIO+ON2Ef9oRDM -pKxv+AfmSLIdLWJtjrvUyDxJPBjgR+kVrOHUeTaujygFUp49tuN5H2C1rUuQavTH -vve6xNF5fU3OkTcqEzMOZy+ctkbde2SRMVdvbO22+TH9gNhKDc9l7Vu01qU4LeJH -k3X0f5uu5346YrGAOSv6AaYBXVgXxa0s9ZvgqFpim50pReQe/WI3QwFKNgpPzfQL -6Y7fDPYdYaVOXPXSKtx7P4s4KLA/ZWmRL/bobw/i2fFviAGhDrjqqqum+/9w1hEl -L/vqihVnV18saKTnLvkItA/Bf5i11Yhw2K7qv573YWxyuqCknO/iYLTR1DToBZcZ -UQIDAQAB diff --git a/rcynic/sample-trust-anchors/altca.tal b/rcynic/sample-trust-anchors/altca.tal deleted file mode 100644 index 2dbc7413..00000000 --- a/rcynic/sample-trust-anchors/altca.tal +++ /dev/null @@ -1,9 +0,0 @@ -rsync://ca0.rpki.net/tal/root.cer - -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzPSPpQxa0rxz9sbbvYGr -UlpgyBVxSIt+k/WDKbr+VW7CjUoz6cc5KMFANkQWw3D6ER4kCwX4WJkD58AGGbw/ -WeAe6m3aHcORUVRkr45a4qSrYiG7Wq9RAXtwbhlXofB3zo+O90IlXDaVP2U9bw+Q -yoJBJuAmZONt0bRgrktv8QhVtKvuYkH5ZIe7DkXJcJzBn6gv09dZsdwZm3xV3soX -HEKrz5pY6Sb2xoL1CyPqzGOfVFxl0G5+dmcD/degPKxrEycAzjnHUzN1gus2jg26 -dtkix7KG/Mn1h/k53j0FdQD+zqPwakgwqjvCOdSdHMRmsikj0EF9WrZIOjZUXV6q -6wIDAQAB diff --git a/rcynic/sample-trust-anchors/apnic-rpki-root-afrinic-origin.tal b/rcynic/sample-trust-anchors/apnic-rpki-root-afrinic-origin.tal deleted file mode 100644 index d254ec44..00000000 --- a/rcynic/sample-trust-anchors/apnic-rpki-root-afrinic-origin.tal +++ /dev/null @@ -1,8 +0,0 @@ -rsync://rpki.apnic.net/repository/apnic-rpki-root-afrinic-origin.cer 
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuMLL96YV9pf0rZ4Ow/bk -cgpoPfsRzkcgmisyCuMUdotHwrp8pepujhohatScRK09ILRrZYCdpX4121MJhqXC -P3u3hy9fF0CeARKX/Q82nJccD4dtUp23UcFys8hwJgNYZI910ajkAxwNT//H/TFw -oUYbzZGBR7o2awMc7GdQl/j6dgOkV6AfYy5DyDEgOUNHnUxED2rreefL/E2Fr2ST -Esar6bTR4Tg4+nVF1PjAkgN0tKZYe4wZ6VmtqV/VTngSLysim6av7ki+JR3cVgVU -OqXeh1vPjH2tNu6u9bX37ZrdVb6NBRer9I99IDbKvyhELb6nzo8+Q74zga9HI+Pf -QwIDAQAB diff --git a/rcynic/sample-trust-anchors/apnic-rpki-root-arin-origin.tal b/rcynic/sample-trust-anchors/apnic-rpki-root-arin-origin.tal deleted file mode 100644 index b82a136f..00000000 --- a/rcynic/sample-trust-anchors/apnic-rpki-root-arin-origin.tal +++ /dev/null @@ -1,8 +0,0 @@ -rsync://rpki.apnic.net/repository/apnic-rpki-root-arin-origin.cer -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp6vscYtzhe0CfFk5Ro44 -llPhsInXtfAxqfYmK7m9V3khkqK3d3/ZAW6pcJm7qW8XhEGl+F5mUeeLIm5JoIhr -kT5B5M6uL0VlCCkZJH4h76ybOa83vWITNZEDy9L3c3nK4S+Basu3vYoE4ICXGG+J -7zg5Iw9saV+p03E2w1g16pt1QI3Cnggp6edkeWClEz3aPw/ULOIHb7YmatWwdERl -tL9LsuMSKszQLUY7F4XVpxey/rJYAZgzDUh+b6813WAClCkkydNjsbviuekAWJbx -sW7Mcw53u30K4g8MP03CjkDOubyoR4Qo99R1UQJCdrRsFKbSSfN/fOA4y7ikc3xs -jQIDAQAB diff --git a/rcynic/sample-trust-anchors/apnic-rpki-root-iana-origin.tal b/rcynic/sample-trust-anchors/apnic-rpki-root-iana-origin.tal deleted file mode 100644 index a4e714c0..00000000 --- a/rcynic/sample-trust-anchors/apnic-rpki-root-iana-origin.tal +++ /dev/null @@ -1,8 +0,0 @@ -rsync://rpki.apnic.net/repository/apnic-rpki-root-iana-origin.cer -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx9RWSL61YAAYumEiU8z8 -qH2ETVIL01ilxZlzIL9JYSORMN5Cmtf8V2JblIealSqgOTGjvSjEsiV73s67zYQI -7C/iSOb96uf3/s86NqbxDiFQGN8qG7RNcdgVuUlAidl8WxvLNI8VhqbAB5uSg/Mr -LeSOvXRja041VptAxIhcGzDMvlAJRwkrYK/Mo8P4E2rSQgwqCgae0ebY1CsJ3Cjf -i67C1nw7oXqJJovvXJ4apGmEv8az23OLC6Ki54Ul/E6xk227BFttqFV3YMtKx42H -cCcDVZZy01n7JjzvO8ccaXmHIgR7utnqhBRNNq5Xc5ZhbkrUsNtiJmrZzVlgU6Ou -0wIDAQAB diff --git a/rcynic/sample-trust-anchors/apnic-rpki-root-lacnic-origin.tal 
b/rcynic/sample-trust-anchors/apnic-rpki-root-lacnic-origin.tal deleted file mode 100644 index a4d56802..00000000 --- a/rcynic/sample-trust-anchors/apnic-rpki-root-lacnic-origin.tal +++ /dev/null @@ -1,8 +0,0 @@ -rsync://rpki.apnic.net/repository/apnic-rpki-root-lacnic-origin.cer -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyoYPp3l3DWyPtLWrmRn4 -Oux9hQ5bxd0SX/f6ygHxik+I3eMJP5J0Pr2e500tyXb2uKsX9kDqu/kckr+TUMhV -BHd5yAv8OAE3YYEvpz/7uTX7cYy2yUeA76OEP75Y88OIQEzGpPLNpIzDxMggxuDh -IhkA5xMiUJgVoEgmWSzR+MuRBjv2422wAGB5GpLgYsOjpwvG0VPmhnE+39+10ucQ -CLt0Ny5kOR4an2tkvHjm7rzKDnFm8MWxPzAWESdf+8g7ITzSglqxDNiK5E5rdzNt -h1Kvp+9RwaFArw6Ky1A4HhnoplN4EfKwxq0YamuKV0ZTTpWyT2+qDuE6sOfHRbJ0 -5QIDAQAB diff --git a/rcynic/sample-trust-anchors/apnic-rpki-root-ripe-origin.tal b/rcynic/sample-trust-anchors/apnic-rpki-root-ripe-origin.tal deleted file mode 100644 index d64df3ae..00000000 --- a/rcynic/sample-trust-anchors/apnic-rpki-root-ripe-origin.tal +++ /dev/null @@ -1,8 +0,0 @@ -rsync://rpki.apnic.net/repository/apnic-rpki-root-ripe-origin.cer -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwsQlXmEklLYApoDo7GEa -NNTEGFPU5wJpi04iXuga2xn+g/TMLOlyJbjuPYRtRm/7VbRnN3m9Ta+WETy03+Fm -EbXzB4xxhJKVik/ARHBnrBWhLyURy8Q5/XplE9cJein37IE1mIsbKM7o/90S225w -7GuvW7T4kjPWYmBFOywHWsfQO1EdsgiJrkz+Ab67ZkdSIiKHkf2UE6/MrbDEj+QK -9+s/vKH8BtDhaLmTWY+bVvfJ3+AWDH6roo1ozbl5yamQFbLOl3ns30f3yOJcNSNu -/qgMQRRyp2sXXQovhTy8yqm3LFspaCWnTmQtBieWZwibuOa4Z27A1FzTMst2T4wY -/wIDAQAB diff --git a/rcynic/sample-trust-anchors/apnic-testbed.tal b/rcynic/sample-trust-anchors/apnic-testbed.tal deleted file mode 100644 index f87a3bf3..00000000 --- a/rcynic/sample-trust-anchors/apnic-testbed.tal +++ /dev/null @@ -1,9 +0,0 @@ -rsync://rpki-testbed.apnic.net/repository/rpki-testbed.apnic.net.cer - -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyiVmZgr+aN0xZfh6yrJS -qmsX2Q58UzjPaTnhsP88zdSqmRCVAegpRMjBjwF6Ga8tR9RP89W+tRzLf2x883Et -vmNAax939gilN4VxGRgKL2YZO27w8Vk6aVpSJ0+CCQI497eataRFpG9eLiPfsRfo 
-Fuha75MGa5m1cmJFVk0Jj0sRa9RmB/ZVxZDdwHgxAB7L875JSUnVCOLCP4FejoYt -71wJfzFw3fZmXSPeXQ+wtxVPfh8VIOyLTyYbq3AA1Gn+herzvCsiLXZ41lcCCRZR -ObwIGptj7KnqLoJ/MOGL6eoSY+v1B85z2Il3Nl8px3ohsApDM/MANDacxl0EgLwX -2QIDAQAB diff --git a/rcynic/sample-trust-anchors/bbn-testbed.tal.disabled b/rcynic/sample-trust-anchors/bbn-testbed.tal.disabled deleted file mode 100644 index 21302ea5..00000000 --- a/rcynic/sample-trust-anchors/bbn-testbed.tal.disabled +++ /dev/null @@ -1,8 +0,0 @@ -rsync://rpki.bbn.com/rpki/root.cer -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvEjkk0oC2N8Ij+ruAkrX -cI7lSOIG1sQLC8xJJqklMlOBLao+3If0AVKSEP5+pb/hJwpXPomt6Lm4+fuZ6vX/ -q15llQL3YWkHZ49t/X47yCqxvM0txRaZafffbk6lJrofKhKKrYEcrr/rHiG+IQXn -U1MGaQ/a8DfRDWiCQxonh7CthrLi7tr+QI9s9fDvvHPCK9OmmhZvCUDOydf+/vMn -VShQ57KsUFcuZ0EX9UwsaIGCYGyvsYwk54UtoIGovg2IavfZK8ai0/5XEVpvKQiR -8AixZpg5bSe7555+LhzpL5+LdqX6/hVRah0JrdR8KiXvr16Kmcitj+JLqVc0Wkd8 -zwIDAQAB diff --git a/rcynic/sample-trust-anchors/lacnic.tal b/rcynic/sample-trust-anchors/lacnic.tal deleted file mode 100644 index 29220d37..00000000 --- a/rcynic/sample-trust-anchors/lacnic.tal +++ /dev/null @@ -1,8 +0,0 @@ -rsync://repository.lacnic.net/rpki/lacnic/rta-lacnic-rpki.cer -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqZEzhYK0+PtDOPfub/KR -c3MeWx3neXx4/wbnJWGbNAtbYqXg3uU5J4HFzPgk/VIppgSKAhlO0H60DRP48by9 -gr5/yDHu2KXhOmnMg46sYsUIpfgtBS9+VtrqWziJfb+pkGtuOWeTnj6zBmBNZKK+ -5AlMCW1WPhrylIcB+XSZx8tk9GS/3SMQ+YfMVwwAyYjsex14Uzto4GjONALE5oh1 -M3+glRQduD6vzSwOD+WahMbc9vCOTED+2McLHRKgNaQf0YJ9a1jG9oJIvDkKXEqd -fqDRktwyoD74cV57bW3tBAexB7GglITbInyQAsmdngtfg2LUMrcROHHP86QPZINj -DQIDAQAB diff --git a/rcynic/sample-trust-anchors/ripe-ncc-root.tal b/rcynic/sample-trust-anchors/ripe-ncc-root.tal deleted file mode 100644 index ebdb9f45..00000000 --- a/rcynic/sample-trust-anchors/ripe-ncc-root.tal +++ /dev/null @@ -1,9 +0,0 @@ -rsync://rpki.ripe.net/ta/ripe-ncc-ta.cer -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0URYSGqUz2m -yBsOzeW1jQ6NsxNvlLMyhWknvnl8NiBCs/T/S2XuNKQNZ+wBZxIgPPV 
-2pFBFeQAvoH/WK83HwA26V2siwm/MY2nKZ+Olw+wlpzlZ1p3Ipj2eNc -Krmit8BwBC8xImzuCGaV0jkRB0GZ0hoH6Ml03umLprRsn6v0xOP0+l6 -Qc1ZHMFVFb385IQ7FQQTcVIxrdeMsoyJq9eMkE6DoclHhF/NlSllXub -ASQ9KUWqJ0+Ot3QCXr4LXECMfkpkVR2TZT+v5v658bHVs6ZxRD1b6Uk -1uQKAyHUbn/tXvP8lrjAibGzVsXDT2L0x4Edx+QdixPgOji3gBMyL2V -wIDAQAB diff --git a/rcynic/sample-trust-anchors/ripe-pilot.tal b/rcynic/sample-trust-anchors/ripe-pilot.tal deleted file mode 100644 index 971128e0..00000000 --- a/rcynic/sample-trust-anchors/ripe-pilot.tal +++ /dev/null @@ -1,7 +0,0 @@ -rsync://localcert.ripe.net/ta/ripe-ncc-pilot.cer - -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApIXenLOBfyo7cOnm4mGKmYxsoWCp28dw3XJAoZNW -PDK8i9MxYACpwfz7bjyGma1BWPBJuievNd6nriFI+3WG+wt2bnO2ZmiLenCwMtm8bu7BeldpWRwlAnRp4t4I -L6sZ7T9bF+4sTrv1qiEANqam0mhtLtUfbWXV5Z4mjgnNur7fJH2lIOm7Oc2/tok1rid8WsPe18zuvgwA3M0f -KQ/Oa4SMXKnHr3fg2cHAm1cfEEvhMKa3rUAvsKGVEYeTJNg6rh3IRnjWhZ8GmE1ywl/9qMa2z4YsUi9Bx9U+ -/zMS8qpJn/q6XBbZ8XYTTFvSWfXd6b82jSfABa4ukIDCUF/QFwIDAQAB diff --git a/rcynic/sample-trust-anchors/rpki.net-testbed.tal b/rcynic/sample-trust-anchors/rpki.net-testbed.tal deleted file mode 100644 index 1e466300..00000000 --- a/rcynic/sample-trust-anchors/rpki.net-testbed.tal +++ /dev/null @@ -1,9 +0,0 @@ -rsync://repo0.rpki.net/rpki/root.cer - -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAovWQL2lh6knDxGUG5hbt -CXvvh4AOzjhDkSHlj22gn/1oiM9IeDATIwP44vhQ6L/xvuk7W6Kfa5ygmqQ+xOZO -wTWPcrUbqaQyPNxokuivzyvqVZVDecOEqs78q58mSp9nbtxmLRW7B67SJCBSzfa5 -XpVyXYEgYAjkk3fpmefU+AcxtxvvHB5OVPIaBfPcs80ICMgHQX+fphvute9XLxjf -JKJWkhZqZ0v7pZm2uhkcPx1PMGcrGee0WSDC3fr3erLueagpiLsFjwwpX6F+Ms8v -qz45H+DKmYKvPSstZjCCq9aJ0qANT9OtnfSDOS+aLRPjZryCNyvvBHxZXqj5YCGK -twIDAQAB diff --git a/rcynic/sample-trust-anchors/testbed-apnicrpki.tal.disabled b/rcynic/sample-trust-anchors/testbed-apnicrpki.tal.disabled deleted file mode 100644 index ae16a302..00000000 --- a/rcynic/sample-trust-anchors/testbed-apnicrpki.tal.disabled +++ /dev/null @@ -1,8 +0,0 @@ 
-rsync://apnicrpki.rand.apnic.net/repository/root.cer -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApQofhU9VeRGZxlNgkrFR -ShSIkGtRUwg1mkISvieM2oxA6LFhHoqFV25BdK/3IhOh/0I/E9WX7uWPD72MFGBB -LsFc6JpZFkvg8+9KUIHquk46wn4bEvA8xMsPbGo+mK0CAkqOdHQSemC7nqUlR9VH -5zH1t8aYS4mrVN59LfClmiEE7QElgmVyvccfaEd+gMGaxsDvSylWIdvVxYfFG2rB -LiEsmfXwdn2a8b1Zx5eaFD80XV9Z7h15ESP1epSScGzMp2XB0FYMC3f9k7l5sydX -Rj5BYo1rWaM+Y6AoHe/d8G0xm6iX6b/JqT4Ywdt8/7DbsweaIk73VnpnXrYATCt9 -VwIDAQAB diff --git a/rcynic/static-rsync/Makefile.in b/rcynic/static-rsync/Makefile.in deleted file mode 100644 index 8a433c7b..00000000 --- a/rcynic/static-rsync/Makefile.in +++ /dev/null @@ -1,44 +0,0 @@ -# $Id$ - -VERSION = 2.6.9 - -CFG_ENV = CFLAGS='@CFLAGS@' LDFLAGS='@LDFLAGS@ @LD_STATIC_FLAG@' -CFG_ARG = - -TARBALL = rsync-${VERSION}.tar.gz -DIRNAME = rsync-${VERSION} - -CFG_LOG = > ../config.log 2>&1 -BIN_LOG = > ../build.log 2>&1 - -BIN = rsync - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all: ${BIN} - -${BIN}: ${DIRNAME}/${BIN} - ln ${DIRNAME}/${BIN} $@ - file $@ - -${DIRNAME}/${BIN}: configured.stamp - cd ${DIRNAME} && ${MAKE} ${BIN_LOG} - -extracted.stamp: ${TARBALL} - gzip -c -d ${TARBALL} | tar -xf - - touch $@ - -patched.stamp: extracted.stamp - for i in patches/patch-*; do if test -f "$$i"; then patch -d ${DIRNAME} <"$$i"; else :; fi; done - touch $@ - -configured.stamp: patched.stamp - cd ${DIRNAME} && ${CFG_ENV} ./configure ${CFG_ARG} ${CFG_LOG} - touch $@ - -clean: - rm -rf ${BIN} ${DIRNAME} *.stamp *.log - -distclean: clean - rm -f Makefile diff --git a/rcynic/static-rsync/README b/rcynic/static-rsync/README deleted file mode 100644 index 9ff5afa8..00000000 --- a/rcynic/static-rsync/README +++ /dev/null @@ -1,15 +0,0 @@ -$Id$ - -Hack to build a static rsync binary suitable for use in a chroot jail. - -The default configuration is for gcc, since that's the most widely -used compiler on the platforms we use. 
I've provided hooks intended -to make it simple to support other compilers just by overriding make -variables on the command line: if you need to do something more -drastic than this to get your compiler working, please tell me. - -If your platform doesn't support static binaries at all, you're on -your own (and should whine at your OS vendor, as this is nuts). - -We try to stick with rsync release code, but apply security patches -when necessary. diff --git a/rcynic/static-rsync/patches/patch-CVE-2007-4091 b/rcynic/static-rsync/patches/patch-CVE-2007-4091 deleted file mode 100644 index 201af96a..00000000 --- a/rcynic/static-rsync/patches/patch-CVE-2007-4091 +++ /dev/null @@ -1,60 +0,0 @@ ---- sender.c 2006-09-20 03:53:32.000000000 +0200 -+++ sender.c 2007-07-25 15:33:05.000000000 +0200 -@@ -123,6 +123,7 @@ - char fname[MAXPATHLEN]; - struct file_struct *file; - unsigned int offset; -+ size_t l = 0; - - if (ndx < 0 || ndx >= the_file_list->count) - return; -@@ -133,6 +134,20 @@ - file->dir.root, "/", NULL); - } else - offset = 0; -+ -+ l = offset + 1; -+ if (file) { -+ if (file->dirname) -+ l += strlen(file->dirname); -+ if (file->basename) -+ l += strlen(file->basename); -+ } -+ -+ if (l >= sizeof(fname)) { -+ rprintf(FERROR, "Overlong pathname\n"); -+ exit_cleanup(RERR_FILESELECT); -+ } -+ - f_name(file, fname + offset); - if (remove_source_files) { - if (do_unlink(fname) == 0) { -@@ -224,6 +239,7 @@ - enum logcode log_code = log_before_transfer ? FLOG : FINFO; - int f_xfer = write_batch < 0 ? 
batch_fd : f_out; - int i, j; -+ size_t l = 0; - - if (verbose > 2) - rprintf(FINFO, "send_files starting\n"); -@@ -259,6 +275,20 @@ - fname[offset++] = '/'; - } else - offset = 0; -+ -+ l = offset + 1; -+ if (file) { -+ if (file->dirname) -+ l += strlen(file->dirname); -+ if (file->basename) -+ l += strlen(file->basename); -+ } -+ -+ if (l >= sizeof(fname)) { -+ rprintf(FERROR, "Overlong pathname\n"); -+ exit_cleanup(RERR_FILESELECT); -+ } -+ - fname2 = f_name(file, fname + offset); - - if (verbose > 2) diff --git a/rcynic/static-rsync/rsync-2.6.9.tar.gz b/rcynic/static-rsync/rsync-2.6.9.tar.gz deleted file mode 100644 index 6377f639..00000000 Binary files a/rcynic/static-rsync/rsync-2.6.9.tar.gz and /dev/null differ diff --git a/rcynic/validation_status.py b/rcynic/validation_status.py deleted file mode 100644 index 1f7a704d..00000000 --- a/rcynic/validation_status.py +++ /dev/null @@ -1,34 +0,0 @@ -# $Id$ -# -# Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Flat text listing of elements from rcynic.xml. 
-""" - -import sys - -try: - from lxml.etree import ElementTree -except ImportError: - from xml.etree.ElementTree import ElementTree - -for filename in ([sys.stdin] if len(sys.argv) < 2 else sys.argv[1:]): - for elt in ElementTree(file = filename).findall("validation_status"): - print "%s %8s %-40s %s" % ( - elt.get("timestamp"), - elt.get("generation"), - elt.get("status"), - elt.text.strip()) diff --git a/rp/rcynic/Doxyfile b/rp/rcynic/Doxyfile new file mode 100644 index 00000000..aa183715 --- /dev/null +++ b/rp/rcynic/Doxyfile @@ -0,0 +1,1679 @@ +# Doxyfile 1.7.3 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" "). + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = rcynic + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. 
+ +PROJECT_NUMBER = 1.0 + +# Using the PROJECT_BRIEF tag one can provide an optional one line description for a project that appears at the top of each page and should give viewer a quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = + +# With the PROJECT_LOGO tag one can specify an logo or icon that is +# included in the documentation. The maximum height of the logo should not +# exceed 55 pixels and the maximum width should not exceed 200 pixels. +# Doxygen will copy the logo to the output directory. + +PROJECT_LOGO = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = doc + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. 
+# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrillic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. 
+ +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful if your file system +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. 
If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = NO + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = YES + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 8 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. 
+ +ALIASES = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = YES + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this +# tag. The format is ext=language, where ext is a file extension, and language +# is one of the parsers supported by doxygen: IDL, Java, Javascript, CSharp, C, +# C++, D, PHP, Objective-C, Python, Fortran, VHDL, C, C++. For instance to make +# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C +# (default is Fortran), use: inc=Fortran f=C. Note that for custom extensions +# you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) 
but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also makes the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. 
+ +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penalty. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will roughly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. 
+# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = YES + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespaces are hidden. + +EXTRACT_ANON_NSPACES = YES + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. 
+ +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = NO + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = YES + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = NO + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then Doxygen +# will list include files with double quotes in the documentation +# rather than with sharp brackets. + +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. 
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen +# will sort the (brief and detailed) documentation of class members so that +# constructors and destructors are listed first. If set to NO (the default) +# the constructors will appear in the respective orders defined by +# SORT_MEMBER_DOCS and SORT_BRIEF_DOCS. +# This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO +# and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. 
+ +SORT_BY_SCOPE_NAME = NO + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper type resolution of all parameters of a function it will reject a +# match between the prototype and the implementation of a member function even if there is only one candidate or it is obvious which candidate to choose by doing a simple string match. By disabling STRICT_PROTO_MATCHING doxygen +# will still accept a match between prototype and implementation in such cases. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or macro consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and macros in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. 
+
+MAX_INITIALIZER_LINES = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
+# at the bottom of the documentation of classes and structs. If set to YES the
+# list will mention the files that were used to generate the documentation.
+
+SHOW_USED_FILES = YES
+
+# If the sources in your project are distributed over multiple directories
+# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy
+# in the documentation. The default is NO.
+
+SHOW_DIRECTORIES = NO
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page.
+# This will remove the Files entry from the Quick Index and from the
+# Folder Tree View (if specified). The default is YES.
+
+SHOW_FILES = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the
+# Namespaces page.
+# This will remove the Namespaces entry from the Quick Index
+# and from the Folder Tree View (if specified). The default is YES.
+
+SHOW_NAMESPACES = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of
+# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
+# provided by doxygen. Whatever the program writes to standard output
+# is used as the file version. See the manual for examples.
+
+FILE_VERSION_FILTER =
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option.
+# You can optionally specify a file name after the option, if omitted
+# DoxygenLayout.xml will be used as the name of the layout file.
+ +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = YES + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# The WARN_NO_PARAMDOC option can be enabled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. 
Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.d *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh +# *.hxx *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.dox *.py +# *.f90 *.f *.for *.vhd *.vhdl + +FILE_PATTERNS = *.[ch] + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. 
+ +RECURSIVE = NO + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. 
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or
+# directories that contain images that are included in the documentation (see
+# the \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command <filter> <input-file>, where <filter>
+# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
+# input file. Doxygen will then use the output that the filter program writes
+# to standard output.
+# If FILTER_PATTERNS is specified, this tag will be
+# ignored.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis.
+# Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match.
+# The filters are a list of the form:
+# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further
+# info on how filters are used. If FILTER_PATTERNS is empty or if
+# none of the patterns match the file name, INPUT_FILTER is applied.
+
+FILTER_PATTERNS =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will be used to filter the input files when producing source
+# files to browse (i.e. when SOURCE_BROWSER is set to YES).
+
+FILTER_SOURCE_FILES = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any)
+# and it is also possible to disable source filtering for a specific pattern
+# using *.ext= (so without naming a filter). This option only has effect when
+# FILTER_SOURCE_FILES is enabled.
+ +FILTER_SOURCE_PATTERNS = + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = NO + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. 
+ +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = YES + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). 
If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. +# Doxygen will adjust the colors in the stylesheet and background images +# according to this color. Hue is specified as an angle on a colorwheel, +# see http://en.wikipedia.org/wiki/Hue for more information. +# For instance the value 0 represents red, 60 is yellow, 120 is green, +# 180 is cyan, 240 is blue, 300 purple, and 360 is red again. +# The allowed range is 0 to 359. + +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of +# the colors in the HTML output. For a value of 0 the output will use +# grayscales only. A value of 255 will produce the most vivid colors. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to +# the luminance component of the colors in the HTML output. Values below +# 100 gradually make the output lighter, whereas values above 100 make +# the output darker. 
The value divided by 100 is the actual gamma applied, +# so 80 represents a gamma of 0.8, The value 220 represents a gamma of 2.2, +# and 100 does not change the gamma. + +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML +# page will contain the date and time when the page was generated. Setting +# this to NO can help when comparing the output of multiple runs. + +HTML_TIMESTAMP = YES + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = NO + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html +# for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. 
+
+DOCSET_FEEDNAME = "Doxygen generated docs"
+
+# When the GENERATE_DOCSET tag is set to YES, this tag specifies a string that
+# should uniquely identify the documentation set bundle. This should be a
+# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen
+# will append .docset to the name.
+
+DOCSET_BUNDLE_ID = org.doxygen.Project
+
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
+# the documentation publisher. This should be a reverse domain-name style
+# string, e.g. com.mycompany.MyDocSet.documentation.
+
+DOCSET_PUBLISHER_ID = org.doxygen.Publisher
+
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+
+DOCSET_PUBLISHER_NAME = Publisher
+
+# If the GENERATE_HTMLHELP tag is set to YES, additional index files
+# will be generated that can be used as input for tools like the
+# Microsoft HTML help workshop to generate a compiled HTML help file (.chm)
+# of the generated HTML documentation.
+
+GENERATE_HTMLHELP = NO
+
+# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can
+# be used to specify the file name of the resulting .chm file. You
+# can add a path in front of the file if the result should not be
+# written to the html output directory.
+
+CHM_FILE =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can
+# be used to specify the location (absolute path including file name) of
+# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run
+# the HTML help compiler on the generated index.hhp.
+
+HHC_LOCATION =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
+# controls if a separate .chi index file is generated (YES) or that
+# it should be included in the master .chm file (NO).
+
+GENERATE_CHI = NO
+
+# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING
+# is used to encode HtmlHelp index (hhk), content (hhc) and project file
+# content.
+ +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = NO + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated +# that can be used as input for Qt's qhelpgenerator to generate a +# Qt Compressed Help (.qch) of the generated HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to +# add. For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the +# custom filter to add. For more information please see +# +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's +# filter section matches. +# +# Qt Help Project / Filter Attributes. 
+ +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files +# will be generated, which together with the HTML files, form an Eclipse help +# plugin. To install this plugin and make it available under the help contents +# menu in Eclipse, the contents of the directory containing the HTML and XML +# files needs to be copied into the plugins directory of eclipse. The name of +# the directory within the plugins directory should be the same as +# the ECLIPSE_DOC_ID value. After copying Eclipse needs to be restarted before +# the help appears. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the eclipse help plugin. When installing the plugin +# the directory name containing the HTML and XML files should also have +# this name. + +ECLIPSE_DOC_ID = org.doxygen.Project + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [0,1..20]) +# that doxygen will group on one line in the generated HTML documentation. +# Note that a value of 0 will completely suppress the enum values from appearing in the overview section. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to YES, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser). 
+# Windows users are probably better off using the HTML help feature. + +GENERATE_TREEVIEW = NO + +# By enabling USE_INLINE_TREES, doxygen will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list. + +USE_INLINE_TREES = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open +# links to external symbols imported via tag files in a separate window. + +EXT_LINKS_IN_WINDOW = NO + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +# Use the FORMULA_TRANPARENT tag to determine whether or not the images +# generated for formulas are transparent PNGs. Transparent PNGs are +# not supported properly for IE 6.0, but are supported on all modern browsers. +# Note that when changing this option you need to delete any form_*.png files +# in the HTML output before the changes have effect. + +FORMULA_TRANSPARENT = YES + +# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax +# (see http://www.mathjax.org) which uses client side Javascript for the +# rendering instead of using prerendered bitmaps. Use this if you do not +# have LaTeX installed or if you want to formulas look prettier in the HTML +# output. When enabled you also need to install MathJax separately and +# configure the path to it using the MATHJAX_RELPATH option. + +USE_MATHJAX = NO + +# When MathJax is enabled you need to specify the location relative to the +# HTML output directory using the MATHJAX_RELPATH option. 
The destination +# directory should contain the MathJax.js script. For instance, if the mathjax +# directory is located at the same level as the HTML output directory, then +# MATHJAX_RELPATH should be ../mathjax. The default value points to the mathjax.org site, so you can quickly see the result without installing +# MathJax, but it is strongly recommended to install a local copy of MathJax +# before deployment. + +MATHJAX_RELPATH = http://www.mathjax.org/mathjax + +# When the SEARCHENGINE tag is enabled doxygen will generate a search box +# for the HTML output. The underlying search engine uses javascript +# and DHTML and should work on any modern browser. Note that when using +# HTML help (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets +# (GENERATE_DOCSET) there is already a search function so this one should +# typically be disabled. For large projects the javascript based search engine +# can be slow, then enabling SERVER_BASED_SEARCH may provide a better solution. + +SEARCHENGINE = YES + +# When the SERVER_BASED_SEARCH tag is enabled the search engine will be +# implemented using a PHP enabled web server instead of at the web client +# using Javascript. Doxygen will generate the search PHP script and index +# file to put on the web server. The advantage of the server +# based approach is that it scales better to large projects and allows +# full text search. The disadvantages are that it is more difficult to setup +# and does not have live searching capabilities. + +SERVER_BASED_SEARCH = NO + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = YES + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. 
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. +# Note that when enabling USE_PDFLATEX this option is only used for +# generating bitmaps for formulas in the HTML output, but not in the +# Makefile that is written to the output directory. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = YES + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = letter + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. 
+ +PDF_HYPERLINKS = YES + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = YES + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = YES + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = YES + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include +# source code with syntax highlighting in the LaTeX output. +# Note that which sources are shown also depends on other settings +# such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. 
The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. 
+ +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. 
Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = DOXYGEN_GETS_HOPELESSLY_CONFUSED_BY_THIS_SECTION + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition that overrules the definition found in the source code. 
+ +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all references to function-like macros +# that are alone on a line, have an all uppercase name, and do not end with a +# semicolon, because these will confuse the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. 
the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option also works with HAVE_DOT disabled, but it is recommended to +# install and use dot, since it yields more powerful graphs. + +CLASS_DIAGRAMS = YES + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = YES + +# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is +# allowed to run in parallel. When set to 0 (the default) doxygen will +# base this on the number of processors available in the system. You can set it +# explicitly to a value larger than 0 to get control over the balance +# between CPU load and processing speed. 
+ +DOT_NUM_THREADS = 0 + +# By default doxygen will write a font called Helvetica to the output +# directory and reference it in all dot files that doxygen generates. +# When you want a differently looking font you can specify the font name +# using DOT_FONTNAME. You need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. 
+ +UML_LOOK = YES + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = NO + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = NO + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = YES + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will generate a graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. 
+ +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, svg, gif or svg. +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The MSCFILE_DIRS tag can be used to specify one or more directories that +# contain msc files that are included in the documentation (see the +# \mscfile command). + +MSCFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 0 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. 
This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = NO + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES diff --git a/rp/rcynic/Makefile.in b/rp/rcynic/Makefile.in new file mode 100644 index 00000000..06f7d9cd --- /dev/null +++ b/rp/rcynic/Makefile.in @@ -0,0 +1,237 @@ +# $Id$ + +NAME = rcynic + +BIN = ${NAME} +SRC = ${NAME}.c +OBJ = ${NAME}.o + +GEN = defstack.h + +OBJS = ${OBJ} bio_f_linebreak.o + +CFLAGS = @CFLAGS@ -Wall -Wshadow -Wmissing-prototypes -Wmissing-declarations -Werror-implicit-function-declaration +LDFLAGS = @LDFLAGS@ @LD_STATIC_FLAG@ +LIBS = @LIBS@ + +AWK = @AWK@ +SORT = @SORT@ +PYTHON = @PYTHON@ +RRDTOOL = @RRDTOOL@ +INSTALL = @INSTALL@ + +abs_top_srcdir = @abs_top_srcdir@ +abs_top_builddir = @abs_top_builddir@ + +prefix = @prefix@ +exec_prefix = @exec_prefix@ +datarootdir = @datarootdir@ +datadir = @datadir@ +localstatedir = @localstatedir@ +sharedstatedir = @sharedstatedir@ +sysconfdir = @sysconfdir@ +bindir = @bindir@ +sbindir = @sbindir@ +libexecdir = @libexecdir@ +sysconfdir = @sysconfdir@ + +abs_builddir = @abs_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +abs_top_builddir = 
@abs_top_builddir@ +srcdir = @srcdir@ + +RCYNIC_BIN_RCYNIC = @RCYNIC_BIN_RCYNIC@ +RCYNIC_CONF_DATA = @RCYNIC_CONF_DATA@ +RCYNIC_CONF_FILE = @RCYNIC_CONF_FILE@ +RCYNIC_CONF_RSYNC = @RCYNIC_CONF_RSYNC@ +RCYNIC_CONF_TA_DIR = @RCYNIC_CONF_TA_DIR@ +RCYNIC_CRON_USER = @RCYNIC_CRON_USER@ +RCYNIC_DATA_DIR = ${RCYNIC_DIR}/data +RCYNIC_DIR = @RCYNIC_DIR@ +RCYNIC_DIRS = ${RCYNIC_TA_DIR} ${RCYNIC_JAIL_DIRS} ${RCYNIC_DATA_DIR} ${RPKIRTR_DIR} ${RPKIRTR_DIR}/sockets +RCYNIC_GECOS = RPKI Validation System +RCYNIC_GROUP = rcynic +RCYNIC_HTML_DIR = @RCYNIC_HTML_DIR@ +RCYNIC_INSTALL_TARGETS = @RCYNIC_INSTALL_TARGETS@ +RCYNIC_JAIL_DIRS = @RCYNIC_JAIL_DIRS@ +RCYNIC_STATIC_RSYNC = @RCYNIC_STATIC_RSYNC@ +RCYNIC_TA_DIR = @RCYNIC_TA_DIR@ +RCYNIC_USER = rcynic +RPKIRTR_DIR = ${RCYNIC_DIR}/rpki-rtr +RPKIRTR_GECOS = RPKI router server +RPKIRTR_GROUP = rpkirtr +RPKIRTR_MODE = 775 +RPKIRTR_USER = rpkirtr + +SCRIPTS = rcynic-text rcynic-html rcynic-svn validation_status rcynic-cron + +all: ${BIN} ${SCRIPTS} ${RCYNIC_STATIC_RSYNC} + +clean: + if test -r static-rsync/Makefile; then cd static-rsync; ${MAKE} $@; fi + rm -f ${BIN} ${OBJS} ${SCRIPTS} + +${OBJ}: ${SRC} ${GEN} + +${BIN}: ${OBJS} + ${CC} ${CFLAGS} -o $@ ${OBJS} ${LDFLAGS} ${LIBS} + +${GEN}: ${SRC} + ${PYTHON} ${abs_top_srcdir}/buildtools/defstack.py ${SRC} >$@.tmp + mv $@.tmp $@ + +COMPILE_PYTHON = \ + AC_PYTHON_INTERPRETER='${PYTHON}' \ + AC_RRDTOOL_BINARY='${RRDTOOL}' \ + ${PYTHON} ${abs_top_srcdir}/buildtools/make-rcynic-script.py <$? >$@; \ + chmod 755 $@ + +COMPILE_PYTHON_CRON = \ + AC_PYTHON_INTERPRETER='${PYTHON}' \ + AC_RCYNIC_USER='${RCYNIC_USER}' \ + AC_RCYNIC_DIR='${RCYNIC_DIR}' \ + AC_bindir='${bindir}' \ + AC_sbindir='${sbindir}' \ + AC_sysconfdir='${sysconfdir}' \ + AC_libexecdir='${libexecdir}' \ + AC_RCYNIC_HTML_DIR='${RCYNIC_HTML_DIR}' \ + ${PYTHON} ${abs_top_srcdir}/buildtools/make-rcynic-script.py <$? 
>$@; \ + chmod 755 $@ + +rcynic-text: rcynic-text.py + ${COMPILE_PYTHON} + +rcynic-html: rcynic-html.py + ${COMPILE_PYTHON} + +rcynic-svn: rcynic-svn.py + ${COMPILE_PYTHON} + +validation_status: validation_status.py + ${COMPILE_PYTHON} + +rcynic-cron: rcynic-cron.py + ${COMPILE_PYTHON_CRON} + +tags: TAGS + +TAGS: ${SRC} ${GEN} + etags ${SRC} ${GEN} + +test: ${BIN} + if test -r rcynic.conf; \ + then \ + ./${BIN} -j 0 && \ + test -r rcynic.xml && \ + echo && \ + ./rcynic-text rcynic.xml; \ + else \ + echo No rcynic.conf, skipping test; \ + fi + +uninstall deinstall: + @echo Sorry, automated deinstallation of rcynic is not implemented yet + +distclean: clean + if test -r static-rsync/Makefile; then cd static-rsync; ${MAKE} $@; fi + rm -f Makefile + +static-rsync/rsync: + @echo "Building static rsync for use in chroot jail" + cd static-rsync; ${MAKE} all + +install: all ${RCYNIC_INSTALL_TARGETS} + +install-always: \ + install-directories install-scripts install-rcynic install-rcynic-conf + +install-postconf: \ + install-user-and-group install-directory-ownership install-crontab + +install-jailed: \ + install-static-rsync install-shared-libraries install-rc-scripts + +install-directories: ${RCYNIC_DIRS} + +${RCYNIC_DIRS} ${DESTDIR}${bindir} ${DESTDIR}${sysconfdir}: + ${INSTALL} -v -d $@ + +install-directory-ownership: ${RCYNIC_DATA_DIR} ${RPKIRTR_DIR} ${RPKIRTR_DIR}/sockets + chown ${RCYNIC_USER}:${RCYNIC_GROUP} ${RCYNIC_DATA_DIR} ${RPKIRTR_DIR} + chown ${RPKIRTR_USER}:${RCYNIC_GROUP} ${RPKIRTR_DIR}/sockets + chmod ${RPKIRTR_MODE} ${RPKIRTR_DIR}/sockets + +install-rcynic-conf: ${RCYNIC_CONF_FILE} + +${RCYNIC_CONF_FILE}: + @echo + @echo Found no ${RCYNIC_CONF_FILE}, creating basic config and installing default trust anchor locators. + @echo You might want to edit this. + @echo + ${INSTALL} -v -d ${RCYNIC_TA_DIR} + ${INSTALL} -v -p -m 444 sample-trust-anchors/*.tal ${RCYNIC_TA_DIR} + @echo > $@.tmp '# Basic rcynic configuration file with default trust anchors.' 
+ @echo >>$@.tmp '# See documentation for details.' + @echo >>$@.tmp '' + @echo >>$@.tmp '[rcynic]' + @echo >>$@.tmp 'rsync-program = ${RCYNIC_CONF_RSYNC}' + @echo >>$@.tmp 'authenticated = ${RCYNIC_CONF_DATA}/authenticated' + @echo >>$@.tmp 'unauthenticated = ${RCYNIC_CONF_DATA}/unauthenticated' + @echo >>$@.tmp 'xml-summary = ${RCYNIC_CONF_DATA}/rcynic.xml' + @echo >>$@.tmp 'jitter = 600' + @echo >>$@.tmp 'max-parallel-fetches = 8' + @echo >>$@.tmp 'use-syslog = true' + @echo >>$@.tmp 'log-level = log_usage_err' + @echo >>$@.tmp 'trust-anchor-directory = ${RCYNIC_CONF_TA_DIR}' + @chmod 444 $@.tmp + @mv -f $@.tmp $@ + +install-rcynic: ${RCYNIC_BIN_RCYNIC} + +${RCYNIC_BIN_RCYNIC}: ${BIN} + ${INSTALL} -p -m 555 ${BIN} $@ + +install-static-rsync: ${RCYNIC_DIR}/bin/rsync + +${RCYNIC_DIR}/bin/rsync: static-rsync/rsync + ${INSTALL} -p -m 555 static-rsync/rsync $@ + +install-scripts: \ + ${DESTDIR}${bindir} \ + ${DESTDIR}${bindir}/rcynic-text \ + ${DESTDIR}${bindir}/rcynic-html \ + ${DESTDIR}${bindir}/rcynic-svn \ + ${DESTDIR}${bindir}/rcynic-cron \ + ${DESTDIR}${bindir}/validation_status + +${DESTDIR}${bindir}/rcynic-text: rcynic-text + ${INSTALL} -p -m 555 rcynic-text $@ + +${DESTDIR}${bindir}/rcynic-html: rcynic-html + ${INSTALL} -p -m 555 rcynic-html $@ + +${DESTDIR}${bindir}/rcynic-svn: rcynic-svn + ${INSTALL} -p -m 555 rcynic-svn $@ + +${DESTDIR}${bindir}/rcynic-cron: rcynic-cron + ${INSTALL} -p -m 555 rcynic-cron $@ + +${DESTDIR}${bindir}/validation_status: validation_status + ${INSTALL} -p -m 555 validation_status $@ + +.FORCE: + +install-crontab: .FORCE + @if test "X`/usr/bin/crontab -l -u ${RCYNIC_CRON_USER} 2>/dev/null`" != "X"; \ + then \ + echo "${RCYNIC_CRON_USER} already has a crontab, leaving it alone"; \ + else \ + echo "Setting up ${RCYNIC_CRON_USER}'s crontab to run rcynic-cron script"; \ + ${AWK} -v t=`hexdump -n 2 -e '"%u\n"' /dev/random` '\ + BEGIN {printf "MAILTO=root\n%u * * * *\texec ${bindir}/rcynic-cron\n", t % 60}' | \ + /usr/bin/crontab -u 
${RCYNIC_CRON_USER} -; \ + fi + +# Platform-specific rules below here. + +@RCYNIC_MAKE_RULES@ diff --git a/rp/rcynic/README b/rp/rcynic/README new file mode 100644 index 00000000..ecc92ac5 --- /dev/null +++ b/rp/rcynic/README @@ -0,0 +1,13 @@ +$Id$ + +"Cynical rsync" -- fetch and validate RPKI certificates. + +This is the primary RPKI relying party validation tool. + +See: + +- The primary documentation at http://trac.rpki.net/ + +- The PDF manual in ../doc/manual.pdf, or + +- The flat text page ../doc/doc.RPKI.RP.rcynic diff --git a/rp/rcynic/bio_f_linebreak.c b/rp/rcynic/bio_f_linebreak.c new file mode 100644 index 00000000..c2d9fb4d --- /dev/null +++ b/rp/rcynic/bio_f_linebreak.c @@ -0,0 +1,268 @@ +/* $Id$ */ + +/** @file bio_f_linebreak.c + * + * This implements a trivial filter BIO (see OpenSSL manual) which + * does one rather silly thing: on read, it inserts line break into + * the input stream at regular intervals. + * + * You might reasonably ask why anyone would want such a thing. The + * answer is that OpenSSL's Base64 filter BIO has two input modes, + * neither of which is really useful for reading generalized Base64 + * input. In one mode, it requires line breaks at most every 79 + * characters; in the other mode, it requires that there to be no + * whitespace of any kind at all. These modes work for the things + * that OpenSSL itself does with Base64 decoding, but fail miserably + * when used to read free-form Base64 text. + * + * The real solution would be to rewrite OpenSSL's Base64 filter to + * support a third mode in which it accepts generalized Base64 text, + * but that's been suggested before and nothing has been done about + * it, probably because OpenSSL's Base64 implementation is completely + * line-oriented and rather nasty. + * + * So this filter is a stop-gap to let us get the job done. Since it + * uses a (relatively) well-defined OpenSSL internal API, it should be + * reasonably stable. 
+ * + * 98% of the code in this module is derived from "null filter" BIO + * that ships with OpenSSL (BIO_TYPE_NULL_FILTER), so I consider this + * to be a derivative work, thus am leaving it under OpenSSL's license. + */ + +/* Original crypto/bio/bf_null.c code was: + * + * Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + * All rights reserved. + * + * This package is an SSL implementation written + * by Eric Young (eay@cryptsoft.com). + * The implementation was written so as to conform with Netscapes SSL. + * + * This library is free for commercial and non-commercial use as long as + * the following conditions are aheared to. The following conditions + * apply to all code found in this distribution, be it the RC4, RSA, + * lhash, DES, etc., code; not just the SSL code. The SSL documentation + * included with this distribution is covered by the same copyright terms + * except that the holder is Tim Hudson (tjh@cryptsoft.com). + * + * Copyright remains Eric Young's, and as such any Copyright notices in + * the code are not to be removed. + * If this package is used in a product, Eric Young should be given attribution + * as the author of the parts of the library used. + * This can be in the form of a textual message at program startup or + * in documentation (online or textual) provided with the package. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. 
All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * "This product includes cryptographic software written by + * Eric Young (eay@cryptsoft.com)" + * The word 'cryptographic' can be left out if the rouines from the library + * being used are not cryptographic related :-). + * 4. If you include any Windows specific code (or a derivative thereof) from + * the apps directory (application code) you must include an acknowledgement: + * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + * + * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * + * The licence and distribution terms for any publically available version or + * derivative of this code cannot be changed. i.e. this code cannot simply be + * copied and put under another distribution licence + * [including the GNU Public Licence.] 
+ */ + +#include +#include +#include +#include + +#include "bio_f_linebreak.h" + +#ifndef BIO_TYPE_LINEBREAK_FILTER +#define BIO_TYPE_LINEBREAK_FILTER (99 | BIO_TYPE_FILTER) +#endif + +#ifndef LINEBREAK_MAX_LINE +#define LINEBREAK_MAX_LINE 72 /* We break anything longer than this */ +#endif + +static int linebreak_new(BIO *b) +{ + b->init = 1; + b->ptr = NULL; + b->flags = 0; + b->num = 0; + return 1; +} + +static int linebreak_free(BIO *b) +{ + return b != NULL; +} + +static int linebreak_read(BIO *b, char *out, int outl) +{ + int ret = 0, want, n, i; + + if (out == NULL || b->next_bio == NULL || outl <= 0) + return 0; + + while (outl > 0) { + + if (b->num >= LINEBREAK_MAX_LINE) { + b->num = 0; + *out++ = '\n'; + outl--; + ret++; + continue; + } + + want = LINEBREAK_MAX_LINE - b->num; + if (want > outl) + want = outl; + + n = BIO_read(b->next_bio, out, want); + + BIO_clear_retry_flags(b); + BIO_copy_next_retry(b); + + if (n > 0) { + for (i = n - 1; i >= 0; i--) + if (out[i] == '\n') + break; + if (i >= 0) + b->num = n - i - 1; + else + b->num += n; + out += n; + outl -= n; + ret += n; + continue; + } + + if (ret == 0) + ret = n; + break; + } + + return ret; +} + +static int linebreak_write(BIO *b, const char *in, int inl) +{ + int ret = 0; + + if (in == NULL || inl <= 0 || b->next_bio == NULL) + return 0; + + ret = BIO_write(b->next_bio, in, inl); + + BIO_clear_retry_flags(b); + BIO_copy_next_retry(b); + + return ret; +} + +static long linebreak_ctrl(BIO *b, int cmd, long num, void *ptr) +{ + long ret; + + if (b->next_bio == NULL) + return 0; + + switch (cmd) { + + case BIO_C_DO_STATE_MACHINE: + BIO_clear_retry_flags(b); + ret = BIO_ctrl(b->next_bio, cmd, num, ptr); + BIO_copy_next_retry(b); + return ret; + + case BIO_CTRL_DUP: + return 0; + + default: + return BIO_ctrl(b->next_bio, cmd, num, ptr); + + } +} + +static long linebreak_callback_ctrl(BIO *b, int cmd, bio_info_cb *cb) +{ + if (b->next_bio == NULL) + return 0; + else + return 
BIO_callback_ctrl(b->next_bio, cmd, cb); +} + +static int linebreak_puts(BIO *b, const char *str) +{ + if (b->next_bio == NULL) + return 0; + else + return BIO_puts(b->next_bio, str); +} + +static BIO_METHOD methods_linebreak = { + BIO_TYPE_LINEBREAK_FILTER, + "Linebreak filter", + linebreak_write, + linebreak_read, + linebreak_puts, + NULL, /* No linebreak_gets() */ + linebreak_ctrl, + linebreak_new, + linebreak_free, + linebreak_callback_ctrl, +}; + +BIO_METHOD *BIO_f_linebreak(void) +{ + return &methods_linebreak; +} + + +#ifdef __BIO_F_LINEBREAK_UNIT_TEST__ + +int main (int argc, char *argv[]) +{ + BIO *ich = BIO_new_fd(0, 1); + BIO *och = BIO_new_fd(1, 1); + BIO *fch = BIO_new(BIO_f_linebreak()); + char buffer[4098]; + int n; + + if (ich == NULL || och == NULL || fch == NULL) + return 1; + + BIO_push(fch, ich); + ich = fch; + fch = NULL; + + while ((n = BIO_read(ich, buffer, sizeof(buffer))) > 0) + BIO_write(och, buffer, n); + + BIO_free_all(ich); + BIO_free_all(och); + return 0; +} + +#endif diff --git a/rp/rcynic/bio_f_linebreak.h b/rp/rcynic/bio_f_linebreak.h new file mode 100644 index 00000000..b5becfa6 --- /dev/null +++ b/rp/rcynic/bio_f_linebreak.h @@ -0,0 +1,10 @@ +/* $Id$ */ + +#ifndef __BIO_F_LINEBREAK__ +#define __BIO_F_LINEBREAK__ + +#include + +BIO_METHOD *BIO_f_linebreak(void); + +#endif /* __BIO_F_LINEBREAK__ */ diff --git a/rp/rcynic/defstack.h b/rp/rcynic/defstack.h new file mode 100644 index 00000000..370c8129 --- /dev/null +++ b/rp/rcynic/defstack.h @@ -0,0 +1,134 @@ +/* + * Automatically generated, do not edit. + * Generator $Id: defstack.py 4878 2012-11-15 22:13:53Z sra $ + */ + +#ifndef __RCYNIC_C__DEFSTACK_H__ +#define __RCYNIC_C__DEFSTACK_H__ + +/* + * Safestack macros for validation_status_t. 
+ */ +#define sk_validation_status_t_new(st) SKM_sk_new(validation_status_t, (st)) +#define sk_validation_status_t_new_null() SKM_sk_new_null(validation_status_t) +#define sk_validation_status_t_free(st) SKM_sk_free(validation_status_t, (st)) +#define sk_validation_status_t_num(st) SKM_sk_num(validation_status_t, (st)) +#define sk_validation_status_t_value(st, i) SKM_sk_value(validation_status_t, (st), (i)) +#define sk_validation_status_t_set(st, i, val) SKM_sk_set(validation_status_t, (st), (i), (val)) +#define sk_validation_status_t_zero(st) SKM_sk_zero(validation_status_t, (st)) +#define sk_validation_status_t_push(st, val) SKM_sk_push(validation_status_t, (st), (val)) +#define sk_validation_status_t_unshift(st, val) SKM_sk_unshift(validation_status_t, (st), (val)) +#define sk_validation_status_t_find(st, val) SKM_sk_find(validation_status_t, (st), (val)) +#define sk_validation_status_t_find_ex(st, val) SKM_sk_find_ex(validation_status_t, (st), (val)) +#define sk_validation_status_t_delete(st, i) SKM_sk_delete(validation_status_t, (st), (i)) +#define sk_validation_status_t_delete_ptr(st, ptr) SKM_sk_delete_ptr(validation_status_t, (st), (ptr)) +#define sk_validation_status_t_insert(st, val, i) SKM_sk_insert(validation_status_t, (st), (val), (i)) +#define sk_validation_status_t_set_cmp_func(st, cmp) SKM_sk_set_cmp_func(validation_status_t, (st), (cmp)) +#define sk_validation_status_t_dup(st) SKM_sk_dup(validation_status_t, st) +#define sk_validation_status_t_pop_free(st, free_func) SKM_sk_pop_free(validation_status_t, (st), (free_func)) +#define sk_validation_status_t_shift(st) SKM_sk_shift(validation_status_t, (st)) +#define sk_validation_status_t_pop(st) SKM_sk_pop(validation_status_t, (st)) +#define sk_validation_status_t_sort(st) SKM_sk_sort(validation_status_t, (st)) +#define sk_validation_status_t_is_sorted(st) SKM_sk_is_sorted(validation_status_t, (st)) + +/* + * Safestack macros for walk_ctx_t. 
+ */ +#define sk_walk_ctx_t_new(st) SKM_sk_new(walk_ctx_t, (st)) +#define sk_walk_ctx_t_new_null() SKM_sk_new_null(walk_ctx_t) +#define sk_walk_ctx_t_free(st) SKM_sk_free(walk_ctx_t, (st)) +#define sk_walk_ctx_t_num(st) SKM_sk_num(walk_ctx_t, (st)) +#define sk_walk_ctx_t_value(st, i) SKM_sk_value(walk_ctx_t, (st), (i)) +#define sk_walk_ctx_t_set(st, i, val) SKM_sk_set(walk_ctx_t, (st), (i), (val)) +#define sk_walk_ctx_t_zero(st) SKM_sk_zero(walk_ctx_t, (st)) +#define sk_walk_ctx_t_push(st, val) SKM_sk_push(walk_ctx_t, (st), (val)) +#define sk_walk_ctx_t_unshift(st, val) SKM_sk_unshift(walk_ctx_t, (st), (val)) +#define sk_walk_ctx_t_find(st, val) SKM_sk_find(walk_ctx_t, (st), (val)) +#define sk_walk_ctx_t_find_ex(st, val) SKM_sk_find_ex(walk_ctx_t, (st), (val)) +#define sk_walk_ctx_t_delete(st, i) SKM_sk_delete(walk_ctx_t, (st), (i)) +#define sk_walk_ctx_t_delete_ptr(st, ptr) SKM_sk_delete_ptr(walk_ctx_t, (st), (ptr)) +#define sk_walk_ctx_t_insert(st, val, i) SKM_sk_insert(walk_ctx_t, (st), (val), (i)) +#define sk_walk_ctx_t_set_cmp_func(st, cmp) SKM_sk_set_cmp_func(walk_ctx_t, (st), (cmp)) +#define sk_walk_ctx_t_dup(st) SKM_sk_dup(walk_ctx_t, st) +#define sk_walk_ctx_t_pop_free(st, free_func) SKM_sk_pop_free(walk_ctx_t, (st), (free_func)) +#define sk_walk_ctx_t_shift(st) SKM_sk_shift(walk_ctx_t, (st)) +#define sk_walk_ctx_t_pop(st) SKM_sk_pop(walk_ctx_t, (st)) +#define sk_walk_ctx_t_sort(st) SKM_sk_sort(walk_ctx_t, (st)) +#define sk_walk_ctx_t_is_sorted(st) SKM_sk_is_sorted(walk_ctx_t, (st)) + +/* + * Safestack macros for rsync_ctx_t. 
+ */ +#define sk_rsync_ctx_t_new(st) SKM_sk_new(rsync_ctx_t, (st)) +#define sk_rsync_ctx_t_new_null() SKM_sk_new_null(rsync_ctx_t) +#define sk_rsync_ctx_t_free(st) SKM_sk_free(rsync_ctx_t, (st)) +#define sk_rsync_ctx_t_num(st) SKM_sk_num(rsync_ctx_t, (st)) +#define sk_rsync_ctx_t_value(st, i) SKM_sk_value(rsync_ctx_t, (st), (i)) +#define sk_rsync_ctx_t_set(st, i, val) SKM_sk_set(rsync_ctx_t, (st), (i), (val)) +#define sk_rsync_ctx_t_zero(st) SKM_sk_zero(rsync_ctx_t, (st)) +#define sk_rsync_ctx_t_push(st, val) SKM_sk_push(rsync_ctx_t, (st), (val)) +#define sk_rsync_ctx_t_unshift(st, val) SKM_sk_unshift(rsync_ctx_t, (st), (val)) +#define sk_rsync_ctx_t_find(st, val) SKM_sk_find(rsync_ctx_t, (st), (val)) +#define sk_rsync_ctx_t_find_ex(st, val) SKM_sk_find_ex(rsync_ctx_t, (st), (val)) +#define sk_rsync_ctx_t_delete(st, i) SKM_sk_delete(rsync_ctx_t, (st), (i)) +#define sk_rsync_ctx_t_delete_ptr(st, ptr) SKM_sk_delete_ptr(rsync_ctx_t, (st), (ptr)) +#define sk_rsync_ctx_t_insert(st, val, i) SKM_sk_insert(rsync_ctx_t, (st), (val), (i)) +#define sk_rsync_ctx_t_set_cmp_func(st, cmp) SKM_sk_set_cmp_func(rsync_ctx_t, (st), (cmp)) +#define sk_rsync_ctx_t_dup(st) SKM_sk_dup(rsync_ctx_t, st) +#define sk_rsync_ctx_t_pop_free(st, free_func) SKM_sk_pop_free(rsync_ctx_t, (st), (free_func)) +#define sk_rsync_ctx_t_shift(st) SKM_sk_shift(rsync_ctx_t, (st)) +#define sk_rsync_ctx_t_pop(st) SKM_sk_pop(rsync_ctx_t, (st)) +#define sk_rsync_ctx_t_sort(st) SKM_sk_sort(rsync_ctx_t, (st)) +#define sk_rsync_ctx_t_is_sorted(st) SKM_sk_is_sorted(rsync_ctx_t, (st)) + +/* + * Safestack macros for rsync_history_t. 
+ */ +#define sk_rsync_history_t_new(st) SKM_sk_new(rsync_history_t, (st)) +#define sk_rsync_history_t_new_null() SKM_sk_new_null(rsync_history_t) +#define sk_rsync_history_t_free(st) SKM_sk_free(rsync_history_t, (st)) +#define sk_rsync_history_t_num(st) SKM_sk_num(rsync_history_t, (st)) +#define sk_rsync_history_t_value(st, i) SKM_sk_value(rsync_history_t, (st), (i)) +#define sk_rsync_history_t_set(st, i, val) SKM_sk_set(rsync_history_t, (st), (i), (val)) +#define sk_rsync_history_t_zero(st) SKM_sk_zero(rsync_history_t, (st)) +#define sk_rsync_history_t_push(st, val) SKM_sk_push(rsync_history_t, (st), (val)) +#define sk_rsync_history_t_unshift(st, val) SKM_sk_unshift(rsync_history_t, (st), (val)) +#define sk_rsync_history_t_find(st, val) SKM_sk_find(rsync_history_t, (st), (val)) +#define sk_rsync_history_t_find_ex(st, val) SKM_sk_find_ex(rsync_history_t, (st), (val)) +#define sk_rsync_history_t_delete(st, i) SKM_sk_delete(rsync_history_t, (st), (i)) +#define sk_rsync_history_t_delete_ptr(st, ptr) SKM_sk_delete_ptr(rsync_history_t, (st), (ptr)) +#define sk_rsync_history_t_insert(st, val, i) SKM_sk_insert(rsync_history_t, (st), (val), (i)) +#define sk_rsync_history_t_set_cmp_func(st, cmp) SKM_sk_set_cmp_func(rsync_history_t, (st), (cmp)) +#define sk_rsync_history_t_dup(st) SKM_sk_dup(rsync_history_t, st) +#define sk_rsync_history_t_pop_free(st, free_func) SKM_sk_pop_free(rsync_history_t, (st), (free_func)) +#define sk_rsync_history_t_shift(st) SKM_sk_shift(rsync_history_t, (st)) +#define sk_rsync_history_t_pop(st) SKM_sk_pop(rsync_history_t, (st)) +#define sk_rsync_history_t_sort(st) SKM_sk_sort(rsync_history_t, (st)) +#define sk_rsync_history_t_is_sorted(st) SKM_sk_is_sorted(rsync_history_t, (st)) + +/* + * Safestack macros for task_t. 
+ */ +#define sk_task_t_new(st) SKM_sk_new(task_t, (st)) +#define sk_task_t_new_null() SKM_sk_new_null(task_t) +#define sk_task_t_free(st) SKM_sk_free(task_t, (st)) +#define sk_task_t_num(st) SKM_sk_num(task_t, (st)) +#define sk_task_t_value(st, i) SKM_sk_value(task_t, (st), (i)) +#define sk_task_t_set(st, i, val) SKM_sk_set(task_t, (st), (i), (val)) +#define sk_task_t_zero(st) SKM_sk_zero(task_t, (st)) +#define sk_task_t_push(st, val) SKM_sk_push(task_t, (st), (val)) +#define sk_task_t_unshift(st, val) SKM_sk_unshift(task_t, (st), (val)) +#define sk_task_t_find(st, val) SKM_sk_find(task_t, (st), (val)) +#define sk_task_t_find_ex(st, val) SKM_sk_find_ex(task_t, (st), (val)) +#define sk_task_t_delete(st, i) SKM_sk_delete(task_t, (st), (i)) +#define sk_task_t_delete_ptr(st, ptr) SKM_sk_delete_ptr(task_t, (st), (ptr)) +#define sk_task_t_insert(st, val, i) SKM_sk_insert(task_t, (st), (val), (i)) +#define sk_task_t_set_cmp_func(st, cmp) SKM_sk_set_cmp_func(task_t, (st), (cmp)) +#define sk_task_t_dup(st) SKM_sk_dup(task_t, st) +#define sk_task_t_pop_free(st, free_func) SKM_sk_pop_free(task_t, (st), (free_func)) +#define sk_task_t_shift(st) SKM_sk_shift(task_t, (st)) +#define sk_task_t_pop(st) SKM_sk_pop(task_t, (st)) +#define sk_task_t_sort(st) SKM_sk_sort(task_t, (st)) +#define sk_task_t_is_sorted(st) SKM_sk_is_sorted(task_t, (st)) + +#endif /* __RCYNIC_C__DEFSTACK_H__ */ diff --git a/rp/rcynic/make-tal.sh b/rp/rcynic/make-tal.sh new file mode 100755 index 00000000..854a76b9 --- /dev/null +++ b/rp/rcynic/make-tal.sh @@ -0,0 +1,42 @@ +#!/bin/sh - +# $Id$ +# +# Copyright (C) 2010 Internet Systems Consortium, Inc. ("ISC") +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +# Generate a trust anchor locator (TAL) given the rsync URI for a +# self-signed RFC 3779 certificate. +# +# Usage: make-tal.sh uri [local_copy_of_certificate] +# +# The optional second parameter is the name of a local copy of the +# certificate to be checked against the copy retrieved from the URI; +# if present, this should be a local X.509 file in DER format. + +case "$1" in rsync://*) :;; *) echo 1>&2 "\"$1\" is not a rsync URI"; exit 1;; esac + +tmpfile="make-tal.tmp.$$" +trap "rm -f $tmpfile" 0 1 2 15 + +rsync "$1" "$tmpfile" || exit + +if test -n "$2" +then + diff -q "$tmpfile" "$2" || exit +fi + +echo "$1" +echo +openssl x509 -inform DER -in "$tmpfile" -pubkey -noout | +awk '!/-----(BEGIN|END)/' diff --git a/rp/rcynic/rc-scripts/darwin/RCynic b/rp/rcynic/rc-scripts/darwin/RCynic new file mode 100755 index 00000000..d486a3c3 --- /dev/null +++ b/rp/rcynic/rc-scripts/darwin/RCynic @@ -0,0 +1,42 @@ +#!/bin/sh - +# +# $Id$ +# +. /etc/rc.common + +name="rcynic" +start_cmd="rcynic_start" +stop_cmd="rcynic_stop" + +: ${rcynic_dir="/var/rcynic"} + +StartService() +{ + /sbin/umount "${rcynic_dir}/dev" 2>/dev/null + + if ! /sbin/mount_devfs devfs "${rcynic_dir}/dev"; then + echo "Mounting devfs on ${rcynic_dir}/dev failed..." + exit 1 + fi + + for i in /etc/localtime /etc/resolv.conf; do + j="${rcynic_dir}${i}" + if /bin/test -r "$i" && ! 
/usr/bin/cmp -s "$i" "$j"; then + /usr/bin/install -m 444 -o root -g wheel -p "$i" "$j" + fi + done + + /bin/ln -f /var/run/mDNSResponder "${rcynic_dir}/var/run/mDNSResponder" +} + +StopService() +{ + /sbin/umount "${rcynic_dir}/dev" 2>/dev/null +} + +RestartService() +{ + StartService +} + +RunService "$1" diff --git a/rp/rcynic/rc-scripts/darwin/StartupParameters.plist b/rp/rcynic/rc-scripts/darwin/StartupParameters.plist new file mode 100644 index 00000000..ca46b676 --- /dev/null +++ b/rp/rcynic/rc-scripts/darwin/StartupParameters.plist @@ -0,0 +1,19 @@ + + + + + Description + RCynic Setup + OrderPreference + None + Provides + + RCynic + + Uses + + Network + Resolver + + + diff --git a/rp/rcynic/rc-scripts/freebsd/rc.d.rcynic b/rp/rcynic/rc-scripts/freebsd/rc.d.rcynic new file mode 100755 index 00000000..9b7aa545 --- /dev/null +++ b/rp/rcynic/rc-scripts/freebsd/rc.d.rcynic @@ -0,0 +1,44 @@ +#!/bin/sh - +# +# $Id$ +# +# PROVIDE: rcynic +# REQUIRE: DAEMON +# KEYWORD: nojail + +. /etc/rc.subr + +name="rcynic" +start_cmd="rcynic_start" +stop_cmd="rcynic_stop" + +: ${rcynic_dir="/var/rcynic"} + +rcynic_start() +{ + /sbin/umount "${rcynic_dir}/dev" 2>/dev/null + + if ! /sbin/mount -t devfs dev "${rcynic_dir}/dev"; then + echo "Mounting devfs on ${rcynic_dir}/dev failed..." + exit 1 + fi + + /sbin/devfs -m "${rcynic_dir}/dev" rule apply hide + /sbin/devfs -m "${rcynic_dir}/dev" rule apply path null unhide + /sbin/devfs -m "${rcynic_dir}/dev" rule apply path random unhide + + for i in /etc/localtime /etc/resolv.conf; do + j="${rcynic_dir}${i}" + if /bin/test -r "$i" && ! 
/usr/bin/cmp -s "$i" "$j"; then + /usr/bin/install -m 444 -o root -g wheel -p "$i" "$j" + fi + done +} + +rcynic_stop() +{ + /sbin/umount "${rcynic_dir}/dev" 2>/dev/null +} + +load_rc_config $name +run_rc_command "$1" diff --git a/rp/rcynic/rcynic-cron b/rp/rcynic/rcynic-cron new file mode 100755 index 00000000..fbe1ebeb --- /dev/null +++ b/rp/rcynic/rcynic-cron @@ -0,0 +1,106 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Cron job for rcynic and rtr-origin in stock configuration. + +Locking code here works like FreeBSD's lockf(1) utility given -k and +-t 0 options, which is both the sanest and simplest combination for +our purposes. In theory this is portable to any Unix-like system. 
+""" + +import os +import sys +import pwd +import fcntl +import errno +import argparse + +def run(*cmd, **kwargs): + chroot_this = kwargs.pop("chroot_this", False) + cwd = kwargs.pop("cwd", None) + pid = os.fork() + if pid == 0: + if chroot_this: + os.chdir(ac_rcynic_dir) + elif cwd is not None: + os.chdir(cwd) + if we_are_root: + os.initgroups(pw.pw_name, pw.pw_gid) + if chroot_this: + os.chroot(ac_rcynic_dir) + if we_are_root: + os.setgid(pw.pw_gid) + os.setuid(pw.pw_uid) + os.closerange(3, os.sysconf("SC_OPEN_MAX")) + os.execvp(cmd[0], cmd) + os._exit(1) + else: + status = os.waitpid(pid, 0)[1] + if status == 0: + return + elif os.WIFSIGNALED(status): + sys.exit("Process %s exited with signal %s" % (" ".join(cmd), os.WTERMSIG(status))) + elif os.WIFEXITED(status): + sys.exit("Program %s exited with status %s" % (" ".join(cmd), os.WEXITSTATUS(status))) + else: + sys.exit("Program %s exited for unknown reason %s" % (" ".join(cmd), status)) + +parser = argparse.ArgumentParser(description = __doc__) +parser.add_argument("--chroot", action = "store_true", help = "run chrooted") +args = parser.parse_args() + +we_are_root = os.getuid() == 0 + +if args.chroot and not we_are_root: + sys.exit("Only root can --chroot") + +try: + pw = pwd.getpwnam(ac_rcynic_user) +except KeyError: + sys.exit("Could not find passwd entry for user %s" % ac_rcynic_user) + +try: + lock = os.open(os.path.join(ac_rcynic_dir, "data/lock"), os.O_RDONLY | os.O_CREAT | os.O_NONBLOCK, 0666) + fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB) + if we_are_root: + os.fchown(lock, pw.pw_uid, pw.pw_gid) +except (IOError, OSError), e: + if e.errno == errno.EAGAIN: + sys.exit(0) # Another instance of this script is already running, exit silently + else: + sys.exit("Error %r opening lock %r" % (e.strerror, os.path.join(ac_rcynic_dir, "data/lock"))) + +if args.chroot: + run("/bin/rcynic", "-c", "/etc/rcynic.conf", chroot_this = True) +else: + run(os.path.join(ac_bindir, "rcynic"), "-c", 
os.path.join(ac_sysconfdir, "rcynic.conf")) + +run(os.path.join(ac_bindir, "rtr-origin"), + "--cronjob", + os.path.join(ac_rcynic_dir, "data/authenticated"), + cwd = os.path.join(ac_rcynic_dir, "rpki-rtr")) + +prog = os.path.join(ac_libexecdir, "rpkigui-rcynic") +if os.path.exists(prog): + run(prog) + +if ac_rcynic_html_dir and os.path.exists(os.path.dirname(ac_rcynic_html_dir)): + run(os.path.join(ac_bindir, "rcynic-html"), + os.path.join(ac_rcynic_dir, "data/rcynic.xml"), + ac_rcynic_html_dir) diff --git a/rp/rcynic/rcynic-html b/rp/rcynic/rcynic-html new file mode 100755 index 00000000..58e65dde --- /dev/null +++ b/rp/rcynic/rcynic-html @@ -0,0 +1,658 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Render rcynic's XML output to basic (X)HTML with some rrdtool graphics. 
+""" + +import sys +import urlparse +import os +import argparse +import time +import subprocess +import copy + +try: + from lxml.etree import (ElementTree, Element, SubElement, Comment) +except ImportError: + from xml.etree.ElementTree import (ElementTree, Element, SubElement, Comment) + +session = None +args = None + +def parse_options(): + + global args + + try: + default_rrdtool_binary = ac_rrdtool_binary + except NameError: + default_rrdtool_binary = "rrdtool" + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("--refresh", type = int, default = 1800, + help = "refresh interval for generated HTML") + parser.add_argument("--hide-problems", action = "store_true", + help = "don't generate \"problems\" page") + parser.add_argument("--hide-graphs", action = "store_true", + help = "don't generate graphs") + parser.add_argument("--hide-object-counts", action = "store_true", + help = "don't display object counts") + parser.add_argument("--dont-update-rrds", action = "store_true", + help = "don't add new data to RRD databases") + parser.add_argument("--png-height", type = int, default = 190, + help = "height of PNG images") + parser.add_argument("--png-width", type = int, default = 1350, + help = "width of PNG images") + parser.add_argument("--svg-height", type = int, default = 600, + help = "height of SVG images") + parser.add_argument("--svg-width", type = int, default = 1200, + help = "width of SVG images") + parser.add_argument("--eps-height", type = int, default = 0, + help = "height of EPS images") + parser.add_argument("--eps-width", type = int, default = 0, + help = "width of EPS images") + parser.add_argument("--rrdtool-binary", default = default_rrdtool_binary, + help = "location of rrdtool binary") + parser.add_argument("input_file", type = argparse.FileType("r"), + help = "XML input file") + parser.add_argument("output_directory", + help = "output directory") + args = parser.parse_args() + + +def parse_utc(s): + return 
int(time.mktime(time.strptime(s, "%Y-%m-%dT%H:%M:%SZ"))) + +class Label(object): + + moods = ["bad", "warn", "good"] + + def __init__(self, elt): + self.code = elt.tag + self.mood = elt.get("kind") + self.text = elt.text.strip() + self.count = 0 + + def get_count(self): + return self.count + + @property + def sort_key(self): + try: + return self.moods.index(self.mood) + except ValueError: + return len(self.moods) + +class Validation_Status(object): + + def __init__(self, elt, label_map): + self.uri = elt.text.strip() + self.timestamp = elt.get("timestamp") + self.generation = elt.get("generation") + self.hostname = urlparse.urlparse(self.uri).hostname or "[None]" + self.fn2 = os.path.splitext(self.uri)[1] or None if self.generation else None + self.label = label_map[elt.get("status")] + + def sort_key(self): + return (self.label.sort_key, self.timestamp, self.hostname, self.fn2, self.generation) + + @property + def code(self): + return self.label.code + + @property + def mood(self): + return self.label.mood + + @property + def accepted(self): + return self.label.code == "object_accepted" + + @property + def rejected(self): + return self.label.code == "object_rejected" + + @property + def is_current(self): + return self.generation == "current" + + @property + def is_backup(self): + return self.generation == "backup" + + @property + def is_problem(self): + return self.label.mood != "good" + + @property + def is_connection_problem(self): + return self.label.mood != "good" and self.label.code.startswith("rsync_transfer_") + + @property + def is_object_problem(self): + return self.label.mood != "good" and not self.label.code.startswith("rsync_transfer_") + + @property + def is_connection_detail(self): + return self.label.code.startswith("rsync_transfer_") + + @property + def is_object_detail(self): + return not self.label.code.startswith("rsync_transfer_") + +class Problem_Mixin(object): + + @property + def connection_problems(self): + result = [v for v in 
self.validation_status if v.is_connection_problem] + result.sort(key = Validation_Status.sort_key) + return result + + @property + def object_problems(self): + result = [v for v in self.validation_status if v.is_object_problem] + result.sort(key = Validation_Status.sort_key) + return result + +class Host(Problem_Mixin): + + def __init__(self, hostname, timestamp): + self.hostname = hostname + self.timestamp = timestamp + self.elapsed = 0 + self.connections = 0 + self.failures = 0 + self.uris = set() + self.graph = None + self.counters = {} + self.totals = {} + self.validation_status = [] + + def add_connection(self, elt): + self.elapsed += parse_utc(elt.get("finished")) - parse_utc(elt.get("started")) + self.connections += 1 + if elt.get("error") is not None: + self.failures += 1 + + def add_validation_status(self, v): + self.validation_status.append(v) + if v.generation == "current": + self.uris.add(v.uri) + self.counters[(v.fn2, v.generation, v.label)] = self.get_counter(v.fn2, v.generation, v.label) + 1 + self.totals[v.label] = self.get_total(v.label) + 1 + v.label.count += 1 + + def get_counter(self, fn2, generation, label): + return self.counters.get((fn2, generation, label), 0) + + def get_total(self, label): + return self.totals.get(label, 0) + + @property + def failed(self): + return 1 if self.failures > 0 else 0 + + @property + def objects(self): + return len(self.uris) + + field_table = (("connections", "GAUGE"), + ("objects", "GAUGE"), + ("elapsed", "GAUGE"), + ("failed", "ABSOLUTE")) + + rras = tuple("RRA:AVERAGE:0.5:%s:9600" % steps + for steps in (1, 4, 24)) + + @classmethod + def field_ds_specifiers(cls, heartbeat = 24 * 60 * 60, minimum = 0, maximum = "U"): + return ["DS:%s:%s:%s:%s:%s" % (field[0], field[1], heartbeat, minimum, maximum) + for field in cls.field_table] + + @property + def field_values(self): + return tuple(str(getattr(self, field[0])) for field in self.field_table) + + @classmethod + def field_defs(cls, filebase): + return 
["DEF:%s=%s.rrd:%s:AVERAGE" % (field[0], filebase, field[0])
+ for field in cls.field_table]
+
+ graph_opts = (
+ "--vertical-label", "Sync time (seconds)",
+ "--right-axis-label", "Objects (count)",
+ "--lower-limit", "0",
+ "--right-axis", "1:0",
+ "--full-size-mode" )
+
+ graph_cmds = (
+
+ # Split elapsed into separate data sets, so we can color
+ # differently to indicate how successful transfer was. Intent is
+ # that exactly one of these be defined for every value in elapsed.
+
+ "CDEF:success=failed,UNKN,elapsed,IF",
+ "CDEF:failure=connections,1,EQ,failed,*,elapsed,UNKN,IF",
+ "CDEF:partial=connections,1,NE,failed,*,elapsed,UNKN,IF",
+
+ # Show connection timing first, as color-coded semi-transparent
+ # areas with opaque borders. Intent is to make the colors stand
+ # out, since they're a major health indicator. Transparency is
+ # handled via an alpha channel (fourth octet of color code). We
+ # draw this stuff first so that later lines can overwrite it.
+
+ "AREA:success#00FF0080:Sync time (success)",
+ "AREA:partial#FFA50080:Sync time (partial failure)",
+ "AREA:failure#FF000080:Sync time (total failure)",
+
+ "LINE1:success#00FF00", # Green
+ "LINE1:partial#FFA500", # Orange
+ "LINE1:failure#FF0000", # Red
+
+ # Now show object counts, as a simple black line.
+
+ "LINE1:objects#000000:Objects", # Black
+
+ # Add averages over period to chart legend. 
+ + "VDEF:avg_elapsed=elapsed,AVERAGE", + "VDEF:avg_connections=connections,AVERAGE", + "VDEF:avg_objects=objects,AVERAGE", + "COMMENT:\j", + "GPRINT:avg_elapsed:Average sync time (seconds)\: %5.2lf", + "GPRINT:avg_connections:Average connection count\: %5.2lf", + "GPRINT:avg_objects:Average object count\: %5.2lf" ) + + graph_periods = (("week", "-1w"), + ("month", "-31d"), + ("year", "-1y")) + + def rrd_run(self, cmd): + try: + cmd = [str(i) for i in cmd] + cmd.insert(0, args.rrdtool_binary) + subprocess.check_call(cmd, stdout = open("/dev/null", "w")) + except OSError, e: + sys.exit("Problem running %s, perhaps you need to set --rrdtool-binary? (%s)" % (args.rrdtool_binary, e)) + except subprocess.CalledProcessError, e: + sys.exit("Failure running %s: %s" % (args.rrdtool_binary, e)) + + def rrd_update(self): + filename = os.path.join(args.output_directory, self.hostname) + ".rrd" + if not os.path.exists(filename): + cmd = ["create", filename, "--start", self.timestamp - 1, "--step", "3600"] + cmd.extend(self.field_ds_specifiers()) + cmd.extend(self.rras) + self.rrd_run(cmd) + self.rrd_run(["update", filename, + "%s:%s" % (self.timestamp, ":".join(str(v) for v in self.field_values))]) + + def rrd_graph(self, html): + filebase = os.path.join(args.output_directory, self.hostname) + formats = [format for format in ("png", "svg", "eps") + if getattr(args, format + "_width") and getattr(args, format + "_height")] + for period, start in self.graph_periods: + for format in formats: + cmds = [ "graph", "%s_%s.%s" % (filebase, period, format), + "--title", "%s last %s" % (self.hostname, period), + "--start", start, + "--width", getattr(args, format + "_width"), + "--height", getattr(args, format + "_height"), + "--imgformat", format.upper() ] + cmds.extend(self.graph_opts) + cmds.extend(self.field_defs(filebase)) + cmds.extend(self.graph_cmds) + self.rrd_run(cmds) + img = Element("img", src = "%s_%s.png" % (self.hostname, period), + width = str(args.png_width), + height = 
str(args.png_height)) + if self.graph is None: + self.graph = copy.copy(img) + html.BodyElement("h2").text = "%s over last %s" % (self.hostname, period) + html.BodyElement("a", href = "%s_%s_svg.html" % (self.hostname, period)).append(img) + html.BodyElement("br") + svg_html = HTML("%s over last %s" % (self.hostname, period), + "%s_%s_svg" % (self.hostname, period)) + svg_html.BodyElement("img", src = "%s_%s.svg" % (self.hostname, period)) + svg_html.close() + + +class Session(Problem_Mixin): + + def __init__(self): + self.hosts = {} + + self.root = ElementTree(file = args.input_file).getroot() + + self.rcynic_version = self.root.get("rcynic-version") + self.rcynic_date = self.root.get("date") + self.timestamp = parse_utc(self.rcynic_date) + + self.labels = [Label(elt) for elt in self.root.find("labels")] + self.load_validation_status() + + for elt in self.root.findall("rsync_history"): + self.get_host(urlparse.urlparse(elt.text.strip()).hostname).add_connection(elt) + + generations = set() + fn2s = set() + + for v in self.validation_status: + self.get_host(v.hostname).add_validation_status(v) + generations.add(v.generation) + fn2s.add(v.fn2) + + self.labels = [l for l in self.labels if l.count > 0] + + self.hostnames = sorted(self.hosts) + self.generations = sorted(generations) + self.fn2s = sorted(fn2s) + + def load_validation_status(self): + label_map = dict((label.code, label) for label in self.labels) + full_validation_status = [Validation_Status(elt, label_map) + for elt in self.root.findall("validation_status")] + accepted_current = set(v.uri for v in full_validation_status + if v.is_current and v.accepted) + self.validation_status = [v for v in full_validation_status + if not v.is_backup + or v.uri not in accepted_current] + + def get_host(self, hostname): + if hostname not in self.hosts: + self.hosts[hostname] = Host(hostname, self.timestamp) + return self.hosts[hostname] + + def get_sum(self, fn2, generation, label): + return sum(h.get_counter(fn2, 
generation, label) + for h in self.hosts.itervalues()) + + def rrd_update(self): + if not args.dont_update_rrds: + for h in self.hosts.itervalues(): + h.rrd_update() + +css = ''' + th, td { + text-align: center; padding: 4px; + } + + td.uri { + text-align: left; + } + + thead tr th, tfoot tr td { + font-weight: bold; + } + + .good { + background-color: #77ff77; + } + + .warn { + background-color: yellow; + } + + .bad { + background-color: #ff5500; + } + + body { + font-family: arial, helvetica, serif; + } + + /* Make background-color inherit like color does. */ + #nav { + background-color: inherit; + } + + #nav, #nav ul { + float: left; + width: 100%; + list-style: none; + line-height: 1; + font-weight: normal; + padding: 0; + border-color: black; + border-style: solid; + border-width: 1px 0; + margin: 0 0 1em 0; + } + + #nav a, #nav span { + display: block; + background-color: white; + color: black; + text-decoration: none; + padding: 0.25em 0.75em; + } + + #nav li { + float: left; + padding: 0; + } + + /* Use
      to set submenu width. */ + #nav li ul { + position: absolute; + display: none; + height: auto; + border-width: 1px; + margin: 0; + } + + #nav li li { + width: 100%; + } + + /* Display submenu when hovering. */ + #nav li:hover ul { + display: block; + } + + /* Reverse video when hovering. */ + #nav a:hover, #nav span:hover { + color: white; + background-color: black; + } +''' + +class HTML(object): + + def __init__(self, title, filebase): + + self.filename = os.path.join(args.output_directory, filebase + ".html") + + self.html = Element("html") + self.html.append(Comment(" Generators:\n" + + " " + session.rcynic_version + "\n" + + " $Id$\n")) + self.head = SubElement(self.html, "head") + self.body = SubElement(self.html, "body") + + title += " " + session.rcynic_date + SubElement(self.head, "title").text = title + SubElement(self.body, "h1").text = title + SubElement(self.head, "style", type = "text/css").text = css + + if args.refresh: + SubElement(self.head, "meta", { "http-equiv" : "Refresh", "content" : str(args.refresh) }) + + hostwidth = max(len(hostname) for hostname in session.hostnames) + + toc = SubElement(self.body, "ul", id = "nav") + SubElement(SubElement(toc, "li"), "a", href = "index.html").text = "Overview" + li = SubElement(toc, "li") + SubElement(li, "span").text = "Repositories" + ul = SubElement(li, "ul", style = "width: %sem" % hostwidth) + for hostname in session.hostnames: + SubElement(SubElement(ul, "li"), "a", href = "%s.html" % hostname).text = hostname + SubElement(SubElement(toc, "li"), "a", href = "problems.html").text = "Problems" + li = SubElement(toc, "li") + SubElement(li, "span").text = "All Details" + ul = SubElement(li, "ul", style = "width: 15em") + SubElement(SubElement(ul, "li"), "a", href = "connections.html").text = "All Connections" + SubElement(SubElement(ul, "li"), "a", href = "objects.html").text = "All Objects" + SubElement(self.body, "br") + + def close(self): + ElementTree(element = 
self.html).write(self.filename) + + def BodyElement(self, tag, **attrib): + return SubElement(self.body, tag, **attrib) + + def counter_table(self, data_func, total_func): + table = self.BodyElement("table", rules = "all", border = "1") + thead = SubElement(table, "thead") + tfoot = SubElement(table, "tfoot") + tbody = SubElement(table, "tbody") + tr = SubElement(thead, "tr") + SubElement(tr, "th") + for label in session.labels: + SubElement(tr, "th").text = label.text + for fn2 in session.fn2s: + for generation in session.generations: + counters = [data_func(fn2, generation, label) for label in session.labels] + if sum(counters) > 0: + tr = SubElement(tbody, "tr") + SubElement(tr, "td").text = ((generation or "") + " " + (fn2 or "")).strip() + for label, count in zip(session.labels, counters): + td = SubElement(tr, "td") + if count > 0: + td.set("class", label.mood) + td.text = str(count) + tr = SubElement(tfoot, "tr") + SubElement(tr, "td").text = "Total" + counters = [total_func(label) for label in session.labels] + for label, count in zip(session.labels, counters): + td = SubElement(tr, "td") + if count > 0: + td.set("class", label.mood) + td.text = str(count) + return table + + def object_count_table(self, session): + table = self.BodyElement("table", rules = "all", border = "1") + thead = SubElement(table, "thead") + tbody = SubElement(table, "tbody") + tfoot = SubElement(table, "tfoot") + fn2s = [fn2 for fn2 in session.fn2s if fn2 is not None] + total = dict((fn2, 0) for fn2 in fn2s) + for hostname in session.hostnames: + tr = SubElement(tbody, "tr") + SubElement(tr, "td").text = hostname + for fn2 in fn2s: + td = SubElement(tr, "td") + count = sum(uri.endswith(fn2) for uri in session.hosts[hostname].uris) + total[fn2] += count + if count > 0: + td.text = str(count) + trhead = SubElement(thead, "tr") + trfoot = SubElement(tfoot, "tr") + SubElement(trhead, "th").text = "Repository" + SubElement(trfoot, "td").text = "Total" + for fn2 in fn2s: + 
SubElement(trhead, "th").text = fn2 + SubElement(trfoot, "td").text = str(total[fn2]) + return table + + def detail_table(self, records): + if records: + table = self.BodyElement("table", rules = "all", border = "1") + thead = SubElement(table, "thead") + tbody = SubElement(table, "tbody") + tr = SubElement(thead, "tr") + SubElement(tr, "th").text = "Timestamp" + SubElement(tr, "th").text = "Generation" + SubElement(tr, "th").text = "Status" + SubElement(tr, "th").text = "URI" + for v in records: + tr = SubElement(tbody, "tr", { "class" : v.mood }) + SubElement(tr, "td").text = v.timestamp + SubElement(tr, "td").text = v.generation + SubElement(tr, "td").text = v.label.text + SubElement(tr, "td", { "class" : "uri"}).text = v.uri + return table + else: + self.BodyElement("p").text = "None found" + return None + +def main(): + + global session + + os.putenv("TZ", "UTC") + time.tzset() + + parse_options() + + session = Session() + session.rrd_update() + + for hostname in session.hostnames: + html = HTML("Repository details for %s" % hostname, hostname) + html.counter_table(session.hosts[hostname].get_counter, session.hosts[hostname].get_total) + if not args.hide_graphs: + session.hosts[hostname].rrd_graph(html) + if not args.hide_problems: + html.BodyElement("h2").text = "Connection Problems" + html.detail_table(session.hosts[hostname].connection_problems) + html.BodyElement("h2").text = "Object Problems" + html.detail_table(session.hosts[hostname].object_problems) + html.close() + + html = HTML("rcynic summary", "index") + html.BodyElement("h2").text = "Grand totals for all repositories" + html.counter_table(session.get_sum, Label.get_count) + if not args.hide_object_counts: + html.BodyElement("br") + html.BodyElement("hr") + html.BodyElement("br") + html.BodyElement("h2").text = "Current total object counts (distinct URIs)" + html.object_count_table(session) + for hostname in session.hostnames: + html.BodyElement("br") + html.BodyElement("hr") + 
html.BodyElement("br") + html.BodyElement("h2").text = "Overview for repository %s" % hostname + html.counter_table(session.hosts[hostname].get_counter, session.hosts[hostname].get_total) + if not args.hide_graphs: + html.BodyElement("br") + html.BodyElement("a", href = "%s.html" % hostname).append(session.hosts[hostname].graph) + html.close() + + html = HTML("Problems", "problems") + html.BodyElement("h2").text = "Connection Problems" + html.detail_table(session.connection_problems) + html.BodyElement("h2").text = "Object Problems" + html.detail_table(session.object_problems) + html.close() + + html = HTML("All connections", "connections") + html.detail_table([v for v in session.validation_status if v.is_connection_detail]) + html.close() + + html = HTML("All objects", "objects") + html.detail_table([v for v in session.validation_status if v.is_object_detail]) + html.close() + + +if __name__ == "__main__": + main() diff --git a/rp/rcynic/rcynic-svn b/rp/rcynic/rcynic-svn new file mode 100755 index 00000000..fd0df500 --- /dev/null +++ b/rp/rcynic/rcynic-svn @@ -0,0 +1,190 @@ +# $Id$ +# +# Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Archive rcynic output in a Subversion repository. 
+""" + +import subprocess +import argparse +import datetime +import fcntl +import glob +import os + +try: + from lxml.etree import ElementTree +except ImportError: + from xml.etree.ElementTree import ElementTree + + +mime_types = ( + ("html", "application/xhtml+xml"), + ("cer", "application/pkix-cert"), + ("crl", "application/pkix-crl"), + ("mft", "application/rpki-manifest"), + ("mnf", "application/rpki-manifest"), + ("roa", "application/rpki-roa"), + ("gbr", "application/rpki-ghostbusters")) + + +def run(*cmd, **kwargs): + """ + Run a program, displaying timing data when appropriate. + """ + + t = datetime.datetime.utcnow() + subprocess.check_call(cmd, **kwargs) + if args.show_timing: + now = datetime.datetime.utcnow() + print now, (now - t), " ".join(cmd) + + +def runxml(*cmd): + """ + + Run a program which produces XML output, displaying timing data when + appropriate and returning an ElementTree constructed from the + program's output. + """ + t = datetime.datetime.utcnow() + p = subprocess.Popen(cmd, stdout = subprocess.PIPE) + x = ElementTree(file = p.stdout) + s = p.wait() + if s: + raise subprocess.CalledProcessError(s, cmd[0]) + if args.show_timing: + now = datetime.datetime.utcnow() + print now, (now - t), " ".join(cmd) + return x + + +# Main program. + +parser = argparse.ArgumentParser(description = __doc__) + +parser.add_argument("--show_timing", action = "store_true", help = \ + """ + Show timing data on programs we run. + """) + +parser.add_argument("--verbatim", action = "store_true", help = \ + """ + Whether to archive rcynic's data output exactly as + rcynic writes it or map it into a directory + structure which makes more sense when used with + Subversion. True means archive exactly as rcynic + writes it, interpreting file and directory names + as rsync would, transient directories and all. 
+ False means map the current authenticated/ tree in
+ rcynic's output to a stable authenticated/ subtree
+ in the subversion repository, with file and
+ directory names from the command line shortened to
+ their last component.
+ """)
+
+parser.add_argument("--lockfile", default = "rcynic-svn.lock", help = \
+ """
+ Lock file to prevent multiple copies of this
+ program (eg, running under cron) from stepping on
+ each other while modifying the working directory.
+ """)
+
+parser.add_argument("files_to_archive", nargs = "*", help = \
+ """
+ Files to archive using Subversion. If omitted, we
+ assume that some other process has already
+ modified the Subversion working directory.
+ """)
+
+parser.add_argument("working_directory", help = \
+ """
+ Subversion working directory to use (must already
+ exist).
+ """)
+
+args = parser.parse_args()
+
+if args.show_timing:
+ t0 = datetime.datetime.utcnow()
+ print t0, "Starting"
+
+# Lock out other instances of this program. We may want some more
+# sophisticated approach when combining this with other programs, but
+# this should minimize the risk of multiple copies of this program
+# trying to modify the same subversion working directory at the same
+# time and messing each other up. We leave the lock file in place
+# because doing so removes a potential race condition. NOTE(review): the open() below hardcodes "cronjob.lock" and ignores the --lockfile option declared above -- confirm which is intended.
+
+lock = os.open("cronjob.lock", os.O_RDONLY | os.O_CREAT | os.O_NONBLOCK, 0666)
+fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
+
+# Make sure working tree is up to date.
+
+run("svn", "update", "--quiet", args.working_directory)
+
+# Copy rcynic's output as appropriate. 
+ +if args.files_to_archive: + + if args.verbatim: + cmd = ["rsync", "--archive", "--quiet", "--delete"] + cmd.extend(args.files_to_archive) + cmd.append(args.working_directory) + run(*cmd) + + else: + for src in args.files_to_archive: + cmd = ["rsync", "--archive", "--quiet", "--delete", "--copy-links"] + cmd.append(src.rstrip("/")) + cmd.append(args.working_directory.rstrip("/") + "/") + run(*cmd) + +# Ask Subversion to add any new files, trying hard to get the MIME +# types right. + +cmd = ["svn", "add", "--quiet", "--force", "--auto-props"] + +for fn2, mime_type in mime_types: + cmd.append("--config-option") + cmd.append("config:auto-props:*.%s=svn:mime-type=%s" % (fn2, mime_type)) + +cmd.append(".") + +run(*cmd, cwd = args.working_directory) + +# Parse XML version of Subversion's status output to figure out what +# files have been deleted, and tell Subversion that we deleted them +# intentionally. + +missing = sorted(entry.get("path") + for entry in runxml("svn", "status", "--xml", args.working_directory).find("target").findall("entry") + if entry.find("wc-status").get("item") == "missing") +deleted = [] + +for path in missing: + if not any(path.startswith(r) for r in deleted): + run("svn", "delete", "--quiet", path) + deleted.append(path + "/") + +# Commit our changes and update the working tree. + +run("svn", "commit", "--quiet", "--message", "Auto update.", args.working_directory) +run("svn", "update", "--quiet", args.working_directory) + +if args.show_timing: + now = datetime.datetime.utcnow() + print now, now - t0, "total runtime" diff --git a/rp/rcynic/rcynic-text b/rp/rcynic/rcynic-text new file mode 100755 index 00000000..a8e56dac --- /dev/null +++ b/rp/rcynic/rcynic-text @@ -0,0 +1,118 @@ +# $Id$ +# +# Copyright (C) 2012 Internet Systems Consortium, Inc. 
("ISC") +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Flat text summary of rcynic.xml. +""" + +import sys +import urlparse +import textwrap + +try: + from lxml.etree import ElementTree +except ImportError: + from xml.etree.ElementTree import ElementTree + +class Label(object): + + def __init__(self, elt): + self.tag = elt.tag + self.width = max(len(s) for s in elt.text.split()) + self.lines = textwrap.wrap(elt.text.strip(), width = self.width) + self.counter = 0 + + def line(self, n): + try: + return " " + self.lines[n].center(self.width) + " " + except IndexError: + return " " * (self.width + 2) + + def add(self): + self.counter += 1 + + @property + def total(self): + return " " + str(self.counter).rjust(self.width) + " " + + @property + def visible(self): + return self.counter > 0 + +class Host(object): + + def __init__(self): + self.counters = {} + + def add(self, label): + self.counters[label] = self.counters.get(label, 0) + 1 + label.add() + + def total(self, label): + if label in self.counters: + return " " + str(self.counters[label]).rjust(label.width) + " " + else: + return " " * (label.width + 2) + +class Session(object): + + def __init__(self, labels): + self.hosts = {} + self.labels = labels + self.map = dict((label.tag, label) for label in labels) + + def add(self, elt): + label = 
self.map[elt.get("status")] + hostname = urlparse.urlparse(elt.text.strip()).hostname + if hostname not in self.hosts: + self.hosts[hostname] = Host() + self.hosts[hostname].add(label) + + def show(self): + visible = [label for label in self.labels if label.visible] + hostnames = sorted(hostname for hostname in self.hosts if hostname is not None) + hostwidth = max(len(hostname) for hostname in hostnames + ["Hostname"]) + separator = "+-%s-+-%s-+" % ( + "-" * hostwidth, + "-+-".join("-" * label.width for label in visible)) + print separator + for i in xrange(max(len(label.lines) for label in visible)): + print "| %s |%s|" % ( + ("Hostname" if i == 0 else "").ljust(hostwidth), + "|".join(label.line(i) for label in visible)) + print separator + for hostname in hostnames: + print "| %s |%s|" % ( + hostname.ljust(hostwidth), + "|".join(self.hosts[hostname].total(label) for label in visible)) + if hostnames: + print separator + print "| %s |%s|" % ( + "Total".ljust(hostwidth), + "|".join(label.total for label in visible)) + print separator + + +def main(): + for filename in ([sys.stdin] if len(sys.argv) < 2 else sys.argv[1:]): + etree = ElementTree(file = filename) + session = Session([Label(elt) for elt in etree.find("labels")]) + for elt in etree.findall("validation_status"): + session.add(elt) + session.show() + +if __name__ == "__main__": + main() diff --git a/rp/rcynic/rcynic.c b/rp/rcynic/rcynic.c new file mode 100644 index 00000000..dea9c48f --- /dev/null +++ b/rp/rcynic/rcynic.c @@ -0,0 +1,6070 @@ +/* + * Copyright (C) 2013--2014 Dragon Research Labs ("DRL") + * Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") + * Portions copyright (C) 2006--2008 American Registry for Internet Numbers ("ARIN") + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notices and this permission notice appear in all copies. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL + * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, + * ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR + * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS + * OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, + * NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION + * WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* $Id$ */ + +/** + * @mainpage + * + * "Cynical rsync": Recursively walk RPKI tree using rsync to pull + * data from remote sites, validating certificates and CRLs as we go. + * + * Doxygen doesn't quite know what to make of a one-file C program, + * and ends up putting most of the interesting data @link rcynic.c + * here. @endlink + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define SYSLOG_NAMES /* defines CODE prioritynames[], facilitynames[] */ +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "bio_f_linebreak.h" + +#include "defstack.h" + +#if !defined(FILENAME_MAX) && defined(PATH_MAX) && PATH_MAX > 1024 +#define FILENAME_MAX PATH_MAX +#elif !defined(FILENAME_MAX) +#define FILENAME_MAX 1024 +#endif + +#define SCHEME_RSYNC ("rsync://") +#define SIZEOF_RSYNC (sizeof(SCHEME_RSYNC) - 1) + +/** + * Maximum length of a hostname. + */ +#ifndef HOSTNAME_MAX +#define HOSTNAME_MAX 256 +#endif + +/** + * Maximum length of an URI. + */ +#define URI_MAX (SIZEOF_RSYNC + HOSTNAME_MAX + 1 + FILENAME_MAX) + +/** + * Maximum number of times we try to kill an inferior process before + * giving up. 
+ */
+#define KILL_MAX 10
+
+/**
+ * Version number of XML summary output.
+ */
+#define XML_SUMMARY_VERSION 1
+
+/**
+ * How much buffer space do we need for a raw address?
+ */
+#define ADDR_RAW_BUF_LEN 16
+
+/**
+ * How many bytes is a SHA256 digest?
+ */
+#define HASH_SHA256_LEN 32
+
+/**
+ * Logging levels. Same general idea as syslog(), but our own
+ * categories based on what makes sense for this program. Default
+ * mappings to syslog() priorities are here because it's the easiest
+ * way to make sure that we assign a syslog level to each of ours.
+ */
+
+#define LOG_LEVELS \
+ QQ(log_sys_err, LOG_ERR) /* Error from OS or library */ \
+ QQ(log_usage_err, LOG_ERR) /* Bad usage (local error) */ \
+ QQ(log_data_err, LOG_NOTICE) /* Bad data, no biscuit */ \
+ QQ(log_telemetry, LOG_INFO) /* Normal progress chatter */ \
+ QQ(log_verbose, LOG_INFO) /* Extra chatter */ \
+ QQ(log_debug, LOG_DEBUG) /* Only useful when debugging */
+
+#define QQ(x,y) x ,
+typedef enum log_level { LOG_LEVELS LOG_LEVEL_T_MAX } log_level_t;
+#undef QQ
+
+#define QQ(x,y) { #x , x },
+static const struct {
+ const char *name;
+ log_level_t value;
+} log_levels[] = {
+ LOG_LEVELS
+};
+#undef QQ
+
+/**
+ * MIB counters derived from OpenSSL. Long list of validation failure
+ * codes from OpenSSL (crypto/x509/x509_vfy.h). 
+ */ + +#define MIB_COUNTERS_FROM_OPENSSL \ + QV(X509_V_ERR_UNABLE_TO_GET_CRL) \ + QV(X509_V_ERR_UNABLE_TO_DECRYPT_CERT_SIGNATURE) \ + QV(X509_V_ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE) \ + QV(X509_V_ERR_UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY) \ + QV(X509_V_ERR_CERT_SIGNATURE_FAILURE) \ + QV(X509_V_ERR_CRL_SIGNATURE_FAILURE) \ + QV(X509_V_ERR_CERT_NOT_YET_VALID) \ + QV(X509_V_ERR_CERT_HAS_EXPIRED) \ + QV(X509_V_ERR_CRL_NOT_YET_VALID) \ + QV(X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD) \ + QV(X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD) \ + QV(X509_V_ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD) \ + QV(X509_V_ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD) \ + QV(X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT) \ + QV(X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN) \ + QV(X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY) \ + QV(X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE) \ + QV(X509_V_ERR_CERT_CHAIN_TOO_LONG) \ + QV(X509_V_ERR_CERT_REVOKED) \ + QV(X509_V_ERR_INVALID_CA) \ + QV(X509_V_ERR_PATH_LENGTH_EXCEEDED) \ + QV(X509_V_ERR_INVALID_PURPOSE) \ + QV(X509_V_ERR_CERT_UNTRUSTED) \ + QV(X509_V_ERR_CERT_REJECTED) \ + QV(X509_V_ERR_AKID_SKID_MISMATCH) \ + QV(X509_V_ERR_AKID_ISSUER_SERIAL_MISMATCH) \ + QV(X509_V_ERR_KEYUSAGE_NO_CERTSIGN) \ + QV(X509_V_ERR_UNABLE_TO_GET_CRL_ISSUER) \ + QV(X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION) \ + QV(X509_V_ERR_KEYUSAGE_NO_CRL_SIGN) \ + QV(X509_V_ERR_UNHANDLED_CRITICAL_CRL_EXTENSION) \ + QV(X509_V_ERR_INVALID_NON_CA) \ + QV(X509_V_ERR_PROXY_PATH_LENGTH_EXCEEDED) \ + QV(X509_V_ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE) \ + QV(X509_V_ERR_PROXY_CERTIFICATES_NOT_ALLOWED) \ + QV(X509_V_ERR_INVALID_EXTENSION) \ + QV(X509_V_ERR_INVALID_POLICY_EXTENSION) \ + QV(X509_V_ERR_NO_EXPLICIT_POLICY) \ + QV(X509_V_ERR_UNNESTED_RESOURCE) + +/** + * MIB counters specific to rcynic. 
+ */ + +#define MIB_COUNTERS \ + MIB_COUNTERS_FROM_OPENSSL \ + QB(aia_extension_missing, "AIA extension missing") \ + QB(aia_extension_forbidden, "AIA extension forbidden") \ + QB(aia_uri_missing, "AIA URI missing") \ + QB(aki_extension_issuer_mismatch, "AKI extension issuer mismatch") \ + QB(aki_extension_missing, "AKI extension missing") \ + QB(aki_extension_wrong_format, "AKI extension is wrong format") \ + QB(bad_asidentifiers, "Bad ASIdentifiers extension") \ + QB(bad_certificate_policy, "Bad certificate policy") \ + QB(bad_cms_econtenttype, "Bad CMS eContentType") \ + QB(bad_cms_si_contenttype, "Bad CMS SI ContentType") \ + QB(bad_cms_signer, "Bad CMS signer") \ + QB(bad_cms_signer_infos, "Bad CMS signerInfos") \ + QB(bad_crl, "Bad CRL") \ + QB(bad_ipaddrblocks, "Bad IPAddrBlocks extension") \ + QB(bad_key_usage, "Bad keyUsage") \ + QB(bad_manifest_digest_length, "Bad manifest digest length") \ + QB(bad_public_key, "Bad public key") \ + QB(bad_roa_asID, "Bad ROA asID") \ + QB(bad_certificate_serial_number, "Bad certificate serialNumber") \ + QB(bad_manifest_number, "Bad manifestNumber") \ + QB(certificate_bad_signature, "Bad certificate signature") \ + QB(certificate_failed_validation, "Certificate failed validation") \ + QB(cms_econtent_decode_error, "CMS eContent decode error") \ + QB(cms_includes_crls, "CMS includes CRLs") \ + QB(cms_signer_missing, "CMS signer missing") \ + QB(cms_ski_mismatch, "CMS SKI mismatch") \ + QB(cms_validation_failure, "CMS validation failure") \ + QB(crl_issuer_name_mismatch, "CRL issuer name mismatch") \ + QB(crl_not_in_manifest, "CRL not listed in manifest") \ + QB(crl_not_yet_valid, "CRL not yet valid") \ + QB(crl_number_extension_missing, "CRL number extension missing") \ + QB(crl_number_is_negative, "CRL number is negative") \ + QB(crl_number_out_of_range, "CRL number out of range") \ + QB(crldp_doesnt_match_issuer_sia, "CRLDP doesn't match issuer's SIA") \ + QB(crldp_uri_missing, "CRLDP URI missing") \ + 
QB(disallowed_x509v3_extension, "Disallowed X.509v3 extension") \ + QB(duplicate_name_in_manifest, "Duplicate name in manifest") \ + QB(inappropriate_eku_extension, "Inappropriate EKU extension") \ + QB(malformed_aia_extension, "Malformed AIA extension") \ + QB(malformed_sia_extension, "Malformed SIA extension") \ + QB(malformed_basic_constraints, "Malformed basicConstraints") \ + QB(malformed_trust_anchor, "Malformed trust anchor") \ + QB(malformed_cadirectory_uri, "Malformed caDirectory URI") \ + QB(malformed_crldp_extension, "Malformed CRDLP extension") \ + QB(malformed_crldp_uri, "Malformed CRDLP URI") \ + QB(malformed_roa_addressfamily, "Malformed ROA addressFamily") \ + QB(malformed_tal_uri, "Malformed TAL URI") \ + QB(manifest_carepository_mismatch, "Manifest caRepository mismatch") \ + QB(manifest_interval_overruns_cert, "Manifest interval overruns certificate") \ + QB(manifest_lists_missing_object, "Manifest lists missing object") \ + QB(manifest_not_yet_valid, "Manifest not yet valid") \ + QB(missing_resources, "Missing resources") \ + QB(nonconformant_asn1_time_value, "Nonconformant ASN.1 time value") \ + QB(nonconformant_public_key_algorithm,"Nonconformant public key algorithm")\ + QB(nonconformant_signature_algorithm, "Nonconformant signature algorithm")\ + QB(nonconformant_digest_algorithm, "Nonconformant digest algorithm") \ + QB(nonconformant_certificate_uid, "Nonconformant certificate UID") \ + QB(object_rejected, "Object rejected") \ + QB(rfc3779_inheritance_required, "RFC 3779 inheritance required") \ + QB(roa_contains_bad_afi_value, "ROA contains bad AFI value") \ + QB(roa_max_prefixlen_too_short, "ROA maxPrefixlen too short") \ + QB(roa_resource_not_in_ee, "ROA resource not in EE") \ + QB(roa_resources_malformed, "ROA resources malformed") \ + QB(rsync_transfer_failed, "rsync transfer failed") \ + QB(rsync_transfer_timed_out, "rsync transfer timed out") \ + QB(safi_not_allowed, "SAFI not allowed") \ + QB(sia_cadirectory_uri_missing, "SIA 
caDirectory URI missing") \ + QB(sia_extension_missing, "SIA extension missing") \ + QB(sia_manifest_uri_missing, "SIA manifest URI missing") \ + QB(ski_extension_missing, "SKI extension missing") \ + QB(ski_public_key_mismatch, "SKI public key mismatch") \ + QB(trust_anchor_key_mismatch, "Trust anchor key mismatch") \ + QB(trust_anchor_with_crldp, "Trust anchor can't have CRLDP") \ + QB(unknown_afi, "Unknown AFI") \ + QB(unknown_openssl_verify_error, "Unknown OpenSSL verify error") \ + QB(unreadable_trust_anchor, "Unreadable trust anchor") \ + QB(unreadable_trust_anchor_locator, "Unreadable trust anchor locator") \ + QB(wrong_object_version, "Wrong object version") \ + QW(aia_doesnt_match_issuer, "AIA doesn't match issuer") \ + QW(backup_thisupdate_newer_than_current, "Backup thisUpdate newer than current") \ + QW(backup_number_higher_than_current, "Backup number higher than current") \ + QW(bad_thisupdate, "Bad CRL thisUpdate") \ + QW(bad_cms_si_signed_attributes, "Bad CMS SI signed attributes") \ + QW(bad_signed_object_uri, "Bad signedObject URI") \ + QW(crldp_names_newer_crl, "CRLDP names newer CRL") \ + QW(digest_mismatch, "Digest mismatch") \ + QW(ee_certificate_with_1024_bit_key, "EE certificate with 1024 bit key") \ + QW(issuer_uses_multiple_crldp_values, "Issuer uses multiple CRLDP values")\ + QW(multiple_rsync_uris_in_extension, "Multiple rsync URIs in extension") \ + QW(nonconformant_issuer_name, "Nonconformant X.509 issuer name") \ + QW(nonconformant_subject_name, "Nonconformant X.509 subject name") \ + QW(policy_qualifier_cps, "Policy Qualifier CPS") \ + QW(rsync_partial_transfer, "rsync partial transfer") \ + QW(rsync_transfer_skipped, "rsync transfer skipped") \ + QW(sia_extension_missing_from_ee, "SIA extension missing from EE") \ + QW(skipped_because_not_in_manifest, "Skipped because not in manifest") \ + QW(stale_crl_or_manifest, "Stale CRL or manifest") \ + QW(tainted_by_stale_crl, "Tainted by stale CRL") \ + QW(tainted_by_stale_manifest, 
"Tainted by stale manifest") \ + QW(tainted_by_not_being_in_manifest, "Tainted by not being in manifest") \ + QW(trust_anchor_not_self_signed, "Trust anchor not self-signed") \ + QW(trust_anchor_skipped, "Trust anchor skipped") \ + QW(unknown_object_type_skipped, "Unknown object type skipped") \ + QW(uri_too_long, "URI too long") \ + QW(wrong_cms_si_signature_algorithm, "Wrong CMS SI signature algorithm") \ + QW(wrong_cms_si_digest_algorithm, "Wrong CMS SI digest algorithm") \ + QG(non_rsync_uri_in_extension, "Non-rsync URI in extension") \ + QG(object_accepted, "Object accepted") \ + QG(rechecking_object, "Rechecking object") \ + QG(rsync_transfer_succeeded, "rsync transfer succeeded") \ + QG(validation_ok, "OK") + +#define QV(x) QB(mib_openssl_##x, 0) + +static const char + mib_counter_kind_good[] = "good", + mib_counter_kind_warn[] = "warn", + mib_counter_kind_bad[] = "bad"; + +#define QG(x,y) mib_counter_kind_good , +#define QW(x,y) mib_counter_kind_warn , +#define QB(x,y) mib_counter_kind_bad , +static const char * const mib_counter_kind[] = { MIB_COUNTERS NULL }; +#undef QB +#undef QW +#undef QG + +#define QG(x,y) QQ(x,y) +#define QW(x,y) QQ(x,y) +#define QB(x,y) QQ(x,y) + +#define QQ(x,y) x , +typedef enum mib_counter { MIB_COUNTERS MIB_COUNTER_T_MAX } mib_counter_t; +#undef QQ + +#define QQ(x,y) y , +static const char * const mib_counter_desc[] = { MIB_COUNTERS NULL }; +#undef QQ + +#define QQ(x,y) #x , +static const char * const mib_counter_label[] = { MIB_COUNTERS NULL }; +#undef QQ + +#undef QV + +#define QQ(x,y) 0 , +#define QV(x) x , +static const long mib_counter_openssl[] = { MIB_COUNTERS 0 }; +#undef QV +#undef QQ + +/** + * Object sources. We always try to get fresh copies of objects using + * rsync, but if that fails we try using backup copies from what + * worked the last time we were run. This means that a URI + * potentially represents two different objects, so we need to + * distinguish them for tracking purposes in our validation log. 
+ */ + +#define OBJECT_GENERATIONS \ + QQ(null) \ + QQ(current) \ + QQ(backup) + +#define QQ(x) object_generation_##x , +typedef enum object_generation { OBJECT_GENERATIONS OBJECT_GENERATION_MAX } object_generation_t; +#undef QQ + +#define QQ(x) #x , +static const char * const object_generation_label[] = { OBJECT_GENERATIONS NULL }; +#undef QQ + +/** + * Type-safe string wrapper for URIs. + */ +typedef struct { char s[URI_MAX]; } uri_t; + +/** + * Type-safe string wrapper for filename paths. + */ +typedef struct { char s[FILENAME_MAX]; } path_t; + +/** + * Type-safe wrapper for hash buffers. + */ +typedef struct { unsigned char h[EVP_MAX_MD_SIZE]; } hashbuf_t; + +/** + * Type-safe wrapper for timestamp strings. + */ +typedef struct { char s[sizeof("2001-01-01T00:00:00Z") + 1]; } timestamp_t; + +/** + * Per-URI validation status object. + * uri must be first element. + */ +typedef struct validation_status { + uri_t uri; + object_generation_t generation; + time_t timestamp; + unsigned char events[(MIB_COUNTER_T_MAX + 7) / 8]; + short balance; + struct validation_status *left_child; + struct validation_status *right_child; +} validation_status_t; + +DECLARE_STACK_OF(validation_status_t) + +/** + * Structure to hold data parsed out of a certificate. + */ +typedef struct certinfo { + int ca, ta; + object_generation_t generation; + uri_t uri, sia, aia, crldp, manifest, signedobject; +} certinfo_t; + +typedef struct rcynic_ctx rcynic_ctx_t; + +/** + * States that a walk_ctx_t can be in. + */ +typedef enum { + walk_state_initial, /**< Initial state */ + walk_state_rsync, /**< rsyncing certinfo.sia */ + walk_state_ready, /**< Ready to traverse outputs */ + walk_state_current, /**< prefix = rc->unauthenticated */ + walk_state_backup, /**< prefix = rc->old_authenticated */ + walk_state_done /**< Done walking this cert's outputs */ +} walk_state_t; + +/** + * Context for certificate tree walks. 
This includes all the stuff + * that we would keep as automatic variables on the call stack if we + * didn't have to use callbacks to support multiple rsync processes. + */ +typedef struct walk_ctx { + unsigned refcount; + certinfo_t certinfo; + X509 *cert; + Manifest *manifest; + object_generation_t manifest_generation; + STACK_OF(OPENSSL_STRING) *filenames; + int manifest_iteration, filename_iteration, stale_manifest; + walk_state_t state; + uri_t crldp; + STACK_OF(X509) *certs; + STACK_OF(X509_CRL) *crls; +} walk_ctx_t; + +DECLARE_STACK_OF(walk_ctx_t) + +/** + * Return codes from rsync functions. + */ +typedef enum { + rsync_status_done, /* Request completed */ + rsync_status_failed, /* Request failed */ + rsync_status_timed_out, /* Request timed out */ + rsync_status_pending, /* Request in progress */ + rsync_status_skipped /* Request not attempted */ +} rsync_status_t; + +/** + * States for asynchronous rsync. + * "initial" must be first. + */ + +#define RSYNC_STATES \ + QQ(initial) \ + QQ(running) \ + QQ(conflict_wait) \ + QQ(retry_wait) \ + QQ(closed) \ + QQ(terminating) + +#define QQ(x) rsync_state_##x, +typedef enum { RSYNC_STATES RSYNC_STATE_T_MAX } rsync_state_t; +#undef QQ + +#define QQ(x) #x , +static const char * const rsync_state_label[] = { RSYNC_STATES NULL }; +#undef QQ + +/** + * Context for asyncronous rsync. + */ +typedef struct rsync_ctx { + uri_t uri; + void (*handler)(rcynic_ctx_t *, const struct rsync_ctx *, const rsync_status_t, const uri_t *, void *); + void *cookie; + rsync_state_t state; + enum { + rsync_problem_none, /* Must be first */ + rsync_problem_timed_out, + rsync_problem_refused + } problem; + unsigned tries; + pid_t pid; + int fd; + time_t started, deadline; + char buffer[URI_MAX * 4]; + size_t buflen; +} rsync_ctx_t; + +DECLARE_STACK_OF(rsync_ctx_t) + +/** + * Record of rsync attempts. 
+ */ +typedef struct rsync_history { + uri_t uri; + time_t started, finished; + rsync_status_t status; + int final_slash; +} rsync_history_t; + +DECLARE_STACK_OF(rsync_history_t) + +/** + * Deferred task. + */ +typedef struct task { + void (*handler)(rcynic_ctx_t *, void *); + void *cookie; +} task_t; + +DECLARE_STACK_OF(task_t) + +/** + * Trust anchor locator (TAL) fetch context. + */ +typedef struct tal_ctx { + uri_t uri; + path_t path; + EVP_PKEY *pkey; +} tal_ctx_t; + +/** + * Extended context for verify callbacks. This is a wrapper around + * OpenSSL's X509_STORE_CTX, and the embedded X509_STORE_CTX @em must be + * the first element of this structure in order for the evil cast to + * do the right thing. This is ugly but safe, as the C language + * promises us that the address of the first element of a structure is + * the same as the address of the structure. + */ +typedef struct rcynic_x509_store_ctx { + X509_STORE_CTX ctx; /* Must be first */ + rcynic_ctx_t *rc; + const certinfo_t *subject; +} rcynic_x509_store_ctx_t; + +/** + * Program context that would otherwise be a mess of global variables. 
+ */ +struct rcynic_ctx { + path_t authenticated, old_authenticated, new_authenticated, unauthenticated; + char *jane, *rsync_program; + STACK_OF(validation_status_t) *validation_status; + STACK_OF(rsync_history_t) *rsync_history; + STACK_OF(rsync_ctx_t) *rsync_queue; + STACK_OF(task_t) *task_queue; + int use_syslog, allow_stale_crl, allow_stale_manifest, use_links; + int require_crl_in_manifest, rsync_timeout, priority[LOG_LEVEL_T_MAX]; + int allow_non_self_signed_trust_anchor, allow_object_not_in_manifest; + int max_parallel_fetches, max_retries, retry_wait_min, run_rsync; + int allow_digest_mismatch, allow_crl_digest_mismatch; + int allow_nonconformant_name, allow_ee_without_signedObject; + int allow_1024_bit_ee_key, allow_wrong_cms_si_attributes; + int rsync_early; + unsigned max_select_time; + validation_status_t *validation_status_in_waiting; + validation_status_t *validation_status_root; + log_level_t log_level; + X509_STORE *x509_store; +}; + + + +/* + * Handle NIDs we wish OpenSSL knew about. This is carefully (we + * hope) written to do nothing at all for any NID that OpenSSL knows + * about; the intent is just to add definitions for things OpenSSL + * doesn't know about yet. Of necessity, this is a bit gross, since + * it confounds runtime static variables with predefined macro names, + * but we try to put all the magic associated with this in one place. + * + * In the long run it might be cleaner to generate this with a trivial + * script and put the result in a shared .h file, but this will do for + * the moment. 
+ */ + +#ifndef NID_ad_rpkiManifest +static int NID_ad_rpkiManifest; +#endif + +#ifndef NID_ad_signedObject +static int NID_ad_signedObject; +#endif + +#ifndef NID_ct_ROA +static int NID_ct_ROA; +#endif + +#ifndef NID_ct_rpkiManifest +static int NID_ct_rpkiManifest; +#endif + +#ifndef NID_ct_rpkiGhostbusters +static int NID_ct_rpkiGhostbusters; +#endif + +#ifndef NID_cp_ipAddr_asNumber +static int NID_cp_ipAddr_asNumber; +#endif + +#ifndef NID_id_kp_bgpsec_router +static int NID_id_kp_bgpsec_router; +#endif + +/** + * Missing NIDs, if any. + */ +static const struct { + int *nid; + const char *oid; + const char *sn; + const char *ln; +} missing_nids[] = { + +#ifndef NID_ad_rpkiManifest + {&NID_ad_rpkiManifest, "1.3.6.1.5.5.7.48.10", "id-ad-rpkiManifest", "RPKI Manifest"}, +#endif + +#ifndef NID_ad_signedObject + {&NID_ad_signedObject, "1.3.6.1.5.5.7.48.11", "id-ad-signedObject", "Signed Object"}, +#endif + +#ifndef NID_ct_ROA + {&NID_ct_ROA, "1.2.840.113549.1.9.16.1.24", "id-ct-routeOriginAttestation", "ROA eContent"}, +#endif + +#ifndef NID_ct_rpkiManifest + {&NID_ct_rpkiManifest, "1.2.840.113549.1.9.16.1.26", "id-ct-rpkiManifest", "RPKI Manifest eContent"}, +#endif + +#ifndef NID_ct_rpkiGhostbusters + {&NID_ct_rpkiGhostbusters, "1.2.840.113549.1.9.16.1.35", "id-ct-rpkiGhostbusters", "RPKI Ghostbusters eContent"}, +#endif + +#ifndef NID_cp_ipAddr_asNumber + {&NID_cp_ipAddr_asNumber, "1.3.6.1.5.5.7.14.2", "id-cp-ipAddr-asNumber", "RPKI Certificate Policy"}, +#endif + +#ifndef NID_id_kp_bgpsec_router + {&NID_id_kp_bgpsec_router, "1.3.6.1.5.5.7.3.30", "id-kp-bgpsec-router", "BGPSEC Router Certificate"}, +#endif + +}; + + + +/** + * Subversion ID data. + */ +static const char svn_id[] = "$Id$"; + +/** + * Suffix we use temporarily during the symlink shuffle. Could be + * almost anything, but we want to do the length check early, before + * we waste a lot of work we'll just have to throw away, so we just + * wire in something short and obvious. 
+ */ +static const char authenticated_symlink_suffix[] = ".new"; + +/** + * Constants for comparisions. We can't build these at compile time, + * so they can't be const, but treat them as if they were once + * allocated. + * + * We probably need both a better scheme for naming NID_ replacements + * and a more comprehensive rewrite of how we handle OIDs OpenSSL + * doesn't know about, so that we neither conflict with defined + * symbols nor duplicate effort nor explode if and when OpenSSL adds + * new OIDs (with or without the names we would have used). + */ + +static const ASN1_INTEGER *asn1_zero, *asn1_four_octets, *asn1_twenty_octets; +static int NID_binary_signing_time; + + + +/** + * Handle missing NIDs. + */ +static int +create_missing_nids(void) +{ + int i; + + for (i = 0; i < (int) (sizeof(missing_nids) / sizeof(*missing_nids)); i++) + if ((*missing_nids[i].nid = OBJ_txt2nid(missing_nids[i].oid)) == NID_undef && + (*missing_nids[i].nid = OBJ_create(missing_nids[i].oid, + missing_nids[i].sn, + missing_nids[i].ln)) == NID_undef) + return 0; + + return 1; +} + + + +/** + * Type-safe wrapper around free() to keep safestack macros happy. + */ +static void OPENSSL_STRING_free(OPENSSL_STRING s) +{ + if (s) + free(s); +} + +/** + * Wrapper around an idiom we use with OPENSSL_STRING stacks. There's + * a bug in the current sk_OPENSSL_STRING_delete() macro that casts + * the return value to the wrong type, so we cast it to something + * innocuous here and avoid using that macro elsewhere. + */ +static void sk_OPENSSL_STRING_remove(STACK_OF(OPENSSL_STRING) *sk, const char *str) +{ + OPENSSL_STRING_free((void *) sk_OPENSSL_STRING_delete(sk, sk_OPENSSL_STRING_find(sk, str))); +} + +/** + * Allocate a new validation_status_t object. + */ +static validation_status_t *validation_status_t_new(void) +{ + validation_status_t *v = malloc(sizeof(*v)); + if (v) + memset(v, 0, sizeof(*v)); + return v; +} + +/** + * Type-safe wrapper around free() to keep safestack macros happy. 
 */
static void validation_status_t_free(validation_status_t *v)
{
  if (v)
    free(v);
}



/**
 * Allocate a new rsync_history_t object, zero-filled.  Returns NULL
 * on allocation failure.
 */
static rsync_history_t *rsync_history_t_new(void)
{
  rsync_history_t *h = malloc(sizeof(*h));
  if (h)
    memset(h, 0, sizeof(*h));
  return h;
}

/**
 * Type-safe wrapper around free() to keep safestack macros happy.
 */
static void rsync_history_t_free(rsync_history_t *h)
{
  if (h)
    free(h);
}

/**
 * Compare two rsync_history_t objects, ordering by URI string.
 */
static int rsync_history_cmp(const rsync_history_t * const *a, const rsync_history_t * const *b)
{
  return strcmp((*a)->uri.s, (*b)->uri.s);
}



/**
 * Convert a time_t to a printable string in UTC format
 * ("YYYY-MM-DDTHH:MM:SSZ").  If t is NULL, the current time is used.
 * Returns a pointer to the caller-supplied timestamp buffer.
 */
static const char *time_to_string(timestamp_t *ts, const time_t *t)
{
  time_t now;
  size_t n;

  assert(ts != NULL);

  if (t == NULL) {
    now = time(0);
    t = &now;
  }

  n = strftime(ts->s, sizeof(ts->s), "%Y-%m-%dT%H:%M:%SZ", gmtime(t));
  assert(n > 0);

  return ts->s;
}

/*
 * GCC attributes to help catch format string errors.
 */

#ifdef __GNUC__

static void logmsg(const rcynic_ctx_t *rc,
		   const log_level_t level,
		   const char *fmt, ...)
     __attribute__ ((format (printf, 3, 4)));
#endif

/**
 * Logging.  Messages above the configured log level are discarded;
 * the rest go to syslog (with the configured per-level priority) or
 * to stderr with a HH:MM:SS timestamp and optional program name
 * prefix (rc->jane).
 */
static void vlogmsg(const rcynic_ctx_t *rc,
		    const log_level_t level,
		    const char *fmt,
		    va_list ap)
{
  assert(rc && fmt);

  if (rc->log_level < level)
    return;

  if (rc->use_syslog) {
    vsyslog(rc->priority[level], fmt, ap);
  } else {
    char ts[sizeof("00:00:00")+1];
    time_t t = time(0);
    strftime(ts, sizeof(ts), "%H:%M:%S", localtime(&t));
    fprintf(stderr, "%s: ", ts);
    if (rc->jane)
      fprintf(stderr, "%s: ", rc->jane);
    vfprintf(stderr, fmt, ap);
    putc('\n', stderr);
  }
}

/**
 * Logging.  Varargs wrapper around vlogmsg().
 */
static void logmsg(const rcynic_ctx_t *rc,
		   const log_level_t level,
		   const char *fmt, ...)
+{ + va_list ap; + va_start(ap, fmt); + vlogmsg(rc, level, fmt, ap); + va_end(ap); +} + +/** + * Print OpenSSL library errors. + */ +static void log_openssl_errors(const rcynic_ctx_t *rc) +{ + const char *data, *file; + unsigned long code; + char error[256]; + int flags, line; + + if (!rc->log_level < log_verbose) + return; + + while ((code = ERR_get_error_line_data(&file, &line, &data, &flags))) { + ERR_error_string_n(code, error, sizeof(error)); + if (data && (flags & ERR_TXT_STRING)) + logmsg(rc, log_sys_err, "OpenSSL error %s:%d: %s: %s", file, line, error, data); + else + logmsg(rc, log_sys_err, "OpenSSL error %s:%d: %s", file, line, error); + } +} + +/** + * Configure logging. + */ +static int configure_logmsg(rcynic_ctx_t *rc, const char *name) +{ + int i; + + assert(rc && name); + + for (i = 0; i < sizeof(log_levels)/sizeof(*log_levels); i++) { + if (!strcmp(name, log_levels[i].name)) { + rc->log_level = log_levels[i].value; + return 1; + } + } + + logmsg(rc, log_usage_err, "Bad log level %s", name); + return 0; +} + +/** + * Configure syslog. + */ +static int configure_syslog(const rcynic_ctx_t *rc, + int *result, + const CODE *table, + const char *name) +{ + assert(result && table && name); + + while (table->c_name && strcmp(table->c_name, name)) + table++; + + if (table->c_name) { + *result = table->c_val; + return 1; + } else { + logmsg(rc, log_usage_err, "Bad syslog code %s", name); + return 0; + } +} + +/** + * Configure boolean variable. + */ +static int configure_boolean(const rcynic_ctx_t *rc, + int *result, + const char *val) +{ + assert(rc && result && val); + + switch (*val) { + case 'y': case 'Y': case 't': case 'T': case '1': + *result = 1; + return 1; + case 'n': case 'N': case 'f': case 'F': case '0': + *result = 0; + return 1; + default: + logmsg(rc, log_usage_err, "Bad boolean value %s", val); + return 0; + } +} + +/** + * Configure integer variable. 
 */
static int configure_integer(const rcynic_ctx_t *rc,
			     int *result,
			     const char *val)
{
  long res;
  char *p;

  assert(rc && result && val);

  res = strtol(val, &p, 10);

  /* Accept only if the whole (nonempty) string parsed as a number. */
  if (*val != '\0' && *p == '\0') {
    *result = (int) res;
    return 1;
  } else {
    logmsg(rc, log_usage_err, "Bad integer value %s", val);
    return 0;
  }
}

/**
 * Configure unsigned integer variable.  Same contract as
 * configure_integer(), but for unsigned values.
 */
static int configure_unsigned_integer(const rcynic_ctx_t *rc,
				      unsigned *result,
				      const char *val)
{
  unsigned long res;
  char *p;

  assert(rc && result && val);

  res = strtoul(val, &p, 10);

  /* Accept only if the whole (nonempty) string parsed as a number. */
  if (*val != '\0' && *p == '\0') {
    *result = (unsigned) res;
    return 1;
  } else {
    logmsg(rc, log_usage_err, "Bad integer value %s", val);
    return 0;
  }
}



/**
 * Make a directory if it doesn't already exist.
 *
 * Note that this creates the *parent* directories of name: the last
 * path component is stripped before anything is created, and the
 * function recurses to build the chain of ancestors first.
 */
static int mkdir_maybe(const rcynic_ctx_t *rc, const path_t *name)
{
  path_t path;
  char *s;

  assert(name != NULL);
  if (strlen(name->s) >= sizeof(path.s)) {
    logmsg(rc, log_data_err, "Pathname %s too long", name->s);
    return 0;
  }
  strcpy(path.s, name->s);
  /* Skip a leading '/' so an absolute root isn't treated as a component. */
  s = path.s[0] == '/' ? path.s + 1 : path.s;
  if ((s = strrchr(s, '/')) == NULL)
    return 1;			/* No directory part left to create */
  *s = '\0';
  if (!mkdir_maybe(rc, &path)) {
    logmsg(rc, log_sys_err, "Failed to make directory %s", path.s);
    return 0;
  }
  if (!access(path.s, F_OK))
    return 1;			/* Already exists */
  logmsg(rc, log_verbose, "Creating directory %s", path.s);
  return mkdir(path.s, 0777) == 0;
}

/**
 * strdup() a string and push it onto a stack.  On failure nothing is
 * pushed and the duplicate (if any) is freed.
 */
static int sk_OPENSSL_STRING_push_strdup(STACK_OF(OPENSSL_STRING) *sk, const char *str)
{
  OPENSSL_STRING s = strdup(str);

  if (s && sk_OPENSSL_STRING_push(sk, s))
    return 1;
  if (s)
    free(s);
  return 0;
}

/**
 * Compare two URI strings, for OpenSSL STACK operations.
 */

static int uri_cmp(const char * const *a, const char * const *b)
{
  return strcmp(*a, *b);
}

/**
 * Is string an rsync URI? 
 */
static int is_rsync(const char *uri)
{
  return uri && !strncmp(uri, SCHEME_RSYNC, SIZEOF_RSYNC);
}

/**
 * Convert an rsync URI to a filename, checking for evil character
 * sequences.  NB: This routine can't call mib_increment(), because
 * mib_increment() calls it, so errors detected here only go into
 * the log, not the MIB.
 *
 * On success, writes the filename into *path (optionally prepended
 * with *prefix) and returns 1; on failure, leaves *path an empty
 * string and returns 0.
 */
static int uri_to_filename(const rcynic_ctx_t *rc,
			   const uri_t *uri,
			   path_t *path,
			   const path_t *prefix)
{
  const char *u;
  size_t n;

  path->s[0] = '\0';

  if (!is_rsync(uri->s)) {
    logmsg(rc, log_telemetry, "%s is not an rsync URI, not converting to filename", uri->s);
    return 0;
  }

  /* Skip the "rsync://" scheme; the remainder maps directly to a path. */
  u = uri->s + SIZEOF_RSYNC;
  n = strlen(u);

  /* Reject absolute paths, leading dots, and ".." path traversal. */
  if (u[0] == '/' || u[0] == '.' || strstr(u, "/../") ||
      (n >= 3 && !strcmp(u + n - 3, "/.."))) {
    logmsg(rc, log_data_err, "Dangerous URI %s, not converting to filename", uri->s);
    return 0;
  }

  if (prefix)
    n += strlen(prefix->s);

  if (n >= sizeof(path->s)) {
    logmsg(rc, log_data_err, "URI %s too long, not converting to filename", uri->s);
    return 0;
  }

  if (prefix) {
    strcpy(path->s, prefix->s);
    strcat(path->s, u);
  } else {
    strcpy(path->s, u);
  }

  return 1;
}

/**
 * Compare filename fields of two FileAndHash structures.
 */
static int FileAndHash_name_cmp(const FileAndHash * const *a, const FileAndHash * const *b)
{
  return strcmp((char *) (*a)->file->data, (char *) (*b)->file->data);
}

/**
 * Get value of code in a validation_status_t.  Returns nonzero if
 * the bit for this MIB counter is set in the events bitmap.
 */
static int validation_status_get_code(const validation_status_t *v,
				      const mib_counter_t code)
{
  assert(v && code < MIB_COUNTER_T_MAX);
  return (v->events[code / 8] & (1 << (code % 8))) != 0;
}

/**
 * Set value of code in a validation_status_t. 
+ */ +static void validation_status_set_code(validation_status_t *v, + const mib_counter_t code, + int value) +{ + assert(v && code < MIB_COUNTER_T_MAX); + if (value) + v->events[code / 8] |= (1 << (code % 8)); + else + v->events[code / 8] &= ~(1 << (code % 8)); +} + +/** + * validation_status object comparison, for AVL tree rather than + * OpenSSL stacks. + */ +static int +validation_status_cmp(const validation_status_t *node, + const uri_t *uri, + const object_generation_t generation) +{ + int cmp = ((int) node->generation) - ((int) generation); + if (cmp) + return cmp; + else + return strcmp(uri->s, node->uri.s); +} + +/** + * validation_status AVL tree insertion. Adapted from code written by + * Paul Vixie and explictly placed in the public domain using examples + * from the book: "Algorithms & Data Structures," Niklaus Wirth, + * Prentice-Hall, 1986, ISBN 0-13-022005-1. Thanks, Paul! + */ +static validation_status_t * +validation_status_sprout(validation_status_t **node, + int *needs_balancing, + validation_status_t *new_node) +{ +#ifdef AVL_DEBUG +#define AVL_MSG(msg) sprintf(stderr, "AVL_DEBUG: '%s'\n", msg) +#else +#define AVL_MSG(msg) +#endif + + validation_status_t *p1, *p2, *result; + int cmp; + + /* + * Are we grounded? If so, add the node "here" and set the + * rebalance flag, then exit. + */ + if (*node == NULL) { + AVL_MSG("Grounded, adding new node"); + new_node->left_child = NULL; + new_node->right_child = NULL; + new_node->balance = 0; + *node = new_node; + *needs_balancing = 1; + return *node; + } + + /* + * Compare the data. + */ + cmp = validation_status_cmp(*node, &new_node->uri, new_node->generation); + + /* + * If LESS, prepare to move to the left. + */ + if (cmp < 0) { + + AVL_MSG("LESS. 
sprouting left."); + result = validation_status_sprout(&(*node)->left_child, needs_balancing, new_node); + + if (*needs_balancing) { + AVL_MSG("LESS: left branch has grown longer"); + + switch ((*node)->balance) { + + case 1: + /* + * Right branch WAS longer; balance is ok now. + */ + AVL_MSG("LESS: case 1.. balance restored implicitly"); + (*node)->balance = 0; + *needs_balancing = 0; + break; + + case 0: + /* + * Balance WAS okay; now left branch longer. + */ + AVL_MSG("LESS: case 0.. balnce bad but still ok"); + (*node)->balance = -1; + break; + + case -1: + /* + * Left branch was already too long. Rebalance. + */ + AVL_MSG("LESS: case -1: rebalancing"); + p1 = (*node)->left_child; + + if (p1->balance == -1) { + AVL_MSG("LESS: single LL"); + (*node)->left_child = p1->right_child; + p1->right_child = *node; + (*node)->balance = 0; + *node = p1; + } + + else { + AVL_MSG("LESS: double LR"); + + p2 = p1->right_child; + p1->right_child = p2->left_child; + p2->left_child = p1; + + (*node)->left_child = p2->right_child; + p2->right_child = *node; + + if (p2->balance == -1) + (*node)->balance = 1; + else + (*node)->balance = 0; + + if (p2->balance == 1) + p1->balance = -1; + else + p1->balance = 0; + *node = p2; + } + + (*node)->balance = 0; + *needs_balancing = 0; + } + } + return result; + } + + /* + * If MORE, prepare to move to the right. 
+ */ + if (cmp > 0) { + + AVL_MSG("MORE: sprouting to the right"); + result = validation_status_sprout(&(*node)->right_child, needs_balancing, new_node); + + if (*needs_balancing) { + AVL_MSG("MORE: right branch has grown longer"); + + switch ((*node)->balance) { + + case -1:AVL_MSG("MORE: balance was off, fixed implicitly"); + (*node)->balance = 0; + *needs_balancing = 0; + break; + + case 0: AVL_MSG("MORE: balance was okay, now off but ok"); + (*node)->balance = 1; + break; + + case 1: AVL_MSG("MORE: balance was off, need to rebalance"); + p1 = (*node)->right_child; + + if (p1->balance == 1) { + AVL_MSG("MORE: single RR"); + (*node)->right_child = p1->left_child; + p1->left_child = *node; + (*node)->balance = 0; + *node = p1; + } + + else { + AVL_MSG("MORE: double RL"); + + p2 = p1->left_child; + p1->left_child = p2->right_child; + p2->right_child = p1; + + (*node)->right_child = p2->left_child; + p2->left_child = *node; + + if (p2->balance == 1) + (*node)->balance = -1; + else + (*node)->balance = 0; + + if (p2->balance == -1) + p1->balance = 1; + else + p1->balance = 0; + + *node = p2; + } /*else*/ + (*node)->balance = 0; + *needs_balancing = 0; + } + } + return result; + } + + /* + * Neither more nor less, found existing node matching key, return it. + */ + AVL_MSG("I found it!"); + *needs_balancing = 0; + return *node; + +#undef AVL_MSG +} + +/** + * Add a validation status entry to internal log. 
+ */ +static void log_validation_status(rcynic_ctx_t *rc, + const uri_t *uri, + const mib_counter_t code, + const object_generation_t generation) +{ + validation_status_t *v = NULL; + int needs_balancing = 0; + + assert(rc && uri && code < MIB_COUNTER_T_MAX && generation < OBJECT_GENERATION_MAX); + + if (!rc->validation_status) + return; + + if (code == rsync_transfer_skipped && !rc->run_rsync) + return; + + if (rc->validation_status_in_waiting == NULL && + (rc->validation_status_in_waiting = validation_status_t_new()) == NULL) { + logmsg(rc, log_sys_err, "Couldn't allocate validation status entry for %s", uri->s); + return; + } + + v = rc->validation_status_in_waiting; + memset(v, 0, sizeof(*v)); + v->uri = *uri; + v->generation = generation; + + v = validation_status_sprout(&rc->validation_status_root, &needs_balancing, v); + if (v == rc->validation_status_in_waiting) + rc->validation_status_in_waiting = NULL; + + if (rc->validation_status_in_waiting == NULL && + !sk_validation_status_t_push(rc->validation_status, v)) { + logmsg(rc, log_sys_err, "Couldn't store validation status entry for %s", uri->s); + return; + } + + v->timestamp = time(0); + + if (validation_status_get_code(v, code)) + return; + + validation_status_set_code(v, code, 1); + + logmsg(rc, log_verbose, "Recording \"%s\" for %s%s%s", + (mib_counter_desc[code] + ? mib_counter_desc[code] + : X509_verify_cert_error_string(mib_counter_openssl[code])), + (generation != object_generation_null ? object_generation_label[generation] : ""), + (generation != object_generation_null ? " " : ""), + uri->s); +} + +/** + * Copy or link a file, as the case may be. 
+ */ +static int cp_ln(const rcynic_ctx_t *rc, const path_t *source, const path_t *target) +{ + struct stat statbuf; + struct utimbuf utimebuf; + FILE *in = NULL, *out = NULL; + int c, ok = 0; + + if (rc->use_links) { + (void) unlink(target->s); + ok = link(source->s, target->s) == 0; + if (!ok) + logmsg(rc, log_sys_err, "Couldn't link %s to %s: %s", + source->s, target->s, strerror(errno)); + return ok; + } + + if ((in = fopen(source->s, "rb")) == NULL || + (out = fopen(target->s, "wb")) == NULL) + goto done; + + while ((c = getc(in)) != EOF) + if (putc(c, out) == EOF) + goto done; + + ok = 1; + + done: + ok &= !(in != NULL && fclose(in) == EOF); + ok &= !(out != NULL && fclose(out) == EOF); + + if (!ok) { + logmsg(rc, log_sys_err, "Couldn't copy %s to %s: %s", + source->s, target->s, strerror(errno)); + return ok; + } + + /* + * Preserve the file modification time to allow for detection of + * changed objects in the authenticated directory. Failure to reset + * the times is not optimal, but is also not critical, thus no + * failure return. + */ + if (stat(source->s, &statbuf) < 0 || + (utimebuf.actime = statbuf.st_atime, + utimebuf.modtime = statbuf.st_mtime, + utime(target->s, &utimebuf) < 0)) + logmsg(rc, log_sys_err, "Couldn't copy inode timestamp from %s to %s: %s", + source->s, target->s, strerror(errno)); + + return ok; +} + +/** + * Install an object. 
+ */ +static int install_object(rcynic_ctx_t *rc, + const uri_t *uri, + const path_t *source, + const object_generation_t generation) +{ + path_t target; + + if (!uri_to_filename(rc, uri, &target, &rc->new_authenticated)) { + logmsg(rc, log_data_err, "Couldn't generate installation name for %s", uri->s); + return 0; + } + + if (!mkdir_maybe(rc, &target)) { + logmsg(rc, log_sys_err, "Couldn't create directory for %s", target.s); + return 0; + } + + if (!cp_ln(rc, source, &target)) + return 0; + log_validation_status(rc, uri, object_accepted, generation); + return 1; +} + +/** + * AVL tree lookup for validation status objects. + */ +static validation_status_t * +validation_status_find(validation_status_t *node, + const uri_t *uri, + const object_generation_t generation) +{ + int cmp; + + while (node != NULL && (cmp = validation_status_cmp(node, uri, generation)) != 0) + node = cmp < 0 ? node->left_child : node->right_child; + + return node; +} + +/** + * Check whether we have a validation status entry corresponding to a + * given filename. This is intended for use during pruning the + * unauthenticated tree, so it only checks the current generation. + */ +static int +validation_status_find_filename(const rcynic_ctx_t *rc, + const char *filename) +{ + uri_t uri; + + if (strlen(filename) + SIZEOF_RSYNC >= sizeof(uri.s)) + return 0; + + strcpy(uri.s, SCHEME_RSYNC); + strcat(uri.s, filename); + + return validation_status_find(rc->validation_status_root, &uri, object_generation_current) != NULL; +} + +/** + * Figure out whether we already have a good copy of an object. This + * is a little more complicated than it sounds, because we might have + * failed the current generation and accepted the backup due to having + * followed the old CA certificate chain first during a key rollover. + * So if this check is of the current object and we have not already + * accepted the current object for this URI, we need to recheck. 
+ * + * We also handle logging when we decide that we do need to check, so + * that the caller doesn't need to concern itself with why we thought + * the check was necessary. + */ +static int skip_checking_this_object(rcynic_ctx_t *rc, + const uri_t *uri, + const object_generation_t generation) +{ + validation_status_t *v = NULL; + path_t path; + + assert(rc && uri && rc->validation_status); + + if (!uri_to_filename(rc, uri, &path, &rc->new_authenticated)) + return 1; + + if (access(path.s, R_OK)) { + logmsg(rc, log_telemetry, "Checking %s", uri->s); + return 0; + } + + if (generation != object_generation_current) + return 1; + + v = validation_status_find(rc->validation_status_root, uri, generation); + + if (v != NULL && validation_status_get_code(v, object_accepted)) + return 1; + + log_validation_status(rc, uri, rechecking_object, generation); + logmsg(rc, log_telemetry, "Rechecking %s", uri->s); + return 0; +} + + + +/** + * Check str for a suffix. + */ +static int endswith(const char *str, const char *suffix) +{ + size_t len_str, len_suffix; + assert(str != NULL && suffix != NULL); + len_str = strlen(str); + len_suffix = strlen(suffix); + return len_str >= len_suffix && !strcmp(str + len_str - len_suffix, suffix); +} + +/** + * Check str for a prefix. + */ +static int startswith(const char *str, const char *prefix) +{ + size_t len_str, len_prefix; + assert(str != NULL && prefix != NULL); + len_str = strlen(str); + len_prefix = strlen(prefix); + return len_str >= len_prefix && !strncmp(str, prefix, len_prefix); +} + +/** + * Convert a filename to a file:// URI, for logging. 
+ */ +static void filename_to_uri(uri_t *uri, + const char *fn) +{ + assert(sizeof("file://") < sizeof(uri->s)); + strcpy(uri->s, "file://"); + if (*fn != '/') { + if (getcwd(uri->s + strlen(uri->s), sizeof(uri->s) - strlen(uri->s)) == NULL || + (!endswith(uri->s, "/") && strlen(uri->s) >= sizeof(uri->s) - 1)) + uri->s[0] = '\0'; + else + strcat(uri->s, "/"); + } + if (uri->s[0] != '\0' && strlen(uri->s) + strlen(fn) < sizeof(uri->s)) + strcat(uri->s, fn); + else + uri->s[0] = '\0'; +} + +/** + * Set a directory name, adding or stripping trailing slash as needed. + */ +static int set_directory(const rcynic_ctx_t *rc, path_t *out, const char *in, const int want_slash) +{ + int has_slash, need_slash; + size_t n; + + assert(rc && in && out); + + n = strlen(in); + + if (n == 0) { + logmsg(rc, log_usage_err, "Empty path"); + return 0; + } + + has_slash = in[n - 1] == '/'; + + need_slash = want_slash && !has_slash; + + if (n + need_slash + 1 > sizeof(out->s)) { + logmsg(rc, log_usage_err, "Path \"%s\" too long", in); + return 0; + } + + strcpy(out->s, in); + if (need_slash) + strcat(out->s, "/"); + else if (has_slash && !want_slash) + out->s[n - 1] = '\0'; + + return 1; +} + +/** + * Test whether a filesystem path points to a directory. + */ +static int is_directory(const path_t *name) +{ + struct stat st; + + assert(name); + return lstat(name->s, &st) == 0 && S_ISDIR(st.st_mode); +} + +/** + * Remove a directory tree, like rm -rf. 
+ */ +static int rm_rf(const path_t *name) +{ + path_t path; + struct dirent *d; + DIR *dir; + int ret = 0; + + assert(name); + + if (!is_directory(name)) + return unlink(name->s) == 0; + + if ((dir = opendir(name->s)) == NULL) + return 0; + + while ((d = readdir(dir)) != NULL) { + if (!strcmp(d->d_name, ".") || !strcmp(d->d_name, "..")) + continue; + if (snprintf(path.s, sizeof(path.s), "%s/%s", name->s, d->d_name) >= sizeof(path.s)) + goto done; + if (unlink(path.s) == 0) + continue; + else if (rm_rf(&path)) + continue; + else + goto done; + } + + ret = rmdir(name->s) == 0; + + done: + closedir(dir); + return ret; +} + +/** + * Construct names for the directories not directly settable by the + * user. + * + * This function also checks for an old-style rc->authenticated + * directory, to simplify upgrade from older versions of rcynic. + */ +static int construct_directory_names(rcynic_ctx_t *rc) +{ + struct stat st; + ssize_t n; + path_t p; + time_t t = time(0); + + p = rc->authenticated; + + n = strlen(p.s); + + if (n + sizeof(authenticated_symlink_suffix) >= sizeof(p.s)) { + logmsg(rc, log_usage_err, "Symlink name would be too long"); + return 0; + } + + if (strftime(p.s + n, sizeof(p.s) - n - 1, ".%Y-%m-%dT%H:%M:%SZ", gmtime(&t)) == 0) { + logmsg(rc, log_usage_err, "Generated path with timestamp would be too long"); + return 0; + } + + if (!set_directory(rc, &rc->new_authenticated, p.s, 1)) + return 0; + + if (!set_directory(rc, &rc->old_authenticated, rc->authenticated.s, 1)) + return 0; + + if (lstat(rc->authenticated.s, &st) == 0 && S_ISDIR((st.st_mode)) && + strlen(rc->authenticated.s) + sizeof(".old") < sizeof(p.s)) { + p = rc->authenticated; + strcat(p.s, ".old"); + rm_rf(&p); + (void) rename(rc->authenticated.s, p.s); + } + + if (lstat(rc->authenticated.s, &st) == 0 && S_ISDIR(st.st_mode)) { + logmsg(rc, log_usage_err, + "Existing %s directory is in the way, please remove it", + rc->authenticated.s); + return 0; + } + + return 1; +} + +/** + * Do final 
symlink shuffle and cleanup of output directories. + */ +static int finalize_directories(const rcynic_ctx_t *rc) +{ + path_t path, real_old, real_new; + const char *dir; + glob_t g; + int i; + + if (!realpath(rc->old_authenticated.s, real_old.s)) + real_old.s[0] = '\0'; + + if (!realpath(rc->new_authenticated.s, real_new.s)) + real_new.s[0] = '\0'; + + assert(real_new.s[0] && real_new.s[strlen(real_new.s) - 1] != '/'); + + if ((dir = strrchr(real_new.s, '/')) == NULL) + dir = real_new.s; + else + dir++; + + path = rc->authenticated; + + if (strlen(path.s) + sizeof(authenticated_symlink_suffix) >= sizeof(path.s)) + return 0; + strcat(path.s, authenticated_symlink_suffix); + + (void) unlink(path.s); + + if (symlink(dir, path.s) < 0) { + logmsg(rc, log_sys_err, "Couldn't link %s to %s: %s", + path.s, dir, strerror(errno)); + return 0; + } + + if (rename(path.s, rc->authenticated.s) < 0) { + logmsg(rc, log_sys_err, "Couldn't rename %s to %s: %s", + path.s, rc->authenticated.s, strerror(errno)); + return 0; + } + + if (real_old.s[0] && strlen(rc->authenticated.s) + sizeof(".old") < sizeof(path.s)) { + assert(real_old.s[strlen(real_old.s) - 1] != '/'); + + path = rc->authenticated; + strcat(path.s, ".old"); + + (void) unlink(path.s); + + if ((dir = strrchr(real_old.s, '/')) == NULL) + dir = real_old.s; + else + dir++; + + (void) symlink(dir, path.s); + } + + path = rc->authenticated; + assert(strlen(path.s) + sizeof(".*") < sizeof(path.s)); + strcat(path.s, ".*"); + + memset(&g, 0, sizeof(g)); + + if (real_new.s[0] && glob(path.s, 0, 0, &g) == 0) { + for (i = 0; i < g.gl_pathc; i++) + if (realpath(g.gl_pathv[i], path.s) && + strcmp(path.s, real_old.s) && + strcmp(path.s, real_new.s)) + rm_rf(&path); + globfree(&g); + } + + return 1; +} + + + +/** + * Test whether a pair of URIs "conflict", that is, whether attempting + * to rsync both of them at the same time in parallel might cause + * unpredictable behavior. Might need a better name for this test. 
+ * + * Returns non-zero iff the two URIs "conflict". + */ +static int conflicting_uris(const uri_t *a, const uri_t *b) +{ + size_t len_a, len_b; + + assert(a && is_rsync(a->s) && b && is_rsync(b->s)); + + len_a = strlen(a->s); + len_b = strlen(b->s); + + assert(len_a < sizeof(a->s) && len_b < sizeof(b->s)); + + return !strncmp(a->s, b->s, len_a < len_b ? len_a : len_b); +} + + + +/** + * Read non-directory filenames from a directory, so we can check to + * see what's missing from a manifest. + */ +static STACK_OF(OPENSSL_STRING) *directory_filenames(const rcynic_ctx_t *rc, + const walk_state_t state, + const uri_t *uri) +{ + STACK_OF(OPENSSL_STRING) *result = NULL; + path_t dpath, fpath; + const path_t *prefix = NULL; + DIR *dir = NULL; + struct dirent *d; + int ok = 0; + + assert(rc && uri); + + switch (state) { + case walk_state_current: + prefix = &rc->unauthenticated; + break; + case walk_state_backup: + prefix = &rc->old_authenticated; + break; + default: + goto done; + } + + if (!uri_to_filename(rc, uri, &dpath, prefix) || + (dir = opendir(dpath.s)) == NULL || + (result = sk_OPENSSL_STRING_new(uri_cmp)) == NULL) + goto done; + + while ((d = readdir(dir)) != NULL) + if (snprintf(fpath.s, sizeof(fpath.s), "%s/%s", dpath.s, d->d_name) >= sizeof(fpath.s)) { + logmsg(rc, log_data_err, "Local path name %s/%s too long", dpath.s, d->d_name); + goto done; + } + else if (!is_directory(&fpath) && !sk_OPENSSL_STRING_push_strdup(result, d->d_name)) { + logmsg(rc, log_sys_err, "sk_OPENSSL_STRING_push_strdup() failed, probably memory exhaustion"); + goto done; + } + + ok = 1; + + done: + if (dir != NULL) + closedir(dir); + + if (ok) + return result; + + sk_OPENSSL_STRING_pop_free(result, OPENSSL_STRING_free); + return NULL; +} + + + +/** + * Increment walk context reference count. 
+ */ +static void walk_ctx_attach(walk_ctx_t *w) +{ + if (w != NULL) { + w->refcount++; + assert(w->refcount != 0); + } +} + +/** + * Decrement walk context reference count; freeing the context if the + * reference count is now zero. + */ +static void walk_ctx_detach(walk_ctx_t *w) +{ + if (w != NULL && --(w->refcount) == 0) { + assert(w->refcount == 0); + X509_free(w->cert); + Manifest_free(w->manifest); + sk_X509_free(w->certs); + sk_X509_CRL_pop_free(w->crls, X509_CRL_free); + sk_OPENSSL_STRING_pop_free(w->filenames, OPENSSL_STRING_free); + free(w); + } +} + +/** + * Return top context of a walk context stack. + */ +static walk_ctx_t *walk_ctx_stack_head(STACK_OF(walk_ctx_t) *wsk) +{ + return sk_walk_ctx_t_value(wsk, sk_walk_ctx_t_num(wsk) - 1); +} + +/** + * Whether we're done iterating over a walk context. Think of this as + * the thing you call (negated) in the second clause of a conceptual + * "for" loop. + */ +static int walk_ctx_loop_done(STACK_OF(walk_ctx_t) *wsk) +{ + walk_ctx_t *w = walk_ctx_stack_head(wsk); + return wsk == NULL || w == NULL || w->state >= walk_state_done; +} + +/** + * Walk context iterator. Think of this as the thing you call in the + * third clause of a conceptual "for" loop: this reinitializes as + * necessary for the next pass through the loop. + * + * General idea here is that we have several state variables in a walk + * context which collectively define the current pass, product URI, + * etc, and we want to be able to iterate through this sequence via + * the event system. So this function steps to the next state. + * + * Conceptually, w->manifest->fileList and w->filenames form a single + * array with index w->manifest_iteration + w->filename_iteration. + * Beware of fencepost errors, I've gotten this wrong once already. + * Slightly odd coding here is to make it easier to check this. 
+ */ +static void walk_ctx_loop_next(const rcynic_ctx_t *rc, STACK_OF(walk_ctx_t) *wsk) +{ + walk_ctx_t *w = walk_ctx_stack_head(wsk); + int n_manifest, n_filenames; + + assert(rc && wsk && w); + + assert(w->manifest_iteration >= 0 && w->filename_iteration >= 0); + + n_manifest = w->manifest ? sk_FileAndHash_num(w->manifest->fileList) : 0; + n_filenames = w->filenames ? sk_OPENSSL_STRING_num(w->filenames) : 0; + + if (w->manifest_iteration + w->filename_iteration < n_manifest + n_filenames) { + if (w->manifest_iteration < n_manifest) + w->manifest_iteration++; + else + w->filename_iteration++; + } + + assert(w->manifest_iteration <= n_manifest && w->filename_iteration <= n_filenames); + + if (w->manifest_iteration + w->filename_iteration < n_manifest + n_filenames) + return; + + while (!walk_ctx_loop_done(wsk)) { + w->state++; + w->manifest_iteration = 0; + w->filename_iteration = 0; + sk_OPENSSL_STRING_pop_free(w->filenames, OPENSSL_STRING_free); + w->filenames = directory_filenames(rc, w->state, &w->certinfo.sia); + if (w->manifest != NULL || w->filenames != NULL) + return; + } +} + +static int check_manifest(rcynic_ctx_t *rc, STACK_OF(walk_ctx_t) *wsk); + +/** + * Loop initializer for walk context. Think of this as the thing you + * call in the first clause of a conceptual "for" loop. + */ +static void walk_ctx_loop_init(rcynic_ctx_t *rc, STACK_OF(walk_ctx_t) *wsk) +{ + walk_ctx_t *w = walk_ctx_stack_head(wsk); + + assert(rc && wsk && w && w->state == walk_state_ready); + + if (!w->manifest && !check_manifest(rc, wsk)) { + /* + * Simple failure to find a manifest doesn't get here. This is + * for manifest failures that cause us to reject all of this + * certificate's products due to policy knob settings. 
+ */ + w->state = walk_state_done; + return; + } + + if (!w->manifest) + logmsg(rc, log_telemetry, "Couldn't get manifest %s, blundering onward", w->certinfo.manifest.s); + + w->manifest_iteration = 0; + w->filename_iteration = 0; + w->state++; + assert(w->state == walk_state_current); + + assert(w->filenames == NULL); + w->filenames = directory_filenames(rc, w->state, &w->certinfo.sia); + + w->stale_manifest = w->manifest != NULL && X509_cmp_current_time(w->manifest->nextUpdate) < 0; + + while (!walk_ctx_loop_done(wsk) && + (w->manifest == NULL || w->manifest_iteration >= sk_FileAndHash_num(w->manifest->fileList)) && + (w->filenames == NULL || w->filename_iteration >= sk_OPENSSL_STRING_num(w->filenames))) + walk_ctx_loop_next(rc, wsk); +} + +/** + * Extract URI and hash values from walk context. + */ +static int walk_ctx_loop_this(const rcynic_ctx_t *rc, + STACK_OF(walk_ctx_t) *wsk, + uri_t *uri, + const unsigned char **hash, + size_t *hashlen) +{ + const walk_ctx_t *w = walk_ctx_stack_head(wsk); + const char *name = NULL; + FileAndHash *fah = NULL; + + assert(rc && wsk && w && uri && hash && hashlen); + + if (w->manifest != NULL && w->manifest_iteration < sk_FileAndHash_num(w->manifest->fileList)) { + fah = sk_FileAndHash_value(w->manifest->fileList, w->manifest_iteration); + name = (const char *) fah->file->data; + } else if (w->filenames != NULL && w->filename_iteration < sk_OPENSSL_STRING_num(w->filenames)) { + name = sk_OPENSSL_STRING_value(w->filenames, w->filename_iteration); + } + + if (name == NULL) { + logmsg(rc, log_sys_err, "Can't find a URI in walk context, this shouldn't happen: state %d, manifest_iteration %d, filename_iteration %d", + (int) w->state, w->manifest_iteration, w->filename_iteration); + return 0; + } + + if (strlen(w->certinfo.sia.s) + strlen(name) >= sizeof(uri->s)) { + logmsg(rc, log_data_err, "URI %s%s too long, skipping", w->certinfo.sia.s, uri->s); + return 0; + } + + strcpy(uri->s, w->certinfo.sia.s); + strcat(uri->s, name); + + 
if (fah != NULL) { + sk_OPENSSL_STRING_remove(w->filenames, name); + *hash = fah->hash->data; + *hashlen = fah->hash->length; + } else { + *hash = NULL; + *hashlen = 0; + } + + return 1; +} + +/** + * Create a new walk context stack. + */ +static STACK_OF(walk_ctx_t) *walk_ctx_stack_new(void) +{ + return sk_walk_ctx_t_new_null(); +} + +/** + * Push a walk context onto a walk context stack, return the new context. + */ +static walk_ctx_t *walk_ctx_stack_push(STACK_OF(walk_ctx_t) *wsk, + X509 *x, + const certinfo_t *certinfo) +{ + walk_ctx_t *w; + + if (x == NULL || + (certinfo == NULL) != (sk_walk_ctx_t_num(wsk) == 0) || + (w = malloc(sizeof(*w))) == NULL) + return NULL; + + memset(w, 0, sizeof(*w)); + w->cert = x; + if (certinfo != NULL) + w->certinfo = *certinfo; + else + memset(&w->certinfo, 0, sizeof(w->certinfo)); + + if (!sk_walk_ctx_t_push(wsk, w)) { + free(w); + return NULL; + } + + walk_ctx_attach(w); + return w; +} + +/** + * Pop and discard a walk context from a walk context stack. + */ +static void walk_ctx_stack_pop(STACK_OF(walk_ctx_t) *wsk) +{ + walk_ctx_detach(sk_walk_ctx_t_pop(wsk)); +} + +/** + * Clone a stack of walk contexts. + */ +static STACK_OF(walk_ctx_t) *walk_ctx_stack_clone(STACK_OF(walk_ctx_t) *old_wsk) +{ + STACK_OF(walk_ctx_t) *new_wsk; + int i; + if (old_wsk == NULL || (new_wsk = sk_walk_ctx_t_dup(old_wsk)) == NULL) + return NULL; + for (i = 0; i < sk_walk_ctx_t_num(new_wsk); i++) + walk_ctx_attach(sk_walk_ctx_t_value(new_wsk, i)); + return new_wsk; +} + +/** + * Extract certificate stack from walk context stack. Returns a newly + * created STACK_OF(X509) pointing to the existing cert objects. + * + * NB: This is a shallow copy, so use sk_X509_free() to free it, not + * sk_X509_pop_free(). 
+ */ +static STACK_OF(X509) *walk_ctx_stack_certs(const rcynic_ctx_t *rc, + STACK_OF(walk_ctx_t) *wsk) +{ + STACK_OF(X509) *xsk = sk_X509_new_null(); + walk_ctx_t *w; + int i; + + assert(rc); + + for (i = 0; i < sk_walk_ctx_t_num(wsk); i++) + if ((w = sk_walk_ctx_t_value(wsk, i)) == NULL || + (w->cert != NULL && !sk_X509_push(xsk, w->cert))) + goto fail; + + return xsk; + + fail: + logmsg(rc, log_sys_err, "Couldn't clone walk_ctx_stack, memory exhausted?"); + sk_X509_free(xsk); + return NULL; +} + +/** + * Free a walk context stack, decrementing reference counts of each + * frame on it. + */ +static void walk_ctx_stack_free(STACK_OF(walk_ctx_t) *wsk) +{ + sk_walk_ctx_t_pop_free(wsk, walk_ctx_detach); +} + + + +static int rsync_count_running(const rcynic_ctx_t *); + +/** + * Add a task to the task queue. + */ +static int task_add(const rcynic_ctx_t *rc, + void (*handler)(rcynic_ctx_t *, void *), + void *cookie) +{ + task_t *t = malloc(sizeof(*t)); + + assert(rc && rc->task_queue && handler); + + assert(rsync_count_running(rc) <= rc->max_parallel_fetches); + + if (!t) + return 0; + + t->handler = handler; + t->cookie = cookie; + + if (sk_task_t_push(rc->task_queue, t)) + return 1; + + free(t); + return 0; +} + +/** + * Run tasks until queue is empty. + */ +static void task_run_q(rcynic_ctx_t *rc) +{ + task_t *t; + assert(rc && rc->task_queue); + while ((t = sk_task_t_shift(rc->task_queue)) != NULL) { + t->handler(rc, t->cookie); + free(t); + } +} + + + +/** + * Check cache of whether we've already fetched a particular URI. 
+ */ +static rsync_history_t *rsync_history_uri(const rcynic_ctx_t *rc, + const uri_t *uri) +{ + rsync_history_t h; + char *s; + int i; + + assert(rc && uri && rc->rsync_history); + + if (!is_rsync(uri->s)) + return NULL; + + h.uri = *uri; + + while ((s = strrchr(h.uri.s, '/')) != NULL && s[1] == '\0') + *s = '\0'; + + while ((i = sk_rsync_history_t_find(rc->rsync_history, &h)) < 0) { + if ((s = strrchr(h.uri.s, '/')) == NULL || + (s - h.uri.s) < SIZEOF_RSYNC) + return NULL; + *s = '\0'; + } + + return sk_rsync_history_t_value(rc->rsync_history, i); +} + +/** + * Record that we've already attempted to synchronize a particular + * rsync URI. + */ +static void rsync_history_add(const rcynic_ctx_t *rc, + const rsync_ctx_t *ctx, + const rsync_status_t status) +{ + int final_slash = 0; + rsync_history_t *h; + uri_t uri; + size_t n; + char *s; + + assert(rc && ctx && rc->rsync_history && is_rsync(ctx->uri.s)); + + uri = ctx->uri; + + while ((s = strrchr(uri.s, '/')) != NULL && s[1] == '\0') { + final_slash = 1; + *s = '\0'; + } + + if (status != rsync_status_done) { + + n = SIZEOF_RSYNC + strcspn(uri.s + SIZEOF_RSYNC, "/"); + assert(n < sizeof(uri.s)); + uri.s[n] = '\0'; + final_slash = 1; + + if ((h = rsync_history_uri(rc, &uri)) != NULL) { + assert(h->status != rsync_status_done); + return; + } + } + + if ((h = rsync_history_t_new()) != NULL) { + h->uri = uri; + h->status = status; + h->started = ctx->started; + h->finished = time(0); + h->final_slash = final_slash; + } + + if (h == NULL || !sk_rsync_history_t_push(rc->rsync_history, h)) { + rsync_history_t_free(h); + logmsg(rc, log_sys_err, + "Couldn't add %s to rsync_history, blundering onwards", uri.s); + } +} + + + +/** + * Return count of how many rsync contexts are in running. 
+ */ +static int rsync_count_running(const rcynic_ctx_t *rc) +{ + const rsync_ctx_t *ctx; + int i, n = 0; + + assert(rc && rc->rsync_queue); + + for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i) { + switch (ctx->state) { + case rsync_state_running: + case rsync_state_closed: + case rsync_state_terminating: + n++; + default: + continue; + } + } + + return n; +} + +/** + * Test whether an rsync context conflicts with anything that's + * currently runable. + */ +static int rsync_conflicts(const rcynic_ctx_t *rc, + const rsync_ctx_t *ctx) +{ + const rsync_ctx_t *c; + int i; + + assert(rc && ctx && rc->rsync_queue); + + for (i = 0; (c = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i) + if (c != ctx && + (c->state == rsync_state_initial || + c->state == rsync_state_running) && + conflicting_uris(&c->uri, &ctx->uri)) + return 1; + + return 0; +} + +/** + * Test whether a rsync context is runable at this time. + */ +static int rsync_runable(const rcynic_ctx_t *rc, + const rsync_ctx_t *ctx) +{ + assert(rc && ctx); + + switch (ctx->state) { + + case rsync_state_initial: + case rsync_state_running: + return 1; + + case rsync_state_retry_wait: + return ctx->deadline <= time(0); + + case rsync_state_closed: + case rsync_state_terminating: + return 0; + + case rsync_state_conflict_wait: + return !rsync_conflicts(rc, ctx); + + default: + break; + } + + return 0; +} + +/** + * Return count of runable rsync contexts. + */ +static int rsync_count_runable(const rcynic_ctx_t *rc) +{ + const rsync_ctx_t *ctx; + int i, n = 0; + + assert(rc && rc->rsync_queue); + + for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i) + if (rsync_runable(rc, ctx)) + n++; + + return n; +} + +/** + * Call rsync context handler, if one is set. 
+ */ +static void rsync_call_handler(rcynic_ctx_t *rc, + rsync_ctx_t *ctx, + const rsync_status_t status) +{ + if (!ctx) + return; + + switch (status) { + + case rsync_status_pending: + case rsync_status_done: + break; + + case rsync_status_failed: + log_validation_status(rc, &ctx->uri, rsync_transfer_failed, object_generation_null); + break; + + case rsync_status_timed_out: + log_validation_status(rc, &ctx->uri, rsync_transfer_timed_out, object_generation_null); + break; + + case rsync_status_skipped: + log_validation_status(rc, &ctx->uri, rsync_transfer_skipped, object_generation_null); + break; + } + + if (ctx->handler) + ctx->handler(rc, ctx, status, &ctx->uri, ctx->cookie); +} + +/** + * Run an rsync process. + */ +static void rsync_run(rcynic_ctx_t *rc, + rsync_ctx_t *ctx) +{ + static const char * const rsync_cmd[] = { + "rsync", "--update", "--times", "--copy-links", "--itemize-changes" + }; + static const char * const rsync_tree_args[] = { + "--recursive", "--delete" + }; + + const char *argv[10]; + path_t path; + int i, argc = 0, flags, pipe_fds[2]; + + pipe_fds[0] = pipe_fds[1] = -1; + + assert(rc && ctx && ctx->pid == 0 && ctx->state != rsync_state_running && rsync_runable(rc, ctx)); + + if (rsync_history_uri(rc, &ctx->uri)) { + logmsg(rc, log_verbose, "Late rsync cache hit for %s", ctx->uri.s); + rsync_call_handler(rc, ctx, rsync_status_done); + (void) sk_rsync_ctx_t_delete_ptr(rc->rsync_queue, ctx); + free(ctx); + return; + } + + assert(rsync_count_running(rc) < rc->max_parallel_fetches); + + logmsg(rc, log_telemetry, "Fetching %s", ctx->uri.s); + + memset(argv, 0, sizeof(argv)); + + for (i = 0; i < sizeof(rsync_cmd)/sizeof(*rsync_cmd); i++) { + assert(argc < sizeof(argv)/sizeof(*argv)); + argv[argc++] = rsync_cmd[i]; + } + if (endswith(ctx->uri.s, "/")) { + for (i = 0; i < sizeof(rsync_tree_args)/sizeof(*rsync_tree_args); i++) { + assert(argc < sizeof(argv)/sizeof(*argv)); + argv[argc++] = rsync_tree_args[i]; + } + } + + if (rc->rsync_program) + 
argv[0] = rc->rsync_program; + + if (!uri_to_filename(rc, &ctx->uri, &path, &rc->unauthenticated)) { + logmsg(rc, log_data_err, "Couldn't extract filename from URI: %s", ctx->uri.s); + goto lose; + } + + assert(argc < sizeof(argv)/sizeof(*argv)); + argv[argc++] = ctx->uri.s; + + assert(argc < sizeof(argv)/sizeof(*argv)); + argv[argc++] = path.s; + + if (!mkdir_maybe(rc, &path)) { + logmsg(rc, log_sys_err, "Couldn't make target directory: %s", path.s); + goto lose; + } + + for (i = 0; i < argc; i++) + logmsg(rc, log_debug, "rsync argv[%d]: %s", i, argv[i]); + + if (pipe(pipe_fds) < 0) { + logmsg(rc, log_sys_err, "pipe() failed: %s", strerror(errno)); + goto lose; + } + + switch ((ctx->pid = vfork())) { + + case -1: + logmsg(rc, log_sys_err, "vfork() failed: %s", strerror(errno)); + goto lose; + + case 0: + /* + * Child + */ +#define whine(msg) ((void) write(2, msg, sizeof(msg) - 1)) + if (close(pipe_fds[0]) < 0) + whine("close(pipe_fds[0]) failed\n"); + else if (dup2(pipe_fds[1], 1) < 0) + whine("dup2(pipe_fds[1], 1) failed\n"); + else if (dup2(pipe_fds[1], 2) < 0) + whine("dup2(pipe_fds[1], 2) failed\n"); + else if (close(pipe_fds[1]) < 0) + whine("close(pipe_fds[1]) failed\n"); + else if (execvp(argv[0], (char * const *) argv) < 0) + whine("execvp(argv[0], (char * const *) argv) failed\n"); + whine("last system error: "); + write(2, strerror(errno), strlen(strerror(errno))); + whine("\n"); + _exit(1); +#undef whine + + default: + /* + * Parent + */ + ctx->fd = pipe_fds[0]; + if ((flags = fcntl(ctx->fd, F_GETFL, 0)) == -1 || + fcntl(ctx->fd, F_SETFL, flags | O_NONBLOCK) == -1) { + logmsg(rc, log_sys_err, "fcntl(ctx->fd, F_[GS]ETFL, O_NONBLOCK) failed: %s", + strerror(errno)); + goto lose; + } + (void) close(pipe_fds[1]); + ctx->state = rsync_state_running; + ctx->problem = rsync_problem_none; + if (!ctx->started) + ctx->started = time(0); + if (rc->rsync_timeout) + ctx->deadline = time(0) + rc->rsync_timeout; + logmsg(rc, log_verbose, "Subprocess %u started, queued 
%d, runable %d, running %d, max %d, URI %s", + (unsigned) ctx->pid, sk_rsync_ctx_t_num(rc->rsync_queue), rsync_count_runable(rc), rsync_count_running(rc), rc->max_parallel_fetches, ctx->uri.s); + rsync_call_handler(rc, ctx, rsync_status_pending); + return; + + } + + lose: + if (pipe_fds[0] != -1) + (void) close(pipe_fds[0]); + if (pipe_fds[1] != -1) + (void) close(pipe_fds[1]); + if (rc->rsync_queue && ctx) + (void) sk_rsync_ctx_t_delete_ptr(rc->rsync_queue, ctx); + rsync_call_handler(rc, ctx, rsync_status_failed); + if (ctx->pid > 0) { + (void) kill(ctx->pid, SIGKILL); + ctx->pid = 0; + } +} + +/** + * Process one line of rsync's output. This is a separate function + * primarily to centralize scraping for magic error strings. + */ +static void do_one_rsync_log_line(const rcynic_ctx_t *rc, + rsync_ctx_t *ctx) +{ + unsigned u; + char *s; + + /* + * Send line to our log unless it's empty. + */ + if (ctx->buffer[strspn(ctx->buffer, " \t\n\r")] != '\0') + logmsg(rc, log_telemetry, "rsync[%u]: %s", ctx->pid, ctx->buffer); + + /* + * Check for magic error strings + */ + if ((s = strstr(ctx->buffer, "@ERROR: max connections")) != NULL) { + ctx->problem = rsync_problem_refused; + if (sscanf(s, "@ERROR: max connections (%u) reached -- try again later", &u) == 1) + logmsg(rc, log_verbose, "Subprocess %u reported limit of %u for %s", ctx->pid, u, ctx->uri.s); + } +} + +/** + * Construct select() arguments. 
+ */ +static int rsync_construct_select(const rcynic_ctx_t *rc, + const time_t now, + fd_set *rfds, + struct timeval *tv) +{ + rsync_ctx_t *ctx; + time_t when = 0; + int i, n = 0; + + assert(rc && rc->rsync_queue && rfds && tv && rc->max_select_time >= 0); + + FD_ZERO(rfds); + + for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i) { + +#if 0 + logmsg(rc, log_debug, "+++ ctx[%d] pid %d fd %d state %s started %lu deadline %lu", + i, ctx->pid, ctx->fd, rsync_state_label[ctx->state], + (unsigned long) ctx->started, (unsigned long) ctx->deadline); +#endif + + switch (ctx->state) { + + case rsync_state_running: + assert(ctx->fd >= 0); + FD_SET(ctx->fd, rfds); + if (ctx->fd > n) + n = ctx->fd; + if (!rc->rsync_timeout) + continue; + /* Fall through */ + + case rsync_state_retry_wait: + if (when == 0 || ctx->deadline < when) + when = ctx->deadline; + /* Fall through */ + + default: + continue; + } + } + + if (!when) + tv->tv_sec = rc->max_select_time; + else if (when < now) + tv->tv_sec = 0; + else if (when < now + rc->max_select_time) + tv->tv_sec = when - now; + else + tv->tv_sec = rc->max_select_time; + tv->tv_usec = 0; + return n; +} + +/** + * Convert rsync_status_t to mib_counter_t. + * + * Maybe some day this will go away and we won't be carrying + * essentially the same information in two different databases, but + * for now I'll settle for cleaning up the duplicate code logic. + */ +static mib_counter_t rsync_status_to_mib_counter(rsync_status_t status) +{ + switch (status) { + case rsync_status_done: return rsync_transfer_succeeded; + case rsync_status_timed_out: return rsync_transfer_timed_out; + case rsync_status_failed: return rsync_transfer_failed; + case rsync_status_skipped: return rsync_transfer_skipped; + default: + /* + * Keep GCC from whining about untested cases. 
+ */ + assert(status == rsync_status_done || + status == rsync_status_timed_out || + status == rsync_status_failed || + status == rsync_status_skipped); + return rsync_transfer_failed; + } +} + +/** + * Manager for queue of rsync tasks in progress. + * + * General plan here is to process one completed child, or output + * accumulated from children, or block if there is absolutely nothing + * to do, on the theory that caller had nothing to do either or would + * not have called us. Once we've done something allegedly useful, we + * return, because this is not the event loop; if and when the event + * loop has nothing more important to do, we'll be called again. + * + * So this is the only place where the program blocks waiting for + * children, but we only do it when we know there's nothing else + * useful that we could be doing while we wait. + */ +static void rsync_mgr(rcynic_ctx_t *rc) +{ + rsync_status_t rsync_status; + int i, n, pid_status = -1; + rsync_ctx_t *ctx = NULL; + time_t now = time(0); + struct timeval tv; + fd_set rfds; + pid_t pid; + char *s; + + assert(rc && rc->rsync_queue); + + /* + * Check for exited subprocesses. + */ + + while ((pid = waitpid(-1, &pid_status, WNOHANG)) > 0) { + + /* + * Child exited, handle it. 
+ */ + + logmsg(rc, log_verbose, "Subprocess %u exited with status %d", + (unsigned) pid, WEXITSTATUS(pid_status)); + + for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i) + if (ctx->pid == pid) + break; + if (ctx == NULL) { + assert(i == sk_rsync_ctx_t_num(rc->rsync_queue)); + logmsg(rc, log_sys_err, "Couldn't find rsync context for pid %d", pid); + continue; + } + + close(ctx->fd); + ctx->fd = -1; + + if (ctx->buflen > 0) { + assert(ctx->buflen < sizeof(ctx->buffer)); + ctx->buffer[ctx->buflen] = '\0'; + do_one_rsync_log_line(rc, ctx); + ctx->buflen = 0; + } + + switch (WEXITSTATUS(pid_status)) { + + case 0: + rsync_status = rsync_status_done; + break; + + case 5: /* "Error starting client-server protocol" */ + /* + * Handle remote rsyncd refusing to talk to us because we've + * exceeded its connection limit. Back off for a short + * interval, then retry. + */ + if (ctx->problem == rsync_problem_refused && ctx->tries < rc->max_retries) { + unsigned char r; + if (!RAND_bytes(&r, sizeof(r))) + r = 60; + ctx->deadline = time(0) + rc->retry_wait_min + r; + ctx->state = rsync_state_retry_wait; + ctx->problem = rsync_problem_none; + ctx->pid = 0; + ctx->tries++; + logmsg(rc, log_telemetry, "Scheduling retry for %s", ctx->uri.s); + continue; + } + goto failure; + + case 23: /* "Partial transfer due to error" */ + /* + * This appears to be a catch-all for "something bad happened + * trying to do what you asked me to do". In the cases I've + * seen to date, this is things like "the directory you + * requested isn't there" or "NFS exploded when I tried to touch + * the directory". These aren't network layer failures, so we + * (probably) shouldn't give up on the repository host. 
+ */ + rsync_status = rsync_status_done; + log_validation_status(rc, &ctx->uri, rsync_partial_transfer, object_generation_null); + break; + + default: + failure: + rsync_status = rsync_status_failed; + logmsg(rc, log_data_err, "rsync %u exited with status %d fetching %s", + (unsigned) pid, WEXITSTATUS(pid_status), ctx->uri.s); + break; + } + + if (rc->rsync_timeout && now >= ctx->deadline) + rsync_status = rsync_status_timed_out; + log_validation_status(rc, &ctx->uri, + rsync_status_to_mib_counter(rsync_status), + object_generation_null); + rsync_history_add(rc, ctx, rsync_status); + rsync_call_handler(rc, ctx, rsync_status); + (void) sk_rsync_ctx_t_delete_ptr(rc->rsync_queue, ctx); + free(ctx); + ctx = NULL; + } + + if (pid == -1 && errno != EINTR && errno != ECHILD) + logmsg(rc, log_sys_err, "waitpid() returned error: %s", strerror(errno)); + + assert(rsync_count_running(rc) <= rc->max_parallel_fetches); + + /* + * Look for rsync contexts that have become runable. Odd loop + * structure is because rsync_run() might decide to remove the + * specified rsync task from the queue instead of running it. + */ + for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; i++) { + n = sk_rsync_ctx_t_num(rc->rsync_queue); + if (ctx->state != rsync_state_running && + rsync_runable(rc, ctx) && + rsync_count_running(rc) < rc->max_parallel_fetches) + rsync_run(rc, ctx); + if (n > sk_rsync_ctx_t_num(rc->rsync_queue)) + i--; + } + + assert(rsync_count_running(rc) <= rc->max_parallel_fetches); + + /* + * Check for log text from subprocesses. 
+ */ + + n = rsync_construct_select(rc, now, &rfds, &tv); + + if (n > 0 && tv.tv_sec) + logmsg(rc, log_verbose, "Waiting up to %u seconds for rsync, queued %d, runable %d, running %d, max %d", + (unsigned) tv.tv_sec, sk_rsync_ctx_t_num(rc->rsync_queue), rsync_count_runable(rc), + rsync_count_running(rc), rc->max_parallel_fetches); + + if (n > 0) { +#if 0 + logmsg(rc, log_debug, "++ select(%d, %u)", n, tv.tv_sec); +#endif + n = select(n + 1, &rfds, NULL, NULL, &tv); + } + + if (n > 0) { + + for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i) { + if (ctx->fd <= 0 || !FD_ISSET(ctx->fd, &rfds)) + continue; + + assert(ctx->buflen < sizeof(ctx->buffer) - 1); + + while ((n = read(ctx->fd, ctx->buffer + ctx->buflen, sizeof(ctx->buffer) - 1 - ctx->buflen)) > 0) { + ctx->buflen += n; + assert(ctx->buflen < sizeof(ctx->buffer)); + ctx->buffer[ctx->buflen] = '\0'; + + while ((s = strchr(ctx->buffer, '\n')) != NULL) { + *s++ = '\0'; + do_one_rsync_log_line(rc, ctx); + assert(s > ctx->buffer && s < ctx->buffer + sizeof(ctx->buffer)); + ctx->buflen -= s - ctx->buffer; + assert(ctx->buflen < sizeof(ctx->buffer)); + if (ctx->buflen > 0) + memmove(ctx->buffer, s, ctx->buflen); + ctx->buffer[ctx->buflen] = '\0'; + } + + if (ctx->buflen == sizeof(ctx->buffer) - 1) { + ctx->buffer[sizeof(ctx->buffer) - 1] = '\0'; + do_one_rsync_log_line(rc, ctx); + ctx->buflen = 0; + } + } + + if (n == 0) { + (void) close(ctx->fd); + ctx->fd = -1; + ctx->state = rsync_state_closed; + } + } + } + + assert(rsync_count_running(rc) <= rc->max_parallel_fetches); + + /* + * Deal with children that have been running too long. + */ + if (rc->rsync_timeout) { + for (i = 0; (ctx = sk_rsync_ctx_t_value(rc->rsync_queue, i)) != NULL; ++i) { + int sig; + if (ctx->pid <= 0 || now < ctx->deadline) + continue; + sig = ctx->tries++ < KILL_MAX ? 
SIGTERM : SIGKILL; + if (ctx->state != rsync_state_terminating) { + ctx->problem = rsync_problem_timed_out; + ctx->state = rsync_state_terminating; + ctx->tries = 0; + logmsg(rc, log_telemetry, "Subprocess %u is taking too long fetching %s, whacking it", (unsigned) ctx->pid, ctx->uri.s); + rsync_history_add(rc, ctx, rsync_status_timed_out); + } else if (sig == SIGTERM) { + logmsg(rc, log_verbose, "Whacking subprocess %u again", (unsigned) ctx->pid); + } else { + logmsg(rc, log_verbose, "Whacking subprocess %u with big hammer", (unsigned) ctx->pid); + } + (void) kill(ctx->pid, sig); + ctx->deadline = now + 1; + } + } +} + +/** + * Set up rsync context and attempt to start it. + */ +static void rsync_init(rcynic_ctx_t *rc, + const uri_t *uri, + void *cookie, + void (*handler)(rcynic_ctx_t *, const rsync_ctx_t *, const rsync_status_t, const uri_t *, void *)) +{ + rsync_ctx_t *ctx = NULL; + + assert(rc && uri && strlen(uri->s) > SIZEOF_RSYNC); + + if (!rc->run_rsync) { + logmsg(rc, log_verbose, "rsync disabled, skipping %s", uri->s); + if (handler) + handler(rc, NULL, rsync_status_skipped, uri, cookie); + return; + } + + if (rsync_history_uri(rc, uri)) { + logmsg(rc, log_verbose, "rsync cache hit for %s", uri->s); + if (handler) + handler(rc, NULL, rsync_status_done, uri, cookie); + return; + } + + if ((ctx = malloc(sizeof(*ctx))) == NULL) { + logmsg(rc, log_sys_err, "malloc(rsync_ctxt_t) failed"); + if (handler) + handler(rc, NULL, rsync_status_failed, uri, cookie); + return; + } + + memset(ctx, 0, sizeof(*ctx)); + ctx->uri = *uri; + ctx->handler = handler; + ctx->cookie = cookie; + ctx->fd = -1; + + if (!sk_rsync_ctx_t_push(rc->rsync_queue, ctx)) { + logmsg(rc, log_sys_err, "Couldn't push rsync state object onto queue, punting %s", ctx->uri.s); + rsync_call_handler(rc, ctx, rsync_status_failed); + free(ctx); + return; + } + + if (rsync_conflicts(rc, ctx)) { + logmsg(rc, log_debug, "New rsync context %s is feeling conflicted", ctx->uri.s); + ctx->state = 
rsync_state_conflict_wait; + } +} + +/** + * rsync a trust anchor. + */ +static void rsync_ta(rcynic_ctx_t *rc, + const uri_t *uri, + tal_ctx_t *tctx, + void (*handler)(rcynic_ctx_t *, const rsync_ctx_t *, + const rsync_status_t, const uri_t *, void *)) +{ + assert(endswith(uri->s, ".cer")); + rsync_init(rc, uri, tctx, handler); +} + +/** + * rsync an entire subtree, generally rooted at a SIA collection. + */ +static void rsync_tree(rcynic_ctx_t *rc, + const uri_t *uri, + STACK_OF(walk_ctx_t) *wsk, + void (*handler)(rcynic_ctx_t *, const rsync_ctx_t *, + const rsync_status_t, const uri_t *, void *)) +{ + assert(endswith(uri->s, "/")); + rsync_init(rc, uri, wsk, handler); +} + + + +/** + * Clean up old stuff from previous rsync runs. --delete doesn't help + * if the URI changes and we never visit the old URI again. + */ +static int prune_unauthenticated(const rcynic_ctx_t *rc, + const path_t *name, + const size_t baselen) +{ + path_t path; + struct dirent *d; + DIR *dir; + const char *slash; + + assert(rc && name && baselen > 0 && strlen(name->s) >= baselen); + + if (!is_directory(name)) { + logmsg(rc, log_usage_err, "prune: %s is not a directory", name->s); + return 0; + } + + if ((dir = opendir(name->s)) == NULL) { + logmsg(rc, log_sys_err, "prune: opendir() failed on %s: %s", name->s, strerror(errno)); + return 0; + } + + slash = endswith(name->s, "/") ? 
"" : "/"; + + while ((d = readdir(dir)) != NULL) { + if (!strcmp(d->d_name, ".") || !strcmp(d->d_name, "..")) + continue; + + if (snprintf(path.s, sizeof(path.s), "%s%s%s", name->s, slash, d->d_name) >= sizeof(path.s)) { + logmsg(rc, log_debug, "prune: %s%s%s too long", name->s, slash, d->d_name); + goto done; + } + + if (validation_status_find_filename(rc, path.s + baselen)) { + logmsg(rc, log_debug, "prune: cache hit %s", path.s); + continue; + } + + if (unlink(path.s) == 0) { + logmsg(rc, log_debug, "prune: removed %s", path.s); + continue; + } + + if (prune_unauthenticated(rc, &path, baselen)) + continue; + + logmsg(rc, log_sys_err, "prune: removing %s failed: %s", path.s, strerror(errno)); + goto done; + } + + if (rmdir(name->s) == 0) + logmsg(rc, log_debug, "prune: removed %s", name->s); + else if (errno != ENOTEMPTY) + logmsg(rc, log_sys_err, "prune: couldn't remove %s: %s", name->s, strerror(errno)); + + done: + closedir(dir); + return !d; +} + + + +/** + * Read a DER object using a BIO pipeline that hashes the file content + * as we read it. Returns the internal form of the parsed DER object, + * sets the hash buffer (if specified) as a side effect. The default + * hash algorithm is SHA-256. + */ +static void *read_file_with_hash(const path_t *filename, + const ASN1_ITEM *it, + const EVP_MD *md, + hashbuf_t *hash) +{ + void *result = NULL; + BIO *b; + + if ((b = BIO_new_file(filename->s, "rb")) == NULL) + goto error; + + if (hash != NULL) { + BIO *b2 = BIO_new(BIO_f_md()); + if (b2 == NULL) + goto error; + if (md == NULL) + md = EVP_sha256(); + if (!BIO_set_md(b2, md)) { + BIO_free(b2); + goto error; + } + BIO_push(b2, b); + b = b2; + } + + if ((result = ASN1_item_d2i_bio(it, b, NULL)) == NULL) + goto error; + + if (hash != NULL) { + memset(hash, 0, sizeof(*hash)); + BIO_gets(b, (char *) hash, sizeof(hash->h)); + } + + error: + BIO_free_all(b); + return result; +} + +/** + * Read and hash a certificate. 
+ */ +static X509 *read_cert(const path_t *filename, hashbuf_t *hash) +{ + return read_file_with_hash(filename, ASN1_ITEM_rptr(X509), NULL, hash); +} + +/** + * Read and hash a CRL. + */ +static X509_CRL *read_crl(const path_t *filename, hashbuf_t *hash) +{ + return read_file_with_hash(filename, ASN1_ITEM_rptr(X509_CRL), NULL, hash); +} + +/** + * Read and hash a CMS message. + */ +static CMS_ContentInfo *read_cms(const path_t *filename, hashbuf_t *hash) +{ + return read_file_with_hash(filename, ASN1_ITEM_rptr(CMS_ContentInfo), NULL, hash); +} + + + +/** + * Extract CRLDP data from a certificate. Stops looking after finding + * the first rsync URI. + */ +static int extract_crldp_uri(rcynic_ctx_t *rc, + const uri_t *uri, + const object_generation_t generation, + const STACK_OF(DIST_POINT) *crldp, + uri_t *result) +{ + DIST_POINT *d; + int i; + + assert(rc && uri && crldp && result); + + if (sk_DIST_POINT_num(crldp) != 1) + goto bad; + + d = sk_DIST_POINT_value(crldp, 0); + + if (d->reasons || d->CRLissuer || !d->distpoint || d->distpoint->type != 0) + goto bad; + + for (i = 0; i < sk_GENERAL_NAME_num(d->distpoint->name.fullname); i++) { + GENERAL_NAME *n = sk_GENERAL_NAME_value(d->distpoint->name.fullname, i); + if (n == NULL || n->type != GEN_URI) + goto bad; + if (!is_rsync((char *) n->d.uniformResourceIdentifier->data)) + log_validation_status(rc, uri, non_rsync_uri_in_extension, generation); + else if (sizeof(result->s) <= n->d.uniformResourceIdentifier->length) + log_validation_status(rc, uri, uri_too_long, generation); + else if (result->s[0]) + log_validation_status(rc, uri, multiple_rsync_uris_in_extension, generation); + else + strcpy(result->s, (char *) n->d.uniformResourceIdentifier->data); + } + + return result->s[0]; + + bad: + log_validation_status(rc, uri, malformed_crldp_extension, generation); + return 0; +} + +/** + * Extract SIA or AIA data from a certificate. 
+ */ +static int extract_access_uri(rcynic_ctx_t *rc, + const uri_t *uri, + const object_generation_t generation, + const AUTHORITY_INFO_ACCESS *xia, + const int nid, + uri_t *result, + int *count) +{ + int i; + + assert(rc && uri && xia && result && count); + + for (i = 0; i < sk_ACCESS_DESCRIPTION_num(xia); i++) { + ACCESS_DESCRIPTION *a = sk_ACCESS_DESCRIPTION_value(xia, i); + if (a == NULL || a->location->type != GEN_URI) + return 0; + if (OBJ_obj2nid(a->method) != nid) + continue; + ++*count; + if (!is_rsync((char *) a->location->d.uniformResourceIdentifier->data)) + log_validation_status(rc, uri, non_rsync_uri_in_extension, generation); + else if (sizeof(result->s) <= a->location->d.uniformResourceIdentifier->length) + log_validation_status(rc, uri, uri_too_long, generation); + else if (result->s[0]) + log_validation_status(rc, uri, multiple_rsync_uris_in_extension, generation); + else + strcpy(result->s, (char *) a->location->d.uniformResourceIdentifier->data); + } + return 1; +} + + + +/** + * Check to see whether an AKI extension is present, is of the right + * form, and matches the issuer. + */ +static int check_aki(rcynic_ctx_t *rc, + const uri_t *uri, + const X509 *issuer, + const AUTHORITY_KEYID *aki, + const object_generation_t generation) +{ + assert(rc && uri && issuer && issuer->skid); + + if (aki == NULL) { + log_validation_status(rc, uri, aki_extension_missing, generation); + return 0; + } + + if (!aki->keyid || aki->serial || aki->issuer) { + log_validation_status(rc, uri, aki_extension_wrong_format, generation); + return 0; + } + + if (ASN1_OCTET_STRING_cmp(aki->keyid, issuer->skid)) { + log_validation_status(rc, uri, aki_extension_issuer_mismatch, generation); + return 0; + } + + return 1; +} + + + +/** + * Check whether a Distinguished Name conforms to the rescert profile. 
+ * The profile is very restrictive: it only allows one mandatory + * CommonName field and one optional SerialNumber field, both of which + * must be of type PrintableString. + */ +static int check_allowed_dn(X509_NAME *dn) +{ + X509_NAME_ENTRY *ne; + ASN1_STRING *s; + int loc; + + if (dn == NULL) + return 0; + + switch (X509_NAME_entry_count(dn)) { + + case 2: + if ((loc = X509_NAME_get_index_by_NID(dn, NID_serialNumber, -1)) < 0 || + (ne = X509_NAME_get_entry(dn, loc)) == NULL || + (s = X509_NAME_ENTRY_get_data(ne)) == NULL || + ASN1_STRING_type(s) != V_ASN1_PRINTABLESTRING) + return 0; + + /* Fall through */ + + case 1: + if ((loc = X509_NAME_get_index_by_NID(dn, NID_commonName, -1)) < 0 || + (ne = X509_NAME_get_entry(dn, loc)) == NULL || + (s = X509_NAME_ENTRY_get_data(ne)) == NULL || + ASN1_STRING_type(s) != V_ASN1_PRINTABLESTRING) + return 0; + + return 1; + + default: + return 0; + } +} + +/** + * Check whether an ASN.1 TIME value conforms to RFC 5280 4.1.2.5. + */ +static int check_allowed_time_encoding(ASN1_TIME *t) +{ + switch (t->type) { + + case V_ASN1_UTCTIME: + return t->length == sizeof("yymmddHHMMSSZ") - 1; + + case V_ASN1_GENERALIZEDTIME: + return (t->length == sizeof("yyyymmddHHMMSSZ") - 1 && + strcmp("205", (char *) t->data) <= 0); + + } + return 0; +} + +/** + * Compare ASN1_TIME values. + */ +static int asn1_time_cmp(ASN1_TIME *t1, ASN1_TIME *t2) +{ + ASN1_GENERALIZEDTIME *g1 = ASN1_TIME_to_generalizedtime(t1, NULL); + ASN1_GENERALIZEDTIME *g2 = ASN1_TIME_to_generalizedtime(t2, NULL); + + int cmp = ASN1_STRING_cmp(g1, g2); + + ASN1_GENERALIZEDTIME_free(g1); + ASN1_GENERALIZEDTIME_free(g2); + + return cmp; +} + + + +/** + * Attempt to read and check one CRL from disk. 
+ */ + +static X509_CRL *check_crl_1(rcynic_ctx_t *rc, + const uri_t *uri, + path_t *path, + const path_t *prefix, + X509 *issuer, + const object_generation_t generation) +{ + STACK_OF(X509_REVOKED) *revoked; + X509_CRL *crl = NULL; + EVP_PKEY *pkey; + int i, ret; + + assert(uri && path && issuer); + + if (!uri_to_filename(rc, uri, path, prefix) || + (crl = read_crl(path, NULL)) == NULL) + goto punt; + + if (X509_CRL_get_version(crl) != 1) { + log_validation_status(rc, uri, wrong_object_version, generation); + goto punt; + } + + if (!crl->crl || !crl->crl->sig_alg || !crl->crl->sig_alg->algorithm || + OBJ_obj2nid(crl->crl->sig_alg->algorithm) != NID_sha256WithRSAEncryption) { + log_validation_status(rc, uri, nonconformant_signature_algorithm, generation); + goto punt; + } + + if (!check_allowed_time_encoding(X509_CRL_get_lastUpdate(crl)) || + !check_allowed_time_encoding(X509_CRL_get_nextUpdate(crl))) { + log_validation_status(rc, uri, nonconformant_asn1_time_value, generation); + goto punt; + } + + if (X509_cmp_current_time(X509_CRL_get_lastUpdate(crl)) > 0) { + log_validation_status(rc, uri, crl_not_yet_valid, generation); + goto punt; + } + + if (X509_cmp_current_time(X509_CRL_get_nextUpdate(crl)) < 0) { + log_validation_status(rc, uri, stale_crl_or_manifest, generation); + if (!rc->allow_stale_crl) + goto punt; + } + + if (!check_aki(rc, uri, issuer, crl->akid, generation)) + goto punt; + + if (crl->crl_number == NULL) { + log_validation_status(rc, uri, crl_number_extension_missing, generation); + goto punt; + } + + if (ASN1_INTEGER_cmp(crl->crl_number, asn1_zero) < 0) { + log_validation_status(rc, uri, crl_number_is_negative, generation); + goto punt; + } + + if (ASN1_INTEGER_cmp(crl->crl_number, asn1_twenty_octets) > 0) { + log_validation_status(rc, uri, crl_number_out_of_range, generation); + goto punt; + } + + if (X509_CRL_get_ext_count(crl) != 2) { + log_validation_status(rc, uri, disallowed_x509v3_extension, generation); + goto punt; + } + + if 
(X509_NAME_cmp(X509_CRL_get_issuer(crl), X509_get_subject_name(issuer))) { + log_validation_status(rc, uri, crl_issuer_name_mismatch, generation); + goto punt; + } + + if (!check_allowed_dn(X509_CRL_get_issuer(crl))) { + log_validation_status(rc, uri, nonconformant_issuer_name, generation); + if (!rc->allow_nonconformant_name) + goto punt; + } + + if ((revoked = X509_CRL_get_REVOKED(crl)) != NULL) { + for (i = sk_X509_REVOKED_num(revoked) - 1; i >= 0; --i) { + if (X509_REVOKED_get_ext_count(sk_X509_REVOKED_value(revoked, i)) > 0) { + log_validation_status(rc, uri, disallowed_x509v3_extension, generation); + goto punt; + } + } + } + + if ((pkey = X509_get_pubkey(issuer)) == NULL) + goto punt; + ret = X509_CRL_verify(crl, pkey); + EVP_PKEY_free(pkey); + + if (ret > 0) + return crl; + + punt: + X509_CRL_free(crl); + return NULL; +} + +/** + * Check whether we already have a particular CRL, attempt to fetch it + * and check issuer's signature if we don't. + * + * General plan here is to do basic checks on both current and backup + * generation CRLs, then, if both generations pass all of our other + * tests, pick the generation with the highest CRL number, to protect + * against replay attacks. 
+ */ +static X509_CRL *check_crl(rcynic_ctx_t *rc, + const uri_t *uri, + X509 *issuer) +{ + X509_CRL *old_crl, *new_crl, *result = NULL; + path_t old_path, new_path; + + if (uri_to_filename(rc, uri, &new_path, &rc->new_authenticated) && + (new_crl = read_crl(&new_path, NULL)) != NULL) + return new_crl; + + logmsg(rc, log_telemetry, "Checking CRL %s", uri->s); + + new_crl = check_crl_1(rc, uri, &new_path, &rc->unauthenticated, + issuer, object_generation_current); + + old_crl = check_crl_1(rc, uri, &old_path, &rc->old_authenticated, + issuer, object_generation_backup); + + if (!new_crl) + result = old_crl; + + else if (!old_crl) + result = new_crl; + + else { + ASN1_GENERALIZEDTIME *g_old = ASN1_TIME_to_generalizedtime(X509_CRL_get_lastUpdate(old_crl), NULL); + ASN1_GENERALIZEDTIME *g_new = ASN1_TIME_to_generalizedtime(X509_CRL_get_lastUpdate(new_crl), NULL); + int num_cmp = ASN1_INTEGER_cmp(old_crl->crl_number, new_crl->crl_number); + int date_cmp = (!g_old || !g_new) ? 0 : ASN1_STRING_cmp(g_old, g_new); + + if (!g_old) + log_validation_status(rc, uri, bad_thisupdate, object_generation_backup); + if (!g_new) + log_validation_status(rc, uri, bad_thisupdate, object_generation_current); + if (num_cmp > 0) + log_validation_status(rc, uri, backup_number_higher_than_current, object_generation_current); + if (g_old && g_new && date_cmp > 0) + log_validation_status(rc, uri, backup_thisupdate_newer_than_current, object_generation_current); + + if (num_cmp > 0 && (!g_old || !g_new || date_cmp > 0)) + result = old_crl; + else + result = new_crl; + + ASN1_GENERALIZEDTIME_free(g_old); + ASN1_GENERALIZEDTIME_free(g_new); + } + + if (result && result == new_crl) + install_object(rc, uri, &new_path, object_generation_current); + else if (!access(new_path.s, F_OK)) + log_validation_status(rc, uri, object_rejected, object_generation_current); + + if (result && result == old_crl) + install_object(rc, uri, &old_path, object_generation_backup); + else if (!result && !access(old_path.s, 
F_OK)) + log_validation_status(rc, uri, object_rejected, object_generation_backup); + + if (result != new_crl) + X509_CRL_free(new_crl); + + if (result != old_crl) + X509_CRL_free(old_crl); + + return result; +} + + +/** + * Check digest of a CRL we've already accepted. + */ +static int check_crl_digest(const rcynic_ctx_t *rc, + const uri_t *uri, + const unsigned char *hash, + const size_t hashlen) +{ + X509_CRL *crl = NULL; + hashbuf_t hashbuf; + path_t path; + int result; + + assert(rc && uri && hash); + + if (!uri_to_filename(rc, uri, &path, &rc->new_authenticated) || + (crl = read_crl(&path, &hashbuf)) == NULL) + return 0; + + result = hashlen <= sizeof(hashbuf.h) && !memcmp(hashbuf.h, hash, hashlen); + + X509_CRL_free(crl); + + return result; +} + + + +/** + * Validation callback function for use with x509_verify_cert(). + */ +static int check_x509_cb(int ok, X509_STORE_CTX *ctx) +{ + rcynic_x509_store_ctx_t *rctx = (rcynic_x509_store_ctx_t *) ctx; + mib_counter_t code; + + assert(rctx != NULL); + + switch (ctx->error) { + case X509_V_OK: + return ok; + + case X509_V_ERR_SUBJECT_ISSUER_MISMATCH: + /* + * Informational events, not really errors. ctx->check_issued() + * is called in many places where failure to find an issuer is not + * a failure for the calling function. Just leave these alone. + */ + return ok; + + case X509_V_ERR_CRL_HAS_EXPIRED: + /* + * This isn't really an error, exactly. CRLs don't really + * "expire". What OpenSSL really means by this error is just + * "it's now later than the issuer said it intended to publish a + * new CRL". Whether we treat this as an error or not is + * configurable, see the allow_stale_crl parameter. + * + * Deciding whether to allow stale CRLs is check_crl_1()'s job, + * not ours. By the time this callback occurs, we've already + * accepted the CRL; this callback is just notifying us that the + * object being checked is tainted by a stale CRL. So we mark the + * object as tainted and carry on. 
+ */ + log_validation_status(rctx->rc, &rctx->subject->uri, tainted_by_stale_crl, rctx->subject->generation); + ok = 1; + return ok; + + case X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT: + /* + * This is another error that's only an error in the strange world + * of OpenSSL, but a more serious one. By default, OpenSSL + * expects all trust anchors to be self-signed. This is not a + * PKIX requirement, it's just an OpenSSL thing, but one violates + * it at one's peril, because the only way to convince OpenSSL to + * allow a non-self-signed trust anchor is to intercept this + * "error" in the verify callback handler. + * + * So this program supports non-self-signed trust anchors, but be + * warned that enabling this feature may cause this program's + * output not to work with other OpenSSL-based applications. + */ + if (rctx->rc->allow_non_self_signed_trust_anchor) + ok = 1; + log_validation_status(rctx->rc, &rctx->subject->uri, trust_anchor_not_self_signed, rctx->subject->generation); + return ok; + + /* + * Select correct MIB counter for every known OpenSSL verify errors + * except the ones we handle explicitly above, then fall through to + * common handling for all of these. + */ +#define QV(x) \ + case x: \ + code = mib_openssl_##x; \ + break; + MIB_COUNTERS_FROM_OPENSSL; +#undef QV + + default: + code = unknown_openssl_verify_error; + break; + } + + log_validation_status(rctx->rc, &rctx->subject->uri, code, rctx->subject->generation); + return ok; +} + +/** + * Check crypto aspects of a certificate, policy OID, RFC 3779 path + * validation, and conformance to the RPKI certificate profile. 
+ */ +static int check_x509(rcynic_ctx_t *rc, + STACK_OF(walk_ctx_t) *wsk, + const uri_t *uri, + X509 *x, + certinfo_t *certinfo, + const object_generation_t generation) +{ + walk_ctx_t *w = walk_ctx_stack_head(wsk); + rcynic_x509_store_ctx_t rctx; + EVP_PKEY *issuer_pkey = NULL, *subject_pkey = NULL; + unsigned long flags = (X509_V_FLAG_POLICY_CHECK | X509_V_FLAG_EXPLICIT_POLICY | X509_V_FLAG_X509_STRICT); + AUTHORITY_INFO_ACCESS *sia = NULL, *aia = NULL; + STACK_OF(POLICYINFO) *policies = NULL; + ASN1_BIT_STRING *ski_pubkey = NULL; + STACK_OF(DIST_POINT) *crldp = NULL; + EXTENDED_KEY_USAGE *eku = NULL; + BASIC_CONSTRAINTS *bc = NULL; + hashbuf_t ski_hashbuf; + unsigned ski_hashlen, afi; + int i, ok, crit, loc, ex_count, routercert = 0, ret = 0; + + assert(rc && wsk && w && uri && x && w->cert); + + /* + * Cleanup logic will explode if rctx.ctx hasn't been initialized, + * so we need to do this before running any test that can fail. + */ + if (!X509_STORE_CTX_init(&rctx.ctx, rc->x509_store, x, NULL)) + return 0; + + /* + * certinfo == NULL means x is a self-signed trust anchor. + */ + if (certinfo == NULL) + certinfo = &w->certinfo; + + memset(certinfo, 0, sizeof(*certinfo)); + + certinfo->uri = *uri; + certinfo->generation = generation; + + if (ASN1_INTEGER_cmp(X509_get_serialNumber(x), asn1_zero) <= 0 || + ASN1_INTEGER_cmp(X509_get_serialNumber(x), asn1_twenty_octets) > 0) { + log_validation_status(rc, uri, bad_certificate_serial_number, generation); + goto done; + } + + if (!check_allowed_time_encoding(X509_get_notBefore(x)) || + !check_allowed_time_encoding(X509_get_notAfter(x))) { + log_validation_status(rc, uri, nonconformant_asn1_time_value, generation); + goto done; + } + + /* + * Apparently nothing ever looks at these fields, so there are no + * API functions for them. We wouldn't bother either if they + * weren't forbidden by the RPKI certificate profile. 
+ */ + if (!x->cert_info || x->cert_info->issuerUID || x->cert_info->subjectUID) { + log_validation_status(rc, uri, nonconformant_certificate_uid, generation); + goto done; + } + + /* + * Keep track of allowed extensions we've seen. Once we've + * processed all the ones we expect, anything left is an error. + */ + ex_count = X509_get_ext_count(x); + + /* + * We don't use X509_check_ca() to set certinfo->ca anymore, because + * it's not paranoid enough to enforce the RPKI certificate profile, + * but we still call it because we need it (or something) to invoke + * x509v3_cache_extensions() for us. + */ + (void) X509_check_ca(x); + + if ((bc = X509_get_ext_d2i(x, NID_basic_constraints, &crit, NULL)) != NULL) { + ex_count--; + if (!crit || bc->ca <= 0 || bc->pathlen != NULL) { + log_validation_status(rc, uri, malformed_basic_constraints, generation); + goto done; + } + } + + certinfo->ca = bc != NULL; + + if (certinfo == &w->certinfo) { + certinfo->ta = 1; + if (!certinfo->ca) { + log_validation_status(rc, uri, malformed_trust_anchor, generation); + goto done; + } + } + + if ((aia = X509_get_ext_d2i(x, NID_info_access, NULL, NULL)) != NULL) { + int n_caIssuers = 0; + ex_count--; + if (!extract_access_uri(rc, uri, generation, aia, NID_ad_ca_issuers, + &certinfo->aia, &n_caIssuers) || + !certinfo->aia.s[0] || + sk_ACCESS_DESCRIPTION_num(aia) != n_caIssuers) { + log_validation_status(rc, uri, malformed_aia_extension, generation); + goto done; + } + } + + if (certinfo->ta && aia) { + log_validation_status(rc, uri, aia_extension_forbidden, generation); + goto done; + } + + if (!certinfo->ta && !aia) { + log_validation_status(rc, uri, aia_extension_missing, generation); + goto done; + } + + if ((eku = X509_get_ext_d2i(x, NID_ext_key_usage, &crit, NULL)) != NULL) { + ex_count--; + if (crit || certinfo->ca || !endswith(uri->s, ".cer") || sk_ASN1_OBJECT_num(eku) == 0) { + log_validation_status(rc, uri, inappropriate_eku_extension, generation); + goto done; + } + for (i = 0; i 
< sk_ASN1_OBJECT_num(eku); i++) + routercert |= OBJ_obj2nid(sk_ASN1_OBJECT_value(eku, i)) == NID_id_kp_bgpsec_router; + } + + if ((sia = X509_get_ext_d2i(x, NID_sinfo_access, NULL, NULL)) != NULL) { + int got_caDirectory, got_rpkiManifest, got_signedObject; + int n_caDirectory = 0, n_rpkiManifest = 0, n_signedObject = 0; + ex_count--; + ok = (extract_access_uri(rc, uri, generation, sia, NID_caRepository, + &certinfo->sia, &n_caDirectory) && + extract_access_uri(rc, uri, generation, sia, NID_ad_rpkiManifest, + &certinfo->manifest, &n_rpkiManifest) && + extract_access_uri(rc, uri, generation, sia, NID_ad_signedObject, + &certinfo->signedobject, &n_signedObject)); + got_caDirectory = certinfo->sia.s[0] != '\0'; + got_rpkiManifest = certinfo->manifest.s[0] != '\0'; + got_signedObject = certinfo->signedobject.s[0] != '\0'; + ok &= sk_ACCESS_DESCRIPTION_num(sia) == n_caDirectory + n_rpkiManifest + n_signedObject; + if (certinfo->ca) + ok &= got_caDirectory && got_rpkiManifest && !got_signedObject; + else if (rc->allow_ee_without_signedObject) + ok &= !got_caDirectory && !got_rpkiManifest; + else + ok &= !got_caDirectory && !got_rpkiManifest && got_signedObject; + if (!ok) { + log_validation_status(rc, uri, malformed_sia_extension, generation); + goto done; + } + } else if (certinfo->ca || !rc->allow_ee_without_signedObject) { + log_validation_status(rc, uri, sia_extension_missing, generation); + goto done; + } else if (!routercert) { + log_validation_status(rc, uri, sia_extension_missing_from_ee, generation); + } + + if (certinfo->signedobject.s[0] && strcmp(uri->s, certinfo->signedobject.s)) + log_validation_status(rc, uri, bad_signed_object_uri, generation); + + if ((crldp = X509_get_ext_d2i(x, NID_crl_distribution_points, NULL, NULL)) != NULL) { + ex_count--; + if (!extract_crldp_uri(rc, uri, generation, crldp, &certinfo->crldp)) + goto done; + } + + rctx.rc = rc; + rctx.subject = certinfo; + + if (w->certs == NULL && (w->certs = walk_ctx_stack_certs(rc, wsk)) == 
NULL) + goto done; + + if (X509_get_version(x) != 2) { + log_validation_status(rc, uri, wrong_object_version, generation); + goto done; + } + + if (!x->cert_info || !x->cert_info->signature || !x->cert_info->signature->algorithm || + OBJ_obj2nid(x->cert_info->signature->algorithm) != NID_sha256WithRSAEncryption) { + log_validation_status(rc, uri, nonconformant_signature_algorithm, generation); + goto done; + } + + if (certinfo->sia.s[0] && certinfo->sia.s[strlen(certinfo->sia.s) - 1] != '/') { + log_validation_status(rc, uri, malformed_cadirectory_uri, generation); + goto done; + } + + if (!w->certinfo.ta && strcmp(w->certinfo.uri.s, certinfo->aia.s)) + log_validation_status(rc, uri, aia_doesnt_match_issuer, generation); + + if (certinfo->ca && !certinfo->sia.s[0]) { + log_validation_status(rc, uri, sia_cadirectory_uri_missing, generation); + goto done; + } + + if (certinfo->ca && !certinfo->manifest.s[0]) { + log_validation_status(rc, uri, sia_manifest_uri_missing, generation); + goto done; + } + + if (certinfo->ca && !startswith(certinfo->manifest.s, certinfo->sia.s)) { + log_validation_status(rc, uri, manifest_carepository_mismatch, generation); + goto done; + } + + if (x->skid) { + ex_count--; + } else { + log_validation_status(rc, uri, ski_extension_missing, generation); + goto done; + } + + if (!check_allowed_dn(X509_get_subject_name(x))) { + log_validation_status(rc, uri, nonconformant_subject_name, generation); + if (!rc->allow_nonconformant_name) + goto done; + } + + if (!check_allowed_dn(X509_get_issuer_name(x))) { + log_validation_status(rc, uri, nonconformant_issuer_name, generation); + if (!rc->allow_nonconformant_name) + goto done; + } + + if ((policies = X509_get_ext_d2i(x, NID_certificate_policies, &crit, NULL)) != NULL) { + POLICYQUALINFO *qualifier = NULL; + POLICYINFO *policy = NULL; + ex_count--; + if (!crit || sk_POLICYINFO_num(policies) != 1 || + (policy = sk_POLICYINFO_value(policies, 0)) == NULL || + OBJ_obj2nid(policy->policyid) != 
NID_cp_ipAddr_asNumber || + sk_POLICYQUALINFO_num(policy->qualifiers) > 1 || + (sk_POLICYQUALINFO_num(policy->qualifiers) == 1 && + ((qualifier = sk_POLICYQUALINFO_value(policy->qualifiers, 0)) == NULL || + OBJ_obj2nid(qualifier->pqualid) != NID_id_qt_cps))) { + log_validation_status(rc, uri, bad_certificate_policy, generation); + goto done; + } + if (qualifier) + log_validation_status(rc, uri, policy_qualifier_cps, generation); + } + + if (!X509_EXTENSION_get_critical(X509_get_ext(x, X509_get_ext_by_NID(x, NID_key_usage, -1))) || + (x->ex_flags & EXFLAG_KUSAGE) == 0 || + x->ex_kusage != (certinfo->ca ? KU_KEY_CERT_SIGN | KU_CRL_SIGN : KU_DIGITAL_SIGNATURE)) { + log_validation_status(rc, uri, bad_key_usage, generation); + goto done; + } + ex_count--; + + if (x->rfc3779_addr) { + ex_count--; + if (routercert || + (loc = X509_get_ext_by_NID(x, NID_sbgp_ipAddrBlock, -1)) < 0 || + !X509_EXTENSION_get_critical(X509_get_ext(x, loc)) || + !v3_addr_is_canonical(x->rfc3779_addr) || + sk_IPAddressFamily_num(x->rfc3779_addr) == 0) { + log_validation_status(rc, uri, bad_ipaddrblocks, generation); + goto done; + } + for (i = 0; i < sk_IPAddressFamily_num(x->rfc3779_addr); i++) { + IPAddressFamily *f = sk_IPAddressFamily_value(x->rfc3779_addr, i); + afi = v3_addr_get_afi(f); + if (afi != IANA_AFI_IPV4 && afi != IANA_AFI_IPV6) { + log_validation_status(rc, uri, unknown_afi, generation); + goto done; + } + if (f->addressFamily->length != 2) { + log_validation_status(rc, uri, safi_not_allowed, generation); + goto done; + } + } + } + + if (x->rfc3779_asid) { + ex_count--; + if ((loc = X509_get_ext_by_NID(x, NID_sbgp_autonomousSysNum, -1)) < 0 || + !X509_EXTENSION_get_critical(X509_get_ext(x, loc)) || + !v3_asid_is_canonical(x->rfc3779_asid) || + x->rfc3779_asid->asnum == NULL || + x->rfc3779_asid->rdi != NULL || + (routercert && x->rfc3779_asid->asnum->type == ASIdentifierChoice_inherit)) { + log_validation_status(rc, uri, bad_asidentifiers, generation); + goto done; + } + } + + if 
(!x->rfc3779_addr && !x->rfc3779_asid) { + log_validation_status(rc, uri, missing_resources, generation); + goto done; + } + + subject_pkey = X509_get_pubkey(x); + ok = subject_pkey != NULL; + if (ok) { + ASN1_OBJECT *algorithm; + + (void) X509_PUBKEY_get0_param(&algorithm, NULL, NULL, NULL, X509_get_X509_PUBKEY(x)); + + switch (OBJ_obj2nid(algorithm)) { + + case NID_rsaEncryption: + ok = (EVP_PKEY_type(subject_pkey->type) == EVP_PKEY_RSA && + BN_get_word(subject_pkey->pkey.rsa->e) == 65537); + if (!ok) + break; + if (!certinfo->ca && rc->allow_1024_bit_ee_key && + BN_num_bits(subject_pkey->pkey.rsa->n) == 1024) + log_validation_status(rc, uri, ee_certificate_with_1024_bit_key, generation); + else + ok = BN_num_bits(subject_pkey->pkey.rsa->n) == 2048; + break; + + case NID_X9_62_id_ecPublicKey: + ok = !certinfo->ca && routercert; + break; + + default: + ok = 0; + } + } + if (!ok) { + log_validation_status(rc, uri, bad_public_key, generation); + goto done; + } + + if (x->skid == NULL || + (ski_pubkey = X509_get0_pubkey_bitstr(x)) == NULL || + !EVP_Digest(ski_pubkey->data, ski_pubkey->length, + ski_hashbuf.h, &ski_hashlen, EVP_sha1(), NULL) || + ski_hashlen != 20 || + ski_hashlen != x->skid->length || + memcmp(ski_hashbuf.h, x->skid->data, ski_hashlen)) { + log_validation_status(rc, uri, ski_public_key_mismatch, generation); + goto done; + } + + if (x->akid) { + ex_count--; + if (!check_aki(rc, uri, w->cert, x->akid, generation)) + goto done; + } + + if (!x->akid && !certinfo->ta) { + log_validation_status(rc, uri, aki_extension_missing, generation); + goto done; + } + + if ((issuer_pkey = X509_get_pubkey(w->cert)) == NULL || X509_verify(x, issuer_pkey) <= 0) { + log_validation_status(rc, uri, certificate_bad_signature, generation); + goto done; + } + + if (certinfo->ta) { + + if (certinfo->crldp.s[0]) { + log_validation_status(rc, uri, trust_anchor_with_crldp, generation); + goto done; + } + + } else { + + if (!certinfo->crldp.s[0]) { + log_validation_status(rc, 
uri, crldp_uri_missing, generation); + goto done; + } + + if (!certinfo->ca && !startswith(certinfo->crldp.s, w->certinfo.sia.s)) { + log_validation_status(rc, uri, crldp_doesnt_match_issuer_sia, generation); + goto done; + } + + if (w->crls == NULL && ((w->crls = sk_X509_CRL_new_null()) == NULL || + !sk_X509_CRL_push(w->crls, NULL))) { + logmsg(rc, log_sys_err, "Internal allocation error setting up CRL for validation"); + goto done; + } + + assert(sk_X509_CRL_num(w->crls) == 1); + assert((w->crldp.s[0] == '\0') == (sk_X509_CRL_value(w->crls, 0) == NULL)); + + if (strcmp(w->crldp.s, certinfo->crldp.s)) { + X509_CRL *old_crl = sk_X509_CRL_value(w->crls, 0); + X509_CRL *new_crl = check_crl(rc, &certinfo->crldp, w->cert); + + if (w->crldp.s[0]) + log_validation_status(rc, uri, issuer_uses_multiple_crldp_values, generation); + + if (new_crl == NULL) { + log_validation_status(rc, uri, bad_crl, generation); + goto done; + } + + if (old_crl && new_crl && ASN1_INTEGER_cmp(old_crl->crl_number, new_crl->crl_number) < 0) { + log_validation_status(rc, uri, crldp_names_newer_crl, generation); + X509_CRL_free(old_crl); + old_crl = NULL; + } + + if (old_crl == NULL) { + sk_X509_CRL_set(w->crls, 0, new_crl); + w->crldp = certinfo->crldp; + } else { + X509_CRL_free(new_crl); + } + } + + assert(sk_X509_CRL_value(w->crls, 0)); + flags |= X509_V_FLAG_CRL_CHECK; + X509_STORE_CTX_set0_crls(&rctx.ctx, w->crls); + } + + if (ex_count > 0) { + log_validation_status(rc, uri, disallowed_x509v3_extension, generation); + goto done; + } + + assert(w->certs != NULL); + X509_STORE_CTX_trusted_stack(&rctx.ctx, w->certs); + X509_STORE_CTX_set_verify_cb(&rctx.ctx, check_x509_cb); + + X509_VERIFY_PARAM_set_flags(rctx.ctx.param, flags); + + X509_VERIFY_PARAM_add0_policy(rctx.ctx.param, OBJ_nid2obj(NID_cp_ipAddr_asNumber)); + + if (X509_verify_cert(&rctx.ctx) <= 0) { + log_validation_status(rc, uri, certificate_failed_validation, generation); + goto done; + } + + ret = 1; + + done: + 
X509_STORE_CTX_cleanup(&rctx.ctx); + EVP_PKEY_free(issuer_pkey); + EVP_PKEY_free(subject_pkey); + BASIC_CONSTRAINTS_free(bc); + sk_ACCESS_DESCRIPTION_pop_free(sia, ACCESS_DESCRIPTION_free); + sk_ACCESS_DESCRIPTION_pop_free(aia, ACCESS_DESCRIPTION_free); + sk_DIST_POINT_pop_free(crldp, DIST_POINT_free); + sk_POLICYINFO_pop_free(policies, POLICYINFO_free); + sk_ASN1_OBJECT_pop_free(eku, ASN1_OBJECT_free); + + return ret; +} + +/** + * Extract one datum from a CMS_SignerInfo. + */ +static void *extract_si_datum(CMS_SignerInfo *si, + int *n, + const int optional, + const int nid, + const int asn1_type) +{ + int i = CMS_signed_get_attr_by_NID(si, nid, -1); + void *result = NULL; + X509_ATTRIBUTE *a; + + assert(si && n); + + if (i < 0 && optional) + return NULL; + + if (i >= 0 && + CMS_signed_get_attr_by_NID(si, nid, i) < 0 && + (a = CMS_signed_get_attr(si, i)) != NULL && + X509_ATTRIBUTE_count(a) == 1 && + (result = X509_ATTRIBUTE_get0_data(a, 0, asn1_type, NULL)) != NULL) + --*n; + else + *n = -1; + + return result; +} + +/** + * Check a signed CMS object. 
+ */ +static int check_cms(rcynic_ctx_t *rc, + STACK_OF(walk_ctx_t) *wsk, + const uri_t *uri, + path_t *path, + const path_t *prefix, + CMS_ContentInfo **pcms, + X509 **px, + certinfo_t *certinfo, + BIO *bio, + const unsigned char *hash, + const size_t hashlen, + const int expected_eContentType_nid, + const int require_inheritance, + const object_generation_t generation) +{ + STACK_OF(CMS_SignerInfo) *signer_infos = NULL; + CMS_ContentInfo *cms = NULL; + CMS_SignerInfo *si = NULL; + ASN1_OCTET_STRING *sid = NULL; + X509_NAME *si_issuer = NULL; + ASN1_INTEGER *si_serial = NULL; + STACK_OF(X509_CRL) *crls = NULL; + STACK_OF(X509) *certs = NULL; + X509_ALGOR *signature_alg = NULL, *digest_alg = NULL; + ASN1_OBJECT *oid = NULL; + hashbuf_t hashbuf; + X509 *x = NULL; + certinfo_t certinfo_; + int i, result = 0; + + assert(rc && wsk && uri && path && prefix); + + if (!certinfo) + certinfo = &certinfo_; + + if (!uri_to_filename(rc, uri, path, prefix)) + goto error; + + if (hash) + cms = read_cms(path, &hashbuf); + else + cms = read_cms(path, NULL); + + if (!cms) + goto error; + + if (hash && (hashlen > sizeof(hashbuf.h) || + memcmp(hashbuf.h, hash, hashlen))) { + log_validation_status(rc, uri, digest_mismatch, generation); + if (!rc->allow_digest_mismatch) + goto error; + } + + if (OBJ_obj2nid(CMS_get0_eContentType(cms)) != expected_eContentType_nid) { + log_validation_status(rc, uri, bad_cms_econtenttype, generation); + goto error; + } + + if (CMS_verify(cms, NULL, NULL, NULL, bio, CMS_NO_SIGNER_CERT_VERIFY) <= 0) { + log_validation_status(rc, uri, cms_validation_failure, generation); + goto error; + } + + if ((crls = CMS_get1_crls(cms)) != NULL) { + log_validation_status(rc, uri, cms_includes_crls, generation); + goto error; + } + + if ((signer_infos = CMS_get0_SignerInfos(cms)) == NULL || + sk_CMS_SignerInfo_num(signer_infos) != 1 || + (si = sk_CMS_SignerInfo_value(signer_infos, 0)) == NULL || + !CMS_SignerInfo_get0_signer_id(si, &sid, &si_issuer, &si_serial) || + sid 
== NULL || si_issuer != NULL || si_serial != NULL || + CMS_unsigned_get_attr_count(si) != -1) { + log_validation_status(rc, uri, bad_cms_signer_infos, generation); + goto error; + } + + CMS_SignerInfo_get0_algs(si, NULL, &x, &digest_alg, &signature_alg); + + if (x == NULL) { + log_validation_status(rc, uri, cms_signer_missing, generation); + goto error; + } + + if ((certs = CMS_get1_certs(cms)) == NULL || + sk_X509_num(certs) != 1 || + X509_cmp(x, sk_X509_value(certs, 0))) { + log_validation_status(rc, uri, bad_cms_signer, generation); + goto error; + } + + X509_ALGOR_get0(&oid, NULL, NULL, signature_alg); + i = OBJ_obj2nid(oid); + if (i != NID_sha256WithRSAEncryption && i != NID_rsaEncryption) { + log_validation_status(rc, uri, wrong_cms_si_signature_algorithm, generation); + goto error; + } + + X509_ALGOR_get0(&oid, NULL, NULL, digest_alg); + if (OBJ_obj2nid(oid) != NID_sha256) { + log_validation_status(rc, uri, wrong_cms_si_digest_algorithm, generation); + goto error; + } + + i = CMS_signed_get_attr_count(si); + + (void) extract_si_datum(si, &i, 1, NID_pkcs9_signingTime, V_ASN1_UTCTIME); + (void) extract_si_datum(si, &i, 1, NID_binary_signing_time, V_ASN1_INTEGER); + oid = extract_si_datum(si, &i, 0, NID_pkcs9_contentType, V_ASN1_OBJECT); + (void) extract_si_datum(si, &i, 0, NID_pkcs9_messageDigest, V_ASN1_OCTET_STRING); + + if (i != 0) { + log_validation_status(rc, uri, bad_cms_si_signed_attributes, generation); + if (!rc->allow_wrong_cms_si_attributes) + goto error; + } + + if (OBJ_obj2nid(oid) != expected_eContentType_nid) { + log_validation_status(rc, uri, bad_cms_si_contenttype, generation); + goto error; + } + + if (CMS_SignerInfo_cert_cmp(si, x)) { + log_validation_status(rc, uri, cms_ski_mismatch, generation); + goto error; + } + + if (!check_x509(rc, wsk, uri, x, certinfo, generation)) + goto error; + + if (require_inheritance && x->rfc3779_addr) { + for (i = 0; i < sk_IPAddressFamily_num(x->rfc3779_addr); i++) { + IPAddressFamily *f = 
sk_IPAddressFamily_value(x->rfc3779_addr, i); + if (f->ipAddressChoice->type != IPAddressChoice_inherit) { + log_validation_status(rc, uri, rfc3779_inheritance_required, generation); + goto error; + } + } + } + + if (require_inheritance && x->rfc3779_asid && x->rfc3779_asid->asnum && + x->rfc3779_asid->asnum->type != ASIdentifierChoice_inherit) { + log_validation_status(rc, uri, rfc3779_inheritance_required, generation); + goto error; + } + + if (pcms) { + *pcms = cms; + cms = NULL; + } + + if (px) + *px = x; + + result = 1; + + error: + CMS_ContentInfo_free(cms); + sk_X509_CRL_pop_free(crls, X509_CRL_free); + sk_X509_pop_free(certs, X509_free); + + return result; +} + + + +/** + * Load certificate, check against manifest, then run it through all + * the check_x509() tests. + */ +static X509 *check_cert_1(rcynic_ctx_t *rc, + STACK_OF(walk_ctx_t) *wsk, + const uri_t *uri, + path_t *path, + const path_t *prefix, + certinfo_t *certinfo, + const unsigned char *hash, + const size_t hashlen, + object_generation_t generation) +{ + hashbuf_t hashbuf; + X509 *x = NULL; + + assert(uri && path && wsk && certinfo); + + if (!uri_to_filename(rc, uri, path, prefix)) + return NULL; + + if (access(path->s, R_OK)) + return NULL; + + if (hash) + x = read_cert(path, &hashbuf); + else + x = read_cert(path, NULL); + + if (!x) { + logmsg(rc, log_sys_err, "Can't read certificate %s", path->s); + goto punt; + } + + if (hash && (hashlen > sizeof(hashbuf.h) || + memcmp(hashbuf.h, hash, hashlen))) { + log_validation_status(rc, uri, digest_mismatch, generation); + if (!rc->allow_digest_mismatch) + goto punt; + } + + if (check_x509(rc, wsk, uri, x, certinfo, generation)) + return x; + + punt: + X509_free(x); + return NULL; +} + +/** + * Try to find a good copy of a certificate either in fresh data or in + * backup data from a previous run of this program. 
+ */ +static X509 *check_cert(rcynic_ctx_t *rc, + STACK_OF(walk_ctx_t) *wsk, + uri_t *uri, + certinfo_t *certinfo, + const unsigned char *hash, + const size_t hashlen) +{ + walk_ctx_t *w = walk_ctx_stack_head(wsk); + object_generation_t generation; + const path_t *prefix = NULL; + path_t path; + X509 *x; + + assert(rc && uri && wsk && w && certinfo); + + switch (w->state) { + case walk_state_current: + prefix = &rc->unauthenticated; + generation = object_generation_current; + break; + case walk_state_backup: + prefix = &rc->old_authenticated; + generation = object_generation_backup; + break; + default: + return NULL; + } + + if (skip_checking_this_object(rc, uri, generation)) + return NULL; + + if ((x = check_cert_1(rc, wsk, uri, &path, prefix, certinfo, + hash, hashlen, generation)) != NULL) + install_object(rc, uri, &path, generation); + else if (!access(path.s, F_OK)) + log_validation_status(rc, uri, object_rejected, generation); + else if (hash && generation == w->manifest_generation) + log_validation_status(rc, uri, manifest_lists_missing_object, generation); + + return x; +} + + + +/** + * Read and check one manifest from disk. 
+ */ +static Manifest *check_manifest_1(rcynic_ctx_t *rc, + STACK_OF(walk_ctx_t) *wsk, + const uri_t *uri, + path_t *path, + const path_t *prefix, + certinfo_t *certinfo, + const object_generation_t generation) +{ + STACK_OF(FileAndHash) *sorted_fileList = NULL; + Manifest *manifest = NULL, *result = NULL; + CMS_ContentInfo *cms = NULL; + FileAndHash *fah = NULL, *fah2 = NULL; + BIO *bio = NULL; + X509 *x; + int i; + + assert(rc && wsk && uri && path && prefix); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) { + logmsg(rc, log_sys_err, "Couldn't allocate BIO for manifest %s", uri->s); + goto done; + } + + if (!check_cms(rc, wsk, uri, path, prefix, &cms, &x, certinfo, bio, NULL, 0, + NID_ct_rpkiManifest, 1, generation)) + goto done; + + if ((manifest = ASN1_item_d2i_bio(ASN1_ITEM_rptr(Manifest), bio, NULL)) == NULL) { + log_validation_status(rc, uri, cms_econtent_decode_error, generation); + goto done; + } + + if (manifest->version) { + log_validation_status(rc, uri, wrong_object_version, generation); + goto done; + } + + if (X509_cmp_current_time(manifest->thisUpdate) > 0) { + log_validation_status(rc, uri, manifest_not_yet_valid, generation); + goto done; + } + + if (X509_cmp_current_time(manifest->nextUpdate) < 0) { + log_validation_status(rc, uri, stale_crl_or_manifest, generation); + if (!rc->allow_stale_manifest) + goto done; + } + + if (asn1_time_cmp(manifest->thisUpdate, X509_get_notBefore(x)) < 0 || + asn1_time_cmp(manifest->nextUpdate, X509_get_notAfter(x)) > 0) { + log_validation_status(rc, uri, manifest_interval_overruns_cert, generation); + goto done; + } + + if (ASN1_INTEGER_cmp(manifest->manifestNumber, asn1_zero) < 0 || + ASN1_INTEGER_cmp(manifest->manifestNumber, asn1_twenty_octets) > 0) { + log_validation_status(rc, uri, bad_manifest_number, generation); + goto done; + } + + if (OBJ_obj2nid(manifest->fileHashAlg) != NID_sha256) { + log_validation_status(rc, uri, nonconformant_digest_algorithm, generation); + goto done; + } + + if ((sorted_fileList = 
sk_FileAndHash_dup(manifest->fileList)) == NULL) { + logmsg(rc, log_sys_err, "Couldn't allocate shallow copy of fileList for manifest %s", uri->s); + goto done; + } + + (void) sk_FileAndHash_set_cmp_func(sorted_fileList, FileAndHash_name_cmp); + sk_FileAndHash_sort(sorted_fileList); + + for (i = 0; (fah = sk_FileAndHash_value(sorted_fileList, i)) != NULL && (fah2 = sk_FileAndHash_value(sorted_fileList, i + 1)) != NULL; i++) { + if (!strcmp((char *) fah->file->data, (char *) fah2->file->data)) { + log_validation_status(rc, uri, duplicate_name_in_manifest, generation); + goto done; + } + } + + for (i = 0; (fah = sk_FileAndHash_value(manifest->fileList, i)) != NULL; i++) { + if (fah->hash->length != HASH_SHA256_LEN || + (fah->hash->flags & (ASN1_STRING_FLAG_BITS_LEFT | 7)) > ASN1_STRING_FLAG_BITS_LEFT) { + log_validation_status(rc, uri, bad_manifest_digest_length, generation); + goto done; + } + } + + result = manifest; + manifest = NULL; + + done: + BIO_free(bio); + Manifest_free(manifest); + CMS_ContentInfo_free(cms); + sk_FileAndHash_free(sorted_fileList); + return result; +} + +/** + * Check whether we already have a particular manifest, attempt to fetch it + * and check issuer's signature if we don't. + * + * General plan here is to do basic checks on both current and backup + * generation manifests, then, if both generations pass all of our + * other tests, pick the generation with the highest manifest number, + * to protect against replay attacks. + * + * Once we've picked the manifest we're going to use, we need to check + * it against the CRL we've chosen. Not much we can do if they don't + * match besides whine about it, but we do need to whine in this case. 
+ */ +static int check_manifest(rcynic_ctx_t *rc, + STACK_OF(walk_ctx_t) *wsk) +{ + walk_ctx_t *w = walk_ctx_stack_head(wsk); + Manifest *old_manifest, *new_manifest, *result = NULL; + certinfo_t old_certinfo, new_certinfo; + const uri_t *uri, *crldp = NULL; + object_generation_t generation = object_generation_null; + path_t old_path, new_path; + FileAndHash *fah = NULL; + const char *crl_tail; + int i, ok = 1; + + assert(rc && wsk && w && !w->manifest); + + uri = &w->certinfo.manifest; + + logmsg(rc, log_telemetry, "Checking manifest %s", uri->s); + + new_manifest = check_manifest_1(rc, wsk, uri, &new_path, + &rc->unauthenticated, &new_certinfo, + object_generation_current); + + old_manifest = check_manifest_1(rc, wsk, uri, &old_path, + &rc->old_authenticated, &old_certinfo, + object_generation_backup); + + if (!new_manifest) + result = old_manifest; + + else if (!old_manifest) + result = new_manifest; + + else { + int num_cmp = ASN1_INTEGER_cmp(old_manifest->manifestNumber, new_manifest->manifestNumber); + int date_cmp = ASN1_STRING_cmp(old_manifest->thisUpdate, new_manifest->thisUpdate); + + if (num_cmp > 0) + log_validation_status(rc, uri, backup_number_higher_than_current, object_generation_current); + if (date_cmp > 0) + log_validation_status(rc, uri, backup_thisupdate_newer_than_current, object_generation_current); + + if (num_cmp > 0 && date_cmp > 0) + result = old_manifest; + else + result = new_manifest; + } + + if (result && result == new_manifest) { + generation = object_generation_current; + install_object(rc, uri, &new_path, generation); + crldp = &new_certinfo.crldp; + } + + if (result && result == old_manifest) { + generation = object_generation_backup; + install_object(rc, uri, &old_path, generation); + crldp = &old_certinfo.crldp; + } + + if (result) { + crl_tail = strrchr(crldp->s, '/'); + assert(crl_tail != NULL); + crl_tail++; + + for (i = 0; (fah = sk_FileAndHash_value(result->fileList, i)) != NULL; i++) + if (!strcmp((char *) fah->file->data, 
crl_tail)) + break; + + if (!fah) { + log_validation_status(rc, uri, crl_not_in_manifest, generation); + if (rc->require_crl_in_manifest) + ok = 0; + } + + else if (!check_crl_digest(rc, crldp, fah->hash->data, fah->hash->length)) { + log_validation_status(rc, uri, digest_mismatch, generation); + if (!rc->allow_crl_digest_mismatch) + ok = 0; + } + } + + if ((!result || result != new_manifest) && !access(new_path.s, F_OK)) + log_validation_status(rc, uri, object_rejected, object_generation_current); + + if (!result && !access(old_path.s, F_OK)) + log_validation_status(rc, uri, object_rejected, object_generation_backup); + + if (result != new_manifest) + Manifest_free(new_manifest); + + if (result != old_manifest) + Manifest_free(old_manifest); + + w->manifest = result; + if (crldp) + w->crldp = *crldp; + w->manifest_generation = generation; + + return ok; +} + + + +/** + * Mark CRL or manifest that we're rechecking so XML report makes more sense. + */ +static void rsync_needed_mark_recheck(rcynic_ctx_t *rc, + const uri_t *uri) +{ + validation_status_t *v = NULL; + + if (uri->s[0] != '\0') + v = validation_status_find(rc->validation_status_root, + uri, object_generation_current); + + if (v) { + validation_status_set_code(v, stale_crl_or_manifest, 0); + log_validation_status(rc, uri, rechecking_object, + object_generation_current); + } +} + +/** + * Check whether we need to rsync a particular tree. This depends on + * the setting of rc->rsync_early, whether we have a valid manifest on + * file, and whether that manifest is stale yet. 
+ */ +static int rsync_needed(rcynic_ctx_t *rc, + STACK_OF(walk_ctx_t) *wsk) +{ + walk_ctx_t *w = walk_ctx_stack_head(wsk); + int needed; + + assert(rc && wsk && w); + + needed = (rc->rsync_early || + !check_manifest(rc, wsk) || + w->manifest == NULL || + X509_cmp_current_time(w->manifest->nextUpdate) < 0); + + if (needed && w->manifest != NULL) { + rsync_needed_mark_recheck(rc, &w->certinfo.manifest); + rsync_needed_mark_recheck(rc, &w->certinfo.crldp); + Manifest_free(w->manifest); + w->manifest = NULL; + } + + return needed; +} + + + +/** + * Extract a ROA prefix from the ASN.1 bitstring encoding. + */ +static int extract_roa_prefix(const ROAIPAddress *ra, + const unsigned afi, + unsigned char *addr, + unsigned *prefixlen, + unsigned *max_prefixlen) +{ + unsigned length; + long maxlen; + + assert(ra && addr && prefixlen && max_prefixlen); + + maxlen = ASN1_INTEGER_get(ra->maxLength); + + switch (afi) { + case IANA_AFI_IPV4: length = 4; break; + case IANA_AFI_IPV6: length = 16; break; + default: return 0; + } + + if (ra->IPAddress->length < 0 || ra->IPAddress->length > length || + maxlen < 0 || maxlen > (long) length * 8) + return 0; + + if (ra->IPAddress->length > 0) { + memcpy(addr, ra->IPAddress->data, ra->IPAddress->length); + if ((ra->IPAddress->flags & 7) != 0) { + unsigned char mask = 0xFF >> (8 - (ra->IPAddress->flags & 7)); + addr[ra->IPAddress->length - 1] &= ~mask; + } + } + + memset(addr + ra->IPAddress->length, 0, length - ra->IPAddress->length); + *prefixlen = (ra->IPAddress->length * 8) - (ra->IPAddress->flags & 7); + *max_prefixlen = ra->maxLength ? (unsigned) maxlen : *prefixlen; + + return 1; +} + +/** + * Read and check one ROA from disk. 
+ */ +static int check_roa_1(rcynic_ctx_t *rc, + STACK_OF(walk_ctx_t) *wsk, + const uri_t *uri, + path_t *path, + const path_t *prefix, + const unsigned char *hash, + const size_t hashlen, + const object_generation_t generation) +{ + STACK_OF(IPAddressFamily) *roa_resources = NULL, *ee_resources = NULL; + unsigned char addrbuf[ADDR_RAW_BUF_LEN]; + CMS_ContentInfo *cms = NULL; + BIO *bio = NULL; + ROA *roa = NULL; + X509 *x = NULL; + int i, j, result = 0; + unsigned afi, *safi = NULL, safi_, prefixlen, max_prefixlen; + ROAIPAddressFamily *rf; + ROAIPAddress *ra; + + assert(rc && wsk && uri && path && prefix); + + if ((bio = BIO_new(BIO_s_mem())) == NULL) { + logmsg(rc, log_sys_err, "Couldn't allocate BIO for ROA %s", uri->s); + goto error; + } + + if (!check_cms(rc, wsk, uri, path, prefix, &cms, &x, NULL, bio, NULL, 0, + NID_ct_ROA, 0, generation)) + goto error; + + if (!(roa = ASN1_item_d2i_bio(ASN1_ITEM_rptr(ROA), bio, NULL))) { + log_validation_status(rc, uri, cms_econtent_decode_error, generation); + goto error; + } + + if (roa->version) { + log_validation_status(rc, uri, wrong_object_version, generation); + goto error; + } + + if (ASN1_INTEGER_cmp(roa->asID, asn1_zero) < 0 || + ASN1_INTEGER_cmp(roa->asID, asn1_four_octets) > 0) { + log_validation_status(rc, uri, bad_roa_asID, generation); + goto error; + } + + ee_resources = X509_get_ext_d2i(x, NID_sbgp_ipAddrBlock, NULL, NULL); + + /* + * Extract prefixes from ROA and convert them into a resource set. 
+ */ + + if (!(roa_resources = sk_IPAddressFamily_new_null())) + goto error; + + for (i = 0; i < sk_ROAIPAddressFamily_num(roa->ipAddrBlocks); i++) { + rf = sk_ROAIPAddressFamily_value(roa->ipAddrBlocks, i); + if (!rf || !rf->addressFamily || rf->addressFamily->length < 2 || rf->addressFamily->length > 3) { + log_validation_status(rc, uri, malformed_roa_addressfamily, generation); + goto error; + } + afi = (rf->addressFamily->data[0] << 8) | (rf->addressFamily->data[1]); + if (rf->addressFamily->length == 3) + *(safi = &safi_) = rf->addressFamily->data[2]; + for (j = 0; j < sk_ROAIPAddress_num(rf->addresses); j++) { + ra = sk_ROAIPAddress_value(rf->addresses, j); + if (!ra || + !extract_roa_prefix(ra, afi, addrbuf, &prefixlen, &max_prefixlen) || + !v3_addr_add_prefix(roa_resources, afi, safi, addrbuf, prefixlen)) { + log_validation_status(rc, uri, roa_resources_malformed, generation); + goto error; + } + if (max_prefixlen < prefixlen) { + log_validation_status(rc, uri, roa_max_prefixlen_too_short, generation); + goto error; + } + } + } + + /* + * ROAs can include nested prefixes, so direct translation to + * resource sets could include overlapping ranges, which is illegal. + * So we have to remove nested stuff before whacking into canonical + * form. Fortunately, this is relatively easy, since we know these + * are just prefixes, not ranges: in a list of prefixes sorted by + * the RFC 3779 rules, the first element of a set of nested prefixes + * will always be the least specific. 
+ */ + + for (i = 0; i < sk_IPAddressFamily_num(roa_resources); i++) { + IPAddressFamily *f = sk_IPAddressFamily_value(roa_resources, i); + + if ((afi = v3_addr_get_afi(f)) == 0) { + log_validation_status(rc, uri, roa_contains_bad_afi_value, generation); + goto error; + } + + if (f->ipAddressChoice->type == IPAddressChoice_addressesOrRanges) { + IPAddressOrRanges *aors = f->ipAddressChoice->u.addressesOrRanges; + + sk_IPAddressOrRange_sort(aors); + + for (j = 0; j < sk_IPAddressOrRange_num(aors) - 1; j++) { + IPAddressOrRange *a = sk_IPAddressOrRange_value(aors, j); + IPAddressOrRange *b = sk_IPAddressOrRange_value(aors, j + 1); + unsigned char a_min[ADDR_RAW_BUF_LEN], a_max[ADDR_RAW_BUF_LEN]; + unsigned char b_min[ADDR_RAW_BUF_LEN], b_max[ADDR_RAW_BUF_LEN]; + int length; + + if ((length = v3_addr_get_range(a, afi, a_min, a_max, ADDR_RAW_BUF_LEN)) == 0 || + (length = v3_addr_get_range(b, afi, b_min, b_max, ADDR_RAW_BUF_LEN)) == 0) { + log_validation_status(rc, uri, roa_resources_malformed, generation); + goto error; + } + + if (memcmp(a_max, b_max, length) >= 0) { + (void) sk_IPAddressOrRange_delete(aors, j + 1); + IPAddressOrRange_free(b); + --j; + } + } + } + } + + if (!v3_addr_canonize(roa_resources)) { + log_validation_status(rc, uri, roa_resources_malformed, generation); + goto error; + } + + if (!v3_addr_subset(roa_resources, ee_resources)) { + log_validation_status(rc, uri, roa_resource_not_in_ee, generation); + goto error; + } + + result = 1; + + error: + BIO_free(bio); + ROA_free(roa); + CMS_ContentInfo_free(cms); + sk_IPAddressFamily_pop_free(roa_resources, IPAddressFamily_free); + sk_IPAddressFamily_pop_free(ee_resources, IPAddressFamily_free); + + return result; +} + +/** + * Check whether we already have a particular ROA, attempt to fetch it + * and check issuer's signature if we don't. 
 */
static void check_roa(rcynic_ctx_t *rc,
                      STACK_OF(walk_ctx_t) *wsk,
                      const uri_t *uri,
                      const unsigned char *hash,
                      const size_t hashlen)
{
  walk_ctx_t *w = walk_ctx_stack_head(wsk);
  path_t path;

  assert(rc && wsk && w && uri);

  /*
   * If an authenticated copy already exists in this run's output
   * tree, we've already accepted this ROA; nothing more to do.
   */
  if (uri_to_filename(rc, uri, &path, &rc->new_authenticated) &&
      !access(path.s, F_OK))
    return;

  logmsg(rc, log_telemetry, "Checking ROA %s", uri->s);

  /* First preference: the freshly rsynced (unauthenticated) copy. */
  if (check_roa_1(rc, wsk, uri, &path, &rc->unauthenticated,
                  hash, hashlen, object_generation_current)) {
    install_object(rc, uri, &path, object_generation_current);
    return;
  }

  /*
   * Current copy failed: if the file exists it was rejected, if it
   * doesn't exist but the manifest listed it, the object is missing.
   */
  if (!access(path.s, F_OK))
    log_validation_status(rc, uri, object_rejected, object_generation_current);
  else if (hash)
    log_validation_status(rc, uri, manifest_lists_missing_object, object_generation_current);

  /* Fall back to the backup copy kept from the previous validation run. */
  if (check_roa_1(rc, wsk, uri, &path, &rc->old_authenticated,
                  hash, hashlen, object_generation_backup)) {
    install_object(rc, uri, &path, object_generation_backup);
    return;
  }

  if (!access(path.s, F_OK))
    log_validation_status(rc, uri, object_rejected, object_generation_backup);
  else if (hash && w->manifest_generation == object_generation_backup)
    log_validation_status(rc, uri, manifest_lists_missing_object, object_generation_backup);
}



/**
 * Read and check one Ghostbuster record from disk.
 *
 * Verifies the CMS wrapper and certificate profile; the VCard
 * payload itself is deliberately not inspected (see #if 0 blocks).
 */
static int check_ghostbuster_1(rcynic_ctx_t *rc,
                               STACK_OF(walk_ctx_t) *wsk,
                               const uri_t *uri,
                               path_t *path,
                               const path_t *prefix,
                               const unsigned char *hash,
                               const size_t hashlen,
                               const object_generation_t generation)
{
  CMS_ContentInfo *cms = NULL;
  BIO *bio = NULL;
  X509 *x;                      /* EE cert returned by check_cms(); borrowed, not freed here */
  int result = 0;

  assert(rc && wsk && uri && path && prefix);

#if 0
  /*
   * May want this later if we're going to inspect the VCard.  For now,
   * just leave this NULL and the right thing should happen.
   */
  if ((bio = BIO_new(BIO_s_mem())) == NULL) {
    logmsg(rc, log_sys_err, "Couldn't allocate BIO for Ghostbuster record %s", uri->s);
    goto error;
  }
#endif

  if (!check_cms(rc, wsk, uri, path, prefix, &cms, &x, NULL, bio, NULL, 0,
                 NID_ct_rpkiGhostbusters, 1, generation))
    goto error;

#if 0
  /*
   * Here is where we would read the VCard from the bio returned by
   * CMS_verify() so that we could check the VCard.
   */
#endif

  result = 1;

 error:
  /* bio is NULL unless the #if 0 path is enabled; BIO_free(NULL) is safe. */
  BIO_free(bio);
  CMS_ContentInfo_free(cms);

  return result;
}

/**
 * Check whether we already have a particular Ghostbuster record,
 * attempt to fetch it and check issuer's signature if we don't.
 *
 * Same current-then-backup strategy as check_roa() above.
 */
static void check_ghostbuster(rcynic_ctx_t *rc,
                              STACK_OF(walk_ctx_t) *wsk,
                              const uri_t *uri,
                              const unsigned char *hash,
                              const size_t hashlen)
{
  walk_ctx_t *w = walk_ctx_stack_head(wsk);
  path_t path;

  assert(rc && wsk && w && uri);

  /* Already accepted in this run? */
  if (uri_to_filename(rc, uri, &path, &rc->new_authenticated) &&
      !access(path.s, F_OK))
    return;

  logmsg(rc, log_telemetry, "Checking Ghostbuster record %s", uri->s);

  if (check_ghostbuster_1(rc, wsk, uri, &path, &rc->unauthenticated,
                          hash, hashlen, object_generation_current)) {
    install_object(rc, uri, &path, object_generation_current);
    return;
  }

  if (!access(path.s, F_OK))
    log_validation_status(rc, uri, object_rejected, object_generation_current);
  else if (hash)
    log_validation_status(rc, uri, manifest_lists_missing_object, object_generation_current);

  if (check_ghostbuster_1(rc, wsk, uri, &path, &rc->old_authenticated,
                          hash, hashlen, object_generation_backup)) {
    install_object(rc, uri, &path, object_generation_backup);
    return;
  }

  if (!access(path.s, F_OK))
    log_validation_status(rc, uri, object_rejected, object_generation_backup);
  else if (hash && w->manifest_generation == object_generation_backup)
    log_validation_status(rc, uri, manifest_lists_missing_object, object_generation_backup);
}



/* Forward declaration: walk_cert() and rsync_sia_callback() are mutually recursive. */
static void walk_cert(rcynic_ctx_t *, void *);

/**
 * rsync callback for fetching SIA tree.
 *
 * On completion, advances the walk state and requeues walk_cert().
 * While the fetch is still pending, opportunistically clones the walk
 * stack (minus the top frame) so the issuer's remaining work can
 * proceed in parallel, subject to the max_parallel_fetches limit.
 */
static void rsync_sia_callback(rcynic_ctx_t *rc,
                               const rsync_ctx_t *ctx,
                               const rsync_status_t status,
                               const uri_t *uri,
                               void *cookie)
{
  STACK_OF(walk_ctx_t) *wsk = cookie;
  walk_ctx_t *w = walk_ctx_stack_head(wsk);

  assert(rc && wsk);

  if (status != rsync_status_pending) {
    w->state++;
    task_add(rc, walk_cert, wsk);
    return;
  }

  if (rsync_count_runable(rc) >= rc->max_parallel_fetches)
    return;

  if ((wsk = walk_ctx_stack_clone(wsk)) == NULL) {
    logmsg(rc, log_sys_err,
           "walk_ctx_stack_clone() failed, probably memory exhaustion, blundering onwards without forking stack");
    return;
  }

  /* The clone drops the frame waiting on this fetch and resumes the issuer. */
  walk_ctx_stack_pop(wsk);
  task_add(rc, walk_cert, wsk);
}

/**
 * Recursive walk of certificate hierarchy (core of the program).
 *
 * Walk all products of the current certificate, starting with the
 * ones named in the manifest and continuing with any that we find in
 * the publication directory but which are not named in the manifest.
 *
 * Dispatch to correct checking code for the object named by URI,
 * based on the filename extension in the uri.  CRLs are a special
 * case because we've already checked them by the time we get here, so
 * we just ignore them.  Other objects are either certificates or
 * CMS-signed objects of one kind or another.
 */
static void walk_cert(rcynic_ctx_t *rc, void *cookie)
{
  STACK_OF(walk_ctx_t) *wsk = cookie;
  const unsigned char *hash = NULL;
  object_generation_t generation;
  size_t hashlen;
  walk_ctx_t *w;
  uri_t uri;

  assert(rc && wsk);

  /*
   * Explicit state machine over the walk stack.  Each iteration
   * inspects the top frame; frames are pushed for subordinate CA
   * certs and popped when a subtree is done.  The function returns
   * early (without freeing wsk) only when handing the stack off to an
   * async rsync fetch; otherwise it runs the stack to exhaustion.
   */
  while ((w = walk_ctx_stack_head(wsk)) != NULL) {

    /* Map walk state to the object generation used for logging. */
    switch (w->state) {
    case walk_state_current:
      generation = object_generation_current;
      break;
    case walk_state_backup:
      generation = object_generation_backup;
      break;
    default:
      generation = object_generation_null;
      break;
    }

    switch (w->state) {

    case walk_state_initial:

      /* Leaf (non-CA) or no SIA: nothing to walk below this cert. */
      if (!w->certinfo.sia.s[0] || !w->certinfo.ca) {
        w->state = walk_state_done;
        continue;
      }

      if (!w->certinfo.manifest.s[0]) {
        log_validation_status(rc, &w->certinfo.uri, sia_manifest_uri_missing, w->certinfo.generation);
        w->state = walk_state_done;
        continue;
      }

      w->state++;
      continue;

    case walk_state_rsync:

      if (rsync_needed(rc, wsk)) {
        /* Hand the stack to the fetcher; rsync_sia_callback() resumes us. */
        rsync_tree(rc, &w->certinfo.sia, wsk, rsync_sia_callback);
        return;
      }
      log_validation_status(rc, &w->certinfo.sia, rsync_transfer_skipped, object_generation_null);
      w->state++;
      continue;

    case walk_state_ready:

      walk_ctx_loop_init(rc, wsk);      /* sets w->state */
      continue;

    case walk_state_current:
    case walk_state_backup:

      /* Iterate over objects in this publication point. */
      if (!walk_ctx_loop_this(rc, wsk, &uri, &hash, &hashlen)) {
        walk_ctx_loop_next(rc, wsk);
        continue;
      }

      if (endswith(uri.s, ".crl") || endswith(uri.s, ".mft") || endswith(uri.s, ".mnf")) {
        walk_ctx_loop_next(rc, wsk);
        continue;                       /* CRLs and manifests checked elsewhere */
      }

      /* hash == NULL means the object was not listed in the manifest. */
      if (hash == NULL && !rc->allow_object_not_in_manifest) {
        log_validation_status(rc, &uri, skipped_because_not_in_manifest, generation);
        walk_ctx_loop_next(rc, wsk);
        continue;
      }

      if (hash == NULL)
        log_validation_status(rc, &uri, tainted_by_not_being_in_manifest, generation);
      else if (w->stale_manifest)
        log_validation_status(rc, &uri, tainted_by_stale_manifest, generation);

      if (endswith(uri.s, ".roa")) {
        check_roa(rc, wsk, &uri, hash, hashlen);
        walk_ctx_loop_next(rc, wsk);
        continue;
      }

      if (endswith(uri.s, ".gbr")) {
        check_ghostbuster(rc, wsk, &uri, hash, hashlen);
        walk_ctx_loop_next(rc, wsk);
        continue;
      }

      if (endswith(uri.s, ".cer")) {
        certinfo_t certinfo;
        X509 *x = check_cert(rc, wsk, &uri, &certinfo, hash, hashlen);
        /*
         * Push failure (x NULL or stack error) means we just advance;
         * on success the pushed frame is processed before we return
         * to this loop position.
         */
        if (!walk_ctx_stack_push(wsk, x, &certinfo))
          walk_ctx_loop_next(rc, wsk);
        continue;
      }

      log_validation_status(rc, &uri, unknown_object_type_skipped, object_generation_null);
      walk_ctx_loop_next(rc, wsk);
      continue;

    case walk_state_done:

      walk_ctx_stack_pop(wsk);          /* Resume our issuer's state */
      continue;

    }
  }

  assert(walk_ctx_stack_head(wsk) == NULL);
  walk_ctx_stack_free(wsk);
}

/**
 * Check a trust anchor.  Yes, we trust it, by definition, but it
 * still needs to conform to the certificate profile, the
 * self-signature must be correct, etcetera.
 *
 * Ownership of the TA certificate object passes to this function when
 * called (ie, freeing "x" is our responsibility).
 */
static int check_ta(rcynic_ctx_t *rc, X509 *x, const uri_t *uri,
                    const path_t *path1, const path_t *path2,
                    const object_generation_t generation)
{
  STACK_OF(walk_ctx_t) *wsk = NULL;
  walk_ctx_t *w = NULL;

  assert(rc && x && uri && path1 && path2);

  /*
   * NOTE(review): this NULL check contradicts the assert immediately
   * above (which already requires x != NULL); it is reachable only
   * under NDEBUG builds.  Returning 1 here treats "no TA" as a
   * non-fatal no-op.
   */
  if (x == NULL)
    return 1;

  if ((wsk = walk_ctx_stack_new()) == NULL) {
    logmsg(rc, log_sys_err, "Couldn't allocate walk context stack");
    X509_free(x);
    return 0;
  }

  /* The stack takes ownership of x from here on; it is freed via the stack. */
  if ((w = walk_ctx_stack_push(wsk, x, NULL)) == NULL) {
    logmsg(rc, log_sys_err, "Couldn't push walk context stack");
    walk_ctx_stack_free(wsk);
    X509_free(x);
    return 0;
  }

  /* Rejecting a TA is not a fatal program error, hence return 1 here. */
  if (!check_x509(rc, wsk, uri, x, NULL, generation)) {
    log_validation_status(rc, uri, object_rejected, generation);
    walk_ctx_stack_free(wsk);
    return 1;
  }

  logmsg(rc, log_telemetry, "Copying trust anchor %s to %s", path1->s, path2->s);

  if (!mkdir_maybe(rc, path2) || !cp_ln(rc, path1, path2)) {
    walk_ctx_stack_free(wsk);
    return 0;
  }

  log_validation_status(rc, uri, object_accepted, generation);
  /* Queue the tree walk rooted at this TA; the task owns wsk now. */
  task_add(rc, walk_cert, wsk);
  return 1;
}



/**
 * Check a trust anchor read from a local file.
 */
static int check_ta_cer(rcynic_ctx_t *rc,
                        const char *fn)

{
  path_t path1, path2;
  unsigned long hash;
  X509 *x = NULL;
  uri_t uri;
  int i;

  assert(rc && fn);

  logmsg(rc, log_telemetry, "Processing trust anchor from file %s", fn);

  if (strlen(fn) >= sizeof(path1.s)) {
    logmsg(rc, log_usage_err, "Trust anchor path name too long %s", fn);
    return 0;
  }
  strcpy(path1.s, fn);
  filename_to_uri(&uri, path1.s);

  if ((x = read_cert(&path1, NULL)) == NULL) {
    logmsg(rc, log_usage_err, "Couldn't read trust anchor from file %s", fn);
    log_validation_status(rc, &uri, unreadable_trust_anchor, object_generation_null);
    goto lose;
  }

  /*
   * Construct a collision-free installed name of the form
   * <subject-hash>.<n>.cer (same scheme as OpenSSL hashed cert dirs),
   * probing for the first unused n.
   */
  hash = X509_subject_name_hash(x);

  for (i = 0; i < INT_MAX; i++) {
    if (snprintf(path2.s, sizeof(path2.s), "%s%lx.%d.cer",
                 rc->new_authenticated.s, hash, i) >= sizeof(path2.s)) {
      logmsg(rc, log_sys_err,
             "Couldn't construct path name for trust anchor %s", path1.s);
      goto lose;
    }
    if (access(path2.s, F_OK))
      break;
  }
  if (i == INT_MAX) {
    logmsg(rc, log_sys_err, "Couldn't find a free name for trust anchor %s", path1.s);
    goto lose;
  }

  /* check_ta() takes ownership of x. */
  return check_ta(rc, x, &uri, &path1, &path2, object_generation_null);

 lose:
  log_validation_status(rc, &uri, trust_anchor_skipped, object_generation_null);
  X509_free(x);
  return 0;
}



/**
 * Allocate a new tal_ctx_t, zero-filled; NULL on allocation failure.
 */
static tal_ctx_t *tal_ctx_t_new(void)
{
  tal_ctx_t *tctx = malloc(sizeof(*tctx));
  if (tctx)
    memset(tctx, 0, sizeof(*tctx));
  return tctx;
}

/**
 * Free a tal_ctx_t and the public key it owns.  NULL-safe.
 */
static void tal_ctx_t_free(tal_ctx_t *tctx)
{
  if (tctx) {
    EVP_PKEY_free(tctx->pkey);
    free(tctx);
  }
}

/**
 * Read a trust anchor from disk and compare with known public key.
 *
 * NB: EVP_PKEY_cmp() returns 1 for match, not 0 like every other
 * xyz_cmp() function in the entire OpenSSL library.  Go figure.
 */
static int check_ta_tal_callback_1(rcynic_ctx_t *rc,
                                   const tal_ctx_t *tctx,
                                   object_generation_t generation)

{
  const path_t *prefix = NULL;
  EVP_PKEY *pkey = NULL;
  X509 *x = NULL;
  path_t path;
  int ret = 0;

  /* Pick the source tree matching the requested generation. */
  switch (generation) {
  case object_generation_current:
    prefix = &rc->unauthenticated;
    break;
  case object_generation_backup:
    prefix = &rc->old_authenticated;
    break;
  default:
    goto done;
  }

  if (!uri_to_filename(rc, &tctx->uri, &path, prefix)) {
    log_validation_status(rc, &tctx->uri, unreadable_trust_anchor_locator, generation);
    goto done;
  }

  if ((x = read_cert(&path, NULL)) == NULL || (pkey = X509_get_pubkey(x)) == NULL) {
    log_validation_status(rc, &tctx->uri, unreadable_trust_anchor, generation);
    goto done;
  }

  /* EVP_PKEY_cmp() returns 1 on match (see comment above). */
  if (EVP_PKEY_cmp(tctx->pkey, pkey) != 1) {
    log_validation_status(rc, &tctx->uri, trust_anchor_key_mismatch, generation);
    goto done;
  }

  ret = check_ta(rc, &tctx->uri ? x : x, &tctx->uri, &path, &tctx->path, generation);
  x = NULL;                     /* ownership of x passed to check_ta() */

 done:
  if (!ret)
    log_validation_status(rc, &tctx->uri, object_rejected, generation);
  EVP_PKEY_free(pkey);
  X509_free(x);
  return ret;
}

/**
 * rsync callback for fetching a TAL.
 *
 * Tries the freshly fetched copy first, then the backup from the
 * previous run; frees the TAL context when done.
 */
static void rsync_tal_callback(rcynic_ctx_t *rc,
                               const rsync_ctx_t *ctx,
                               const rsync_status_t status,
                               const uri_t *uri,
                               void *cookie)
{
  tal_ctx_t *tctx = cookie;

  assert(rc && tctx);

  if (status == rsync_status_pending)
    return;

  if (!check_ta_tal_callback_1(rc, tctx, object_generation_current) &&
      !check_ta_tal_callback_1(rc, tctx, object_generation_backup))
    log_validation_status(rc, &tctx->uri, trust_anchor_skipped, object_generation_null);

  tal_ctx_t_free(tctx);
}

/**
 * Check a trust anchor read from a trust anchor locator (TAL).
 */
static int check_ta_tal(rcynic_ctx_t *rc,
                        const char *fn)

{
  tal_ctx_t *tctx = NULL;
  BIO *bio = NULL;
  /*
   * NOTE(review): ret is initialized to 1 and never cleared, so this
   * function always reports success -- presumably deliberate so one
   * bad TAL doesn't abort the whole TA scan (cf. check_ta_dir()'s
   * break-on-failure), but worth confirming.
   */
  int ret = 1;

  assert(rc && fn);

  logmsg(rc, log_telemetry, "Processing trust anchor locator from file %s", fn);

  if ((tctx = tal_ctx_t_new()) == NULL) {
    logmsg(rc, log_sys_err, "malloc(tal_ctxt_t) failed");
    goto done;
  }

  bio = BIO_new_file(fn, "r");

  if (!bio)
    logmsg(rc, log_usage_err, "Couldn't open trust anchor locator file %s", fn);

  /* First line of the TAL is the rsync URI of the TA certificate. */
  if (!bio || BIO_gets(bio, tctx->uri.s, sizeof(tctx->uri.s)) <= 0) {
    uri_t furi;
    filename_to_uri(&furi, fn);
    log_validation_status(rc, &furi, unreadable_trust_anchor_locator, object_generation_null);
    goto done;
  }

  /* Strip trailing whitespace / line terminator from the URI. */
  tctx->uri.s[strcspn(tctx->uri.s, " \t\r\n")] = '\0';

  if (!uri_to_filename(rc, &tctx->uri, &tctx->path, &rc->new_authenticated)) {
    log_validation_status(rc, &tctx->uri, unreadable_trust_anchor_locator, object_generation_null);
    goto done;
  }

  if (!endswith(tctx->uri.s, ".cer")) {
    log_validation_status(rc, &tctx->uri, malformed_tal_uri, object_generation_null);
    goto done;
  }

  /*
   * Remainder of the TAL is the base64-encoded SubjectPublicKeyInfo;
   * stack linebreak + base64 filter BIOs to decode it.  NOTE(review):
   * if BIO_new() fails here, BIO_push() is called with a NULL first
   * argument -- looks survivable but verify against OpenSSL docs.
   */
  bio = BIO_push(BIO_new(BIO_f_linebreak()), bio);
  bio = BIO_push(BIO_new(BIO_f_base64()), bio);
  if (bio)
    tctx->pkey = d2i_PUBKEY_bio(bio, NULL);
  if (!tctx->pkey) {
    log_validation_status(rc, &tctx->uri, unreadable_trust_anchor_locator, object_generation_null);
    goto done;
  }

  logmsg(rc, log_telemetry, "Processing trust anchor from URI %s", tctx->uri.s);

  rsync_ta(rc, &tctx->uri, tctx, rsync_tal_callback);
  tctx = NULL;                  /* Control has passed */

 done:
  tal_ctx_t_free(tctx);
  BIO_free_all(bio);
  return ret;
}

/**
 * Check a directory of trust anchors and trust anchor locators.
 */
static int check_ta_dir(rcynic_ctx_t *rc,
                        const char *dn)
{
  DIR *dir = NULL;
  struct dirent *d;
  path_t path;
  int is_cer, is_tal;

  assert(rc && dn);

  if ((dir = opendir(dn)) == NULL) {
    logmsg(rc, log_sys_err, "Couldn't open trust anchor directory %s: %s",
           dn, strerror(errno));
    return 0;
  }

  /* Process every *.cer and *.tal entry; stop early on first hard failure. */
  while ((d = readdir(dir)) != NULL) {
    if (snprintf(path.s, sizeof(path.s), "%s/%s", dn, d->d_name) >= sizeof(path.s)) {
      logmsg(rc, log_data_err, "Pathname %s/%s too long", dn, d->d_name);
      break;
    }
    is_cer = endswith(path.s, ".cer");
    is_tal = endswith(path.s, ".tal");
    if (is_cer && !check_ta_cer(rc, path.s))
      break;
    if (is_tal && !check_ta_tal(rc, path.s))
      break;
    if (!is_cer && !is_tal)
      logmsg(rc, log_verbose, "Skipping non-trust-anchor %s", path.s);
  }

  /* NOTE(review): dir cannot be NULL here; the guard is redundant. */
  if (dir != NULL)
    closedir(dir);

  /* Success iff we ran off the end of the directory (d == NULL).
   * NOTE(review): stray second semicolon below is harmless but should go. */
  return !d;;
}



/**
 * Write detailed log of what we've done as an XML file.
 *
 * NOTE(review): several fprintf() format strings in this function
 * appear corrupted in this copy -- the XML markup inside the string
 * literals (element/attribute text in angle brackets) has been
 * stripped by whatever extracted this patch, leaving fragments that
 * cannot compile as shown (e.g. the validation_status and
 * rsync_history emitters below).  The literals are reproduced here
 * exactly as found; restore them from the upstream rcynic.c before
 * building.
 */
static int write_xml_file(const rcynic_ctx_t *rc,
                          const char *xmlfile)
{
  int i, j, use_stdout, ok;
  char hostname[HOSTNAME_MAX];
  mib_counter_t code;
  timestamp_t ts;
  FILE *f = NULL;
  path_t xmltemp;

  /* No filename means XML output is disabled; that's success. */
  if (xmlfile == NULL)
    return 1;

  use_stdout = !strcmp(xmlfile, "-");

  logmsg(rc, log_telemetry, "Writing XML summary to %s",
         (use_stdout ? "standard output" : xmlfile));

  /* Write to a temp file, rename into place on success (atomic update). */
  if (use_stdout) {
    f = stdout;
    ok = 1;
  } else if (snprintf(xmltemp.s, sizeof(xmltemp.s), "%s.%u.tmp", xmlfile, (unsigned) getpid()) >= sizeof(xmltemp.s)) {
    logmsg(rc, log_usage_err, "Filename \"%s\" is too long, not writing XML", xmlfile);
    return 0;
  } else {
    ok = (f = fopen(xmltemp.s, "w")) != NULL;
  }

  ok &= gethostname(hostname, sizeof(hostname)) == 0;

  /* Document header (string literal corrupted -- see function comment). */
  if (ok)
    ok &= fprintf(f, "\n"
                  "\n"
                  " \n",
                  time_to_string(&ts, NULL),
                  svn_id, XML_SUMMARY_VERSION, hostname) != EOF;

  /* One label element per MIB counter (literal corrupted). */
  for (j = 0; ok && j < MIB_COUNTER_T_MAX; ++j)
    ok &= fprintf(f, " <%s kind=\"%s\">%s\n",
                  mib_counter_label[j], mib_counter_kind[j],
                  (mib_counter_desc[j]
                   ? mib_counter_desc[j]
                   : X509_verify_cert_error_string(mib_counter_openssl[j])),
                  mib_counter_label[j]) != EOF;

  if (ok)
    ok &= fprintf(f, " \n") != EOF;

  /* Per-object validation status records (literals corrupted). */
  for (i = 0; ok && i < sk_validation_status_t_num(rc->validation_status); i++) {
    validation_status_t *v = sk_validation_status_t_value(rc->validation_status, i);
    assert(v);

    (void) time_to_string(&ts, &v->timestamp);

    for (code = (mib_counter_t) 0; ok && code < MIB_COUNTER_T_MAX; code++) {
      if (validation_status_get_code(v, code)) {
        if (ok)
          ok &= fprintf(f, " generation == object_generation_current ||
              v->generation == object_generation_backup))
          ok &= fprintf(f, " generation=\"%s\"",
                        object_generation_label[v->generation]) != EOF;
        if (ok)
          ok &= fprintf(f, ">%s\n", v->uri.s) != EOF;
      }
    }
  }

  /* Per-URI rsync history records (literals corrupted). */
  for (i = 0; ok && i < sk_rsync_history_t_num(rc->rsync_history); i++) {
    rsync_history_t *h = sk_rsync_history_t_value(rc->rsync_history, i);
    assert(h);

    if (ok)
      ok &= fprintf(f, " started)
      ok &= fprintf(f, " started=\"%s\"",
                    time_to_string(&ts, &h->started)) != EOF;
    if (ok && h->finished)
      ok &= fprintf(f, " finished=\"%s\"",
                    time_to_string(&ts, &h->finished)) != EOF;
    if (ok && h->status != rsync_status_done)
      ok &= fprintf(f, " error=\"%u\"", (unsigned) h->status) != EOF;
    if (ok)
      ok &= fprintf(f, ">%s%s\n",
                    h->uri.s, (h->final_slash ? "/" : "")) != EOF;
  }

  if (ok)
    ok &= fprintf(f, "\n") != EOF;

  if (f && !use_stdout)
    ok &= fclose(f) != EOF;

  if (ok && !use_stdout)
    ok &= rename(xmltemp.s, xmlfile) == 0;

  if (!ok)
    logmsg(rc, log_sys_err, "Couldn't write XML summary to %s: %s",
           (use_stdout ? "standard output" : xmlfile), strerror(errno));

  if (!ok && !use_stdout)
    (void) unlink(xmltemp.s);

  return ok;
}



/**
 * Long options.
 *
 * NOTE(review): the help strings for "use-stderr" and "use-syslog"
 * appear to be swapped -- 'e' sets use_stderr in main() but is
 * described as "log to syslog", and vice versa for 's'.  Fix the two
 * description strings.
 */
#define OPTIONS                                                         \
  QA('a', "authenticated",     "root of authenticated data tree")       \
  QA('c', "config",            "override default name of config file")  \
  QF('h', "help",              "print this help message")               \
  QA('j', "jitter",            "set jitter value")                      \
  QA('l', "log-level",         "set log level")                         \
  QA('u', "unauthenticated",   "root of unauthenticated data tree")     \
  QF('e', "use-stderr",        "log to syslog")                         \
  QF('s', "use-syslog",        "log to stderr")                         \
  QF('V', "version",           "print program version")                 \
  QA('x', "xml-file",          "set XML output file location")

/*
 * NOTE(review): this table appears unused -- main() builds its own
 * long_opts[] from the OPTIONS macro.  Candidate for removal once
 * confirmed nothing else references it.
 */
const static struct option longopts[] = {
  { "authenticated",   required_argument, NULL, 'a' },
  { "config",          required_argument, NULL, 'c' },
  { "help",            no_argument,       NULL, 'h' },
  { "jitter",          required_argument, NULL, 'j' },
  { "log-level",       required_argument, NULL, 'l' },
  { "unauthenticated", required_argument, NULL, 'u' },
  { "use-stderr",      no_argument,       NULL, 'e' },
  { "use-syslog",      no_argument,       NULL, 's' },
  { "version",         no_argument,       NULL, 'V' },
  { "xml-file",        required_argument, NULL, 'x' },
  { NULL }
};

/**
 * Wrapper around printf() to take arguments like logmsg().
 * If C had closures, usage() would use them instead of this silliness.
 */
static void logmsg_printf(const rcynic_ctx_t *rc,
                          const log_level_t level,
                          const char *fmt, ...)
{
  va_list ap;
  va_start(ap, fmt);
  vprintf(fmt, ap);
  putchar('\n');
  va_end(ap);
}

/**
 * Log usage message, either to stdout (for --help) or via logmsg().
 */
static void usage (const rcynic_ctx_t *rc, const char *jane)
{
  /* Route output through logmsg() when we have a context, raw printf otherwise. */
  void (*log)(const rcynic_ctx_t *, const log_level_t, const char *, ...) = rc ? logmsg : logmsg_printf;
  char left[80];

  if (rc && !jane)
    jane = rc->jane;

  log(rc, log_usage_err, "usage: %s [options]", jane);
  log(rc, log_usage_err, "options:");

  /* Expand OPTIONS once for flags (QF) and once for options taking an argument (QA). */
#define QF(_s_, _l_, _d_)                                       \
  (void) snprintf(left, sizeof(left), "-%c --%-32s", _s_, _l_); \
  log(rc, log_usage_err, " %s%s", left, _d_);

#define QA(_s_, _l_, _d_)                                                       \
  (void) snprintf(left, sizeof(left), "-%c ARG --%-32s", _s_, _l_ " ARG");      \
  log(rc, log_usage_err, " %s%s", left, _d_);

  OPTIONS;

#undef QA
#undef QF
}

/**
 * Main program.  Parse command line, read config file, iterate over
 * trust anchors found via config file and do a tree walk for each
 * trust anchor.
 */
int main(int argc, char *argv[])
{
  int opt_jitter = 0, use_syslog = 0, use_stderr = 0, syslog_facility = 0;
  int opt_syslog = 0, opt_stderr = 0, opt_level = 0, prune = 1;
  int opt_auth = 0, opt_unauth = 0, keep_lockfile = 0;
  char *lockfile = NULL, *xmlfile = NULL;
  char *cfg_file = "rcynic.conf";
  int c, i, ret = 1, jitter = 600, lockfd = -1;
  STACK_OF(CONF_VALUE) *cfg_section = NULL;
  CONF *cfg_handle = NULL;
  time_t start = 0, finish;
  rcynic_ctx_t rc;
  unsigned delay;
  long eline = 0;
  path_t ta_dir;

  /* Generate getopt short-option string from the OPTIONS macro. */
#define QF(_s_, _l_, _d_)       _s_,
#define QA(_s_, _l_, _d_)       _s_, ':',

  const static char short_opts[] = { OPTIONS '\0' };

#undef QA
#undef QF

  /* Generate the long-option table from the same macro. */
#define QF(_s_, _l_, _d_)       { _l_, no_argument,       NULL, _s_ },
#define QA(_s_, _l_, _d_)       { _l_, required_argument, NULL, _s_ },

  static struct option long_opts[] = { OPTIONS { NULL } };

#undef QA
#undef QF

  memset(&rc, 0, sizeof(rc));

  /* rc.jane is the program's basename, used in log prefixes. */
  if ((rc.jane = strrchr(argv[0], '/')) == NULL)
    rc.jane = argv[0];
  else
    rc.jane++;

  /* Built-in defaults; config file and command line may override below. */
  rc.log_level = log_data_err;
  rc.allow_stale_crl = 1;
  rc.allow_stale_manifest = 1;
  rc.allow_digest_mismatch = 1;
  rc.allow_crl_digest_mismatch = 1;
  rc.allow_nonconformant_name = 1;
  rc.allow_ee_without_signedObject = 1;
  rc.allow_1024_bit_ee_key = 1;
  rc.allow_wrong_cms_si_attributes = 1;
  rc.max_parallel_fetches = 1;
  rc.max_retries = 3;
  rc.retry_wait_min = 30;
  rc.run_rsync = 1;
  rc.rsync_timeout = 300;
  rc.max_select_time = 30;
  rc.rsync_early = 1;

#define QQ(x,y)   rc.priority[x] = y;
  LOG_LEVELS;
#undef QQ

  if (!set_directory(&rc, &rc.authenticated, "rcynic-data/authenticated", 0) ||
      !set_directory(&rc, &rc.unauthenticated, "rcynic-data/unauthenticated/", 1))
    goto done;

  OpenSSL_add_all_algorithms();
  ERR_load_crypto_strings();

  if (!create_missing_nids()) {
    logmsg(&rc, log_sys_err, "Couldn't initialize missing OIDs!");
    goto done;
  }

  memset(&ta_dir, 0, sizeof(ta_dir));

  opterr = 0;

  /* Command-line options; opt_* flags record explicit settings so the
   * config file doesn't override them later. */
  while ((c = getopt_long(argc, argv, short_opts, long_opts, NULL)) > 0) {
    switch (c) {
    case 'a':
      opt_auth = 1;
      if (!set_directory(&rc, &rc.authenticated, optarg, 0))
        goto done;
      break;
    case 'c':
      cfg_file = optarg;
      break;
    case 'l':
      opt_level = 1;
      if (!configure_logmsg(&rc, optarg))
        goto done;
      break;
    case 's':
      use_syslog = opt_syslog = 1;
      break;
    case 'e':
      use_stderr = opt_stderr = 1;
      break;
    case 'h':
      usage(NULL, rc.jane);
      ret = 0;
      goto done;
    case 'j':
      if (!configure_integer(&rc, &jitter, optarg))
        goto done;
      opt_jitter = 1;
      break;
    case 'u':
      opt_unauth = 1;
      if (!set_directory(&rc, &rc.unauthenticated, optarg, 1))
        goto done;
      break;
    case 'V':
      puts(svn_id);
      ret = 0;
      goto done;
    case 'x':
      xmlfile = strdup(optarg);
      break;
    default:
      usage(&rc, NULL);
      goto done;
    }
  }

  /* ASN.1 constants and the binarySigningTime OID used during validation. */
  if (!(asn1_zero = s2i_ASN1_INTEGER(NULL, "0x0")) ||
      !(asn1_four_octets = s2i_ASN1_INTEGER(NULL, "0xFFFFFFFF")) ||
      !(asn1_twenty_octets = s2i_ASN1_INTEGER(NULL, "0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF")) ||
      !(NID_binary_signing_time = OBJ_create("1.2.840.113549.1.9.16.2.46",
                                             "id-aa-binarySigningTime",
                                             "id-aa-binarySigningTime"))) {
    logmsg(&rc, log_sys_err, "Couldn't initialize ASN.1 constants!");
    goto done;
  }

  if ((cfg_handle = NCONF_new(NULL)) == NULL) {
    /* NOTE(review): "opbject" typo in this message. */
    logmsg(&rc, log_sys_err, "Couldn't create CONF opbject");
    goto done;
  }

  if (NCONF_load(cfg_handle, cfg_file, &eline) <= 0) {
    if (eline <= 0)
      logmsg(&rc, log_usage_err, "Couldn't load config file %s", cfg_file);
    else
      logmsg(&rc, log_usage_err, "Error on line %ld of config file %s", eline, cfg_file);
    goto done;
  }

  if (CONF_modules_load(cfg_handle, NULL, 0) <= 0) {
    logmsg(&rc, log_sys_err, "Couldn't configure OpenSSL");
    goto done;
  }

  if ((cfg_section = NCONF_get_section(cfg_handle, "rcynic")) == NULL) {
    logmsg(&rc, log_usage_err, "Couldn't load rcynic section from config file");
    goto done;
  }

  /* First pass over the [rcynic] config section: general settings.
   * Explicit command-line settings (opt_* flags) win over the file. */
  for (i = 0; i < sk_CONF_VALUE_num(cfg_section); i++) {
    CONF_VALUE *val = sk_CONF_VALUE_value(cfg_section, i);

    assert(val && val->name && val->value);

    if (!opt_auth &&
        !name_cmp(val->name, "authenticated") &&
        !set_directory(&rc, &rc.authenticated, val->value, 0))
      goto done;

    else if (!opt_unauth &&
             !name_cmp(val->name, "unauthenticated") &&
             !set_directory(&rc, &rc.unauthenticated, val->value, 1))
      goto done;

    else if (!name_cmp(val->name, "trust-anchor-directory") &&
             !set_directory(&rc, &ta_dir, val->value, 0))
      goto done;

    else if (!name_cmp(val->name, "rsync-timeout") &&
             !configure_integer(&rc, &rc.rsync_timeout, val->value))
      goto done;

    else if (!name_cmp(val->name, "max-parallel-fetches") &&
             !configure_integer(&rc, &rc.max_parallel_fetches, val->value))
      goto done;

    else if (!name_cmp(val->name, "max-select-time") &&
             !configure_unsigned_integer(&rc, &rc.max_select_time, val->value))
      goto done;

    else if (!name_cmp(val->name, "rsync-program"))
      rc.rsync_program = strdup(val->value);

    else if (!name_cmp(val->name, "lockfile"))
      lockfile = strdup(val->value);

    else if (!name_cmp(val->name, "keep-lockfile") &&
             !configure_boolean(&rc, &keep_lockfile, val->value))
      goto done;

    else if (!opt_jitter &&
             !name_cmp(val->name, "jitter") &&
             !configure_integer(&rc, &jitter, val->value))
      goto done;

    else if (!opt_level &&
             !name_cmp(val->name, "log-level") &&
             !configure_logmsg(&rc, val->value))
      goto done;

    else if (!opt_syslog &&
             !name_cmp(val->name, "use-syslog") &&
             !configure_boolean(&rc, &use_syslog, val->value))
      goto done;

    else if (!opt_stderr &&
             !name_cmp(val->name, "use-stderr") &&
             !configure_boolean(&rc, &use_stderr, val->value))
      goto done;

    else if (!name_cmp(val->name, "syslog-facility") &&
             !configure_syslog(&rc, &syslog_facility,
                               facilitynames, val->value))
      goto done;

    else if (!xmlfile &&
             (!name_cmp(val->name, "xml-file") ||
              !name_cmp(val->name, "xml-summary")))
      xmlfile = strdup(val->value);

    else if (!name_cmp(val->name, "allow-stale-crl") &&
             !configure_boolean(&rc, &rc.allow_stale_crl, val->value))
      goto done;

    else if (!name_cmp(val->name, "allow-stale-manifest") &&
             !configure_boolean(&rc, &rc.allow_stale_manifest, val->value))
      goto done;

    else if (!name_cmp(val->name, "allow-non-self-signed-trust-anchor") &&
             !configure_boolean(&rc, &rc.allow_non_self_signed_trust_anchor, val->value))
      goto done;

    else if (!name_cmp(val->name, "require-crl-in-manifest") &&
             !configure_boolean(&rc, &rc.require_crl_in_manifest, val->value))
      goto done;

    else if (!name_cmp(val->name, "allow-object-not-in-manifest") &&
             !configure_boolean(&rc, &rc.allow_object_not_in_manifest, val->value))
      goto done;

    else if (!name_cmp(val->name, "allow-digest-mismatch") &&
             !configure_boolean(&rc, &rc.allow_digest_mismatch, val->value))
      goto done;

    else if (!name_cmp(val->name, "allow-crl-digest-mismatch") &&
             !configure_boolean(&rc, &rc.allow_crl_digest_mismatch, val->value))
      goto done;

    else if (!name_cmp(val->name, "use-links") &&
             !configure_boolean(&rc, &rc.use_links, val->value))
      goto done;

    else if (!name_cmp(val->name, "prune") &&
             !configure_boolean(&rc, &prune, val->value))
      goto done;

    else if (!name_cmp(val->name, "run-rsync") &&
             !configure_boolean(&rc, &rc.run_rsync, val->value))
      goto done;

    else if (!name_cmp(val->name, "allow-nonconformant-name") &&
             !configure_boolean(&rc, &rc.allow_nonconformant_name, val->value))
      goto done;

    else if (!name_cmp(val->name, "allow-ee-without-signedObject") &&
             !configure_boolean(&rc, &rc.allow_ee_without_signedObject, val->value))
      goto done;

    else if (!name_cmp(val->name, "allow-1024-bit-ee-key") &&
             !configure_boolean(&rc, &rc.allow_1024_bit_ee_key, val->value))
      goto done;

    else if (!name_cmp(val->name, "allow-wrong-cms-si-attributes") &&
             !configure_boolean(&rc, &rc.allow_wrong_cms_si_attributes, val->value))
      goto done;

    else if (!name_cmp(val->name, "rsync-early") &&
             !configure_boolean(&rc, &rc.rsync_early, val->value))
      goto done;

    /*
     * Ugly, but the easiest way to handle all these strings.
     */

#define QQ(x,y)                                                         \
    else if (!name_cmp(val->name, "syslog-priority-" #x) &&             \
             !configure_syslog(&rc, &rc.priority[x],                    \
                               prioritynames, val->value))              \
      goto done;

    LOG_LEVELS;                 /* the semicolon is for emacs */

#undef QQ

  }

  /* Runtime data structures. */
  if ((rc.rsync_history = sk_rsync_history_t_new(rsync_history_cmp)) == NULL) {
    logmsg(&rc, log_sys_err, "Couldn't allocate rsync_history stack");
    goto done;
  }

  if ((rc.validation_status = sk_validation_status_t_new_null()) == NULL) {
    logmsg(&rc, log_sys_err, "Couldn't allocate validation_status stack");
    goto done;
  }

  if ((rc.x509_store = X509_STORE_new()) == NULL) {
    logmsg(&rc, log_sys_err, "Couldn't allocate X509_STORE");
    goto done;
  }

  if ((rc.rsync_queue = sk_rsync_ctx_t_new_null()) == NULL) {
    logmsg(&rc, log_sys_err, "Couldn't allocate rsync_queue");
    goto done;
  }

  if ((rc.task_queue = sk_task_t_new_null()) == NULL) {
    logmsg(&rc, log_sys_err, "Couldn't allocate task_queue");
    goto done;
  }

  rc.use_syslog = use_syslog;

  if (use_syslog)
    openlog(rc.jane,
            LOG_PID | (use_stderr ? LOG_PERROR : 0),
            (syslog_facility ? syslog_facility : LOG_LOCAL0));

  /* Random startup delay so cron-driven fleets don't hammer repositories in sync. */
  if (jitter > 0) {
    if (RAND_bytes((unsigned char *) &delay, sizeof(delay)) <= 0) {
      logmsg(&rc, log_sys_err, "Couldn't read random bytes");
      goto done;
    }
    delay %= jitter;
    logmsg(&rc, log_telemetry, "Delaying %u seconds before startup", delay);
    while (delay > 0)
      delay = sleep(delay);
  }

  /* Optional lockfile to prevent concurrent runs. */
  if (lockfile &&
      ((lockfd = open(lockfile, O_RDWR|O_CREAT|O_NONBLOCK, 0666)) < 0 ||
       lockf(lockfd, F_TLOCK, 0) < 0)) {
    if (lockfd >= 0 && errno == EAGAIN)
      logmsg(&rc, log_telemetry, "Lock %s held by another process", lockfile);
    else
      logmsg(&rc, log_sys_err, "Problem locking %s: %s", lockfile, strerror(errno));
    lockfd = -1;
    goto done;
  }

  start = time(0);
  logmsg(&rc, log_telemetry, "Starting");

  if (!construct_directory_names(&rc))
    goto done;

  if (!access(rc.new_authenticated.s, F_OK)) {
    logmsg(&rc, log_sys_err,
           "Timestamped output directory %s already exists!  Clock went backwards?",
           rc.new_authenticated.s);
    goto done;
  }

  if (!mkdir_maybe(&rc, &rc.new_authenticated)) {
    logmsg(&rc, log_sys_err, "Couldn't prepare directory %s: %s",
           rc.new_authenticated.s, strerror(errno));
    goto done;
  }

  /* Second pass over the config section: trust anchor directives. */
  for (i = 0; i < sk_CONF_VALUE_num(cfg_section); i++) {
    CONF_VALUE *val = sk_CONF_VALUE_value(cfg_section, i);

    assert(val && val->name && val->value);

    if (!name_cmp(val->name, "trust-anchor-uri-with-key") ||
        !name_cmp(val->name, "indirect-trust-anchor")) {
      logmsg(&rc, log_usage_err,
             "Directive \"%s\" is obsolete -- please use \"trust-anchor-locator\" instead",
             val->name);
      goto done;
    }

    if ((!name_cmp(val->name, "trust-anchor") && !check_ta_cer(&rc, val->value)) ||
        (!name_cmp(val->name, "trust-anchor-locator") && !check_ta_tal(&rc, val->value)))
      goto done;
  }

  if (*ta_dir.s != '\0' && !check_ta_dir(&rc, ta_dir.s))
    goto done;

  /* Main event loop: alternate between queued walk tasks and rsync I/O. */
  while (sk_task_t_num(rc.task_queue) > 0 || sk_rsync_ctx_t_num(rc.rsync_queue) > 0) {
    task_run_q(&rc);
    rsync_mgr(&rc);
  }

  logmsg(&rc, log_telemetry, "Event loop done, beginning final output and cleanup");

  if (!finalize_directories(&rc))
    goto done;

  if (prune && rc.run_rsync &&
      !prune_unauthenticated(&rc, &rc.unauthenticated,
                             strlen(rc.unauthenticated.s))) {
    logmsg(&rc, log_sys_err, "Trouble pruning old unauthenticated data");
    goto done;
  }

  if (!write_xml_file(&rc, xmlfile))
    goto done;

  ret = 0;

 done:
  log_openssl_errors(&rc);

  /*
   * Do NOT free cfg_section, NCONF_free() takes care of that
   */
  sk_validation_status_t_pop_free(rc.validation_status, validation_status_t_free);
  sk_rsync_history_t_pop_free(rc.rsync_history, rsync_history_t_free);
  validation_status_t_free(rc.validation_status_in_waiting);
  X509_STORE_free(rc.x509_store);
  NCONF_free(cfg_handle);
  CONF_modules_free();
  EVP_cleanup();
  ERR_free_strings();
  if (rc.rsync_program)
    free(rc.rsync_program);
  if (lockfile && lockfd >= 0 && !keep_lockfile)
    unlink(lockfile);
  if (lockfile)
    free(lockfile);
  if (xmlfile)
    free(xmlfile);

  if (start) {
    finish = time(0);
    logmsg(&rc, log_telemetry,
           "Finished, elapsed time %u:%02u:%02u",
           (unsigned) ((finish - start) / 3600),
           (unsigned) ((finish - start) / 60 % 60),
           (unsigned) ((finish - start) % 60));
  }

  return ret;
}
diff --git a/rp/rcynic/rcynic.xsl b/rp/rcynic/rcynic.xsl
new file mode 100644
index 00000000..487cd2f6
--- /dev/null
+++ b/rp/rcynic/rcynic.xsl
@@ -0,0 +1,312 @@
















Generators

$Id$


rcynic summary



<xsl:value-of select="$title"/>







      + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + + + 0 + + + + + + + + + + + + +
      +

      Grand Totals

      + + + + + + + + + + + + + + + + + +
      + +
      Total
      +
      + + +
      +

      Summaries by Repository Host

      + + + +
      +

      + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      + +
      + + + + + +
      Total + + + + + +
      +
      + + + +
      +

      Problems

      + + + + + + + + + + + + + + + + + + + +
      StatusURI
      +
      +
      + + + +
      +

      Validation Status

      + + + + + + + + + + + + + + + + + + + + + +
      TimestampGenerationStatusURI
      +
      + + + +
      + +


diff --git a/rp/rcynic/rpki-torrent.py b/rp/rcynic/rpki-torrent.py
new file mode 100644
index 00000000..9b97f298
--- /dev/null
+++ b/rp/rcynic/rpki-torrent.py
@@ -0,0 +1,721 @@
#!/usr/local/bin/python

# $Id$
#
# Copyright (C) 2013--2014  Dragon Research Labs ("DRL")
# Portions copyright (C) 2012  Internet Systems Consortium ("ISC")
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notices and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL
# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS.  IN NO EVENT SHALL DRL OR
# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA
# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.

# NOTE: Python 2 codebase (urllib2, httplib, ConfigParser, etc.).

import urllib2
import httplib
import socket
import ssl
import urlparse
import zipfile
import sys
import os
import email.utils
import base64
import hashlib
import subprocess
import syslog
import traceback
import ConfigParser
import stat
import time
import errno
import fcntl
import argparse
import smtplib
import email.mime.text

import transmissionrpc

# Environment variables Transmission sets when it invokes us as a
# torrent-completion hook.
tr_env_vars = ("TR_TORRENT_DIR", "TR_TORRENT_ID", "TR_TORRENT_NAME")

# Application-specific exception hierarchy; each docstring doubles as
# the human-readable error description.

class WrongServer(Exception):
  "Hostname not in X.509v3 subjectAltName extension."

class UnexpectedRedirect(Exception):
  "Unexpected HTTP redirect."

class WrongMode(Exception):
  "Wrong operation for mode."

class BadFormat(Exception):
  "Zip file does not match our expectations."

class InconsistentEnvironment(Exception):
  "Environment variables received from Transmission aren't consistent."
+ +class TorrentNotReady(Exception): + "Torrent is not ready for checking." + +class TorrentDoesNotMatchManifest(Exception): + "Retrieved torrent does not match manifest." + +class TorrentNameDoesNotMatchURL(Exception): + "Torrent name doesn't uniquely match a URL." + +class CouldNotFindTorrents(Exception): + "Could not find torrent(s) with given name(s)." + +class UseTheSourceLuke(Exception): + "Use The Source, Luke." + +def main(): + try: + syslog_flags = syslog.LOG_PID; + if os.isatty(sys.stderr.fileno()): + syslog_flags |= syslog.LOG_PERROR + syslog.openlog("rpki-torrent", syslog_flags) + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("-c", "--config", + help = "configuration file") + args = parser.parse_args() + + global cfg + cfg = MyConfigParser() + cfg.read(args.config or + [os.path.join(dn, fn) + for fn in ("rcynic.conf", "rpki.conf") + for dn in ("/var/rcynic/etc", "/usr/local/etc", "/etc")]) + + if cfg.act_as_generator: + if len(argv) == 1 and argv[0] == "generate": + generator_main() + elif len(argv) == 1 and argv[0] == "mirror": + mirror_main() + else: + raise UseTheSourceLuke + + else: + if len(argv) == 0 and all(v in os.environ for v in tr_env_vars): + torrent_completion_main() + elif len(argv) == 1 and argv[0] == "poll": + poll_main() + else: + raise UseTheSourceLuke + + except Exception, e: + for line in traceback.format_exc().splitlines(): + syslog.syslog(line) + sys.exit(1) + + +def generator_main(): + import paramiko + + class SFTPClient(paramiko.SFTPClient): + def atomic_rename(self, oldpath, newpath): + oldpath = self._adjust_cwd(oldpath) + newpath = self._adjust_cwd(newpath) + self._log(paramiko.common.DEBUG, 'atomic_rename(%r, %r)' % (oldpath, newpath)) + self._request(paramiko.sftp.CMD_EXTENDED, "posix-rename@openssh.com", oldpath, newpath) + + z = ZipFile(url = cfg.generate_url, dir = cfg.zip_dir) + client = TransmissionClient() + + client.remove_torrents(z.torrent_name) + + download_dir = 
client.get_session().download_dir + torrent_dir = os.path.join(download_dir, z.torrent_name) + torrent_file = os.path.join(cfg.zip_dir, z.torrent_name + ".torrent") + + + syslog.syslog("Synchronizing local data from %s to %s" % (cfg.unauthenticated, torrent_dir)) + subprocess.check_call((cfg.rsync_prog, "--archive", "--delete", + os.path.normpath(cfg.unauthenticated) + "/", + os.path.normpath(torrent_dir) + "/")) + + syslog.syslog("Creating %s" % torrent_file) + try: + os.unlink(torrent_file) + except OSError, e: + if e.errno != errno.ENOENT: + raise + ignore_output_for_now = subprocess.check_output( + (cfg.mktorrent_prog, + "-a", cfg.tracker_url, + "-c", "RPKI unauthenticated data snapshot generated by rpki-torrent", + "-o", torrent_file, + torrent_dir)) + + syslog.syslog("Generating manifest") + manifest = create_manifest(download_dir, z.torrent_name) + + syslog.syslog("Loading %s with unlimited seeding" % torrent_file) + f = open(torrent_file, "rb") + client.add(base64.b64encode(f.read())) + f.close() + client.unlimited_seeding(z.torrent_name) + + syslog.syslog("Creating upload connection") + ssh = paramiko.Transport((cfg.sftp_host, cfg.sftp_port)) + try: + hostkeys = paramiko.util.load_host_keys(cfg.sftp_hostkey_file)[cfg.sftp_host]["ssh-rsa"] + except ConfigParser.Error: + hostkeys = None + ssh.connect( + username = cfg.sftp_user, + hostkey = hostkeys, + pkey = paramiko.RSAKey.from_private_key_file(cfg.sftp_private_key_file)) + sftp = SFTPClient.from_transport(ssh) + + zip_filename = os.path.join("data", os.path.basename(z.filename)) + zip_tempname = zip_filename + ".new" + + syslog.syslog("Creating %s" % zip_tempname) + f = sftp.open(zip_tempname, "wb") + z.set_output_stream(f) + + syslog.syslog("Writing %s to zip" % torrent_file) + z.write( + torrent_file, + arcname = os.path.basename(torrent_file), + compress_type = zipfile.ZIP_DEFLATED) + + manifest_name = z.torrent_name + ".manifest" + + syslog.syslog("Writing %s to zip" % manifest_name) + zi = 
zipfile.ZipInfo(manifest_name, time.gmtime()[:6]) + zi.external_attr = (stat.S_IFREG | 0644) << 16 + zi.internal_attr = 1 # Text, not binary + z.writestr(zi, + "".join("%s %s\n" % (v, k) for k, v in manifest.iteritems()), + zipfile.ZIP_DEFLATED) + + syslog.syslog("Closing %s and renaming to %s" % (zip_tempname, zip_filename)) + z.close() + f.close() + sftp.atomic_rename(zip_tempname, zip_filename) + + syslog.syslog("Closing upload connection") + ssh.close() + +def mirror_main(): + client = TransmissionClient() + torrent_names = [] + + for zip_url in cfg.zip_urls: + if zip_url != cfg.generate_url: + z = ZipFile(url = zip_url, dir = cfg.zip_dir, ta = cfg.zip_ta) + if z.fetch(): + client.remove_torrents(z.torrent_name) + syslog.syslog("Mirroring torrent %s" % z.torrent_name) + client.add(z.get_torrent()) + torrent_names.append(z.torrent_name) + + if torrent_names: + client.unlimited_seeding(*torrent_names) + + +def poll_main(): + for zip_url in cfg.zip_urls: + + z = ZipFile(url = zip_url, dir = cfg.zip_dir, ta = cfg.zip_ta) + client = TransmissionClient() + + if z.fetch(): + client.remove_torrents(z.torrent_name) + syslog.syslog("Adding torrent %s" % z.torrent_name) + client.add(z.get_torrent()) + + elif cfg.run_rcynic_anyway: + run_rcynic(client, z) + + +def torrent_completion_main(): + torrent_name = os.getenv("TR_TORRENT_NAME") + torrent_id = int(os.getenv("TR_TORRENT_ID")) + + z = ZipFile(url = cfg.find_url(torrent_name), dir = cfg.zip_dir, ta = cfg.zip_ta) + client = TransmissionClient() + torrent = client.info([torrent_id]).popitem()[1] + + if torrent.name != torrent_name: + raise InconsistentEnvironment("Torrent name %s does not match ID %d" % (torrent_name, torrent_id)) + + if z.torrent_name != torrent_name: + raise InconsistentEnvironment("Torrent name %s does not match torrent name in zip file %s" % (torrent_name, z.torrent_name)) + + if torrent is None or torrent.progress != 100: + raise TorrentNotReady("Torrent %s not ready for checking, how did I get 
here?" % torrent_name) + + log_email("Download complete %s" % z.url) + + run_rcynic(client, z) + + +def run_rcynic(client, z): + """ + Run rcynic and any post-processing we might want. + """ + + if cfg.lockfile is not None: + syslog.syslog("Acquiring lock %s" % cfg.lockfile) + lock = os.open(cfg.lockfile, os.O_WRONLY | os.O_CREAT, 0600) + fcntl.flock(lock, fcntl.LOCK_EX) + else: + lock = None + + syslog.syslog("Checking manifest against disk") + + download_dir = client.get_session().download_dir + + manifest_from_disk = create_manifest(download_dir, z.torrent_name) + manifest_from_zip = z.get_manifest() + + excess_files = set(manifest_from_disk) - set(manifest_from_zip) + for fn in excess_files: + del manifest_from_disk[fn] + + if manifest_from_disk != manifest_from_zip: + raise TorrentDoesNotMatchManifest("Manifest for torrent %s does not match what we got" % + z.torrent_name) + + if excess_files: + syslog.syslog("Cleaning up excess files") + for fn in excess_files: + os.unlink(os.path.join(download_dir, fn)) + + syslog.syslog("Running rcynic") + log_email("Starting rcynic %s" % z.url) + subprocess.check_call((cfg.rcynic_prog, + "-c", cfg.rcynic_conf, + "-u", os.path.join(client.get_session().download_dir, z.torrent_name))) + log_email("Completed rcynic %s" % z.url) + + for cmd in cfg.post_rcynic_commands: + syslog.syslog("Running post-rcynic command: %s" % cmd) + subprocess.check_call(cmd, shell = True) + + if lock is not None: + syslog.syslog("Releasing lock %s" % cfg.lockfile) + os.close(lock) + +# See http://www.minstrel.org.uk/papers/sftp/ for details on how to +# set up safe upload-only SFTP directories on the server. In +# particular http://www.minstrel.org.uk/papers/sftp/builtin/ is likely +# to be the right path. + + +class ZipFile(object): + """ + Augmented version of standard python zipfile.ZipFile class, with + some extra methods and specialized capabilities. 
+ + All methods of the standard zipfile.ZipFile class are supported, but + the constructor arguments are different, and opening the zip file + itself is deferred until a call which requires this, since the file + may first need to be fetched via HTTPS. + """ + + def __init__(self, url, dir, ta = None, verbose = True): + self.url = url + self.dir = dir + self.ta = ta + self.verbose = verbose + self.filename = os.path.join(dir, os.path.basename(url)) + self.changed = False + self.zf = None + self.peercert = None + self.torrent_name, zip_ext = os.path.splitext(os.path.basename(url)) + if zip_ext != ".zip": + raise BadFormat + + + def __getattr__(self, name): + if self.zf is None: + self.zf = zipfile.ZipFile(self.filename) + return getattr(self.zf, name) + + + def build_opener(self): + """ + Voodoo to create a urllib2.OpenerDirector object with TLS + certificate checking enabled and a hook to set self.peercert so + our caller can check the subjectAltName field. + + You probably don't want to look at this if you can avoid it. + """ + + assert self.ta is not None + + # Yes, we're constructing one-off classes. Look away, look away. + + class HTTPSConnection(httplib.HTTPSConnection): + zip = self + def connect(self): + sock = socket.create_connection((self.host, self.port), self.timeout) + if getattr(self, "_tunnel_host", None): + self.sock = sock + self._tunnel() + self.sock = ssl.wrap_socket(sock, + keyfile = self.key_file, + certfile = self.cert_file, + cert_reqs = ssl.CERT_REQUIRED, + ssl_version = ssl.PROTOCOL_TLSv1, + ca_certs = self.zip.ta) + self.zip.peercert = self.sock.getpeercert() + + class HTTPSHandler(urllib2.HTTPSHandler): + def https_open(self, req): + return self.do_open(HTTPSConnection, req) + + return urllib2.build_opener(HTTPSHandler) + + + def check_subjectAltNames(self): + """ + Check self.peercert against URL to make sure we were talking to + the right HTTPS server. 
+ """ + + hostname = urlparse.urlparse(self.url).hostname + subjectAltNames = set(i[1] + for i in self.peercert.get("subjectAltName", ()) + if i[0] == "DNS") + if hostname not in subjectAltNames: + raise WrongServer + + + def download_file(self, r, bufsize = 4096): + """ + Downloaded file to disk. + """ + + tempname = self.filename + ".new" + f = open(tempname, "wb") + n = int(r.info()["Content-Length"]) + for i in xrange(0, n - bufsize, bufsize): + f.write(r.read(bufsize)) + f.write(r.read()) + f.close() + mtime = email.utils.mktime_tz(email.utils.parsedate_tz(r.info()["Last-Modified"])) + os.utime(tempname, (mtime, mtime)) + os.rename(tempname, self.filename) + + + def set_output_stream(self, stream): + """ + Set up this zip file for writing to a network stream. + """ + + assert self.zf is None + self.zf = zipfile.ZipFile(stream, "w") + + + def fetch(self): + """ + Fetch zip file from URL given to constructor. + """ + + headers = { "User-Agent" : "rpki-torrent" } + try: + headers["If-Modified-Since"] = email.utils.formatdate( + os.path.getmtime(self.filename), False, True) + except OSError: + pass + + syslog.syslog("Checking %s..." 
% self.url) + try: + r = self.build_opener().open(urllib2.Request(self.url, None, headers)) + syslog.syslog("%s has changed, starting download" % self.url) + self.changed = True + log_email("Downloading %s" % self.url) + except urllib2.HTTPError, e: + if e.code == 304: + syslog.syslog("%s has not changed" % self.url) + elif e.code == 404: + syslog.syslog("%s does not exist" % self.url) + else: + raise + r = None + + self.check_subjectAltNames() + + if r is not None and r.geturl() != self.url: + raise UnexpectedRedirect + + if r is not None: + self.download_file(r) + r.close() + + return self.changed + + + def check_format(self): + """ + Make sure that format of zip file matches our preconceptions: it + should contain two files, one of which is the .torrent file, the + other is the manifest, with names derived from the torrent name + inferred from the URL. + """ + + if set(self.namelist()) != set((self.torrent_name + ".torrent", self.torrent_name + ".manifest")): + raise BadFormat + + + def get_torrent(self): + """ + Extract torrent file from zip file, encoded in Base64 because + that's what the transmisionrpc library says it wants. + """ + + self.check_format() + return base64.b64encode(self.read(self.torrent_name + ".torrent")) + + + def get_manifest(self): + """ + Extract manifest from zip file, as a dictionary. + + For the moment we're fixing up the internal file names from the + format that the existing shell-script prototype uses, but this + should go away once this program both generates and checks the + manifests. + """ + + self.check_format() + result = {} + for line in self.open(self.torrent_name + ".manifest"): + h, fn = line.split() + # + # Fixup for earlier manifest format, this should go away + if not fn.startswith(self.torrent_name): + fn = os.path.normpath(os.path.join(self.torrent_name, fn)) + # + result[fn] = h + return result + + +def create_manifest(topdir, torrent_name): + """ + Generate a manifest, expressed as a dictionary. 
+ """ + + result = {} + topdir = os.path.abspath(topdir) + for dirpath, dirnames, filenames in os.walk(os.path.join(topdir, torrent_name)): + for filename in filenames: + filename = os.path.join(dirpath, filename) + f = open(filename, "rb") + result[os.path.relpath(filename, topdir)] = hashlib.sha256(f.read()).hexdigest() + f.close() + return result + + +def log_email(msg, subj = None): + try: + if not msg.endswith("\n"): + msg += "\n" + if subj is None: + subj = msg.partition("\n")[0] + m = email.mime.text.MIMEText(msg) + m["Date"] = time.strftime("%d %b %Y %H:%M:%S +0000", time.gmtime()) + m["From"] = cfg.log_email + m["To"] = cfg.log_email + m["Subject"] = subj + s = smtplib.SMTP("localhost") + s.sendmail(cfg.log_email, [cfg.log_email], m.as_string()) + s.quit() + except ConfigParser.Error: + pass + + +class TransmissionClient(transmissionrpc.client.Client): + """ + Extension of transmissionrpc.client.Client. + """ + + def __init__(self, **kwargs): + kwargs.setdefault("address", "127.0.0.1") + kwargs.setdefault("user", cfg.transmission_username) + kwargs.setdefault("password", cfg.transmission_password) + transmissionrpc.client.Client.__init__(self, **kwargs) + + + def find_torrents(self, *names): + """ + Find torrents with given name(s), return id(s). + """ + + result = [i for i, t in self.list().iteritems() if t.name in names] + if not result: + raise CouldNotFindTorrents + return result + + + def remove_torrents(self, *names): + """ + Remove any torrents with the given name(s). + """ + + try: + ids = self.find_torrents(*names) + except CouldNotFindTorrents: + pass + else: + syslog.syslog("Removing torrent%s %s (%s)" % ( + "" if len(ids) == 1 else "s", + ", ".join(names), + ", ".join("#%s" % i for i in ids))) + self.remove(ids) + + def unlimited_seeding(self, *names): + """ + Set unlimited seeding for specified torrents. 
+ """ + + # Apparently seedRatioMode = 2 means "no limit" + try: + self.change(self.find_torrents(*names), seedRatioMode = 2) + except CouldNotFindTorrents: + syslog.syslog("Couldn't tweak seedRatioMode, blundering onwards") + + +class MyConfigParser(ConfigParser.RawConfigParser): + + rpki_torrent_section = "rpki-torrent" + + @property + def zip_dir(self): + return self.get(self.rpki_torrent_section, "zip_dir") + + @property + def zip_ta(self): + return self.get(self.rpki_torrent_section, "zip_ta") + + @property + def rcynic_prog(self): + return self.get(self.rpki_torrent_section, "rcynic_prog") + + @property + def rcynic_conf(self): + return self.get(self.rpki_torrent_section, "rcynic_conf") + + @property + def run_rcynic_anyway(self): + return self.getboolean(self.rpki_torrent_section, "run_rcynic_anyway") + + @property + def generate_url(self): + return self.get(self.rpki_torrent_section, "generate_url") + + @property + def act_as_generator(self): + try: + return self.get(self.rpki_torrent_section, "generate_url") != "" + except ConfigParser.Error: + return False + + @property + def rsync_prog(self): + return self.get(self.rpki_torrent_section, "rsync_prog") + + @property + def mktorrent_prog(self): + return self.get(self.rpki_torrent_section, "mktorrent_prog") + + @property + def tracker_url(self): + return self.get(self.rpki_torrent_section, "tracker_url") + + @property + def sftp_host(self): + return self.get(self.rpki_torrent_section, "sftp_host") + + @property + def sftp_port(self): + try: + return self.getint(self.rpki_torrent_section, "sftp_port") + except ConfigParser.Error: + return 22 + + @property + def sftp_user(self): + return self.get(self.rpki_torrent_section, "sftp_user") + + @property + def sftp_hostkey_file(self): + return self.get(self.rpki_torrent_section, "sftp_hostkey_file") + + @property + def sftp_private_key_file(self): + return self.get(self.rpki_torrent_section, "sftp_private_key_file") + + @property + def lockfile(self): + try: + 
return self.get(self.rpki_torrent_section, "lockfile") + except ConfigParser.Error: + return None + + @property + def unauthenticated(self): + try: + return self.get(self.rpki_torrent_section, "unauthenticated") + except ConfigParser.Error: + return self.get("rcynic", "unauthenticated") + + @property + def log_email(self): + return self.get(self.rpki_torrent_section, "log_email") + + @property + def transmission_username(self): + try: + return self.get(self.rpki_torrent_section, "transmission_username") + except ConfigParser.Error: + return None + + @property + def transmission_password(self): + try: + return self.get(self.rpki_torrent_section, "transmission_password") + except ConfigParser.Error: + return None + + def multioption_iter(self, name, getter = None): + if getter is None: + getter = self.get + if self.has_option(self.rpki_torrent_section, name): + yield getter(self.rpki_torrent_section, name) + name += "." + names = [i for i in self.options(self.rpki_torrent_section) if i.startswith(name) and i[len(name):].isdigit()] + names.sort(key = lambda s: int(s[len(name):])) + for name in names: + yield getter(self.rpki_torrent_section, name) + + @property + def zip_urls(self): + return self.multioption_iter("zip_url") + + @property + def post_rcynic_commands(self): + return self.multioption_iter("post_rcynic_command") + + def find_url(self, torrent_name): + urls = [u for u in self.zip_urls + if os.path.splitext(os.path.basename(u))[0] == torrent_name] + if len(urls) != 1: + raise TorrentNameDoesNotMatchURL("Can't find URL matching torrent name %s" % torrent_name) + return urls[0] + + +if __name__ == "__main__": + main() diff --git a/rp/rcynic/rules.darwin.mk b/rp/rcynic/rules.darwin.mk new file mode 100644 index 00000000..d37b0e75 --- /dev/null +++ b/rp/rcynic/rules.darwin.mk @@ -0,0 +1,108 @@ +# $Id$ + +install-user-and-group: .FORCE + @if /usr/bin/dscl . 
-read "/Groups/${RCYNIC_GROUP}" >/dev/null 2>&1; \ + then \ + echo "You already have a group \"${RCYNIC_GROUP}\", so I will use it."; \ + elif gid="$$(/usr/bin/dscl . -list /Groups PrimaryGroupID | /usr/bin/awk 'BEGIN {gid = 501} $$2 >= gid {gid = 1 + $$2} END {print gid}')" && \ + /usr/bin/dscl . -create "/Groups/${RCYNIC_GROUP}" && \ + /usr/bin/dscl . -create "/Groups/${RCYNIC_GROUP}" RealName "${RCYNIC_GECOS}" && \ + /usr/bin/dscl . -create "/Groups/${RCYNIC_GROUP}" PrimaryGroupID "$$gid" && \ + /usr/bin/dscl . -create "/Groups/${RCYNIC_GROUP}" GeneratedUID "$$(/usr/bin/uuidgen)" && \ + /usr/bin/dscl . -create "/Groups/${RCYNIC_GROUP}" Password "*"; \ + then \ + echo "Added group \"${RCYNIC_GROUP}\"."; \ + else \ + echo "Adding group \"${RCYNIC_GROUP}\" failed..."; \ + echo "Please create it, then try again."; \ + exit 1; \ + fi; \ + if /usr/bin/dscl . -read "/Users/${RCYNIC_USER}" >/dev/null 2>&1; \ + then \ + echo "You already have a user \"${RCYNIC_USER}\", so I will use it."; \ + elif uid="$$(/usr/bin/dscl . -list /Users UniqueID | /usr/bin/awk 'BEGIN {uid = 501} $$2 >= uid {uid = 1 + $$2} END {print uid}')" && \ + /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" && \ + /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" UserShell "/usr/bin/false" && \ + /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" RealName "${RCYNIC_GECOS}" && \ + /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" UniqueID "$$uid" && \ + /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" PrimaryGroupID "$$gid" && \ + /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" NFSHomeDirectory "/var/empty" && \ + /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" GeneratedUID "$$(/usr/bin/uuidgen)" && \ + /usr/bin/dscl . -create "/Users/${RCYNIC_USER}" Password "*"; \ + then \ + echo "Added user \"${RCYNIC_USER}\"."; \ + else \ + echo "Adding user \"${RCYNIC_USER}\" failed..."; \ + echo "Please create it, then try again."; \ + exit 1; \ + fi + @if /usr/bin/dscl . 
-read "/Groups/${RPKIRTR_GROUP}" >/dev/null 2>&1; \ + then \ + echo "You already have a group \"${RPKIRTR_GROUP}\", so I will use it."; \ + elif gid="$$(/usr/bin/dscl . -list /Groups PrimaryGroupID | /usr/bin/awk 'BEGIN {gid = 501} $$2 >= gid {gid = 1 + $$2} END {print gid}')" && \ + /usr/bin/dscl . -create "/Groups/${RPKIRTR_GROUP}" && \ + /usr/bin/dscl . -create "/Groups/${RPKIRTR_GROUP}" RealName "${RPKIRTR_GECOS}" && \ + /usr/bin/dscl . -create "/Groups/${RPKIRTR_GROUP}" PrimaryGroupID "$$gid" && \ + /usr/bin/dscl . -create "/Groups/${RPKIRTR_GROUP}" GeneratedUID "$$(/usr/bin/uuidgen)" && \ + /usr/bin/dscl . -create "/Groups/${RPKIRTR_GROUP}" Password "*"; \ + then \ + echo "Added group \"${RPKIRTR_GROUP}\"."; \ + else \ + echo "Adding group \"${RPKIRTR_GROUP}\" failed..."; \ + echo "Please create it, then try again."; \ + exit 1; \ + fi; \ + if /usr/bin/dscl . -read "/Users/${RPKIRTR_USER}" >/dev/null 2>&1; \ + then \ + echo "You already have a user \"${RPKIRTR_USER}\", so I will use it."; \ + elif uid="$$(/usr/bin/dscl . -list /Users UniqueID | /usr/bin/awk 'BEGIN {uid = 501} $$2 >= uid {uid = 1 + $$2} END {print uid}')" && \ + /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" && \ + /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" UserShell "/usr/bin/false" && \ + /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" RealName "${RPKIRTR_GECOS}" && \ + /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" UniqueID "$$uid" && \ + /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" PrimaryGroupID "$$gid" && \ + /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" NFSHomeDirectory "/var/empty" && \ + /usr/bin/dscl . -create "/Users/${RPKIRTR_USER}" GeneratedUID "$$(/usr/bin/uuidgen)" && \ + /usr/bin/dscl . 
-create "/Users/${RPKIRTR_USER}" Password "*"; \ + then \ + echo "Added user \"${RPKIRTR_USER}\"."; \ + else \ + echo "Adding user \"${RPKIRTR_USER}\" failed..."; \ + echo "Please create it, then try again."; \ + exit 1; \ + fi + + +install-shared-libraries: .FORCE + @echo "Copying required shared libraries" + @shared_libraries="${RCYNIC_DIR}/bin/rcynic ${RCYNIC_DIR}/bin/rsync"; \ + while true; \ + do \ + closure="$$(/usr/bin/otool -L $${shared_libraries} | /usr/bin/awk '/:$$/ {next} {print $$1}' | /usr/bin/sort -u)"; \ + if test "x$$shared_libraries" = "x$$closure"; + then \ + break; \ + else \ + shared_libraries="$$closure"; \ + fi; \ + done; \ + for shared in /usr/lib/dyld $$shared_libraries; \ + do \ + if /bin/test -r "${RCYNIC_DIR}/$${shared}"; \ + then \ + echo "You already have a \"${RCYNIC_DIR}/$${shared}\", so I will use it"; \ + elif /usr/bin/install -m 555 -o root -g wheel -p "$${shared}" "${RCYNIC_DIR}/$${shared}"; \ + then \ + echo "Copied $${shared} into ${RCYNIC_DIR}"; \ + else \ + echo "Unable to copy $${shared} into ${RCYNIC_DIR}"; \ + exit 1; \ + fi; \ + done + +install-rc-scripts: + ${INSTALL} -o root -g wheel -d ${DESTDIR}/Library/StartupItems/RCynic + ${INSTALL} -o root -g wheel -m 555 \ + rc-scripts/darwin/RCynic \ + rc-scripts/darwin/StartupParameters.plist \ + ${DESTDIR}/Library/Startup/RCynic diff --git a/rp/rcynic/rules.freebsd.mk b/rp/rcynic/rules.freebsd.mk new file mode 100644 index 00000000..5233386e --- /dev/null +++ b/rp/rcynic/rules.freebsd.mk @@ -0,0 +1,56 @@ +# $Id$ + +install-user-and-group: .FORCE + @if /usr/sbin/pw groupshow "${RCYNIC_GROUP}" 2>/dev/null; \ + then \ + echo "You already have a group \"${RCYNIC_GROUP}\", so I will use it."; \ + elif /usr/sbin/pw groupadd ${RCYNIC_GROUP}; \ + then \ + echo "Added group \"${RCYNIC_GROUP}\"."; \ + else \ + echo "Adding group \"${RCYNIC_GROUP}\" failed..."; \ + echo "Please create it, then try again."; \ + exit 1; \ + fi + @if /usr/sbin/pw groupshow "${RPKIRTR_GROUP}" 2>/dev/null; \ 
+ then \ + echo "You already have a group \"${RPKIRTR_GROUP}\", so I will use it."; \ + elif /usr/sbin/pw groupadd ${RPKIRTR_GROUP}; \ + then \ + echo "Added group \"${RPKIRTR_GROUP}\"."; \ + else \ + echo "Adding group \"${RPKIRTR_GROUP}\" failed..."; \ + echo "Please create it, then try again."; \ + exit 1; \ + fi + @if /usr/sbin/pw usershow "${RCYNIC_USER}" 2>/dev/null; \ + then \ + echo "You already have a user \"${RCYNIC_USER}\", so I will use it."; \ + elif /usr/sbin/pw useradd ${RCYNIC_USER} -g ${RCYNIC_GROUP} -h - -d /nonexistant -s /usr/sbin/nologin -c "${RCYNIC_GECOS}" -G "${RPKIRTR_GROUP}"; \ + then \ + echo "Added user \"${RCYNIC_USER}\"."; \ + else \ + echo "Adding user \"${RCYNIC_USER}\" failed..."; \ + echo "Please create it, then try again."; \ + exit 1; \ + fi + @if /usr/sbin/pw usershow "${RPKIRTR_USER}" 2>/dev/null; \ + then \ + echo "You already have a user \"${RPKIRTR_USER}\", so I will use it."; \ + elif /usr/sbin/pw useradd ${RPKIRTR_USER} -g ${RPKIRTR_GROUP} -h - -d /nonexistant -s /usr/sbin/nologin -c "${RPKIRTR_GECOS}"; \ + then \ + echo "Added user \"${RPKIRTR_USER}\"."; \ + else \ + echo "Adding user \"${RPKIRTR_USER}\" failed..."; \ + echo "Please create it, then try again."; \ + exit 1; \ + fi + + +# We use static compilation on FreeBSD, so no need for shared libraries + +install-shared-libraries: + @true + +install-rc-scripts: + ${INSTALL} -m 555 -o root -g wheel -p rc-scripts/freebsd/rc.d.rcynic ${DESTDIR}/usr/local/etc/rc.d/rcynic diff --git a/rp/rcynic/rules.linux.mk b/rp/rcynic/rules.linux.mk new file mode 100644 index 00000000..6a962cef --- /dev/null +++ b/rp/rcynic/rules.linux.mk @@ -0,0 +1,92 @@ +# $Id$ + +install-user-and-group: .FORCE + @if getent group ${RCYNIC_GROUP} >/dev/null; \ + then \ + echo "You already have a group \"${RCYNIC_GROUP}\", so I will use it."; \ + elif /usr/sbin/groupadd ${RCYNIC_GROUP}; \ + then \ + echo "Added group \"${RCYNIC_GROUP}\"."; \ + else \ + echo "Adding group \"${RCYNIC_GROUP}\" failed..."; \ 
+ echo "Please create it, then try again."; \ + exit 1; \ + fi + @nogroup='-N'; \ + if test -f /etc/redhat-release; then read vendor release version < /etc/redhat-release; if test $$vendor = CentOS; then nogroup='-n'; fi; fi; \ + if getent passwd ${RCYNIC_USER} >/dev/null; \ + then \ + echo "You already have a user \"${RCYNIC_USER}\", so I will use it."; \ + elif /usr/sbin/useradd -g ${RCYNIC_GROUP} -M $$nogroup -d "${RCYNIC_DIR}" -s /sbin/nologin -c "${RCYNIC_GECOS}" ${RCYNIC_USER}; \ + then \ + echo "Added user \"${RCYNIC_USER}\"."; \ + else \ + echo "Adding user \"${RCYNIC_USER}\" failed..."; \ + echo "Please create it, then try again."; \ + exit 1; \ + fi + @if getent group ${RPKIRTR_GROUP} >/dev/null; \ + then \ + echo "You already have a group \"${RPKIRTR_GROUP}\", so I will use it."; \ + elif /usr/sbin/groupadd ${RPKIRTR_GROUP}; \ + then \ + echo "Added group \"${RPKIRTR_GROUP}\"."; \ + else \ + echo "Adding group \"${RPKIRTR_GROUP}\" failed..."; \ + echo "Please create it, then try again."; \ + exit 1; \ + fi + @nogroup='-N'; \ + if test -f /etc/redhat-release; then read vendor release version < /etc/redhat-release; if test $$vendor = CentOS; then nogroup='-n'; fi; fi; \ + if getent passwd ${RPKIRTR_USER} >/dev/null; \ + then \ + echo "You already have a user \"${RPKIRTR_USER}\", so I will use it."; \ + elif /usr/sbin/useradd -g ${RPKIRTR_GROUP} -M $$nogroup -d "${RPKIRTR_DIR}" -s /sbin/nologin -c "${RPKIRTR_GECOS}" ${RPKIRTR_USER}; \ + then \ + echo "Added user \"${RPKIRTR_USER}\"."; \ + else \ + echo "Adding user \"${RPKIRTR_USER}\" failed..."; \ + echo "Please create it, then try again."; \ + exit 1; \ + fi + usermod -a -G ${RPKIRTR_GROUP} ${RCYNIC_USER} + +install-shared-libraries: .FORCE + @echo "Copying required shared libraries" + @if test -d /lib64; then libdir=/lib64; else libdir=/lib; fi; \ + shared_libraries="${RCYNIC_DIR}/bin/rcynic ${RCYNIC_DIR}/bin/rsync $$(/usr/bin/find $${libdir} -name 'libnss*.so*' -print)"; \ + while true; \ + do \ + 
closure="$$(/usr/bin/ldd $${shared_libraries} | \ + ${AWK} ' \ + { sub(/:$/, "") } \ + $$0 == "${RCYNIC_DIR}/bin/rcynic" { next } \ + $$0 == "${RCYNIC_DIR}/bin/rsync" { next } \ + $$1 ~ /\/ld-linux\.so/ { next } \ + { for (i = 1; i <= NF; i++) if ($$i ~ /^\//) print $$i } \ + ' | \ + ${SORT} -u)"; \ + if test "X$$shared_libraries" = "X$$closure"; \ + then \ + break; \ + else \ + shared_libraries="$$closure"; \ + fi; \ + done; \ + if test -f $${libdir}/libresolv.so.2; \ + then \ + shared_libraries="$${shared_libraries} $${libdir}/libresolv.so.2"; + fi; \ + for shared in $${libdir}/*ld*.so* $$shared_libraries; \ + do \ + if test ! -r "${RCYNIC_DIR}/$${shared}"; \ + then \ + ${INSTALL} -m 555 -d `dirname "${RCYNIC_DIR}$${shared}"` && \ + ${INSTALL} -m 555 -p "$${shared}" "${RCYNIC_DIR}$${shared}"; \ + fi; \ + done + +# No devfs, so no rc script + +install-rc-scripts: + @true diff --git a/rp/rcynic/rules.unknown.mk b/rp/rcynic/rules.unknown.mk new file mode 100644 index 00000000..6ce3ea18 --- /dev/null +++ b/rp/rcynic/rules.unknown.mk @@ -0,0 +1,4 @@ +# $Id$ + +install-user-and-group install-shared-libraries install-rc-scripts: .FORCE + @echo "Don't know how to make $@ on this platform"; exit 1 diff --git a/rp/rcynic/sample-rcynic.conf b/rp/rcynic/sample-rcynic.conf new file mode 100644 index 00000000..6fa49b9c --- /dev/null +++ b/rp/rcynic/sample-rcynic.conf @@ -0,0 +1,39 @@ +# $Id$ +# +# Sample rcynic configuration file for jailed environment. 
+ +[rcynic] +rsync-program = /bin/rsync +authenticated = /data/authenticated +unauthenticated = /data/unauthenticated +lockfile = /data/lock +xml-summary = /data/rcynic.xml +jitter = 600 +use-syslog = true +log-level = log_usage_err + +# You need to specify some trust anchors here, eg: + +#trust-anchor.1 = /etc/trust-anchors/ta-1.cer +#trust-anchor.2 = /etc/trust-anchors/ta-2.cer + +# or, using the "Trust Anchor Locator" form: + +#trust-anchor-locator.1 = /etc/trust-anchors/ta-1.tal +#trust-anchor-locator.2 = /etc/trust-anchors/ta-2.tal + +# The choice between these two formats depends largely on the policies +# of the entity generating the corresponding trust anchor, ie, will +# probably be made for you by the generating entity. + +# If you already keep all your TAs and TALs in a single directory +# anyway, you can also use the trust-anchor-directory directive to +# name that directory and let rcynic track whatever TAs and TALs you +# have there at the moment: + +#trust-anchor-directory = /etc/trust-anchors + +# Note that the order in which rcynic will read TAs and TALs from such +# a directory is undefined. In general this is not a problem, but if +# you really care about this for some reason, stick to the other +# directives. diff --git a/rp/rcynic/sample-trust-anchors/README b/rp/rcynic/sample-trust-anchors/README new file mode 100644 index 00000000..485d0485 --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/README @@ -0,0 +1,26 @@ +$Id$ + +These are SAMPLE trust anchors for testing rcynic. Any resemblance +between these files and real trust anchors living, dead, or wandering +the night in ghostly torment is purely coincidental. + +Remember: it's only a trust anchor if a relying party says it is. +You are the relying party, so it's your call. + +The .cer files (if any) are self-signed X.509 trust anchors. + +The .tal files are "trust anchor locators", see RFC 6490. + +See the rcynic documentation for further details.
+ +== + +afrinic.tal AfriNIC +apnic.tal APNIC +arin.tal ARIN testbed +bbn-testbed.tal.disabled Only useful for certain tests +lacnic.tal LACNIC +ripe-ncc-root.tal RIPE NCC +ripe-pilot.tal A RIPE testbed +rpki.net-testbed.tal rpki.net testbed +testbed-apnicrpki.tal An APNIC testbed diff --git a/rp/rcynic/sample-trust-anchors/afrinic.tal b/rp/rcynic/sample-trust-anchors/afrinic.tal new file mode 100644 index 00000000..0f202c73 --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/afrinic.tal @@ -0,0 +1,8 @@ +rsync://rpki.afrinic.net/repository/AfriNIC.cer +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxsAqAhWIO+ON2Ef9oRDM +pKxv+AfmSLIdLWJtjrvUyDxJPBjgR+kVrOHUeTaujygFUp49tuN5H2C1rUuQavTH +vve6xNF5fU3OkTcqEzMOZy+ctkbde2SRMVdvbO22+TH9gNhKDc9l7Vu01qU4LeJH +k3X0f5uu5346YrGAOSv6AaYBXVgXxa0s9ZvgqFpim50pReQe/WI3QwFKNgpPzfQL +6Y7fDPYdYaVOXPXSKtx7P4s4KLA/ZWmRL/bobw/i2fFviAGhDrjqqqum+/9w1hEl +L/vqihVnV18saKTnLvkItA/Bf5i11Yhw2K7qv573YWxyuqCknO/iYLTR1DToBZcZ +UQIDAQAB diff --git a/rp/rcynic/sample-trust-anchors/altca.tal b/rp/rcynic/sample-trust-anchors/altca.tal new file mode 100644 index 00000000..2dbc7413 --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/altca.tal @@ -0,0 +1,9 @@ +rsync://ca0.rpki.net/tal/root.cer + +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzPSPpQxa0rxz9sbbvYGr +UlpgyBVxSIt+k/WDKbr+VW7CjUoz6cc5KMFANkQWw3D6ER4kCwX4WJkD58AGGbw/ +WeAe6m3aHcORUVRkr45a4qSrYiG7Wq9RAXtwbhlXofB3zo+O90IlXDaVP2U9bw+Q +yoJBJuAmZONt0bRgrktv8QhVtKvuYkH5ZIe7DkXJcJzBn6gv09dZsdwZm3xV3soX +HEKrz5pY6Sb2xoL1CyPqzGOfVFxl0G5+dmcD/degPKxrEycAzjnHUzN1gus2jg26 +dtkix7KG/Mn1h/k53j0FdQD+zqPwakgwqjvCOdSdHMRmsikj0EF9WrZIOjZUXV6q +6wIDAQAB diff --git a/rp/rcynic/sample-trust-anchors/apnic-rpki-root-afrinic-origin.tal b/rp/rcynic/sample-trust-anchors/apnic-rpki-root-afrinic-origin.tal new file mode 100644 index 00000000..d254ec44 --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/apnic-rpki-root-afrinic-origin.tal @@ -0,0 +1,8 @@ +rsync://rpki.apnic.net/repository/apnic-rpki-root-afrinic-origin.cer 
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuMLL96YV9pf0rZ4Ow/bk +cgpoPfsRzkcgmisyCuMUdotHwrp8pepujhohatScRK09ILRrZYCdpX4121MJhqXC +P3u3hy9fF0CeARKX/Q82nJccD4dtUp23UcFys8hwJgNYZI910ajkAxwNT//H/TFw +oUYbzZGBR7o2awMc7GdQl/j6dgOkV6AfYy5DyDEgOUNHnUxED2rreefL/E2Fr2ST +Esar6bTR4Tg4+nVF1PjAkgN0tKZYe4wZ6VmtqV/VTngSLysim6av7ki+JR3cVgVU +OqXeh1vPjH2tNu6u9bX37ZrdVb6NBRer9I99IDbKvyhELb6nzo8+Q74zga9HI+Pf +QwIDAQAB diff --git a/rp/rcynic/sample-trust-anchors/apnic-rpki-root-arin-origin.tal b/rp/rcynic/sample-trust-anchors/apnic-rpki-root-arin-origin.tal new file mode 100644 index 00000000..b82a136f --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/apnic-rpki-root-arin-origin.tal @@ -0,0 +1,8 @@ +rsync://rpki.apnic.net/repository/apnic-rpki-root-arin-origin.cer +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp6vscYtzhe0CfFk5Ro44 +llPhsInXtfAxqfYmK7m9V3khkqK3d3/ZAW6pcJm7qW8XhEGl+F5mUeeLIm5JoIhr +kT5B5M6uL0VlCCkZJH4h76ybOa83vWITNZEDy9L3c3nK4S+Basu3vYoE4ICXGG+J +7zg5Iw9saV+p03E2w1g16pt1QI3Cnggp6edkeWClEz3aPw/ULOIHb7YmatWwdERl +tL9LsuMSKszQLUY7F4XVpxey/rJYAZgzDUh+b6813WAClCkkydNjsbviuekAWJbx +sW7Mcw53u30K4g8MP03CjkDOubyoR4Qo99R1UQJCdrRsFKbSSfN/fOA4y7ikc3xs +jQIDAQAB diff --git a/rp/rcynic/sample-trust-anchors/apnic-rpki-root-iana-origin.tal b/rp/rcynic/sample-trust-anchors/apnic-rpki-root-iana-origin.tal new file mode 100644 index 00000000..a4e714c0 --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/apnic-rpki-root-iana-origin.tal @@ -0,0 +1,8 @@ +rsync://rpki.apnic.net/repository/apnic-rpki-root-iana-origin.cer +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx9RWSL61YAAYumEiU8z8 +qH2ETVIL01ilxZlzIL9JYSORMN5Cmtf8V2JblIealSqgOTGjvSjEsiV73s67zYQI +7C/iSOb96uf3/s86NqbxDiFQGN8qG7RNcdgVuUlAidl8WxvLNI8VhqbAB5uSg/Mr +LeSOvXRja041VptAxIhcGzDMvlAJRwkrYK/Mo8P4E2rSQgwqCgae0ebY1CsJ3Cjf +i67C1nw7oXqJJovvXJ4apGmEv8az23OLC6Ki54Ul/E6xk227BFttqFV3YMtKx42H +cCcDVZZy01n7JjzvO8ccaXmHIgR7utnqhBRNNq5Xc5ZhbkrUsNtiJmrZzVlgU6Ou +0wIDAQAB diff --git 
a/rp/rcynic/sample-trust-anchors/apnic-rpki-root-lacnic-origin.tal b/rp/rcynic/sample-trust-anchors/apnic-rpki-root-lacnic-origin.tal new file mode 100644 index 00000000..a4d56802 --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/apnic-rpki-root-lacnic-origin.tal @@ -0,0 +1,8 @@ +rsync://rpki.apnic.net/repository/apnic-rpki-root-lacnic-origin.cer +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyoYPp3l3DWyPtLWrmRn4 +Oux9hQ5bxd0SX/f6ygHxik+I3eMJP5J0Pr2e500tyXb2uKsX9kDqu/kckr+TUMhV +BHd5yAv8OAE3YYEvpz/7uTX7cYy2yUeA76OEP75Y88OIQEzGpPLNpIzDxMggxuDh +IhkA5xMiUJgVoEgmWSzR+MuRBjv2422wAGB5GpLgYsOjpwvG0VPmhnE+39+10ucQ +CLt0Ny5kOR4an2tkvHjm7rzKDnFm8MWxPzAWESdf+8g7ITzSglqxDNiK5E5rdzNt +h1Kvp+9RwaFArw6Ky1A4HhnoplN4EfKwxq0YamuKV0ZTTpWyT2+qDuE6sOfHRbJ0 +5QIDAQAB diff --git a/rp/rcynic/sample-trust-anchors/apnic-rpki-root-ripe-origin.tal b/rp/rcynic/sample-trust-anchors/apnic-rpki-root-ripe-origin.tal new file mode 100644 index 00000000..d64df3ae --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/apnic-rpki-root-ripe-origin.tal @@ -0,0 +1,8 @@ +rsync://rpki.apnic.net/repository/apnic-rpki-root-ripe-origin.cer +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwsQlXmEklLYApoDo7GEa +NNTEGFPU5wJpi04iXuga2xn+g/TMLOlyJbjuPYRtRm/7VbRnN3m9Ta+WETy03+Fm +EbXzB4xxhJKVik/ARHBnrBWhLyURy8Q5/XplE9cJein37IE1mIsbKM7o/90S225w +7GuvW7T4kjPWYmBFOywHWsfQO1EdsgiJrkz+Ab67ZkdSIiKHkf2UE6/MrbDEj+QK +9+s/vKH8BtDhaLmTWY+bVvfJ3+AWDH6roo1ozbl5yamQFbLOl3ns30f3yOJcNSNu +/qgMQRRyp2sXXQovhTy8yqm3LFspaCWnTmQtBieWZwibuOa4Z27A1FzTMst2T4wY +/wIDAQAB diff --git a/rp/rcynic/sample-trust-anchors/apnic-testbed.tal b/rp/rcynic/sample-trust-anchors/apnic-testbed.tal new file mode 100644 index 00000000..f87a3bf3 --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/apnic-testbed.tal @@ -0,0 +1,9 @@ +rsync://rpki-testbed.apnic.net/repository/rpki-testbed.apnic.net.cer + +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyiVmZgr+aN0xZfh6yrJS +qmsX2Q58UzjPaTnhsP88zdSqmRCVAegpRMjBjwF6Ga8tR9RP89W+tRzLf2x883Et 
+vmNAax939gilN4VxGRgKL2YZO27w8Vk6aVpSJ0+CCQI497eataRFpG9eLiPfsRfo +Fuha75MGa5m1cmJFVk0Jj0sRa9RmB/ZVxZDdwHgxAB7L875JSUnVCOLCP4FejoYt +71wJfzFw3fZmXSPeXQ+wtxVPfh8VIOyLTyYbq3AA1Gn+herzvCsiLXZ41lcCCRZR +ObwIGptj7KnqLoJ/MOGL6eoSY+v1B85z2Il3Nl8px3ohsApDM/MANDacxl0EgLwX +2QIDAQAB diff --git a/rp/rcynic/sample-trust-anchors/bbn-testbed.tal.disabled b/rp/rcynic/sample-trust-anchors/bbn-testbed.tal.disabled new file mode 100644 index 00000000..21302ea5 --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/bbn-testbed.tal.disabled @@ -0,0 +1,8 @@ +rsync://rpki.bbn.com/rpki/root.cer +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvEjkk0oC2N8Ij+ruAkrX +cI7lSOIG1sQLC8xJJqklMlOBLao+3If0AVKSEP5+pb/hJwpXPomt6Lm4+fuZ6vX/ +q15llQL3YWkHZ49t/X47yCqxvM0txRaZafffbk6lJrofKhKKrYEcrr/rHiG+IQXn +U1MGaQ/a8DfRDWiCQxonh7CthrLi7tr+QI9s9fDvvHPCK9OmmhZvCUDOydf+/vMn +VShQ57KsUFcuZ0EX9UwsaIGCYGyvsYwk54UtoIGovg2IavfZK8ai0/5XEVpvKQiR +8AixZpg5bSe7555+LhzpL5+LdqX6/hVRah0JrdR8KiXvr16Kmcitj+JLqVc0Wkd8 +zwIDAQAB diff --git a/rp/rcynic/sample-trust-anchors/lacnic.tal b/rp/rcynic/sample-trust-anchors/lacnic.tal new file mode 100644 index 00000000..29220d37 --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/lacnic.tal @@ -0,0 +1,8 @@ +rsync://repository.lacnic.net/rpki/lacnic/rta-lacnic-rpki.cer +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqZEzhYK0+PtDOPfub/KR +c3MeWx3neXx4/wbnJWGbNAtbYqXg3uU5J4HFzPgk/VIppgSKAhlO0H60DRP48by9 +gr5/yDHu2KXhOmnMg46sYsUIpfgtBS9+VtrqWziJfb+pkGtuOWeTnj6zBmBNZKK+ +5AlMCW1WPhrylIcB+XSZx8tk9GS/3SMQ+YfMVwwAyYjsex14Uzto4GjONALE5oh1 +M3+glRQduD6vzSwOD+WahMbc9vCOTED+2McLHRKgNaQf0YJ9a1jG9oJIvDkKXEqd +fqDRktwyoD74cV57bW3tBAexB7GglITbInyQAsmdngtfg2LUMrcROHHP86QPZINj +DQIDAQAB diff --git a/rp/rcynic/sample-trust-anchors/ripe-ncc-root.tal b/rp/rcynic/sample-trust-anchors/ripe-ncc-root.tal new file mode 100644 index 00000000..ebdb9f45 --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/ripe-ncc-root.tal @@ -0,0 +1,9 @@ +rsync://rpki.ripe.net/ta/ripe-ncc-ta.cer 
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0URYSGqUz2m +yBsOzeW1jQ6NsxNvlLMyhWknvnl8NiBCs/T/S2XuNKQNZ+wBZxIgPPV +2pFBFeQAvoH/WK83HwA26V2siwm/MY2nKZ+Olw+wlpzlZ1p3Ipj2eNc +Krmit8BwBC8xImzuCGaV0jkRB0GZ0hoH6Ml03umLprRsn6v0xOP0+l6 +Qc1ZHMFVFb385IQ7FQQTcVIxrdeMsoyJq9eMkE6DoclHhF/NlSllXub +ASQ9KUWqJ0+Ot3QCXr4LXECMfkpkVR2TZT+v5v658bHVs6ZxRD1b6Uk +1uQKAyHUbn/tXvP8lrjAibGzVsXDT2L0x4Edx+QdixPgOji3gBMyL2V +wIDAQAB diff --git a/rp/rcynic/sample-trust-anchors/ripe-pilot.tal b/rp/rcynic/sample-trust-anchors/ripe-pilot.tal new file mode 100644 index 00000000..971128e0 --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/ripe-pilot.tal @@ -0,0 +1,7 @@ +rsync://localcert.ripe.net/ta/ripe-ncc-pilot.cer + +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApIXenLOBfyo7cOnm4mGKmYxsoWCp28dw3XJAoZNW +PDK8i9MxYACpwfz7bjyGma1BWPBJuievNd6nriFI+3WG+wt2bnO2ZmiLenCwMtm8bu7BeldpWRwlAnRp4t4I +L6sZ7T9bF+4sTrv1qiEANqam0mhtLtUfbWXV5Z4mjgnNur7fJH2lIOm7Oc2/tok1rid8WsPe18zuvgwA3M0f +KQ/Oa4SMXKnHr3fg2cHAm1cfEEvhMKa3rUAvsKGVEYeTJNg6rh3IRnjWhZ8GmE1ywl/9qMa2z4YsUi9Bx9U+ +/zMS8qpJn/q6XBbZ8XYTTFvSWfXd6b82jSfABa4ukIDCUF/QFwIDAQAB diff --git a/rp/rcynic/sample-trust-anchors/rpki.net-testbed.tal b/rp/rcynic/sample-trust-anchors/rpki.net-testbed.tal new file mode 100644 index 00000000..1e466300 --- /dev/null +++ b/rp/rcynic/sample-trust-anchors/rpki.net-testbed.tal @@ -0,0 +1,9 @@ +rsync://repo0.rpki.net/rpki/root.cer + +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAovWQL2lh6knDxGUG5hbt +CXvvh4AOzjhDkSHlj22gn/1oiM9IeDATIwP44vhQ6L/xvuk7W6Kfa5ygmqQ+xOZO +wTWPcrUbqaQyPNxokuivzyvqVZVDecOEqs78q58mSp9nbtxmLRW7B67SJCBSzfa5 +XpVyXYEgYAjkk3fpmefU+AcxtxvvHB5OVPIaBfPcs80ICMgHQX+fphvute9XLxjf +JKJWkhZqZ0v7pZm2uhkcPx1PMGcrGee0WSDC3fr3erLueagpiLsFjwwpX6F+Ms8v +qz45H+DKmYKvPSstZjCCq9aJ0qANT9OtnfSDOS+aLRPjZryCNyvvBHxZXqj5YCGK +twIDAQAB diff --git a/rp/rcynic/sample-trust-anchors/testbed-apnicrpki.tal.disabled b/rp/rcynic/sample-trust-anchors/testbed-apnicrpki.tal.disabled new file mode 100644 index 00000000..ae16a302 --- /dev/null +++ 
b/rp/rcynic/sample-trust-anchors/testbed-apnicrpki.tal.disabled @@ -0,0 +1,8 @@ +rsync://apnicrpki.rand.apnic.net/repository/root.cer +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApQofhU9VeRGZxlNgkrFR +ShSIkGtRUwg1mkISvieM2oxA6LFhHoqFV25BdK/3IhOh/0I/E9WX7uWPD72MFGBB +LsFc6JpZFkvg8+9KUIHquk46wn4bEvA8xMsPbGo+mK0CAkqOdHQSemC7nqUlR9VH +5zH1t8aYS4mrVN59LfClmiEE7QElgmVyvccfaEd+gMGaxsDvSylWIdvVxYfFG2rB +LiEsmfXwdn2a8b1Zx5eaFD80XV9Z7h15ESP1epSScGzMp2XB0FYMC3f9k7l5sydX +Rj5BYo1rWaM+Y6AoHe/d8G0xm6iX6b/JqT4Ywdt8/7DbsweaIk73VnpnXrYATCt9 +VwIDAQAB diff --git a/rp/rcynic/static-rsync/Makefile.in b/rp/rcynic/static-rsync/Makefile.in new file mode 100644 index 00000000..8a433c7b --- /dev/null +++ b/rp/rcynic/static-rsync/Makefile.in @@ -0,0 +1,44 @@ +# $Id$ + +VERSION = 2.6.9 + +CFG_ENV = CFLAGS='@CFLAGS@' LDFLAGS='@LDFLAGS@ @LD_STATIC_FLAG@' +CFG_ARG = + +TARBALL = rsync-${VERSION}.tar.gz +DIRNAME = rsync-${VERSION} + +CFG_LOG = > ../config.log 2>&1 +BIN_LOG = > ../build.log 2>&1 + +BIN = rsync + +abs_top_srcdir = @abs_top_srcdir@ +abs_top_builddir = @abs_top_builddir@ + +all: ${BIN} + +${BIN}: ${DIRNAME}/${BIN} + ln ${DIRNAME}/${BIN} $@ + file $@ + +${DIRNAME}/${BIN}: configured.stamp + cd ${DIRNAME} && ${MAKE} ${BIN_LOG} + +extracted.stamp: ${TARBALL} + gzip -c -d ${TARBALL} | tar -xf - + touch $@ + +patched.stamp: extracted.stamp + for i in patches/patch-*; do if test -f "$$i"; then patch -d ${DIRNAME} <"$$i"; else :; fi; done + touch $@ + +configured.stamp: patched.stamp + cd ${DIRNAME} && ${CFG_ENV} ./configure ${CFG_ARG} ${CFG_LOG} + touch $@ + +clean: + rm -rf ${BIN} ${DIRNAME} *.stamp *.log + +distclean: clean + rm -f Makefile diff --git a/rp/rcynic/static-rsync/README b/rp/rcynic/static-rsync/README new file mode 100644 index 00000000..9ff5afa8 --- /dev/null +++ b/rp/rcynic/static-rsync/README @@ -0,0 +1,15 @@ +$Id$ + +Hack to build a static rsync binary suitable for use in a chroot jail. 
+ +The default configuration is for gcc, since that's the most widely +used compiler on the platforms we use. I've provided hooks intended +to make it simple to support other compilers just by overriding make +variables on the command line: if you need to do something more +drastic than this to get your compiler working, please tell me. + +If your platform doesn't support static binaries at all, you're on +your own (and should whine at your OS vendor, as this is nuts). + +We try to stick with rsync release code, but apply security patches +when necessary. diff --git a/rp/rcynic/static-rsync/patches/patch-CVE-2007-4091 b/rp/rcynic/static-rsync/patches/patch-CVE-2007-4091 new file mode 100644 index 00000000..201af96a --- /dev/null +++ b/rp/rcynic/static-rsync/patches/patch-CVE-2007-4091 @@ -0,0 +1,60 @@ +--- sender.c 2006-09-20 03:53:32.000000000 +0200 ++++ sender.c 2007-07-25 15:33:05.000000000 +0200 +@@ -123,6 +123,7 @@ + char fname[MAXPATHLEN]; + struct file_struct *file; + unsigned int offset; ++ size_t l = 0; + + if (ndx < 0 || ndx >= the_file_list->count) + return; +@@ -133,6 +134,20 @@ + file->dir.root, "/", NULL); + } else + offset = 0; ++ ++ l = offset + 1; ++ if (file) { ++ if (file->dirname) ++ l += strlen(file->dirname); ++ if (file->basename) ++ l += strlen(file->basename); ++ } ++ ++ if (l >= sizeof(fname)) { ++ rprintf(FERROR, "Overlong pathname\n"); ++ exit_cleanup(RERR_FILESELECT); ++ } ++ + f_name(file, fname + offset); + if (remove_source_files) { + if (do_unlink(fname) == 0) { +@@ -224,6 +239,7 @@ + enum logcode log_code = log_before_transfer ? FLOG : FINFO; + int f_xfer = write_batch < 0 ? 
batch_fd : f_out; + int i, j; ++ size_t l = 0; + + if (verbose > 2) + rprintf(FINFO, "send_files starting\n"); +@@ -259,6 +275,20 @@ + fname[offset++] = '/'; + } else + offset = 0; ++ ++ l = offset + 1; ++ if (file) { ++ if (file->dirname) ++ l += strlen(file->dirname); ++ if (file->basename) ++ l += strlen(file->basename); ++ } ++ ++ if (l >= sizeof(fname)) { ++ rprintf(FERROR, "Overlong pathname\n"); ++ exit_cleanup(RERR_FILESELECT); ++ } ++ + fname2 = f_name(file, fname + offset); + + if (verbose > 2) diff --git a/rp/rcynic/static-rsync/rsync-2.6.9.tar.gz b/rp/rcynic/static-rsync/rsync-2.6.9.tar.gz new file mode 100644 index 00000000..6377f639 Binary files /dev/null and b/rp/rcynic/static-rsync/rsync-2.6.9.tar.gz differ diff --git a/rp/rcynic/validation_status b/rp/rcynic/validation_status new file mode 100755 index 00000000..1f7a704d --- /dev/null +++ b/rp/rcynic/validation_status @@ -0,0 +1,34 @@ +# $Id$ +# +# Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Flat text listing of elements from rcynic.xml. 
+""" + +import sys + +try: + from lxml.etree import ElementTree +except ImportError: + from xml.etree.ElementTree import ElementTree + +for filename in ([sys.stdin] if len(sys.argv) < 2 else sys.argv[1:]): + for elt in ElementTree(file = filename).findall("validation_status"): + print "%s %8s %-40s %s" % ( + elt.get("timestamp"), + elt.get("generation"), + elt.get("status"), + elt.text.strip()) diff --git a/rp/rtr-origin/Makefile.in b/rp/rtr-origin/Makefile.in new file mode 100644 index 00000000..daa18009 --- /dev/null +++ b/rp/rtr-origin/Makefile.in @@ -0,0 +1,63 @@ +# $Id$ + +BASE = rtr-origin +SRC = ${BASE}.py +BIN = ${BASE} + +INSTALL = @INSTALL@ +PYTHON = @PYTHON@ +AWK = @AWK@ + +prefix = @prefix@ +exec_prefix = @exec_prefix@ +datarootdir = @datarootdir@ +datadir = @datadir@ +localstatedir = @localstatedir@ +sharedstatedir = @sharedstatedir@ +sysconfdir = @sysconfdir@ +bindir = @bindir@ +sbindir = @sbindir@ +libexecdir = @libexecdir@ +libdir = @libdir@ + +abs_top_srcdir = @abs_top_srcdir@ +abs_top_builddir = @abs_top_builddir@ + +RTR_ORIGIN_INSTALL_TARGETS = @RCYNIC_INSTALL_TARGETS@ + +RPKI_RTR_PORT = 43779 + +SCAN_ROAS = ${bindir}/scan_roas + + +all: ${BIN} + +clean: + rm -f ${BIN} + +install: all ${RTR_ORIGIN_INSTALL_TARGETS} + +install-binary: + if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -m 555 -d ${DESTDIR}${bindir}; fi + ${INSTALL} -m 555 ${BIN} ${DESTDIR}${bindir}/${BIN} + +deinstall uninstall: + rm -f ${DESTDIR}${bindir}/${BIN} + +distclean: clean + rm -rf current sockets *.ax *.ix.* + rm -f Makefile + +${BIN} : ${SRC} + AC_PYTHON_INTERPRETER='${PYTHON}' AC_SCAN_ROAS='${SCAN_ROAS}' \ + ${PYTHON} ${abs_top_srcdir}/buildtools/make-rcynic-script.py <${SRC} >$@ + chmod a+x $@ + +test: + @true + +.FORCE: + +# Platform-specific rules below here. 
+ +@RTR_ORIGIN_MAKE_RULES@ diff --git a/rp/rtr-origin/README b/rp/rtr-origin/README new file mode 100644 index 00000000..dae53010 --- /dev/null +++ b/rp/rtr-origin/README @@ -0,0 +1,11 @@ +$Id$ + +Sample implementation of draft-ymbk-rpki-rtr-protocol. + +See: + +- The primary documentation at http://trac.rpki.net/ + +- The PDF manual in ../doc/manual.pdf, or + +- The flat text page ../doc/doc.RPKI.RP.rpki-rtr diff --git a/rp/rtr-origin/rtr-origin b/rp/rtr-origin/rtr-origin new file mode 100755 index 00000000..f37d2ce0 --- /dev/null +++ b/rp/rtr-origin/rtr-origin @@ -0,0 +1,2278 @@ +#!/usr/bin/env python + +# Router origin-authentication rpki-router protocol implementation. See +# draft-ietf-sidr-rpki-rtr in fine Internet-Draft repositories near you. +# +# Run the program with the --help argument for usage information, or see +# documentation for the *_main() functions. +# +# +# $Id$ +# +# Copyright (C) 2009-2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +import sys +import os +import struct +import time +import glob +import socket +import fcntl +import signal +import syslog +import errno +import asyncore +import asynchat +import subprocess +import traceback +import getopt +import bisect +import random +import base64 + + +# Debugging only, should be False in production +disable_incrementals = False + +# Whether to log backtraces +backtrace_on_exceptions = False + +class IgnoreThisRecord(Exception): + pass + + +class timestamp(int): + """ + Wrapper around time module. + """ + + def __new__(cls, x): + return int.__new__(cls, x) + + @classmethod + def now(cls, delta = 0): + return cls(time.time() + delta) + + def __str__(self): + return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(self)) + + +class ipaddr(object): + """ + IP addresses. + """ + + def __init__(self, string = None, value = None): + assert (string is None) != (value is None) + if string is not None: + value = socket.inet_pton(self.af, string) + assert len(value) == self.size + self.value = value + + def __str__(self): + return socket.inet_ntop(self.af, self.value) + + def __cmp__(self, other): + return cmp(self.value, other.value) + +class v4addr(ipaddr): + af = socket.AF_INET + size = 4 + +class v6addr(ipaddr): + af = socket.AF_INET6 + size = 16 + +def read_current(): + """ + Read current serial number and nonce. Return None for both if + serial and nonce not recorded. For backwards compatibility, treat + file containing just a serial number as having a nonce of zero. + """ + try: + f = open("current", "r") + values = tuple(int(s) for s in f.read().split()) + f.close() + return values[0], values[1] + except IndexError: + return values[0], 0 + except IOError: + return None, None + +def write_current(serial, nonce): + """ + Write serial number and nonce. 
+ """ + tmpfn = "current.%d.tmp" % os.getpid() + try: + f = open(tmpfn, "w") + f.write("%d %d\n" % (serial, nonce)) + f.close() + os.rename(tmpfn, "current") + finally: + try: + os.unlink(tmpfn) + except: + pass + + +def new_nonce(): + """ + Create and return a new nonce value. + """ + if force_zero_nonce: + return 0 + try: + return int(random.SystemRandom().getrandbits(16)) + except NotImplementedError: + return int(random.getrandbits(16)) + + +class read_buffer(object): + """ + Wrapper around synchronous/asynchronous read state. + """ + + def __init__(self): + self.buffer = "" + + def update(self, need, callback): + """ + Update count of needed bytes and callback, then dispatch to callback. + """ + self.need = need + self.callback = callback + return self.callback(self) + + def available(self): + """ + How much data do we have available in this buffer? + """ + return len(self.buffer) + + def needed(self): + """ + How much more data does this buffer need to become ready? + """ + return self.need - self.available() + + def ready(self): + """ + Is this buffer ready to read yet? + """ + return self.available() >= self.need + + def get(self, n): + """ + Hand some data to the caller. + """ + b = self.buffer[:n] + self.buffer = self.buffer[n:] + return b + + def put(self, b): + """ + Accumulate some data. + """ + self.buffer += b + + def retry(self): + """ + Try dispatching to the callback again. + """ + return self.callback(self) + +class PDUException(Exception): + """ + Parent exception type for exceptions that signal particular protocol + errors. String value of exception instance will be the message to + put in the error_report PDU, error_report_code value of exception + will be the numeric code to use. 
+ """ + + def __init__(self, msg = None, pdu = None): + assert msg is None or isinstance(msg, (str, unicode)) + self.error_report_msg = msg + self.error_report_pdu = pdu + + def __str__(self): + return self.error_report_msg or self.__class__.__name__ + + def make_error_report(self): + return error_report(errno = self.error_report_code, + errmsg = self.error_report_msg, + errpdu = self.error_report_pdu) + +class UnsupportedProtocolVersion(PDUException): + error_report_code = 4 + +class UnsupportedPDUType(PDUException): + error_report_code = 5 + +class CorruptData(PDUException): + error_report_code = 0 + +class pdu(object): + """ + Object representing a generic PDU in the rpki-router protocol. + Real PDUs are subclasses of this class. + """ + + version = 0 # Protocol version + + _pdu = None # Cached when first generated + + header_struct = struct.Struct("!BBHL") + + def __cmp__(self, other): + return cmp(self.to_pdu(), other.to_pdu()) + + def check(self): + """ + Check attributes to make sure they're within range. + """ + pass + + @classmethod + def read_pdu(cls, reader): + return reader.update(need = cls.header_struct.size, callback = cls.got_header) + + @classmethod + def got_header(cls, reader): + if not reader.ready(): + return None + assert reader.available() >= cls.header_struct.size + version, pdu_type, whatever, length = cls.header_struct.unpack(reader.buffer[:cls.header_struct.size]) + if version != cls.version: + raise UnsupportedProtocolVersion( + "Received PDU version %d, expected %d" % (version, cls.version)) + if pdu_type not in cls.pdu_map: + raise UnsupportedPDUType( + "Received unsupported PDU type %d" % pdu_type) + if length < 8: + raise CorruptData( + "Received PDU with length %d, which is too short to be valid" % length) + self = cls.pdu_map[pdu_type]() + return reader.update(need = length, callback = self.got_pdu) + + def consume(self, client): + """ + Handle results in test client. Default behavior is just to print + out the PDU. 
+ """ + blather(self) + + def send_file(self, server, filename): + """ + Send a content of a file as a cache response. Caller should catch IOError. + """ + f = open(filename, "rb") + server.push_pdu(cache_response(nonce = server.current_nonce)) + server.push_file(f) + server.push_pdu(end_of_data(serial = server.current_serial, nonce = server.current_nonce)) + + def send_nodata(self, server): + """ + Send a nodata error. + """ + server.push_pdu(error_report(errno = error_report.codes["No Data Available"], errpdu = self)) + +class pdu_with_serial(pdu): + """ + Base class for PDUs consisting of just a serial number and nonce. + """ + + header_struct = struct.Struct("!BBHLL") + + def __init__(self, serial = None, nonce = None): + if serial is not None: + assert isinstance(serial, int) + self.serial = serial + if nonce is not None: + assert isinstance(nonce, int) + self.nonce = nonce + + def __str__(self): + return "[%s, serial #%d nonce %d]" % (self.__class__.__name__, self.serial, self.nonce) + + def to_pdu(self): + """ + Generate the wire format PDU. + """ + if self._pdu is None: + self._pdu = self.header_struct.pack(self.version, self.pdu_type, self.nonce, + self.header_struct.size, self.serial) + return self._pdu + + def got_pdu(self, reader): + if not reader.ready(): + return None + b = reader.get(self.header_struct.size) + version, pdu_type, self.nonce, length, self.serial = self.header_struct.unpack(b) + if length != 12: + raise CorruptData("PDU length of %d can't be right" % length, pdu = self) + assert b == self.to_pdu() + return self + +class pdu_nonce(pdu): + """ + Base class for PDUs consisting of just a nonce. + """ + + header_struct = struct.Struct("!BBHL") + + def __init__(self, nonce = None): + if nonce is not None: + assert isinstance(nonce, int) + self.nonce = nonce + + def __str__(self): + return "[%s, nonce %d]" % (self.__class__.__name__, self.nonce) + + def to_pdu(self): + """ + Generate the wire format PDU. 
+ """ + if self._pdu is None: + self._pdu = self.header_struct.pack(self.version, self.pdu_type, self.nonce, self.header_struct.size) + return self._pdu + + def got_pdu(self, reader): + if not reader.ready(): + return None + b = reader.get(self.header_struct.size) + version, pdu_type, self.nonce, length = self.header_struct.unpack(b) + if length != 8: + raise CorruptData("PDU length of %d can't be right" % length, pdu = self) + assert b == self.to_pdu() + return self + +class pdu_empty(pdu): + """ + Base class for empty PDUs. + """ + + header_struct = struct.Struct("!BBHL") + + def __str__(self): + return "[%s]" % self.__class__.__name__ + + def to_pdu(self): + """ + Generate the wire format PDU for this prefix. + """ + if self._pdu is None: + self._pdu = self.header_struct.pack(self.version, self.pdu_type, 0, self.header_struct.size) + return self._pdu + + def got_pdu(self, reader): + if not reader.ready(): + return None + b = reader.get(self.header_struct.size) + version, pdu_type, zero, length = self.header_struct.unpack(b) + if zero != 0: + raise CorruptData("Must-be-zero field isn't zero" % length, pdu = self) + if length != 8: + raise CorruptData("PDU length of %d can't be right" % length, pdu = self) + assert b == self.to_pdu() + return self + +class serial_notify(pdu_with_serial): + """ + Serial Notify PDU. + """ + + pdu_type = 0 + + def consume(self, client): + """ + Respond to a serial_notify message with either a serial_query or + reset_query, depending on what we already know. + """ + blather(self) + if client.current_serial is None or client.current_nonce != self.nonce: + client.push_pdu(reset_query()) + elif self.serial != client.current_serial: + client.push_pdu(serial_query(serial = client.current_serial, nonce = client.current_nonce)) + else: + blather("[Notify did not change serial number, ignoring]") + +class serial_query(pdu_with_serial): + """ + Serial Query PDU. 
+ """ + + pdu_type = 1 + + def serve(self, server): + """ + Received a serial query, send incremental transfer in response. + If client is already up to date, just send an empty incremental + transfer. + """ + blather(self) + if server.get_serial() is None: + self.send_nodata(server) + elif server.current_nonce != self.nonce: + log("[Client requested wrong nonce, resetting client]") + server.push_pdu(cache_reset()) + elif server.current_serial == self.serial: + blather("[Client is already current, sending empty IXFR]") + server.push_pdu(cache_response(nonce = server.current_nonce)) + server.push_pdu(end_of_data(serial = server.current_serial, nonce = server.current_nonce)) + elif disable_incrementals: + server.push_pdu(cache_reset()) + else: + try: + self.send_file(server, "%d.ix.%d" % (server.current_serial, self.serial)) + except IOError: + server.push_pdu(cache_reset()) + +class reset_query(pdu_empty): + """ + Reset Query PDU. + """ + + pdu_type = 2 + + def serve(self, server): + """ + Received a reset query, send full current state in response. + """ + blather(self) + if server.get_serial() is None: + self.send_nodata(server) + else: + try: + fn = "%d.ax" % server.current_serial + self.send_file(server, fn) + except IOError: + server.push_pdu(error_report(errno = error_report.codes["Internal Error"], + errpdu = self, errmsg = "Couldn't open %s" % fn)) + +class cache_response(pdu_nonce): + """ + Cache Response PDU. + """ + + pdu_type = 3 + + def consume(self, client): + """ + Handle cache_response. + """ + blather(self) + if self.nonce != client.current_nonce: + blather("[Nonce changed, resetting]") + client.cache_reset() + +class end_of_data(pdu_with_serial): + """ + End of Data PDU. + """ + + pdu_type = 7 + + def consume(self, client): + """ + Handle end_of_data response. + """ + blather(self) + client.end_of_data(self.serial, self.nonce) + +class cache_reset(pdu_empty): + """ + Cache reset PDU. 
+ """ + + pdu_type = 8 + + def consume(self, client): + """ + Handle cache_reset response, by issuing a reset_query. + """ + blather(self) + client.cache_reset() + client.push_pdu(reset_query()) + +class prefix(pdu): + """ + Object representing one prefix. This corresponds closely to one PDU + in the rpki-router protocol, so closely that we use lexical ordering + of the wire format of the PDU as the ordering for this class. + + This is a virtual class, but the .from_text() constructor + instantiates the correct concrete subclass (ipv4_prefix or + ipv6_prefix) depending on the syntax of its input text. + """ + + header_struct = struct.Struct("!BB2xLBBBx") + asnum_struct = struct.Struct("!L") + + @staticmethod + def from_text(asnum, addr): + """ + Construct a prefix from its text form. + """ + cls = ipv6_prefix if ":" in addr else ipv4_prefix + self = cls() + self.asn = long(asnum) + p, l = addr.split("/") + self.prefix = self.addr_type(string = p) + if "-" in l: + self.prefixlen, self.max_prefixlen = tuple(int(i) for i in l.split("-")) + else: + self.prefixlen = self.max_prefixlen = int(l) + self.announce = 1 + self.check() + return self + + def __str__(self): + plm = "%s/%s-%s" % (self.prefix, self.prefixlen, self.max_prefixlen) + return "%s %8s %-32s %s" % ("+" if self.announce else "-", self.asn, plm, + ":".join(("%02X" % ord(b) for b in self.to_pdu()))) + + def show(self): + blather("# Class: %s" % self.__class__.__name__) + blather("# ASN: %s" % self.asn) + blather("# Prefix: %s" % self.prefix) + blather("# Prefixlen: %s" % self.prefixlen) + blather("# MaxPrefixlen: %s" % self.max_prefixlen) + blather("# Announce: %s" % self.announce) + + def consume(self, client): + """ + Handle one incoming prefix PDU + """ + blather(self) + client.consume_prefix(self) + + def check(self): + """ + Check attributes to make sure they're within range. 
+ """ + if self.announce not in (0, 1): + raise CorruptData("Announce value %d is neither zero nor one" % self.announce, pdu = self) + if self.prefixlen < 0 or self.prefixlen > self.addr_type.size * 8: + raise CorruptData("Implausible prefix length %d" % self.prefixlen, pdu = self) + if self.max_prefixlen < self.prefixlen or self.max_prefixlen > self.addr_type.size * 8: + raise CorruptData("Implausible max prefix length %d" % self.max_prefixlen, pdu = self) + pdulen = self.header_struct.size + self.addr_type.size + self.asnum_struct.size + if len(self.to_pdu()) != pdulen: + raise CorruptData("Expected %d byte PDU, got %d" % (pdulen, len(self.to_pdu())), pdu = self) + + def to_pdu(self, announce = None): + """ + Generate the wire format PDU for this prefix. + """ + if announce is not None: + assert announce in (0, 1) + elif self._pdu is not None: + return self._pdu + pdulen = self.header_struct.size + self.addr_type.size + self.asnum_struct.size + pdu = (self.header_struct.pack(self.version, self.pdu_type, pdulen, + announce if announce is not None else self.announce, + self.prefixlen, self.max_prefixlen) + + self.prefix.value + + self.asnum_struct.pack(self.asn)) + if announce is None: + assert self._pdu is None + self._pdu = pdu + return pdu + + def got_pdu(self, reader): + if not reader.ready(): + return None + b1 = reader.get(self.header_struct.size) + b2 = reader.get(self.addr_type.size) + b3 = reader.get(self.asnum_struct.size) + version, pdu_type, length, self.announce, self.prefixlen, self.max_prefixlen = self.header_struct.unpack(b1) + if length != len(b1) + len(b2) + len(b3): + raise CorruptData("Got PDU length %d, expected %d" % (length, len(b1) + len(b2) + len(b3)), pdu = self) + self.prefix = self.addr_type(value = b2) + self.asn = self.asnum_struct.unpack(b3)[0] + assert b1 + b2 + b3 == self.to_pdu() + return self + + @staticmethod + def from_bgpdump(line, rib_dump): + try: + assert isinstance(rib_dump, bool) + fields = line.split("|") + + # Parse 
prefix, including figuring out IP protocol version + cls = ipv6_prefix if ":" in fields[5] else ipv4_prefix + self = cls() + self.timestamp = timestamp(fields[1]) + p, l = fields[5].split("/") + self.prefix = self.addr_type(p) + self.prefixlen = self.max_prefixlen = int(l) + + # Withdrawals don't have AS paths, so be careful + assert fields[2] == "B" if rib_dump else fields[2] in ("A", "W") + if fields[2] == "W": + self.asn = 0 + self.announce = 0 + else: + self.announce = 1 + if not fields[6] or "{" in fields[6] or "(" in fields[6]: + raise IgnoreThisRecord + a = fields[6].split()[-1] + if "." in a: + a = [int(s) for s in a.split(".")] + if len(a) != 2 or a[0] < 0 or a[0] > 65535 or a[1] < 0 or a[1] > 65535: + log("Bad dotted ASNum %r, ignoring record" % fields[6]) + raise IgnoreThisRecord + a = (a[0] << 16) | a[1] + else: + a = int(a) + self.asn = a + + self.check() + return self + + except IgnoreThisRecord: + raise + + except Exception, e: + log("Ignoring line %r: %s" % (line, e)) + raise IgnoreThisRecord + +class ipv4_prefix(prefix): + """ + IPv4 flavor of a prefix. + """ + pdu_type = 4 + addr_type = v4addr + +class ipv6_prefix(prefix): + """ + IPv6 flavor of a prefix. + """ + pdu_type = 6 + addr_type = v6addr + +class router_key(pdu): + """ + Router Key PDU. + """ + + pdu_type = 9 + + header_struct = struct.Struct("!BBBxL20sL") + + @classmethod + def from_text(cls, asnum, gski, key): + """ + Construct a router key from its text form. 
+ """ + + self = cls() + self.asn = long(asnum) + self.ski = base64.urlsafe_b64decode(gski + "=") + self.key = base64.b64decode(key) + self.announce = 1 + self.check() + return self + + def __str__(self): + return "%s %8s %-32s %s" % ("+" if self.announce else "-", self.asn, + base64.urlsafe_b64encode(self.ski).rstrip("="), + ":".join(("%02X" % ord(b) for b in self.to_pdu()))) + + def consume(self, client): + """ + Handle one incoming Router Key PDU + """ + + blather(self) + client.consume_routerkey(self) + + def check(self): + """ + Check attributes to make sure they're within range. + """ + + if self.announce not in (0, 1): + raise CorruptData("Announce value %d is neither zero nor one" % self.announce, pdu = self) + if len(self.ski) != 20: + raise CorruptData("Implausible SKI length %d" % len(self.ski), pdu = self) + pdulen = self.header_struct.size + len(self.key) + if len(self.to_pdu()) != pdulen: + raise CorruptData("Expected %d byte PDU, got %d" % (pdulen, len(self.to_pdu())), pdu = self) + + def to_pdu(self, announce = None): + if announce is not None: + assert announce in (0, 1) + elif self._pdu is not None: + return self._pdu + pdulen = self.header_struct.size + len(self.key) + pdu = (self.header_struct.pack(self.version, + self.pdu_type, + announce if announce is not None else self.announce, + pdulen, + self.ski, + self.asn) + + self.key) + if announce is None: + assert self._pdu is None + self._pdu = pdu + return pdu + + def got_pdu(self, reader): + if not reader.ready(): + return None + header = reader.get(self.header_struct.size) + version, pdu_type, self.announce, length, self.ski, self.asn = self.header_struct.unpack(header) + remaining = length - self.header_struct.size + if remaining <= 0: + raise CorruptData("Got PDU length %d, minimum is %d" % (length, self.header_struct.size + 1), pdu = self) + self.key = reader.get(remaining) + assert header + self.key == self.to_pdu() + return self + + +class error_report(pdu): + """ + Error Report PDU. 
+ """ + + pdu_type = 10 + + header_struct = struct.Struct("!BBHL") + string_struct = struct.Struct("!L") + + errors = { + 2 : "No Data Available" } + + fatal = { + 0 : "Corrupt Data", + 1 : "Internal Error", + 3 : "Invalid Request", + 4 : "Unsupported Protocol Version", + 5 : "Unsupported PDU Type", + 6 : "Withdrawal of Unknown Record", + 7 : "Duplicate Announcement Received" } + + assert set(errors) & set(fatal) == set() + + errors.update(fatal) + + codes = dict((v, k) for k, v in errors.items()) + + def __init__(self, errno = None, errpdu = None, errmsg = None): + assert errno is None or errno in self.errors + self.errno = errno + self.errpdu = errpdu + self.errmsg = errmsg if errmsg is not None or errno is None else self.errors[errno] + + def __str__(self): + return "[%s, error #%s: %r]" % (self.__class__.__name__, self.errno, self.errmsg) + + def to_counted_string(self, s): + return self.string_struct.pack(len(s)) + s + + def read_counted_string(self, reader, remaining): + assert remaining >= self.string_struct.size + n = self.string_struct.unpack(reader.get(self.string_struct.size))[0] + assert remaining >= self.string_struct.size + n + return n, reader.get(n), (remaining - self.string_struct.size - n) + + def to_pdu(self): + """ + Generate the wire format PDU for this error report. 
+ """ + if self._pdu is None: + assert isinstance(self.errno, int) + assert not isinstance(self.errpdu, error_report) + p = self.errpdu + if p is None: + p = "" + elif isinstance(p, pdu): + p = p.to_pdu() + assert isinstance(p, str) + pdulen = self.header_struct.size + self.string_struct.size * 2 + len(p) + len(self.errmsg) + self._pdu = self.header_struct.pack(self.version, self.pdu_type, self.errno, pdulen) + self._pdu += self.to_counted_string(p) + self._pdu += self.to_counted_string(self.errmsg.encode("utf8")) + return self._pdu + + def got_pdu(self, reader): + if not reader.ready(): + return None + header = reader.get(self.header_struct.size) + version, pdu_type, self.errno, length = self.header_struct.unpack(header) + remaining = length - self.header_struct.size + self.pdulen, self.errpdu, remaining = self.read_counted_string(reader, remaining) + self.errlen, self.errmsg, remaining = self.read_counted_string(reader, remaining) + if length != self.header_struct.size + self.string_struct.size * 2 + self.pdulen + self.errlen: + raise CorruptData("Got PDU length %d, expected %d" % ( + length, self.header_struct.size + self.string_struct.size * 2 + self.pdulen + self.errlen)) + assert (header + + self.to_counted_string(self.errpdu) + + self.to_counted_string(self.errmsg.encode("utf8")) + == self.to_pdu()) + return self + + def serve(self, server): + """ + Received an error_report from client. Not much we can do beyond + logging it, then killing the connection if error was fatal. 
+ """ + log(self) + if self.errno in self.fatal: + log("[Shutting down due to reported fatal protocol error]") + sys.exit(1) + +pdu.pdu_map = dict((p.pdu_type, p) for p in (ipv4_prefix, ipv6_prefix, serial_notify, serial_query, reset_query, + cache_response, end_of_data, cache_reset, router_key, error_report)) + +class pdu_set(list): + """ + Object representing a set of PDUs, that is, one versioned and + (theoretically) consistant set of prefixes and router keys extracted + from rcynic's output. + """ + + @classmethod + def _load_file(cls, filename): + """ + Low-level method to read pdu_set from a file. + """ + self = cls() + f = open(filename, "rb") + r = read_buffer() + while True: + p = pdu.read_pdu(r) + while p is None: + b = f.read(r.needed()) + if b == "": + assert r.available() == 0 + return self + r.put(b) + p = r.retry() + self.append(p) + + @staticmethod + def seq_ge(a, b): + return ((a - b) % (1 << 32)) < (1 << 31) + + +class axfr_set(pdu_set): + """ + Object representing a complete set of PDUs, that is, one versioned + and (theoretically) consistant set of prefixes and router + certificates extracted from rcynic's output, all with the announce + field set. + """ + + @classmethod + def parse_rcynic(cls, rcynic_dir): + """ + Parse ROAS and router certificates fetched (and validated!) by + rcynic to create a new axfr_set. We use the scan_roas and + scan_routercerts utilities to parse the ASN.1, although we may go + back to parsing the files directly using the rpki.POW library code + some day. + """ + + self = cls() + self.serial = timestamp.now() + + try: + p = subprocess.Popen((scan_roas, rcynic_dir), stdout = subprocess.PIPE) + for line in p.stdout: + line = line.split() + asn = line[1] + self.extend(prefix.from_text(asn, addr) for addr in line[2:]) + except OSError, e: + sys.exit("Could not run %s, check your $PATH variable? 
(%s)" % (scan_roas, e)) + + try: + p = subprocess.Popen((scan_routercerts, rcynic_dir), stdout = subprocess.PIPE) + for line in p.stdout: + line = line.split() + gski = line[0] + key = line[-1] + self.extend(router_key.from_text(asn, gski, key) for asn in line[1:-1]) + except OSError, e: + sys.exit("Could not run %s, check your $PATH variable? (%s)" % (scan_routercerts, e)) + + self.sort() + for i in xrange(len(self) - 2, -1, -1): + if self[i] == self[i + 1]: + del self[i + 1] + return self + + @classmethod + def load(cls, filename): + """ + Load an axfr_set from a file, parse filename to obtain serial. + """ + fn1, fn2 = os.path.basename(filename).split(".") + assert fn1.isdigit() and fn2 == "ax" + self = cls._load_file(filename) + self.serial = timestamp(fn1) + return self + + def filename(self): + """ + Generate filename for this axfr_set. + """ + return "%d.ax" % self.serial + + @classmethod + def load_current(cls): + """ + Load current axfr_set. Return None if can't. + """ + serial = read_current()[0] + if serial is None: + return None + try: + return cls.load("%d.ax" % serial) + except IOError: + return None + + def save_axfr(self): + """ + Write axfr__set to file with magic filename. + """ + f = open(self.filename(), "wb") + for p in self: + f.write(p.to_pdu()) + f.close() + + def destroy_old_data(self): + """ + Destroy old data files, presumably because our nonce changed and + the old serial numbers are no longer valid. + """ + for i in glob.iglob("*.ix.*"): + os.unlink(i) + for i in glob.iglob("*.ax"): + if i != self.filename(): + os.unlink(i) + + def mark_current(self): + """ + Save current serial number and nonce, creating new nonce if + necessary. Creating a new nonce triggers cleanup of old state, as + the new nonce invalidates all old serial numbers. 
+ """ + old_serial, nonce = read_current() + if old_serial is None or self.seq_ge(old_serial, self.serial): + blather("Creating new nonce and deleting stale data") + nonce = new_nonce() + self.destroy_old_data() + write_current(self.serial, nonce) + + def save_ixfr(self, other): + """ + Comparing this axfr_set with an older one and write the resulting + ixfr_set to file with magic filename. Since we store pdu_sets + in sorted order, computing the difference is a trivial linear + comparison. + """ + f = open("%d.ix.%d" % (self.serial, other.serial), "wb") + old = other + new = self + len_old = len(old) + len_new = len(new) + i_old = i_new = 0 + while i_old < len_old and i_new < len_new: + if old[i_old] < new[i_new]: + f.write(old[i_old].to_pdu(announce = 0)) + i_old += 1 + elif old[i_old] > new[i_new]: + f.write(new[i_new].to_pdu(announce = 1)) + i_new += 1 + else: + i_old += 1 + i_new += 1 + for i in xrange(i_old, len_old): + f.write(old[i].to_pdu(announce = 0)) + for i in xrange(i_new, len_new): + f.write(new[i].to_pdu(announce = 1)) + f.close() + + def show(self): + """ + Print this axfr_set. 
+ """ + blather("# AXFR %d (%s)" % (self.serial, self.serial)) + for p in self: + blather(p) + + @staticmethod + def read_bgpdump(filename): + assert filename.endswith(".bz2") + blather("Reading %s" % filename) + bunzip2 = subprocess.Popen(("bzip2", "-c", "-d", filename), stdout = subprocess.PIPE) + bgpdump = subprocess.Popen(("bgpdump", "-m", "-"), stdin = bunzip2.stdout, stdout = subprocess.PIPE) + return bgpdump.stdout + + @classmethod + def parse_bgpdump_rib_dump(cls, filename): + assert os.path.basename(filename).startswith("ribs.") + self = cls() + self.serial = None + for line in cls.read_bgpdump(filename): + try: + pfx = prefix.from_bgpdump(line, rib_dump = True) + except IgnoreThisRecord: + continue + self.append(pfx) + self.serial = pfx.timestamp + if self.serial is None: + sys.exit("Failed to parse anything useful from %s" % filename) + self.sort() + for i in xrange(len(self) - 2, -1, -1): + if self[i] == self[i + 1]: + del self[i + 1] + return self + + def parse_bgpdump_update(self, filename): + assert os.path.basename(filename).startswith("updates.") + for line in self.read_bgpdump(filename): + try: + pfx = prefix.from_bgpdump(line, rib_dump = False) + except IgnoreThisRecord: + continue + announce = pfx.announce + pfx.announce = 1 + i = bisect.bisect_left(self, pfx) + if announce: + if i >= len(self) or pfx != self[i]: + self.insert(i, pfx) + else: + while i < len(self) and pfx.prefix == self[i].prefix and pfx.prefixlen == self[i].prefixlen: + del self[i] + self.serial = pfx.timestamp + +class ixfr_set(pdu_set): + """ + Object representing an incremental set of PDUs, that is, the + differences between one versioned and (theoretically) consistant set + of prefixes and router certificates extracted from rcynic's output + and another, with the announce fields set or cleared as necessary to + indicate the changes. + """ + + @classmethod + def load(cls, filename): + """ + Load an ixfr_set from a file, parse filename to obtain serials. 
+ """ + fn1, fn2, fn3 = os.path.basename(filename).split(".") + assert fn1.isdigit() and fn2 == "ix" and fn3.isdigit() + self = cls._load_file(filename) + self.from_serial = timestamp(fn3) + self.to_serial = timestamp(fn1) + return self + + def filename(self): + """ + Generate filename for this ixfr_set. + """ + return "%d.ix.%d" % (self.to_serial, self.from_serial) + + def show(self): + """ + Print this ixfr_set. + """ + blather("# IXFR %d (%s) -> %d (%s)" % (self.from_serial, self.from_serial, + self.to_serial, self.to_serial)) + for p in self: + blather(p) + +class file_producer(object): + """ + File-based producer object for asynchat. + """ + + def __init__(self, handle, buffersize): + self.handle = handle + self.buffersize = buffersize + + def more(self): + return self.handle.read(self.buffersize) + +class pdu_channel(asynchat.async_chat): + """ + asynchat subclass that understands our PDUs. This just handles + network I/O. Specific engines (client, server) should be subclasses + of this with methods that do something useful with the resulting + PDUs. + """ + + def __init__(self, conn = None): + asynchat.async_chat.__init__(self, conn) + self.reader = read_buffer() + + def start_new_pdu(self): + """ + Start read of a new PDU. + """ + try: + p = pdu.read_pdu(self.reader) + while p is not None: + self.deliver_pdu(p) + p = pdu.read_pdu(self.reader) + except PDUException, e: + self.push_pdu(e.make_error_report()) + self.close_when_done() + else: + assert not self.reader.ready() + self.set_terminator(self.reader.needed()) + + def collect_incoming_data(self, data): + """ + Collect data into the read buffer. + """ + self.reader.put(data) + + def found_terminator(self): + """ + Got requested data, see if we now have a PDU. If so, pass it + along, then restart cycle for a new PDU. 
+ """ + p = self.reader.retry() + if p is None: + self.set_terminator(self.reader.needed()) + else: + self.deliver_pdu(p) + self.start_new_pdu() + + def push_pdu(self, pdu): + """ + Write PDU to stream. + """ + try: + self.push(pdu.to_pdu()) + except OSError, e: + if e.errno != errno.EAGAIN: + raise + + def push_file(self, f): + """ + Write content of a file to stream. + """ + try: + self.push_with_producer(file_producer(f, self.ac_out_buffer_size)) + except OSError, e: + if e.errno != errno.EAGAIN: + raise + + def log(self, msg): + """ + Intercept asyncore's logging. + """ + log(msg) + + def log_info(self, msg, tag = "info"): + """ + Intercept asynchat's logging. + """ + log("asynchat: %s: %s" % (tag, msg)) + + def handle_error(self): + """ + Handle errors caught by asyncore main loop. + """ + c, e = sys.exc_info()[:2] + if backtrace_on_exceptions or e == 0: + for line in traceback.format_exc().splitlines(): + log(line) + else: + log("[Exception: %s: %s]" % (c.__name__, e)) + log("[Exiting after unhandled exception]") + sys.exit(1) + + def init_file_dispatcher(self, fd): + """ + Kludge to plug asyncore.file_dispatcher into asynchat. Call from + subclass's __init__() method, after calling + pdu_channel.__init__(), and don't read this on a full stomach. + """ + self.connected = True + self._fileno = fd + self.socket = asyncore.file_wrapper(fd) + self.add_channel() + flags = fcntl.fcntl(fd, fcntl.F_GETFL, 0) + flags = flags | os.O_NONBLOCK + fcntl.fcntl(fd, fcntl.F_SETFL, flags) + + def handle_close(self): + """ + Exit when channel closed. + """ + asynchat.async_chat.handle_close(self) + sys.exit(0) + +class server_write_channel(pdu_channel): + """ + Kludge to deal with ssh's habit of sometimes (compile time option) + invoking us with two unidirectional pipes instead of one + bidirectional socketpair. All the server logic is in the + server_channel class, this class just deals with sending the + server's output to a different file descriptor. 
+ """ + + def __init__(self): + """ + Set up stdout. + """ + pdu_channel.__init__(self) + self.init_file_dispatcher(sys.stdout.fileno()) + + def readable(self): + """ + This channel is never readable. + """ + return False + +class server_channel(pdu_channel): + """ + Server protocol engine, handles upcalls from pdu_channel to + implement protocol logic. + """ + + def __init__(self): + """ + Set up stdin and stdout as connection and start listening for + first PDU. + """ + pdu_channel.__init__(self) + self.init_file_dispatcher(sys.stdin.fileno()) + self.writer = server_write_channel() + self.get_serial() + self.start_new_pdu() + + def writable(self): + """ + This channel is never writable. + """ + return False + + def push(self, data): + """ + Redirect to writer channel. + """ + return self.writer.push(data) + + def push_with_producer(self, producer): + """ + Redirect to writer channel. + """ + return self.writer.push_with_producer(producer) + + def push_pdu(self, pdu): + """ + Redirect to writer channel. + """ + return self.writer.push_pdu(pdu) + + def push_file(self, f): + """ + Redirect to writer channel. + """ + return self.writer.push_file(f) + + def deliver_pdu(self, pdu): + """ + Handle received PDU. + """ + pdu.serve(self) + + def get_serial(self): + """ + Read, cache, and return current serial number, or None if we can't + find the serial number file. The latter condition should never + happen, but maybe we got started in server mode while the cronjob + mode instance is still building its database. + """ + self.current_serial, self.current_nonce = read_current() + return self.current_serial + + def check_serial(self): + """ + Check for a new serial number. + """ + old_serial = self.current_serial + return old_serial != self.get_serial() + + def notify(self, data = None): + """ + Cronjob instance kicked us, send a notify message. 
+ """ + if self.check_serial() is not None: + self.push_pdu(serial_notify(serial = self.current_serial, nonce = self.current_nonce)) + else: + log("Cronjob kicked me without a valid current serial number") + +class client_channel(pdu_channel): + """ + Client protocol engine, handles upcalls from pdu_channel. + """ + + current_serial = None + current_nonce = None + sql = None + host = None + port = None + cache_id = None + + def __init__(self, sock, proc, killsig, host, port): + self.killsig = killsig + self.proc = proc + self.host = host + self.port = port + pdu_channel.__init__(self, conn = sock) + self.start_new_pdu() + + @classmethod + def ssh(cls, host, port): + """ + Set up ssh connection and start listening for first PDU. + """ + args = ("ssh", "-p", port, "-s", host, "rpki-rtr") + blather("[Running ssh: %s]" % " ".join(args)) + s = socket.socketpair() + return cls(sock = s[1], + proc = subprocess.Popen(args, executable = "/usr/bin/ssh", + stdin = s[0], stdout = s[0], close_fds = True), + killsig = signal.SIGKILL, + host = host, port = port) + + @classmethod + def tcp(cls, host, port): + """ + Set up TCP connection and start listening for first PDU. + """ + blather("[Starting raw TCP connection to %s:%s]" % (host, port)) + try: + addrinfo = socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM) + except socket.error, e: + blather("[socket.getaddrinfo() failed: %s]" % e) + else: + for ai in addrinfo: + af, socktype, proto, cn, sa = ai + blather("[Trying addr %s port %s]" % sa[:2]) + try: + s = socket.socket(af, socktype, proto) + except socket.error, e: + blather("[socket.socket() failed: %s]" % e) + continue + try: + s.connect(sa) + except socket.error, e: + blather("[socket.connect() failed: %s]" % e) + s.close() + continue + return cls(sock = s, proc = None, killsig = None, + host = host, port = port) + sys.exit(1) + + @classmethod + def loopback(cls, host, port): + """ + Set up loopback connection and start listening for first PDU. 
+ """ + s = socket.socketpair() + blather("[Using direct subprocess kludge for testing]") + argv = [sys.executable, sys.argv[0], "--server"] + if "--syslog" in sys.argv: + argv.extend(("--syslog", sys.argv[sys.argv.index("--syslog") + 1])) + return cls(sock = s[1], + proc = subprocess.Popen(argv, stdin = s[0], stdout = s[0], close_fds = True), + killsig = signal.SIGINT, + host = host, port = port) + + @classmethod + def tls(cls, host, port): + """ + Set up TLS connection and start listening for first PDU. + + NB: This uses OpenSSL's "s_client" command, which does not + check server certificates properly, so this is not suitable for + production use. Fixing this would be a trivial change, it just + requires using a client program which does check certificates + properly (eg, gnutls-cli, or stunnel's client mode if that works + for such purposes this week). + """ + args = ("openssl", "s_client", "-tls1", "-quiet", "-connect", "%s:%s" % (host, port)) + blather("[Running: %s]" % " ".join(args)) + s = socket.socketpair() + return cls(sock = s[1], + proc = subprocess.Popen(args, stdin = s[0], stdout = s[0], close_fds = True), + killsig = signal.SIGKILL, + host = host, port = port) + + def setup_sql(self, sqlname): + """ + Set up an SQLite database to contain the table we receive. If + necessary, we will create the database. 
+ """ + import sqlite3 + missing = not os.path.exists(sqlname) + self.sql = sqlite3.connect(sqlname, detect_types = sqlite3.PARSE_DECLTYPES) + self.sql.text_factory = str + cur = self.sql.cursor() + cur.execute("PRAGMA foreign_keys = on") + if missing: + cur.execute(''' + CREATE TABLE cache ( + cache_id INTEGER PRIMARY KEY NOT NULL, + host TEXT NOT NULL, + port TEXT NOT NULL, + nonce INTEGER, + serial INTEGER, + updated INTEGER, + UNIQUE (host, port))''') + cur.execute(''' + CREATE TABLE prefix ( + cache_id INTEGER NOT NULL + REFERENCES cache(cache_id) + ON DELETE CASCADE + ON UPDATE CASCADE, + asn INTEGER NOT NULL, + prefix TEXT NOT NULL, + prefixlen INTEGER NOT NULL, + max_prefixlen INTEGER NOT NULL, + UNIQUE (cache_id, asn, prefix, prefixlen, max_prefixlen))''') + + cur.execute(''' + CREATE TABLE routerkey ( + cache_id INTEGER NOT NULL + REFERENCES cache(cache_id) + ON DELETE CASCADE + ON UPDATE CASCADE, + asn INTEGER NOT NULL, + ski TEXT NOT NULL, + key TEXT NOT NULL, + UNIQUE (cache_id, asn, ski), + UNIQUE (cache_id, asn, key))''') + + cur.execute("SELECT cache_id, nonce, serial FROM cache WHERE host = ? AND port = ?", + (self.host, self.port)) + try: + self.cache_id, self.current_nonce, self.current_serial = cur.fetchone() + except TypeError: + cur.execute("INSERT INTO cache (host, port) VALUES (?, ?)", (self.host, self.port)) + self.cache_id = cur.lastrowid + self.sql.commit() + + def cache_reset(self): + """ + Handle cache_reset actions. + """ + self.current_serial = None + if self.sql: + cur = self.sql.cursor() + cur.execute("DELETE FROM prefix WHERE cache_id = ?", (self.cache_id,)) + cur.execute("UPDATE cache SET serial = NULL WHERE cache_id = ?", (self.cache_id,)) + + def end_of_data(self, serial, nonce): + """ + Handle end_of_data actions. 
+ """ + self.current_serial = serial + self.current_nonce = nonce + if self.sql: + self.sql.execute("UPDATE cache SET serial = ?, nonce = ?, updated = datetime('now') WHERE cache_id = ?", + (serial, nonce, self.cache_id)) + self.sql.commit() + + def consume_prefix(self, prefix): + """ + Handle one prefix PDU. + """ + if self.sql: + values = (self.cache_id, prefix.asn, str(prefix.prefix), prefix.prefixlen, prefix.max_prefixlen) + if prefix.announce: + self.sql.execute("INSERT INTO prefix (cache_id, asn, prefix, prefixlen, max_prefixlen) " + "VALUES (?, ?, ?, ?, ?)", + values) + else: + self.sql.execute("DELETE FROM prefix " + "WHERE cache_id = ? AND asn = ? AND prefix = ? AND prefixlen = ? AND max_prefixlen = ?", + values) + + + def consume_routerkey(self, routerkey): + """ + Handle one Router Key PDU. + """ + + if self.sql: + values = (self.cache_id, routerkey.asn, + base64.urlsafe_b64encode(routerkey.ski).rstrip("="), + base64.b64encode(routerkey.key)) + if routerkey.announce: + self.sql.execute("INSERT INTO routerkey (cache_id, asn, ski, key) " + "VALUES (?, ?, ?, ?)", + values) + else: + self.sql.execute("DELETE FROM routerkey " + "WHERE cache_id = ? AND asn = ? AND (ski = ? OR key = ?)", + values) + + + def deliver_pdu(self, pdu): + """ + Handle received PDU. + """ + pdu.consume(self) + + def push_pdu(self, pdu): + """ + Log outbound PDU then write it to stream. + """ + blather(pdu) + pdu_channel.push_pdu(self, pdu) + + def cleanup(self): + """ + Force clean up this client's child process. If everything goes + well, child will have exited already before this method is called, + but we may need to whack it with a stick if something breaks. + """ + if self.proc is not None and self.proc.returncode is None: + try: + os.kill(self.proc.pid, self.killsig) + except OSError: + pass + + def handle_close(self): + """ + Intercept close event so we can log it, then shut down. 
+ """ + blather("Server closed channel") + pdu_channel.handle_close(self) + +class kickme_channel(asyncore.dispatcher): + """ + asyncore dispatcher for the PF_UNIX socket that cronjob mode uses to + kick servers when it's time to send notify PDUs to clients. + """ + + def __init__(self, server): + asyncore.dispatcher.__init__(self) + self.server = server + self.sockname = "%s.%d" % (kickme_base, os.getpid()) + self.create_socket(socket.AF_UNIX, socket.SOCK_DGRAM) + try: + self.bind(self.sockname) + os.chmod(self.sockname, 0660) + except socket.error, e: + log("Couldn't bind() kickme socket: %r" % e) + self.close() + except OSError, e: + log("Couldn't chmod() kickme socket: %r" % e) + + def writable(self): + """ + This socket is read-only, never writable. + """ + return False + + def handle_connect(self): + """ + Ignore connect events (not very useful on datagram socket). + """ + pass + + def handle_read(self): + """ + Handle receipt of a datagram. + """ + data = self.recv(512) + self.server.notify(data) + + def cleanup(self): + """ + Clean up this dispatcher's socket. + """ + self.close() + try: + os.unlink(self.sockname) + except: + pass + + def log(self, msg): + """ + Intercept asyncore's logging. + """ + log(msg) + + def log_info(self, msg, tag = "info"): + """ + Intercept asyncore's logging. + """ + log("asyncore: %s: %s" % (tag, msg)) + + def handle_error(self): + """ + Handle errors caught by asyncore main loop. + """ + c, e = sys.exc_info()[:2] + if backtrace_on_exceptions or e == 0: + for line in traceback.format_exc().splitlines(): + log(line) + else: + log("[Exception: %s: %s]" % (c.__name__, e)) + log("[Exiting after unhandled exception]") + sys.exit(1) + + +def hostport_tag(): + """ + Construct hostname/address + port when we're running under a + protocol we understand well enough to do that. This is all + kludgery. Just grit your teeth, or perhaps just close your eyes. 
+ """ + + proto = None + + if proto is None: + try: + host, port = socket.fromfd(0, socket.AF_INET, socket.SOCK_STREAM).getpeername() + proto = "tcp" + except: + pass + + if proto is None: + try: + host, port = socket.fromfd(0, socket.AF_INET6, socket.SOCK_STREAM).getpeername()[0:2] + proto = "tcp" + except: + pass + + if proto is None: + try: + host, port = os.environ["SSH_CONNECTION"].split()[0:2] + proto = "ssh" + except: + pass + + if proto is None: + try: + host, port = os.environ["REMOTE_HOST"], os.getenv("REMOTE_PORT") + proto = "ssl" + except: + pass + + if proto is None: + return "" + elif not port: + return "/%s/%s" % (proto, host) + elif ":" in host: + return "/%s/%s.%s" % (proto, host, port) + else: + return "/%s/%s:%s" % (proto, host, port) + + +def kick_all(serial): + """ + Kick any existing server processes to wake them up. + """ + + try: + os.stat(kickme_dir) + except OSError: + blather('# Creating directory "%s"' % kickme_dir) + os.makedirs(kickme_dir) + + msg = "Good morning, serial %d is ready" % serial + sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) + for name in glob.iglob("%s.*" % kickme_base): + try: + blather("# Kicking %s" % name) + sock.sendto(msg, name) + except socket.error: + try: + blather("# Failed to kick %s, probably dead socket, attempting cleanup" % name) + os.unlink(name) + except Exception, e: + blather("# Couldn't unlink suspected dead socket %s: %s" % (name, e)) + except Exception, e: + log("# Failed to kick %s and don't understand why: %s" % (name, e)) + sock.close() + +def cronjob_main(argv): + """ + Run this mode right after rcynic to do the real work of groveling + through the ROAs that rcynic collects and translating that data into + the form used in the rpki-router protocol. This mode prepares both + full dumps (AXFR) and incremental dumps against a specific prior + version (IXFR). [Terminology here borrowed from DNS, as is much of + the protocol design.] 
Finally, this mode kicks any active servers, + so that they can notify their clients that a new version is + available. + + Run this in the directory where you want to write its output files, + which should also be the directory in which you run this program in + --server mode. + + This mode takes one argument on the command line, which specifies + the directory name of rcynic's authenticated output tree (normally + $somewhere/rcynic-data/authenticated/). + """ + + if len(argv) != 1: + sys.exit("Expected one argument, got %r" % (argv,)) + + old_ixfrs = glob.glob("*.ix.*") + + current = read_current()[0] + cutoff = timestamp.now(-(24 * 60 * 60)) + for f in glob.iglob("*.ax"): + t = timestamp(int(f.split(".")[0])) + if t < cutoff and t != current: + blather("# Deleting old file %s, timestamp %s" % (f, t)) + os.unlink(f) + + pdus = axfr_set.parse_rcynic(argv[0]) + if pdus == axfr_set.load_current(): + blather("# No change, new version not needed") + sys.exit() + pdus.save_axfr() + for axfr in glob.iglob("*.ax"): + if axfr != pdus.filename(): + pdus.save_ixfr(axfr_set.load(axfr)) + pdus.mark_current() + + blather("# New serial is %d (%s)" % (pdus.serial, pdus.serial)) + + kick_all(pdus.serial) + + old_ixfrs.sort() + for ixfr in old_ixfrs: + try: + blather("# Deleting old file %s" % ixfr) + os.unlink(ixfr) + except OSError: + pass + +def show_main(argv): + """ + Display dumps created by --cronjob mode in textual form. + Intended only for debugging. + + This mode takes no command line arguments. Run it in the directory + where you ran --cronjob mode. + """ + + if argv: + sys.exit("Unexpected arguments: %r" % (argv,)) + + g = glob.glob("*.ax") + g.sort() + for f in g: + axfr_set.load(f).show() + + g = glob.glob("*.ix.*") + g.sort() + for f in g: + ixfr_set.load(f).show() + +def server_main(argv): + """ + Implement the server side of the rpkk-router protocol. 
Other than + one PF_UNIX socket inode, this doesn't write anything to disk, so it + can be run with minimal privileges. Most of the hard work has + already been done in --cronjob mode, so all that this mode has to do + is serve up the results. + + In production use this server should run under sshd. The subsystem + mechanism in sshd does not allow us to pass arguments on the command + line, so setting this up might require a wrapper script, but in + production use you will probably want to lock down the public key + used to authenticate the ssh session so that it can only run this + one command, in which case you can just specify the full command + including any arguments in the authorized_keys file. + + Unless you do something special, sshd will have this program running + in whatever it thinks is the home directory associated with the + username given in the ssh prototocol setup, so it may be easiest to + set this up so that the home directory sshd puts this program into + is the one where --cronjob left its files for this mode to pick up. + + This mode must be run in the directory where you ran --cronjob mode. + + This mode takes one optional argument: if provided, the argument is + the name of a directory to which the program should chdir() on + startup; this may simplify setup when running under inetd. + + The server is event driven, so everything interesting happens in the + channel classes. + """ + + blather("[Starting]") + if len(argv) > 1: + sys.exit("Unexpected arguments: %r" % (argv,)) + if argv: + try: + os.chdir(argv[0]) + except OSError, e: + sys.exit(e) + kickme = None + try: + server = server_channel() + kickme = kickme_channel(server = server) + asyncore.loop(timeout = None) + except KeyboardInterrupt: + sys.exit(0) + finally: + if kickme is not None: + kickme.cleanup() + + +def listener_tcp_main(argv): + """ + Simple plain-TCP listener. 
Listens on a specified TCP port, upon + receiving a connection, forks the process and starts child executing + at server_main(). + + First argument (required) is numeric port number. + + Second argument (optional) is directory, like --server. + + NB: plain-TCP is completely insecure. We only implement this + because it's all that the routers currently support. In theory, we + will all be running TCP-AO in the future, at which point this will + go away. + """ + + # Perhaps we should daemonize? Deal with that later. + + if len(argv) > 2: + sys.exit("Unexpected arguments: %r" % (argv,)) + try: + port = int(argv[0]) if argv[0].isdigit() else socket.getservbyname(argv[0], "tcp") + except: + sys.exit("Couldn't parse port number on which to listen") + if len(argv) > 1: + try: + os.chdir(argv[1]) + except OSError, e: + sys.exit(e) + listener = None + try: + listener = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) + listener.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0) + except: + if listener is not None: + listener.close() + listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + try: + listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + except AttributeError: + pass + listener.bind(("", port)) + listener.listen(5) + blather("[Listening on port %s]" % port) + while True: + s, ai = listener.accept() + blather("[Received connection from %r]" % (ai,)) + pid = os.fork() + if pid == 0: + os.dup2(s.fileno(), 0) + os.dup2(s.fileno(), 1) + s.close() + #os.closerange(3, os.sysconf("SC_OPEN_MAX")) + global log_tag + log_tag = "rtr-origin/server" + hostport_tag() + syslog.closelog() + syslog.openlog(log_tag, syslog.LOG_PID, syslog_facility) + server_main(()) + sys.exit() + else: + blather("[Spawned server %d]" % pid) + try: + while True: + pid, status = os.waitpid(0, os.WNOHANG) + if pid: + blather("[Server %s exited]" % pid) + else: + break + except: + pass + + +def client_main(argv): 
+ """ + Toy client, intended only for debugging. + + This program takes one or more arguments. The first argument + determines what kind of connection it should open to the server, the + remaining arguments are connection details specific to this + particular type of connection. + + If the first argument is "loopback", the client will run a copy of + the server directly in a subprocess, and communicate with it via a + PF_UNIX socket pair. This sub-mode takes no further arguments. + + If the first argument is "ssh", the client will attempt to run ssh + in as subprocess to connect to the server using the ssh subsystem + mechanism as specified for this protocol. The remaining arguments + should be a hostname (or IP address in a form acceptable to ssh) and + a TCP port number. + + If the first argument is "tcp", the client will attempt to open a + direct (and completely insecure!) TCP connection to the server. + The remaining arguments should be a hostname (or IP address) and + a TCP port number. + + If the first argument is "tls", the client will attempt to open a + TLS connection to the server. The remaining arguments should be a + hostname (or IP address) and a TCP port number. + + An optional final name is the name of a file containing a SQLite + database in which to store the received table. If specified, this + database will be created if missing. 
+ """ + + blather("[Startup]") + client = None + if not argv: + argv = ["loopback"] + proto = argv[0] + if proto == "loopback" and len(argv) in (1, 2): + constructor = client_channel.loopback + host, port = "", "" + sqlname = None if len(argv) == 1 else argv[1] + elif proto in ("ssh", "tcp", "tls") and len(argv) in (3, 4): + constructor = getattr(client_channel, proto) + host, port = argv[1:3] + sqlname = None if len(argv) == 3 else argv[3] + else: + sys.exit("Unexpected arguments: %s" % " ".join(argv)) + + try: + client = constructor(host, port) + if sqlname: + client.setup_sql(sqlname) + while True: + if client.current_serial is None or client.current_nonce is None: + client.push_pdu(reset_query()) + else: + client.push_pdu(serial_query(serial = client.current_serial, nonce = client.current_nonce)) + wakeup = time.time() + 600 + while True: + remaining = wakeup - time.time() + if remaining < 0: + break + asyncore.loop(timeout = remaining, count = 1) + + except KeyboardInterrupt: + sys.exit(0) + finally: + if client is not None: + client.cleanup() + +def bgpdump_convert_main(argv): + """ + Simulate route origin data from a set of BGP dump files. + + * DANGER WILL ROBINSON! * + * DEBUGGING AND TEST USE ONLY! * + + argv is an ordered list of filenames. Each file must be a BGP RIB + dumps, a BGP UPDATE dumps, or an AXFR dump in the format written by + this program's --cronjob command. The first file must be a RIB dump + or AXFR dump, it cannot be an UPDATE dump. Output will be a set of + AXFR and IXFR files with timestamps derived from the BGP dumps, + which can be used as input to this program's --server command for + test purposes. SUCH DATA PROVIDE NO SECURITY AT ALL. + + You have been warned. 
+ """ + + first = True + db = None + axfrs = [] + + for filename in argv: + + if filename.endswith(".ax"): + blather("Reading %s" % filename) + db = axfr_set.load(filename) + + elif os.path.basename(filename).startswith("ribs."): + db = axfr_set.parse_bgpdump_rib_dump(filename) + db.save_axfr() + + elif not first: + assert db is not None + db.parse_bgpdump_update(filename) + db.save_axfr() + + else: + sys.exit("First argument must be a RIB dump or .ax file, don't know what to do with %s" % filename) + + blather("DB serial now %d (%s)" % (db.serial, db.serial)) + if first and read_current() == (None, None): + db.mark_current() + first = False + + for axfr in axfrs: + blather("Loading %s" % axfr) + ax = axfr_set.load(axfr) + blather("Computing changes from %d (%s) to %d (%s)" % (ax.serial, ax.serial, db.serial, db.serial)) + db.save_ixfr(ax) + del ax + + axfrs.append(db.filename()) + + +def bgpdump_select_main(argv): + """ + Simulate route origin data from a set of BGP dump files. + + * DANGER WILL ROBINSON! * + * DEBUGGING AND TEST USE ONLY! * + + Set current serial number to correspond to an .ax file created by + converting BGP dump files. SUCH DATA PROVIDE NO SECURITY AT ALL. + + You have been warned. + """ + + serial = None + try: + head, sep, tail = os.path.basename(argv[0]).partition(".") + if len(argv) == 1 and head.isdigit() and sep == "." and tail == "ax": + serial = timestamp(head) + except: + pass + if serial is None: + sys.exit("Argument must be name of a .ax file") + + nonce = read_current()[1] + if nonce is None: + nonce = new_nonce() + + write_current(serial, nonce) + kick_all(serial) + + +class bgpsec_replay_clock(object): + """ + Internal clock for replaying BGP dump files. + + * DANGER WILL ROBINSON! * + * DEBUGGING AND TEST USE ONLY! * + + This class replaces the normal on-disk serial number mechanism with + an in-memory version based on pre-computed data. + bgpdump_server_main() uses this hack to replay historical data for + testing purposes. 
DO NOT USE THIS IN PRODUCTION. + + You have been warned. + """ + + def __init__(self): + self.timestamps = [timestamp(int(f.split(".")[0])) for f in glob.iglob("*.ax")] + self.timestamps.sort() + self.offset = self.timestamps[0] - int(time.time()) + self.nonce = new_nonce() + + def __nonzero__(self): + return len(self.timestamps) > 0 + + def now(self): + return timestamp.now(self.offset) + + def read_current(self): + now = self.now() + while len(self.timestamps) > 1 and now >= self.timestamps[1]: + del self.timestamps[0] + return self.timestamps[0], self.nonce + + def siesta(self): + now = self.now() + if len(self.timestamps) <= 1: + return None + elif now < self.timestamps[1]: + return self.timestamps[1] - now + else: + return 1 + + +def bgpdump_server_main(argv): + """ + Simulate route origin data from a set of BGP dump files. + + * DANGER WILL ROBINSON! * + * DEBUGGING AND TEST USE ONLY! * + + This is a clone of server_main() which replaces the external serial + number updates triggered via the kickme channel by cronjob_main with + an internal clocking mechanism to replay historical test data. + + DO NOT USE THIS IN PRODUCTION. + + You have been warned. + """ + + blather("[Starting]") + if len(argv) > 1: + sys.exit("Unexpected arguments: %r" % (argv,)) + if argv: + try: + os.chdir(argv[0]) + except OSError, e: + sys.exit(e) + # + # Yes, this really does replace a global function with a bound + # method to our clock object. Fun stuff, huh? 
+ # + global read_current + clock = bgpsec_replay_clock() + read_current = clock.read_current + # + try: + server = server_channel() + old_serial = server.get_serial() + blather("[Starting at serial %d (%s)]" % (old_serial, old_serial)) + while clock: + new_serial = server.get_serial() + if old_serial != new_serial: + blather("[Serial bumped from %d (%s) to %d (%s)]" % (old_serial, old_serial, new_serial, new_serial)) + server.notify() + old_serial = new_serial + asyncore.loop(timeout = clock.siesta(), count = 1) + except KeyboardInterrupt: + sys.exit(0) + +# Figure out where the scan_roas utility program is today +try: + # Set from autoconf + scan_roas = ac_scan_roas +except NameError: + # Source directory + scan_roas = os.path.normpath(os.path.join(sys.path[0], "..", "utils", + "scan_roas", "scan_roas")) +# If that didn't work, use $PATH and hope for the best +if not os.path.exists(scan_roas): + scan_roas = "scan_roas" + +# Same thing for scan_routercerts +try: + # Set from autoconf + scan_routercerts = ac_scan_routercerts +except NameError: + # Source directory + scan_routercerts = os.path.normpath(os.path.join(sys.path[0], "..", "utils", + "scan_routercerts", "scan_routercerts")) +if not os.path.exists(scan_routercerts): + scan_routercerts = "scan_routercerts" + +force_zero_nonce = False + +kickme_dir = "sockets" +kickme_base = os.path.join(kickme_dir, "kickme") + +main_dispatch = { + "cronjob" : cronjob_main, + "client" : client_main, + "server" : server_main, + "show" : show_main, + "listener_tcp" : listener_tcp_main, + "bgpdump_convert" : bgpdump_convert_main, + "bgpdump_select" : bgpdump_select_main, + "bgpdump_server" : bgpdump_server_main } + +def usage(msg = None): + f = sys.stderr if msg else sys.stdout + f.write("Usage: %s [options] --mode [arguments]\n" % sys.argv[0]) + f.write("\n") + f.write("where options are zero or more of:\n") + f.write("\n") + f.write("--syslog facility.warning_priority[.info_priority]\n") + f.write("\n") + 
f.write("--zero-nonce\n") + f.write("\n") + f.write("and --mode is one of:\n") + f.write("\n") + for name, func in main_dispatch.iteritems(): + f.write("--%s:\n" % name) + f.write(func.__doc__) + f.write("\n") + sys.exit(msg) + +if __name__ == "__main__": + + os.environ["TZ"] = "UTC" + time.tzset() + + mode = None + + syslog_facility, syslog_warning, syslog_info = syslog.LOG_DAEMON, syslog.LOG_WARNING, syslog.LOG_INFO + + opts, argv = getopt.getopt(sys.argv[1:], "hs:z?", ["help", "syslog=", "zero-nonce"] + main_dispatch.keys()) + for o, a in opts: + if o in ("-h", "--help", "-?"): + usage() + elif o in ("-z", "--zero-nonce"): + force_zero_nonce = True + elif o in ("-s", "--syslog"): + try: + a = [getattr(syslog, "LOG_" + i.upper()) for i in a.split(".")] + if len(a) == 2: + a.append(a[1]) + syslog_facility, syslog_warning, syslog_info = a + if syslog_facility < 8 or syslog_warning >= 8 or syslog_info >= 8: + raise ValueError + except: + usage("Bad value specified for --syslog option") + elif len(o) > 2 and o[2:] in main_dispatch: + if mode is not None: + sys.exit("Conflicting modes specified") + mode = o[2:] + + if mode is None: + usage("No mode specified") + + log_tag = "rtr-origin/" + mode + + if mode in ("server", "bgpdump_server"): + log_tag += hostport_tag() + + if mode in ("cronjob", "server" , "bgpdump_server"): + syslog.openlog(log_tag, syslog.LOG_PID, syslog_facility) + def log(msg): + return syslog.syslog(syslog_warning, str(msg)) + def blather(msg): + return syslog.syslog(syslog_info, str(msg)) + + elif mode == "show": + def log(msg): + try: + os.write(sys.stdout.fileno(), "%s\n" % msg) + except OSError, e: + if e.errno != errno.EPIPE: + raise + blather = log + + else: + def log(msg): + sys.stderr.write("%s %s[%d]: %s\n" % (time.strftime("%F %T"), log_tag, os.getpid(), msg)) + blather = log + + main_dispatch[mode](argv) diff --git a/rp/rtr-origin/rules.darwin.mk b/rp/rtr-origin/rules.darwin.mk new file mode 100644 index 00000000..1230db92 --- /dev/null 
+++ b/rp/rtr-origin/rules.darwin.mk @@ -0,0 +1,9 @@ +# $Id$ + +install-always: install-binary + +install-postconf: install-listener + +install-listener: + @echo "No rule for $@ on this platform (yet), you'll have to do that yourself if it matters." + diff --git a/rp/rtr-origin/rules.freebsd.mk b/rp/rtr-origin/rules.freebsd.mk new file mode 100644 index 00000000..df99da47 --- /dev/null +++ b/rp/rtr-origin/rules.freebsd.mk @@ -0,0 +1,37 @@ +# $Id$ + +install-always: install-binary + +install-postconf: install-listener + +install-listener: .FORCE + @if /usr/bin/egrep -q '^rpki-rtr' /etc/services ; \ + then \ + echo "You already have a /etc/services entry for rpki-rtr, so I will use it."; \ + elif echo >>/etc/services "rpki-rtr ${RPKI_RTR_PORT}/tcp #RFC 6810" ; \ + then \ + echo "Added rpki-rtr to /etc/services."; \ + else \ + echo "Adding rpki-rtr to /etc/services failed, please fix this, then try again."; \ + exit 1; \ + fi + @if /usr/bin/egrep -q "rpki-rtr[ ]+stream[ ]+tcp[ ]" /etc/inetd.conf; \ + then \ + echo "You already have an inetd.conf entry for rpki-rtr on TCPv4, so I will use it."; \ + elif echo >>/etc/inetd.conf "rpki-rtr stream tcp nowait rpkirtr /usr/local/bin/rtr-origin rtr-origin --server /var/rcynic/rpki-rtr"; \ + then \ + echo "Added rpki-rtr for TCPv4 to /etc/inetd.conf."; \ + else \ + echo "Adding rpki-rtr for TCPv4 to /etc/inetd.conf failed, please fix this, then try again."; \ + exit 1; \ + fi + @if /usr/bin/egrep -q "rpki-rtr[ ]+stream[ ]+tcp6[ ]" /etc/inetd.conf; \ + then \ + echo "You already have an inetd.conf entry for rpki-rtr on TCPv6, so I will use it."; \ + elif echo >>/etc/inetd.conf "rpki-rtr stream tcp6 nowait rpkirtr /usr/local/bin/rtr-origin rtr-origin --server /var/rcynic/rpki-rtr"; \ + then \ + echo "Added rpki-rtr for TCPv6 to /etc/inetd.conf."; \ + else \ + echo "Adding rpki-rtr for TCPv6 to /etc/inetd.conf failed, please fix this, then try again."; \ + exit 1; \ + fi diff --git a/rp/rtr-origin/rules.linux.mk 
b/rp/rtr-origin/rules.linux.mk new file mode 100644 index 00000000..23f90f69 --- /dev/null +++ b/rp/rtr-origin/rules.linux.mk @@ -0,0 +1,29 @@ +# $Id$ + +install-always: install-binary install-listener + +install-postconf: + @true + +# Only need to make listener if not already present + +install-listener: ${DESTDIR}/etc/xinetd.d/rpki-rtr + +${DESTDIR}/etc/xinetd.d/rpki-rtr: + @${AWK} 'BEGIN { \ + print "service rpki-rtr"; \ + print "{"; \ + print " type = UNLISTED"; \ + print " flags = IPv4"; \ + print " socket_type = stream"; \ + print " protocol = tcp"; \ + print " port = ${RPKI_RTR_PORT}"; \ + print " wait = no"; \ + print " user = rpkirtr"; \ + print " server = ${bindir}/${BIN}"; \ + print " server_args = --server /var/rcynic/rpki-rtr"; \ + print "}"; \ + }' >xinetd.rpki-rtr + ${INSTALL} -d ${DESTDIR}/etc/xinetd.d + ${INSTALL} -m 644 xinetd.rpki-rtr $@ + rm xinetd.rpki-rtr diff --git a/rp/rtr-origin/rules.unknown.mk b/rp/rtr-origin/rules.unknown.mk new file mode 100644 index 00000000..fb16e93a --- /dev/null +++ b/rp/rtr-origin/rules.unknown.mk @@ -0,0 +1,8 @@ +# $Id$ + +install-always: install-binary + +install-postconf: install-listener + +install-listener: + @echo "Don't know how to make $@ on this platform"; exit 1 diff --git a/rp/rtr-origin/server.sh b/rp/rtr-origin/server.sh new file mode 100755 index 00000000..7ccf2f38 --- /dev/null +++ b/rp/rtr-origin/server.sh @@ -0,0 +1,17 @@ +#!/bin/sh - +# +# Wrapper for rtr-origin.py in server mode, for testing. +# +# In production we would probably want to handle all of this either +# directly in the Python code or in the command= setting for a +# particular ssh key, but for initial testing it's simpler to run a +# shall script to change to the right directory and supply any +# necessary command line arguments. +# +# Be warned that almost any error here will cause the subsystem to +# fail mysteriously, leaving behind naught but a SIGCHILD log message +# from sshd as this script dies. 
+ +cd /u/sra/rpki/subvert-rpki.hactrn.net/rtr-origin + +exec /usr/local/bin/python rtr-origin.py --server diff --git a/rp/rtr-origin/sshd.conf b/rp/rtr-origin/sshd.conf new file mode 100644 index 00000000..0124fc4c --- /dev/null +++ b/rp/rtr-origin/sshd.conf @@ -0,0 +1,23 @@ +# $Id$ +# +# sshd config file for testing. Invoke thusly: +# +# /usr/sbin/sshd -f /u/sra/rpki/subvert-rpki.hactrn.net/rtr-origin/sshd.conf -d + +Port 2222 +Protocol 2 +ListenAddress 127.0.0.1 +ListenAddress ::1 +HostKey /u/sra/rpki/subvert-rpki.hactrn.net/rtr-origin/ssh_host_rsa_key +PermitRootLogin no +PubkeyAuthentication yes +AuthorizedKeysFile /u/sra/rpki/subvert-rpki.hactrn.net/rtr-origin/authorized_keys +PasswordAuthentication no +PermitEmptyPasswords no +ChallengeResponseAuthentication no +UsePAM no +AllowTcpForwarding no +X11Forwarding no +UseDNS no +PidFile /u/sra/rpki/subvert-rpki.hactrn.net/rtr-origin/sshd.pid +Subsystem rpki-rtr /u/sra/rpki/subvert-rpki.hactrn.net/rtr-origin/server.sh diff --git a/rp/utils/Makefile.in b/rp/utils/Makefile.in new file mode 100644 index 00000000..c89fdff5 --- /dev/null +++ b/rp/utils/Makefile.in @@ -0,0 +1,9 @@ +# $Id$ + +SUBDIRS = uri print_rpki_manifest print_roa hashdir find_roa scan_roas scan_routercerts + +all clean test distclean install deinstall uninstall:: + @for i in ${SUBDIRS}; do echo "Making $@ in $$i"; (cd $$i && ${MAKE} $@); done + +distclean:: + rm -f Makefile diff --git a/rp/utils/README b/rp/utils/README new file mode 100644 index 00000000..edbd793b --- /dev/null +++ b/rp/utils/README @@ -0,0 +1,12 @@ +$Id$ + +A collection of small RPKI utility programs which can be combined in +various useful ways by relying parties or by rpkid test scripts. 
+ +See: + +- The primary documentation at http://trac.rpki.net/ + +- The PDF manual in ../doc/manual.pdf, or + +- The flat text page ../doc/doc.RPKI.Utils diff --git a/rp/utils/dot.awk b/rp/utils/dot.awk new file mode 100644 index 00000000..ca1b490b --- /dev/null +++ b/rp/utils/dot.awk @@ -0,0 +1,34 @@ +#!/usr/bin/awk -f +# $Id$ +# +# This doesn't really work right yet, and even if it did, the graph +# it would generate would be hopelessly large. + +BEGIN { + cmd = "find /var/rcynic/data/unauthenticated -type f -name '*.cer' -print0 | xargs -0 ./uri -d"; + while ((cmd | getline) == 1) { + if ($1 == "File") { + sub("/var/rcynic/data/unauthenticated/", "rsync://"); + u = $2; + uri[u] = ++n; + continue; + } + if ($1 == "SIA:") { + sia[u] = $2; + continue; + } + if ($1 == "AIA:") { + aia[u] = $2; + continue; + } + } + print "digraph rpki {"; + for (u in uri) { + printf "n%06d\t[ label=\"%s\" ];\n", uri[u], u; + if (sia[u]) + printf "n%06d -> n%06d\t [ color=blue ];\n", uri[u], uri[sia[u]]; + if (aia[u]) + printf "n%06d -> n%06d\t [ color=green ];\n", uri[u], uri[aia[u]]; + } + print "}"; +} diff --git a/rp/utils/find_roa.c b/rp/utils/find_roa.c new file mode 100644 index 00000000..a14242c8 --- /dev/null +++ b/rp/utils/find_roa.c @@ -0,0 +1,356 @@ +/* + * Copyright (C) 2014 Dragon Research Labs ("DRL") + * Portions copyright (C) 2006--2008 American Registry for Internet Numbers ("ARIN") + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notices and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ARIN DISCLAIM ALL + * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL OR + * ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL + * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA + * OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER + * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THIS SOFTWARE. + */ + +/* $Id$ */ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#ifndef FILENAME_MAX +#define FILENAME_MAX 1024 +#endif + +#ifndef ADDR_RAW_BUF_LEN +#define ADDR_RAW_BUF_LEN 16 +#endif + + + +/* + * Error handling. + */ + +#define _lose(_msg_, _file_) \ + do { \ + if (_file_) \ + fprintf(stderr, "%s:%d: %s: %s\n", __FILE__, __LINE__, _msg_, _file_); \ + else \ + fprintf(stderr, "%s:%d: %s\n", __FILE__, __LINE__, _msg_); \ + fprintf(stderr, "%s: %s\n", _msg_, _file_); \ + } while (0) + +#define lose(_msg_, _file_) \ + do { \ + _lose(_msg_, _file_); \ + goto done; \ + } while (0) + +#define lose_errno(_msg_, _file_) \ + do { \ + _lose(_msg_, _file_); \ + perror(NULL); \ + goto done; \ + } while (0) + +#define lose_openssl(_msg_, _file_) \ + do { \ + _lose(_msg_, _file_); \ + ERR_print_errors_fp(stderr); \ + goto done; \ + } while (0) + + +/* + * Extract a ROA prefix from the ASN.1 bitstring encoding. 
+ */ +static int extract_roa_prefix(unsigned char *addr, + unsigned *prefixlen, + const ASN1_BIT_STRING *bs, + const unsigned afi) +{ + unsigned length; + + switch (afi) { + case IANA_AFI_IPV4: length = 4; break; + case IANA_AFI_IPV6: length = 16; break; + default: return 0; + } + + if (bs->length < 0 || bs->length > length) + return 0; + + if (bs->length > 0) { + memcpy(addr, bs->data, bs->length); + if ((bs->flags & 7) != 0) { + unsigned char mask = 0xFF >> (8 - (bs->flags & 7)); + addr[bs->length - 1] &= ~mask; + } + } + + memset(addr + bs->length, 0, length - bs->length); + + *prefixlen = (bs->length * 8) - (bs->flags & 7); + + return 1; +} + +/* + * Check str for a trailing suffix. + */ +static int has_suffix(const char *str, const char *suffix) +{ + size_t len_str, len_suffix; + assert(str != NULL && suffix != NULL); + len_str = strlen(str); + len_suffix = strlen(suffix); + return len_str >= len_suffix && !strcmp(str + len_str - len_suffix, suffix); +} + +/* + * Handle one object. 
+ */ +static void file_handler(const char *filename, const unsigned prefix_afi, const unsigned char *prefix, const unsigned long prefixlen) +{ + unsigned char roa_prefix[ADDR_RAW_BUF_LEN]; + unsigned roa_prefixlen, roa_maxprefixlen, plen; + CMS_ContentInfo *cms = NULL; + BIO *b = NULL; + ROA *r = NULL; + int i, j, k, n; + unsigned long asid; + + if (!(b = BIO_new_file(filename, "rb"))) + lose_openssl("Couldn't open CMS file", filename); + + if ((cms = d2i_CMS_bio(b, NULL)) == NULL) + lose_openssl("Couldn't read CMS file", filename); + + BIO_free(b); + + if ((b = BIO_new(BIO_s_mem())) == NULL) + lose_openssl("Couldn't open ROA", filename); + + if (CMS_verify(cms, NULL, NULL, NULL, b, CMS_NOCRL | CMS_NO_SIGNER_CERT_VERIFY | CMS_NO_ATTR_VERIFY | CMS_NO_CONTENT_VERIFY) <= 0) + lose_openssl("Couldn't parse ROA CMS", filename); + + if ((r = ASN1_item_d2i_bio(ASN1_ITEM_rptr(ROA), b, NULL)) == NULL) + lose_openssl("Couldn't parse ROA", filename); + + asid = (unsigned long) ASN1_INTEGER_get(r->asID); + + for (i = 0; i < sk_ROAIPAddressFamily_num(r->ipAddrBlocks); i++) { + ROAIPAddressFamily *f = sk_ROAIPAddressFamily_value(r->ipAddrBlocks, i); + + /* + * AFI must match, SAFI must be null + */ + if (f->addressFamily->length != 2 || + prefix_afi != ((f->addressFamily->data[0] << 8) | (f->addressFamily->data[1]))) + continue; + + for (j = 0; j < sk_ROAIPAddress_num(f->addresses); j++) { + ROAIPAddress *a = sk_ROAIPAddress_value(f->addresses, j); + + if (!extract_roa_prefix(roa_prefix, &roa_prefixlen, a->IPAddress, prefix_afi)) + lose("Malformed ROA", filename); + + /* + * If the prefix we're looking for is bigger than the ROA + * prefix, the ROA can't possibly cover. + */ + if (prefixlen < roa_prefixlen) + continue; + + if (a->maxLength) + roa_maxprefixlen = ASN1_INTEGER_get(a->maxLength); + else + roa_maxprefixlen = roa_prefixlen; + + /* + * If the prefix we're looking for is smaller than the smallest + * allowed slice of the ROA prefix, the ROA can't possibly + * cover. 
+ */ + if (prefixlen > roa_maxprefixlen) + continue; + + /* + * If we get this far, we have to compare prefixes. + */ + assert(roa_prefixlen <= ADDR_RAW_BUF_LEN * 8); + plen = prefixlen < roa_prefixlen ? prefixlen : roa_prefixlen; + k = 0; + while (plen >= 8 && prefix[k] == roa_prefix[k]) { + plen -= 8; + k++; + } + if (plen > 8 || ((prefix[k] ^ roa_prefix[k]) & (0xFF << (8 - plen))) != 0) + continue; + + /* + * If we get here, we have a match. + */ + printf("ASN %lu prefix ", asid); + switch (prefix_afi) { + case IANA_AFI_IPV4: + printf("%u.%u.%u.%u", prefix[0], prefix[1], prefix[2], prefix[3]); + break; + case IANA_AFI_IPV6: + for (n = 16; n > 1 && prefix[n-1] == 0x00 && prefix[n-2] == 0x00; n -= 2) + ; + for (k = 0; k < n; k += 2) + printf("%x%s", (prefix[k] << 8) | prefix[k+1], (k < 14 ? ":" : "")); + if (k < 16) + printf(":"); + break; + } + printf("/%lu ROA %s\n", prefixlen, filename); + goto done; + } + } + + done: + BIO_free(b); + CMS_ContentInfo_free(cms); + ROA_free(r); +} + +/* + * Walk a directory tree + */ +static int handle_directory(const char *name, const unsigned prefix_afi, const unsigned char *prefix, const unsigned long prefixlen) +{ + char path[FILENAME_MAX]; + struct dirent *d; + size_t len; + DIR *dir; + int ret = 0, need_slash; + + assert(name); + len = strlen(name); + assert(len > 0 && len < sizeof(path)); + need_slash = name[len - 1] != '/'; + + if ((dir = opendir(name)) == NULL) + lose_errno("Couldn't open directory", name); + + while ((d = readdir(dir)) != NULL) { + if (!strcmp(d->d_name, ".") || !strcmp(d->d_name, "..")) + continue; + if (len + strlen(d->d_name) + need_slash >= sizeof(path)) + lose("Constructed path name too long", d->d_name); + strcpy(path, name); + if (need_slash) + strcat(path, "/"); + strcat(path, d->d_name); + switch (d->d_type) { + case DT_DIR: + if (!handle_directory(path, prefix_afi, prefix, prefixlen)) + lose("Directory walk failed", path); + continue; + default: + if (has_suffix(path, ".roa")) + 
file_handler(path, prefix_afi, prefix, prefixlen); + continue; + } + } + + ret = 1; + + done: + if (dir) + closedir(dir); + return ret; +} + +static void usage (const char *jane, const int code) +{ + fprintf(code ? stderr : stdout, "usage: %s authtree prefix [prefix...]\n", jane); + exit(code); +} + +int main (int argc, char *argv[]) +{ + unsigned char prefix[ADDR_RAW_BUF_LEN]; + unsigned long prefixlen; + unsigned afi; + char *s = NULL, *p = NULL; + int i, len, ret = 1; + + if (argc == 2 && (!strcmp(argv[1], "-h") || !strcmp(argv[1], "--help"))) + usage(argv[0], 0); + + if (argc < 3) + usage(argv[0], 1); + + OpenSSL_add_all_algorithms(); + ERR_load_crypto_strings(); + + for (i = 2; i < argc; i++) { + + if ((s = strdup(argv[i])) == NULL) + lose("Couldn't strdup()", argv[i]); + + if ((p = strchr(s, '/')) != NULL) + *p++ = '\0'; + + len = a2i_ipadd(prefix, s); + + switch (len) { + case 4: afi = IANA_AFI_IPV4; break; + case 16: afi = IANA_AFI_IPV6; break; + default: lose("Unknown AFI", argv[i]); + } + + if (p) { + if (*p == '\0' || + (prefixlen = strtoul(p, &p, 10)) == ULONG_MAX || + *p != '\0' || + prefixlen > ADDR_RAW_BUF_LEN * 8) + lose("Bad prefix length", argv[i]); + } else { + prefixlen = len * 8; + } + + assert(prefixlen <= ADDR_RAW_BUF_LEN * 8); + + free(s); + p = s = NULL; + + if (!handle_directory(argv[1], afi, prefix, prefixlen)) + goto done; + + } + + ret = 0; + + done: + if (s) + free(s); + return ret; +} diff --git a/rp/utils/find_roa/Makefile.in b/rp/utils/find_roa/Makefile.in new file mode 100644 index 00000000..36c68e01 --- /dev/null +++ b/rp/utils/find_roa/Makefile.in @@ -0,0 +1,56 @@ +# $Id$ + +NAME = find_roa + +BIN = ${NAME} +SRC = ${NAME}.c +OBJ = ${NAME}.o + +CFLAGS = @CFLAGS@ +LDFLAGS = @LDFLAGS@ +LIBS = @LIBS@ + +INSTALL = @INSTALL@ -m 555 + +prefix = @prefix@ +exec_prefix = @exec_prefix@ +datarootdir = @datarootdir@ +datadir = @datadir@ +localstatedir = @localstatedir@ +sharedstatedir = @sharedstatedir@ +sysconfdir = @sysconfdir@ +bindir = 
@bindir@ +sbindir = @sbindir@ +libexecdir = @libexecdir@ +libdir = @libdir@ + +abs_top_srcdir = @abs_top_srcdir@ +abs_top_builddir = @abs_top_builddir@ + +all: ${BIN} + +clean: + rm -rf ${BIN} ${OBJ} ${BIN}.dSYM + +${BIN}: ${SRC} + ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} + + +ROA_DIR = ${abs_top_builddir}/rcynic/rcynic-data/authenticated + +TEST_ARGS = ${ROA_DIR} 10.3.0.44 10.2.0.6 10.0.0.0/24 + +test: ${BIN} +# if test -d ${ROA_DIR}; then ./${BIN} ${TEST_ARGS} ; else :; fi + if test -d ${ROA_DIR}; then sh ./test_roa.sh ${TEST_ARGS} ; else :; fi + +install: all + if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi + ${INSTALL} ${BIN} ${DESTDIR}${bindir} + +deinstall uninstall: + rm -f ${DESTDIR}${bindir}/${BIN} + +distclean: clean + rm -rf hashed-pem-dir + rm -f Makefile diff --git a/rp/utils/hashdir.c b/rp/utils/hashdir.c new file mode 100644 index 00000000..1b5b0f46 --- /dev/null +++ b/rp/utils/hashdir.c @@ -0,0 +1,217 @@ +/* + * Copyright (C) 2014 Dragon Research Labs ("DRL") + * Portions copyright (C) 2006--2008 American Registry for Internet Numbers ("ARIN") + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notices and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ARIN DISCLAIM ALL + * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR + * ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL + * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA + * OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER + * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THIS SOFTWARE. 
+ */ + +/* $Id$ */ + +/* + * Read a directory tree of DER certificates and CRLs and copy + * them into a PEM format directory with names in the hash format + * that OpenSSL's lookup routines expect. + */ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#ifndef FILENAME_MAX +#define FILENAME_MAX 1024 +#endif + +static int verbose = 1; + +/* + * Error handling. + */ + +#define _lose(_msg_, _file_) \ + do { \ + fprintf(stderr, "%s: %s\n", _msg_, _file_); \ + } while (0) + +#define lose(_msg_, _file_) \ + do { \ + _lose(_msg_, _file_); \ + goto done; \ + } while (0) + +#define lose_errno(_msg_, _file_) \ + do { \ + _lose(_msg_, _file_); \ + perror(NULL); \ + goto done; \ + } while (0) + +#define lose_openssl(_msg_, _file_) \ + do { \ + _lose(_msg_, _file_); \ + ERR_print_errors_fp(stderr); \ + goto done; \ + } while (0) + +/* + * Check str for a trailing suffix. + */ +static int has_suffix(const char *str, const char *suffix) +{ + size_t len_str, len_suffix; + assert(str != NULL && suffix != NULL); + len_str = strlen(str); + len_suffix = strlen(suffix); + return len_str >= len_suffix && !strcmp(str + len_str - len_suffix, suffix); +} + +/* + * Handle one object. + */ +static void file_handler(const char *filename, const char *targetdir) +{ + char path[FILENAME_MAX]; + unsigned long hash; + const char *fmt; + X509_CRL *crl = NULL; + X509 *cer = NULL; + BIO *b = NULL; + int i, is_crl; + + if (has_suffix(filename, ".cer")) + is_crl = 0; + else if (has_suffix(filename, ".crl")) + is_crl = 1; + else + return; /* Ignore if neither certificate nor CRL */ + + if (verbose) + printf("Reading %s\n", filename); + + if (!(b = BIO_new_file(filename, "rb"))) + lose_openssl("Couldn't open input file", filename); + + if (is_crl + ? 
!(crl = d2i_X509_CRL_bio(b, NULL)) + : !(cer = d2i_X509_bio(b, NULL))) + lose_openssl("Couldn't read DER object", filename); + + BIO_free(b); + b = NULL; + + if (is_crl) { + hash = X509_NAME_hash(X509_CRL_get_issuer(crl)); + fmt = "%s/%08lx.r%d"; + } else { + hash = X509_subject_name_hash(cer); + fmt = "%s/%08lx.%d"; + } + + for (i = 0; i < INT_MAX; i++) + if (snprintf(path, sizeof(path), fmt, targetdir, hash, i) == sizeof(path)) + lose("Path too long", filename); + else if (access(path, F_OK)) + break; + if (i == INT_MAX) + lose("No pathname available", filename); + + if (verbose) + printf("Writing %s\n", path); + + if (!(b = BIO_new_file(path, "w"))) + lose_openssl("Couldn't open output file", path); + + if (is_crl + ? !PEM_write_bio_X509_CRL(b, crl) + : !PEM_write_bio_X509(b, cer)) + lose_openssl("Couldn't write PEM object", path); + + done: + X509_free(cer); + X509_CRL_free(crl); + BIO_free(b); +} + +/* + * Walk a directory tree + */ +static int handle_directory(const char *name, const char *targetdir) +{ + char path[FILENAME_MAX]; + struct dirent *d; + size_t len; + DIR *dir; + int ret = 0, need_slash; + + assert(name); + len = strlen(name); + assert(len > 0 && len < sizeof(path)); + need_slash = name[len - 1] != '/'; + + if ((dir = opendir(name)) == NULL) + lose_errno("Couldn't open directory", name); + + while ((d = readdir(dir)) != NULL) { + if (!strcmp(d->d_name, ".") || !strcmp(d->d_name, "..")) + continue; + if (len + strlen(d->d_name) + need_slash >= sizeof(path)) + lose("Constructed path name too long", d->d_name); + strcpy(path, name); + if (need_slash) + strcat(path, "/"); + strcat(path, d->d_name); + switch (d->d_type) { + case DT_DIR: + if (!handle_directory(path, targetdir)) + lose("Directory walk failed", path); + continue; + default: + file_handler(path, targetdir); + continue; + } + } + + ret = 1; + + done: + if (dir) + closedir(dir); + return ret; +} + +static void usage (const char *jane, const int code) +{ + fprintf(code ? 
stderr : stdout, "usage: %s input-directory output-directory\n", jane); + exit(code); +} + +int main(int argc, char *argv[]) +{ + if (argc == 2 && (!strcmp(argv[1], "-h") || !strcmp(argv[1], "--help"))) + usage(argv[0], 0); + + if (argc != 3) + usage(argv[0], 1); + + return !handle_directory(argv[1], argv[2]); +} diff --git a/rp/utils/hashdir/Makefile.in b/rp/utils/hashdir/Makefile.in new file mode 100644 index 00000000..c0cf448a --- /dev/null +++ b/rp/utils/hashdir/Makefile.in @@ -0,0 +1,55 @@ +# $Id$ + +NAME = hashdir + +BIN = ${NAME} +SRC = ${NAME}.c +OBJ = ${NAME}.o + +CFLAGS = @CFLAGS@ +LDFLAGS = @LDFLAGS@ +LIBS = @LIBS@ + +INSTALL = @INSTALL@ -m 555 + +prefix = @prefix@ +exec_prefix = @exec_prefix@ +datarootdir = @datarootdir@ +datadir = @datadir@ +localstatedir = @localstatedir@ +sharedstatedir = @sharedstatedir@ +sysconfdir = @sysconfdir@ +bindir = @bindir@ +sbindir = @sbindir@ +libexecdir = @libexecdir@ +libdir = @libdir@ + +abs_top_srcdir = @abs_top_srcdir@ +abs_top_builddir = @abs_top_builddir@ + +all: ${BIN} + +clean:: + rm -rf ${BIN} ${OBJ} ${BIN}.dSYM + +${BIN}: ${SRC} + ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} + +INPUT = ${abs_top_builddir}/rcynic/rcynic-data/authenticated +OUTPUT = hashed-pem-dir + +test: ${BIN} + if test -d ${INPUT}; then rm -rf ${OUTPUT} && mkdir ${OUTPUT} && ./hashdir ${INPUT} ${OUTPUT}; else :; fi + +clean:: + rm -rf ${OUTPUT} + +install: all + if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi + ${INSTALL} ${BIN} ${DESTDIR}${bindir} + +deinstall uninstall: + rm -f ${DESTDIR}${bindir}/${BIN} + +distclean: clean + rm -f Makefile diff --git a/rp/utils/print_roa.c b/rp/utils/print_roa.c new file mode 100644 index 00000000..c88fc092 --- /dev/null +++ b/rp/utils/print_roa.c @@ -0,0 +1,384 @@ +/* + * Copyright (C) 2014 Dragon Research Labs ("DRL") + * Portions copyright (C) 2008 American Registry for Internet Numbers ("ARIN") + * + * Permission to use, copy, modify, and distribute this software 
for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notices and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ARIN DISCLAIM ALL + * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR + * ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL + * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA + * OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER + * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THIS SOFTWARE. + */ + +/* $Id$ */ + +/* + * Decoder test for ROAs. + * + * NB: This does -not- check the CMS signatures, just the encoding. + */ + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +/* + * How much buffer space do we need for a raw address? + */ +#define ADDR_RAW_BUF_LEN 16 + + + +/* + * Extract signing time from CMS message. 
+ */ + +static char * +extract_signingTime(CMS_ContentInfo *cms, char *buffer, size_t buflen) +{ + STACK_OF(CMS_SignerInfo) *sis = NULL; + CMS_SignerInfo *si = NULL; + X509_ATTRIBUTE *xa = NULL; + ASN1_TYPE *so = NULL; + int i = -1; + + if (cms == NULL || + buffer == NULL || + buflen < sizeof("20010401123456Z") || + (sis = CMS_get0_SignerInfos(cms)) == NULL || + sk_CMS_SignerInfo_num(sis) != 1 || + (si = sk_CMS_SignerInfo_value(sis, 0)) < 0 || + (i = CMS_signed_get_attr_by_NID(si, NID_pkcs9_signingTime, -1)) < 0 || + (xa = CMS_signed_get_attr(si, i)) == NULL || + xa->single || + sk_ASN1_TYPE_num(xa->value.set) != 1 || + (so = sk_ASN1_TYPE_value(xa->value.set, 0)) == NULL) + return NULL; + + assert(buflen > 2); + buffer[buflen - 1] = '\0'; + + switch (so->type) { + case V_ASN1_UTCTIME: + strcpy(buffer, (so->value.utctime->data[0] >= '5') ? "19" : "20"); + return strncpy(buffer + 2, (const char *) so->value.utctime->data, buflen - 3); + case V_ASN1_GENERALIZEDTIME: + return strncpy(buffer, (const char *) so->value.generalizedtime->data, buflen - 1); + default: + return NULL; + } +} + + + +/* + * Expand the bitstring form of an address into a raw byte array. + * At the moment this is coded for simplicity, not speed. + */ +static void addr_expand(unsigned char *addr, + const ASN1_BIT_STRING *bs, + const int length) +{ + assert(bs->length >= 0 && bs->length <= length); + if (bs->length > 0) { + memcpy(addr, bs->data, bs->length); + if ((bs->flags & 7) != 0) { + unsigned char mask = 0xFF >> (8 - (bs->flags & 7)); + addr[bs->length - 1] &= ~mask; + } + } + memset(addr + bs->length, 0, length - bs->length); +} + +/* + * Extract the prefix length from a bitstring. + */ +#define addr_prefixlen(bs) ((int) ((bs)->length * 8 - ((bs)->flags & 7))) + +/* + * Read ROA (CMS object) in DER format. + * + * NB: When invoked this way, CMS_verify() does -not- verify, it just decodes the ASN.1. 
+ * + * Well, OK, this function has evolved to doing a lot more than just + * reading the object. Refactor or at least rename, someday. + */ +static ROA *read_roa(const char *filename, + const int print_cms, + const int print_roa, + const int print_signerinfo, + const int print_brief, + const int print_signingtime) +{ + unsigned char addr[ADDR_RAW_BUF_LEN]; + CMS_ContentInfo *cms = NULL; + const ASN1_OBJECT *oid = NULL; + char *asID = NULL; + BIGNUM *bn = NULL; + ROA *r = NULL; + char buf[512]; + BIO *b = NULL; + int i, j, k, n; + + if ((b = BIO_new_file(filename, "r")) == NULL || + (cms = d2i_CMS_bio(b, NULL)) == NULL) + goto done; + BIO_free(b); + b = NULL; + + if (print_signerinfo) { + STACK_OF(CMS_SignerInfo) *signerInfos = CMS_get0_SignerInfos(cms); + STACK_OF(X509) *certs = CMS_get1_certs(cms); + STACK_OF(X509_CRL) *crls = CMS_get1_crls(cms); + printf("Certificates: %d\n", certs ? sk_X509_num(certs) : 0); + printf("CRLs: %d\n", crls ? sk_X509_CRL_num(crls) : 0); + for (i = 0; i < sk_CMS_SignerInfo_num(signerInfos); i++) { + CMS_SignerInfo *si = sk_CMS_SignerInfo_value(signerInfos, i); + ASN1_OCTET_STRING *hash = NULL; + printf("SignerId[%d]: ", i); + if (CMS_SignerInfo_get0_signer_id(si, &hash, NULL, NULL) && hash != NULL) + for (j = 0; j < hash->length; j++) + printf("%02x%s", hash->data[j], j == hash->length - 1 ? "" : ":"); + else + printf("[Could not read SID]"); + if (certs) + for (j = 0; j < sk_X509_num(certs); j++) + if (!CMS_SignerInfo_cert_cmp(si, sk_X509_value(certs, j))) + printf(" [Matches certificate %d]", j); + if ((j = CMS_signed_get_attr_by_NID(si, NID_pkcs9_signingTime, -1)) >= 0) { + X509_ATTRIBUTE *xa = CMS_signed_get_attr(si, j); + if (xa && !xa->single && sk_ASN1_TYPE_num(xa->value.set) == 1) { + ASN1_TYPE *so = sk_ASN1_TYPE_value(xa->value.set, 0); + switch (so->type) { + case V_ASN1_UTCTIME: + printf(" [signingTime(U) %s%s]", + so->value.utctime->data[0] < '5' ? 
"20" : "19", + so->value.utctime->data); + break; + case V_ASN1_GENERALIZEDTIME: + printf(" [signingTime(G) %s]", + so->value.generalizedtime->data); + break; + } + } + } + printf("\n"); + } + sk_X509_pop_free(certs, X509_free); + sk_X509_CRL_pop_free(crls, X509_CRL_free); + } + + if ((b = BIO_new(BIO_s_mem())) == NULL || + CMS_verify(cms, NULL, NULL, NULL, b, CMS_NOCRL | CMS_NO_SIGNER_CERT_VERIFY | CMS_NO_ATTR_VERIFY | CMS_NO_CONTENT_VERIFY) <= 0 || + (r = ASN1_item_d2i_bio(ASN1_ITEM_rptr(ROA), b, NULL)) == NULL) + goto done; + BIO_free(b); + b = NULL; + + if (print_roa) { + + bn = ASN1_INTEGER_to_BN(r->asID, NULL); + asID = BN_bn2dec(bn); + + if (print_brief) { + + if (print_signingtime) { + char buffer[sizeof("20010401123456Z")], *b; + if (!extract_signingTime(cms, buffer, sizeof(buffer))) + goto done; + printf("%s ", buffer); + } + + fputs(asID, stdout); + + } else { + + if ((oid = CMS_get0_eContentType(cms)) == NULL) + goto done; + OBJ_obj2txt(buf, sizeof(buf), oid, 0); + printf("eContentType: %s\n", buf); + + if (r->version) + printf("version: %ld\n", ASN1_INTEGER_get(r->version)); + else + printf("version: 0 [Defaulted]\n"); + printf("asID: %s\n", asID); + } + + for (i = 0; i < sk_ROAIPAddressFamily_num(r->ipAddrBlocks); i++) { + + ROAIPAddressFamily *f = sk_ROAIPAddressFamily_value(r->ipAddrBlocks, i); + + unsigned afi = (f->addressFamily->data[0] << 8) | (f->addressFamily->data[1]); + + if (!print_brief) { + printf(" addressFamily: %x", afi); + if (f->addressFamily->length == 3) + printf("[%x]", f->addressFamily->data[2]); + printf("\n"); + } + + for (j = 0; j < sk_ROAIPAddress_num(f->addresses); j++) { + ROAIPAddress *a = sk_ROAIPAddress_value(f->addresses, j); + + if (print_brief) + printf(" "); + else + printf(" IPaddress: "); + + switch (afi) { + + case IANA_AFI_IPV4: + addr_expand(addr, a->IPAddress, 4); + printf("%d.%d.%d.%d", addr[0], addr[1], addr[2], addr[3]); + break; + + case IANA_AFI_IPV6: + addr_expand(addr, a->IPAddress, 16); + for (n = 16; n 
> 1 && addr[n-1] == 0x00 && addr[n-2] == 0x00; n -= 2) + ; + for (k = 0; k < n; k += 2) + printf("%x%s", (addr[k] << 8) | addr[k+1], (k < 14 ? ":" : "")); + if (k < 16) + printf(":"); + if (k == 0) + printf(":"); + break; + + default: + if (!print_brief) { + for (k = 0; k < a->IPAddress->length; k++) + printf("%s%02x", (k > 0 ? ":" : ""), a->IPAddress->data[k]); + printf("[%d]", (int) (a->IPAddress->flags & 7)); + } + break; + + } + + printf("/%u", addr_prefixlen(a->IPAddress)); + + if (a->maxLength) + printf("-%ld", ASN1_INTEGER_get(a->maxLength)); + + if (!print_brief) + printf("\n"); + } + } + if (print_brief) + printf("\n"); + } + + if (print_cms) { + if (print_roa) + printf("\n"); + fflush(stdout); + if ((b = BIO_new(BIO_s_fd())) == NULL) + goto done; + BIO_set_fd(b, 1, BIO_NOCLOSE); + CMS_ContentInfo_print_ctx(b, cms, 0, NULL); + BIO_free(b); + b = NULL; + } + + done: + if (ERR_peek_error()) + ERR_print_errors_fp(stderr); + BIO_free(b); + BN_free(bn); + if (asID) + OPENSSL_free(asID); + CMS_ContentInfo_free(cms); + return r; +} + + + +const static struct option longopts[] = { + { "brief", no_argument, NULL, 'b' }, + { "print-cms", no_argument, NULL, 'c' }, + { "help", no_argument, NULL, 'h' }, + { "signingtime", no_argument, NULL, 's' }, + { NULL } +}; + +static int usage (const char *jane, const int code) +{ + FILE *out = code ? stderr : stdout; + int i; + + fprintf(out, "usage: %s [options] ROA [ROA...]\n", jane); + fprintf(out, "options:\n"); + for (i = 0; longopts[i].name != NULL; i++) + fprintf(out, " -%c --%s\n", longopts[i].val, longopts[i].name); + + return code; +} + +/* + * Main program. 
+ */ +int main (int argc, char *argv[]) +{ + int result = 0, print_brief = 0, print_signingtime = 0, print_cms = 0, c; + const char *jane = argv[0]; + ROA *r; + + OpenSSL_add_all_algorithms(); + ERR_load_crypto_strings(); + + while ((c = getopt_long(argc, argv, "bchs", longopts, NULL)) != -1) { + switch (c) { + case 'b': + print_brief = 1; + break; + case 'c': + print_cms = 1; + break; + case 's': + print_signingtime = 1; + break; + case 'h': + return usage(jane, 0); + default: + return usage(jane, 1); + } + } + + argc -= optind; + argv += optind; + + if (argc == 0) + return usage(jane, 1); + + while (argc-- > 0) { + r = read_roa(*argv++, print_cms, 1, !print_brief, print_brief, print_signingtime); + result |= r == NULL; + ROA_free(r); + } + return result; +} diff --git a/rp/utils/print_roa/Makefile.in b/rp/utils/print_roa/Makefile.in new file mode 100644 index 00000000..5999b351 --- /dev/null +++ b/rp/utils/print_roa/Makefile.in @@ -0,0 +1,52 @@ +# $Id$ + +NAME = print_roa + +BIN = ${NAME} +SRC = ${NAME}.c +OBJ = ${NAME}.o + +CFLAGS = @CFLAGS@ +LDFLAGS = @LDFLAGS@ +LIBS = @LIBS@ + +INSTALL = @INSTALL@ -m 555 + +prefix = @prefix@ +exec_prefix = @exec_prefix@ +datarootdir = @datarootdir@ +datadir = @datadir@ +localstatedir = @localstatedir@ +sharedstatedir = @sharedstatedir@ +sysconfdir = @sysconfdir@ +bindir = @bindir@ +sbindir = @sbindir@ +libexecdir = @libexecdir@ +libdir = @libdir@ + +abs_top_srcdir = @abs_top_srcdir@ +abs_top_builddir = @abs_top_builddir@ + +all: ${BIN} + +clean: + rm -rf ${BIN} ${OBJ} ${BIN}.dSYM + +${BIN}: ${SRC} + ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} + +ROA_DIR = ${abs_top_builddir}/rpkid/tests/smoketest.dir/publication + +test: all + -date -u +'now: %Y%m%d%H%M%SZ' + if test -d ${ROA_DIR}; then find ${ROA_DIR} -type f -name '*.roa' -print -exec ./${BIN} {} \; ; else :; fi + +install: all + if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi + ${INSTALL} ${BIN} ${DESTDIR}${bindir} + +deinstall 
uninstall: + rm -f ${DESTDIR}${bindir}/${BIN} + +distclean: clean + rm -f Makefile diff --git a/rp/utils/print_rpki_manifest.c b/rp/utils/print_rpki_manifest.c new file mode 100644 index 00000000..f55f9916 --- /dev/null +++ b/rp/utils/print_rpki_manifest.c @@ -0,0 +1,235 @@ +/* + * Copyright (C) 2014 Dragon Research Labs ("DRL") + * Portions copyright (C) 2008 American Registry for Internet Numbers ("ARIN") + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notices and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ARIN DISCLAIM ALL + * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR + * ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL + * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA + * OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER + * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THIS SOFTWARE. + */ + +/* $Id$ */ + +/* + * Decoder test for RPKI manifests. + * + * NB: This does -not- check the CMS signatures, just the encoding. + */ + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +/* + * Read manifest (CMS object) in DER format. + * + * NB: When invoked this way, CMS_verify() does -not- verify, it just decodes the ASN.1. + * + * OK, this does more than just reading the CMS. Refactor or rename, someday. 
+ */ + +static const Manifest *read_manifest(const char *filename, + const int print_cms, + const int print_manifest, + const int print_signerinfo) +{ + CMS_ContentInfo *cms = NULL; + const ASN1_OBJECT *oid = NULL; + const Manifest *m = NULL; + char *mftnum = NULL; + BIGNUM *bn = NULL; + char buf[512]; + BIO *b = NULL; + int i, j; + + if ((b = BIO_new_file(filename, "r")) == NULL || + (cms = d2i_CMS_bio(b, NULL)) == NULL) + goto done; + BIO_free(b); + b = NULL; + + if (print_signerinfo) { + STACK_OF(CMS_SignerInfo) *signerInfos = CMS_get0_SignerInfos(cms); + STACK_OF(X509) *certs = CMS_get1_certs(cms); + STACK_OF(X509_CRL) *crls = CMS_get1_crls(cms); + printf("Certificates: %d\n", certs ? sk_X509_num(certs) : 0); + printf("CRLs: %d\n", crls ? sk_X509_CRL_num(crls) : 0); + for (i = 0; i < sk_CMS_SignerInfo_num(signerInfos); i++) { + CMS_SignerInfo *si = sk_CMS_SignerInfo_value(signerInfos, i); + ASN1_OCTET_STRING *hash = NULL; + printf("SignerId[%d]: ", i); + if (CMS_SignerInfo_get0_signer_id(si, &hash, NULL, NULL) && hash != NULL) + for (j = 0; j < hash->length; j++) + printf("%02x%s", hash->data[j], j == hash->length - 1 ? "" : ":"); + else + printf("[Could not read SID]"); + if (certs) + for (j = 0; j < sk_X509_num(certs); j++) + if (!CMS_SignerInfo_cert_cmp(si, sk_X509_value(certs, j))) + printf(" [Matches certificate %d]", j); + if ((j = CMS_signed_get_attr_by_NID(si, NID_pkcs9_signingTime, -1)) >= 0) { + X509_ATTRIBUTE *xa = CMS_signed_get_attr(si, j); + if (xa && !xa->single && sk_ASN1_TYPE_num(xa->value.set) == 1) { + ASN1_TYPE *so = sk_ASN1_TYPE_value(xa->value.set, 0); + switch (so->type) { + case V_ASN1_UTCTIME: + printf(" [signingTime(U) %s%s]", + so->value.utctime->data[0] < '5' ? 
"20" : "19", + so->value.utctime->data); + break; + case V_ASN1_GENERALIZEDTIME: + printf(" [signingTime(G) %s]", + so->value.generalizedtime->data); + break; + } + } + } + printf("\n"); + } + sk_X509_pop_free(certs, X509_free); + sk_X509_CRL_pop_free(crls, X509_CRL_free); + } + + if ((b = BIO_new(BIO_s_mem())) == NULL || + CMS_verify(cms, NULL, NULL, NULL, b, CMS_NOCRL | CMS_NO_SIGNER_CERT_VERIFY | CMS_NO_ATTR_VERIFY | CMS_NO_CONTENT_VERIFY) <= 0 || + (m = ASN1_item_d2i_bio(ASN1_ITEM_rptr(Manifest), b, NULL)) == NULL) + goto done; + BIO_free(b); + b = NULL; + + if (print_manifest) { + + if ((oid = CMS_get0_eContentType(cms)) == NULL) + goto done; + OBJ_obj2txt(buf, sizeof(buf), oid, 0); + printf("eContentType: %s\n", buf); + + if (m->version) + printf("version: %ld\n", ASN1_INTEGER_get(m->version)); + else + printf("version: 0 [Defaulted]\n"); + + bn = ASN1_INTEGER_to_BN(m->manifestNumber, NULL); + mftnum = BN_bn2dec(bn); + printf("manifestNumber: %s\n", mftnum); + + printf("thisUpdate: %s\n", m->thisUpdate->data); + printf("nextUpdate: %s\n", m->nextUpdate->data); + OBJ_obj2txt(buf, sizeof(buf), m->fileHashAlg, 0); + printf("fileHashAlg: %s\n", buf); + + for (i = 0; i < sk_FileAndHash_num(m->fileList); i++) { + FileAndHash *fah = sk_FileAndHash_value(m->fileList, i); + printf("fileList[%3d]: ", i); + for (j = 0; j < fah->hash->length; j++) + printf("%02x%s", fah->hash->data[j], j == fah->hash->length - 1 ? 
" " : ":"); + printf(" %s\n", fah->file->data); + } + + if (X509_cmp_current_time(m->nextUpdate) < 0) + printf("MANIFEST IS STALE\n"); + } + + if (print_cms) { + if (print_manifest) + printf("\n"); + fflush(stdout); + if ((b = BIO_new(BIO_s_fd())) == NULL) + goto done; + BIO_set_fd(b, 1, BIO_NOCLOSE); + CMS_ContentInfo_print_ctx(b, cms, 0, NULL); + BIO_free(b); + b = NULL; + } + + done: + if (ERR_peek_error()) + ERR_print_errors_fp(stderr); + BIO_free(b); + BN_free(bn); + if (mftnum) + OPENSSL_free(mftnum); + CMS_ContentInfo_free(cms); + return m; +} + + + +const static struct option longopts[] = { + { "print-cms", no_argument, NULL, 'c' }, + { "help", no_argument, NULL, 'h' }, + { NULL } +}; + +static int usage (const char *jane, const int code) +{ + FILE *out = code ? stderr : stdout; + int i; + + fprintf(out, "usage: %s [options] manifest [manifest...]\n", jane); + fprintf(out, "options:\n"); + for (i = 0; longopts[i].name != NULL; i++) + fprintf(out, " -%c --%s\n", longopts[i].val, longopts[i].name); + + return code; +} + +/* + * Main program. 
+ */ +int main (int argc, char *argv[]) +{ + int result = 0, print_cms = 0, c; + const char *jane = argv[0]; + + OpenSSL_add_all_algorithms(); + ERR_load_crypto_strings(); + + while ((c = getopt_long(argc, argv, "ch", longopts, NULL)) != -1) { + switch (c) { + case 'c': + print_cms = 1; + break; + case 'h': + return usage(jane, 0); + default: + return usage(jane, 1); + } + } + + argc -= optind; + argv += optind; + + if (argc == 0) + return usage(jane, 1); + + while (argc-- > 0) + result |= read_manifest(*argv++, print_cms, 1, 1) == NULL; + return result; +} diff --git a/rp/utils/print_rpki_manifest/Makefile.in b/rp/utils/print_rpki_manifest/Makefile.in new file mode 100644 index 00000000..22f1b16b --- /dev/null +++ b/rp/utils/print_rpki_manifest/Makefile.in @@ -0,0 +1,52 @@ +# $Id$ + +NAME = print_rpki_manifest + +BIN = ${NAME} +SRC = ${NAME}.c +OBJ = ${NAME}.o + +CFLAGS = @CFLAGS@ +LDFLAGS = @LDFLAGS@ +LIBS = @LIBS@ + +INSTALL = @INSTALL@ -m 555 + +prefix = @prefix@ +exec_prefix = @exec_prefix@ +datarootdir = @datarootdir@ +datadir = @datadir@ +localstatedir = @localstatedir@ +sharedstatedir = @sharedstatedir@ +sysconfdir = @sysconfdir@ +bindir = @bindir@ +sbindir = @sbindir@ +libexecdir = @libexecdir@ +libdir = @libdir@ + +abs_top_srcdir = @abs_top_srcdir@ +abs_top_builddir = @abs_top_builddir@ + +all: ${BIN} + +clean: + rm -rf ${BIN} ${OBJ} ${BIN}.dSYM + +${BIN}: ${SRC} + ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} + +MANIFEST_DIR = ${abs_top_builddir}/rpkid/tests/smoketest.dir/publication + +test: all + -date -u +'now: %Y%m%d%H%M%SZ' + if test -d ${MANIFEST_DIR}; then find ${MANIFEST_DIR} -type f -name '*.mnf' -print -exec ./${BIN} {} \; ; else :; fi + +install: all + if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi + ${INSTALL} ${BIN} ${DESTDIR}${bindir} + +deinstall uninstall: + rm -f ${DESTDIR}${bindir}/${BIN} + +distclean: clean + rm -f Makefile diff --git a/rp/utils/scan_roas.c b/rp/utils/scan_roas.c new file mode 
100644 index 00000000..f32e3827 --- /dev/null +++ b/rp/utils/scan_roas.c @@ -0,0 +1,305 @@ +/* + * Copyright (C) 2014 Dragon Research Labs ("DRL") + * Portions copyright (C) 2011 Internet Systems Consortium ("ISC") + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notices and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL + * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR + * ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL + * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA + * OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER + * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THIS SOFTWARE. + */ + +/* $Id$ */ + +/* + * Decoder test for ROAs. + * + * NB: This does -not- check the CMS signatures, just the encoding. + */ + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +/* + * How much buffer space do we need for a raw address? + */ +#define ADDR_RAW_BUF_LEN 16 + +/* + * How long can a filesystem path be? + */ +#define PATH_MAX 2048 + + + +/* + * Extract signing time from CMS message. 
+ */ + +static char * +extract_signingTime(CMS_ContentInfo *cms, char *buffer, size_t buflen) +{ + STACK_OF(CMS_SignerInfo) *sis = NULL; + CMS_SignerInfo *si = NULL; + X509_ATTRIBUTE *xa = NULL; + ASN1_TYPE *so = NULL; + int i = -1; + + if (cms == NULL || + buffer == NULL || + buflen < sizeof("20010401123456Z") || + (sis = CMS_get0_SignerInfos(cms)) == NULL || + sk_CMS_SignerInfo_num(sis) != 1 || + (si = sk_CMS_SignerInfo_value(sis, 0)) < 0 || + (i = CMS_signed_get_attr_by_NID(si, NID_pkcs9_signingTime, -1)) < 0 || + (xa = CMS_signed_get_attr(si, i)) == NULL || + xa->single || + sk_ASN1_TYPE_num(xa->value.set) != 1 || + (so = sk_ASN1_TYPE_value(xa->value.set, 0)) == NULL) + return NULL; + + assert(buflen > 2); + buffer[buflen - 1] = '\0'; + + switch (so->type) { + case V_ASN1_UTCTIME: + strcpy(buffer, (so->value.utctime->data[0] >= '5') ? "19" : "20"); + return strncpy(buffer + 2, (const char *) so->value.utctime->data, buflen - 3); + case V_ASN1_GENERALIZEDTIME: + return strncpy(buffer, (const char *) so->value.generalizedtime->data, buflen - 1); + default: + return NULL; + } +} + + + +/* + * Expand the bitstring form of an address into a raw byte array. + * At the moment this is coded for simplicity, not speed. + */ +static void addr_expand(unsigned char *addr, + const ASN1_BIT_STRING *bs, + const int length) +{ + assert(bs->length >= 0 && bs->length <= length); + if (bs->length > 0) { + memcpy(addr, bs->data, bs->length); + if ((bs->flags & 7) != 0) { + unsigned char mask = 0xFF >> (8 - (bs->flags & 7)); + addr[bs->length - 1] &= ~mask; + } + } + memset(addr + bs->length, 0, length - bs->length); +} + +/* + * Extract the prefix length from a bitstring. + */ +#define addr_prefixlen(bs) ((int) ((bs)->length * 8 - ((bs)->flags & 7))) + +/* + * Read ROA (CMS object) in DER format. + * + * NB: When invoked this way, CMS_verify() does -not- verify, it just decodes the ASN.1. 
/*
 * Read and dump a single ROA.  Prints one line: signingTime, asID,
 * then each prefix (with optional -maxLength) from the ROA payload.
 * Returns non-zero on success, zero on any failure.
 *
 * NB: CMS_verify() is called with all verification disabled -- this
 * tool dumps content from an already-validated tree, it does not
 * itself validate signatures.
 */
static int read_roa(const char *filename)
{
  char buffer[sizeof("20010401123456Z")], *b; /* NOTE(review): b appears unused in this function */
  unsigned char addr[ADDR_RAW_BUF_LEN];
  CMS_ContentInfo *cms = NULL;
  const ASN1_OBJECT *oid = NULL;              /* NOTE(review): oid appears unused in this function */
  ROA *r = NULL;
  char buf[512];                              /* NOTE(review): buf appears unused in this function */
  BIO *bio;
  int i, j, k, n, ok;

  /* Read the DER-encoded CMS wrapper. */
  if ((bio = BIO_new_file(filename, "r")) == NULL ||
      (cms = d2i_CMS_bio(bio, NULL)) == NULL)
    goto done;
  BIO_free(bio);

  /* Extract the eContent (the ROA itself) into a memory BIO and parse it. */
  if ((bio = BIO_new(BIO_s_mem())) == NULL ||
      CMS_verify(cms, NULL, NULL, NULL, bio,
                 CMS_NOCRL | CMS_NO_SIGNER_CERT_VERIFY |
                 CMS_NO_ATTR_VERIFY | CMS_NO_CONTENT_VERIFY) <= 0 ||
      (r = ASN1_item_d2i_bio(ASN1_ITEM_rptr(ROA), bio, NULL)) == NULL)
    goto done;

  /*
   * NOTE(review): if extract_signingTime() fails we jump to done with
   * r != NULL, so the function still reports success (after printing
   * nothing) -- confirm this is intended.
   */
  if (!extract_signingTime(cms, buffer, sizeof(buffer)))
    goto done;
  printf("%s ", buffer);

  printf("%ld", ASN1_INTEGER_get(r->asID));

  for (i = 0; i < sk_ROAIPAddressFamily_num(r->ipAddrBlocks); i++) {

    ROAIPAddressFamily *f = sk_ROAIPAddressFamily_value(r->ipAddrBlocks, i);

    /* addressFamily is a two-octet big-endian AFI value. */
    unsigned afi = (f->addressFamily->data[0] << 8) | (f->addressFamily->data[1]);

    for (j = 0; j < sk_ROAIPAddress_num(f->addresses); j++) {
      ROAIPAddress *a = sk_ROAIPAddress_value(f->addresses, j);

      printf(" ");

      switch (afi) {

      case IANA_AFI_IPV4:
        addr_expand(addr, a->IPAddress, 4);
        printf("%d.%d.%d.%d", addr[0], addr[1], addr[2], addr[3]);
        break;

      case IANA_AFI_IPV6:
        addr_expand(addr, a->IPAddress, 16);
        /*
         * Drop trailing all-zero 16-bit groups, then print the
         * remaining groups, closing with "::" shorthand if anything
         * was dropped.
         */
        for (n = 16; n > 1 && addr[n-1] == 0x00 && addr[n-2] == 0x00; n -= 2)
          ;
        for (k = 0; k < n; k += 2)
          printf("%x%s", (addr[k] << 8) | addr[k+1], (k < 14 ? ":" : ""));
        if (k < 16)
          printf(":");
        if (k == 0)
          printf(":");
        break;

      default:
        /* Unknown AFI: nothing printed but the prefix length below. */
        break;
      }

      printf("/%u", addr_prefixlen(a->IPAddress));

      /* maxLength is OPTIONAL in the ROA ASN.1; print only when present. */
      if (a->maxLength)
        printf("-%ld", ASN1_INTEGER_get(a->maxLength));
    }
  }
  printf("\n");

 done:
  ok = r != NULL;

  if (ERR_peek_error())
    ERR_print_errors_fp(stderr);
  BIO_free(bio);
  CMS_ContentInfo_free(cms);
  ROA_free(r);

  return ok;
}



/**
 * Check str for a trailing suffix.
 */
static int endswith(const char *str, const char *suffix)
{
  size_t len_str, len_suffix;
  assert(str != NULL && suffix != NULL);
  len_str = strlen(str);
  len_suffix = strlen(suffix);
  return len_str >= len_suffix && !strcmp(str + len_str - len_suffix, suffix);
}



/**
 * Walk directory tree, looking for ROAs (files named "*.roa").
 * Returns non-zero iff the walk and every read_roa() succeeded.
 */
static int walk(const char *name)
{
  int need_slash, ok = 1;
  char path[PATH_MAX];
  struct dirent *d;
  size_t len;
  DIR *dir;

  assert(name);
  len = strlen(name);

  assert(len > 0 && len < sizeof(path));
  need_slash = name[len - 1] != '/';

  if ((dir = opendir(name)) == NULL)
    return 0;

  while ((d = readdir(dir)) != NULL) {
    if (!strcmp(d->d_name, ".") ||
        !strcmp(d->d_name, ".."))
      continue;
    /* Refuse to overflow path[]. */
    if (len + strlen(d->d_name) + need_slash >= sizeof(path)) {
      ok = 0;
      goto done;
    }
    strcpy(path, name);
    if (need_slash)
      strcat(path, "/");
    strcat(path, d->d_name);
    /*
     * NOTE(review): relies on dirent d_type, which some filesystems
     * report as DT_UNKNOWN; entries there would fall into the default
     * (non-directory) branch -- confirm target platforms.
     */
    switch (d->d_type) {
    case DT_DIR:
      ok &= walk(path);
      continue;
    default:
      if (endswith(path, ".roa"))
        ok &= read_roa(path);
      continue;
    }
  }

 done:
  closedir(dir);
  return ok;
}



/**
 * Print usage message to stdout (code == 0) or stderr, then exit(code).
 */
static void usage (const char *jane, const int code)
{
  fprintf(code ? stderr : stdout, "usage: %s authtree [authtree...]\n", jane);
  exit(code);
}

/*
 * Main program: walk each tree named on the command line.
 * Exit status is 0 iff every ROA encountered was readable.
 */
int main (int argc, char *argv[])
{
  int i, ok = 1;

  if (argc == 2 && (!strcmp(argv[1], "-h") || !strcmp(argv[1], "--help")))
    usage(argv[0], 0);

  if (argc < 2)
    usage(argv[0], 1);

  OpenSSL_add_all_algorithms();
  ERR_load_crypto_strings();

  for (i = 1; i < argc; i++)
    ok &= walk(argv[i]);

  return !ok;
}
# provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS.  IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.

"""
Scan rcynic validated output looking for router certificates, print
out stuff that the rpki-rtr code cares about.
"""

# This program represents a weird temporary state, mostly to avoid
# diving into a recursive yak shaving exercise.
#
# Under the old scheme, anything used by the RP code should be either
# C code or pure Python code using just the standard libraries.  This
# has gotten silly, but we haven't yet refactored the current packaged
# builds from two packages into three (adding a -libs package).
#
# So, by rights, this program should be a C monstrosity written using
# the OpenSSL C API.  I started coding it that way, but it was just
# too painful for something we're probably going to rewrite as a few
# lines of Python once we refactor, but by the same token I didn't
# want to delay router certificate support until the refactoring.
#
# So this program anticipates the new scheme of things, but makes one
# concession to current reality: if it has a problem importing the
# RPKI-specific libraries, it just quietly exits as if everything were
# fine and there simply are no router certificates to report.  This
# isn't the right answer in the long run, but will suffice to avoid
# further bald yaks.

import os
import sys
import base64

try:
  import rpki.POW
  import rpki.oids
except ImportError:
  # See comment above: pretend there are no router certificates rather
  # than crash when the rpki libraries are not installed.
  sys.exit(0)

# Root of the rcynic authenticated output tree to scan.
# NOTE(review): sys.argv[1] is accessed unconditionally -- running with
# no argument raises IndexError; confirm callers always supply it.
rcynic_dir = sys.argv[1]

for root, dirs, files in os.walk(rcynic_dir):
  for fn in files:
    if not fn.endswith(".cer"):
      continue
    x = rpki.POW.X509.derReadFile(os.path.join(root, fn))

    # Only BGPSEC router certificates (id-kp-bgpsec-router EKU) matter here.
    if rpki.oids.id_kp_bgpsec_router not in (x.getEKU() or ()):
      continue

    # Output format: g(SKI) ASN... base64(public key), one line per cert.
    sys.stdout.write(base64.urlsafe_b64encode(x.getSKI()).rstrip("="))
    for min_asn, max_asn in x.getRFC3779()[0]:
      for asn in xrange(min_asn, max_asn + 1):
        sys.stdout.write(" %s" % asn)
    sys.stdout.write(" %s\n" % base64.b64encode(x.getPublicKey().derWritePublic()))
("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Strip boring parts of print_roa's output to make a (somewhat) terser +# description, one line per ROA. This is intended for use in +# comparing sets of ROAs using text comparision tools like "diff" or +# "comm". One could definitely do something prettier, but this +# suffices for basic tests. +# +# Use this as in a shell pipeline to postprocess print_roa's output. + +awk ' + /Certificate/ { + roa[++n] = ""; + } + /asID|addressFamily|IPaddress/ { + roa[n] = roa[n] " " $0; + } + END { + for (i in roa) + print roa[i]; + } +' | +tr -s \\011 \\040 | +sort -u diff --git a/rp/utils/table.awk b/rp/utils/table.awk new file mode 100644 index 00000000..d8627f67 --- /dev/null +++ b/rp/utils/table.awk @@ -0,0 +1,35 @@ +#!/usr/bin/awk -f +# $Id$ +# +# Reformat uri.c's output in a way that's more useful +# for some kinds of scripting. Perhaps this functionality should be +# part of uri.c itself, but for now this script will do. 
+ +BEGIN { + cmd = "find /var/rcynic/data/unauthenticated -type f -name '*.cer' -print0 | xargs -0 ./uri -d"; + while ((cmd | getline) == 1) { + if ($1 == "File") { + if (f) + print f, u, a, s, c; + a = s = c = "-"; + f = $2; + sub("/var/rcynic/data/unauthenticated/","rsync://"); + u = $2; + continue; + } + if ($1 == "SIA:") { + s = $2; + continue; + } + if ($1 == "AIA:") { + a = $2; + continue; + } + if ($1 == "CRL:") { + c = $2; + continue; + } + } + if (f != "-") + print f, u, a, s, c; +} diff --git a/rp/utils/test_roa.sh b/rp/utils/test_roa.sh new file mode 100644 index 00000000..43d20898 --- /dev/null +++ b/rp/utils/test_roa.sh @@ -0,0 +1,35 @@ +#!/bin/sh - +# +# Copyright (C) 2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+# +# $Id$ + +auth_dir="${1?"usage: $0 authenticated_certificate_tree prefix [prefix...]"}" + +rm -rf hashed-pem-dir +mkdir hashed-pem-dir + +../hashdir/hashdir "$auth_dir" hashed-pem-dir >/dev/null + +./find_roa "$@" | awk ' + $1 == "ASN" && $3 == "prefix" && $5 == "ROA" { + print ""; + print "Found match:" + print; + print "Verifying certificate chain and signatures:" + roa = $6; + if (!system("../../openssl/openssl/apps/openssl cms -verify -inform DER -out /dev/null -CApath hashed-pem-dir -in " roa)) + system("../print_roa/print_roa " roa); + }' diff --git a/rp/utils/uri.c b/rp/utils/uri.c new file mode 100644 index 00000000..6353e8e5 --- /dev/null +++ b/rp/utils/uri.c @@ -0,0 +1,248 @@ +/* + * Copyright (C) 2014 Dragon Research Labs ("DRL") + * Portions copyright (C) 2006--2008 American Registry for Internet Numbers ("ARIN") + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notices and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ARIN DISCLAIM ALL + * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR + * ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL + * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA + * OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER + * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THIS SOFTWARE. + */ + +/* $Id$ */ + +/* + * Extract and test URIs from certificates. This is a unit test of + * rcynic code, a utility, or both, depending on how it turns out. + * + * NB: OpenSSL insures that IA5 strings are null-terminated, so it's safe + * for us to ignore the length count. 
+ */ + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +static const unsigned char id_ad_caIssuers[] = {0x2b, 0x6, 0x1, 0x5, 0x5, 0x7, 0x30, 0x2}; /* 1.3.6.1.5.5.7.48.2 */ +static const unsigned char id_ad_caRepository[] = {0x2b, 0x6, 0x1, 0x5, 0x5, 0x7, 0x30, 0x5}; /* 1.3.6.1.5.5.7.48.5 */ +static const unsigned char id_ad_signedObjectRepository[] = {0x2b, 0x6, 0x1, 0x5, 0x5, 0x7, 0x30, 0x9}; /* 1.3.6.1.5.5.7.48.9 */ +static const unsigned char id_ad_rpkiManifest[] = {0x2b, 0x6, 0x1, 0x5, 0x5, 0x7, 0x30, 0xa}; /* 1.3.6.1.5.5.7.48.10 */ +static const unsigned char id_ad_signedObject[] = {0x2b, 0x6, 0x1, 0x5, 0x5, 0x7, 0x30, 0xb}; /* 1.3.6.1.5.5.7.48.11 */ + +static X509 *read_cert(const char *filename, int format, int verbose) +{ + BIO *b = BIO_new_file(filename, "r"); + STACK_OF(X509) *certs = NULL; + CMS_ContentInfo *cms = NULL; + X509 *x = NULL; + + if (b == NULL) + return NULL; + + switch (format) { + case 'p': + x = PEM_read_bio_X509(b, NULL, NULL, NULL); + break; + case 'd': + x = d2i_X509_bio(b, NULL); + break; + } + + if (x == NULL) { + BIO_reset(b); + switch (format) { + case 'p': + cms = PEM_read_bio_CMS(b, NULL, NULL, NULL); + break; + case 'd': + cms = d2i_CMS_bio(b, NULL); + break; + } + if (cms != NULL && (certs = CMS_get1_certs(cms)) != NULL) + x = sk_X509_shift(certs); + } + + if (x != NULL && verbose) { + X509_print_fp(stdout, x); + printf("\n"); + } + + sk_X509_pop_free(certs, X509_free); + CMS_ContentInfo_free(cms); + BIO_free(b); + return x; +} + +enum decode_errors { + decode_ok, + decode_no_extension, + decode_not_exactly_one_DistributionPointName, + decode_has_reasons, + decode_has_CRLissuer, + decode_no_distributionPoint, + decode_not_GeneralName, + decode_not_URI, +}; + +static enum decode_errors decode_crldp(X509 *x, int verbose, int spaces) +{ + enum decode_errors err = decode_ok; + STACK_OF(DIST_POINT) *ds = X509_get_ext_d2i(x, NID_crl_distribution_points, NULL, NULL); + 
DIST_POINT *d; + GENERAL_NAME *n; + int i; + + if (!ds) { + err = decode_no_extension; + } else if (sk_DIST_POINT_num(ds) != 1) { + err = decode_not_exactly_one_DistributionPointName; + } else if ((d = sk_DIST_POINT_value(ds, 0))->reasons) { + err = decode_has_reasons; + } else if (d->CRLissuer) { + err = decode_has_CRLissuer; + } else if (!d->distpoint) { + err = decode_no_distributionPoint; + } else if (d->distpoint->type != 0) { + err = decode_not_GeneralName; + } else { + for (i = 0; i < sk_GENERAL_NAME_num(d->distpoint->name.fullname); i++) { + n = sk_GENERAL_NAME_value(d->distpoint->name.fullname, i); + if (n->type != GEN_URI) { + err = decode_not_GeneralName; + break; + } + printf(" CRLDP: %s%s", n->d.uniformResourceIdentifier->data, spaces ? "" : "\n"); + } + } + + sk_DIST_POINT_pop_free(ds, DIST_POINT_free); + return err; +} + +#define decode_xia(_x_, _v_, _s_, _tag_, _nid_, _oid_) \ + _decode_xia(_x_, _v_, _s_, _tag_, _nid_, _oid_, sizeof(_oid_)) + +static enum decode_errors _decode_xia(X509 *x, + int verbose, + int spaces, + char *tag, + int nid, + const unsigned char *oid, + int oidlen) +{ + enum decode_errors err = decode_ok; + AUTHORITY_INFO_ACCESS *as = X509_get_ext_d2i(x, nid, NULL, NULL); + ACCESS_DESCRIPTION *a; + int i; + + if (!as) { + err = decode_no_extension; + } else { + for (i = 0; i < sk_ACCESS_DESCRIPTION_num(as); i++) { + a = sk_ACCESS_DESCRIPTION_value(as, i); + if (a->location->type != GEN_URI) { + err = decode_not_URI; + break; + } + if (a->method->length == oidlen && !memcmp(a->method->data, oid, oidlen)) + printf(" %s: %s%s", tag, a->location->d.uniformResourceIdentifier->data, spaces ? 
"" : "\n"); + } + } + + sk_ACCESS_DESCRIPTION_pop_free(as, ACCESS_DESCRIPTION_free); + return err; +} + + + +const static struct option longopts[] = { + { "der", no_argument, NULL, 'd' }, + { "help", no_argument, NULL, 'h' }, + { "pem", no_argument, NULL, 'p' }, + { "spaces", no_argument, NULL, 's' }, + { "verbose", no_argument, NULL, 'v' }, + { NULL } +}; + +static int usage (const char *jane, const int code) +{ + FILE *out = code ? stderr : stdout; + int i; + + fprintf(out, "usage: %s [-p | -d] cert [cert...]\n", jane); + fprintf(out, "options:\n"); + for (i = 0; longopts[i].name != NULL; i++) + fprintf(out, " -%c --%s\n", longopts[i].val, longopts[i].name); + + return code; +} + +int main(int argc, char *argv[]) +{ + int c, format = 'd', spaces = 0, verbose = 0; + const char *jane = argv[0]; + X509 *x; + + OpenSSL_add_all_algorithms(); + ERR_load_crypto_strings(); + + while ((c = getopt(argc, argv, "pdsv")) > 0) { + switch (c) { + case 'v': + verbose = 1; + break; + case 'p': + case 'd': + format = c; + break; + case 's': + spaces = 1; + break; + case 'h': + return usage(jane, 0); + default: + return usage(jane, 1); + } + } + + argc -= optind; + argv += optind; + + if (argc == 0) + return usage(jane, 1); + + while (argc-- > 0) { + printf(spaces ? 
"%s" : "File: %s\n", *argv); + if ((x = read_cert(*argv++, format, verbose)) == NULL) { + printf("Couldn't read certificate, skipping\n"); + continue; + } + decode_xia(x, verbose, spaces, "AIA:caIssuers", NID_info_access, id_ad_caIssuers); + decode_xia(x, verbose, spaces, "SIA:caRepository", NID_sinfo_access, id_ad_caRepository); + decode_xia(x, verbose, spaces, "SIA:signedObjectRepository", NID_sinfo_access, id_ad_signedObjectRepository); + decode_xia(x, verbose, spaces, "SIA:rpkiManifest", NID_sinfo_access, id_ad_rpkiManifest); + decode_xia(x, verbose, spaces, "SIA:signedObject", NID_sinfo_access, id_ad_signedObject); + decode_crldp(x, verbose, spaces); + if (spaces) + putchar('\n'); + X509_free(x); + } + + return 0; +} diff --git a/rp/utils/uri/Makefile.in b/rp/utils/uri/Makefile.in new file mode 100644 index 00000000..fc545060 --- /dev/null +++ b/rp/utils/uri/Makefile.in @@ -0,0 +1,31 @@ +# $Id$ + +NAME = uri + +BIN = ${NAME} +SRC = ${NAME}.c +OBJ = ${NAME}.o + +CFLAGS = @CFLAGS@ +LDFLAGS = @LDFLAGS@ +LIBS = @LIBS@ + +abs_top_srcdir = @abs_top_srcdir@ +abs_top_builddir = @abs_top_builddir@ + +all: ${BIN} + +clean: + rm -rf ${BIN} ${OBJ} ${BIN}.dSYM + +${BIN}: ${SRC} + ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} + +test: + @true + +install deinstall uninstall: + @true + +distclean: clean + rm -f Makefile diff --git a/rpki/POW/__init__.py b/rpki/POW/__init__.py new file mode 100644 index 00000000..d3796245 --- /dev/null +++ b/rpki/POW/__init__.py @@ -0,0 +1,7 @@ +from _POW import * + +# Set callback to let POW construct rpki.sundial.datetime objects + +from rpki.sundial import datetime as sundial_datetime +customDatetime(sundial_datetime) +del sundial_datetime diff --git a/rpki/__init__.py b/rpki/__init__.py new file mode 100644 index 00000000..9e090f63 --- /dev/null +++ b/rpki/__init__.py @@ -0,0 +1,2 @@ +# This file exists to tell Python that this the content of this +# directory constitute a Python package. 
diff --git a/rpki/adns.py b/rpki/adns.py new file mode 100644 index 00000000..a6a900ed --- /dev/null +++ b/rpki/adns.py @@ -0,0 +1,368 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2003--2007, 2009, 2010 Nominum, Inc. ("NOMINUM") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND NOMINUM DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR NOMINUM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Basic asynchronous DNS code, using asyncore and Bob Halley's excellent +dnspython package. +""" + +import asyncore +import socket +import time +import sys +import rpki.async +import rpki.sundial +import rpki.log + +try: + import dns.resolver, dns.rdatatype, dns.rdataclass, dns.name, dns.message + import dns.inet, dns.exception, dns.query, dns.rcode, dns.ipv4, dns.ipv6 +except ImportError: + if __name__ == "__main__": + sys.stderr.write("DNSPython not available, skipping rpki.adns unit test\n") + sys.exit(0) + else: + raise + +## @var resolver +# Resolver object, shared by everything using this module + +resolver = dns.resolver.Resolver() +if resolver.cache is None: + resolver.cache = dns.resolver.Cache() + +## @var nameservers +# Nameservers from resolver.nameservers converted to (af, address) +# pairs. 
# The latter turns out to be a more useful form for us to use
# internally, because it simplifies the checks we need to make upon
# packet reception.

nameservers = []

# Classify each configured nameserver address as IPv4 or IPv6 by
# attempting to parse it as each in turn; complain (but continue) if
# it is neither.
for ns in resolver.nameservers:
  try:
    nameservers.append((socket.AF_INET, dns.ipv4.inet_aton(ns)))
    continue
  except Exception:
    pass
  try:
    nameservers.append((socket.AF_INET6, dns.ipv6.inet_aton(ns)))
    continue
  except Exception:
    pass
  rpki.log.error("Couldn't parse nameserver address %r" % ns)

class dispatcher(asyncore.dispatcher):
  """
  Basic UDP socket reader for use with asyncore.

  cb is called as cb(af, from_host, from_port, wire) for each received
  packet; eb is called with the exception on socket errors.
  """

  def __init__(self, cb, eb, af, bufsize = 65535):
    asyncore.dispatcher.__init__(self)
    self.cb = cb
    self.eb = eb
    self.af = af
    self.bufsize = bufsize
    self.create_socket(af, socket.SOCK_DGRAM)

  def handle_read(self):
    """
    Receive a packet, hand it off to query class callback.
    """
    wire, from_address = self.recvfrom(self.bufsize)
    self.cb(self.af, from_address[0], from_address[1], wire)

  def handle_error(self):
    """
    Pass errors to query class errback.
    """
    self.eb(sys.exc_info()[1])

  def handle_connect(self):
    """
    Quietly ignore UDP "connection" events.
    """
    pass

  def writable(self):
    """
    We don't need to hear about UDP socket becoming writable.
    """
    return False


class query(object):
  """
  Simplified (no search paths) asynchronous adaptation of
  dns.resolver.Resolver.query() (q.v.).

  On success cb(self, answer) is called; on failure eb(self, exception).
  """

  def __init__(self, cb, eb, qname, qtype = dns.rdatatype.A, qclass = dns.rdataclass.IN):
    # Normalize qname/qtype/qclass from text forms if necessary.
    if isinstance(qname, (str, unicode)):
      qname = dns.name.from_text(qname)
    if isinstance(qtype, str):
      qtype = dns.rdatatype.from_text(qtype)
    if isinstance(qclass, str):
      qclass = dns.rdataclass.from_text(qclass)
    assert qname.is_absolute()
    self.cb = cb
    self.eb = eb
    self.qname = qname
    self.qtype = qtype
    self.qclass = qclass
    self.start = time.time()
    # Defer actual work so construction never runs callbacks reentrantly.
    rpki.async.event_defer(self.go)

  def go(self):
    """
    Start running the query.  Check our cache before doing network
    query; if we find an answer there, just return it.  Otherwise
    start the network query.
    """
    if resolver.cache:
      answer = resolver.cache.get((self.qname, self.qtype, self.qclass))
    else:
      answer = None
    if answer:
      self.cb(self, answer)
    else:
      self.timer = rpki.async.timer()
      self.sockets = {}
      self.request = dns.message.make_query(self.qname, self.qtype, self.qclass)
      if resolver.keyname is not None:
        self.request.use_tsig(resolver.keyring, resolver.keyname, resolver.keyalgorithm)
      self.request.use_edns(resolver.edns, resolver.ednsflags, resolver.payload)
      self.response = None
      # Exponential backoff between passes over the nameserver list.
      self.backoff = 0.10
      self.nameservers = nameservers[:]
      self.loop1()

  def loop1(self):
    """
    Outer loop.  If we haven't got a response yet and still have
    nameservers to check, start inner loop.  Otherwise, we're done.
    """
    self.timer.cancel()
    if self.response is None and self.nameservers:
      self.iterator = rpki.async.iterator(self.nameservers[:], self.loop2, self.done2)
    else:
      self.done1()

  def loop2(self, iterator, nameserver):
    """
    Inner loop.  Send query to next nameserver in our list, unless
    we've hit the overall timeout for this query.
    """
    self.timer.cancel()
    try:
      timeout = resolver._compute_timeout(self.start)
    except dns.resolver.Timeout, e:
      self.lose(e)
    else:
      af, addr = nameserver
      # Lazily create one UDP socket per address family and reuse it.
      if af not in self.sockets:
        self.sockets[af] = dispatcher(self.socket_cb, self.socket_eb, af)
      self.sockets[af].sendto(self.request.to_wire(),
                              (dns.inet.inet_ntop(af, addr), resolver.port))
      self.timer.set_handler(self.socket_timeout)
      self.timer.set_errback(self.socket_eb)
      self.timer.set(rpki.sundial.timedelta(seconds = timeout))

  def socket_timeout(self):
    """
    No answer from nameserver, move on to next one (inner loop).
    """
    self.response = None
    self.iterator()

  def socket_eb(self, e):
    """
    UDP socket signaled error.  If it really is some kind of socket
    error, handle as if we've timed out on this nameserver; otherwise,
    pass error back to caller.
    """
    self.timer.cancel()
    if isinstance(e, socket.error):
      self.response = None
      self.iterator()
    else:
      self.lose(e)

  def socket_cb(self, af, from_host, from_port, wire):
    """
    Received a packet that might be a DNS message.  If it doesn't look
    like it came from one of our nameservers, just drop it and leave
    the timer running.  Otherwise, try parsing it: if it's an answer,
    we're done, otherwise handle error appropriately and move on to
    next nameserver.
    """
    sender = (af, dns.inet.inet_pton(af, from_host))
    if from_port != resolver.port or sender not in self.nameservers:
      return
    self.timer.cancel()
    try:
      self.response = dns.message.from_wire(wire, keyring = self.request.keyring, request_mac = self.request.mac, one_rr_per_rrset = False)
    except dns.exception.FormError:
      # Garbled response: stop asking this server.
      self.nameservers.remove(sender)
    else:
      rcode = self.response.rcode()
      if rcode in (dns.rcode.NOERROR, dns.rcode.NXDOMAIN):
        self.done1()
        return
      if rcode != dns.rcode.SERVFAIL:
        # Hard error other than SERVFAIL: this server is useless to us.
        self.nameservers.remove(sender)
    self.response = None
    self.iterator()

  def done2(self):
    """
    Done with inner loop.  If we still haven't got an answer and
    haven't (yet?) eliminated all of our nameservers, wait a little
    while before starting the cycle again, unless we've hit the
    timeout threshold for the whole query.
    """
    if self.response is None and self.nameservers:
      try:
        delay = rpki.sundial.timedelta(seconds = min(resolver._compute_timeout(self.start), self.backoff))
        self.backoff *= 2
        self.timer.set_handler(self.loop1)
        self.timer.set_errback(self.lose)
        self.timer.set(delay)
      except dns.resolver.Timeout, e:
        self.lose(e)
    else:
      self.loop1()

  def cleanup(self):
    """
    Shut down our timer and sockets.
    """
    self.timer.cancel()
    for s in self.sockets.itervalues():
      s.close()

  def lose(self, e):
    """
    Something bad happened.  Clean up, then pass error back to caller.
    """
    self.cleanup()
    self.eb(self, e)

  def done1(self):
    """
    Done with outer loop.  If we got a useful answer, cache it, then
    pass it back to caller; if we got an error, pass the appropriate
    exception back to caller.
    """
    self.cleanup()
    try:
      if not self.nameservers:
        raise dns.resolver.NoNameservers
      if self.response.rcode() == dns.rcode.NXDOMAIN:
        raise dns.resolver.NXDOMAIN
      answer = dns.resolver.Answer(self.qname, self.qtype, self.qclass, self.response)
      if resolver.cache:
        resolver.cache.put((self.qname, self.qtype, self.qclass), answer)
      self.cb(self, answer)
    except (rpki.async.ExitNow, SystemExit):
      raise
    except Exception, e:
      self.lose(e)

class getaddrinfo(object):
  """
  Asynchronous rough equivalent of socket.getaddrinfo(): issue A and
  AAAA queries for host, collect (af, address) pairs, then call
  cb(result) or eb(exception).
  """

  # Maps DNS rdata types to the corresponding socket address families.
  typemap = { dns.rdatatype.A    : socket.AF_INET,
              dns.rdatatype.AAAA : socket.AF_INET6 }

  def __init__(self, cb, eb, host, address_families = typemap.values()):
    self.cb = cb
    self.eb = eb
    self.host = host
    self.result = []
    self.queries = [query(self.done, self.lose, host, qtype)
                    for qtype in self.typemap
                    if self.typemap[qtype] in address_families]

  def done(self, q, answer):
    # answer may be None when called from lose() on NoAnswer.
    if answer is not None:
      for a in answer:
        self.result.append((self.typemap[a.rdtype], a.address))
    self.queries.remove(q)
    if not self.queries:
      self.cb(self.result)

  def lose(self, q, e):
    if isinstance(e, dns.resolver.NoAnswer):
      # No records of this type is not an error, just an empty result.
      self.done(q, None)
    else:
      for q in self.queries:
        q.cleanup()
      self.eb(e)
for r in result)) + + def lose(self, e): + print "getaddrinfo(%s) failed: %r" % (self.qname, e) + + class test_query(object): + + def __init__(self, qname, qtype = dns.rdatatype.A, qclass = dns.rdataclass.IN): + self.qname = qname + self.qtype = qtype + self.qclass = qclass + query(self.done, self.lose, qname, qtype = qtype, qclass = qclass) + + def done(self, q, result): + print "query(%s, %s, %s) returned: %s" % ( + self.qname, + dns.rdatatype.to_text(self.qtype), + dns.rdataclass.to_text(self.qclass), + ", ".join(str(r) for r in result)) + + def lose(self, q, e): + print "getaddrinfo(%s, %s, %s) failed: %r" % ( + self.qname, + dns.rdatatype.to_text(self.qtype), + dns.rdataclass.to_text(self.qclass), + e) + + if True: + for t in (dns.rdatatype.A, dns.rdatatype.AAAA, dns.rdatatype.HINFO): + test_query("subvert-rpki.hactrn.net", t) + test_query("nonexistant.rpki.net") + test_query("subvert-rpki.hactrn.net", qclass = dns.rdataclass.CH) + + for h in ("subvert-rpki.hactrn.net", "nonexistant.rpki.net"): + test_getaddrinfo(h) + + rpki.async.event_loop() diff --git a/rpki/async.py b/rpki/async.py new file mode 100644 index 00000000..49f98841 --- /dev/null +++ b/rpki/async.py @@ -0,0 +1,420 @@ +# $Id$ +# +# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
"""
Utilities for event-driven programming.
"""

import asyncore
import signal
import traceback
import gc
import sys
import rpki.log
import rpki.sundial

# Re-export asyncore's loop-exit exception under our own name.
ExitNow = asyncore.ExitNow

class iterator(object):
  """
  Iteration construct for event-driven code.  Takes three
  arguments:

  - Some kind of iterable object

  - A callback to call on each item in the iteration

  - A callback to call after the iteration terminates.

  The item callback receives two arguments: the callable iterator
  object and the current value of the iteration.  It should call the
  iterator (or arrange for the iterator to be called) when it is time
  to continue to the next item in the iteration.

  The termination callback receives no arguments.

  Special case for memory constrained cases: if keyword argument
  pop_list is True, iterable must be a list, which is modified in
  place, popping items off of it until it's empty.
  """

  def __init__(self, iterable, item_callback, done_callback, unwind_stack = True, pop_list = False):
    assert not pop_list or isinstance(iterable, list), "iterable must be a list when using pop_list"
    self.item_callback = item_callback
    self.done_callback = done_callback if done_callback is not None else lambda: None
    # Remember where we were constructed, purely for debug logging.
    self.caller_file, self.caller_line, self.caller_function = traceback.extract_stack(limit = 2)[0][0:3]
    self.unwind_stack = unwind_stack
    self.pop_list = pop_list
    try:
      if self.pop_list:
        self.iterator = iterable
      else:
        self.iterator = iter(iterable)
    except (ExitNow, SystemExit):
      raise
    except Exception:
      rpki.log.debug("Problem constructing iterator for %r" % (iterable,))
      raise
    self.doit()

  def __repr__(self):
    return rpki.log.log_repr(self,
                             "created at %s:%s" % (self.caller_file,
                                                   self.caller_line),
                             self.caller_function)

  def __call__(self):
    # Optionally bounce through the timer queue so the caller's stack
    # unwinds before the next item is processed.
    if self.unwind_stack:
      event_defer(self.doit)
    else:
      self.doit()

  def doit(self):
    """
    Implement the iterator protocol: attempt to call the item handler
    with the next iteration value, call the termination handler if the
    iterator signaled StopIteration.
    """

    try:
      if self.pop_list:
        val = self.iterator.pop(0)
      else:
        val = self.iterator.next()
    except (IndexError, StopIteration):
      # IndexError covers the pop_list case, StopIteration the normal one.
      self.done_callback()
    else:
      self.item_callback(self, val)

## @var timer_queue
# Timer queue, kept sorted by expiration time.

timer_queue = []

class timer(object):
  """
  Timer construct for event-driven code.
  """

  ## @var gc_debug
  # Verbose chatter about timers states and garbage collection.
  gc_debug = False

  ## @var run_debug
  # Verbose chatter about timers being run.
  run_debug = False

  def __init__(self, handler = None, errback = None):
    self.set_handler(handler)
    self.set_errback(errback)
    self.when = None
    if self.gc_debug:
      self.trace("Creating %r" % self)

  def trace(self, msg):
    """
    Debug logging.
    """
    if self.gc_debug:
      bt = traceback.extract_stack(limit = 3)
      rpki.log.debug("%s from %s:%d" % (msg, bt[0][0], bt[0][1]))

  def set(self, when):
    """
    Set a timer.  Argument can be a datetime, to specify an absolute
    time, or a timedelta, to specify an offset time.
    """
    if self.gc_debug:
      self.trace("Setting %r to %r" % (self, when))
    if isinstance(when, rpki.sundial.timedelta):
      self.when = rpki.sundial.now() + when
    else:
      self.when = when
    assert isinstance(self.when, rpki.sundial.datetime), "%r: Expecting a datetime, got %r" % (self, self.when)
    if self not in timer_queue:
      timer_queue.append(self)
    # Keep the queue ordered by expiration so runq() can pop from the front.
    timer_queue.sort(key = lambda x: x.when)

  def __cmp__(self, other):
    # Identity comparison only: timers are equal iff they are the same object.
    return cmp(id(self), id(other))

  if gc_debug:
    def __del__(self):
      rpki.log.debug("Deleting %r" % self)

  def cancel(self):
    """
    Cancel a timer, if it was set.
    """
    if self.gc_debug:
      self.trace("Canceling %r" % self)
    try:
      # Remove every occurrence, in case of duplicates.
      while True:
        timer_queue.remove(self)
    except ValueError:
      pass

  def is_set(self):
    """
    Test whether this timer is currently set.
    """
    return self in timer_queue

  def set_handler(self, handler):
    """
    Set timer's expiration handler.  This is an alternative to
    subclassing the timer class, and may be easier to use when
    integrating timers into other classes (eg, the handler can be a
    bound method to an object in a class representing a network
    connection).
    """
    self.handler = handler

  def set_errback(self, errback):
    """
    Set a timer's errback.  Like set_handler(), for errbacks.
    """
    self.errback = errback

  @classmethod
  def runq(cls):
    """
    Run the timer queue: for each timer whose call time has passed,
    pull the timer off the queue and call its handler() method.

    Comparisons are made against time at which this function was
    called, so that even if new events keep getting scheduled, we'll
    return to the I/O loop reasonably quickly.
    """
    now = rpki.sundial.now()
    while timer_queue and now >= timer_queue[0].when:
      t = timer_queue.pop(0)
      if cls.run_debug:
        rpki.log.debug("Running %r" % t)
      try:
        if t.handler is not None:
          t.handler()
        else:
          rpki.log.warn("Timer %r expired with no handler set" % t)
      except (ExitNow, SystemExit):
        raise
      except Exception, e:
        if t.errback is not None:
          t.errback(e)
        else:
          rpki.log.error("Unhandled exception from timer %r: %s" % (t, e))
          rpki.log.traceback()

  def __repr__(self):
    return rpki.log.log_repr(self, self.when, repr(self.handler))

  @classmethod
  def seconds_until_wakeup(cls):
    """
    Calculate delay until next timer expires, or None if no timers are
    set and we should wait indefinitely.  Rounds up to avoid spinning
    in select() or poll().  We could calculate fractional seconds in
    the right units instead, but select() and poll() don't even take
    the same units (argh!), and we're not doing anything that
    hair-triggered, so rounding up is simplest.
    """
    if not timer_queue:
      return None
    now = rpki.sundial.now()
    if now >= timer_queue[0].when:
      return 0
    delay = timer_queue[0].when - now
    seconds = delay.convert_to_seconds()
    if delay.microseconds:
      seconds += 1
    return seconds

  @classmethod
  def clear(cls):
    """
    Cancel every timer on the queue.  We could just throw away the
    queue content, but this way we can notify subclasses that provide
    their own cancel() method.
    """
    while timer_queue:
      timer_queue.pop(0).cancel()

def _raiseExitNow(signum, frame):
  """
  Signal handler for event_loop().
  """
  raise ExitNow

def exit_event_loop():
  """
  Force exit from event_loop().
  """
  raise ExitNow

def event_defer(handler, delay = rpki.sundial.timedelta(seconds = 0)):
  """
  Use a near-term (default: zero interval) timer to schedule an event
  to run after letting the I/O system have a turn.
  """
  timer(handler).set(delay)

## @var debug_event_timing
# Enable insanely verbose logging of event timing

debug_event_timing = False
+ """ + old_signal_handlers = {} + while True: + save_sigs = len(old_signal_handlers) == 0 + try: + for sig in catch_signals: + old = signal.signal(sig, _raiseExitNow) + if save_sigs: + old_signal_handlers[sig] = old + while asyncore.socket_map or timer_queue: + t = timer.seconds_until_wakeup() + if debug_event_timing: + rpki.log.debug("Dismissing to asyncore.poll(), t = %s, q = %r" % (t, timer_queue)) + asyncore.poll(t, asyncore.socket_map) + timer.runq() + if timer.gc_debug: + gc.collect() + if gc.garbage: + for i in gc.garbage: + rpki.log.debug("GC-cycle %r" % i) + del gc.garbage[:] + except ExitNow: + break + except SystemExit: + raise + except ValueError, e: + if str(e) == "filedescriptor out of range in select()": + rpki.log.error("Something is badly wrong, select() thinks we gave it a bad file descriptor.") + rpki.log.error("Content of asyncore.socket_map:") + for fd in sorted(asyncore.socket_map.iterkeys()): + rpki.log.error(" fd %s obj %r" % (fd, asyncore.socket_map[fd])) + rpki.log.error("Not safe to continue due to risk of spin loop on select(). Exiting.") + sys.exit(1) + rpki.log.error("event_loop() exited with exception %r, this is not supposed to happen, restarting" % e) + except Exception, e: + rpki.log.error("event_loop() exited with exception %r, this is not supposed to happen, restarting" % e) + else: + break + finally: + for sig in old_signal_handlers: + signal.signal(sig, old_signal_handlers[sig]) + +class sync_wrapper(object): + """ + Synchronous wrapper around asynchronous functions. Running in + asynchronous mode at all times makes sense for event-driven daemons, + but is kind of tedious for simple scripts, hence this wrapper. + + The wrapped function should take at least two arguments: a callback + function and an errback function. If any arguments are passed to + the wrapper, they will be passed as additional arguments to the + wrapped function. 
+ """ + + res = None + err = None + + def __init__(self, func): + self.func = func + + def cb(self, res = None): + """ + Wrapped code has requested normal termination. Store result, and + exit the event loop. + """ + self.res = res + raise ExitNow + + def eb(self, err): + """ + Wrapped code raised an exception. Store exception data, then exit + the event loop. + """ + exc_info = sys.exc_info() + self.err = exc_info if exc_info[1] is err else err + raise ExitNow + + def __call__(self, *args, **kwargs): + + def thunk(): + try: + self.func(self.cb, self.eb, *args, **kwargs) + except ExitNow: + raise + except Exception, e: + self.eb(e) + + event_defer(thunk) + event_loop() + if self.err is None: + return self.res + elif isinstance(self.err, tuple): + raise self.err[0], self.err[1], self.err[2] + else: + raise self.err + +class gc_summary(object): + """ + Periodic summary of GC state, for tracking down memory bloat. + """ + + def __init__(self, interval, threshold = 0): + if isinstance(interval, (int, long)): + interval = rpki.sundial.timedelta(seconds = interval) + self.interval = interval + self.threshold = threshold + self.timer = timer(handler = self.handler) + self.timer.set(self.interval) + + def handler(self): + """ + Collect and log GC state for this period, reset timer. 
+ """ + rpki.log.debug("gc_summary: Running gc.collect()") + gc.collect() + rpki.log.debug("gc_summary: Summarizing (threshold %d)" % self.threshold) + total = {} + tuples = {} + for g in gc.get_objects(): + k = type(g).__name__ + total[k] = total.get(k, 0) + 1 + if isinstance(g, tuple): + k = ", ".join(type(x).__name__ for x in g) + tuples[k] = tuples.get(k, 0) + 1 + rpki.log.debug("gc_summary: Sorting result") + total = total.items() + total.sort(reverse = True, key = lambda x: x[1]) + tuples = tuples.items() + tuples.sort(reverse = True, key = lambda x: x[1]) + rpki.log.debug("gc_summary: Object type counts in descending order") + for name, count in total: + if count > self.threshold: + rpki.log.debug("gc_summary: %8d %s" % (count, name)) + rpki.log.debug("gc_summary: Tuple content type signature counts in descending order") + for types, count in tuples: + if count > self.threshold: + rpki.log.debug("gc_summary: %8d (%s)" % (count, types)) + rpki.log.debug("gc_summary: Scheduling next cycle") + self.timer.set(self.interval) diff --git a/rpki/cli.py b/rpki/cli.py new file mode 100644 index 00000000..1930f2b7 --- /dev/null +++ b/rpki/cli.py @@ -0,0 +1,277 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Utilities for writing command line tools. +""" + +import cmd +import glob +import shlex +import os.path +import argparse +import traceback + +try: + import readline + have_readline = True +except ImportError: + have_readline = False + +class BadCommandSyntax(Exception): + "Bad command line syntax." + +class ExitArgparse(Exception): + "Exit method from ArgumentParser." + + def __init__(self, message = None, status = 0): + self.message = message + self.status = status + +class Cmd(cmd.Cmd): + """ + Customized subclass of Python cmd module. + """ + + emptyline_repeats_last_command = False + + EOF_exits_command_loop = True + + identchars = cmd.IDENTCHARS + "/-." + + histfile = None + + last_command_failed = False + + def __init__(self, argv = None): + cmd.Cmd.__init__(self) + if argv: + self.onecmd(" ".join(argv)) + else: + self.cmdloop_with_history() + + def onecmd(self, line): + """ + Wrap error handling around cmd.Cmd.onecmd(). Might want to do + something kinder than showing a traceback, eventually. + """ + + self.last_command_failed = False + try: + return cmd.Cmd.onecmd(self, line) + except SystemExit: + raise + except ExitArgparse, e: + if e.message is not None: + print e.message + self.last_command_failed = e.status != 0 + return False + except BadCommandSyntax, e: + print e + except Exception: + traceback.print_exc() + self.last_command_failed = True + return False + + def do_EOF(self, arg): + if self.EOF_exits_command_loop and self.prompt: + print + return self.EOF_exits_command_loop + + def do_exit(self, arg): + """ + Exit program. 
+ """ + + return True + + do_quit = do_exit + + def emptyline(self): + """ + Handle an empty line. cmd module default is to repeat the last + command, which I find to be violation of the principal of least + astonishment, so my preference is that an empty line does nothing. + """ + + if self.emptyline_repeats_last_command: + cmd.Cmd.emptyline(self) + + def filename_complete(self, text, line, begidx, endidx): + """ + Filename completion handler, with hack to restore what I consider + the normal (bash-like) behavior when one hits the completion key + and there's only one match. + """ + + result = glob.glob(text + "*") + if len(result) == 1: + path = result.pop() + if os.path.isdir(path) or (os.path.islink(path) and os.path.isdir(os.path.join(path, "."))): + result.append(path + os.path.sep) + else: + result.append(path + " ") + return result + + def completenames(self, text, *ignored): + """ + Command name completion handler, with hack to restore what I + consider the normal (bash-like) behavior when one hits the + completion key and there's only one match. + """ + + result = cmd.Cmd.completenames(self, text, *ignored) + if len(result) == 1: + result[0] += " " + return result + + def help_help(self): + """ + Type "help [topic]" for help on a command, + or just "help" for a list of commands. + """ + + self.stdout.write(self.help_help.__doc__ + "\n") + + def complete_help(self, *args): + """ + Better completion function for help command arguments. + """ + + text = args[0] + names = self.get_names() + result = [] + for prefix in ("do_", "help_"): + result.extend(s[len(prefix):] for s in names if s.startswith(prefix + text) and s != "do_EOF") + return result + + if have_readline: + + def cmdloop_with_history(self): + """ + Better command loop, with history file and tweaked readline + completion delimiters. 
+ """ + + old_completer_delims = readline.get_completer_delims() + if self.histfile is not None: + try: + readline.read_history_file(self.histfile) + except IOError: + pass + try: + readline.set_completer_delims("".join(set(old_completer_delims) - set(self.identchars))) + self.cmdloop() + finally: + if self.histfile is not None and readline.get_current_history_length(): + readline.write_history_file(self.histfile) + readline.set_completer_delims(old_completer_delims) + + else: + + cmdloop_with_history = cmd.Cmd.cmdloop + + + +def yes_or_no(prompt, default = None, require_full_word = False): + """ + Ask a yes-or-no question. + """ + + prompt = prompt.rstrip() + _yes_or_no_prompts[default] + while True: + answer = raw_input(prompt).strip().lower() + if not answer and default is not None: + return default + if answer == "yes" or (not require_full_word and answer.startswith("y")): + return True + if answer == "no" or (not require_full_word and answer.startswith("n")): + return False + print 'Please answer "yes" or "no"' + +_yes_or_no_prompts = { + True : ' ("yes" or "no" ["yes"]) ', + False : ' ("yes" or "no" ["no"]) ', + None : ' ("yes" or "no") ' } + + +class NonExitingArgumentParser(argparse.ArgumentParser): + """ + ArgumentParser tweaked to throw ExitArgparse exception + rather than using sys.exit(), for use with command loop. + """ + + def exit(self, status = 0, message = None): + raise ExitArgparse(status = status, message = message) + + +def parsecmd(subparsers, *arg_clauses): + """ + Decorator to combine the argparse and cmd modules. + + subparsers is an instance of argparse.ArgumentParser (or subclass) which was + returned by calling the .add_subparsers() method on an ArgumentParser instance + intended to handle parsing for the entire program on the command line. + + arg_clauses is a series of defarg() invocations defining arguments to be parsed + by the argparse code. 
+ + The decorator will use arg_clauses to construct two separate argparse parser + instances: one will be attached to the global parser as a subparser, the + other will be used to parse arguments for this command when invoked by cmd. + + The decorator will replace the original do_whatever method with a wrapped version + which uses the local argparse instance to parse the single string supplied by + the cmd module. + + The intent is that, from the command's point of view, all of this should work + pretty much the same way regardless of whether the command was invoked from + the global command line or from within the cmd command loop. Either way, + the command method should get an argparse.Namespace object. + + In theory, we could generate a completion handler from the argparse definitions, + much as the separate argcomplete package does. In practice this is a lot of + work and I'm not ready to get into that just yet. + """ + + def decorate(func): + assert func.__name__.startswith("do_") + parser = NonExitingArgumentParser(description = func.__doc__, + prog = func.__name__[3:], + add_help = False) + subparser = subparsers.add_parser(func.__name__[3:], + description = func.__doc__, + help = func.__doc__.lstrip().partition("\n")[0]) + for positional, keywords in arg_clauses: + parser.add_argument(*positional, **keywords) + subparser.add_argument(*positional, **keywords) + subparser.set_defaults(func = func) + def wrapped(self, arg): + return func(self, parser.parse_args(shlex.split(arg))) + wrapped.argparser = parser + wrapped.__doc__ = func.__doc__ + return wrapped + return decorate + +def cmdarg(*positional, **keywords): + """ + Syntactic sugar to let us use keyword arguments normally when constructing + arguments for deferred calls to argparse.ArgumentParser.add_argument(). 
+ """ + + return positional, keywords diff --git a/rpki/config.py b/rpki/config.py new file mode 100644 index 00000000..9f26664e --- /dev/null +++ b/rpki/config.py @@ -0,0 +1,301 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Configuration file parsing utilities, layered on top of stock Python +ConfigParser module. +""" + +import ConfigParser +import os +import re + +## @var default_filename +# Default name of config file if caller doesn't specify one explictly. + +default_filename = "rpki.conf" + +## @var default_dirname +# Default name of directory to check for global config file, or None +# if no global config file. Autoconf-generated code may set this to a +# non-None value during script startup. + +try: + import rpki.autoconf + default_dirname = rpki.autoconf.sysconfdir +except ImportError: + default_dirname = None + +## @var default_envname +# Name of environment variable containing config file name. 
+ +default_envname = "RPKI_CONF" + +class parser(object): + """ + Extensions to stock Python ConfigParser: + + Read config file and set default section while initializing parser object. + + Support for OpenSSL-style subscripted options and a limited form of + OpenSSL-style indirect variable references (${section::option}). + + get-methods with default values and default section name. + + If no filename is given to the constructor (filename = None), we + check for an environment variable naming the config file, then we + check for a default filename in the current directory, then finally + we check for a global config file if autoconf provided a directory + name to check. + """ + + def __init__(self, filename = None, section = None, allow_missing = False): + + self.cfg = ConfigParser.RawConfigParser() + self.default_section = section + + filenames = [] + if filename is not None: + filenames.append(filename) + else: + if default_envname in os.environ: + filenames.append(os.environ[default_envname]) + filenames.append(default_filename) + if default_dirname is not None: + filenames.append("%s/%s" % (default_dirname, default_filename)) + + f = fn = None + + for fn in filenames: + try: + f = open(fn) + break + except IOError: + f = None + + if f is not None: + self.filename = fn + self.cfg.readfp(f, fn) + elif allow_missing: + self.filename = None + else: + raise + + def has_section(self, section): + """ + Test whether a section exists. + """ + + return self.cfg.has_section(section) + + def has_option(self, option, section = None): + """ + Test whether an option exists. + """ + + if section is None: + section = self.default_section + return self.cfg.has_option(section, option) + + def multiget(self, option, section = None): + """ + Parse OpenSSL-style foo.0, foo.1, ... subscripted options. + + Returns a list of values matching the specified option name. 
+ """ + + matches = [] + if section is None: + section = self.default_section + if self.cfg.has_option(section, option): + matches.append((-1, self.get(option, section = section))) + for key in self.cfg.options(section): + s = key.rsplit(".", 1) + if len(s) == 2 and s[0] == option and s[1].isdigit(): + matches.append((int(s[1]), self.get(option, section = section))) + matches.sort() + return [match[1] for match in matches] + + _regexp = re.compile("\\${(.*?)::(.*?)}") + + def _repl(self, m): + """ + Replacement function for indirect variable substitution. + This is intended for use with re.subn(). + """ + section, option = m.group(1, 2) + if section == "ENV": + return os.getenv(option, "") + else: + return self.cfg.get(section, option) + + def get(self, option, default = None, section = None): + """ + Get an option, perhaps with a default value. + """ + if section is None: + section = self.default_section + if default is not None and not self.cfg.has_option(section, option): + return default + val = self.cfg.get(section, option) + while True: + val, modified = self._regexp.subn(self._repl, val, 1) + if not modified: + return val + + def getboolean(self, option, default = None, section = None): + """ + Get a boolean option, perhaps with a default value. + """ + v = self.get(option, default, section) + if isinstance(v, str): + v = v.lower() + if v not in self.cfg._boolean_states: + raise ValueError, "Not a boolean: %s" % v + v = self.cfg._boolean_states[v] + return v + + def getint(self, option, default = None, section = None): + """ + Get an integer option, perhaps with a default value. + """ + return int(self.get(option, default, section)) + + def getlong(self, option, default = None, section = None): + """ + Get a long integer option, perhaps with a default value. + """ + return long(self.get(option, default, section)) + + def set_global_flags(self): + """ + Consolidated control for all the little global control flags + scattered through the libraries. 
This isn't a particularly good + place for this function to live, but it has to live somewhere and + making it a method of the config parser from which it gets all of + its data is less silly than the available alternatives. + """ + + import rpki.http + import rpki.x509 + import rpki.sql + import rpki.async + import rpki.log + import rpki.daemonize + + try: + rpki.http.debug_http = self.getboolean("debug_http") + except ConfigParser.NoOptionError: + pass + + try: + rpki.http.want_persistent_client = self.getboolean("want_persistent_client") + except ConfigParser.NoOptionError: + pass + + try: + rpki.http.want_persistent_server = self.getboolean("want_persistent_server") + except ConfigParser.NoOptionError: + pass + + try: + rpki.http.use_adns = self.getboolean("use_adns") + except ConfigParser.NoOptionError: + pass + + try: + rpki.http.enable_ipv6_clients = self.getboolean("enable_ipv6_clients") + except ConfigParser.NoOptionError: + pass + + try: + rpki.http.enable_ipv6_servers = self.getboolean("enable_ipv6_servers") + except ConfigParser.NoOptionError: + pass + + try: + rpki.x509.CMS_object.debug_cms_certs = self.getboolean("debug_cms_certs") + except ConfigParser.NoOptionError: + pass + + try: + rpki.sql.sql_persistent.sql_debug = self.getboolean("sql_debug") + except ConfigParser.NoOptionError: + pass + + try: + rpki.async.timer.gc_debug = self.getboolean("gc_debug") + except ConfigParser.NoOptionError: + pass + + try: + rpki.async.timer.run_debug = self.getboolean("timer_debug") + except ConfigParser.NoOptionError: + pass + + try: + rpki.x509.XML_CMS_object.dump_outbound_cms = rpki.x509.DeadDrop(self.get("dump_outbound_cms")) + except OSError, e: + rpki.log.warn("Couldn't initialize mailbox %s: %s" % (self.get("dump_outbound_cms"), e)) + except ConfigParser.NoOptionError: + pass + + try: + rpki.x509.XML_CMS_object.dump_inbound_cms = rpki.x509.DeadDrop(self.get("dump_inbound_cms")) + except OSError, e: + rpki.log.warn("Couldn't initialize mailbox %s: %s" % 
(self.get("dump_inbound_cms"), e)) + except ConfigParser.NoOptionError: + pass + + try: + rpki.x509.XML_CMS_object.check_inbound_schema = self.getboolean("check_inbound_schema") + except ConfigParser.NoOptionError: + pass + + try: + rpki.x509.XML_CMS_object.check_outbound_schema = self.getboolean("check_outbound_schema") + except ConfigParser.NoOptionError: + pass + + try: + rpki.async.gc_summary(self.getint("gc_summary"), self.getint("gc_summary_threshold", 0)) + except ConfigParser.NoOptionError: + pass + + try: + rpki.log.enable_tracebacks = self.getboolean("enable_tracebacks") + except ConfigParser.NoOptionError: + pass + + try: + rpki.daemonize.default_pid_directory = self.get("pid_directory") + except ConfigParser.NoOptionError: + pass + + try: + rpki.daemonize.pid_filename = self.get("pid_filename") + except ConfigParser.NoOptionError: + pass + + try: + rpki.x509.generate_insecure_debug_only_rsa_key = rpki.x509.insecure_debug_only_rsa_key_generator(*self.get("insecure-debug-only-rsa-key-db").split()) + except ConfigParser.NoOptionError: + pass + except: + rpki.log.warn("insecure-debug-only-rsa-key-db configured but initialization failed, check for corrupted database file") diff --git a/rpki/csv_utils.py b/rpki/csv_utils.py new file mode 100644 index 00000000..47caabdb --- /dev/null +++ b/rpki/csv_utils.py @@ -0,0 +1,112 @@ +# $Id$ +# +# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +CSV utilities, moved here from myrpki.py. +""" + +import csv +import os + +class BadCSVSyntax(Exception): + """ + Bad CSV syntax. + """ + +class csv_reader(object): + """ + Reader for tab-delimited text that's (slightly) friendlier than the + stock Python csv module (which isn't intended for direct use by + humans anyway, and neither was this package originally, but that + seems to be the way that it has evolved...). + + Columns parameter specifies how many columns users of the reader + expect to see; lines with fewer columns will be padded with None + values. + + Original API design for this class courtesy of Warren Kumari, but + don't blame him if you don't like what I did with his ideas. 
+ """ + + def __init__(self, filename, columns = None, min_columns = None, comment_characters = "#;"): + assert columns is None or isinstance(columns, int) + assert min_columns is None or isinstance(min_columns, int) + if columns is not None and min_columns is None: + min_columns = columns + self.filename = filename + self.columns = columns + self.min_columns = min_columns + self.comment_characters = comment_characters + self.file = open(filename, "r") + + def __iter__(self): + line_number = 0 + for line in self.file: + line_number += 1 + line = line.strip() + if not line or line[0] in self.comment_characters: + continue + fields = line.split() + if self.min_columns is not None and len(fields) < self.min_columns: + raise BadCSVSyntax, "%s:%d: Not enough columns in line %r" % (self.filename, line_number, line) + if self.columns is not None and len(fields) > self.columns: + raise BadCSVSyntax, "%s:%d: Too many columns in line %r" % (self.filename, line_number, line) + if self.columns is not None and len(fields) < self.columns: + fields += tuple(None for i in xrange(self.columns - len(fields))) + yield fields + + def __enter__(self): + return self + + def __exit__(self, _type, value, traceback): + self.file.close() + +class csv_writer(object): + """ + Writer object for tab delimited text. We just use the stock CSV + module in excel-tab mode for this. + + If "renmwo" is set (default), the file will be written to + a temporary name and renamed to the real filename after closing. + """ + + def __init__(self, filename, renmwo = True): + self.filename = filename + self.renmwo = "%s.~renmwo%d~" % (filename, os.getpid()) if renmwo else filename + self.file = open(self.renmwo, "w") + self.writer = csv.writer(self.file, dialect = csv.get_dialect("excel-tab")) + + def __enter__(self): + return self + + def __exit__(self, _type, value, traceback): + self.close() + + def close(self): + """ + Close this writer. 
+ """ + if self.file is not None: + self.file.close() + self.file = None + if self.filename != self.renmwo: + os.rename(self.renmwo, self.filename) + + def __getattr__(self, attr): + """ + Fake inheritance from whatever object csv.writer deigns to give us. + """ + return getattr(self.writer, attr) diff --git a/rpki/daemonize.py b/rpki/daemonize.py new file mode 100644 index 00000000..62b4ee4e --- /dev/null +++ b/rpki/daemonize.py @@ -0,0 +1,133 @@ +# $Id$ +# +# Copyright (C) 2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Some code borrowed from +# http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/ +# +# (which was explicitly placed in public domain by its author), and from +# +# /usr/src/lib/libc/gen/daemon.c +# +# (the libc implementation of daemon(3) on FreeBSD), so: +# +# Portions copyright (c) 1990, 1993 +# The Regents of the University of California. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. 
Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# 4. Neither the name of the University nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +# SUCH DAMAGE. + +""" +Make a normal program into a "daemon", like the 4.4BSD daemon(3) call. + +This doesn't quite follow either the 4.4BSD call or the Python 3.x library, +because it was written to fit into an existing package and I didn't +want to drag in yet another external library just for this. +""" + +import sys +import os +import atexit +import signal +import rpki.log + +# Does default_pid_directory need to be autoconf-configurable? + +## @var default_pid_directory +# Default directory to which to write process ID files. + +default_pid_directory = "/var/run/rpki" + +## @var pid_filename +# Configurable filename to which to write process ID file. +# pidfile argument to daemon() overrides this. 
+ +pid_filename = None + +def daemon(nochdir = False, noclose = False, pidfile = None): + """ + Make this program become a daemon, like 4.4BSD daemon(3), and + write its pid out to a file with cleanup on exit. + """ + + if pidfile is None: + if pid_filename is None: + prog = os.path.splitext(os.path.basename(sys.argv[0]))[0] + pidfile = os.path.join(default_pid_directory, "%s.pid" % prog) + else: + pidfile = pid_filename + + old_sighup_action = signal.signal(signal.SIGHUP, signal.SIG_IGN) + + try: + pid = os.fork() + except OSError, e: + sys.exit("fork() failed: %d (%s)" % (e.errno, e.strerror)) + else: + if pid > 0: + os._exit(0) + + if not nochdir: + os.chdir("/") + + os.setsid() + + if not noclose: + sys.stdout.flush() + sys.stderr.flush() + fd = os.open(os.devnull, os.O_RDWR) + os.dup2(fd, 0) + os.dup2(fd, 1) + os.dup2(fd, 2) + if fd > 2: + os.close(fd) + + signal.signal(signal.SIGHUP, old_sighup_action) + + def delete_pid_file(): + try: + os.unlink(pidfile) + except OSError: + pass + + atexit.register(delete_pid_file) + + try: + f = open(pidfile, "w") + f.write("%d\n" % os.getpid()) + f.close() + except IOError, e: + rpki.log.warn("Couldn't write PID file %s: %s" % (pidfile, e.strerror)) diff --git a/rpki/exceptions.py b/rpki/exceptions.py new file mode 100644 index 00000000..d8d3774e --- /dev/null +++ b/rpki/exceptions.py @@ -0,0 +1,367 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2013 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Exception definitions for RPKI modules. +""" + +class RPKI_Exception(Exception): + """ + Base class for RPKI exceptions. + """ + +class NotInDatabase(RPKI_Exception): + """ + Lookup failed for an object expected to be in the database. + """ + +class BadURISyntax(RPKI_Exception): + """ + Illegal syntax for a URI. + """ + +class BadStatusCode(RPKI_Exception): + """ + Unrecognized protocol status code. + """ + +class BadQuery(RPKI_Exception): + """ + Unexpected protocol query. + """ + +class DBConsistancyError(RPKI_Exception): + """ + Found multiple matches for a database query that shouldn't ever + return that. + """ + +class CMSVerificationFailed(RPKI_Exception): + """ + Verification of a CMS message failed. + """ + +class HTTPRequestFailed(RPKI_Exception): + """ + HTTP request failed. + """ + +class DERObjectConversionError(RPKI_Exception): + """ + Error trying to convert a DER-based object from one representation + to another. + """ + +class NotACertificateChain(RPKI_Exception): + """ + Certificates don't form a proper chain. + """ + +class BadContactURL(RPKI_Exception): + """ + Error trying to parse contact URL. + """ + +class BadClassNameSyntax(RPKI_Exception): + """ + Illegal syntax for a class_name. + """ + +class BadIssueResponse(RPKI_Exception): + """ + issue_response PDU with wrong number of classes or certificates. + """ + +class NotImplementedYet(RPKI_Exception): + """ + Internal error -- not implemented yet. + """ + +class BadPKCS10(RPKI_Exception): + """ + Bad PKCS #10 object. + """ + +class UpstreamError(RPKI_Exception): + """ + Received an error from upstream. 
+ """ + +class ChildNotFound(RPKI_Exception): + """ + Could not find specified child in database. + """ + +class BSCNotFound(RPKI_Exception): + """ + Could not find specified BSC in database. + """ + +class BadSender(RPKI_Exception): + """ + Unexpected XML sender value. + """ + +class ClassNameMismatch(RPKI_Exception): + """ + class_name does not match child context. + """ + +class ClassNameUnknown(RPKI_Exception): + """ + Unknown class_name. + """ + +class SKIMismatch(RPKI_Exception): + """ + SKI value in response does not match request. + """ + +class SubprocessError(RPKI_Exception): + """ + Subprocess returned unexpected error. + """ + +class BadIRDBReply(RPKI_Exception): + """ + Unexpected reply to IRDB query. + """ + +class NotFound(RPKI_Exception): + """ + Object not found in database. + """ + +class MustBePrefix(RPKI_Exception): + """ + Resource range cannot be expressed as a prefix. + """ + +class TLSValidationError(RPKI_Exception): + """ + TLS certificate validation error. + """ + +class MultipleTLSEECert(TLSValidationError): + """ + Received more than one TLS EE certificate. + """ + +class ReceivedTLSCACert(TLSValidationError): + """ + Received CA certificate via TLS. + """ + +class WrongEContentType(RPKI_Exception): + """ + Received wrong CMS eContentType. + """ + +class EmptyPEM(RPKI_Exception): + """ + Couldn't find PEM block to convert. + """ + +class UnexpectedCMSCerts(RPKI_Exception): + """ + Received CMS certs when not expecting any. + """ + +class UnexpectedCMSCRLs(RPKI_Exception): + """ + Received CMS CRLs when not expecting any. + """ + +class MissingCMSEEcert(RPKI_Exception): + """ + Didn't receive CMS EE cert when expecting one. + """ + +class MissingCMSCRL(RPKI_Exception): + """ + Didn't receive CMS CRL when expecting one. + """ + +class UnparsableCMSDER(RPKI_Exception): + """ + Alleged CMS DER wasn't parsable. + """ + +class CMSCRLNotSet(RPKI_Exception): + """ + CMS CRL has not been configured. 
+ """ + +class ServerShuttingDown(RPKI_Exception): + """ + Server is shutting down. + """ + +class NoActiveCA(RPKI_Exception): + """ + No active ca_detail for specified class. + """ + +class BadClientURL(RPKI_Exception): + """ + URL given to HTTP client does not match profile. + """ + +class ClientNotFound(RPKI_Exception): + """ + Could not find specified client in database. + """ + +class BadExtension(RPKI_Exception): + """ + Forbidden X.509 extension. + """ + +class ForbiddenURI(RPKI_Exception): + """ + Forbidden URI, does not start with correct base URI. + """ + +class HTTPClientAborted(RPKI_Exception): + """ + HTTP client connection closed while in request-sent state. + """ + +class BadPublicationReply(RPKI_Exception): + """ + Unexpected reply to publication query. + """ + +class DuplicateObject(RPKI_Exception): + """ + Attempt to create an object that already exists. + """ + +class EmptyROAPrefixList(RPKI_Exception): + """ + Can't create ROA with an empty prefix list. + """ + +class NoCoveringCertForROA(RPKI_Exception): + """ + Couldn't find a covering certificate to generate ROA. + """ + +class BSCNotReady(RPKI_Exception): + """ + BSC not yet in a usable state, signing_cert not set. + """ + +class HTTPUnexpectedState(RPKI_Exception): + """ + HTTP event occurred in an unexpected state. + """ + +class HTTPBadVersion(RPKI_Exception): + """ + HTTP couldn't parse HTTP version. + """ + +class HandleTranslationError(RPKI_Exception): + """ + Internal error translating protocol handle -> SQL id. + """ + +class NoObjectAtURI(RPKI_Exception): + """ + No object published at specified URI. + """ + +class CMSContentNotSet(RPKI_Exception): + """ + Inner content of a CMS_object has not been set. If object is known + to be valid, the .extract() method should be able to set the + content; otherwise, only the .verify() method (which checks + signatures) is safe. + """ + +class HTTPTimeout(RPKI_Exception): + """ + HTTP connection timed out. 
+ """ + +class BadIPResource(RPKI_Exception): + """ + Parse failure for alleged IP resource string. + """ + +class BadROAPrefix(RPKI_Exception): + """ + Parse failure for alleged ROA prefix string. + """ + +class CommandParseFailure(RPKI_Exception): + """ + Failed to parse command line. + """ + +class CMSCertHasExpired(RPKI_Exception): + """ + CMS certificate has expired. + """ + +class TrustedCMSCertHasExpired(RPKI_Exception): + """ + Trusted CMS certificate has expired. + """ + +class MultipleCMSEECert(RPKI_Exception): + """ + Can't have more than one CMS EE certificate in validation chain. + """ + +class ResourceOverlap(RPKI_Exception): + """ + Overlapping resources in resource_set. + """ + +class CMSReplay(RPKI_Exception): + """ + Possible CMS replay attack detected. + """ + +class PastNotAfter(RPKI_Exception): + """ + Requested notAfter value is already in the past. + """ + +class NullValidityInterval(RPKI_Exception): + """ + Requested validity interval is null. + """ + +class BadX510DN(RPKI_Exception): + """ + X.510 distinguished name does not match profile. + """ + +class BadAutonomousSystemNumber(RPKI_Exception): + """ + Bad AutonomousSystem number. + """ + +class WrongEKU(RPKI_Exception): + """ + Extended Key Usage extension does not match profile. + """ diff --git a/rpki/gui/__init__.py b/rpki/gui/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/rpki/gui/api/__init__.py b/rpki/gui/api/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/rpki/gui/api/urls.py b/rpki/gui/api/urls.py new file mode 100644 index 00000000..8c9d824c --- /dev/null +++ b/rpki/gui/api/urls.py @@ -0,0 +1,22 @@ +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from django.conf.urls.defaults import * +from rpki.gui.routeview.api import route_list + +urlpatterns = patterns('', + (r'^v1/route/$', route_list), +) diff --git a/rpki/gui/app/TODO b/rpki/gui/app/TODO new file mode 100644 index 00000000..b7136397 --- /dev/null +++ b/rpki/gui/app/TODO @@ -0,0 +1,60 @@ +Use RequestContext (helper function for render_to_response) and a default +list of context processors for the generic functions + +Teach cert_delete about children, conf*, parent* to say what the ramifications +of deleting a cert are. + +Teach cert form about file upload + +Redirect /accounts/profile/ to /dashboard/ + +Teach dashboard view about looking up resources from parent. +There are 3 types of resources: +- Ones we've accepted and match +- Ones we've accepted but don't match + - two subtypes: + * the parent is now giving us a superset of what they used to. + This is relatively easily handled by keeping the subdivisions + we've made and just making the superset resource the new parent + of the existing resource (e.g., we had accepted 18.5.0.0/16 and + they're now giving us 18.0.0.0/8) + * the parent is now giving us a subset (including none) of what they + used to. Two sub-cases: + - The part that they took away is neither delegated nor roa'd. + - The part that they took away is either delegated or roa'd or both. +- Ones we haven't accepted yet + +The roa needs to learn to handle its prefix children. 
It may need to
+create the covering set of prefixes for an address range.
+
+Unallocated (not yet delegated or ROA'd) resources are:
+what we've gotten from our parent:
+models.AddressRange.objects.filter(from_parent=myconf.pk)
+minus what we've given to our children or issued roas for
+models.AddressRange.objects.filter(child__conf=myconf.pk)
+models.AddressRange.objects.filter(roa__conf=myconf.pk)
+or
+>>> from django.db.models import Q
+>>> models.AddressRange.objects.filter( Q(child__conf=myconf.pk) |
+        Q(roa__conf=myconf.pk) )
+
+
+and of course the ASN one is easier:
+models.Asn.objects.filter(from_parent=myconf.pk)
+minus what we've given to our children
+models.Asn.objects.filter(child__conf=myconf.pk)
+
+look in
+rpki/resource_set.py
+
+
+Adding a handle / resource-holding entity / "conf":
+- upload the identity XML that we've generated and are sending to the parent
+
+Adding a parent:
+- upload the response XML that he sent me
+  (keep things open to the parent uploading this directly to the web interface)
+
+Adding a child:
+- upload the identity XML that he sent me
+
 diff --git a/rpki/gui/app/__init__.py b/rpki/gui/app/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/rpki/gui/app/admin.py b/rpki/gui/app/admin.py
new file mode 100644
index 00000000..e69de29b
diff --git a/rpki/gui/app/check_expired.py b/rpki/gui/app/check_expired.py
new file mode 100644
index 00000000..fcf5ecae
--- /dev/null
+++ b/rpki/gui/app/check_expired.py
@@ -0,0 +1,209 @@
+# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH
+# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS.
IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' +__all__ = ('notify_expired', 'NetworkError') + +import sys +import socket +from cStringIO import StringIO +import logging +import datetime + +from rpki.gui.cacheview.models import Cert +from rpki.gui.app.models import Conf, ResourceCert, Timestamp, Alert +from rpki.gui.app.glue import list_received_resources +from rpki.irdb import Zookeeper +from rpki.left_right import report_error_elt, list_published_objects_elt +from rpki.x509 import X509 + +from django.core.mail import send_mail + +logger = logging.getLogger(__name__) +expire_time = 0 # set by notify_expired() +now = 0 + + +def check_cert(handle, p, errs): + """Check the expiration date on the X.509 certificates in each element of + the list. + + The displayed object name defaults to the class name, but can be overridden + using the `object_name` argument. + + """ + t = p.certificate.getNotAfter() + if t <= expire_time: + e = 'expired' if t <= now else 'will expire' + errs.write("%(handle)s's %(type)s %(desc)s %(expire)s on %(date)s\n" % { + 'handle': handle, 'type': p.__class__.__name__, 'desc': str(p), + 'expire': e, 'date': t}) + + +def check_cert_list(handle, x, errs): + for p in x: + check_cert(handle, p, errs) + + +def check_expire(conf, errs): + # get certs for `handle' + cert_set = ResourceCert.objects.filter(conf=conf) + for cert in cert_set: + # look up cert in cacheview db + obj_set = Cert.objects.filter(repo__uri=cert.uri) + if not obj_set: + # since the output is cached, this can + # occur if the cache is out of date as well.. 
+ errs.write("Unable to locate rescert in rcynic cache: handle=%s uri=%s not_after=%s\n" % (conf.handle, cert.uri, cert.not_after)) + continue + obj = obj_set[0] + msg = [] + expired = False + for n, c in enumerate(obj.cert_chain): + if c.not_after <= expire_time: + expired = True + f = '*' + else: + f = ' ' + msg.append("%s [%d] uri=%s ski=%s name=%s expires=%s" % (f, n, c.repo.uri, c.keyid, c.name, c.not_after)) + + # find ghostbuster records attached to this cert + for gbr in c.ghostbusters.all(): + info = [] + for s in ('full_name', 'organization', 'email_address', 'telephone'): + t = getattr(gbr, s, None) + if t: + info.append(t) + + msg.append(" Contact: " + ", ".join(info)) + + if expired: + errs.write("%s's rescert from parent %s will expire soon:\n" % ( + conf.handle, + # parent is None for the root cert + cert.parent.handle if cert.parent else 'self' + )) + errs.write("Certificate chain:\n") + errs.write("\n".join(msg)) + errs.write("\n") + + +def check_child_certs(conf, errs): + """Fetch the list of published objects from rpkid, and inspect the issued + resource certs (uri ending in .cer). 
+ + """ + z = Zookeeper(handle=conf.handle) + req = list_published_objects_elt.make_pdu(action="list", + tag="list_published_objects", + self_handle=conf.handle) + pdus = z.call_rpkid(req) + for pdu in pdus: + if isinstance(pdu, report_error_elt): + logger.error("rpkid reported an error: %s" % pdu.error_code) + elif isinstance(pdu, list_published_objects_elt): + if pdu.uri.endswith('.cer'): + cert = X509() + cert.set(Base64=pdu.obj) + t = cert.getNotAfter() + if t <= expire_time: + e = 'expired' if t <= now else 'will expire' + errs.write("%(handle)s's rescert for Child %(child)s %(expire)s on %(date)s uri=%(uri)s subject=%(subject)s\n" % { + 'handle': conf.handle, + 'child': pdu.child_handle, + 'uri': pdu.uri, + 'subject': cert.getSubject(), + 'expire': e, + 'date': t}) + + +class NetworkError(Exception): + pass + + +def notify_expired(expire_days=14, from_email=None): + """Send email notificates about impending expirations of resource + and BPKI certificates. + + expire_days: the number of days ahead of today to warn + + from_email: set the From: address for the email + + """ + global expire_time # so i don't have to pass it around + global now + + now = datetime.datetime.utcnow() + expire_time = now + datetime.timedelta(expire_days) + + # this is not exactly right, since we have no way of knowing what the + # vhost for the web portal running on this machine is + host = socket.getfqdn() + if not from_email: + from_email = 'root@' + host + + # Ensure that the rcynic and routeviews data has been updated recently + # The QuerySet is created here so that it will be cached and reused on each + # iteration of the loop below + t = now - datetime.timedelta(hours=12) # 12 hours + stale_timestamps = Timestamp.objects.filter(ts__lte=t) + + # if not arguments are given, query all resource holders + qs = Conf.objects.all() + + # check expiration of certs for all handles managed by the web portal + for h in qs: + # Force cache update since several checks require fresh data + 
try: + list_received_resources(sys.stdout, h) + except socket.error as e: + raise NetworkError('Error while talking to rpkid: %s' % e) + + errs = StringIO() + + # Warn the resource holder admins when data may be out of date + if stale_timestamps: + errs.write('Warning! Stale data from external sources.\n') + errs.write('data source : last import\n') + for obj in stale_timestamps: + errs.write('%-15s: %s\n' % (obj.name, obj.ts)) + errs.write('\n') + + check_cert(h.handle, h, errs) + + # HostedCA is the ResourceHolderCA cross certified under ServerCA, so + # check the ServerCA expiration date as well + check_cert(h.handle, h.hosted_by, errs) + check_cert(h.handle, h.hosted_by.issuer, errs) + + check_cert_list(h.handle, h.bscs.all(), errs) + check_cert_list(h.handle, h.parents.all(), errs) + check_cert_list(h.handle, h.children.all(), errs) + check_cert_list(h.handle, h.repositories.all(), errs) + + check_expire(h, errs) + check_child_certs(h, errs) + + # if there was output, display it now + s = errs.getvalue() + if s: + logger.info(s) + + t = """This is an automated notice about the upcoming expiration of RPKI resources for the handle %s on %s. You are receiving this notification because your email address is either registered in a Ghostbuster record, or as the default email address for the account.\n\n""" % (h.handle, host) + h.send_alert( + subject='RPKI expiration notice for %s' % h.handle, + message=t + s, + from_email=from_email, + severity=Alert.WARNING + ) diff --git a/rpki/gui/app/forms.py b/rpki/gui/app/forms.py new file mode 100644 index 00000000..20ce4a07 --- /dev/null +++ b/rpki/gui/app/forms.py @@ -0,0 +1,442 @@ +# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + + +from django.contrib.auth.models import User +from django import forms +from rpki.resource_set import (resource_range_as, resource_range_ip) +from rpki.gui.app import models +from rpki.exceptions import BadIPResource +from rpki.POW import IPAddress + + +class AddConfForm(forms.Form): + handle = forms.CharField(required=True, + help_text='your handle for your rpki instance') + run_rpkid = forms.BooleanField(required=False, initial=True, + label='Run rpkid?', + help_text='do you want to run your own instance of rpkid?') + rpkid_server_host = forms.CharField(initial='rpkid.example.org', + label='rpkid hostname', + help_text='publicly visible hostname for your rpkid instance') + rpkid_server_port = forms.IntegerField(initial=4404, + label='rpkid port') + run_pubd = forms.BooleanField(required=False, initial=False, + label='Run pubd?', + help_text='do you want to run your own instance of pubd?') + pubd_server_host = forms.CharField(initial='pubd.example.org', + label='pubd hostname', + help_text='publicly visible hostname for your pubd instance') + pubd_server_port = forms.IntegerField(initial=4402, label='pubd port') + pubd_contact_info = forms.CharField(initial='repo-man@rpki.example.org', + label='Pubd contact', + help_text='email address for the operator of your pubd instance') + + +class GhostbusterRequestForm(forms.ModelForm): + """ + Generate a ModelForm with the subset of parents for the current + resource handle. 
+ """ + # override default form field + parent = forms.ModelChoiceField(queryset=None, required=False, + help_text='Specify specific parent, or none for all parents') + + #override + issuer = forms.ModelChoiceField(queryset=None, widget=forms.HiddenInput) + + def __init__(self, *args, **kwargs): + conf = kwargs.pop('conf') + # override initial value for conf in case user tries to alter it + initial = kwargs.setdefault('initial', {}) + initial['issuer'] = conf + super(GhostbusterRequestForm, self).__init__(*args, **kwargs) + self.fields['parent'].queryset = conf.parents.all() + self.fields['issuer'].queryset = models.Conf.objects.filter(pk=conf.pk) + + class Meta: + model = models.GhostbusterRequest + exclude = ('vcard', 'given_name', 'family_name', 'additional_name', + 'honorific_prefix', 'honorific_suffix') + + def clean(self): + email = self.cleaned_data.get('email_address') + postal = self.cleaned_data.get('postal_address') + telephone = self.cleaned_data.get('telephone') + if not any([email, postal, telephone]): + raise forms.ValidationError( + 'One of telephone, email or postal address must be specified') + + return self.cleaned_data + + +class ImportForm(forms.Form): + """Form used for uploading parent/child identity xml files.""" + handle = forms.CharField(required=False, + widget=forms.TextInput(attrs={'class': 'xlarge'}), + help_text='Optional. Your name for this entity, or blank to accept name in XML') + xml = forms.FileField(label='XML file') + + +class ImportRepositoryForm(forms.Form): + handle = forms.CharField(max_length=30, required=False, + label='Parent Handle', + help_text='Optional. 
Must be specified if you use a different name for this parent') + xml = forms.FileField(label='XML file') + + +class ImportClientForm(forms.Form): + """Form used for importing publication client requests.""" + xml = forms.FileField(label='XML file') + + +class ImportCSVForm(forms.Form): + csv = forms.FileField(label='CSV file') + + +class UserCreateForm(forms.Form): + username = forms.CharField(max_length=30) + email = forms.CharField(max_length=30, + help_text='email address for new user') + password = forms.CharField(widget=forms.PasswordInput) + password2 = forms.CharField(widget=forms.PasswordInput, + label='Confirm Password') + resource_holders = forms.ModelMultipleChoiceField( + queryset=models.Conf.objects.all(), + help_text='allowed to manage these resource holders' + + ) + + def clean_username(self): + username = self.cleaned_data.get('username') + if User.objects.filter(username=username).exists(): + raise forms.ValidationError('user already exists') + return username + + def clean(self): + p1 = self.cleaned_data.get('password') + p2 = self.cleaned_data.get('password2') + if p1 != p2: + raise forms.ValidationError('passwords do not match') + return self.cleaned_data + + +class UserEditForm(forms.Form): + """Form for editing a user.""" + email = forms.CharField() + pw = forms.CharField(widget=forms.PasswordInput, label='Password', + required=False) + pw2 = forms.CharField(widget=forms.PasswordInput, label='Confirm password', + required=False) + resource_holders = forms.ModelMultipleChoiceField( + queryset=models.Conf.objects.all(), + help_text='allowed to manage these resource holders' + ) + + def clean(self): + p1 = self.cleaned_data.get('pw') + p2 = self.cleaned_data.get('pw2') + if p1 != p2: + raise forms.ValidationError('Passwords do not match') + return self.cleaned_data + + +class ROARequest(forms.Form): + """Form for entering a ROA request. 
+ + Handles both IPv4 and IPv6.""" + + prefix = forms.CharField( + widget=forms.TextInput(attrs={ + 'autofocus': 'true', 'placeholder': 'Prefix', + 'class': 'span4' + }) + ) + max_prefixlen = forms.CharField( + required=False, + widget=forms.TextInput(attrs={ + 'placeholder': 'Max len', + 'class': 'span1' + }) + ) + asn = forms.IntegerField( + widget=forms.TextInput(attrs={ + 'placeholder': 'ASN', + 'class': 'span1' + }) + ) + confirmed = forms.BooleanField(widget=forms.HiddenInput, required=False) + + def __init__(self, *args, **kwargs): + """Takes an optional `conf` keyword argument specifying the user that + is creating the ROAs. It is used for validating that the prefix the + user entered is currently allocated to that user. + + """ + conf = kwargs.pop('conf', None) + kwargs['auto_id'] = False + super(ROARequest, self).__init__(*args, **kwargs) + self.conf = conf + self.inline = True + self.use_table = False + + def _as_resource_range(self): + """Convert the prefix in the form to a + rpki.resource_set.resource_range_ip object. + + If there is no mask provided, assume the closest classful mask. 
+ + """ + prefix = self.cleaned_data.get('prefix') + if '/' not in prefix: + p = IPAddress(prefix) + + # determine the first nonzero bit starting from the lsb and + # subtract from the address size to find the closest classful + # mask that contains this single address + prefixlen = 0 + while (p != 0) and (p & 1) == 0: + prefixlen = prefixlen + 1 + p = p >> 1 + mask = p.bits - (8 * (prefixlen / 8)) + prefix = prefix + '/' + str(mask) + + return resource_range_ip.parse_str(prefix) + + def clean_asn(self): + value = self.cleaned_data.get('asn') + if value < 0: + raise forms.ValidationError('AS must be a positive value or 0') + return value + + def clean_prefix(self): + try: + r = self._as_resource_range() + except: + raise forms.ValidationError('invalid prefix') + + manager = models.ResourceRangeAddressV4 if r.version == 4 else models.ResourceRangeAddressV6 + if not manager.objects.filter(cert__conf=self.conf, + prefix_min__lte=r.min, + prefix_max__gte=r.max).exists(): + raise forms.ValidationError('prefix is not allocated to you') + return str(r) + + def clean_max_prefixlen(self): + v = self.cleaned_data.get('max_prefixlen') + if v: + if v[0] == '/': + v = v[1:] # allow user to specify /24 + try: + if int(v) < 0: + raise forms.ValidationError('max prefix length must be positive or 0') + except ValueError: + raise forms.ValidationError('invalid integer value') + return v + + def clean(self): + if 'prefix' in self.cleaned_data: + r = self._as_resource_range() + max_prefixlen = self.cleaned_data.get('max_prefixlen') + max_prefixlen = int(max_prefixlen) if max_prefixlen else r.prefixlen() + if max_prefixlen < r.prefixlen(): + raise forms.ValidationError( + 'max prefix length must be greater than or equal to the prefix length') + if max_prefixlen > r.min.bits: + raise forms.ValidationError, \ + 'max prefix length (%d) is out of range for IP version (%d)' % (max_prefixlen, r.min.bits) + self.cleaned_data['max_prefixlen'] = str(max_prefixlen) + return self.cleaned_data + + 
+class ROARequestConfirm(forms.Form): + asn = forms.IntegerField(widget=forms.HiddenInput) + prefix = forms.CharField(widget=forms.HiddenInput) + max_prefixlen = forms.IntegerField(widget=forms.HiddenInput) + + def clean_asn(self): + value = self.cleaned_data.get('asn') + if value < 0: + raise forms.ValidationError('AS must be a positive value or 0') + return value + + def clean_prefix(self): + try: + r = resource_range_ip.parse_str(self.cleaned_data.get('prefix')) + except BadIPResource: + raise forms.ValidationError('invalid prefix') + return str(r) + + def clean(self): + try: + r = resource_range_ip.parse_str(self.cleaned_data.get('prefix')) + if r.prefixlen() > self.cleaned_data.get('max_prefixlen'): + raise forms.ValidationError('max length is smaller than mask') + except BadIPResource: + pass + return self.cleaned_data + + +class AddASNForm(forms.Form): + """ + Returns a forms.Form subclass which verifies that the entered ASN range + does not overlap with a previous allocation to the specified child, and + that the ASN range is within the range allocated to the parent. 
+ + """ + + asns = forms.CharField( + label='ASNs', + help_text='single ASN or range', + widget=forms.TextInput(attrs={'autofocus': 'true'}) + ) + + def __init__(self, *args, **kwargs): + self.child = kwargs.pop('child') + super(AddASNForm, self).__init__(*args, **kwargs) + + def clean_asns(self): + try: + r = resource_range_as.parse_str(self.cleaned_data.get('asns')) + except: + raise forms.ValidationError('invalid AS or range') + + if not models.ResourceRangeAS.objects.filter( + cert__conf=self.child.issuer, + min__lte=r.min, + max__gte=r.max).exists(): + raise forms.ValidationError('AS or range is not delegated to you') + + # determine if the entered range overlaps with any AS already + # allocated to this child + if self.child.asns.filter(end_as__gte=r.min, start_as__lte=r.max).exists(): + raise forms.ValidationError( + 'Overlap with previous allocation to this child') + + return str(r) + + +class AddNetForm(forms.Form): + """ + Returns a forms.Form subclass which validates that the entered address + range is within the resources allocated to the parent, and does not overlap + with what is already allocated to the specified child. 
+ + """ + address_range = forms.CharField( + help_text='CIDR or range', + widget=forms.TextInput(attrs={'autofocus': 'true'}) + ) + + def __init__(self, *args, **kwargs): + self.child = kwargs.pop('child') + super(AddNetForm, self).__init__(*args, **kwargs) + + def clean_address_range(self): + address_range = self.cleaned_data.get('address_range') + try: + r = resource_range_ip.parse_str(address_range) + if r.version == 6: + qs = models.ResourceRangeAddressV6 + version = 'IPv6' + else: + qs = models.ResourceRangeAddressV4 + version = 'IPv4' + except BadIPResource: + raise forms.ValidationError('invalid IP address range') + + if not qs.objects.filter(cert__conf=self.child.issuer, + prefix_min__lte=r.min, + prefix_max__gte=r.max).exists(): + raise forms.ValidationError( + 'IP address range is not delegated to you') + + # determine if the entered range overlaps with any prefix + # already allocated to this child + for n in self.child.address_ranges.filter(version=version): + rng = n.as_resource_range() + if r.max >= rng.min and r.min <= rng.max: + raise forms.ValidationError( + 'Overlap with previous allocation to this child') + + return str(r) + + +def ChildForm(instance): + """ + Form for editing a Child model. + + This is roughly based on the equivalent ModelForm, but uses Form as a base + class so that selection boxes for the AS and Prefixes can be edited in a + single form. 
+ + """ + + class _wrapped(forms.Form): + valid_until = forms.DateTimeField(initial=instance.valid_until) + as_ranges = forms.ModelMultipleChoiceField(queryset=models.ChildASN.objects.filter(child=instance), + required=False, + label='AS Ranges', + help_text='deselect to remove delegation') + address_ranges = forms.ModelMultipleChoiceField(queryset=models.ChildNet.objects.filter(child=instance), + required=False, + help_text='deselect to remove delegation') + + return _wrapped + + +class Empty(forms.Form): + """Stub form for views requiring confirmation.""" + pass + + +class ResourceHolderForm(forms.Form): + """form for editing ACL on Conf objects.""" + users = forms.ModelMultipleChoiceField( + queryset=User.objects.all(), + help_text='users allowed to mange this resource holder' + ) + + +class ResourceHolderCreateForm(forms.Form): + """form for creating new resource holdres.""" + handle = forms.CharField(max_length=30) + parent = forms.ModelChoiceField( + required=False, + queryset=models.Conf.objects.all(), + help_text='optionally make the new resource holder a child of this resource holder' + ) + users = forms.ModelMultipleChoiceField( + required=False, + queryset=User.objects.all(), + help_text='users allowed to mange this resource holder' + ) + + def clean_handle(self): + handle = self.cleaned_data.get('handle') + if models.Conf.objects.filter(handle=handle).exists(): + raise forms.ValidationError( + 'a resource holder with that handle already exists' + ) + return handle + + def clean(self): + handle = self.cleaned_data.get('handle') + parent = self.cleaned_data.get('parent') + if handle and parent and parent.children.filter(handle=handle).exists(): + raise forms.ValidationError('parent already has a child by that name') + return self.cleaned_data diff --git a/rpki/gui/app/glue.py b/rpki/gui/app/glue.py new file mode 100644 index 00000000..a9f6441e --- /dev/null +++ b/rpki/gui/app/glue.py @@ -0,0 +1,132 @@ +# Copyright (C) 2010, 2011 SPARTA, Inc. 
dba Cobham Analytic Solutions +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +This file contains code that interfaces between the django views implementing +the portal gui and the rpki.* modules. + +""" + +from __future__ import with_statement + +__version__ = '$Id$' + +from datetime import datetime + +from rpki.resource_set import (resource_set_as, resource_set_ipv4, + resource_set_ipv6, resource_range_ipv4, + resource_range_ipv6) +from rpki.left_right import list_received_resources_elt, report_error_elt +from rpki.irdb.zookeeper import Zookeeper +from rpki.gui.app import models +from rpki.exceptions import BadIPResource + +from django.contrib.auth.models import User +from django.db.transaction import commit_on_success + + +def ghostbuster_to_vcard(gbr): + """Convert a GhostbusterRequest object into a vCard object.""" + import vobject + + vcard = vobject.vCard() + vcard.add('N').value = vobject.vcard.Name(family=gbr.family_name, + given=gbr.given_name) + + adr_fields = ['box', 'extended', 'street', 'city', 'region', 'code', + 'country'] + adr_dict = dict((f, getattr(gbr, f, '')) for f in adr_fields) + if any(adr_dict.itervalues()): + vcard.add('ADR').value = vobject.vcard.Address(**adr_dict) + + # mapping from vCard type to Ghostbuster model 
field + # the ORG type is a sequence of organization unit names, so + # transform the org name into a tuple before stuffing into the + # vCard object + attrs = [('FN', 'full_name', None), + ('TEL', 'telephone', None), + ('ORG', 'organization', lambda x: (x,)), + ('EMAIL', 'email_address', None)] + for vtype, field, transform in attrs: + v = getattr(gbr, field) + if v: + vcard.add(vtype).value = transform(v) if transform else v + return vcard.serialize() + + +class LeftRightError(Exception): + """Class for wrapping report_error_elt errors from Zookeeper.call_rpkid(). + + It expects a single argument, which is the associated report_error_elt instance.""" + + def __str__(self): + return 'Error occurred while communicating with rpkid: handle=%s code=%s text=%s' % ( + self.args[0].self_handle, + self.args[0].error_code, + self.args[0].error_text) + + +@commit_on_success +def list_received_resources(log, conf): + """ + Query rpkid for this resource handle's received resources. + + The semantics are to clear the entire table and populate with the list of + certs received. Other models should not reference the table directly with + foreign keys. 
+ + """ + + z = Zookeeper(handle=conf.handle) + pdus = z.call_rpkid(list_received_resources_elt.make_pdu(self_handle=conf.handle)) + # pdus is sometimes None (see https://trac.rpki.net/ticket/681) + if pdus is None: + print >>log, 'error: call_rpkid() returned None for handle %s when fetching received resources' % conf.handle + return + + models.ResourceCert.objects.filter(conf=conf).delete() + + for pdu in pdus: + if isinstance(pdu, report_error_elt): + # this will cause the db to be rolled back so the above delete() + # won't clobber existing resources + raise LeftRightError, pdu + elif isinstance(pdu, list_received_resources_elt): + if pdu.parent_handle != conf.handle: + parent = models.Parent.objects.get(issuer=conf, + handle=pdu.parent_handle) + else: + # root cert, self-signed + parent = None + + not_before = datetime.strptime(pdu.notBefore, "%Y-%m-%dT%H:%M:%SZ") + not_after = datetime.strptime(pdu.notAfter, "%Y-%m-%dT%H:%M:%SZ") + + cert = models.ResourceCert.objects.create( + conf=conf, parent=parent, not_before=not_before, + not_after=not_after, uri=pdu.uri) + + for asn in resource_set_as(pdu.asn): + cert.asn_ranges.create(min=asn.min, max=asn.max) + + for rng in resource_set_ipv4(pdu.ipv4): + cert.address_ranges.create(prefix_min=rng.min, + prefix_max=rng.max) + + for rng in resource_set_ipv6(pdu.ipv6): + cert.address_ranges_v6.create(prefix_min=rng.min, + prefix_max=rng.max) + else: + print >>log, "error: unexpected pdu from rpkid type=%s" % type(pdu) diff --git a/rpki/gui/app/migrations/0001_initial.py b/rpki/gui/app/migrations/0001_initial.py new file mode 100644 index 00000000..80877901 --- /dev/null +++ b/rpki/gui/app/migrations/0001_initial.py @@ -0,0 +1,192 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding model 'ResourceCert' + db.create_table('app_resourcecert', ( + ('id', 
self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('parent', self.gf('django.db.models.fields.related.ForeignKey')(related_name='certs', to=orm['irdb.Parent'])),
            ('not_before', self.gf('django.db.models.fields.DateTimeField')()),
            ('not_after', self.gf('django.db.models.fields.DateTimeField')()),
            ('uri', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ))
        db.send_create_signal('app', ['ResourceCert'])

        # Adding model 'ResourceRangeAddressV4'
        db.create_table('app_resourcerangeaddressv4', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('prefix_min', self.gf('rpki.gui.models.IPv4AddressField')(db_index=True)),
            ('prefix_max', self.gf('rpki.gui.models.IPv4AddressField')(db_index=True)),
            ('cert', self.gf('django.db.models.fields.related.ForeignKey')(related_name='address_ranges', to=orm['app.ResourceCert'])),
        ))
        db.send_create_signal('app', ['ResourceRangeAddressV4'])

        # Adding model 'ResourceRangeAddressV6'
        db.create_table('app_resourcerangeaddressv6', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('prefix_min', self.gf('rpki.gui.models.IPv6AddressField')(db_index=True)),
            ('prefix_max', self.gf('rpki.gui.models.IPv6AddressField')(db_index=True)),
            ('cert', self.gf('django.db.models.fields.related.ForeignKey')(related_name='address_ranges_v6', to=orm['app.ResourceCert'])),
        ))
        db.send_create_signal('app', ['ResourceRangeAddressV6'])

        # Adding model 'ResourceRangeAS'
        db.create_table('app_resourcerangeas', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('min', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('max', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('cert', self.gf('django.db.models.fields.related.ForeignKey')(related_name='asn_ranges', to=orm['app.ResourceCert'])),
        ))
        db.send_create_signal('app', ['ResourceRangeAS'])

        # Adding model 'GhostbusterRequest'
        db.create_table('app_ghostbusterrequest', (
            ('ghostbusterrequest_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['irdb.GhostbusterRequest'], unique=True, primary_key=True)),
            ('full_name', self.gf('django.db.models.fields.CharField')(max_length=40)),
            ('family_name', self.gf('django.db.models.fields.CharField')(max_length=20)),
            ('given_name', self.gf('django.db.models.fields.CharField')(max_length=20)),
            ('additional_name', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)),
            ('honorific_prefix', self.gf('django.db.models.fields.CharField')(max_length=10, null=True, blank=True)),
            ('honorific_suffix', self.gf('django.db.models.fields.CharField')(max_length=10, null=True, blank=True)),
            ('email_address', self.gf('django.db.models.fields.EmailField')(max_length=75, null=True, blank=True)),
            ('organization', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('telephone', self.gf('rpki.gui.app.models.TelephoneField')(max_length=40, null=True, blank=True)),
            ('box', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)),
            ('extended', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('street', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
            ('city', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)),
            ('region', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)),
            ('code', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)),
            ('country', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)),
        ))
        db.send_create_signal('app', ['GhostbusterRequest'])

        # Adding model 'Timestamp'
        db.create_table('app_timestamp', (
            ('name', self.gf('django.db.models.fields.CharField')(max_length=30, primary_key=True)),
            ('ts', self.gf('django.db.models.fields.DateTimeField')()),
        ))
        db.send_create_signal('app', ['Timestamp'])


    def backwards(self, orm):
        # Deleting model 'ResourceCert'
        db.delete_table('app_resourcecert')

        # Deleting model 'ResourceRangeAddressV4'
        db.delete_table('app_resourcerangeaddressv4')

        # Deleting model 'ResourceRangeAddressV6'
        db.delete_table('app_resourcerangeaddressv6')

        # Deleting model 'ResourceRangeAS'
        db.delete_table('app_resourcerangeas')

        # Deleting model 'GhostbusterRequest'
        db.delete_table('app_ghostbusterrequest')

        # Deleting model 'Timestamp'
        db.delete_table('app_timestamp')


    # NOTE: auto-generated South migration; the "models" dict below is the
    # frozen ORM state this migration runs against -- do not hand-edit.
    models = {
        'app.ghostbusterrequest': {
            'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']},
            'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}),
            'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
            'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
            'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'})
        },
        'app.resourcecert': {
            'Meta': {'object_name': 'ResourceCert'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'not_after': ('django.db.models.fields.DateTimeField', [], {}),
            'not_before': ('django.db.models.fields.DateTimeField', [], {}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.Parent']"}),
            'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'app.resourcerangeaddressv4': {
            'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'},
            'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}),
            'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'})
        },
        'app.resourcerangeaddressv6': {
            'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'},
            'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}),
            'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'})
        },
        'app.resourcerangeas': {
            'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'},
            'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'max': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'min': ('django.db.models.fields.PositiveIntegerField', [], {})
        },
        'app.timestamp': {
            'Meta': {'object_name': 'Timestamp'},
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
            'ts': ('django.db.models.fields.DateTimeField', [], {})
        },
        'irdb.ghostbusterrequest': {
            'Meta': {'object_name': 'GhostbusterRequest'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}),
            'vcard': ('django.db.models.fields.TextField', [], {})
        },
        'irdb.parent': {
            'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']},
            'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}),
            'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}),
            'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}),
            'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}),
            'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}),
            'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'repository_type': ('rpki.irdb.models.EnumField', [], {}),
            'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}),
            'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'})
        },
        'irdb.resourceholderca': {
            'Meta': {'object_name': 'ResourceHolderCA'},
            'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}),
            'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_crl_update': ('rpki.irdb.models.SundialField', [], {}),
            'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}),
            'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
            'next_crl_update': ('rpki.irdb.models.SundialField', [], {}),
            'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
            'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'})
        },
        'irdb.turtle': {
            'Meta': {'object_name': 'Turtle'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        }
    }

    complete_apps = ['app']
\ No newline at end of file
diff --git a/rpki/gui/app/migrations/0002_auto__add_field_resourcecert_conf.py b/rpki/gui/app/migrations/0002_auto__add_field_resourcecert_conf.py
new file mode 100644
index 00000000..d3326f90
--- /dev/null
+++ b/rpki/gui/app/migrations/0002_auto__add_field_resourcecert_conf.py
@@ -0,0 +1,117 @@
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import 
SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'ResourceCert.conf'
        db.add_column('app_resourcecert', 'conf',
                      self.gf('django.db.models.fields.related.ForeignKey')(related_name='certs', null=True, to=orm['irdb.ResourceHolderCA']),
                      keep_default=False)


    def backwards(self, orm):
        # Deleting field 'ResourceCert.conf'
        db.delete_column('app_resourcecert', 'conf_id')


    # NOTE: auto-generated South migration; the "models" dict below is the
    # frozen ORM state this migration runs against -- do not hand-edit.
    models = {
        'app.ghostbusterrequest': {
            'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']},
            'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}),
            'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
            'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
            'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'})
        },
        'app.resourcecert': {
            'Meta': {'object_name': 'ResourceCert'},
            'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.ResourceHolderCA']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'not_after': ('django.db.models.fields.DateTimeField', [], {}),
            'not_before': ('django.db.models.fields.DateTimeField', [], {}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.Parent']"}),
            'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'app.resourcerangeaddressv4': {
            'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'},
            'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}),
            'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'})
        },
        'app.resourcerangeaddressv6': {
            'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'},
            'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}),
            'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'})
        },
        'app.resourcerangeas': {
            'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'},
            'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'max': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'min': ('django.db.models.fields.PositiveIntegerField', [], {})
        },
        'app.timestamp': {
            'Meta': {'object_name': 'Timestamp'},
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
            'ts': ('django.db.models.fields.DateTimeField', [], {})
        },
        'irdb.ghostbusterrequest': {
            'Meta': {'object_name': 'GhostbusterRequest'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}),
            'vcard': ('django.db.models.fields.TextField', [], {})
        },
        'irdb.parent': {
            'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']},
            'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}),
            'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}),
            'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}),
            'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}),
            'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}),
            'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'repository_type': ('rpki.irdb.models.EnumField', [], {}),
            'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}),
            'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'})
        },
        'irdb.resourceholderca': {
            'Meta': {'object_name': 'ResourceHolderCA'},
            'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}),
            'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_crl_update': ('rpki.irdb.models.SundialField', [], {}),
            'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}),
            'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
            'next_crl_update': ('rpki.irdb.models.SundialField', [], {}),
            'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
            'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'})
        },
        'irdb.turtle': {
            'Meta': {'object_name': 'Turtle'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        }
    }

    complete_apps = ['app']
\ No newline at end of file
diff --git a/rpki/gui/app/migrations/0003_set_conf_from_parent.py b/rpki/gui/app/migrations/0003_set_conf_from_parent.py
new file mode 100644
index 00000000..a90a11cc
--- /dev/null
+++ b/rpki/gui/app/migrations/0003_set_conf_from_parent.py
@@ -0,0 +1,116 @@
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models

class Migration(DataMigration):

    def forwards(self, orm):
        "Write your forwards methods here."
        # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..."
        # Backfill the conf FK added by migration 0002 from each cert's
        # parent's issuer.
        for cert in orm.ResourceCert.objects.all():
            cert.conf = cert.parent.issuer
            cert.save()

    def backwards(self, orm):
        "Write your backwards methods here."
        pass

    # NOTE: auto-generated South migration; the "models" dict below is the
    # frozen ORM state this migration runs against -- do not hand-edit.
    models = {
        'app.ghostbusterrequest': {
            'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']},
            'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}),
            'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
            'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
            'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
            'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'})
        },
        'app.resourcecert': {
            'Meta': {'object_name': 'ResourceCert'},
            'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.ResourceHolderCA']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'not_after': ('django.db.models.fields.DateTimeField', [], {}),
            'not_before': ('django.db.models.fields.DateTimeField', [], {}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.Parent']"}),
            'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'app.resourcerangeaddressv4': {
            'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'},
            'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}),
            'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'})
        },
        'app.resourcerangeaddressv6': {
            'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'},
            'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}),
            'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'})
        },
        'app.resourcerangeas': {
            'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'},
            'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'max': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'min': ('django.db.models.fields.PositiveIntegerField', [], {})
        },
        'app.timestamp': {
            'Meta': {'object_name': 'Timestamp'},
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
            'ts': ('django.db.models.fields.DateTimeField', [], {})
        },
        'irdb.ghostbusterrequest': {
            'Meta': {'object_name': 'GhostbusterRequest'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}),
            'vcard': ('django.db.models.fields.TextField', [], {})
        },
        'irdb.parent': {
            'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']},
            'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}),
            'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}),
            'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}),
            'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}),
            'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}),
            'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'repository_type': ('rpki.irdb.models.EnumField', [], {}),
            'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}),
            'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'})
        },
        'irdb.resourceholderca': {
            'Meta': {'object_name': 'ResourceHolderCA'},
            'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}),
            'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_crl_update': ('rpki.irdb.models.SundialField', [], {}),
            'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}),
            'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
            'next_crl_update': ('rpki.irdb.models.SundialField', [], {}),
            'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
            'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'})
        },
        'irdb.turtle': {
            'Meta': {'object_name': 'Turtle'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        }
    }

    complete_apps = ['app']
    symmetrical = True
diff --git a/rpki/gui/app/migrations/0004_auto__chg_field_resourcecert_conf.py b/rpki/gui/app/migrations/0004_auto__chg_field_resourcecert_conf.py
new file mode 100644
index 00000000..a236ad4a
--- /dev/null
+++ b/rpki/gui/app/migrations/0004_auto__chg_field_resourcecert_conf.py
@@ -0,0 +1,115 @@
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):

        # 
Changing field 'ResourceCert.conf' + db.alter_column('app_resourcecert', 'conf_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['irdb.ResourceHolderCA'])) + + def backwards(self, orm): + + # Changing field 'ResourceCert.conf' + db.alter_column('app_resourcecert', 'conf_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['irdb.ResourceHolderCA'])) + + models = { + 'app.ghostbusterrequest': { + 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, + 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), + 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), + 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), + 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 
'True'}), + 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) + }, + 'app.resourcecert': { + 'Meta': {'object_name': 'ResourceCert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'not_after': ('django.db.models.fields.DateTimeField', [], {}), + 'not_before': ('django.db.models.fields.DateTimeField', [], {}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.Parent']"}), + 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'app.resourcerangeaddressv4': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeaddressv6': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), + 'prefix_min': 
('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeas': { + 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), + 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) + }, + 'app.timestamp': { + 'Meta': {'object_name': 'Timestamp'}, + 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), + 'ts': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'irdb.ghostbusterrequest': { + 'Meta': {'object_name': 'GhostbusterRequest'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'vcard': ('django.db.models.fields.TextField', [], {}) + }, + 'irdb.parent': { + 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), + 'referrer': 
('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), + 'repository_type': ('rpki.irdb.models.EnumField', [], {}), + 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) + }, + 'irdb.resourceholderca': { + 'Meta': {'object_name': 'ResourceHolderCA'}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), + 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) + }, + 'irdb.turtle': { + 'Meta': {'object_name': 'Turtle'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['app'] diff --git a/rpki/gui/app/migrations/0005_auto__chg_field_resourcecert_parent.py b/rpki/gui/app/migrations/0005_auto__chg_field_resourcecert_parent.py new file mode 100644 index 00000000..11e9c814 --- /dev/null +++ b/rpki/gui/app/migrations/0005_auto__chg_field_resourcecert_parent.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + + # Changing field 
'ResourceCert.parent' + db.alter_column('app_resourcecert', 'parent_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['irdb.Parent'])) + + def backwards(self, orm): + + # Changing field 'ResourceCert.parent' + db.alter_column('app_resourcecert', 'parent_id', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['irdb.Parent'])) + + models = { + 'app.ghostbusterrequest': { + 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, + 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), + 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), + 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), + 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 
'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) + }, + 'app.resourcecert': { + 'Meta': {'object_name': 'ResourceCert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'not_after': ('django.db.models.fields.DateTimeField', [], {}), + 'not_before': ('django.db.models.fields.DateTimeField', [], {}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'app.resourcerangeaddressv4': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeaddressv6': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), + 'prefix_min': 
('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeas': { + 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), + 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) + }, + 'app.timestamp': { + 'Meta': {'object_name': 'Timestamp'}, + 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), + 'ts': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'irdb.ghostbusterrequest': { + 'Meta': {'object_name': 'GhostbusterRequest'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'vcard': ('django.db.models.fields.TextField', [], {}) + }, + 'irdb.parent': { + 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), + 'referrer': 
('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), + 'repository_type': ('rpki.irdb.models.EnumField', [], {}), + 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) + }, + 'irdb.resourceholderca': { + 'Meta': {'object_name': 'ResourceHolderCA'}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), + 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) + }, + 'irdb.turtle': { + 'Meta': {'object_name': 'Turtle'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['app'] \ No newline at end of file diff --git a/rpki/gui/app/migrations/0006_add_conf_acl.py b/rpki/gui/app/migrations/0006_add_conf_acl.py new file mode 100644 index 00000000..88fe8171 --- /dev/null +++ b/rpki/gui/app/migrations/0006_add_conf_acl.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding model 'ConfACL' + db.create_table('app_confacl', ( + 
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('conf', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['irdb.ResourceHolderCA'])), + ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])), + )) + db.send_create_signal('app', ['ConfACL']) + + # Adding unique constraint on 'ConfACL', fields ['user', 'conf'] + db.create_unique('app_confacl', ['user_id', 'conf_id']) + + + def backwards(self, orm): + # Removing unique constraint on 'ConfACL', fields ['user', 'conf'] + db.delete_unique('app_confacl', ['user_id', 'conf_id']) + + # Deleting model 'ConfACL' + db.delete_table('app_confacl') + + + models = { + 'app.confacl': { + 'Meta': {'unique_together': "(('user', 'conf'),)", 'object_name': 'ConfACL'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) + }, + 'app.ghostbusterrequest': { + 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, + 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), + 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), + 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 
'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), + 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) + }, + 'app.resourcecert': { + 'Meta': {'object_name': 'ResourceCert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'not_after': ('django.db.models.fields.DateTimeField', [], {}), + 'not_before': ('django.db.models.fields.DateTimeField', [], {}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'app.resourcerangeaddressv4': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeaddressv6': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeas': { + 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), + 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) + }, + 'app.timestamp': { + 'Meta': {'object_name': 'Timestamp'}, + 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), + 'ts': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': 
('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], 
{'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'irdb.ghostbusterrequest': { + 'Meta': {'object_name': 'GhostbusterRequest'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'vcard': ('django.db.models.fields.TextField', [], {}) + }, + 'irdb.parent': { + 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), + 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), + 'repository_type': ('rpki.irdb.models.EnumField', [], {}), + 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) + }, + 'irdb.resourceholderca': { + 'Meta': {'object_name': 'ResourceHolderCA'}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), 
+ 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), + 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) + }, + 'irdb.turtle': { + 'Meta': {'object_name': 'Turtle'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['app'] \ No newline at end of file diff --git a/rpki/gui/app/migrations/0007_default_acls.py b/rpki/gui/app/migrations/0007_default_acls.py new file mode 100644 index 00000000..40656d0f --- /dev/null +++ b/rpki/gui/app/migrations/0007_default_acls.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import DataMigration +from django.db import models +from django.core.exceptions import ObjectDoesNotExist + +class Migration(DataMigration): + + def forwards(self, orm): + "Write your forwards methods here." + # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..." + for conf in orm['irdb.ResourceHolderCA'].objects.all(): + try: + user = orm['auth.User'].objects.get(username=conf.handle) + orm['app.ConfACL'].objects.create( + conf=conf, + user=user + ) + except ObjectDoesNotExist: + pass + + def backwards(self, orm): + "Write your backwards methods here." 
+ orm['app.ConfACL'].objects.all().delete() + + models = { + 'app.confacl': { + 'Meta': {'unique_together': "(('user', 'conf'),)", 'object_name': 'ConfACL'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) + }, + 'app.ghostbusterrequest': { + 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, + 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), + 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), + 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), + 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 
'null': 'True', 'blank': 'True'}), + 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) + }, + 'app.resourcecert': { + 'Meta': {'object_name': 'ResourceCert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'not_after': ('django.db.models.fields.DateTimeField', [], {}), + 'not_before': ('django.db.models.fields.DateTimeField', [], {}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'app.resourcerangeaddressv4': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeaddressv6': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], 
{'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeas': { + 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), + 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) + }, + 'app.timestamp': { + 'Meta': {'object_name': 'Timestamp'}, + 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), + 'ts': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', 
[], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'irdb.ghostbusterrequest': { + 'Meta': {'object_name': 'GhostbusterRequest'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 
'vcard': ('django.db.models.fields.TextField', [], {}) + }, + 'irdb.parent': { + 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), + 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), + 'repository_type': ('rpki.irdb.models.EnumField', [], {}), + 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) + }, + 'irdb.resourceholderca': { + 'Meta': {'object_name': 'ResourceHolderCA'}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), + 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 
'True'}) + }, + 'irdb.turtle': { + 'Meta': {'object_name': 'Turtle'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['app'] + symmetrical = True diff --git a/rpki/gui/app/migrations/0008_add_alerts.py b/rpki/gui/app/migrations/0008_add_alerts.py new file mode 100644 index 00000000..77af68d2 --- /dev/null +++ b/rpki/gui/app/migrations/0008_add_alerts.py @@ -0,0 +1,176 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding model 'Alert' + db.create_table('app_alert', ( + ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('conf', self.gf('django.db.models.fields.related.ForeignKey')(related_name='alerts', to=orm['irdb.ResourceHolderCA'])), + ('severity', self.gf('django.db.models.fields.SmallIntegerField')(default=0)), + ('when', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), + ('seen', self.gf('django.db.models.fields.BooleanField')(default=False)), + ('subject', self.gf('django.db.models.fields.CharField')(max_length=66)), + ('text', self.gf('django.db.models.fields.TextField')()), + )) + db.send_create_signal('app', ['Alert']) + + + def backwards(self, orm): + # Deleting model 'Alert' + db.delete_table('app_alert') + + + models = { + 'app.alert': { + 'Meta': {'object_name': 'Alert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'alerts'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'seen': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'severity': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), + 'subject': ('django.db.models.fields.CharField', [], 
{'max_length': '66'}), + 'text': ('django.db.models.fields.TextField', [], {}), + 'when': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}) + }, + 'app.confacl': { + 'Meta': {'unique_together': "(('user', 'conf'),)", 'object_name': 'ConfACL'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) + }, + 'app.ghostbusterrequest': { + 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, + 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), + 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), + 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), + 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), + 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), + 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 
'null': 'True', 'blank': 'True'}), + 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), + 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), + 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) + }, + 'app.resourcecert': { + 'Meta': {'object_name': 'ResourceCert'}, + 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'not_after': ('django.db.models.fields.DateTimeField', [], {}), + 'not_before': ('django.db.models.fields.DateTimeField', [], {}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + }, + 'app.resourcerangeaddressv4': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeaddressv6': { + 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), + 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), + 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) + }, + 'app.resourcerangeas': { + 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, + 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), + 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) + }, + 'app.timestamp': { + 'Meta': {'object_name': 'Timestamp'}, + 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), + 'ts': ('django.db.models.fields.DateTimeField', [], {}) + }, + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': 
('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'irdb.ghostbusterrequest': { + 'Meta': {'object_name': 'GhostbusterRequest'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent': 
('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), + 'vcard': ('django.db.models.fields.TextField', [], {}) + }, + 'irdb.parent': { + 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), + 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), + 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), + 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), + 'repository_type': ('rpki.irdb.models.EnumField', [], {}), + 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) + }, + 'irdb.resourceholderca': { + 'Meta': {'object_name': 'ResourceHolderCA'}, + 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), + 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), + 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), + 'next_serial': 
('django.db.models.fields.BigIntegerField', [], {'default': '1'}), + 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) + }, + 'irdb.turtle': { + 'Meta': {'object_name': 'Turtle'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) + } + } + + complete_apps = ['app'] \ No newline at end of file diff --git a/rpki/gui/app/migrations/__init__.py b/rpki/gui/app/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/rpki/gui/app/models.py b/rpki/gui/app/models.py new file mode 100644 index 00000000..7d643fdc --- /dev/null +++ b/rpki/gui/app/models.py @@ -0,0 +1,420 @@ +# Copyright (C) 2010 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +__version__ = '$Id$' + +from django.db import models +from django.contrib.auth.models import User +from django.core.mail import send_mail + +import rpki.resource_set +import rpki.exceptions +import rpki.irdb.models +import rpki.gui.models +import rpki.gui.routeview.models +from south.modelsinspector import add_introspection_rules + + +class TelephoneField(models.CharField): + def __init__(self, **kwargs): + if 'max_length' not in kwargs: + kwargs['max_length'] = 40 + models.CharField.__init__(self, **kwargs) + +add_introspection_rules([], ['^rpki\.gui\.app\.models\.TelephoneField']) + + +class Parent(rpki.irdb.models.Parent): + """proxy model for irdb Parent""" + + def __unicode__(self): + return u"%s's parent %s" % (self.issuer.handle, self.handle) + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.parent_detail', [str(self.pk)]) + + class Meta: + proxy = True + + +class Child(rpki.irdb.models.Child): + """proxy model for irdb Child""" + + def __unicode__(self): + return u"%s's child %s" % (self.issuer.handle, self.handle) + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.child_detail', [str(self.pk)]) + + class Meta: + proxy = True + verbose_name_plural = 'children' + + +class ChildASN(rpki.irdb.models.ChildASN): + """Proxy model for irdb ChildASN.""" + + class Meta: + proxy = True + + def __unicode__(self): + return u'AS%s' % self.as_resource_range() + + +class ChildNet(rpki.irdb.models.ChildNet): + """Proxy model for irdb ChildNet.""" + + class Meta: + proxy = True + + def __unicode__(self): + return u'%s' % self.as_resource_range() + + +class Alert(models.Model): + """Stores alert messages intended to be consumed by the user.""" + + INFO = 0 + WARNING = 1 + ERROR = 2 + + SEVERITY_CHOICES = ( + (INFO, 'info'), + (WARNING, 'warning'), + (ERROR, 'error'), + ) + + conf = models.ForeignKey('Conf', related_name='alerts') + severity = models.SmallIntegerField(choices=SEVERITY_CHOICES, default=INFO) + 
when = models.DateTimeField(auto_now_add=True) + seen = models.BooleanField(default=False) + subject = models.CharField(max_length=66) + text = models.TextField() + + @models.permalink + def get_absolute_url(self): + return ('alert-detail', [str(self.pk)]) + + +class Conf(rpki.irdb.models.ResourceHolderCA): + """This is the center of the universe, also known as a place to + have a handle on a resource-holding entity. It's the <self/> + in the rpkid schema. + + """ + @property + def parents(self): + """Simulates irdb.models.Parent.objects, but returns app.models.Parent + proxy objects. + + """ + return Parent.objects.filter(issuer=self) + + @property + def children(self): + """Simulates irdb.models.Child.objects, but returns app.models.Child + proxy objects. + + """ + return Child.objects.filter(issuer=self) + + @property + def ghostbusters(self): + return GhostbusterRequest.objects.filter(issuer=self) + + @property + def repositories(self): + return Repository.objects.filter(issuer=self) + + @property + def roas(self): + return ROARequest.objects.filter(issuer=self) + + @property + def routes(self): + """Return all IPv4 routes covered by RPKI certs issued to this resource + holder. + + """ + # build a Q filter to select all RouteOrigin objects covered by + # prefixes in the resource holder's certificates + q = models.Q() + for p in ResourceRangeAddressV4.objects.filter(cert__conf=self): + q |= models.Q(prefix_min__gte=p.prefix_min, + prefix_max__lte=p.prefix_max) + return RouteOrigin.objects.filter(q) + + @property + def routes_v6(self): + """Return all IPv6 routes covered by RPKI certs issued to this resource + holder. 
+ + """ + # build a Q filter to select all RouteOrigin objects covered by + # prefixes in the resource holder's certificates + q = models.Q() + for p in ResourceRangeAddressV6.objects.filter(cert__conf=self): + q |= models.Q(prefix_min__gte=p.prefix_min, + prefix_max__lte=p.prefix_max) + return RouteOriginV6.objects.filter(q) + + def send_alert(self, subject, message, from_email, severity=Alert.INFO): + """Store an alert for this resource holder.""" + self.alerts.create(subject=subject, text=message, severity=severity) + + send_mail( + subject=subject, + message=message, + from_email=from_email, + recipient_list=self.email_list + ) + + @property + def email_list(self): + """Return a list of the contact emails for this resource holder. + + Contact emails are extract from any ghostbuster requests, and any + linked user accounts. + + """ + notify_emails = [gbr.email_address for gbr in self.ghostbusters if gbr.email_address] + notify_emails.extend( + [acl.user.email for acl in ConfACL.objects.filter(conf=self) if acl.user.email] + ) + return notify_emails + + def clear_alerts(self): + self.alerts.all().delete() + + + class Meta: + proxy = True + + +class ResourceCert(models.Model): + """Represents a resource certificate. + + This model is used to cache the output of . + + """ + + # Handle to which this cert was issued + conf = models.ForeignKey(Conf, related_name='certs') + + # The parent that issued the cert. This field is marked null=True because + # the root has no parent + parent = models.ForeignKey(Parent, related_name='certs', null=True) + + # certificate validity period + not_before = models.DateTimeField() + not_after = models.DateTimeField() + + # Locator for this object. 
Used to look up the validation status, expiry + # of ancestor certs in cacheview + uri = models.CharField(max_length=255) + + def __unicode__(self): + if self.parent: + return u"%s's cert from %s" % (self.conf.handle, + self.parent.handle) + else: + return u"%s's root cert" % self.conf.handle + + def get_cert_chain(self): + """Return a list containing the complete certificate chain for this + certificate.""" + cert = self + x = [cert] + while cert.issuer: + cert = cert.issuer + x.append(cert) + x.reverse() + return x + cert_chain = property(get_cert_chain) + + +class ResourceRangeAddressV4(rpki.gui.models.PrefixV4): + cert = models.ForeignKey(ResourceCert, related_name='address_ranges') + + +class ResourceRangeAddressV6(rpki.gui.models.PrefixV6): + cert = models.ForeignKey(ResourceCert, related_name='address_ranges_v6') + + +class ResourceRangeAS(rpki.gui.models.ASN): + cert = models.ForeignKey(ResourceCert, related_name='asn_ranges') + + +class ROARequest(rpki.irdb.models.ROARequest): + class Meta: + proxy = True + + def __unicode__(self): + return u"%s's ROA request for AS%d" % (self.issuer.handle, self.asn) + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.roa_detail', [str(self.pk)]) + + @property + def routes(self): + "Return all IPv4 routes covered by this roa prefix." + # this assumes one prefix per ROA + rng = self.prefixes.filter(version=4)[0].as_resource_range() + return rpki.gui.routeview.models.RouteOrigin.objects.filter( + prefix_min__gte=rng.min, + prefix_max__lte=rng.max + ) + + @property + def routes_v6(self): + "Return all IPv6 routes covered by this roa prefix." 
+ # this assumes one prefix per ROA + rng = self.prefixes.filter(version=6)[0].as_resource_range() + return rpki.gui.routeview.models.RouteOriginV6.objects.filter( + prefix_min__gte=rng.min, + prefix_max__lte=rng.max + ) + + +class ROARequestPrefix(rpki.irdb.models.ROARequestPrefix): + class Meta: + proxy = True + + def __unicode__(self): + return u'ROA Request Prefix %s' % str(self.as_roa_prefix()) + + +class GhostbusterRequest(rpki.irdb.models.GhostbusterRequest): + """ + Stores the information require to fill out a vCard entry to + populate a ghostbusters record. + + This model is inherited from the irdb GhostBusterRequest model so + that the broken out fields can be included for ease of editing. + """ + + full_name = models.CharField(max_length=40) + + # components of the vCard N type + family_name = models.CharField(max_length=20) + given_name = models.CharField(max_length=20) + additional_name = models.CharField(max_length=20, blank=True, null=True) + honorific_prefix = models.CharField(max_length=10, blank=True, null=True) + honorific_suffix = models.CharField(max_length=10, blank=True, null=True) + + email_address = models.EmailField(blank=True, null=True) + organization = models.CharField(blank=True, null=True, max_length=255) + telephone = TelephoneField(blank=True, null=True) + + # elements of the ADR type + box = models.CharField(verbose_name='P.O. 
Box', blank=True, null=True, + max_length=40) + extended = models.CharField(blank=True, null=True, max_length=255) + street = models.CharField(blank=True, null=True, max_length=255) + city = models.CharField(blank=True, null=True, max_length=40) + region = models.CharField(blank=True, null=True, max_length=40, + help_text='state or province') + code = models.CharField(verbose_name='Postal Code', blank=True, null=True, + max_length=40) + country = models.CharField(blank=True, null=True, max_length=40) + + def __unicode__(self): + return u"%s's GBR: %s" % (self.issuer.handle, self.full_name) + + @models.permalink + def get_absolute_url(self): + return ('gbr-detail', [str(self.pk)]) + + class Meta: + ordering = ('family_name', 'given_name') + + +class Timestamp(models.Model): + """Model to hold metadata about the collection of external data. + + This model is a hash table mapping a timestamp name to the + timestamp value. All timestamp values are in UTC. + + The utility function rpki.gui.app.timestamp.update(name) should be used to + set timestamps rather than updating this model directly.""" + + name = models.CharField(max_length=30, primary_key=True) + ts = models.DateTimeField(null=False) + + def __unicode__(self): + return '%s: %s' % (self.name, self.ts) + + +class Repository(rpki.irdb.models.Repository): + class Meta: + proxy = True + verbose_name = 'Repository' + verbose_name_plural = 'Repositories' + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.repository_detail', [str(self.pk)]) + + def __unicode__(self): + return "%s's repository %s" % (self.issuer.handle, self.handle) + + +class Client(rpki.irdb.models.Client): + "Proxy model for pubd clients." 
+ + class Meta: + proxy = True + verbose_name = 'Client' + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.client_detail', [str(self.pk)]) + + def __unicode__(self): + return self.handle + + +class RouteOrigin(rpki.gui.routeview.models.RouteOrigin): + class Meta: + proxy = True + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.route_detail', [str(self.pk)]) + + +class RouteOriginV6(rpki.gui.routeview.models.RouteOriginV6): + class Meta: + proxy = True + + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.route_detail', [str(self.pk)]) + + +class ConfACL(models.Model): + """Stores access control for which users are allowed to manage a given + resource handle. + + """ + + conf = models.ForeignKey(Conf) + user = models.ForeignKey(User) + + class Meta: + unique_together = (('user', 'conf')) diff --git a/rpki/gui/app/range_list.py b/rpki/gui/app/range_list.py new file mode 100755 index 00000000..21fd1f29 --- /dev/null +++ b/rpki/gui/app/range_list.py @@ -0,0 +1,252 @@ +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +__version__ = '$Id$' + +import bisect +import unittest + + +class RangeList(list): + """A sorted list of ranges, which automatically merges adjacent ranges. + + Items in the list are expected to have ".min" and ".max" attributes.""" + + def __init__(self, ini=None): + list.__init__(self) + if ini: + self.extend(ini) + + def append(self, v): + keys = [x.min for x in self] + + # lower bound + i = bisect.bisect_left(keys, v.min) + + # upper bound + j = bisect.bisect_right(keys, v.max, lo=i) + + # if the max value for the previous item is greater than v.min, include + # the previous item in the range to replace and use its min value. + # also include the previous item if the max value is 1 less than the + # min value for the inserted item + if i > 0 and self[i - 1].max >= v.min - 1: + i = i - 1 + vmin = self[i].min + else: + vmin = v.min + + # if the max value for the previous item is greater than the max value + # for the new item, use the previous item's max + if j > 0 and self[j - 1].max > v.max: + vmax = self[j - 1].max + else: + vmax = v.max + + # if the max value for the new item is 1 less than the min value for + # the next item, combine into a single item + if j < len(self) and vmax + 1 == self[j].min: + vmax = self[j].max + j = j + 1 + + # replace the range with a new object covering the entire range + self[i:j] = [v.__class__(vmin, vmax)] + + def extend(self, args): + for x in args: + self.append(x) + + def difference(self, other): + """Return a RangeList object which contains ranges in this object which + are not in "other".""" + it = iter(other) + + try: + cur = it.next() + except StopIteration: + return self + + r = RangeList() + + for x in self: + xmin = x.min + + def V(v): + """convert the integer value to the appropriate type for this + range""" + return x.__class__.datum_type(v) + + try: + while xmin <= x.max: + if xmin < cur.min: + r.append(x.__class__(V(xmin), + V(min(x.max, cur.min - 1)))) + xmin = cur.max + 1 + elif xmin == cur.min: + xmin = 
cur.max + 1 + else: # xmin > cur.min + if xmin <= cur.max: + xmin = cur.max + 1 + else: # xmin > cur.max + cur = it.next() + + except StopIteration: + r.append(x.__class__(V(xmin), x.max)) + + return r + + +class TestRangeList(unittest.TestCase): + class MinMax(object): + datum_type = int + + def __init__(self, range_min, range_max): + self.min = range_min + self.max = range_max + + def __str__(self): + return '(%d, %d)' % (self.min, self.max) + + def __repr__(self): + return '<MinMax: (%d, %d)>' % (self.min, self.max) + + def __eq__(self, other): + return self.min == other.min and self.max == other.max + + def setUp(self): + self.v1 = TestRangeList.MinMax(1, 2) + self.v2 = TestRangeList.MinMax(4, 5) + self.v3 = TestRangeList.MinMax(7, 8) + self.v4 = TestRangeList.MinMax(3, 4) + self.v5 = TestRangeList.MinMax(2, 3) + self.v6 = TestRangeList.MinMax(1, 10) + + def test_empty_append(self): + s = RangeList() + s.append(self.v1) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], self.v1) + + def test_no_overlap(self): + s = RangeList() + s.append(self.v1) + s.append(self.v2) + self.assertTrue(len(s) == 2) + self.assertEqual(s[0], self.v1) + self.assertEqual(s[1], self.v2) + + def test_no_overlap_prepend(self): + s = RangeList() + s.append(self.v2) + s.append(self.v1) + self.assertTrue(len(s) == 2) + self.assertEqual(s[0], self.v1) + self.assertEqual(s[1], self.v2) + + def test_insert_middle(self): + s = RangeList() + s.append(self.v1) + s.append(self.v3) + s.append(self.v2) + self.assertTrue(len(s) == 3) + self.assertEqual(s[0], self.v1) + self.assertEqual(s[1], self.v2) + self.assertEqual(s[2], self.v3) + + def test_append_overlap(self): + s = RangeList() + s.append(self.v1) + s.append(self.v5) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], TestRangeList.MinMax(1, 3)) + + def test_combine_range(self): + s = RangeList() + s.append(self.v1) + s.append(self.v4) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], TestRangeList.MinMax(1, 4)) + + def 
test_append_subset(self): + s = RangeList() + s.append(self.v6) + s.append(self.v3) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], self.v6) + + def test_append_equal(self): + s = RangeList() + s.append(self.v6) + s.append(self.v6) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], self.v6) + + def test_prepend_combine(self): + s = RangeList() + s.append(self.v4) + s.append(self.v1) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], TestRangeList.MinMax(1, 4)) + + def test_append_aggregate(self): + s = RangeList() + s.append(self.v1) + s.append(self.v2) + s.append(self.v3) + s.append(self.v6) + self.assertTrue(len(s) == 1) + self.assertEqual(s[0], self.v6) + + def test_diff_empty(self): + s = RangeList() + s.append(self.v1) + self.assertEqual(s, s.difference([])) + + def test_diff_self(self): + s = RangeList() + s.append(self.v1) + self.assertEqual(s.difference(s), []) + + def test_diff_middle(self): + s1 = RangeList([self.v6]) + s2 = RangeList([self.v3]) + self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(1, 6), TestRangeList.MinMax(9, 10)])) + + def test_diff_overlap(self): + s1 = RangeList([self.v2]) + s2 = RangeList([self.v4]) + self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(5, 5)])) + + def test_diff_overlap2(self): + s1 = RangeList([self.v2]) + s2 = RangeList([self.v4]) + self.assertEqual(s2.difference(s1), RangeList([TestRangeList.MinMax(3, 3)])) + + def test_diff_multi(self): + s1 = RangeList([TestRangeList.MinMax(1, 2), TestRangeList.MinMax(4, 5)]) + s2 = RangeList([TestRangeList.MinMax(4, 4)]) + self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(1, 2), TestRangeList.MinMax(5, 5)])) + + def test_diff_multi_overlap(self): + s1 = RangeList([TestRangeList.MinMax(1, 2), TestRangeList.MinMax(3, 4)]) + s2 = RangeList([TestRangeList.MinMax(2, 3)]) + self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(1,1), TestRangeList.MinMax(4,4)])) + + def test_diff_multi_overlap2(self): 
+ s1 = RangeList([TestRangeList.MinMax(1,2), TestRangeList.MinMax(3,4), TestRangeList.MinMax(6,7)]) + s2 = RangeList([TestRangeList.MinMax(2, 3), TestRangeList.MinMax(6, 6)]) + self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(1,1), TestRangeList.MinMax(4,4), TestRangeList.MinMax(7,7)])) + +if __name__ == '__main__': + unittest.main() diff --git a/rpki/gui/app/static/css/bootstrap.min.css b/rpki/gui/app/static/css/bootstrap.min.css new file mode 100644 index 00000000..c10c7f41 --- /dev/null +++ b/rpki/gui/app/static/css/bootstrap.min.css @@ -0,0 +1,9 @@ +/*! + * Bootstrap v2.3.1 + * + * Copyright 2012 Twitter, Inc + * Licensed under the Apache License v2.0 + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Designed and built with all the love in the world @twitter by @mdo and @fat. + */.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;line-height:0;content:""}.clearfix:after{clear:both}.hide-text{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.input-block-level{display:block;width:100%;min-height:30px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}audio:not([controls]){display:none}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}a:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}a:hover,a:active{outline:0}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}img{width:auto\9;height:auto;max-width:100%;vertical-align:middle;border:0;-ms-interpolation-mode:bicubic}#map_canvas img,.google-maps 
img{max-width:none}button,input,select,textarea{margin:0;font-size:100%;vertical-align:middle}button,input{*overflow:visible;line-height:normal}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}button,html input[type="button"],input[type="reset"],input[type="submit"]{cursor:pointer;-webkit-appearance:button}label,select,button,input[type="button"],input[type="reset"],input[type="submit"],input[type="radio"],input[type="checkbox"]{cursor:pointer}input[type="search"]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type="search"]::-webkit-search-decoration,input[type="search"]::-webkit-search-cancel-button{-webkit-appearance:none}textarea{overflow:auto;vertical-align:top}@media print{*{color:#000!important;text-shadow:none!important;background:transparent!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}}body{margin:0;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:20px;color:#333;background-color:#fff}a{color:#08c;text-decoration:none}a:hover,a:focus{color:#005580;text-decoration:underline}.img-rounded{-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.img-polaroid{padding:4px;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);-webkit-box-shadow:0 1px 3px rgba(0,0,0,0.1);-moz-box-shadow:0 1px 3px rgba(0,0,0,0.1);box-shadow:0 1px 3px 
rgba(0,0,0,0.1)}.img-circle{-webkit-border-radius:500px;-moz-border-radius:500px;border-radius:500px}.row{margin-left:-20px;*zoom:1}.row:before,.row:after{display:table;line-height:0;content:""}.row:after{clear:both}[class*="span"]{float:left;min-height:1px;margin-left:20px}.container,.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom .container{width:940px}.span12{width:940px}.span11{width:860px}.span10{width:780px}.span9{width:700px}.span8{width:620px}.span7{width:540px}.span6{width:460px}.span5{width:380px}.span4{width:300px}.span3{width:220px}.span2{width:140px}.span1{width:60px}.offset12{margin-left:980px}.offset11{margin-left:900px}.offset10{margin-left:820px}.offset9{margin-left:740px}.offset8{margin-left:660px}.offset7{margin-left:580px}.offset6{margin-left:500px}.offset5{margin-left:420px}.offset4{margin-left:340px}.offset3{margin-left:260px}.offset2{margin-left:180px}.offset1{margin-left:100px}.row-fluid{width:100%;*zoom:1}.row-fluid:before,.row-fluid:after{display:table;line-height:0;content:""}.row-fluid:after{clear:both}.row-fluid [class*="span"]{display:block;float:left;width:100%;min-height:30px;margin-left:2.127659574468085%;*margin-left:2.074468085106383%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.row-fluid [class*="span"]:first-child{margin-left:0}.row-fluid .controls-row [class*="span"]+[class*="span"]{margin-left:2.127659574468085%}.row-fluid .span12{width:100%;*width:99.94680851063829%}.row-fluid .span11{width:91.48936170212765%;*width:91.43617021276594%}.row-fluid .span10{width:82.97872340425532%;*width:82.92553191489361%}.row-fluid .span9{width:74.46808510638297%;*width:74.41489361702126%}.row-fluid .span8{width:65.95744680851064%;*width:65.90425531914893%}.row-fluid .span7{width:57.44680851063829%;*width:57.39361702127659%}.row-fluid .span6{width:48.93617021276595%;*width:48.88297872340425%}.row-fluid .span5{width:40.42553191489362%;*width:40.37234042553192%}.row-fluid 
.span4{width:31.914893617021278%;*width:31.861702127659576%}.row-fluid .span3{width:23.404255319148934%;*width:23.351063829787233%}.row-fluid .span2{width:14.893617021276595%;*width:14.840425531914894%}.row-fluid .span1{width:6.382978723404255%;*width:6.329787234042553%}.row-fluid .offset12{margin-left:104.25531914893617%;*margin-left:104.14893617021275%}.row-fluid .offset12:first-child{margin-left:102.12765957446808%;*margin-left:102.02127659574467%}.row-fluid .offset11{margin-left:95.74468085106382%;*margin-left:95.6382978723404%}.row-fluid .offset11:first-child{margin-left:93.61702127659574%;*margin-left:93.51063829787232%}.row-fluid .offset10{margin-left:87.23404255319149%;*margin-left:87.12765957446807%}.row-fluid .offset10:first-child{margin-left:85.1063829787234%;*margin-left:84.99999999999999%}.row-fluid .offset9{margin-left:78.72340425531914%;*margin-left:78.61702127659572%}.row-fluid .offset9:first-child{margin-left:76.59574468085106%;*margin-left:76.48936170212764%}.row-fluid .offset8{margin-left:70.2127659574468%;*margin-left:70.10638297872339%}.row-fluid .offset8:first-child{margin-left:68.08510638297872%;*margin-left:67.9787234042553%}.row-fluid .offset7{margin-left:61.70212765957446%;*margin-left:61.59574468085106%}.row-fluid .offset7:first-child{margin-left:59.574468085106375%;*margin-left:59.46808510638297%}.row-fluid .offset6{margin-left:53.191489361702125%;*margin-left:53.085106382978715%}.row-fluid .offset6:first-child{margin-left:51.063829787234035%;*margin-left:50.95744680851063%}.row-fluid .offset5{margin-left:44.68085106382979%;*margin-left:44.57446808510638%}.row-fluid .offset5:first-child{margin-left:42.5531914893617%;*margin-left:42.4468085106383%}.row-fluid .offset4{margin-left:36.170212765957444%;*margin-left:36.06382978723405%}.row-fluid .offset4:first-child{margin-left:34.04255319148936%;*margin-left:33.93617021276596%}.row-fluid .offset3{margin-left:27.659574468085104%;*margin-left:27.5531914893617%}.row-fluid 
.offset3:first-child{margin-left:25.53191489361702%;*margin-left:25.425531914893618%}.row-fluid .offset2{margin-left:19.148936170212764%;*margin-left:19.04255319148936%}.row-fluid .offset2:first-child{margin-left:17.02127659574468%;*margin-left:16.914893617021278%}.row-fluid .offset1{margin-left:10.638297872340425%;*margin-left:10.53191489361702%}.row-fluid .offset1:first-child{margin-left:8.51063829787234%;*margin-left:8.404255319148938%}[class*="span"].hide,.row-fluid [class*="span"].hide{display:none}[class*="span"].pull-right,.row-fluid [class*="span"].pull-right{float:right}.container{margin-right:auto;margin-left:auto;*zoom:1}.container:before,.container:after{display:table;line-height:0;content:""}.container:after{clear:both}.container-fluid{padding-right:20px;padding-left:20px;*zoom:1}.container-fluid:before,.container-fluid:after{display:table;line-height:0;content:""}.container-fluid:after{clear:both}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:21px;font-weight:200;line-height:30px}small{font-size:85%}strong{font-weight:bold}em{font-style:italic}cite{font-style:normal}.muted{color:#999}a.muted:hover,a.muted:focus{color:#808080}.text-warning{color:#c09853}a.text-warning:hover,a.text-warning:focus{color:#a47e3c}.text-error{color:#b94a48}a.text-error:hover,a.text-error:focus{color:#953b39}.text-info{color:#3a87ad}a.text-info:hover,a.text-info:focus{color:#2d6987}.text-success{color:#468847}a.text-success:hover,a.text-success:focus{color:#356635}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}h1,h2,h3,h4,h5,h6{margin:10px 0;font-family:inherit;font-weight:bold;line-height:20px;color:inherit;text-rendering:optimizelegibility}h1 small,h2 small,h3 small,h4 small,h5 small,h6 small{font-weight:normal;line-height:1;color:#999}h1,h2,h3{line-height:40px}h1{font-size:38.5px}h2{font-size:31.5px}h3{font-size:24.5px}h4{font-size:17.5px}h5{font-size:14px}h6{font-size:11.9px}h1 small{font-size:24.5px}h2 
small{font-size:17.5px}h3 small{font-size:14px}h4 small{font-size:14px}.page-header{padding-bottom:9px;margin:20px 0 30px;border-bottom:1px solid #eee}ul,ol{padding:0;margin:0 0 10px 25px}ul ul,ul ol,ol ol,ol ul{margin-bottom:0}li{line-height:20px}ul.unstyled,ol.unstyled{margin-left:0;list-style:none}ul.inline,ol.inline{margin-left:0;list-style:none}ul.inline>li,ol.inline>li{display:inline-block;*display:inline;padding-right:5px;padding-left:5px;*zoom:1}dl{margin-bottom:20px}dt,dd{line-height:20px}dt{font-weight:bold}dd{margin-left:10px}.dl-horizontal{*zoom:1}.dl-horizontal:before,.dl-horizontal:after{display:table;line-height:0;content:""}.dl-horizontal:after{clear:both}.dl-horizontal dt{float:left;width:160px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}hr{margin:20px 0;border:0;border-top:1px solid #eee;border-bottom:1px solid #fff}abbr[title],abbr[data-original-title]{cursor:help;border-bottom:1px dotted #999}abbr.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:0 0 0 15px;margin:0 0 20px;border-left:5px solid #eee}blockquote p{margin-bottom:0;font-size:17.5px;font-weight:300;line-height:1.25}blockquote small{display:block;line-height:20px;color:#999}blockquote small:before{content:'\2014 \00A0'}blockquote.pull-right{float:right;padding-right:15px;padding-left:0;border-right:5px solid #eee;border-left:0}blockquote.pull-right p,blockquote.pull-right small{text-align:right}blockquote.pull-right small:before{content:''}blockquote.pull-right small:after{content:'\00A0 \2014'}q:before,q:after,blockquote:before,blockquote:after{content:""}address{display:block;margin-bottom:20px;font-style:normal;line-height:20px}code,pre{padding:0 3px 2px;font-family:Monaco,Menlo,Consolas,"Courier New",monospace;font-size:12px;color:#333;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}code{padding:2px 
4px;color:#d14;white-space:nowrap;background-color:#f7f7f9;border:1px solid #e1e1e8}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:20px;word-break:break-all;word-wrap:break-word;white-space:pre;white-space:pre-wrap;background-color:#f5f5f5;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}pre.prettyprint{margin-bottom:20px}pre code{padding:0;color:inherit;white-space:pre;white-space:pre-wrap;background-color:transparent;border:0}.pre-scrollable{max-height:340px;overflow-y:scroll}form{margin:0 0 20px}fieldset{padding:0;margin:0;border:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:40px;color:#333;border:0;border-bottom:1px solid #e5e5e5}legend small{font-size:15px;color:#999}label,input,button,select,textarea{font-size:14px;font-weight:normal;line-height:20px}input,button,select,textarea{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif}label{display:block;margin-bottom:5px}select,textarea,input[type="text"],input[type="password"],input[type="datetime"],input[type="datetime-local"],input[type="date"],input[type="month"],input[type="time"],input[type="week"],input[type="number"],input[type="email"],input[type="url"],input[type="search"],input[type="tel"],input[type="color"],.uneditable-input{display:inline-block;height:20px;padding:4px 6px;margin-bottom:10px;font-size:14px;line-height:20px;color:#555;vertical-align:middle;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}input,textarea,.uneditable-input{width:206px}textarea{height:auto}textarea,input[type="text"],input[type="password"],input[type="datetime"],input[type="datetime-local"],input[type="date"],input[type="month"],input[type="time"],input[type="week"],input[type="number"],input[type="email"],input[type="url"],input[type="search"],input[type="tel"],input[type="color"],.uneditable-input{background-color:#fff;border:1px solid 
#ccc;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-webkit-transition:border linear .2s,box-shadow linear .2s;-moz-transition:border linear .2s,box-shadow linear .2s;-o-transition:border linear .2s,box-shadow linear .2s;transition:border linear .2s,box-shadow linear .2s}textarea:focus,input[type="text"]:focus,input[type="password"]:focus,input[type="datetime"]:focus,input[type="datetime-local"]:focus,input[type="date"]:focus,input[type="month"]:focus,input[type="time"]:focus,input[type="week"]:focus,input[type="number"]:focus,input[type="email"]:focus,input[type="url"]:focus,input[type="search"]:focus,input[type="tel"]:focus,input[type="color"]:focus,.uneditable-input:focus{border-color:rgba(82,168,236,0.8);outline:0;outline:thin dotted \9;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6)}input[type="radio"],input[type="checkbox"]{margin:4px 0 0;margin-top:1px \9;*margin-top:0;line-height:normal}input[type="file"],input[type="image"],input[type="submit"],input[type="reset"],input[type="button"],input[type="radio"],input[type="checkbox"]{width:auto}select,input[type="file"]{height:30px;*margin-top:4px;line-height:30px}select{width:220px;background-color:#fff;border:1px solid #ccc}select[multiple],select[size]{height:auto}select:focus,input[type="file"]:focus,input[type="radio"]:focus,input[type="checkbox"]:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.uneditable-input,.uneditable-textarea{color:#999;cursor:not-allowed;background-color:#fcfcfc;border-color:#ccc;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.025);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.025);box-shadow:inset 0 1px 2px 
rgba(0,0,0,0.025)}.uneditable-input{overflow:hidden;white-space:nowrap}.uneditable-textarea{width:auto;height:auto}input:-moz-placeholder,textarea:-moz-placeholder{color:#999}input:-ms-input-placeholder,textarea:-ms-input-placeholder{color:#999}input::-webkit-input-placeholder,textarea::-webkit-input-placeholder{color:#999}.radio,.checkbox{min-height:20px;padding-left:20px}.radio input[type="radio"],.checkbox input[type="checkbox"]{float:left;margin-left:-20px}.controls>.radio:first-child,.controls>.checkbox:first-child{padding-top:5px}.radio.inline,.checkbox.inline{display:inline-block;padding-top:5px;margin-bottom:0;vertical-align:middle}.radio.inline+.radio.inline,.checkbox.inline+.checkbox.inline{margin-left:10px}.input-mini{width:60px}.input-small{width:90px}.input-medium{width:150px}.input-large{width:210px}.input-xlarge{width:270px}.input-xxlarge{width:530px}input[class*="span"],select[class*="span"],textarea[class*="span"],.uneditable-input[class*="span"],.row-fluid input[class*="span"],.row-fluid select[class*="span"],.row-fluid textarea[class*="span"],.row-fluid .uneditable-input[class*="span"]{float:none;margin-left:0}.input-append input[class*="span"],.input-append .uneditable-input[class*="span"],.input-prepend input[class*="span"],.input-prepend .uneditable-input[class*="span"],.row-fluid input[class*="span"],.row-fluid select[class*="span"],.row-fluid textarea[class*="span"],.row-fluid .uneditable-input[class*="span"],.row-fluid .input-prepend [class*="span"],.row-fluid .input-append [class*="span"]{display:inline-block}input,textarea,.uneditable-input{margin-left:0}.controls-row 
[class*="span"]+[class*="span"]{margin-left:20px}input.span12,textarea.span12,.uneditable-input.span12{width:926px}input.span11,textarea.span11,.uneditable-input.span11{width:846px}input.span10,textarea.span10,.uneditable-input.span10{width:766px}input.span9,textarea.span9,.uneditable-input.span9{width:686px}input.span8,textarea.span8,.uneditable-input.span8{width:606px}input.span7,textarea.span7,.uneditable-input.span7{width:526px}input.span6,textarea.span6,.uneditable-input.span6{width:446px}input.span5,textarea.span5,.uneditable-input.span5{width:366px}input.span4,textarea.span4,.uneditable-input.span4{width:286px}input.span3,textarea.span3,.uneditable-input.span3{width:206px}input.span2,textarea.span2,.uneditable-input.span2{width:126px}input.span1,textarea.span1,.uneditable-input.span1{width:46px}.controls-row{*zoom:1}.controls-row:before,.controls-row:after{display:table;line-height:0;content:""}.controls-row:after{clear:both}.controls-row [class*="span"],.row-fluid .controls-row [class*="span"]{float:left}.controls-row .checkbox[class*="span"],.controls-row .radio[class*="span"]{padding-top:5px}input[disabled],select[disabled],textarea[disabled],input[readonly],select[readonly],textarea[readonly]{cursor:not-allowed;background-color:#eee}input[type="radio"][disabled],input[type="checkbox"][disabled],input[type="radio"][readonly],input[type="checkbox"][readonly]{background-color:transparent}.control-group.warning .control-label,.control-group.warning .help-block,.control-group.warning .help-inline{color:#c09853}.control-group.warning .checkbox,.control-group.warning .radio,.control-group.warning input,.control-group.warning select,.control-group.warning textarea{color:#c09853}.control-group.warning input,.control-group.warning select,.control-group.warning textarea{border-color:#c09853;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.warning 
input:focus,.control-group.warning select:focus,.control-group.warning textarea:focus{border-color:#a47e3c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e}.control-group.warning .input-prepend .add-on,.control-group.warning .input-append .add-on{color:#c09853;background-color:#fcf8e3;border-color:#c09853}.control-group.error .control-label,.control-group.error .help-block,.control-group.error .help-inline{color:#b94a48}.control-group.error .checkbox,.control-group.error .radio,.control-group.error input,.control-group.error select,.control-group.error textarea{color:#b94a48}.control-group.error input,.control-group.error select,.control-group.error textarea{border-color:#b94a48;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.error input:focus,.control-group.error select:focus,.control-group.error textarea:focus{border-color:#953b39;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392}.control-group.error .input-prepend .add-on,.control-group.error .input-append .add-on{color:#b94a48;background-color:#f2dede;border-color:#b94a48}.control-group.success .control-label,.control-group.success .help-block,.control-group.success .help-inline{color:#468847}.control-group.success .checkbox,.control-group.success .radio,.control-group.success input,.control-group.success select,.control-group.success textarea{color:#468847}.control-group.success input,.control-group.success select,.control-group.success textarea{border-color:#468847;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px 
rgba(0,0,0,0.075)}.control-group.success input:focus,.control-group.success select:focus,.control-group.success textarea:focus{border-color:#356635;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b}.control-group.success .input-prepend .add-on,.control-group.success .input-append .add-on{color:#468847;background-color:#dff0d8;border-color:#468847}.control-group.info .control-label,.control-group.info .help-block,.control-group.info .help-inline{color:#3a87ad}.control-group.info .checkbox,.control-group.info .radio,.control-group.info input,.control-group.info select,.control-group.info textarea{color:#3a87ad}.control-group.info input,.control-group.info select,.control-group.info textarea{border-color:#3a87ad;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.info input:focus,.control-group.info select:focus,.control-group.info textarea:focus{border-color:#2d6987;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7ab5d3;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7ab5d3;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7ab5d3}.control-group.info .input-prepend .add-on,.control-group.info .input-append .add-on{color:#3a87ad;background-color:#d9edf7;border-color:#3a87ad}input:focus:invalid,textarea:focus:invalid,select:focus:invalid{color:#b94a48;border-color:#ee5f5b}input:focus:invalid:focus,textarea:focus:invalid:focus,select:focus:invalid:focus{border-color:#e9322d;-webkit-box-shadow:0 0 6px #f8b9b7;-moz-box-shadow:0 0 6px #f8b9b7;box-shadow:0 0 6px #f8b9b7}.form-actions{padding:19px 20px 20px;margin-top:20px;margin-bottom:20px;background-color:#f5f5f5;border-top:1px solid 
#e5e5e5;*zoom:1}.form-actions:before,.form-actions:after{display:table;line-height:0;content:""}.form-actions:after{clear:both}.help-block,.help-inline{color:#595959}.help-block{display:block;margin-bottom:10px}.help-inline{display:inline-block;*display:inline;padding-left:5px;vertical-align:middle;*zoom:1}.input-append,.input-prepend{display:inline-block;margin-bottom:10px;font-size:0;white-space:nowrap;vertical-align:middle}.input-append input,.input-prepend input,.input-append select,.input-prepend select,.input-append .uneditable-input,.input-prepend .uneditable-input,.input-append .dropdown-menu,.input-prepend .dropdown-menu,.input-append .popover,.input-prepend .popover{font-size:14px}.input-append input,.input-prepend input,.input-append select,.input-prepend select,.input-append .uneditable-input,.input-prepend .uneditable-input{position:relative;margin-bottom:0;*margin-left:0;vertical-align:top;-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-append input:focus,.input-prepend input:focus,.input-append select:focus,.input-prepend select:focus,.input-append .uneditable-input:focus,.input-prepend .uneditable-input:focus{z-index:2}.input-append .add-on,.input-prepend .add-on{display:inline-block;width:auto;height:20px;min-width:16px;padding:4px 5px;font-size:14px;font-weight:normal;line-height:20px;text-align:center;text-shadow:0 1px 0 #fff;background-color:#eee;border:1px solid #ccc}.input-append .add-on,.input-prepend .add-on,.input-append .btn,.input-prepend .btn,.input-append .btn-group>.dropdown-toggle,.input-prepend .btn-group>.dropdown-toggle{vertical-align:top;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.input-append .active,.input-prepend .active{background-color:#a9dba9;border-color:#46a546}.input-prepend .add-on,.input-prepend .btn{margin-right:-1px}.input-prepend .add-on:first-child,.input-prepend .btn:first-child{-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 
4px;border-radius:4px 0 0 4px}.input-append input,.input-append select,.input-append .uneditable-input{-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 4px;border-radius:4px 0 0 4px}.input-append input+.btn-group .btn:last-child,.input-append select+.btn-group .btn:last-child,.input-append .uneditable-input+.btn-group .btn:last-child{-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-append .add-on,.input-append .btn,.input-append .btn-group{margin-left:-1px}.input-append .add-on:last-child,.input-append .btn:last-child,.input-append .btn-group:last-child>.dropdown-toggle{-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-prepend.input-append input,.input-prepend.input-append select,.input-prepend.input-append .uneditable-input{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.input-prepend.input-append input+.btn-group .btn,.input-prepend.input-append select+.btn-group .btn,.input-prepend.input-append .uneditable-input+.btn-group .btn{-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-prepend.input-append .add-on:first-child,.input-prepend.input-append .btn:first-child{margin-right:-1px;-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 4px;border-radius:4px 0 0 4px}.input-prepend.input-append .add-on:last-child,.input-prepend.input-append .btn:last-child{margin-left:-1px;-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-prepend.input-append .btn-group:first-child{margin-left:0}input.search-query{padding-right:14px;padding-right:4px \9;padding-left:14px;padding-left:4px \9;margin-bottom:0;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.form-search .input-append .search-query,.form-search .input-prepend .search-query{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.form-search .input-append 
.search-query{-webkit-border-radius:14px 0 0 14px;-moz-border-radius:14px 0 0 14px;border-radius:14px 0 0 14px}.form-search .input-append .btn{-webkit-border-radius:0 14px 14px 0;-moz-border-radius:0 14px 14px 0;border-radius:0 14px 14px 0}.form-search .input-prepend .search-query{-webkit-border-radius:0 14px 14px 0;-moz-border-radius:0 14px 14px 0;border-radius:0 14px 14px 0}.form-search .input-prepend .btn{-webkit-border-radius:14px 0 0 14px;-moz-border-radius:14px 0 0 14px;border-radius:14px 0 0 14px}.form-search input,.form-inline input,.form-horizontal input,.form-search textarea,.form-inline textarea,.form-horizontal textarea,.form-search select,.form-inline select,.form-horizontal select,.form-search .help-inline,.form-inline .help-inline,.form-horizontal .help-inline,.form-search .uneditable-input,.form-inline .uneditable-input,.form-horizontal .uneditable-input,.form-search .input-prepend,.form-inline .input-prepend,.form-horizontal .input-prepend,.form-search .input-append,.form-inline .input-append,.form-horizontal .input-append{display:inline-block;*display:inline;margin-bottom:0;vertical-align:middle;*zoom:1}.form-search .hide,.form-inline .hide,.form-horizontal .hide{display:none}.form-search label,.form-inline label,.form-search .btn-group,.form-inline .btn-group{display:inline-block}.form-search .input-append,.form-inline .input-append,.form-search .input-prepend,.form-inline .input-prepend{margin-bottom:0}.form-search .radio,.form-search .checkbox,.form-inline .radio,.form-inline .checkbox{padding-left:0;margin-bottom:0;vertical-align:middle}.form-search .radio input[type="radio"],.form-search .checkbox input[type="checkbox"],.form-inline .radio input[type="radio"],.form-inline .checkbox input[type="checkbox"]{float:left;margin-right:3px;margin-left:0}.control-group{margin-bottom:10px}legend+.control-group{margin-top:20px;-webkit-margin-top-collapse:separate}.form-horizontal .control-group{margin-bottom:20px;*zoom:1}.form-horizontal 
.control-group:before,.form-horizontal .control-group:after{display:table;line-height:0;content:""}.form-horizontal .control-group:after{clear:both}.form-horizontal .control-label{float:left;width:160px;padding-top:5px;text-align:right}.form-horizontal .controls{*display:inline-block;*padding-left:20px;margin-left:180px;*margin-left:0}.form-horizontal .controls:first-child{*padding-left:180px}.form-horizontal .help-block{margin-bottom:0}.form-horizontal input+.help-block,.form-horizontal select+.help-block,.form-horizontal textarea+.help-block,.form-horizontal .uneditable-input+.help-block,.form-horizontal .input-prepend+.help-block,.form-horizontal .input-append+.help-block{margin-top:10px}.form-horizontal .form-actions{padding-left:180px}table{max-width:100%;background-color:transparent;border-collapse:collapse;border-spacing:0}.table{width:100%;margin-bottom:20px}.table th,.table td{padding:8px;line-height:20px;text-align:left;vertical-align:top;border-top:1px solid #ddd}.table th{font-weight:bold}.table thead th{vertical-align:bottom}.table caption+thead tr:first-child th,.table caption+thead tr:first-child td,.table colgroup+thead tr:first-child th,.table colgroup+thead tr:first-child td,.table thead:first-child tr:first-child th,.table thead:first-child tr:first-child td{border-top:0}.table tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed th,.table-condensed td{padding:4px 5px}.table-bordered{border:1px solid #ddd;border-collapse:separate;*border-collapse:collapse;border-left:0;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.table-bordered th,.table-bordered td{border-left:1px solid #ddd}.table-bordered caption+thead tr:first-child th,.table-bordered caption+tbody tr:first-child th,.table-bordered caption+tbody tr:first-child td,.table-bordered colgroup+thead tr:first-child th,.table-bordered colgroup+tbody tr:first-child th,.table-bordered colgroup+tbody tr:first-child td,.table-bordered 
thead:first-child tr:first-child th,.table-bordered tbody:first-child tr:first-child th,.table-bordered tbody:first-child tr:first-child td{border-top:0}.table-bordered thead:first-child tr:first-child>th:first-child,.table-bordered tbody:first-child tr:first-child>td:first-child,.table-bordered tbody:first-child tr:first-child>th:first-child{-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topleft:4px}.table-bordered thead:first-child tr:first-child>th:last-child,.table-bordered tbody:first-child tr:first-child>td:last-child,.table-bordered tbody:first-child tr:first-child>th:last-child{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-moz-border-radius-topright:4px}.table-bordered thead:last-child tr:last-child>th:first-child,.table-bordered tbody:last-child tr:last-child>td:first-child,.table-bordered tbody:last-child tr:last-child>th:first-child,.table-bordered tfoot:last-child tr:last-child>td:first-child,.table-bordered tfoot:last-child tr:last-child>th:first-child{-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomleft:4px}.table-bordered thead:last-child tr:last-child>th:last-child,.table-bordered tbody:last-child tr:last-child>td:last-child,.table-bordered tbody:last-child tr:last-child>th:last-child,.table-bordered tfoot:last-child tr:last-child>td:last-child,.table-bordered tfoot:last-child tr:last-child>th:last-child{-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-moz-border-radius-bottomright:4px}.table-bordered tfoot+tbody:last-child tr:last-child td:first-child{-webkit-border-bottom-left-radius:0;border-bottom-left-radius:0;-moz-border-radius-bottomleft:0}.table-bordered tfoot+tbody:last-child tr:last-child td:last-child{-webkit-border-bottom-right-radius:0;border-bottom-right-radius:0;-moz-border-radius-bottomright:0}.table-bordered caption+thead tr:first-child th:first-child,.table-bordered caption+tbody tr:first-child 
td:first-child,.table-bordered colgroup+thead tr:first-child th:first-child,.table-bordered colgroup+tbody tr:first-child td:first-child{-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topleft:4px}.table-bordered caption+thead tr:first-child th:last-child,.table-bordered caption+tbody tr:first-child td:last-child,.table-bordered colgroup+thead tr:first-child th:last-child,.table-bordered colgroup+tbody tr:first-child td:last-child{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-moz-border-radius-topright:4px}.table-striped tbody>tr:nth-child(odd)>td,.table-striped tbody>tr:nth-child(odd)>th{background-color:#f9f9f9}.table-hover tbody tr:hover>td,.table-hover tbody tr:hover>th{background-color:#f5f5f5}table td[class*="span"],table th[class*="span"],.row-fluid table td[class*="span"],.row-fluid table th[class*="span"]{display:table-cell;float:none;margin-left:0}.table td.span1,.table th.span1{float:none;width:44px;margin-left:0}.table td.span2,.table th.span2{float:none;width:124px;margin-left:0}.table td.span3,.table th.span3{float:none;width:204px;margin-left:0}.table td.span4,.table th.span4{float:none;width:284px;margin-left:0}.table td.span5,.table th.span5{float:none;width:364px;margin-left:0}.table td.span6,.table th.span6{float:none;width:444px;margin-left:0}.table td.span7,.table th.span7{float:none;width:524px;margin-left:0}.table td.span8,.table th.span8{float:none;width:604px;margin-left:0}.table td.span9,.table th.span9{float:none;width:684px;margin-left:0}.table td.span10,.table th.span10{float:none;width:764px;margin-left:0}.table td.span11,.table th.span11{float:none;width:844px;margin-left:0}.table td.span12,.table th.span12{float:none;width:924px;margin-left:0}.table tbody tr.success>td{background-color:#dff0d8}.table tbody tr.error>td{background-color:#f2dede}.table tbody tr.warning>td{background-color:#fcf8e3}.table tbody tr.info>td{background-color:#d9edf7}.table-hover tbody 
tr.success:hover>td{background-color:#d0e9c6}.table-hover tbody tr.error:hover>td{background-color:#ebcccc}.table-hover tbody tr.warning:hover>td{background-color:#faf2cc}.table-hover tbody tr.info:hover>td{background-color:#c4e3f3}[class^="icon-"],[class*=" icon-"]{display:inline-block;width:14px;height:14px;margin-top:1px;*margin-right:.3em;line-height:14px;vertical-align:text-top;background-image:url("../img/glyphicons-halflings.png");background-position:14px 14px;background-repeat:no-repeat}.icon-white,.nav-pills>.active>a>[class^="icon-"],.nav-pills>.active>a>[class*=" icon-"],.nav-list>.active>a>[class^="icon-"],.nav-list>.active>a>[class*=" icon-"],.navbar-inverse .nav>.active>a>[class^="icon-"],.navbar-inverse .nav>.active>a>[class*=" icon-"],.dropdown-menu>li>a:hover>[class^="icon-"],.dropdown-menu>li>a:focus>[class^="icon-"],.dropdown-menu>li>a:hover>[class*=" icon-"],.dropdown-menu>li>a:focus>[class*=" icon-"],.dropdown-menu>.active>a>[class^="icon-"],.dropdown-menu>.active>a>[class*=" icon-"],.dropdown-submenu:hover>a>[class^="icon-"],.dropdown-submenu:focus>a>[class^="icon-"],.dropdown-submenu:hover>a>[class*=" icon-"],.dropdown-submenu:focus>a>[class*=" icon-"]{background-image:url("../img/glyphicons-halflings-white.png")}.icon-glass{background-position:0 0}.icon-music{background-position:-24px 0}.icon-search{background-position:-48px 0}.icon-envelope{background-position:-72px 0}.icon-heart{background-position:-96px 0}.icon-star{background-position:-120px 0}.icon-star-empty{background-position:-144px 0}.icon-user{background-position:-168px 0}.icon-film{background-position:-192px 0}.icon-th-large{background-position:-216px 0}.icon-th{background-position:-240px 0}.icon-th-list{background-position:-264px 0}.icon-ok{background-position:-288px 0}.icon-remove{background-position:-312px 0}.icon-zoom-in{background-position:-336px 0}.icon-zoom-out{background-position:-360px 0}.icon-off{background-position:-384px 0}.icon-signal{background-position:-408px 
0}.icon-cog{background-position:-432px 0}.icon-trash{background-position:-456px 0}.icon-home{background-position:0 -24px}.icon-file{background-position:-24px -24px}.icon-time{background-position:-48px -24px}.icon-road{background-position:-72px -24px}.icon-download-alt{background-position:-96px -24px}.icon-download{background-position:-120px -24px}.icon-upload{background-position:-144px -24px}.icon-inbox{background-position:-168px -24px}.icon-play-circle{background-position:-192px -24px}.icon-repeat{background-position:-216px -24px}.icon-refresh{background-position:-240px -24px}.icon-list-alt{background-position:-264px -24px}.icon-lock{background-position:-287px -24px}.icon-flag{background-position:-312px -24px}.icon-headphones{background-position:-336px -24px}.icon-volume-off{background-position:-360px -24px}.icon-volume-down{background-position:-384px -24px}.icon-volume-up{background-position:-408px -24px}.icon-qrcode{background-position:-432px -24px}.icon-barcode{background-position:-456px -24px}.icon-tag{background-position:0 -48px}.icon-tags{background-position:-25px -48px}.icon-book{background-position:-48px -48px}.icon-bookmark{background-position:-72px -48px}.icon-print{background-position:-96px -48px}.icon-camera{background-position:-120px -48px}.icon-font{background-position:-144px -48px}.icon-bold{background-position:-167px -48px}.icon-italic{background-position:-192px -48px}.icon-text-height{background-position:-216px -48px}.icon-text-width{background-position:-240px -48px}.icon-align-left{background-position:-264px -48px}.icon-align-center{background-position:-288px -48px}.icon-align-right{background-position:-312px -48px}.icon-align-justify{background-position:-336px -48px}.icon-list{background-position:-360px -48px}.icon-indent-left{background-position:-384px -48px}.icon-indent-right{background-position:-408px -48px}.icon-facetime-video{background-position:-432px -48px}.icon-picture{background-position:-456px -48px}.icon-pencil{background-position:0 
-72px}.icon-map-marker{background-position:-24px -72px}.icon-adjust{background-position:-48px -72px}.icon-tint{background-position:-72px -72px}.icon-edit{background-position:-96px -72px}.icon-share{background-position:-120px -72px}.icon-check{background-position:-144px -72px}.icon-move{background-position:-168px -72px}.icon-step-backward{background-position:-192px -72px}.icon-fast-backward{background-position:-216px -72px}.icon-backward{background-position:-240px -72px}.icon-play{background-position:-264px -72px}.icon-pause{background-position:-288px -72px}.icon-stop{background-position:-312px -72px}.icon-forward{background-position:-336px -72px}.icon-fast-forward{background-position:-360px -72px}.icon-step-forward{background-position:-384px -72px}.icon-eject{background-position:-408px -72px}.icon-chevron-left{background-position:-432px -72px}.icon-chevron-right{background-position:-456px -72px}.icon-plus-sign{background-position:0 -96px}.icon-minus-sign{background-position:-24px -96px}.icon-remove-sign{background-position:-48px -96px}.icon-ok-sign{background-position:-72px -96px}.icon-question-sign{background-position:-96px -96px}.icon-info-sign{background-position:-120px -96px}.icon-screenshot{background-position:-144px -96px}.icon-remove-circle{background-position:-168px -96px}.icon-ok-circle{background-position:-192px -96px}.icon-ban-circle{background-position:-216px -96px}.icon-arrow-left{background-position:-240px -96px}.icon-arrow-right{background-position:-264px -96px}.icon-arrow-up{background-position:-289px -96px}.icon-arrow-down{background-position:-312px -96px}.icon-share-alt{background-position:-336px -96px}.icon-resize-full{background-position:-360px -96px}.icon-resize-small{background-position:-384px -96px}.icon-plus{background-position:-408px -96px}.icon-minus{background-position:-433px -96px}.icon-asterisk{background-position:-456px -96px}.icon-exclamation-sign{background-position:0 -120px}.icon-gift{background-position:-24px 
-120px}.icon-leaf{background-position:-48px -120px}.icon-fire{background-position:-72px -120px}.icon-eye-open{background-position:-96px -120px}.icon-eye-close{background-position:-120px -120px}.icon-warning-sign{background-position:-144px -120px}.icon-plane{background-position:-168px -120px}.icon-calendar{background-position:-192px -120px}.icon-random{width:16px;background-position:-216px -120px}.icon-comment{background-position:-240px -120px}.icon-magnet{background-position:-264px -120px}.icon-chevron-up{background-position:-288px -120px}.icon-chevron-down{background-position:-313px -119px}.icon-retweet{background-position:-336px -120px}.icon-shopping-cart{background-position:-360px -120px}.icon-folder-close{width:16px;background-position:-384px -120px}.icon-folder-open{width:16px;background-position:-408px -120px}.icon-resize-vertical{background-position:-432px -119px}.icon-resize-horizontal{background-position:-456px -118px}.icon-hdd{background-position:0 -144px}.icon-bullhorn{background-position:-24px -144px}.icon-bell{background-position:-48px -144px}.icon-certificate{background-position:-72px -144px}.icon-thumbs-up{background-position:-96px -144px}.icon-thumbs-down{background-position:-120px -144px}.icon-hand-right{background-position:-144px -144px}.icon-hand-left{background-position:-168px -144px}.icon-hand-up{background-position:-192px -144px}.icon-hand-down{background-position:-216px -144px}.icon-circle-arrow-right{background-position:-240px -144px}.icon-circle-arrow-left{background-position:-264px -144px}.icon-circle-arrow-up{background-position:-288px -144px}.icon-circle-arrow-down{background-position:-312px -144px}.icon-globe{background-position:-336px -144px}.icon-wrench{background-position:-360px -144px}.icon-tasks{background-position:-384px -144px}.icon-filter{background-position:-408px -144px}.icon-briefcase{background-position:-432px -144px}.icon-fullscreen{background-position:-456px 
-144px}.dropup,.dropdown{position:relative}.dropdown-toggle{*margin-bottom:-3px}.dropdown-toggle:active,.open .dropdown-toggle{outline:0}.caret{display:inline-block;width:0;height:0;vertical-align:top;border-top:4px solid #000;border-right:4px solid transparent;border-left:4px solid transparent;content:""}.dropdown .caret{margin-top:8px;margin-left:2px}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;list-style:none;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);*border-right-width:2px;*border-bottom-width:2px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);-moz-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2);-webkit-background-clip:padding-box;-moz-background-clip:padding;background-clip:padding-box}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{*width:100%;height:1px;margin:9px 1px;*margin:-5px 0 5px;overflow:hidden;background-color:#e5e5e5;border-bottom:1px solid #fff}.dropdown-menu>li>a{display:block;padding:3px 20px;clear:both;font-weight:normal;line-height:20px;color:#333;white-space:nowrap}.dropdown-menu>li>a:hover,.dropdown-menu>li>a:focus,.dropdown-submenu:hover>a,.dropdown-submenu:focus>a{color:#fff;text-decoration:none;background-color:#0081c2;background-image:-moz-linear-gradient(top,#08c,#0077b3);background-image:-webkit-gradient(linear,0 0,0 100%,from(#08c),to(#0077b3));background-image:-webkit-linear-gradient(top,#08c,#0077b3);background-image:-o-linear-gradient(top,#08c,#0077b3);background-image:linear-gradient(to 
bottom,#08c,#0077b3);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0077b3',GradientType=0)}.dropdown-menu>.active>a,.dropdown-menu>.active>a:hover,.dropdown-menu>.active>a:focus{color:#fff;text-decoration:none;background-color:#0081c2;background-image:-moz-linear-gradient(top,#08c,#0077b3);background-image:-webkit-gradient(linear,0 0,0 100%,from(#08c),to(#0077b3));background-image:-webkit-linear-gradient(top,#08c,#0077b3);background-image:-o-linear-gradient(top,#08c,#0077b3);background-image:linear-gradient(to bottom,#08c,#0077b3);background-repeat:repeat-x;outline:0;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0077b3',GradientType=0)}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{color:#999}.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{text-decoration:none;cursor:default;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.open{*z-index:1000}.open>.dropdown-menu{display:block}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{border-top:0;border-bottom:4px solid #000;content:""}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:1px}.dropdown-submenu{position:relative}.dropdown-submenu>.dropdown-menu{top:0;left:100%;margin-top:-6px;margin-left:-1px;-webkit-border-radius:0 6px 6px 6px;-moz-border-radius:0 6px 6px 6px;border-radius:0 6px 6px 6px}.dropdown-submenu:hover>.dropdown-menu{display:block}.dropup .dropdown-submenu>.dropdown-menu{top:auto;bottom:0;margin-top:0;margin-bottom:-2px;-webkit-border-radius:5px 5px 5px 0;-moz-border-radius:5px 5px 5px 0;border-radius:5px 5px 5px 
0}.dropdown-submenu>a:after{display:block;float:right;width:0;height:0;margin-top:5px;margin-right:-10px;border-color:transparent;border-left-color:#ccc;border-style:solid;border-width:5px 0 5px 5px;content:" "}.dropdown-submenu:hover>a:after{border-left-color:#fff}.dropdown-submenu.pull-left{float:none}.dropdown-submenu.pull-left>.dropdown-menu{left:-100%;margin-left:10px;-webkit-border-radius:6px 0 6px 6px;-moz-border-radius:6px 0 6px 6px;border-radius:6px 0 6px 6px}.dropdown .dropdown-menu .nav-header{padding-right:20px;padding-left:20px}.typeahead{z-index:1051;margin-top:2px;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);box-shadow:inset 0 1px 1px rgba(0,0,0,0.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,0.15)}.well-large{padding:24px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.well-small{padding:9px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.fade{opacity:0;-webkit-transition:opacity .15s linear;-moz-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{position:relative;height:0;overflow:hidden;-webkit-transition:height .35s ease;-moz-transition:height .35s ease;-o-transition:height .35s ease;transition:height .35s ease}.collapse.in{height:auto}.close{float:right;font-size:20px;font-weight:bold;line-height:20px;color:#000;text-shadow:0 1px 0 #fff;opacity:.2;filter:alpha(opacity=20)}.close:hover,.close:focus{color:#000;text-decoration:none;cursor:pointer;opacity:.4;filter:alpha(opacity=40)}button.close{padding:0;cursor:pointer;background:transparent;border:0;-webkit-appearance:none}.btn{display:inline-block;*display:inline;padding:4px 
12px;margin-bottom:0;*margin-left:.3em;font-size:14px;line-height:20px;color:#333;text-align:center;text-shadow:0 1px 1px rgba(255,255,255,0.75);vertical-align:middle;cursor:pointer;background-color:#f5f5f5;*background-color:#e6e6e6;background-image:-moz-linear-gradient(top,#fff,#e6e6e6);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#e6e6e6));background-image:-webkit-linear-gradient(top,#fff,#e6e6e6);background-image:-o-linear-gradient(top,#fff,#e6e6e6);background-image:linear-gradient(to bottom,#fff,#e6e6e6);background-repeat:repeat-x;border:1px solid #ccc;*border:0;border-color:#e6e6e6 #e6e6e6 #bfbfbf;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);border-bottom-color:#b3b3b3;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff',endColorstr='#ffe6e6e6',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);*zoom:1;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05)}.btn:hover,.btn:focus,.btn:active,.btn.active,.btn.disabled,.btn[disabled]{color:#333;background-color:#e6e6e6;*background-color:#d9d9d9}.btn:active,.btn.active{background-color:#ccc \9}.btn:first-child{*margin-left:0}.btn:hover,.btn:focus{color:#333;text-decoration:none;background-position:0 -15px;-webkit-transition:background-position .1s linear;-moz-transition:background-position .1s linear;-o-transition:background-position .1s linear;transition:background-position .1s linear}.btn:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.btn.active,.btn:active{background-image:none;outline:0;-webkit-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px 
rgba(0,0,0,0.05);box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05)}.btn.disabled,.btn[disabled]{cursor:default;background-image:none;opacity:.65;filter:alpha(opacity=65);-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none}.btn-large{padding:11px 19px;font-size:17.5px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.btn-large [class^="icon-"],.btn-large [class*=" icon-"]{margin-top:4px}.btn-small{padding:2px 10px;font-size:11.9px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.btn-small [class^="icon-"],.btn-small [class*=" icon-"]{margin-top:0}.btn-mini [class^="icon-"],.btn-mini [class*=" icon-"]{margin-top:-1px}.btn-mini{padding:0 6px;font-size:10.5px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.btn-block{display:block;width:100%;padding-right:0;padding-left:0;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.btn-block+.btn-block{margin-top:5px}input[type="submit"].btn-block,input[type="reset"].btn-block,input[type="button"].btn-block{width:100%}.btn-primary.active,.btn-warning.active,.btn-danger.active,.btn-success.active,.btn-info.active,.btn-inverse.active{color:rgba(255,255,255,0.75)}.btn-primary{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#006dcc;*background-color:#04c;background-image:-moz-linear-gradient(top,#08c,#04c);background-image:-webkit-gradient(linear,0 0,0 100%,from(#08c),to(#04c));background-image:-webkit-linear-gradient(top,#08c,#04c);background-image:-o-linear-gradient(top,#08c,#04c);background-image:linear-gradient(to bottom,#08c,#04c);background-repeat:repeat-x;border-color:#04c #04c #002a80;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) 
rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0044cc',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-primary:hover,.btn-primary:focus,.btn-primary:active,.btn-primary.active,.btn-primary.disabled,.btn-primary[disabled]{color:#fff;background-color:#04c;*background-color:#003bb3}.btn-primary:active,.btn-primary.active{background-color:#039 \9}.btn-warning{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#faa732;*background-color:#f89406;background-image:-moz-linear-gradient(top,#fbb450,#f89406);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fbb450),to(#f89406));background-image:-webkit-linear-gradient(top,#fbb450,#f89406);background-image:-o-linear-gradient(top,#fbb450,#f89406);background-image:linear-gradient(to bottom,#fbb450,#f89406);background-repeat:repeat-x;border-color:#f89406 #f89406 #ad6704;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffbb450',endColorstr='#fff89406',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-warning:hover,.btn-warning:focus,.btn-warning:active,.btn-warning.active,.btn-warning.disabled,.btn-warning[disabled]{color:#fff;background-color:#f89406;*background-color:#df8505}.btn-warning:active,.btn-warning.active{background-color:#c67605 \9}.btn-danger{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#da4f49;*background-color:#bd362f;background-image:-moz-linear-gradient(top,#ee5f5b,#bd362f);background-image:-webkit-gradient(linear,0 0,0 100%,from(#ee5f5b),to(#bd362f));background-image:-webkit-linear-gradient(top,#ee5f5b,#bd362f);background-image:-o-linear-gradient(top,#ee5f5b,#bd362f);background-image:linear-gradient(to bottom,#ee5f5b,#bd362f);background-repeat:repeat-x;border-color:#bd362f #bd362f #802420;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) 
rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffee5f5b',endColorstr='#ffbd362f',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-danger:hover,.btn-danger:focus,.btn-danger:active,.btn-danger.active,.btn-danger.disabled,.btn-danger[disabled]{color:#fff;background-color:#bd362f;*background-color:#a9302a}.btn-danger:active,.btn-danger.active{background-color:#942a25 \9}.btn-success{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#5bb75b;*background-color:#51a351;background-image:-moz-linear-gradient(top,#62c462,#51a351);background-image:-webkit-gradient(linear,0 0,0 100%,from(#62c462),to(#51a351));background-image:-webkit-linear-gradient(top,#62c462,#51a351);background-image:-o-linear-gradient(top,#62c462,#51a351);background-image:linear-gradient(to bottom,#62c462,#51a351);background-repeat:repeat-x;border-color:#51a351 #51a351 #387038;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff62c462',endColorstr='#ff51a351',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-success:hover,.btn-success:focus,.btn-success:active,.btn-success.active,.btn-success.disabled,.btn-success[disabled]{color:#fff;background-color:#51a351;*background-color:#499249}.btn-success:active,.btn-success.active{background-color:#408140 \9}.btn-info{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#49afcd;*background-color:#2f96b4;background-image:-moz-linear-gradient(top,#5bc0de,#2f96b4);background-image:-webkit-gradient(linear,0 0,0 100%,from(#5bc0de),to(#2f96b4));background-image:-webkit-linear-gradient(top,#5bc0de,#2f96b4);background-image:-o-linear-gradient(top,#5bc0de,#2f96b4);background-image:linear-gradient(to bottom,#5bc0de,#2f96b4);background-repeat:repeat-x;border-color:#2f96b4 #2f96b4 #1f6377;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) 
rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de',endColorstr='#ff2f96b4',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-info:hover,.btn-info:focus,.btn-info:active,.btn-info.active,.btn-info.disabled,.btn-info[disabled]{color:#fff;background-color:#2f96b4;*background-color:#2a85a0}.btn-info:active,.btn-info.active{background-color:#24748c \9}.btn-inverse{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#363636;*background-color:#222;background-image:-moz-linear-gradient(top,#444,#222);background-image:-webkit-gradient(linear,0 0,0 100%,from(#444),to(#222));background-image:-webkit-linear-gradient(top,#444,#222);background-image:-o-linear-gradient(top,#444,#222);background-image:linear-gradient(to bottom,#444,#222);background-repeat:repeat-x;border-color:#222 #222 #000;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff444444',endColorstr='#ff222222',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-inverse:hover,.btn-inverse:focus,.btn-inverse:active,.btn-inverse.active,.btn-inverse.disabled,.btn-inverse[disabled]{color:#fff;background-color:#222;*background-color:#151515}.btn-inverse:active,.btn-inverse.active{background-color:#080808 
\9}button.btn,input[type="submit"].btn{*padding-top:3px;*padding-bottom:3px}button.btn::-moz-focus-inner,input[type="submit"].btn::-moz-focus-inner{padding:0;border:0}button.btn.btn-large,input[type="submit"].btn.btn-large{*padding-top:7px;*padding-bottom:7px}button.btn.btn-small,input[type="submit"].btn.btn-small{*padding-top:3px;*padding-bottom:3px}button.btn.btn-mini,input[type="submit"].btn.btn-mini{*padding-top:1px;*padding-bottom:1px}.btn-link,.btn-link:active,.btn-link[disabled]{background-color:transparent;background-image:none;-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none}.btn-link{color:#08c;cursor:pointer;border-color:transparent;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-link:hover,.btn-link:focus{color:#005580;text-decoration:underline;background-color:transparent}.btn-link[disabled]:hover,.btn-link[disabled]:focus{color:#333;text-decoration:none}.btn-group{position:relative;display:inline-block;*display:inline;*margin-left:.3em;font-size:0;white-space:nowrap;vertical-align:middle;*zoom:1}.btn-group:first-child{*margin-left:0}.btn-group+.btn-group{margin-left:5px}.btn-toolbar{margin-top:10px;margin-bottom:10px;font-size:0}.btn-toolbar>.btn+.btn,.btn-toolbar>.btn-group+.btn,.btn-toolbar>.btn+.btn-group{margin-left:5px}.btn-group>.btn{position:relative;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-group>.btn+.btn{margin-left:-1px}.btn-group>.btn,.btn-group>.dropdown-menu,.btn-group>.popover{font-size:14px}.btn-group>.btn-mini{font-size:10.5px}.btn-group>.btn-small{font-size:11.9px}.btn-group>.btn-large{font-size:17.5px}.btn-group>.btn:first-child{margin-left:0;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-bottomleft:4px;-moz-border-radius-topleft:4px}.btn-group>.btn:last-child,.btn-group>.dropdown-toggle{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-webkit-border-bottom-right-ra
dius:4px;border-bottom-right-radius:4px;-moz-border-radius-topright:4px;-moz-border-radius-bottomright:4px}.btn-group>.btn.large:first-child{margin-left:0;-webkit-border-bottom-left-radius:6px;border-bottom-left-radius:6px;-webkit-border-top-left-radius:6px;border-top-left-radius:6px;-moz-border-radius-bottomleft:6px;-moz-border-radius-topleft:6px}.btn-group>.btn.large:last-child,.btn-group>.large.dropdown-toggle{-webkit-border-top-right-radius:6px;border-top-right-radius:6px;-webkit-border-bottom-right-radius:6px;border-bottom-right-radius:6px;-moz-border-radius-topright:6px;-moz-border-radius-bottomright:6px}.btn-group>.btn:hover,.btn-group>.btn:focus,.btn-group>.btn:active,.btn-group>.btn.active{z-index:2}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{*padding-top:5px;padding-right:8px;*padding-bottom:5px;padding-left:8px;-webkit-box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05)}.btn-group>.btn-mini+.dropdown-toggle{*padding-top:2px;padding-right:5px;*padding-bottom:2px;padding-left:5px}.btn-group>.btn-small+.dropdown-toggle{*padding-top:5px;*padding-bottom:4px}.btn-group>.btn-large+.dropdown-toggle{*padding-top:7px;padding-right:12px;*padding-bottom:7px;padding-left:12px}.btn-group.open .dropdown-toggle{background-image:none;-webkit-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05)}.btn-group.open .btn.dropdown-toggle{background-color:#e6e6e6}.btn-group.open .btn-primary.dropdown-toggle{background-color:#04c}.btn-group.open 
.btn-warning.dropdown-toggle{background-color:#f89406}.btn-group.open .btn-danger.dropdown-toggle{background-color:#bd362f}.btn-group.open .btn-success.dropdown-toggle{background-color:#51a351}.btn-group.open .btn-info.dropdown-toggle{background-color:#2f96b4}.btn-group.open .btn-inverse.dropdown-toggle{background-color:#222}.btn .caret{margin-top:8px;margin-left:0}.btn-large .caret{margin-top:6px}.btn-large .caret{border-top-width:5px;border-right-width:5px;border-left-width:5px}.btn-mini .caret,.btn-small .caret{margin-top:8px}.dropup .btn-large .caret{border-bottom-width:5px}.btn-primary .caret,.btn-warning .caret,.btn-danger .caret,.btn-info .caret,.btn-success .caret,.btn-inverse .caret{border-top-color:#fff;border-bottom-color:#fff}.btn-group-vertical{display:inline-block;*display:inline;*zoom:1}.btn-group-vertical>.btn{display:block;float:none;max-width:100%;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-group-vertical>.btn+.btn{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:first-child{-webkit-border-radius:4px 4px 0 0;-moz-border-radius:4px 4px 0 0;border-radius:4px 4px 0 0}.btn-group-vertical>.btn:last-child{-webkit-border-radius:0 0 4px 4px;-moz-border-radius:0 0 4px 4px;border-radius:0 0 4px 4px}.btn-group-vertical>.btn-large:first-child{-webkit-border-radius:6px 6px 0 0;-moz-border-radius:6px 6px 0 0;border-radius:6px 6px 0 0}.btn-group-vertical>.btn-large:last-child{-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px}.alert{padding:8px 35px 8px 14px;margin-bottom:20px;text-shadow:0 1px 0 rgba(255,255,255,0.5);background-color:#fcf8e3;border:1px solid #fbeed5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.alert,.alert h4{color:#c09853}.alert h4{margin:0}.alert .close{position:relative;top:-2px;right:-21px;line-height:20px}.alert-success{color:#468847;background-color:#dff0d8;border-color:#d6e9c6}.alert-success 
h4{color:#468847}.alert-danger,.alert-error{color:#b94a48;background-color:#f2dede;border-color:#eed3d7}.alert-danger h4,.alert-error h4{color:#b94a48}.alert-info{color:#3a87ad;background-color:#d9edf7;border-color:#bce8f1}.alert-info h4{color:#3a87ad}.alert-block{padding-top:14px;padding-bottom:14px}.alert-block>p,.alert-block>ul{margin-bottom:0}.alert-block p+p{margin-top:5px}.nav{margin-bottom:20px;margin-left:0;list-style:none}.nav>li>a{display:block}.nav>li>a:hover,.nav>li>a:focus{text-decoration:none;background-color:#eee}.nav>li>a>img{max-width:none}.nav>.pull-right{float:right}.nav-header{display:block;padding:3px 15px;font-size:11px;font-weight:bold;line-height:20px;color:#999;text-shadow:0 1px 0 rgba(255,255,255,0.5);text-transform:uppercase}.nav li+.nav-header{margin-top:9px}.nav-list{padding-right:15px;padding-left:15px;margin-bottom:0}.nav-list>li>a,.nav-list .nav-header{margin-right:-15px;margin-left:-15px;text-shadow:0 1px 0 rgba(255,255,255,0.5)}.nav-list>li>a{padding:3px 15px}.nav-list>.active>a,.nav-list>.active>a:hover,.nav-list>.active>a:focus{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.2);background-color:#08c}.nav-list [class^="icon-"],.nav-list [class*=" icon-"]{margin-right:2px}.nav-list .divider{*width:100%;height:1px;margin:9px 1px;*margin:-5px 0 5px;overflow:hidden;background-color:#e5e5e5;border-bottom:1px solid #fff}.nav-tabs,.nav-pills{*zoom:1}.nav-tabs:before,.nav-pills:before,.nav-tabs:after,.nav-pills:after{display:table;line-height:0;content:""}.nav-tabs:after,.nav-pills:after{clear:both}.nav-tabs>li,.nav-pills>li{float:left}.nav-tabs>li>a,.nav-pills>li>a{padding-right:12px;padding-left:12px;margin-right:2px;line-height:14px}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{margin-bottom:-1px}.nav-tabs>li>a{padding-top:8px;padding-bottom:8px;line-height:20px;border:1px solid transparent;-webkit-border-radius:4px 4px 0 0;-moz-border-radius:4px 4px 0 0;border-radius:4px 4px 0 
0}.nav-tabs>li>a:hover,.nav-tabs>li>a:focus{border-color:#eee #eee #ddd}.nav-tabs>.active>a,.nav-tabs>.active>a:hover,.nav-tabs>.active>a:focus{color:#555;cursor:default;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent}.nav-pills>li>a{padding-top:8px;padding-bottom:8px;margin-top:2px;margin-bottom:2px;-webkit-border-radius:5px;-moz-border-radius:5px;border-radius:5px}.nav-pills>.active>a,.nav-pills>.active>a:hover,.nav-pills>.active>a:focus{color:#fff;background-color:#08c}.nav-stacked>li{float:none}.nav-stacked>li>a{margin-right:0}.nav-tabs.nav-stacked{border-bottom:0}.nav-tabs.nav-stacked>li>a{border:1px solid #ddd;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.nav-tabs.nav-stacked>li:first-child>a{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topright:4px;-moz-border-radius-topleft:4px}.nav-tabs.nav-stacked>li:last-child>a{-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomright:4px;-moz-border-radius-bottomleft:4px}.nav-tabs.nav-stacked>li>a:hover,.nav-tabs.nav-stacked>li>a:focus{z-index:2;border-color:#ddd}.nav-pills.nav-stacked>li>a{margin-bottom:3px}.nav-pills.nav-stacked>li:last-child>a{margin-bottom:1px}.nav-tabs .dropdown-menu{-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px}.nav-pills .dropdown-menu{-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.nav .dropdown-toggle .caret{margin-top:6px;border-top-color:#08c;border-bottom-color:#08c}.nav .dropdown-toggle:hover .caret,.nav .dropdown-toggle:focus .caret{border-top-color:#005580;border-bottom-color:#005580}.nav-tabs .dropdown-toggle .caret{margin-top:8px}.nav .active .dropdown-toggle .caret{border-top-color:#fff;border-bottom-color:#fff}.nav-tabs .active .dropdown-toggle 
.caret{border-top-color:#555;border-bottom-color:#555}.nav>.dropdown.active>a:hover,.nav>.dropdown.active>a:focus{cursor:pointer}.nav-tabs .open .dropdown-toggle,.nav-pills .open .dropdown-toggle,.nav>li.dropdown.open.active>a:hover,.nav>li.dropdown.open.active>a:focus{color:#fff;background-color:#999;border-color:#999}.nav li.dropdown.open .caret,.nav li.dropdown.open.active .caret,.nav li.dropdown.open a:hover .caret,.nav li.dropdown.open a:focus .caret{border-top-color:#fff;border-bottom-color:#fff;opacity:1;filter:alpha(opacity=100)}.tabs-stacked .open>a:hover,.tabs-stacked .open>a:focus{border-color:#999}.tabbable{*zoom:1}.tabbable:before,.tabbable:after{display:table;line-height:0;content:""}.tabbable:after{clear:both}.tab-content{overflow:auto}.tabs-below>.nav-tabs,.tabs-right>.nav-tabs,.tabs-left>.nav-tabs{border-bottom:0}.tab-content>.tab-pane,.pill-content>.pill-pane{display:none}.tab-content>.active,.pill-content>.active{display:block}.tabs-below>.nav-tabs{border-top:1px solid #ddd}.tabs-below>.nav-tabs>li{margin-top:-1px;margin-bottom:0}.tabs-below>.nav-tabs>li>a{-webkit-border-radius:0 0 4px 4px;-moz-border-radius:0 0 4px 4px;border-radius:0 0 4px 4px}.tabs-below>.nav-tabs>li>a:hover,.tabs-below>.nav-tabs>li>a:focus{border-top-color:#ddd;border-bottom-color:transparent}.tabs-below>.nav-tabs>.active>a,.tabs-below>.nav-tabs>.active>a:hover,.tabs-below>.nav-tabs>.active>a:focus{border-color:transparent #ddd #ddd #ddd}.tabs-left>.nav-tabs>li,.tabs-right>.nav-tabs>li{float:none}.tabs-left>.nav-tabs>li>a,.tabs-right>.nav-tabs>li>a{min-width:74px;margin-right:0;margin-bottom:3px}.tabs-left>.nav-tabs{float:left;margin-right:19px;border-right:1px solid #ddd}.tabs-left>.nav-tabs>li>a{margin-right:-1px;-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 4px;border-radius:4px 0 0 4px}.tabs-left>.nav-tabs>li>a:hover,.tabs-left>.nav-tabs>li>a:focus{border-color:#eee #ddd #eee #eee}.tabs-left>.nav-tabs .active>a,.tabs-left>.nav-tabs 
.active>a:hover,.tabs-left>.nav-tabs .active>a:focus{border-color:#ddd transparent #ddd #ddd;*border-right-color:#fff}.tabs-right>.nav-tabs{float:right;margin-left:19px;border-left:1px solid #ddd}.tabs-right>.nav-tabs>li>a{margin-left:-1px;-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.tabs-right>.nav-tabs>li>a:hover,.tabs-right>.nav-tabs>li>a:focus{border-color:#eee #eee #eee #ddd}.tabs-right>.nav-tabs .active>a,.tabs-right>.nav-tabs .active>a:hover,.tabs-right>.nav-tabs .active>a:focus{border-color:#ddd #ddd #ddd transparent;*border-left-color:#fff}.nav>.disabled>a{color:#999}.nav>.disabled>a:hover,.nav>.disabled>a:focus{text-decoration:none;cursor:default;background-color:transparent}.navbar{*position:relative;*z-index:2;margin-bottom:20px;overflow:visible}.navbar-inner{min-height:40px;padding-right:20px;padding-left:20px;background-color:#fafafa;background-image:-moz-linear-gradient(top,#fff,#f2f2f2);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#f2f2f2));background-image:-webkit-linear-gradient(top,#fff,#f2f2f2);background-image:-o-linear-gradient(top,#fff,#f2f2f2);background-image:linear-gradient(to bottom,#fff,#f2f2f2);background-repeat:repeat-x;border:1px solid #d4d4d4;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff',endColorstr='#fff2f2f2',GradientType=0);*zoom:1;-webkit-box-shadow:0 1px 4px rgba(0,0,0,0.065);-moz-box-shadow:0 1px 4px rgba(0,0,0,0.065);box-shadow:0 1px 4px rgba(0,0,0,0.065)}.navbar-inner:before,.navbar-inner:after{display:table;line-height:0;content:""}.navbar-inner:after{clear:both}.navbar .container{width:auto}.nav-collapse.collapse{height:auto;overflow:visible}.navbar .brand{display:block;float:left;padding:10px 20px 10px;margin-left:-20px;font-size:20px;font-weight:200;color:#777;text-shadow:0 1px 0 #fff}.navbar .brand:hover,.navbar 
.brand:focus{text-decoration:none}.navbar-text{margin-bottom:0;line-height:40px;color:#777}.navbar-link{color:#777}.navbar-link:hover,.navbar-link:focus{color:#333}.navbar .divider-vertical{height:40px;margin:0 9px;border-right:1px solid #fff;border-left:1px solid #f2f2f2}.navbar .btn,.navbar .btn-group{margin-top:5px}.navbar .btn-group .btn,.navbar .input-prepend .btn,.navbar .input-append .btn,.navbar .input-prepend .btn-group,.navbar .input-append .btn-group{margin-top:0}.navbar-form{margin-bottom:0;*zoom:1}.navbar-form:before,.navbar-form:after{display:table;line-height:0;content:""}.navbar-form:after{clear:both}.navbar-form input,.navbar-form select,.navbar-form .radio,.navbar-form .checkbox{margin-top:5px}.navbar-form input,.navbar-form select,.navbar-form .btn{display:inline-block;margin-bottom:0}.navbar-form input[type="image"],.navbar-form input[type="checkbox"],.navbar-form input[type="radio"]{margin-top:3px}.navbar-form .input-append,.navbar-form .input-prepend{margin-top:5px;white-space:nowrap}.navbar-form .input-append input,.navbar-form .input-prepend input{margin-top:0}.navbar-search{position:relative;float:left;margin-top:5px;margin-bottom:0}.navbar-search .search-query{padding:4px 14px;margin-bottom:0;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:13px;font-weight:normal;line-height:1;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.navbar-static-top{position:static;margin-bottom:0}.navbar-static-top .navbar-inner{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.navbar-fixed-top,.navbar-fixed-bottom{position:fixed;right:0;left:0;z-index:1030;margin-bottom:0}.navbar-fixed-top .navbar-inner,.navbar-static-top .navbar-inner{border-width:0 0 1px}.navbar-fixed-bottom .navbar-inner{border-width:1px 0 0}.navbar-fixed-top .navbar-inner,.navbar-fixed-bottom .navbar-inner{padding-right:0;padding-left:0;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.navbar-static-top 
.container,.navbar-fixed-top .container,.navbar-fixed-bottom .container{width:940px}.navbar-fixed-top{top:0}.navbar-fixed-top .navbar-inner,.navbar-static-top .navbar-inner{-webkit-box-shadow:0 1px 10px rgba(0,0,0,0.1);-moz-box-shadow:0 1px 10px rgba(0,0,0,0.1);box-shadow:0 1px 10px rgba(0,0,0,0.1)}.navbar-fixed-bottom{bottom:0}.navbar-fixed-bottom .navbar-inner{-webkit-box-shadow:0 -1px 10px rgba(0,0,0,0.1);-moz-box-shadow:0 -1px 10px rgba(0,0,0,0.1);box-shadow:0 -1px 10px rgba(0,0,0,0.1)}.navbar .nav{position:relative;left:0;display:block;float:left;margin:0 10px 0 0}.navbar .nav.pull-right{float:right;margin-right:0}.navbar .nav>li{float:left}.navbar .nav>li>a{float:none;padding:10px 15px 10px;color:#777;text-decoration:none;text-shadow:0 1px 0 #fff}.navbar .nav .dropdown-toggle .caret{margin-top:8px}.navbar .nav>li>a:focus,.navbar .nav>li>a:hover{color:#333;text-decoration:none;background-color:transparent}.navbar .nav>.active>a,.navbar .nav>.active>a:hover,.navbar .nav>.active>a:focus{color:#555;text-decoration:none;background-color:#e5e5e5;-webkit-box-shadow:inset 0 3px 8px rgba(0,0,0,0.125);-moz-box-shadow:inset 0 3px 8px rgba(0,0,0,0.125);box-shadow:inset 0 3px 8px rgba(0,0,0,0.125)}.navbar .btn-navbar{display:none;float:right;padding:7px 10px;margin-right:5px;margin-left:5px;color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#ededed;*background-color:#e5e5e5;background-image:-moz-linear-gradient(top,#f2f2f2,#e5e5e5);background-image:-webkit-gradient(linear,0 0,0 100%,from(#f2f2f2),to(#e5e5e5));background-image:-webkit-linear-gradient(top,#f2f2f2,#e5e5e5);background-image:-o-linear-gradient(top,#f2f2f2,#e5e5e5);background-image:linear-gradient(to bottom,#f2f2f2,#e5e5e5);background-repeat:repeat-x;border-color:#e5e5e5 #e5e5e5 #bfbfbf;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) 
rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2f2f2',endColorstr='#ffe5e5e5',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075);-moz-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075);box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075)}.navbar .btn-navbar:hover,.navbar .btn-navbar:focus,.navbar .btn-navbar:active,.navbar .btn-navbar.active,.navbar .btn-navbar.disabled,.navbar .btn-navbar[disabled]{color:#fff;background-color:#e5e5e5;*background-color:#d9d9d9}.navbar .btn-navbar:active,.navbar .btn-navbar.active{background-color:#ccc \9}.navbar .btn-navbar .icon-bar{display:block;width:18px;height:2px;background-color:#f5f5f5;-webkit-border-radius:1px;-moz-border-radius:1px;border-radius:1px;-webkit-box-shadow:0 1px 0 rgba(0,0,0,0.25);-moz-box-shadow:0 1px 0 rgba(0,0,0,0.25);box-shadow:0 1px 0 rgba(0,0,0,0.25)}.btn-navbar .icon-bar+.icon-bar{margin-top:3px}.navbar .nav>li>.dropdown-menu:before{position:absolute;top:-7px;left:9px;display:inline-block;border-right:7px solid transparent;border-bottom:7px solid #ccc;border-left:7px solid transparent;border-bottom-color:rgba(0,0,0,0.2);content:''}.navbar .nav>li>.dropdown-menu:after{position:absolute;top:-6px;left:10px;display:inline-block;border-right:6px solid transparent;border-bottom:6px solid #fff;border-left:6px solid transparent;content:''}.navbar-fixed-bottom .nav>li>.dropdown-menu:before{top:auto;bottom:-7px;border-top:7px solid #ccc;border-bottom:0;border-top-color:rgba(0,0,0,0.2)}.navbar-fixed-bottom .nav>li>.dropdown-menu:after{top:auto;bottom:-6px;border-top:6px solid #fff;border-bottom:0}.navbar .nav li.dropdown>a:hover .caret,.navbar .nav li.dropdown>a:focus .caret{border-top-color:#333;border-bottom-color:#333}.navbar .nav li.dropdown.open>.dropdown-toggle,.navbar .nav 
li.dropdown.active>.dropdown-toggle,.navbar .nav li.dropdown.open.active>.dropdown-toggle{color:#555;background-color:#e5e5e5}.navbar .nav li.dropdown>.dropdown-toggle .caret{border-top-color:#777;border-bottom-color:#777}.navbar .nav li.dropdown.open>.dropdown-toggle .caret,.navbar .nav li.dropdown.active>.dropdown-toggle .caret,.navbar .nav li.dropdown.open.active>.dropdown-toggle .caret{border-top-color:#555;border-bottom-color:#555}.navbar .pull-right>li>.dropdown-menu,.navbar .nav>li>.dropdown-menu.pull-right{right:0;left:auto}.navbar .pull-right>li>.dropdown-menu:before,.navbar .nav>li>.dropdown-menu.pull-right:before{right:12px;left:auto}.navbar .pull-right>li>.dropdown-menu:after,.navbar .nav>li>.dropdown-menu.pull-right:after{right:13px;left:auto}.navbar .pull-right>li>.dropdown-menu .dropdown-menu,.navbar .nav>li>.dropdown-menu.pull-right .dropdown-menu{right:100%;left:auto;margin-right:-1px;margin-left:0;-webkit-border-radius:6px 0 6px 6px;-moz-border-radius:6px 0 6px 6px;border-radius:6px 0 6px 6px}.navbar-inverse .navbar-inner{background-color:#1b1b1b;background-image:-moz-linear-gradient(top,#222,#111);background-image:-webkit-gradient(linear,0 0,0 100%,from(#222),to(#111));background-image:-webkit-linear-gradient(top,#222,#111);background-image:-o-linear-gradient(top,#222,#111);background-image:linear-gradient(to bottom,#222,#111);background-repeat:repeat-x;border-color:#252525;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff222222',endColorstr='#ff111111',GradientType=0)}.navbar-inverse .brand,.navbar-inverse .nav>li>a{color:#999;text-shadow:0 -1px 0 rgba(0,0,0,0.25)}.navbar-inverse .brand:hover,.navbar-inverse .nav>li>a:hover,.navbar-inverse .brand:focus,.navbar-inverse .nav>li>a:focus{color:#fff}.navbar-inverse .brand{color:#999}.navbar-inverse .navbar-text{color:#999}.navbar-inverse .nav>li>a:focus,.navbar-inverse .nav>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .nav .active>a,.navbar-inverse .nav 
.active>a:hover,.navbar-inverse .nav .active>a:focus{color:#fff;background-color:#111}.navbar-inverse .navbar-link{color:#999}.navbar-inverse .navbar-link:hover,.navbar-inverse .navbar-link:focus{color:#fff}.navbar-inverse .divider-vertical{border-right-color:#222;border-left-color:#111}.navbar-inverse .nav li.dropdown.open>.dropdown-toggle,.navbar-inverse .nav li.dropdown.active>.dropdown-toggle,.navbar-inverse .nav li.dropdown.open.active>.dropdown-toggle{color:#fff;background-color:#111}.navbar-inverse .nav li.dropdown>a:hover .caret,.navbar-inverse .nav li.dropdown>a:focus .caret{border-top-color:#fff;border-bottom-color:#fff}.navbar-inverse .nav li.dropdown>.dropdown-toggle .caret{border-top-color:#999;border-bottom-color:#999}.navbar-inverse .nav li.dropdown.open>.dropdown-toggle .caret,.navbar-inverse .nav li.dropdown.active>.dropdown-toggle .caret,.navbar-inverse .nav li.dropdown.open.active>.dropdown-toggle .caret{border-top-color:#fff;border-bottom-color:#fff}.navbar-inverse .navbar-search .search-query{color:#fff;background-color:#515151;border-color:#111;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);-webkit-transition:none;-moz-transition:none;-o-transition:none;transition:none}.navbar-inverse .navbar-search .search-query:-moz-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query:-ms-input-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query::-webkit-input-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query:focus,.navbar-inverse .navbar-search .search-query.focused{padding:5px 15px;color:#333;text-shadow:0 1px 0 #fff;background-color:#fff;border:0;outline:0;-webkit-box-shadow:0 0 3px rgba(0,0,0,0.15);-moz-box-shadow:0 0 3px rgba(0,0,0,0.15);box-shadow:0 0 3px rgba(0,0,0,0.15)}.navbar-inverse 
.btn-navbar{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#0e0e0e;*background-color:#040404;background-image:-moz-linear-gradient(top,#151515,#040404);background-image:-webkit-gradient(linear,0 0,0 100%,from(#151515),to(#040404));background-image:-webkit-linear-gradient(top,#151515,#040404);background-image:-o-linear-gradient(top,#151515,#040404);background-image:linear-gradient(to bottom,#151515,#040404);background-repeat:repeat-x;border-color:#040404 #040404 #000;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff151515',endColorstr='#ff040404',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.navbar-inverse .btn-navbar:hover,.navbar-inverse .btn-navbar:focus,.navbar-inverse .btn-navbar:active,.navbar-inverse .btn-navbar.active,.navbar-inverse .btn-navbar.disabled,.navbar-inverse .btn-navbar[disabled]{color:#fff;background-color:#040404;*background-color:#000}.navbar-inverse .btn-navbar:active,.navbar-inverse .btn-navbar.active{background-color:#000 \9}.breadcrumb{padding:8px 15px;margin:0 0 20px;list-style:none;background-color:#f5f5f5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.breadcrumb>li{display:inline-block;*display:inline;text-shadow:0 1px 0 #fff;*zoom:1}.breadcrumb>li>.divider{padding:0 5px;color:#ccc}.breadcrumb>.active{color:#999}.pagination{margin:20px 0}.pagination ul{display:inline-block;*display:inline;margin-bottom:0;margin-left:0;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;*zoom:1;-webkit-box-shadow:0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:0 1px 2px rgba(0,0,0,0.05);box-shadow:0 1px 2px rgba(0,0,0,0.05)}.pagination ul>li{display:inline}.pagination ul>li>a,.pagination ul>li>span{float:left;padding:4px 12px;line-height:20px;text-decoration:none;background-color:#fff;border:1px solid #ddd;border-left-width:0}.pagination ul>li>a:hover,.pagination ul>li>a:focus,.pagination 
ul>.active>a,.pagination ul>.active>span{background-color:#f5f5f5}.pagination ul>.active>a,.pagination ul>.active>span{color:#999;cursor:default}.pagination ul>.disabled>span,.pagination ul>.disabled>a,.pagination ul>.disabled>a:hover,.pagination ul>.disabled>a:focus{color:#999;cursor:default;background-color:transparent}.pagination ul>li:first-child>a,.pagination ul>li:first-child>span{border-left-width:1px;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-bottomleft:4px;-moz-border-radius-topleft:4px}.pagination ul>li:last-child>a,.pagination ul>li:last-child>span{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-moz-border-radius-topright:4px;-moz-border-radius-bottomright:4px}.pagination-centered{text-align:center}.pagination-right{text-align:right}.pagination-large ul>li>a,.pagination-large ul>li>span{padding:11px 19px;font-size:17.5px}.pagination-large ul>li:first-child>a,.pagination-large ul>li:first-child>span{-webkit-border-bottom-left-radius:6px;border-bottom-left-radius:6px;-webkit-border-top-left-radius:6px;border-top-left-radius:6px;-moz-border-radius-bottomleft:6px;-moz-border-radius-topleft:6px}.pagination-large ul>li:last-child>a,.pagination-large ul>li:last-child>span{-webkit-border-top-right-radius:6px;border-top-right-radius:6px;-webkit-border-bottom-right-radius:6px;border-bottom-right-radius:6px;-moz-border-radius-topright:6px;-moz-border-radius-bottomright:6px}.pagination-mini ul>li:first-child>a,.pagination-small ul>li:first-child>a,.pagination-mini ul>li:first-child>span,.pagination-small ul>li:first-child>span{-webkit-border-bottom-left-radius:3px;border-bottom-left-radius:3px;-webkit-border-top-left-radius:3px;border-top-left-radius:3px;-moz-border-radius-bottomleft:3px;-moz-border-radius-topleft:3px}.pagination-mini ul>li:last-child>a,.pagination-small 
ul>li:last-child>a,.pagination-mini ul>li:last-child>span,.pagination-small ul>li:last-child>span{-webkit-border-top-right-radius:3px;border-top-right-radius:3px;-webkit-border-bottom-right-radius:3px;border-bottom-right-radius:3px;-moz-border-radius-topright:3px;-moz-border-radius-bottomright:3px}.pagination-small ul>li>a,.pagination-small ul>li>span{padding:2px 10px;font-size:11.9px}.pagination-mini ul>li>a,.pagination-mini ul>li>span{padding:0 6px;font-size:10.5px}.pager{margin:20px 0;text-align:center;list-style:none;*zoom:1}.pager:before,.pager:after{display:table;line-height:0;content:""}.pager:after{clear:both}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.pager li>a:hover,.pager li>a:focus{text-decoration:none;background-color:#f5f5f5}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager .previous>span{float:left}.pager .disabled>a,.pager .disabled>a:hover,.pager .disabled>a:focus,.pager .disabled>span{color:#999;cursor:default;background-color:#fff}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{opacity:0}.modal-backdrop,.modal-backdrop.fade.in{opacity:.8;filter:alpha(opacity=80)}.modal{position:fixed;top:10%;left:50%;z-index:1050;width:560px;margin-left:-280px;background-color:#fff;border:1px solid #999;border:1px solid rgba(0,0,0,0.3);*border:1px solid #999;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;outline:0;-webkit-box-shadow:0 3px 7px rgba(0,0,0,0.3);-moz-box-shadow:0 3px 7px rgba(0,0,0,0.3);box-shadow:0 3px 7px rgba(0,0,0,0.3);-webkit-background-clip:padding-box;-moz-background-clip:padding-box;background-clip:padding-box}.modal.fade{top:-25%;-webkit-transition:opacity .3s linear,top .3s ease-out;-moz-transition:opacity .3s linear,top .3s ease-out;-o-transition:opacity .3s linear,top .3s 
ease-out;transition:opacity .3s linear,top .3s ease-out}.modal.fade.in{top:10%}.modal-header{padding:9px 15px;border-bottom:1px solid #eee}.modal-header .close{margin-top:2px}.modal-header h3{margin:0;line-height:30px}.modal-body{position:relative;max-height:400px;padding:15px;overflow-y:auto}.modal-form{margin-bottom:0}.modal-footer{padding:14px 15px 15px;margin-bottom:0;text-align:right;background-color:#f5f5f5;border-top:1px solid #ddd;-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px;*zoom:1;-webkit-box-shadow:inset 0 1px 0 #fff;-moz-box-shadow:inset 0 1px 0 #fff;box-shadow:inset 0 1px 0 #fff}.modal-footer:before,.modal-footer:after{display:table;line-height:0;content:""}.modal-footer:after{clear:both}.modal-footer .btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.tooltip{position:absolute;z-index:1030;display:block;font-size:11px;line-height:1.4;opacity:0;filter:alpha(opacity=0);visibility:visible}.tooltip.in{opacity:.8;filter:alpha(opacity=80)}.tooltip.top{padding:5px 0;margin-top:-3px}.tooltip.right{padding:0 5px;margin-left:3px}.tooltip.bottom{padding:5px 0;margin-top:3px}.tooltip.left{padding:0 5px;margin-left:-3px}.tooltip-inner{max-width:200px;padding:8px;color:#fff;text-align:center;text-decoration:none;background-color:#000;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-top-color:#000;border-width:5px 5px 0}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-right-color:#000;border-width:5px 5px 5px 0}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-left-color:#000;border-width:5px 0 5px 5px}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-bottom-color:#000;border-width:0 5px 
5px}.popover{position:absolute;top:0;left:0;z-index:1010;display:none;max-width:276px;padding:1px;text-align:left;white-space:normal;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);-moz-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2);-webkit-background-clip:padding-box;-moz-background-clip:padding;background-clip:padding-box}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{padding:8px 14px;margin:0;font-size:14px;font-weight:normal;line-height:18px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;-webkit-border-radius:5px 5px 0 0;-moz-border-radius:5px 5px 0 0;border-radius:5px 5px 0 0}.popover-title:empty{display:none}.popover-content{padding:9px 14px}.popover .arrow,.popover .arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover .arrow{border-width:11px}.popover .arrow:after{border-width:10px;content:""}.popover.top .arrow{bottom:-11px;left:50%;margin-left:-11px;border-top-color:#999;border-top-color:rgba(0,0,0,0.25);border-bottom-width:0}.popover.top .arrow:after{bottom:1px;margin-left:-10px;border-top-color:#fff;border-bottom-width:0}.popover.right .arrow{top:50%;left:-11px;margin-top:-11px;border-right-color:#999;border-right-color:rgba(0,0,0,0.25);border-left-width:0}.popover.right .arrow:after{bottom:-10px;left:1px;border-right-color:#fff;border-left-width:0}.popover.bottom .arrow{top:-11px;left:50%;margin-left:-11px;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,0.25);border-top-width:0}.popover.bottom .arrow:after{top:1px;margin-left:-10px;border-bottom-color:#fff;border-top-width:0}.popover.left 
.arrow{top:50%;right:-11px;margin-top:-11px;border-left-color:#999;border-left-color:rgba(0,0,0,0.25);border-right-width:0}.popover.left .arrow:after{right:1px;bottom:-10px;border-left-color:#fff;border-right-width:0}.thumbnails{margin-left:-20px;list-style:none;*zoom:1}.thumbnails:before,.thumbnails:after{display:table;line-height:0;content:""}.thumbnails:after{clear:both}.row-fluid .thumbnails{margin-left:0}.thumbnails>li{float:left;margin-bottom:20px;margin-left:20px}.thumbnail{display:block;padding:4px;line-height:20px;border:1px solid #ddd;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:0 1px 3px rgba(0,0,0,0.055);-moz-box-shadow:0 1px 3px rgba(0,0,0,0.055);box-shadow:0 1px 3px rgba(0,0,0,0.055);-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out}a.thumbnail:hover,a.thumbnail:focus{border-color:#08c;-webkit-box-shadow:0 1px 4px rgba(0,105,214,0.25);-moz-box-shadow:0 1px 4px rgba(0,105,214,0.25);box-shadow:0 1px 4px rgba(0,105,214,0.25)}.thumbnail>img{display:block;max-width:100%;margin-right:auto;margin-left:auto}.thumbnail .caption{padding:9px;color:#555}.media,.media-body{overflow:hidden;*overflow:visible;zoom:1}.media,.media .media{margin-top:15px}.media:first-child{margin-top:0}.media-object{display:block}.media-heading{margin:0 0 5px}.media>.pull-left{margin-right:10px}.media>.pull-right{margin-left:10px}.media-list{margin-left:0;list-style:none}.label,.badge{display:inline-block;padding:2px 4px;font-size:11.844px;font-weight:bold;line-height:14px;color:#fff;text-shadow:0 -1px 0 
rgba(0,0,0,0.25);white-space:nowrap;vertical-align:baseline;background-color:#999}.label{-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.badge{padding-right:9px;padding-left:9px;-webkit-border-radius:9px;-moz-border-radius:9px;border-radius:9px}.label:empty,.badge:empty{display:none}a.label:hover,a.label:focus,a.badge:hover,a.badge:focus{color:#fff;text-decoration:none;cursor:pointer}.label-important,.badge-important{background-color:#b94a48}.label-important[href],.badge-important[href]{background-color:#953b39}.label-warning,.badge-warning{background-color:#f89406}.label-warning[href],.badge-warning[href]{background-color:#c67605}.label-success,.badge-success{background-color:#468847}.label-success[href],.badge-success[href]{background-color:#356635}.label-info,.badge-info{background-color:#3a87ad}.label-info[href],.badge-info[href]{background-color:#2d6987}.label-inverse,.badge-inverse{background-color:#333}.label-inverse[href],.badge-inverse[href]{background-color:#1a1a1a}.btn .label,.btn .badge{position:relative;top:-1px}.btn-mini .label,.btn-mini .badge{top:0}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-moz-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-ms-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-o-keyframes progress-bar-stripes{from{background-position:0 0}to{background-position:40px 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{height:20px;margin-bottom:20px;overflow:hidden;background-color:#f7f7f7;background-image:-moz-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:-webkit-gradient(linear,0 0,0 100%,from(#f5f5f5),to(#f9f9f9));background-image:-webkit-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:-o-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:linear-gradient(to 
bottom,#f5f5f5,#f9f9f9);background-repeat:repeat-x;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5',endColorstr='#fff9f9f9',GradientType=0);-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1)}.progress .bar{float:left;width:0;height:100%;font-size:12px;color:#fff;text-align:center;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#0e90d2;background-image:-moz-linear-gradient(top,#149bdf,#0480be);background-image:-webkit-gradient(linear,0 0,0 100%,from(#149bdf),to(#0480be));background-image:-webkit-linear-gradient(top,#149bdf,#0480be);background-image:-o-linear-gradient(top,#149bdf,#0480be);background-image:linear-gradient(to bottom,#149bdf,#0480be);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff149bdf',endColorstr='#ff0480be',GradientType=0);-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-moz-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;-webkit-transition:width .6s ease;-moz-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress .bar+.bar{-webkit-box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15);-moz-box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15)}.progress-striped .bar{background-color:#149bdf;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 
25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);-webkit-background-size:40px 40px;-moz-background-size:40px 40px;-o-background-size:40px 40px;background-size:40px 40px}.progress.active .bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-moz-animation:progress-bar-stripes 2s linear infinite;-ms-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-danger .bar,.progress .bar-danger{background-color:#dd514c;background-image:-moz-linear-gradient(top,#ee5f5b,#c43c35);background-image:-webkit-gradient(linear,0 0,0 100%,from(#ee5f5b),to(#c43c35));background-image:-webkit-linear-gradient(top,#ee5f5b,#c43c35);background-image:-o-linear-gradient(top,#ee5f5b,#c43c35);background-image:linear-gradient(to bottom,#ee5f5b,#c43c35);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffee5f5b',endColorstr='#ffc43c35',GradientType=0)}.progress-danger.progress-striped .bar,.progress-striped .bar-danger{background-color:#ee5f5b;background-image:-webkit-gradient(linear,0 100%,100% 
0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-success .bar,.progress .bar-success{background-color:#5eb95e;background-image:-moz-linear-gradient(top,#62c462,#57a957);background-image:-webkit-gradient(linear,0 0,0 100%,from(#62c462),to(#57a957));background-image:-webkit-linear-gradient(top,#62c462,#57a957);background-image:-o-linear-gradient(top,#62c462,#57a957);background-image:linear-gradient(to bottom,#62c462,#57a957);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff62c462',endColorstr='#ff57a957',GradientType=0)}.progress-success.progress-striped .bar,.progress-striped .bar-success{background-color:#62c462;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 
50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-info .bar,.progress .bar-info{background-color:#4bb1cf;background-image:-moz-linear-gradient(top,#5bc0de,#339bb9);background-image:-webkit-gradient(linear,0 0,0 100%,from(#5bc0de),to(#339bb9));background-image:-webkit-linear-gradient(top,#5bc0de,#339bb9);background-image:-o-linear-gradient(top,#5bc0de,#339bb9);background-image:linear-gradient(to bottom,#5bc0de,#339bb9);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de',endColorstr='#ff339bb9',GradientType=0)}.progress-info.progress-striped .bar,.progress-striped .bar-info{background-color:#5bc0de;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 
50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-warning .bar,.progress .bar-warning{background-color:#faa732;background-image:-moz-linear-gradient(top,#fbb450,#f89406);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fbb450),to(#f89406));background-image:-webkit-linear-gradient(top,#fbb450,#f89406);background-image:-o-linear-gradient(top,#fbb450,#f89406);background-image:linear-gradient(to bottom,#fbb450,#f89406);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffbb450',endColorstr='#fff89406',GradientType=0)}.progress-warning.progress-striped .bar,.progress-striped .bar-warning{background-color:#fbb450;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.accordion{margin-bottom:20px}.accordion-group{margin-bottom:2px;border:1px solid 
#e5e5e5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.accordion-heading{border-bottom:0}.accordion-heading .accordion-toggle{display:block;padding:8px 15px}.accordion-toggle{cursor:pointer}.accordion-inner{padding:9px 15px;border-top:1px solid #e5e5e5}.carousel{position:relative;margin-bottom:20px;line-height:1}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner>.item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-moz-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>img,.carousel-inner>.item>a>img{display:block;line-height:1}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:40%;left:15px;width:40px;height:40px;margin-top:-20px;font-size:60px;font-weight:100;line-height:30px;color:#fff;text-align:center;background:#222;border:3px solid #fff;-webkit-border-radius:23px;-moz-border-radius:23px;border-radius:23px;opacity:.5;filter:alpha(opacity=50)}.carousel-control.right{right:15px;left:auto}.carousel-control:hover,.carousel-control:focus{color:#fff;text-decoration:none;opacity:.9;filter:alpha(opacity=90)}.carousel-indicators{position:absolute;top:15px;right:15px;z-index:5;margin:0;list-style:none}.carousel-indicators li{display:block;float:left;width:10px;height:10px;margin-left:5px;text-indent:-999px;background-color:#ccc;background-color:rgba(255,255,255,0.25);border-radius:5px}.carousel-indicators 
.active{background-color:#fff}.carousel-caption{position:absolute;right:0;bottom:0;left:0;padding:15px;background:#333;background:rgba(0,0,0,0.75)}.carousel-caption h4,.carousel-caption p{line-height:20px;color:#fff}.carousel-caption h4{margin:0 0 5px}.carousel-caption p{margin-bottom:0}.hero-unit{padding:60px;margin-bottom:30px;font-size:18px;font-weight:200;line-height:30px;color:inherit;background-color:#eee;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.hero-unit h1{margin-bottom:0;font-size:60px;line-height:1;letter-spacing:-1px;color:inherit}.hero-unit li{line-height:30px}.pull-right{float:right}.pull-left{float:left}.hide{display:none}.show{display:block}.invisible{visibility:hidden}.affix{position:fixed} diff --git a/rpki/gui/app/static/img/glyphicons-halflings-white.png b/rpki/gui/app/static/img/glyphicons-halflings-white.png new file mode 100644 index 00000000..3bf6484a Binary files /dev/null and b/rpki/gui/app/static/img/glyphicons-halflings-white.png differ diff --git a/rpki/gui/app/static/img/glyphicons-halflings.png b/rpki/gui/app/static/img/glyphicons-halflings.png new file mode 100644 index 00000000..a9969993 Binary files /dev/null and b/rpki/gui/app/static/img/glyphicons-halflings.png differ diff --git a/rpki/gui/app/static/img/sui-riu.ico b/rpki/gui/app/static/img/sui-riu.ico new file mode 100644 index 00000000..61223e27 Binary files /dev/null and b/rpki/gui/app/static/img/sui-riu.ico differ diff --git a/rpki/gui/app/static/js/bootstrap.min.js b/rpki/gui/app/static/js/bootstrap.min.js new file mode 100644 index 00000000..95c5ac5e --- /dev/null +++ b/rpki/gui/app/static/js/bootstrap.min.js @@ -0,0 +1,6 @@ +/*! +* Bootstrap.js by @fat & @mdo +* Copyright 2012 Twitter, Inc. 
+* http://www.apache.org/licenses/LICENSE-2.0.txt +*/ +!function(e){"use strict";e(function(){e.support.transition=function(){var e=function(){var e=document.createElement("bootstrap"),t={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"},n;for(n in t)if(e.style[n]!==undefined)return t[n]}();return e&&{end:e}}()})}(window.jQuery),!function(e){"use strict";var t='[data-dismiss="alert"]',n=function(n){e(n).on("click",t,this.close)};n.prototype.close=function(t){function s(){i.trigger("closed").remove()}var n=e(this),r=n.attr("data-target"),i;r||(r=n.attr("href"),r=r&&r.replace(/.*(?=#[^\s]*$)/,"")),i=e(r),t&&t.preventDefault(),i.length||(i=n.hasClass("alert")?n:n.parent()),i.trigger(t=e.Event("close"));if(t.isDefaultPrevented())return;i.removeClass("in"),e.support.transition&&i.hasClass("fade")?i.on(e.support.transition.end,s):s()};var r=e.fn.alert;e.fn.alert=function(t){return this.each(function(){var r=e(this),i=r.data("alert");i||r.data("alert",i=new n(this)),typeof t=="string"&&i[t].call(r)})},e.fn.alert.Constructor=n,e.fn.alert.noConflict=function(){return e.fn.alert=r,this},e(document).on("click.alert.data-api",t,n.prototype.close)}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.button.defaults,n)};t.prototype.setState=function(e){var t="disabled",n=this.$element,r=n.data(),i=n.is("input")?"val":"html";e+="Text",r.resetText||n.data("resetText",n[i]()),n[i](r[e]||this.options[e]),setTimeout(function(){e=="loadingText"?n.addClass(t).attr(t,t):n.removeClass(t).removeAttr(t)},0)},t.prototype.toggle=function(){var e=this.$element.closest('[data-toggle="buttons-radio"]');e&&e.find(".active").removeClass("active"),this.$element.toggleClass("active")};var n=e.fn.button;e.fn.button=function(n){return this.each(function(){var r=e(this),i=r.data("button"),s=typeof n=="object"&&n;i||r.data("button",i=new 
t(this,s)),n=="toggle"?i.toggle():n&&i.setState(n)})},e.fn.button.defaults={loadingText:"loading..."},e.fn.button.Constructor=t,e.fn.button.noConflict=function(){return e.fn.button=n,this},e(document).on("click.button.data-api","[data-toggle^=button]",function(t){var n=e(t.target);n.hasClass("btn")||(n=n.closest(".btn")),n.button("toggle")})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.$indicators=this.$element.find(".carousel-indicators"),this.options=n,this.options.pause=="hover"&&this.$element.on("mouseenter",e.proxy(this.pause,this)).on("mouseleave",e.proxy(this.cycle,this))};t.prototype={cycle:function(t){return t||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(e.proxy(this.next,this),this.options.interval)),this},getActiveIndex:function(){return this.$active=this.$element.find(".item.active"),this.$items=this.$active.parent().children(),this.$items.index(this.$active)},to:function(t){var n=this.getActiveIndex(),r=this;if(t>this.$items.length-1||t<0)return;return this.sliding?this.$element.one("slid",function(){r.to(t)}):n==t?this.pause().cycle():this.slide(t>n?"next":"prev",e(this.$items[t]))},pause:function(t){return t||(this.paused=!0),this.$element.find(".next, .prev").length&&e.support.transition.end&&(this.$element.trigger(e.support.transition.end),this.cycle(!0)),clearInterval(this.interval),this.interval=null,this},next:function(){if(this.sliding)return;return this.slide("next")},prev:function(){if(this.sliding)return;return this.slide("prev")},slide:function(t,n){var 
r=this.$element.find(".item.active"),i=n||r[t](),s=this.interval,o=t=="next"?"left":"right",u=t=="next"?"first":"last",a=this,f;this.sliding=!0,s&&this.pause(),i=i.length?i:this.$element.find(".item")[u](),f=e.Event("slide",{relatedTarget:i[0],direction:o});if(i.hasClass("active"))return;this.$indicators.length&&(this.$indicators.find(".active").removeClass("active"),this.$element.one("slid",function(){var t=e(a.$indicators.children()[a.getActiveIndex()]);t&&t.addClass("active")}));if(e.support.transition&&this.$element.hasClass("slide")){this.$element.trigger(f);if(f.isDefaultPrevented())return;i.addClass(t),i[0].offsetWidth,r.addClass(o),i.addClass(o),this.$element.one(e.support.transition.end,function(){i.removeClass([t,o].join(" ")).addClass("active"),r.removeClass(["active",o].join(" ")),a.sliding=!1,setTimeout(function(){a.$element.trigger("slid")},0)})}else{this.$element.trigger(f);if(f.isDefaultPrevented())return;r.removeClass("active"),i.addClass("active"),this.sliding=!1,this.$element.trigger("slid")}return s&&this.cycle(),this}};var n=e.fn.carousel;e.fn.carousel=function(n){return this.each(function(){var r=e(this),i=r.data("carousel"),s=e.extend({},e.fn.carousel.defaults,typeof n=="object"&&n),o=typeof n=="string"?n:s.slide;i||r.data("carousel",i=new t(this,s)),typeof n=="number"?i.to(n):o?i[o]():s.interval&&i.pause().cycle()})},e.fn.carousel.defaults={interval:5e3,pause:"hover"},e.fn.carousel.Constructor=t,e.fn.carousel.noConflict=function(){return e.fn.carousel=n,this},e(document).on("click.carousel.data-api","[data-slide], [data-slide-to]",function(t){var n=e(this),r,i=e(n.attr("data-target")||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,"")),s=e.extend({},i.data(),n.data()),o;i.carousel(s),(o=n.attr("data-slide-to"))&&i.data("carousel").pause().to(o).cycle(),t.preventDefault()})}(window.jQuery),!function(e){"use strict";var 
t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.collapse.defaults,n),this.options.parent&&(this.$parent=e(this.options.parent)),this.options.toggle&&this.toggle()};t.prototype={constructor:t,dimension:function(){var e=this.$element.hasClass("width");return e?"width":"height"},show:function(){var t,n,r,i;if(this.transitioning||this.$element.hasClass("in"))return;t=this.dimension(),n=e.camelCase(["scroll",t].join("-")),r=this.$parent&&this.$parent.find("> .accordion-group > .in");if(r&&r.length){i=r.data("collapse");if(i&&i.transitioning)return;r.collapse("hide"),i||r.data("collapse",null)}this.$element[t](0),this.transition("addClass",e.Event("show"),"shown"),e.support.transition&&this.$element[t](this.$element[0][n])},hide:function(){var t;if(this.transitioning||!this.$element.hasClass("in"))return;t=this.dimension(),this.reset(this.$element[t]()),this.transition("removeClass",e.Event("hide"),"hidden"),this.$element[t](0)},reset:function(e){var t=this.dimension();return this.$element.removeClass("collapse")[t](e||"auto")[0].offsetWidth,this.$element[e!==null?"addClass":"removeClass"]("collapse"),this},transition:function(t,n,r){var i=this,s=function(){n.type=="show"&&i.reset(),i.transitioning=0,i.$element.trigger(r)};this.$element.trigger(n);if(n.isDefaultPrevented())return;this.transitioning=1,this.$element[t]("in"),e.support.transition&&this.$element.hasClass("collapse")?this.$element.one(e.support.transition.end,s):s()},toggle:function(){this[this.$element.hasClass("in")?"hide":"show"]()}};var n=e.fn.collapse;e.fn.collapse=function(n){return this.each(function(){var r=e(this),i=r.data("collapse"),s=e.extend({},e.fn.collapse.defaults,r.data(),typeof n=="object"&&n);i||r.data("collapse",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.collapse.defaults={toggle:!0},e.fn.collapse.Constructor=t,e.fn.collapse.noConflict=function(){return e.fn.collapse=n,this},e(document).on("click.collapse.data-api","[data-toggle=collapse]",function(t){var 
n=e(this),r,i=n.attr("data-target")||t.preventDefault()||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,""),s=e(i).data("collapse")?"toggle":n.data();n[e(i).hasClass("in")?"addClass":"removeClass"]("collapsed"),e(i).collapse(s)})}(window.jQuery),!function(e){"use strict";function r(){e(t).each(function(){i(e(this)).removeClass("open")})}function i(t){var n=t.attr("data-target"),r;n||(n=t.attr("href"),n=n&&/#/.test(n)&&n.replace(/.*(?=#[^\s]*$)/,"")),r=n&&e(n);if(!r||!r.length)r=t.parent();return r}var t="[data-toggle=dropdown]",n=function(t){var n=e(t).on("click.dropdown.data-api",this.toggle);e("html").on("click.dropdown.data-api",function(){n.parent().removeClass("open")})};n.prototype={constructor:n,toggle:function(t){var n=e(this),s,o;if(n.is(".disabled, :disabled"))return;return s=i(n),o=s.hasClass("open"),r(),o||s.toggleClass("open"),n.focus(),!1},keydown:function(n){var r,s,o,u,a,f;if(!/(38|40|27)/.test(n.keyCode))return;r=e(this),n.preventDefault(),n.stopPropagation();if(r.is(".disabled, :disabled"))return;u=i(r),a=u.hasClass("open");if(!a||a&&n.keyCode==27)return n.which==27&&u.find(t).focus(),r.click();s=e("[role=menu] li:not(.divider):visible a",u);if(!s.length)return;f=s.index(s.filter(":focus")),n.keyCode==38&&f>0&&f--,n.keyCode==40&&f').appendTo(document.body),this.$backdrop.click(this.options.backdrop=="static"?e.proxy(this.$element[0].focus,this.$element[0]):e.proxy(this.hide,this)),i&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in");if(!t)return;i?this.$backdrop.one(e.support.transition.end,t):t()}else!this.isShown&&this.$backdrop?(this.$backdrop.removeClass("in"),e.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one(e.support.transition.end,t):t()):t&&t()}};var n=e.fn.modal;e.fn.modal=function(n){return this.each(function(){var r=e(this),i=r.data("modal"),s=e.extend({},e.fn.modal.defaults,r.data(),typeof n=="object"&&n);i||r.data("modal",i=new t(this,s)),typeof 
n=="string"?i[n]():s.show&&i.show()})},e.fn.modal.defaults={backdrop:!0,keyboard:!0,show:!0},e.fn.modal.Constructor=t,e.fn.modal.noConflict=function(){return e.fn.modal=n,this},e(document).on("click.modal.data-api",'[data-toggle="modal"]',function(t){var n=e(this),r=n.attr("href"),i=e(n.attr("data-target")||r&&r.replace(/.*(?=#[^\s]+$)/,"")),s=i.data("modal")?"toggle":e.extend({remote:!/#/.test(r)&&r},i.data(),n.data());t.preventDefault(),i.modal(s).one("hide",function(){n.focus()})})}(window.jQuery),!function(e){"use strict";var t=function(e,t){this.init("tooltip",e,t)};t.prototype={constructor:t,init:function(t,n,r){var i,s,o,u,a;this.type=t,this.$element=e(n),this.options=this.getOptions(r),this.enabled=!0,o=this.options.trigger.split(" ");for(a=o.length;a--;)u=o[a],u=="click"?this.$element.on("click."+this.type,this.options.selector,e.proxy(this.toggle,this)):u!="manual"&&(i=u=="hover"?"mouseenter":"focus",s=u=="hover"?"mouseleave":"blur",this.$element.on(i+"."+this.type,this.options.selector,e.proxy(this.enter,this)),this.$element.on(s+"."+this.type,this.options.selector,e.proxy(this.leave,this)));this.options.selector?this._options=e.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},getOptions:function(t){return t=e.extend({},e.fn[this.type].defaults,this.$element.data(),t),t.delay&&typeof t.delay=="number"&&(t.delay={show:t.delay,hide:t.delay}),t},enter:function(t){var n=e.fn[this.type].defaults,r={},i;this._options&&e.each(this._options,function(e,t){n[e]!=t&&(r[e]=t)},this),i=e(t.currentTarget)[this.type](r).data(this.type);if(!i.options.delay||!i.options.delay.show)return i.show();clearTimeout(this.timeout),i.hoverState="in",this.timeout=setTimeout(function(){i.hoverState=="in"&&i.show()},i.options.delay.show)},leave:function(t){var n=e(t.currentTarget)[this.type](this._options).data(this.type);this.timeout&&clearTimeout(this.timeout);if(!n.options.delay||!n.options.delay.hide)return 
n.hide();n.hoverState="out",this.timeout=setTimeout(function(){n.hoverState=="out"&&n.hide()},n.options.delay.hide)},show:function(){var t,n,r,i,s,o,u=e.Event("show");if(this.hasContent()&&this.enabled){this.$element.trigger(u);if(u.isDefaultPrevented())return;t=this.tip(),this.setContent(),this.options.animation&&t.addClass("fade"),s=typeof this.options.placement=="function"?this.options.placement.call(this,t[0],this.$element[0]):this.options.placement,t.detach().css({top:0,left:0,display:"block"}),this.options.container?t.appendTo(this.options.container):t.insertAfter(this.$element),n=this.getPosition(),r=t[0].offsetWidth,i=t[0].offsetHeight;switch(s){case"bottom":o={top:n.top+n.height,left:n.left+n.width/2-r/2};break;case"top":o={top:n.top-i,left:n.left+n.width/2-r/2};break;case"left":o={top:n.top+n.height/2-i/2,left:n.left-r};break;case"right":o={top:n.top+n.height/2-i/2,left:n.left+n.width}}this.applyPlacement(o,s),this.$element.trigger("shown")}},applyPlacement:function(e,t){var n=this.tip(),r=n[0].offsetWidth,i=n[0].offsetHeight,s,o,u,a;n.offset(e).addClass(t).addClass("in"),s=n[0].offsetWidth,o=n[0].offsetHeight,t=="top"&&o!=i&&(e.top=e.top+i-o,a=!0),t=="bottom"||t=="top"?(u=0,e.left<0&&(u=e.left*-2,e.left=0,n.offset(e),s=n[0].offsetWidth,o=n[0].offsetHeight),this.replaceArrow(u-r+s,s,"left")):this.replaceArrow(o-i,o,"top"),a&&n.offset(e)},replaceArrow:function(e,t,n){this.arrow().css(n,e?50*(1-e/t)+"%":"")},setContent:function(){var e=this.tip(),t=this.getTitle();e.find(".tooltip-inner")[this.options.html?"html":"text"](t),e.removeClass("fade in top bottom left right")},hide:function(){function i(){var t=setTimeout(function(){n.off(e.support.transition.end).detach()},500);n.one(e.support.transition.end,function(){clearTimeout(t),n.detach()})}var t=this,n=this.tip(),r=e.Event("hide");this.$element.trigger(r);if(r.isDefaultPrevented())return;return 
n.removeClass("in"),e.support.transition&&this.$tip.hasClass("fade")?i():n.detach(),this.$element.trigger("hidden"),this},fixTitle:function(){var e=this.$element;(e.attr("title")||typeof e.attr("data-original-title")!="string")&&e.attr("data-original-title",e.attr("title")||"").attr("title","")},hasContent:function(){return this.getTitle()},getPosition:function(){var t=this.$element[0];return e.extend({},typeof t.getBoundingClientRect=="function"?t.getBoundingClientRect():{width:t.offsetWidth,height:t.offsetHeight},this.$element.offset())},getTitle:function(){var e,t=this.$element,n=this.options;return e=t.attr("data-original-title")||(typeof n.title=="function"?n.title.call(t[0]):n.title),e},tip:function(){return this.$tip=this.$tip||e(this.options.template)},arrow:function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},validate:function(){this.$element[0].parentNode||(this.hide(),this.$element=null,this.options=null)},enable:function(){this.enabled=!0},disable:function(){this.enabled=!1},toggleEnabled:function(){this.enabled=!this.enabled},toggle:function(t){var n=t?e(t.currentTarget)[this.type](this._options).data(this.type):this;n.tip().hasClass("in")?n.hide():n.show()},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}};var n=e.fn.tooltip;e.fn.tooltip=function(n){return this.each(function(){var r=e(this),i=r.data("tooltip"),s=typeof n=="object"&&n;i||r.data("tooltip",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.tooltip.Constructor=t,e.fn.tooltip.defaults={animation:!0,placement:"top",selector:!1,template:'
      ',trigger:"hover focus",title:"",delay:0,html:!1,container:!1},e.fn.tooltip.noConflict=function(){return e.fn.tooltip=n,this}}(window.jQuery),!function(e){"use strict";var t=function(e,t){this.init("popover",e,t)};t.prototype=e.extend({},e.fn.tooltip.Constructor.prototype,{constructor:t,setContent:function(){var e=this.tip(),t=this.getTitle(),n=this.getContent();e.find(".popover-title")[this.options.html?"html":"text"](t),e.find(".popover-content")[this.options.html?"html":"text"](n),e.removeClass("fade top bottom left right in")},hasContent:function(){return this.getTitle()||this.getContent()},getContent:function(){var e,t=this.$element,n=this.options;return e=(typeof n.content=="function"?n.content.call(t[0]):n.content)||t.attr("data-content"),e},tip:function(){return this.$tip||(this.$tip=e(this.options.template)),this.$tip},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}});var n=e.fn.popover;e.fn.popover=function(n){return this.each(function(){var r=e(this),i=r.data("popover"),s=typeof n=="object"&&n;i||r.data("popover",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.popover.Constructor=t,e.fn.popover.defaults=e.extend({},e.fn.tooltip.defaults,{placement:"right",trigger:"click",content:"",template:'

      '}),e.fn.popover.noConflict=function(){return e.fn.popover=n,this}}(window.jQuery),!function(e){"use strict";function t(t,n){var r=e.proxy(this.process,this),i=e(t).is("body")?e(window):e(t),s;this.options=e.extend({},e.fn.scrollspy.defaults,n),this.$scrollElement=i.on("scroll.scroll-spy.data-api",r),this.selector=(this.options.target||(s=e(t).attr("href"))&&s.replace(/.*(?=#[^\s]+$)/,"")||"")+" .nav li > a",this.$body=e("body"),this.refresh(),this.process()}t.prototype={constructor:t,refresh:function(){var t=this,n;this.offsets=e([]),this.targets=e([]),n=this.$body.find(this.selector).map(function(){var n=e(this),r=n.data("target")||n.attr("href"),i=/^#\w/.test(r)&&e(r);return i&&i.length&&[[i.position().top+(!e.isWindow(t.$scrollElement.get(0))&&t.$scrollElement.scrollTop()),r]]||null}).sort(function(e,t){return e[0]-t[0]}).each(function(){t.offsets.push(this[0]),t.targets.push(this[1])})},process:function(){var e=this.$scrollElement.scrollTop()+this.options.offset,t=this.$scrollElement[0].scrollHeight||this.$body[0].scrollHeight,n=t-this.$scrollElement.height(),r=this.offsets,i=this.targets,s=this.activeTarget,o;if(e>=n)return s!=(o=i.last()[0])&&this.activate(o);for(o=r.length;o--;)s!=i[o]&&e>=r[o]&&(!r[o+1]||e<=r[o+1])&&this.activate(i[o])},activate:function(t){var n,r;this.activeTarget=t,e(this.selector).parent(".active").removeClass("active"),r=this.selector+'[data-target="'+t+'"],'+this.selector+'[href="'+t+'"]',n=e(r).parent("li").addClass("active"),n.parent(".dropdown-menu").length&&(n=n.closest("li.dropdown").addClass("active")),n.trigger("activate")}};var n=e.fn.scrollspy;e.fn.scrollspy=function(n){return this.each(function(){var r=e(this),i=r.data("scrollspy"),s=typeof n=="object"&&n;i||r.data("scrollspy",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.scrollspy.Constructor=t,e.fn.scrollspy.defaults={offset:10},e.fn.scrollspy.noConflict=function(){return 
e.fn.scrollspy=n,this},e(window).on("load",function(){e('[data-spy="scroll"]').each(function(){var t=e(this);t.scrollspy(t.data())})})}(window.jQuery),!function(e){"use strict";var t=function(t){this.element=e(t)};t.prototype={constructor:t,show:function(){var t=this.element,n=t.closest("ul:not(.dropdown-menu)"),r=t.attr("data-target"),i,s,o;r||(r=t.attr("href"),r=r&&r.replace(/.*(?=#[^\s]*$)/,""));if(t.parent("li").hasClass("active"))return;i=n.find(".active:last a")[0],o=e.Event("show",{relatedTarget:i}),t.trigger(o);if(o.isDefaultPrevented())return;s=e(r),this.activate(t.parent("li"),n),this.activate(s,s.parent(),function(){t.trigger({type:"shown",relatedTarget:i})})},activate:function(t,n,r){function o(){i.removeClass("active").find("> .dropdown-menu > .active").removeClass("active"),t.addClass("active"),s?(t[0].offsetWidth,t.addClass("in")):t.removeClass("fade"),t.parent(".dropdown-menu")&&t.closest("li.dropdown").addClass("active"),r&&r()}var i=n.find("> .active"),s=r&&e.support.transition&&i.hasClass("fade");s?i.one(e.support.transition.end,o):o(),i.removeClass("in")}};var n=e.fn.tab;e.fn.tab=function(n){return this.each(function(){var r=e(this),i=r.data("tab");i||r.data("tab",i=new t(this)),typeof n=="string"&&i[n]()})},e.fn.tab.Constructor=t,e.fn.tab.noConflict=function(){return e.fn.tab=n,this},e(document).on("click.tab.data-api",'[data-toggle="tab"], [data-toggle="pill"]',function(t){t.preventDefault(),e(this).tab("show")})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.typeahead.defaults,n),this.matcher=this.options.matcher||this.matcher,this.sorter=this.options.sorter||this.sorter,this.highlighter=this.options.highlighter||this.highlighter,this.updater=this.options.updater||this.updater,this.source=this.options.source,this.$menu=e(this.options.menu),this.shown=!1,this.listen()};t.prototype={constructor:t,select:function(){var e=this.$menu.find(".active").attr("data-value");return 
this.$element.val(this.updater(e)).change(),this.hide()},updater:function(e){return e},show:function(){var t=e.extend({},this.$element.position(),{height:this.$element[0].offsetHeight});return this.$menu.insertAfter(this.$element).css({top:t.top+t.height,left:t.left}).show(),this.shown=!0,this},hide:function(){return this.$menu.hide(),this.shown=!1,this},lookup:function(t){var n;return this.query=this.$element.val(),!this.query||this.query.length"+t+""})},render:function(t){var n=this;return t=e(t).map(function(t,r){return t=e(n.options.item).attr("data-value",r),t.find("a").html(n.highlighter(r)),t[0]}),t.first().addClass("active"),this.$menu.html(t),this},next:function(t){var n=this.$menu.find(".active").removeClass("active"),r=n.next();r.length||(r=e(this.$menu.find("li")[0])),r.addClass("active")},prev:function(e){var t=this.$menu.find(".active").removeClass("active"),n=t.prev();n.length||(n=this.$menu.find("li").last()),n.addClass("active")},listen:function(){this.$element.on("focus",e.proxy(this.focus,this)).on("blur",e.proxy(this.blur,this)).on("keypress",e.proxy(this.keypress,this)).on("keyup",e.proxy(this.keyup,this)),this.eventSupported("keydown")&&this.$element.on("keydown",e.proxy(this.keydown,this)),this.$menu.on("click",e.proxy(this.click,this)).on("mouseenter","li",e.proxy(this.mouseenter,this)).on("mouseleave","li",e.proxy(this.mouseleave,this))},eventSupported:function(e){var t=e in this.$element;return t||(this.$element.setAttribute(e,"return;"),t=typeof this.$element[e]=="function"),t},move:function(e){if(!this.shown)return;switch(e.keyCode){case 9:case 13:case 27:e.preventDefault();break;case 38:e.preventDefault(),this.prev();break;case 40:e.preventDefault(),this.next()}e.stopPropagation()},keydown:function(t){this.suppressKeyPressRepeat=~e.inArray(t.keyCode,[40,38,9,13,27]),this.move(t)},keypress:function(e){if(this.suppressKeyPressRepeat)return;this.move(e)},keyup:function(e){switch(e.keyCode){case 40:case 38:case 16:case 17:case 18:break;case 
9:case 13:if(!this.shown)return;this.select();break;case 27:if(!this.shown)return;this.hide();break;default:this.lookup()}e.stopPropagation(),e.preventDefault()},focus:function(e){this.focused=!0},blur:function(e){this.focused=!1,!this.mousedover&&this.shown&&this.hide()},click:function(e){e.stopPropagation(),e.preventDefault(),this.select(),this.$element.focus()},mouseenter:function(t){this.mousedover=!0,this.$menu.find(".active").removeClass("active"),e(t.currentTarget).addClass("active")},mouseleave:function(e){this.mousedover=!1,!this.focused&&this.shown&&this.hide()}};var n=e.fn.typeahead;e.fn.typeahead=function(n){return this.each(function(){var r=e(this),i=r.data("typeahead"),s=typeof n=="object"&&n;i||r.data("typeahead",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.typeahead.defaults={source:[],items:8,menu:'',item:'
    • ',minLength:1},e.fn.typeahead.Constructor=t,e.fn.typeahead.noConflict=function(){return e.fn.typeahead=n,this},e(document).on("focus.typeahead.data-api",'[data-provide="typeahead"]',function(t){var n=e(this);if(n.data("typeahead"))return;n.typeahead(n.data())})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.options=e.extend({},e.fn.affix.defaults,n),this.$window=e(window).on("scroll.affix.data-api",e.proxy(this.checkPosition,this)).on("click.affix.data-api",e.proxy(function(){setTimeout(e.proxy(this.checkPosition,this),1)},this)),this.$element=e(t),this.checkPosition()};t.prototype.checkPosition=function(){if(!this.$element.is(":visible"))return;var t=e(document).height(),n=this.$window.scrollTop(),r=this.$element.offset(),i=this.options.offset,s=i.bottom,o=i.top,u="affix affix-top affix-bottom",a;typeof i!="object"&&(s=o=i),typeof o=="function"&&(o=i.top()),typeof s=="function"&&(s=i.bottom()),a=this.unpin!=null&&n+this.unpin<=r.top?!1:s!=null&&r.top+this.$element.height()>=t-s?"bottom":o!=null&&n<=o?"top":!1;if(this.affixed===a)return;this.affixed=a,this.unpin=a=="bottom"?r.top-n:null,this.$element.removeClass(u).addClass("affix"+(a?"-"+a:""))};var n=e.fn.affix;e.fn.affix=function(n){return this.each(function(){var r=e(this),i=r.data("affix"),s=typeof n=="object"&&n;i||r.data("affix",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.affix.Constructor=t,e.fn.affix.defaults={offset:0},e.fn.affix.noConflict=function(){return e.fn.affix=n,this},e(window).on("load",function(){e('[data-spy="affix"]').each(function(){var t=e(this),n=t.data();n.offset=n.offset||{},n.offsetBottom&&(n.offset.bottom=n.offsetBottom),n.offsetTop&&(n.offset.top=n.offsetTop),t.affix(n)})})}(window.jQuery); \ No newline at end of file diff --git a/rpki/gui/app/static/js/jquery-1.8.3.min.js b/rpki/gui/app/static/js/jquery-1.8.3.min.js new file mode 100644 index 00000000..83589daa --- /dev/null +++ b/rpki/gui/app/static/js/jquery-1.8.3.min.js @@ -0,0 +1,2 @@ +/*! 
jQuery v1.8.3 jquery.com | jquery.org/license */ +(function(e,t){function _(e){var t=M[e]={};return v.each(e.split(y),function(e,n){t[n]=!0}),t}function H(e,n,r){if(r===t&&e.nodeType===1){var i="data-"+n.replace(P,"-$1").toLowerCase();r=e.getAttribute(i);if(typeof r=="string"){try{r=r==="true"?!0:r==="false"?!1:r==="null"?null:+r+""===r?+r:D.test(r)?v.parseJSON(r):r}catch(s){}v.data(e,n,r)}else r=t}return r}function B(e){var t;for(t in e){if(t==="data"&&v.isEmptyObject(e[t]))continue;if(t!=="toJSON")return!1}return!0}function et(){return!1}function tt(){return!0}function ut(e){return!e||!e.parentNode||e.parentNode.nodeType===11}function at(e,t){do e=e[t];while(e&&e.nodeType!==1);return e}function ft(e,t,n){t=t||0;if(v.isFunction(t))return v.grep(e,function(e,r){var i=!!t.call(e,r,e);return i===n});if(t.nodeType)return v.grep(e,function(e,r){return e===t===n});if(typeof t=="string"){var r=v.grep(e,function(e){return e.nodeType===1});if(it.test(t))return v.filter(t,r,!n);t=v.filter(t,r)}return v.grep(e,function(e,r){return v.inArray(e,t)>=0===n})}function lt(e){var t=ct.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return n}function Lt(e,t){return e.getElementsByTagName(t)[0]||e.appendChild(e.ownerDocument.createElement(t))}function At(e,t){if(t.nodeType!==1||!v.hasData(e))return;var n,r,i,s=v._data(e),o=v._data(t,s),u=s.events;if(u){delete o.handle,o.events={};for(n in u)for(r=0,i=u[n].length;r").appendTo(i.body),n=t.css("display");t.remove();if(n==="none"||n===""){Pt=i.body.appendChild(Pt||v.extend(i.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!Ht||!Pt.createElement)Ht=(Pt.contentWindow||Pt.contentDocument).document,Ht.write(""),Ht.close();t=Ht.body.appendChild(Ht.createElement(e)),n=Dt(t,"display"),i.body.removeChild(Pt)}return Wt[e]=n,n}function fn(e,t,n,r){var i;if(v.isArray(t))v.each(t,function(t,i){n||sn.test(e)?r(e,i):fn(e+"["+(typeof i=="object"?t:"")+"]",i,n,r)});else 
if(!n&&v.type(t)==="object")for(i in t)fn(e+"["+i+"]",t[i],n,r);else r(e,t)}function Cn(e){return function(t,n){typeof t!="string"&&(n=t,t="*");var r,i,s,o=t.toLowerCase().split(y),u=0,a=o.length;if(v.isFunction(n))for(;u)[^>]*$|#([\w\-]*)$)/,E=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,S=/^[\],:{}\s]*$/,x=/(?:^|:|,)(?:\s*\[)+/g,T=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,N=/"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g,C=/^-ms-/,k=/-([\da-z])/gi,L=function(e,t){return(t+"").toUpperCase()},A=function(){i.addEventListener?(i.removeEventListener("DOMContentLoaded",A,!1),v.ready()):i.readyState==="complete"&&(i.detachEvent("onreadystatechange",A),v.ready())},O={};v.fn=v.prototype={constructor:v,init:function(e,n,r){var s,o,u,a;if(!e)return this;if(e.nodeType)return this.context=this[0]=e,this.length=1,this;if(typeof e=="string"){e.charAt(0)==="<"&&e.charAt(e.length-1)===">"&&e.length>=3?s=[null,e,null]:s=w.exec(e);if(s&&(s[1]||!n)){if(s[1])return n=n instanceof v?n[0]:n,a=n&&n.nodeType?n.ownerDocument||n:i,e=v.parseHTML(s[1],a,!0),E.test(s[1])&&v.isPlainObject(n)&&this.attr.call(e,n,!0),v.merge(this,e);o=i.getElementById(s[2]);if(o&&o.parentNode){if(o.id!==s[2])return r.find(e);this.length=1,this[0]=o}return this.context=i,this.selector=e,this}return!n||n.jquery?(n||r).find(e):this.constructor(n).find(e)}return v.isFunction(e)?r.ready(e):(e.selector!==t&&(this.selector=e.selector,this.context=e.context),v.makeArray(e,this))},selector:"",jquery:"1.8.3",length:0,size:function(){return this.length},toArray:function(){return l.call(this)},get:function(e){return e==null?this.toArray():e<0?this[this.length+e]:this[e]},pushStack:function(e,t,n){var r=v.merge(this.constructor(),e);return r.prevObject=this,r.context=this.context,t==="find"?r.selector=this.selector+(this.selector?" 
":"")+n:t&&(r.selector=this.selector+"."+t+"("+n+")"),r},each:function(e,t){return v.each(this,e,t)},ready:function(e){return v.ready.promise().done(e),this},eq:function(e){return e=+e,e===-1?this.slice(e):this.slice(e,e+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(l.apply(this,arguments),"slice",l.call(arguments).join(","))},map:function(e){return this.pushStack(v.map(this,function(t,n){return e.call(t,n,t)}))},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:[].sort,splice:[].splice},v.fn.init.prototype=v.fn,v.extend=v.fn.extend=function(){var e,n,r,i,s,o,u=arguments[0]||{},a=1,f=arguments.length,l=!1;typeof u=="boolean"&&(l=u,u=arguments[1]||{},a=2),typeof u!="object"&&!v.isFunction(u)&&(u={}),f===a&&(u=this,--a);for(;a0)return;r.resolveWith(i,[v]),v.fn.trigger&&v(i).trigger("ready").off("ready")},isFunction:function(e){return v.type(e)==="function"},isArray:Array.isArray||function(e){return v.type(e)==="array"},isWindow:function(e){return e!=null&&e==e.window},isNumeric:function(e){return!isNaN(parseFloat(e))&&isFinite(e)},type:function(e){return e==null?String(e):O[h.call(e)]||"object"},isPlainObject:function(e){if(!e||v.type(e)!=="object"||e.nodeType||v.isWindow(e))return!1;try{if(e.constructor&&!p.call(e,"constructor")&&!p.call(e.constructor.prototype,"isPrototypeOf"))return!1}catch(n){return!1}var r;for(r in e);return r===t||p.call(e,r)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},error:function(e){throw new Error(e)},parseHTML:function(e,t,n){var r;return!e||typeof e!="string"?null:(typeof t=="boolean"&&(n=t,t=0),t=t||i,(r=E.exec(e))?[t.createElement(r[1])]:(r=v.buildFragment([e],t,n?null:[]),v.merge([],(r.cacheable?v.clone(r.fragment):r.fragment).childNodes)))},parseJSON:function(t){if(!t||typeof t!="string")return null;t=v.trim(t);if(e.JSON&&e.JSON.parse)return 
e.JSON.parse(t);if(S.test(t.replace(T,"@").replace(N,"]").replace(x,"")))return(new Function("return "+t))();v.error("Invalid JSON: "+t)},parseXML:function(n){var r,i;if(!n||typeof n!="string")return null;try{e.DOMParser?(i=new DOMParser,r=i.parseFromString(n,"text/xml")):(r=new ActiveXObject("Microsoft.XMLDOM"),r.async="false",r.loadXML(n))}catch(s){r=t}return(!r||!r.documentElement||r.getElementsByTagName("parsererror").length)&&v.error("Invalid XML: "+n),r},noop:function(){},globalEval:function(t){t&&g.test(t)&&(e.execScript||function(t){e.eval.call(e,t)})(t)},camelCase:function(e){return e.replace(C,"ms-").replace(k,L)},nodeName:function(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()},each:function(e,n,r){var i,s=0,o=e.length,u=o===t||v.isFunction(e);if(r){if(u){for(i in e)if(n.apply(e[i],r)===!1)break}else for(;s0&&e[0]&&e[a-1]||a===0||v.isArray(e));if(f)for(;u-1)a.splice(n,1),i&&(n<=o&&o--,n<=u&&u--)}),this},has:function(e){return v.inArray(e,a)>-1},empty:function(){return a=[],this},disable:function(){return a=f=n=t,this},disabled:function(){return!a},lock:function(){return f=t,n||c.disable(),this},locked:function(){return!f},fireWith:function(e,t){return t=t||[],t=[e,t.slice?t.slice():t],a&&(!r||f)&&(i?f.push(t):l(t)),this},fire:function(){return c.fireWith(this,arguments),this},fired:function(){return!!r}};return c},v.extend({Deferred:function(e){var t=[["resolve","done",v.Callbacks("once memory"),"resolved"],["reject","fail",v.Callbacks("once memory"),"rejected"],["notify","progress",v.Callbacks("memory")]],n="pending",r={state:function(){return n},always:function(){return i.done(arguments).fail(arguments),this},then:function(){var e=arguments;return v.Deferred(function(n){v.each(t,function(t,r){var s=r[0],o=e[t];i[r[1]](v.isFunction(o)?function(){var 
e=o.apply(this,arguments);e&&v.isFunction(e.promise)?e.promise().done(n.resolve).fail(n.reject).progress(n.notify):n[s+"With"](this===i?n:this,[e])}:n[s])}),e=null}).promise()},promise:function(e){return e!=null?v.extend(e,r):r}},i={};return r.pipe=r.then,v.each(t,function(e,s){var o=s[2],u=s[3];r[s[1]]=o.add,u&&o.add(function(){n=u},t[e^1][2].disable,t[2][2].lock),i[s[0]]=o.fire,i[s[0]+"With"]=o.fireWith}),r.promise(i),e&&e.call(i,i),i},when:function(e){var t=0,n=l.call(arguments),r=n.length,i=r!==1||e&&v.isFunction(e.promise)?r:0,s=i===1?e:v.Deferred(),o=function(e,t,n){return function(r){t[e]=this,n[e]=arguments.length>1?l.call(arguments):r,n===u?s.notifyWith(t,n):--i||s.resolveWith(t,n)}},u,a,f;if(r>1){u=new Array(r),a=new Array(r),f=new Array(r);for(;t
      a",n=p.getElementsByTagName("*"),r=p.getElementsByTagName("a")[0];if(!n||!r||!n.length)return{};s=i.createElement("select"),o=s.appendChild(i.createElement("option")),u=p.getElementsByTagName("input")[0],r.style.cssText="top:1px;float:left;opacity:.5",t={leadingWhitespace:p.firstChild.nodeType===3,tbody:!p.getElementsByTagName("tbody").length,htmlSerialize:!!p.getElementsByTagName("link").length,style:/top/.test(r.getAttribute("style")),hrefNormalized:r.getAttribute("href")==="/a",opacity:/^0.5/.test(r.style.opacity),cssFloat:!!r.style.cssFloat,checkOn:u.value==="on",optSelected:o.selected,getSetAttribute:p.className!=="t",enctype:!!i.createElement("form").enctype,html5Clone:i.createElement("nav").cloneNode(!0).outerHTML!=="<:nav>",boxModel:i.compatMode==="CSS1Compat",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,boxSizingReliable:!0,pixelPosition:!1},u.checked=!0,t.noCloneChecked=u.cloneNode(!0).checked,s.disabled=!0,t.optDisabled=!o.disabled;try{delete p.test}catch(d){t.deleteExpando=!1}!p.addEventListener&&p.attachEvent&&p.fireEvent&&(p.attachEvent("onclick",h=function(){t.noCloneEvent=!1}),p.cloneNode(!0).fireEvent("onclick"),p.detachEvent("onclick",h)),u=i.createElement("input"),u.value="t",u.setAttribute("type","radio"),t.radioValue=u.value==="t",u.setAttribute("checked","checked"),u.setAttribute("name","t"),p.appendChild(u),a=i.createDocumentFragment(),a.appendChild(p.lastChild),t.checkClone=a.cloneNode(!0).cloneNode(!0).lastChild.checked,t.appendChecked=u.checked,a.removeChild(u),a.appendChild(p);if(p.attachEvent)for(l in{submit:!0,change:!0,focusin:!0})f="on"+l,c=f in p,c||(p.setAttribute(f,"return;"),c=typeof p[f]=="function"),t[l+"Bubbles"]=c;return v(function(){var 
n,r,s,o,u="padding:0;margin:0;border:0;display:block;overflow:hidden;",a=i.getElementsByTagName("body")[0];if(!a)return;n=i.createElement("div"),n.style.cssText="visibility:hidden;border:0;width:0;height:0;position:static;top:0;margin-top:1px",a.insertBefore(n,a.firstChild),r=i.createElement("div"),n.appendChild(r),r.innerHTML="
      t
      ",s=r.getElementsByTagName("td"),s[0].style.cssText="padding:0;margin:0;border:0;display:none",c=s[0].offsetHeight===0,s[0].style.display="",s[1].style.display="none",t.reliableHiddenOffsets=c&&s[0].offsetHeight===0,r.innerHTML="",r.style.cssText="box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;",t.boxSizing=r.offsetWidth===4,t.doesNotIncludeMarginInBodyOffset=a.offsetTop!==1,e.getComputedStyle&&(t.pixelPosition=(e.getComputedStyle(r,null)||{}).top!=="1%",t.boxSizingReliable=(e.getComputedStyle(r,null)||{width:"4px"}).width==="4px",o=i.createElement("div"),o.style.cssText=r.style.cssText=u,o.style.marginRight=o.style.width="0",r.style.width="1px",r.appendChild(o),t.reliableMarginRight=!parseFloat((e.getComputedStyle(o,null)||{}).marginRight)),typeof r.style.zoom!="undefined"&&(r.innerHTML="",r.style.cssText=u+"width:1px;padding:1px;display:inline;zoom:1",t.inlineBlockNeedsLayout=r.offsetWidth===3,r.style.display="block",r.style.overflow="visible",r.innerHTML="
      ",r.firstChild.style.width="5px",t.shrinkWrapBlocks=r.offsetWidth!==3,n.style.zoom=1),a.removeChild(n),n=r=s=o=null}),a.removeChild(p),n=r=s=o=u=a=p=null,t}();var D=/(?:\{[\s\S]*\}|\[[\s\S]*\])$/,P=/([A-Z])/g;v.extend({cache:{},deletedIds:[],uuid:0,expando:"jQuery"+(v.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(e){return e=e.nodeType?v.cache[e[v.expando]]:e[v.expando],!!e&&!B(e)},data:function(e,n,r,i){if(!v.acceptData(e))return;var s,o,u=v.expando,a=typeof n=="string",f=e.nodeType,l=f?v.cache:e,c=f?e[u]:e[u]&&u;if((!c||!l[c]||!i&&!l[c].data)&&a&&r===t)return;c||(f?e[u]=c=v.deletedIds.pop()||v.guid++:c=u),l[c]||(l[c]={},f||(l[c].toJSON=v.noop));if(typeof n=="object"||typeof n=="function")i?l[c]=v.extend(l[c],n):l[c].data=v.extend(l[c].data,n);return s=l[c],i||(s.data||(s.data={}),s=s.data),r!==t&&(s[v.camelCase(n)]=r),a?(o=s[n],o==null&&(o=s[v.camelCase(n)])):o=s,o},removeData:function(e,t,n){if(!v.acceptData(e))return;var r,i,s,o=e.nodeType,u=o?v.cache:e,a=o?e[v.expando]:v.expando;if(!u[a])return;if(t){r=n?u[a]:u[a].data;if(r){v.isArray(t)||(t in r?t=[t]:(t=v.camelCase(t),t in r?t=[t]:t=t.split(" ")));for(i=0,s=t.length;i1,null,!1))},removeData:function(e){return this.each(function(){v.removeData(this,e)})}}),v.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=v._data(e,t),n&&(!r||v.isArray(n)?r=v._data(e,t,v.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=v.queue(e,t),r=n.length,i=n.shift(),s=v._queueHooks(e,t),o=function(){v.dequeue(e,t)};i==="inprogress"&&(i=n.shift(),r--),i&&(t==="fx"&&n.unshift("inprogress"),delete s.stop,i.call(e,o,s)),!r&&s&&s.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return v._data(e,n)||v._data(e,n,{empty:v.Callbacks("once memory").add(function(){v.removeData(e,t+"queue",!0),v.removeData(e,n,!0)})})}}),v.fn.extend({queue:function(e,n){var r=2;return typeof 
e!="string"&&(n=e,e="fx",r--),arguments.length1)},removeAttr:function(e){return this.each(function(){v.removeAttr(this,e)})},prop:function(e,t){return v.access(this,v.prop,e,t,arguments.length>1)},removeProp:function(e){return e=v.propFix[e]||e,this.each(function(){try{this[e]=t,delete this[e]}catch(n){}})},addClass:function(e){var t,n,r,i,s,o,u;if(v.isFunction(e))return this.each(function(t){v(this).addClass(e.call(this,t,this.className))});if(e&&typeof e=="string"){t=e.split(y);for(n=0,r=this.length;n=0)r=r.replace(" "+n[s]+" "," ");i.className=e?v.trim(r):""}}}return this},toggleClass:function(e,t){var n=typeof e,r=typeof t=="boolean";return v.isFunction(e)?this.each(function(n){v(this).toggleClass(e.call(this,n,this.className,t),t)}):this.each(function(){if(n==="string"){var i,s=0,o=v(this),u=t,a=e.split(y);while(i=a[s++])u=r?u:!o.hasClass(i),o[u?"addClass":"removeClass"](i)}else if(n==="undefined"||n==="boolean")this.className&&v._data(this,"__className__",this.className),this.className=this.className||e===!1?"":v._data(this,"__className__")||""})},hasClass:function(e){var t=" "+e+" ",n=0,r=this.length;for(;n=0)return!0;return!1},val:function(e){var n,r,i,s=this[0];if(!arguments.length){if(s)return n=v.valHooks[s.type]||v.valHooks[s.nodeName.toLowerCase()],n&&"get"in n&&(r=n.get(s,"value"))!==t?r:(r=s.value,typeof r=="string"?r.replace(R,""):r==null?"":r);return}return i=v.isFunction(e),this.each(function(r){var s,o=v(this);if(this.nodeType!==1)return;i?s=e.call(this,r,o.val()):s=e,s==null?s="":typeof s=="number"?s+="":v.isArray(s)&&(s=v.map(s,function(e){return e==null?"":e+""})),n=v.valHooks[this.type]||v.valHooks[this.nodeName.toLowerCase()];if(!n||!("set"in n)||n.set(this,s,"value")===t)this.value=s})}}),v.extend({valHooks:{option:{get:function(e){var t=e.attributes.value;return!t||t.specified?e.value:e.text}},select:{get:function(e){var 
t,n,r=e.options,i=e.selectedIndex,s=e.type==="select-one"||i<0,o=s?null:[],u=s?i+1:r.length,a=i<0?u:s?i:0;for(;a=0}),n.length||(e.selectedIndex=-1),n}}},attrFn:{},attr:function(e,n,r,i){var s,o,u,a=e.nodeType;if(!e||a===3||a===8||a===2)return;if(i&&v.isFunction(v.fn[n]))return v(e)[n](r);if(typeof e.getAttribute=="undefined")return v.prop(e,n,r);u=a!==1||!v.isXMLDoc(e),u&&(n=n.toLowerCase(),o=v.attrHooks[n]||(X.test(n)?F:j));if(r!==t){if(r===null){v.removeAttr(e,n);return}return o&&"set"in o&&u&&(s=o.set(e,r,n))!==t?s:(e.setAttribute(n,r+""),r)}return o&&"get"in o&&u&&(s=o.get(e,n))!==null?s:(s=e.getAttribute(n),s===null?t:s)},removeAttr:function(e,t){var n,r,i,s,o=0;if(t&&e.nodeType===1){r=t.split(y);for(;o=0}})});var $=/^(?:textarea|input|select)$/i,J=/^([^\.]*|)(?:\.(.+)|)$/,K=/(?:^|\s)hover(\.\S+|)\b/,Q=/^key/,G=/^(?:mouse|contextmenu)|click/,Y=/^(?:focusinfocus|focusoutblur)$/,Z=function(e){return v.event.special.hover?e:e.replace(K,"mouseenter$1 mouseleave$1")};v.event={add:function(e,n,r,i,s){var o,u,a,f,l,c,h,p,d,m,g;if(e.nodeType===3||e.nodeType===8||!n||!r||!(o=v._data(e)))return;r.handler&&(d=r,r=d.handler,s=d.selector),r.guid||(r.guid=v.guid++),a=o.events,a||(o.events=a={}),u=o.handle,u||(o.handle=u=function(e){return typeof v=="undefined"||!!e&&v.event.triggered===e.type?t:v.event.dispatch.apply(u.elem,arguments)},u.elem=e),n=v.trim(Z(n)).split(" ");for(f=0;f=0&&(y=y.slice(0,-1),a=!0),y.indexOf(".")>=0&&(b=y.split("."),y=b.shift(),b.sort());if((!s||v.event.customEvent[y])&&!v.event.global[y])return;n=typeof n=="object"?n[v.expando]?n:new v.Event(y,n):new v.Event(y),n.type=y,n.isTrigger=!0,n.exclusive=a,n.namespace=b.join("."),n.namespace_re=n.namespace?new RegExp("(^|\\.)"+b.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,h=y.indexOf(":")<0?"on"+y:"";if(!s){u=v.cache;for(f in 
u)u[f].events&&u[f].events[y]&&v.event.trigger(n,r,u[f].handle.elem,!0);return}n.result=t,n.target||(n.target=s),r=r!=null?v.makeArray(r):[],r.unshift(n),p=v.event.special[y]||{};if(p.trigger&&p.trigger.apply(s,r)===!1)return;m=[[s,p.bindType||y]];if(!o&&!p.noBubble&&!v.isWindow(s)){g=p.delegateType||y,l=Y.test(g+y)?s:s.parentNode;for(c=s;l;l=l.parentNode)m.push([l,g]),c=l;c===(s.ownerDocument||i)&&m.push([c.defaultView||c.parentWindow||e,g])}for(f=0;f=0:v.find(h,this,null,[s]).length),u[h]&&f.push(c);f.length&&w.push({elem:s,matches:f})}d.length>m&&w.push({elem:this,matches:d.slice(m)});for(r=0;r0?this.on(t,null,e,n):this.trigger(t)},Q.test(t)&&(v.event.fixHooks[t]=v.event.keyHooks),G.test(t)&&(v.event.fixHooks[t]=v.event.mouseHooks)}),function(e,t){function nt(e,t,n,r){n=n||[],t=t||g;var i,s,a,f,l=t.nodeType;if(!e||typeof e!="string")return n;if(l!==1&&l!==9)return[];a=o(t);if(!a&&!r)if(i=R.exec(e))if(f=i[1]){if(l===9){s=t.getElementById(f);if(!s||!s.parentNode)return n;if(s.id===f)return n.push(s),n}else if(t.ownerDocument&&(s=t.ownerDocument.getElementById(f))&&u(t,s)&&s.id===f)return n.push(s),n}else{if(i[2])return S.apply(n,x.call(t.getElementsByTagName(e),0)),n;if((f=i[3])&&Z&&t.getElementsByClassName)return S.apply(n,x.call(t.getElementsByClassName(f),0)),n}return vt(e.replace(j,"$1"),t,n,r,a)}function rt(e){return function(t){var n=t.nodeName.toLowerCase();return n==="input"&&t.type===e}}function it(e){return function(t){var n=t.nodeName.toLowerCase();return(n==="input"||n==="button")&&t.type===e}}function st(e){return N(function(t){return t=+t,N(function(n,r){var i,s=e([],n.length,t),o=s.length;while(o--)n[i=s[o]]&&(n[i]=!(r[i]=n[i]))})})}function ot(e,t,n){if(e===t)return n;var r=e.nextSibling;while(r){if(r===t)return-1;r=r.nextSibling}return 1}function ut(e,t){var n,r,s,o,u,a,f,l=L[d][e+" "];if(l)return t?0:l.slice(0);u=e,a=[],f=i.preFilter;while(u){if(!n||(r=F.exec(u)))r&&(u=u.slice(r[0].length)||u),a.push(s=[]);n=!1;if(r=I.exec(u))s.push(n=new 
m(r.shift())),u=u.slice(n.length),n.type=r[0].replace(j," ");for(o in i.filter)(r=J[o].exec(u))&&(!f[o]||(r=f[o](r)))&&(s.push(n=new m(r.shift())),u=u.slice(n.length),n.type=o,n.matches=r);if(!n)break}return t?u.length:u?nt.error(e):L(e,a).slice(0)}function at(e,t,r){var i=t.dir,s=r&&t.dir==="parentNode",o=w++;return t.first?function(t,n,r){while(t=t[i])if(s||t.nodeType===1)return e(t,n,r)}:function(t,r,u){if(!u){var a,f=b+" "+o+" ",l=f+n;while(t=t[i])if(s||t.nodeType===1){if((a=t[d])===l)return t.sizset;if(typeof a=="string"&&a.indexOf(f)===0){if(t.sizset)return t}else{t[d]=l;if(e(t,r,u))return t.sizset=!0,t;t.sizset=!1}}}else while(t=t[i])if(s||t.nodeType===1)if(e(t,r,u))return t}}function ft(e){return e.length>1?function(t,n,r){var i=e.length;while(i--)if(!e[i](t,n,r))return!1;return!0}:e[0]}function lt(e,t,n,r,i){var s,o=[],u=0,a=e.length,f=t!=null;for(;u-1&&(s[f]=!(o[f]=c))}}else g=lt(g===o?g.splice(d,g.length):g),i?i(null,o,g,a):S.apply(o,g)})}function ht(e){var t,n,r,s=e.length,o=i.relative[e[0].type],u=o||i.relative[" "],a=o?1:0,f=at(function(e){return e===t},u,!0),l=at(function(e){return T.call(t,e)>-1},u,!0),h=[function(e,n,r){return!o&&(r||n!==c)||((t=n).nodeType?f(e,n,r):l(e,n,r))}];for(;a1&&ft(h),a>1&&e.slice(0,a-1).join("").replace(j,"$1"),n,a0,s=e.length>0,o=function(u,a,f,l,h){var p,d,v,m=[],y=0,w="0",x=u&&[],T=h!=null,N=c,C=u||s&&i.find.TAG("*",h&&a.parentNode||a),k=b+=N==null?1:Math.E;T&&(c=a!==g&&a,n=o.el);for(;(p=C[w])!=null;w++){if(s&&p){for(d=0;v=e[d];d++)if(v(p,a,f)){l.push(p);break}T&&(b=k,n=++o.el)}r&&((p=!v&&p)&&y--,u&&x.push(p))}y+=w;if(r&&w!==y){for(d=0;v=t[d];d++)v(x,m,a,f);if(u){if(y>0)while(w--)!x[w]&&!m[w]&&(m[w]=E.call(l));m=lt(m)}S.apply(l,m),T&&!u&&m.length>0&&y+t.length>1&&nt.uniqueSort(l)}return T&&(b=k,c=N),x};return o.el=0,r?N(o):o}function dt(e,t,n){var r=0,i=t.length;for(;r2&&(f=u[0]).type==="ID"&&t.nodeType===9&&!s&&i.relative[u[1].type]){t=i.find.ID(f.matches[0].replace($,""),t,s)[0];if(!t)return 
n;e=e.slice(u.shift().length)}for(o=J.POS.test(e)?-1:u.length-1;o>=0;o--){f=u[o];if(i.relative[l=f.type])break;if(c=i.find[l])if(r=c(f.matches[0].replace($,""),z.test(u[0].type)&&t.parentNode||t,s)){u.splice(o,1),e=r.length&&u.join("");if(!e)return S.apply(n,x.call(r,0)),n;break}}}return a(e,h)(r,t,s,n,z.test(e)),n}function mt(){}var n,r,i,s,o,u,a,f,l,c,h=!0,p="undefined",d=("sizcache"+Math.random()).replace(".",""),m=String,g=e.document,y=g.documentElement,b=0,w=0,E=[].pop,S=[].push,x=[].slice,T=[].indexOf||function(e){var t=0,n=this.length;for(;ti.cacheLength&&delete e[t.shift()],e[n+" "]=r},e)},k=C(),L=C(),A=C(),O="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[-\\w]|[^\\x00-\\xa0])+",_=M.replace("w","w#"),D="([*^$|!~]?=)",P="\\["+O+"*("+M+")"+O+"*(?:"+D+O+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+_+")|)|)"+O+"*\\]",H=":("+M+")(?:\\((?:(['\"])((?:\\\\.|[^\\\\])*?)\\2|([^()[\\]]*|(?:(?:"+P+")|[^:]|\\\\.)*|.*))\\)|)",B=":(even|odd|eq|gt|lt|nth|first|last)(?:\\("+O+"*((?:-\\d)?\\d*)"+O+"*\\)|)(?=[^-]|$)",j=new RegExp("^"+O+"+|((?:^|[^\\\\])(?:\\\\.)*)"+O+"+$","g"),F=new RegExp("^"+O+"*,"+O+"*"),I=new RegExp("^"+O+"*([\\x20\\t\\r\\n\\f>+~])"+O+"*"),q=new RegExp(H),R=/^(?:#([\w\-]+)|(\w+)|\.([\w\-]+))$/,U=/^:not/,z=/[\x20\t\r\n\f]*[+~]/,W=/:not\($/,X=/h\d/i,V=/input|select|textarea|button/i,$=/\\(?!\\)/g,J={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),NAME:new RegExp("^\\[name=['\"]?("+M+")['\"]?\\]"),TAG:new RegExp("^("+M.replace("w","w*")+")"),ATTR:new RegExp("^"+P),PSEUDO:new RegExp("^"+H),POS:new RegExp(B,"i"),CHILD:new RegExp("^:(only|nth|first|last)-child(?:\\("+O+"*(even|odd|(([+-]|)(\\d*)n|)"+O+"*(?:([+-]|)"+O+"*(\\d+)|))"+O+"*\\)|)","i"),needsContext:new RegExp("^"+O+"*[>+~]|"+B,"i")},K=function(e){var t=g.createElement("div");try{return e(t)}catch(n){return!1}finally{t=null}},Q=K(function(e){return e.appendChild(g.createComment("")),!e.getElementsByTagName("*").length}),G=K(function(e){return e.innerHTML="",e.firstChild&&typeof 
e.firstChild.getAttribute!==p&&e.firstChild.getAttribute("href")==="#"}),Y=K(function(e){e.innerHTML="";var t=typeof e.lastChild.getAttribute("multiple");return t!=="boolean"&&t!=="string"}),Z=K(function(e){return e.innerHTML="",!e.getElementsByClassName||!e.getElementsByClassName("e").length?!1:(e.lastChild.className="e",e.getElementsByClassName("e").length===2)}),et=K(function(e){e.id=d+0,e.innerHTML="
      ",y.insertBefore(e,y.firstChild);var t=g.getElementsByName&&g.getElementsByName(d).length===2+g.getElementsByName(d+0).length;return r=!g.getElementById(d),y.removeChild(e),t});try{x.call(y.childNodes,0)[0].nodeType}catch(tt){x=function(e){var t,n=[];for(;t=this[e];e++)n.push(t);return n}}nt.matches=function(e,t){return nt(e,null,null,t)},nt.matchesSelector=function(e,t){return nt(t,null,null,[e]).length>0},s=nt.getText=function(e){var t,n="",r=0,i=e.nodeType;if(i){if(i===1||i===9||i===11){if(typeof e.textContent=="string")return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=s(e)}else if(i===3||i===4)return e.nodeValue}else for(;t=e[r];r++)n+=s(t);return n},o=nt.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return t?t.nodeName!=="HTML":!1},u=nt.contains=y.contains?function(e,t){var n=e.nodeType===9?e.documentElement:e,r=t&&t.parentNode;return e===r||!!(r&&r.nodeType===1&&n.contains&&n.contains(r))}:y.compareDocumentPosition?function(e,t){return t&&!!(e.compareDocumentPosition(t)&16)}:function(e,t){while(t=t.parentNode)if(t===e)return!0;return!1},nt.attr=function(e,t){var n,r=o(e);return r||(t=t.toLowerCase()),(n=i.attrHandle[t])?n(e):r||Y?e.getAttribute(t):(n=e.getAttributeNode(t),n?typeof e[t]=="boolean"?e[t]?t:null:n.specified?n.value:null:null)},i=nt.selectors={cacheLength:50,createPseudo:N,match:J,attrHandle:G?{}:{href:function(e){return e.getAttribute("href",2)},type:function(e){return e.getAttribute("type")}},find:{ID:r?function(e,t,n){if(typeof t.getElementById!==p&&!n){var r=t.getElementById(e);return r&&r.parentNode?[r]:[]}}:function(e,n,r){if(typeof n.getElementById!==p&&!r){var i=n.getElementById(e);return i?i.id===e||typeof i.getAttributeNode!==p&&i.getAttributeNode("id").value===e?[i]:t:[]}},TAG:Q?function(e,t){if(typeof t.getElementsByTagName!==p)return t.getElementsByTagName(e)}:function(e,t){var n=t.getElementsByTagName(e);if(e==="*"){var r,i=[],s=0;for(;r=n[s];s++)r.nodeType===1&&i.push(r);return i}return 
n},NAME:et&&function(e,t){if(typeof t.getElementsByName!==p)return t.getElementsByName(name)},CLASS:Z&&function(e,t,n){if(typeof t.getElementsByClassName!==p&&!n)return t.getElementsByClassName(e)}},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace($,""),e[3]=(e[4]||e[5]||"").replace($,""),e[2]==="~="&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),e[1]==="nth"?(e[2]||nt.error(e[0]),e[3]=+(e[3]?e[4]+(e[5]||1):2*(e[2]==="even"||e[2]==="odd")),e[4]=+(e[6]+e[7]||e[2]==="odd")):e[2]&&nt.error(e[0]),e},PSEUDO:function(e){var t,n;if(J.CHILD.test(e[0]))return null;if(e[3])e[2]=e[3];else if(t=e[4])q.test(t)&&(n=ut(t,!0))&&(n=t.indexOf(")",t.length-n)-t.length)&&(t=t.slice(0,n),e[0]=e[0].slice(0,n)),e[2]=t;return e.slice(0,3)}},filter:{ID:r?function(e){return e=e.replace($,""),function(t){return t.getAttribute("id")===e}}:function(e){return e=e.replace($,""),function(t){var n=typeof t.getAttributeNode!==p&&t.getAttributeNode("id");return n&&n.value===e}},TAG:function(e){return e==="*"?function(){return!0}:(e=e.replace($,"").toLowerCase(),function(t){return t.nodeName&&t.nodeName.toLowerCase()===e})},CLASS:function(e){var t=k[d][e+" "];return t||(t=new RegExp("(^|"+O+")"+e+"("+O+"|$)"))&&k(e,function(e){return t.test(e.className||typeof e.getAttribute!==p&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r,i){var s=nt.attr(r,e);return s==null?t==="!=":t?(s+="",t==="="?s===n:t==="!="?s!==n:t==="^="?n&&s.indexOf(n)===0:t==="*="?n&&s.indexOf(n)>-1:t==="$="?n&&s.substr(s.length-n.length)===n:t==="~="?(" "+s+" ").indexOf(n)>-1:t==="|="?s===n||s.substr(0,n.length+1)===n+"-":!1):!0}},CHILD:function(e,t,n,r){return e==="nth"?function(e){var t,i,s=e.parentNode;if(n===1&&r===0)return!0;if(s){i=0;for(t=s.firstChild;t;t=t.nextSibling)if(t.nodeType===1){i++;if(e===t)break}}return 
i-=r,i===n||i%n===0&&i/n>=0}:function(t){var n=t;switch(e){case"only":case"first":while(n=n.previousSibling)if(n.nodeType===1)return!1;if(e==="first")return!0;n=t;case"last":while(n=n.nextSibling)if(n.nodeType===1)return!1;return!0}}},PSEUDO:function(e,t){var n,r=i.pseudos[e]||i.setFilters[e.toLowerCase()]||nt.error("unsupported pseudo: "+e);return r[d]?r(t):r.length>1?(n=[e,e,"",t],i.setFilters.hasOwnProperty(e.toLowerCase())?N(function(e,n){var i,s=r(e,t),o=s.length;while(o--)i=T.call(e,s[o]),e[i]=!(n[i]=s[o])}):function(e){return r(e,0,n)}):r}},pseudos:{not:N(function(e){var t=[],n=[],r=a(e.replace(j,"$1"));return r[d]?N(function(e,t,n,i){var s,o=r(e,null,i,[]),u=e.length;while(u--)if(s=o[u])e[u]=!(t[u]=s)}):function(e,i,s){return t[0]=e,r(t,null,s,n),!n.pop()}}),has:N(function(e){return function(t){return nt(e,t).length>0}}),contains:N(function(e){return function(t){return(t.textContent||t.innerText||s(t)).indexOf(e)>-1}}),enabled:function(e){return e.disabled===!1},disabled:function(e){return e.disabled===!0},checked:function(e){var t=e.nodeName.toLowerCase();return t==="input"&&!!e.checked||t==="option"&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,e.selected===!0},parent:function(e){return!i.pseudos.empty(e)},empty:function(e){var t;e=e.firstChild;while(e){if(e.nodeName>"@"||(t=e.nodeType)===3||t===4)return!1;e=e.nextSibling}return!0},header:function(e){return X.test(e.nodeName)},text:function(e){var t,n;return e.nodeName.toLowerCase()==="input"&&(t=e.type)==="text"&&((n=e.getAttribute("type"))==null||n.toLowerCase()===t)},radio:rt("radio"),checkbox:rt("checkbox"),file:rt("file"),password:rt("password"),image:rt("image"),submit:it("submit"),reset:it("reset"),button:function(e){var t=e.nodeName.toLowerCase();return t==="input"&&e.type==="button"||t==="button"},input:function(e){return V.test(e.nodeName)},focus:function(e){var t=e.ownerDocument;return 
e===t.activeElement&&(!t.hasFocus||t.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},active:function(e){return e===e.ownerDocument.activeElement},first:st(function(){return[0]}),last:st(function(e,t){return[t-1]}),eq:st(function(e,t,n){return[n<0?n+t:n]}),even:st(function(e,t){for(var n=0;n=0;)e.push(r);return e}),gt:st(function(e,t,n){for(var r=n<0?n+t:n;++r",e.querySelectorAll("[selected]").length||i.push("\\["+O+"*(?:checked|disabled|ismap|multiple|readonly|selected|value)"),e.querySelectorAll(":checked").length||i.push(":checked")}),K(function(e){e.innerHTML="

      ",e.querySelectorAll("[test^='']").length&&i.push("[*^$]="+O+"*(?:\"\"|'')"),e.innerHTML="",e.querySelectorAll(":enabled").length||i.push(":enabled",":disabled")}),i=new RegExp(i.join("|")),vt=function(e,r,s,o,u){if(!o&&!u&&!i.test(e)){var a,f,l=!0,c=d,h=r,p=r.nodeType===9&&e;if(r.nodeType===1&&r.nodeName.toLowerCase()!=="object"){a=ut(e),(l=r.getAttribute("id"))?c=l.replace(n,"\\$&"):r.setAttribute("id",c),c="[id='"+c+"'] ",f=a.length;while(f--)a[f]=c+a[f].join("");h=z.test(e)&&r.parentNode||r,p=a.join(",")}if(p)try{return S.apply(s,x.call(h.querySelectorAll(p),0)),s}catch(v){}finally{l||r.removeAttribute("id")}}return t(e,r,s,o,u)},u&&(K(function(t){e=u.call(t,"div");try{u.call(t,"[test!='']:sizzle"),s.push("!=",H)}catch(n){}}),s=new RegExp(s.join("|")),nt.matchesSelector=function(t,n){n=n.replace(r,"='$1']");if(!o(t)&&!s.test(n)&&!i.test(n))try{var a=u.call(t,n);if(a||e||t.document&&t.document.nodeType!==11)return a}catch(f){}return nt(n,null,null,[t]).length>0})}(),i.pseudos.nth=i.pseudos.eq,i.filters=mt.prototype=i.pseudos,i.setFilters=new mt,nt.attr=v.attr,v.find=nt,v.expr=nt.selectors,v.expr[":"]=v.expr.pseudos,v.unique=nt.uniqueSort,v.text=nt.getText,v.isXMLDoc=nt.isXML,v.contains=nt.contains}(e);var nt=/Until$/,rt=/^(?:parents|prev(?:Until|All))/,it=/^.[^:#\[\.,]*$/,st=v.expr.match.needsContext,ot={children:!0,contents:!0,next:!0,prev:!0};v.fn.extend({find:function(e){var t,n,r,i,s,o,u=this;if(typeof e!="string")return v(e).filter(function(){for(t=0,n=u.length;t0)for(i=r;i=0:v.filter(e,this).length>0:this.filter(e).length>0)},closest:function(e,t){var n,r=0,i=this.length,s=[],o=st.test(e)||typeof e!="string"?v(e,t||this.context):0;for(;r-1:v.find.matchesSelector(n,e)){s.push(n);break}n=n.parentNode}}return s=s.length>1?v.unique(s):s,this.pushStack(s,"closest",e)},index:function(e){return e?typeof e=="string"?v.inArray(this[0],v(e)):v.inArray(e.jquery?e[0]:e,this):this[0]&&this[0].parentNode?this.prevAll().length:-1},add:function(e,t){var n=typeof 
e=="string"?v(e,t):v.makeArray(e&&e.nodeType?[e]:e),r=v.merge(this.get(),n);return this.pushStack(ut(n[0])||ut(r[0])?r:v.unique(r))},addBack:function(e){return this.add(e==null?this.prevObject:this.prevObject.filter(e))}}),v.fn.andSelf=v.fn.addBack,v.each({parent:function(e){var t=e.parentNode;return t&&t.nodeType!==11?t:null},parents:function(e){return v.dir(e,"parentNode")},parentsUntil:function(e,t,n){return v.dir(e,"parentNode",n)},next:function(e){return at(e,"nextSibling")},prev:function(e){return at(e,"previousSibling")},nextAll:function(e){return v.dir(e,"nextSibling")},prevAll:function(e){return v.dir(e,"previousSibling")},nextUntil:function(e,t,n){return v.dir(e,"nextSibling",n)},prevUntil:function(e,t,n){return v.dir(e,"previousSibling",n)},siblings:function(e){return v.sibling((e.parentNode||{}).firstChild,e)},children:function(e){return v.sibling(e.firstChild)},contents:function(e){return v.nodeName(e,"iframe")?e.contentDocument||e.contentWindow.document:v.merge([],e.childNodes)}},function(e,t){v.fn[e]=function(n,r){var i=v.map(this,t,n);return nt.test(e)||(r=n),r&&typeof r=="string"&&(i=v.filter(r,i)),i=this.length>1&&!ot[e]?v.unique(i):i,this.length>1&&rt.test(e)&&(i=i.reverse()),this.pushStack(i,e,l.call(arguments).join(","))}}),v.extend({filter:function(e,t,n){return n&&(e=":not("+e+")"),t.length===1?v.find.matchesSelector(t[0],e)?[t[0]]:[]:v.find.matches(e,t)},dir:function(e,n,r){var i=[],s=e[n];while(s&&s.nodeType!==9&&(r===t||s.nodeType!==1||!v(s).is(r)))s.nodeType===1&&i.push(s),s=s[n];return i},sibling:function(e,t){var n=[];for(;e;e=e.nextSibling)e.nodeType===1&&e!==t&&n.push(e);return n}});var ct="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",ht=/ 
jQuery\d+="(?:null|\d+)"/g,pt=/^\s+/,dt=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,vt=/<([\w:]+)/,mt=/]","i"),Et=/^(?:checkbox|radio)$/,St=/checked\s*(?:[^=]|=\s*.checked.)/i,xt=/\/(java|ecma)script/i,Tt=/^\s*\s*$/g,Nt={option:[1,""],legend:[1,"
      ","
      "],thead:[1,"","
      "],tr:[2,"","
      "],td:[3,"","
      "],col:[2,"","
      "],area:[1,"",""],_default:[0,"",""]},Ct=lt(i),kt=Ct.appendChild(i.createElement("div"));Nt.optgroup=Nt.option,Nt.tbody=Nt.tfoot=Nt.colgroup=Nt.caption=Nt.thead,Nt.th=Nt.td,v.support.htmlSerialize||(Nt._default=[1,"X
      ","
      "]),v.fn.extend({text:function(e){return v.access(this,function(e){return e===t?v.text(this):this.empty().append((this[0]&&this[0].ownerDocument||i).createTextNode(e))},null,e,arguments.length)},wrapAll:function(e){if(v.isFunction(e))return this.each(function(t){v(this).wrapAll(e.call(this,t))});if(this[0]){var t=v(e,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstChild&&e.firstChild.nodeType===1)e=e.firstChild;return e}).append(this)}return this},wrapInner:function(e){return v.isFunction(e)?this.each(function(t){v(this).wrapInner(e.call(this,t))}):this.each(function(){var t=v(this),n=t.contents();n.length?n.wrapAll(e):t.append(e)})},wrap:function(e){var t=v.isFunction(e);return this.each(function(n){v(this).wrapAll(t?e.call(this,n):e)})},unwrap:function(){return this.parent().each(function(){v.nodeName(this,"body")||v(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(e){(this.nodeType===1||this.nodeType===11)&&this.appendChild(e)})},prepend:function(){return this.domManip(arguments,!0,function(e){(this.nodeType===1||this.nodeType===11)&&this.insertBefore(e,this.firstChild)})},before:function(){if(!ut(this[0]))return this.domManip(arguments,!1,function(e){this.parentNode.insertBefore(e,this)});if(arguments.length){var e=v.clean(arguments);return this.pushStack(v.merge(e,this),"before",this.selector)}},after:function(){if(!ut(this[0]))return this.domManip(arguments,!1,function(e){this.parentNode.insertBefore(e,this.nextSibling)});if(arguments.length){var e=v.clean(arguments);return this.pushStack(v.merge(this,e),"after",this.selector)}},remove:function(e,t){var n,r=0;for(;(n=this[r])!=null;r++)if(!e||v.filter(e,[n]).length)!t&&n.nodeType===1&&(v.cleanData(n.getElementsByTagName("*")),v.cleanData([n])),n.parentNode&&n.parentNode.removeChild(n);return this},empty:function(){var 
e,t=0;for(;(e=this[t])!=null;t++){e.nodeType===1&&v.cleanData(e.getElementsByTagName("*"));while(e.firstChild)e.removeChild(e.firstChild)}return this},clone:function(e,t){return e=e==null?!1:e,t=t==null?e:t,this.map(function(){return v.clone(this,e,t)})},html:function(e){return v.access(this,function(e){var n=this[0]||{},r=0,i=this.length;if(e===t)return n.nodeType===1?n.innerHTML.replace(ht,""):t;if(typeof e=="string"&&!yt.test(e)&&(v.support.htmlSerialize||!wt.test(e))&&(v.support.leadingWhitespace||!pt.test(e))&&!Nt[(vt.exec(e)||["",""])[1].toLowerCase()]){e=e.replace(dt,"<$1>");try{for(;r1&&typeof f=="string"&&St.test(f))return this.each(function(){v(this).domManip(e,n,r)});if(v.isFunction(f))return this.each(function(i){var s=v(this);e[0]=f.call(this,i,n?s.html():t),s.domManip(e,n,r)});if(this[0]){i=v.buildFragment(e,this,l),o=i.fragment,s=o.firstChild,o.childNodes.length===1&&(o=s);if(s){n=n&&v.nodeName(s,"tr");for(u=i.cacheable||c-1;a0?this.clone(!0):this).get(),v(o[i])[t](r),s=s.concat(r);return this.pushStack(s,e,o.selector)}}),v.extend({clone:function(e,t,n){var r,i,s,o;v.support.html5Clone||v.isXMLDoc(e)||!wt.test("<"+e.nodeName+">")?o=e.cloneNode(!0):(kt.innerHTML=e.outerHTML,kt.removeChild(o=kt.firstChild));if((!v.support.noCloneEvent||!v.support.noCloneChecked)&&(e.nodeType===1||e.nodeType===11)&&!v.isXMLDoc(e)){Ot(e,o),r=Mt(e),i=Mt(o);for(s=0;r[s];++s)i[s]&&Ot(r[s],i[s])}if(t){At(e,o);if(n){r=Mt(e),i=Mt(o);for(s=0;r[s];++s)At(r[s],i[s])}}return r=i=null,o},clean:function(e,t,n,r){var s,o,u,a,f,l,c,h,p,d,m,g,y=t===i&&Ct,b=[];if(!t||typeof t.createDocumentFragment=="undefined")t=i;for(s=0;(u=e[s])!=null;s++){typeof u=="number"&&(u+="");if(!u)continue;if(typeof 
u=="string")if(!gt.test(u))u=t.createTextNode(u);else{y=y||lt(t),c=t.createElement("div"),y.appendChild(c),u=u.replace(dt,"<$1>"),a=(vt.exec(u)||["",""])[1].toLowerCase(),f=Nt[a]||Nt._default,l=f[0],c.innerHTML=f[1]+u+f[2];while(l--)c=c.lastChild;if(!v.support.tbody){h=mt.test(u),p=a==="table"&&!h?c.firstChild&&c.firstChild.childNodes:f[1]===""&&!h?c.childNodes:[];for(o=p.length-1;o>=0;--o)v.nodeName(p[o],"tbody")&&!p[o].childNodes.length&&p[o].parentNode.removeChild(p[o])}!v.support.leadingWhitespace&&pt.test(u)&&c.insertBefore(t.createTextNode(pt.exec(u)[0]),c.firstChild),u=c.childNodes,c.parentNode.removeChild(c)}u.nodeType?b.push(u):v.merge(b,u)}c&&(u=c=y=null);if(!v.support.appendChecked)for(s=0;(u=b[s])!=null;s++)v.nodeName(u,"input")?_t(u):typeof u.getElementsByTagName!="undefined"&&v.grep(u.getElementsByTagName("input"),_t);if(n){m=function(e){if(!e.type||xt.test(e.type))return r?r.push(e.parentNode?e.parentNode.removeChild(e):e):n.appendChild(e)};for(s=0;(u=b[s])!=null;s++)if(!v.nodeName(u,"script")||!m(u))n.appendChild(u),typeof u.getElementsByTagName!="undefined"&&(g=v.grep(v.merge([],u.getElementsByTagName("script")),m),b.splice.apply(b,[s+1,0].concat(g)),s+=g.length)}return b},cleanData:function(e,t){var n,r,i,s,o=0,u=v.expando,a=v.cache,f=v.support.deleteExpando,l=v.event.special;for(;(i=e[o])!=null;o++)if(t||v.acceptData(i)){r=i[u],n=r&&a[r];if(n){if(n.events)for(s in n.events)l[s]?v.event.remove(i,s):v.removeEvent(i,s,n.handle);a[r]&&(delete a[r],f?delete i[u]:i.removeAttribute?i.removeAttribute(u):i[u]=null,v.deletedIds.push(r))}}}}),function(){var e,t;v.uaMatch=function(e){e=e.toLowerCase();var t=/(chrome)[ \/]([\w.]+)/.exec(e)||/(webkit)[ \/]([\w.]+)/.exec(e)||/(opera)(?:.*version|)[ \/]([\w.]+)/.exec(e)||/(msie) ([\w.]+)/.exec(e)||e.indexOf("compatible")<0&&/(mozilla)(?:.*? 
rv:([\w.]+)|)/.exec(e)||[];return{browser:t[1]||"",version:t[2]||"0"}},e=v.uaMatch(o.userAgent),t={},e.browser&&(t[e.browser]=!0,t.version=e.version),t.chrome?t.webkit=!0:t.webkit&&(t.safari=!0),v.browser=t,v.sub=function(){function e(t,n){return new e.fn.init(t,n)}v.extend(!0,e,this),e.superclass=this,e.fn=e.prototype=this(),e.fn.constructor=e,e.sub=this.sub,e.fn.init=function(r,i){return i&&i instanceof v&&!(i instanceof e)&&(i=e(i)),v.fn.init.call(this,r,i,t)},e.fn.init.prototype=e.fn;var t=e(i);return e}}();var Dt,Pt,Ht,Bt=/alpha\([^)]*\)/i,jt=/opacity=([^)]*)/,Ft=/^(top|right|bottom|left)$/,It=/^(none|table(?!-c[ea]).+)/,qt=/^margin/,Rt=new RegExp("^("+m+")(.*)$","i"),Ut=new RegExp("^("+m+")(?!px)[a-z%]+$","i"),zt=new RegExp("^([-+])=("+m+")","i"),Wt={BODY:"block"},Xt={position:"absolute",visibility:"hidden",display:"block"},Vt={letterSpacing:0,fontWeight:400},$t=["Top","Right","Bottom","Left"],Jt=["Webkit","O","Moz","ms"],Kt=v.fn.toggle;v.fn.extend({css:function(e,n){return v.access(this,function(e,n,r){return r!==t?v.style(e,n,r):v.css(e,n)},e,n,arguments.length>1)},show:function(){return Yt(this,!0)},hide:function(){return Yt(this)},toggle:function(e,t){var n=typeof e=="boolean";return v.isFunction(e)&&v.isFunction(t)?Kt.apply(this,arguments):this.each(function(){(n?e:Gt(this))?v(this).show():v(this).hide()})}}),v.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=Dt(e,"opacity");return n===""?"1":n}}}},cssNumber:{fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":v.support.cssFloat?"cssFloat":"styleFloat"},style:function(e,n,r,i){if(!e||e.nodeType===3||e.nodeType===8||!e.style)return;var s,o,u,a=v.camelCase(n),f=e.style;n=v.cssProps[a]||(v.cssProps[a]=Qt(f,a)),u=v.cssHooks[n]||v.cssHooks[a];if(r===t)return u&&"get"in u&&(s=u.get(e,!1,i))!==t?s:f[n];o=typeof 
r,o==="string"&&(s=zt.exec(r))&&(r=(s[1]+1)*s[2]+parseFloat(v.css(e,n)),o="number");if(r==null||o==="number"&&isNaN(r))return;o==="number"&&!v.cssNumber[a]&&(r+="px");if(!u||!("set"in u)||(r=u.set(e,r,i))!==t)try{f[n]=r}catch(l){}},css:function(e,n,r,i){var s,o,u,a=v.camelCase(n);return n=v.cssProps[a]||(v.cssProps[a]=Qt(e.style,a)),u=v.cssHooks[n]||v.cssHooks[a],u&&"get"in u&&(s=u.get(e,!0,i)),s===t&&(s=Dt(e,n)),s==="normal"&&n in Vt&&(s=Vt[n]),r||i!==t?(o=parseFloat(s),r||v.isNumeric(o)?o||0:s):s},swap:function(e,t,n){var r,i,s={};for(i in t)s[i]=e.style[i],e.style[i]=t[i];r=n.call(e);for(i in t)e.style[i]=s[i];return r}}),e.getComputedStyle?Dt=function(t,n){var r,i,s,o,u=e.getComputedStyle(t,null),a=t.style;return u&&(r=u.getPropertyValue(n)||u[n],r===""&&!v.contains(t.ownerDocument,t)&&(r=v.style(t,n)),Ut.test(r)&&qt.test(n)&&(i=a.width,s=a.minWidth,o=a.maxWidth,a.minWidth=a.maxWidth=a.width=r,r=u.width,a.width=i,a.minWidth=s,a.maxWidth=o)),r}:i.documentElement.currentStyle&&(Dt=function(e,t){var n,r,i=e.currentStyle&&e.currentStyle[t],s=e.style;return i==null&&s&&s[t]&&(i=s[t]),Ut.test(i)&&!Ft.test(t)&&(n=s.left,r=e.runtimeStyle&&e.runtimeStyle.left,r&&(e.runtimeStyle.left=e.currentStyle.left),s.left=t==="fontSize"?"1em":i,i=s.pixelLeft+"px",s.left=n,r&&(e.runtimeStyle.left=r)),i===""?"auto":i}),v.each(["height","width"],function(e,t){v.cssHooks[t]={get:function(e,n,r){if(n)return e.offsetWidth===0&&It.test(Dt(e,"display"))?v.swap(e,Xt,function(){return tn(e,t,r)}):tn(e,t,r)},set:function(e,n,r){return Zt(e,n,r?en(e,t,r,v.support.boxSizing&&v.css(e,"boxSizing")==="border-box"):0)}}}),v.support.opacity||(v.cssHooks.opacity={get:function(e,t){return jt.test((t&&e.currentStyle?e.currentStyle.filter:e.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":t?"1":""},set:function(e,t){var 
n=e.style,r=e.currentStyle,i=v.isNumeric(t)?"alpha(opacity="+t*100+")":"",s=r&&r.filter||n.filter||"";n.zoom=1;if(t>=1&&v.trim(s.replace(Bt,""))===""&&n.removeAttribute){n.removeAttribute("filter");if(r&&!r.filter)return}n.filter=Bt.test(s)?s.replace(Bt,i):s+" "+i}}),v(function(){v.support.reliableMarginRight||(v.cssHooks.marginRight={get:function(e,t){return v.swap(e,{display:"inline-block"},function(){if(t)return Dt(e,"marginRight")})}}),!v.support.pixelPosition&&v.fn.position&&v.each(["top","left"],function(e,t){v.cssHooks[t]={get:function(e,n){if(n){var r=Dt(e,t);return Ut.test(r)?v(e).position()[t]+"px":r}}}})}),v.expr&&v.expr.filters&&(v.expr.filters.hidden=function(e){return e.offsetWidth===0&&e.offsetHeight===0||!v.support.reliableHiddenOffsets&&(e.style&&e.style.display||Dt(e,"display"))==="none"},v.expr.filters.visible=function(e){return!v.expr.filters.hidden(e)}),v.each({margin:"",padding:"",border:"Width"},function(e,t){v.cssHooks[e+t]={expand:function(n){var r,i=typeof n=="string"?n.split(" "):[n],s={};for(r=0;r<4;r++)s[e+$t[r]+t]=i[r]||i[r-2]||i[0];return s}},qt.test(e)||(v.cssHooks[e+t].set=Zt)});var rn=/%20/g,sn=/\[\]$/,on=/\r?\n/g,un=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,an=/^(?:select|textarea)/i;v.fn.extend({serialize:function(){return v.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?v.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||an.test(this.nodeName)||un.test(this.type))}).map(function(e,t){var n=v(this).val();return n==null?null:v.isArray(n)?v.map(n,function(e,n){return{name:t.name,value:e.replace(on,"\r\n")}}):{name:t.name,value:n.replace(on,"\r\n")}}).get()}}),v.param=function(e,n){var 
r,i=[],s=function(e,t){t=v.isFunction(t)?t():t==null?"":t,i[i.length]=encodeURIComponent(e)+"="+encodeURIComponent(t)};n===t&&(n=v.ajaxSettings&&v.ajaxSettings.traditional);if(v.isArray(e)||e.jquery&&!v.isPlainObject(e))v.each(e,function(){s(this.name,this.value)});else for(r in e)fn(r,e[r],n,s);return i.join("&").replace(rn,"+")};var ln,cn,hn=/#.*$/,pn=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,dn=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,vn=/^(?:GET|HEAD)$/,mn=/^\/\//,gn=/\?/,yn=/)<[^<]*)*<\/script>/gi,bn=/([?&])_=[^&]*/,wn=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/,En=v.fn.load,Sn={},xn={},Tn=["*/"]+["*"];try{cn=s.href}catch(Nn){cn=i.createElement("a"),cn.href="",cn=cn.href}ln=wn.exec(cn.toLowerCase())||[],v.fn.load=function(e,n,r){if(typeof e!="string"&&En)return En.apply(this,arguments);if(!this.length)return this;var i,s,o,u=this,a=e.indexOf(" ");return a>=0&&(i=e.slice(a,e.length),e=e.slice(0,a)),v.isFunction(n)?(r=n,n=t):n&&typeof n=="object"&&(s="POST"),v.ajax({url:e,type:s,dataType:"html",data:n,complete:function(e,t){r&&u.each(r,o||[e.responseText,t,e])}}).done(function(e){o=arguments,u.html(i?v("
      ").append(e.replace(yn,"")).find(i):e)}),this},v.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(e,t){v.fn[t]=function(e){return this.on(t,e)}}),v.each(["get","post"],function(e,n){v[n]=function(e,r,i,s){return v.isFunction(r)&&(s=s||i,i=r,r=t),v.ajax({type:n,url:e,data:r,success:i,dataType:s})}}),v.extend({getScript:function(e,n){return v.get(e,t,n,"script")},getJSON:function(e,t,n){return v.get(e,t,n,"json")},ajaxSetup:function(e,t){return t?Ln(e,v.ajaxSettings):(t=e,e=v.ajaxSettings),Ln(e,t),e},ajaxSettings:{url:cn,isLocal:dn.test(ln[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded; charset=UTF-8",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":Tn},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":e.String,"text html":!0,"text json":v.parseJSON,"text xml":v.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:Cn(Sn),ajaxTransport:Cn(xn),ajax:function(e,n){function T(e,n,s,a){var l,y,b,w,S,T=n;if(E===2)return;E=2,u&&clearTimeout(u),o=t,i=a||"",x.readyState=e>0?4:0,s&&(w=An(c,x,s));if(e>=200&&e<300||e===304)c.ifModified&&(S=x.getResponseHeader("Last-Modified"),S&&(v.lastModified[r]=S),S=x.getResponseHeader("Etag"),S&&(v.etag[r]=S)),e===304?(T="notmodified",l=!0):(l=On(c,w),T=l.state,y=l.data,b=l.error,l=!b);else{b=T;if(!T||e)T="error",e<0&&(e=0)}x.status=e,x.statusText=(n||T)+"",l?d.resolveWith(h,[y,T,x]):d.rejectWith(h,[x,T,b]),x.statusCode(g),g=t,f&&p.trigger("ajax"+(l?"Success":"Error"),[x,c,l?y:b]),m.fireWith(h,[x,T]),f&&(p.trigger("ajaxComplete",[x,c]),--v.active||v.event.trigger("ajaxStop"))}typeof e=="object"&&(n=e,e=t),n=n||{};var r,i,s,o,u,a,f,l,c=v.ajaxSetup({},n),h=c.context||c,p=h!==c&&(h.nodeType||h instanceof v)?v(h):v.event,d=v.Deferred(),m=v.Callbacks("once 
memory"),g=c.statusCode||{},b={},w={},E=0,S="canceled",x={readyState:0,setRequestHeader:function(e,t){if(!E){var n=e.toLowerCase();e=w[n]=w[n]||e,b[e]=t}return this},getAllResponseHeaders:function(){return E===2?i:null},getResponseHeader:function(e){var n;if(E===2){if(!s){s={};while(n=pn.exec(i))s[n[1].toLowerCase()]=n[2]}n=s[e.toLowerCase()]}return n===t?null:n},overrideMimeType:function(e){return E||(c.mimeType=e),this},abort:function(e){return e=e||S,o&&o.abort(e),T(0,e),this}};d.promise(x),x.success=x.done,x.error=x.fail,x.complete=m.add,x.statusCode=function(e){if(e){var t;if(E<2)for(t in e)g[t]=[g[t],e[t]];else t=e[x.status],x.always(t)}return this},c.url=((e||c.url)+"").replace(hn,"").replace(mn,ln[1]+"//"),c.dataTypes=v.trim(c.dataType||"*").toLowerCase().split(y),c.crossDomain==null&&(a=wn.exec(c.url.toLowerCase()),c.crossDomain=!(!a||a[1]===ln[1]&&a[2]===ln[2]&&(a[3]||(a[1]==="http:"?80:443))==(ln[3]||(ln[1]==="http:"?80:443)))),c.data&&c.processData&&typeof c.data!="string"&&(c.data=v.param(c.data,c.traditional)),kn(Sn,c,n,x);if(E===2)return x;f=c.global,c.type=c.type.toUpperCase(),c.hasContent=!vn.test(c.type),f&&v.active++===0&&v.event.trigger("ajaxStart");if(!c.hasContent){c.data&&(c.url+=(gn.test(c.url)?"&":"?")+c.data,delete c.data),r=c.url;if(c.cache===!1){var N=v.now(),C=c.url.replace(bn,"$1_="+N);c.url=C+(C===c.url?(gn.test(c.url)?"&":"?")+"_="+N:"")}}(c.data&&c.hasContent&&c.contentType!==!1||n.contentType)&&x.setRequestHeader("Content-Type",c.contentType),c.ifModified&&(r=r||c.url,v.lastModified[r]&&x.setRequestHeader("If-Modified-Since",v.lastModified[r]),v.etag[r]&&x.setRequestHeader("If-None-Match",v.etag[r])),x.setRequestHeader("Accept",c.dataTypes[0]&&c.accepts[c.dataTypes[0]]?c.accepts[c.dataTypes[0]]+(c.dataTypes[0]!=="*"?", "+Tn+"; q=0.01":""):c.accepts["*"]);for(l in c.headers)x.setRequestHeader(l,c.headers[l]);if(!c.beforeSend||c.beforeSend.call(h,x,c)!==!1&&E!==2){S="abort";for(l 
in{success:1,error:1,complete:1})x[l](c[l]);o=kn(xn,c,n,x);if(!o)T(-1,"No Transport");else{x.readyState=1,f&&p.trigger("ajaxSend",[x,c]),c.async&&c.timeout>0&&(u=setTimeout(function(){x.abort("timeout")},c.timeout));try{E=1,o.send(b,T)}catch(k){if(!(E<2))throw k;T(-1,k)}}return x}return x.abort()},active:0,lastModified:{},etag:{}});var Mn=[],_n=/\?/,Dn=/(=)\?(?=&|$)|\?\?/,Pn=v.now();v.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Mn.pop()||v.expando+"_"+Pn++;return this[e]=!0,e}}),v.ajaxPrefilter("json jsonp",function(n,r,i){var s,o,u,a=n.data,f=n.url,l=n.jsonp!==!1,c=l&&Dn.test(f),h=l&&!c&&typeof a=="string"&&!(n.contentType||"").indexOf("application/x-www-form-urlencoded")&&Dn.test(a);if(n.dataTypes[0]==="jsonp"||c||h)return s=n.jsonpCallback=v.isFunction(n.jsonpCallback)?n.jsonpCallback():n.jsonpCallback,o=e[s],c?n.url=f.replace(Dn,"$1"+s):h?n.data=a.replace(Dn,"$1"+s):l&&(n.url+=(_n.test(f)?"&":"?")+n.jsonp+"="+s),n.converters["script json"]=function(){return u||v.error(s+" was not called"),u[0]},n.dataTypes[0]="json",e[s]=function(){u=arguments},i.always(function(){e[s]=o,n[s]&&(n.jsonpCallback=r.jsonpCallback,Mn.push(s)),u&&v.isFunction(o)&&o(u[0]),u=o=t}),"script"}),v.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(e){return v.globalEval(e),e}}}),v.ajaxPrefilter("script",function(e){e.cache===t&&(e.cache=!1),e.crossDomain&&(e.type="GET",e.global=!1)}),v.ajaxTransport("script",function(e){if(e.crossDomain){var 
n,r=i.head||i.getElementsByTagName("head")[0]||i.documentElement;return{send:function(s,o){n=i.createElement("script"),n.async="async",e.scriptCharset&&(n.charset=e.scriptCharset),n.src=e.url,n.onload=n.onreadystatechange=function(e,i){if(i||!n.readyState||/loaded|complete/.test(n.readyState))n.onload=n.onreadystatechange=null,r&&n.parentNode&&r.removeChild(n),n=t,i||o(200,"success")},r.insertBefore(n,r.firstChild)},abort:function(){n&&n.onload(0,1)}}}});var Hn,Bn=e.ActiveXObject?function(){for(var e in Hn)Hn[e](0,1)}:!1,jn=0;v.ajaxSettings.xhr=e.ActiveXObject?function(){return!this.isLocal&&Fn()||In()}:Fn,function(e){v.extend(v.support,{ajax:!!e,cors:!!e&&"withCredentials"in e})}(v.ajaxSettings.xhr()),v.support.ajax&&v.ajaxTransport(function(n){if(!n.crossDomain||v.support.cors){var r;return{send:function(i,s){var o,u,a=n.xhr();n.username?a.open(n.type,n.url,n.async,n.username,n.password):a.open(n.type,n.url,n.async);if(n.xhrFields)for(u in n.xhrFields)a[u]=n.xhrFields[u];n.mimeType&&a.overrideMimeType&&a.overrideMimeType(n.mimeType),!n.crossDomain&&!i["X-Requested-With"]&&(i["X-Requested-With"]="XMLHttpRequest");try{for(u in i)a.setRequestHeader(u,i[u])}catch(f){}a.send(n.hasContent&&n.data||null),r=function(e,i){var u,f,l,c,h;try{if(r&&(i||a.readyState===4)){r=t,o&&(a.onreadystatechange=v.noop,Bn&&delete Hn[o]);if(i)a.readyState!==4&&a.abort();else{u=a.status,l=a.getAllResponseHeaders(),c={},h=a.responseXML,h&&h.documentElement&&(c.xml=h);try{c.text=a.responseText}catch(p){}try{f=a.statusText}catch(p){f=""}!u&&n.isLocal&&!n.crossDomain?u=c.text?200:404:u===1223&&(u=204)}}}catch(d){i||s(-1,d)}c&&s(u,f,c,l)},n.async?a.readyState===4?setTimeout(r,0):(o=++jn,Bn&&(Hn||(Hn={},v(e).unload(Bn)),Hn[o]=r),a.onreadystatechange=r):r()},abort:function(){r&&r(0,1)}}}});var qn,Rn,Un=/^(?:toggle|show|hide)$/,zn=new RegExp("^(?:([-+])=|)("+m+")([a-z%]*)$","i"),Wn=/queueHooks$/,Xn=[Gn],Vn={"*":[function(e,t){var 
n,r,i=this.createTween(e,t),s=zn.exec(t),o=i.cur(),u=+o||0,a=1,f=20;if(s){n=+s[2],r=s[3]||(v.cssNumber[e]?"":"px");if(r!=="px"&&u){u=v.css(i.elem,e,!0)||n||1;do a=a||".5",u/=a,v.style(i.elem,e,u+r);while(a!==(a=i.cur()/o)&&a!==1&&--f)}i.unit=r,i.start=u,i.end=s[1]?u+(s[1]+1)*n:n}return i}]};v.Animation=v.extend(Kn,{tweener:function(e,t){v.isFunction(e)?(t=e,e=["*"]):e=e.split(" ");var n,r=0,i=e.length;for(;r-1,f={},l={},c,h;a?(l=i.position(),c=l.top,h=l.left):(c=parseFloat(o)||0,h=parseFloat(u)||0),v.isFunction(t)&&(t=t.call(e,n,s)),t.top!=null&&(f.top=t.top-s.top+c),t.left!=null&&(f.left=t.left-s.left+h),"using"in t?t.using.call(e,f):i.css(f)}},v.fn.extend({position:function(){if(!this[0])return;var e=this[0],t=this.offsetParent(),n=this.offset(),r=er.test(t[0].nodeName)?{top:0,left:0}:t.offset();return n.top-=parseFloat(v.css(e,"marginTop"))||0,n.left-=parseFloat(v.css(e,"marginLeft"))||0,r.top+=parseFloat(v.css(t[0],"borderTopWidth"))||0,r.left+=parseFloat(v.css(t[0],"borderLeftWidth"))||0,{top:n.top-r.top,left:n.left-r.left}},offsetParent:function(){return this.map(function(){var e=this.offsetParent||i.body;while(e&&!er.test(e.nodeName)&&v.css(e,"position")==="static")e=e.offsetParent;return e||i.body})}}),v.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(e,n){var r=/Y/.test(n);v.fn[e]=function(i){return v.access(this,function(e,i,s){var o=tr(e);if(s===t)return o?n in o?o[n]:o.document.documentElement[i]:e[i];o?o.scrollTo(r?v(o).scrollLeft():s,r?s:v(o).scrollTop()):e[i]=s},e,i,arguments.length,null)}}),v.each({Height:"height",Width:"width"},function(e,n){v.each({padding:"inner"+e,content:n,"":"outer"+e},function(r,i){v.fn[i]=function(i,s){var o=arguments.length&&(r||typeof i!="boolean"),u=r||(i===!0||s===!0?"margin":"border");return v.access(this,function(n,r,i){var s;return 
v.isWindow(n)?n.document.documentElement["client"+e]:n.nodeType===9?(s=n.documentElement,Math.max(n.body["scroll"+e],s["scroll"+e],n.body["offset"+e],s["offset"+e],s["client"+e])):i===t?v.css(n,r,i,u):v.style(n,r,i,u)},n,o?i:t,o,null)}})}),e.jQuery=e.$=v,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return v})})(window); \ No newline at end of file diff --git a/rpki/gui/app/templates/404.html b/rpki/gui/app/templates/404.html new file mode 100644 index 00000000..76ef3aee --- /dev/null +++ b/rpki/gui/app/templates/404.html @@ -0,0 +1,11 @@ +{% extends "base.html" %} + +{% block content %} + + +
      + Whoops! I could not find the page you requested. +
      +{% endblock content %} diff --git a/rpki/gui/app/templates/500.html b/rpki/gui/app/templates/500.html new file mode 100644 index 00000000..216fe8ae --- /dev/null +++ b/rpki/gui/app/templates/500.html @@ -0,0 +1,11 @@ +{% extends "base.html" %} + +{% block content %} + + +
      + Whoops! The administrator has been notified of this error. +
      +{% endblock content %} diff --git a/rpki/gui/app/templates/app/alert_confirm_clear.html b/rpki/gui/app/templates/app/alert_confirm_clear.html new file mode 100644 index 00000000..5d7fcf04 --- /dev/null +++ b/rpki/gui/app/templates/app/alert_confirm_clear.html @@ -0,0 +1,21 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} + + +
      +
      + Please confirm that you would like to delete all alerts. +
      +
      + {% csrf_token %} +
      + + Cancel +
      + +
      +{% endblock %} diff --git a/rpki/gui/app/templates/app/alert_confirm_delete.html b/rpki/gui/app/templates/app/alert_confirm_delete.html new file mode 100644 index 00000000..78c84917 --- /dev/null +++ b/rpki/gui/app/templates/app/alert_confirm_delete.html @@ -0,0 +1,17 @@ +{% extends "app/alert_detail.html" %} +{% load url from future %} + +{% block action %} +
      +
      + Please confirm that you would like to delete this alert. +
      +
      + {% csrf_token %} +
      + + Cancel +
      + +
      +{% endblock action %} diff --git a/rpki/gui/app/templates/app/alert_detail.html b/rpki/gui/app/templates/app/alert_detail.html new file mode 100644 index 00000000..b3a73b7e --- /dev/null +++ b/rpki/gui/app/templates/app/alert_detail.html @@ -0,0 +1,31 @@ +{% extends "app/app_base.html" %} +{% load url from future %} +{% load app_extras %} + +{% block content %} + + +
      +
      + + + + + + +
      Date: {{ object.when }}
      Severity:{{ object.get_severity_display }}
      + +

      +{{ object.text }} + + + +{% block action %} +

      +{% endblock action %} + +{% endblock content %} diff --git a/rpki/gui/app/templates/app/alert_list.html b/rpki/gui/app/templates/app/alert_list.html new file mode 100644 index 00000000..dd0530e4 --- /dev/null +++ b/rpki/gui/app/templates/app/alert_list.html @@ -0,0 +1,31 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} + + + + + + + + + + + + {% for obj in object_list %} + + + + + + {% endfor %} + +
      #DateSubject
      {# #}{{ obj.when }}{{ obj.subject }}
      + + +{% endblock content %} diff --git a/rpki/gui/app/templates/app/app_base.html b/rpki/gui/app/templates/app/app_base.html new file mode 100644 index 00000000..4fb5f731 --- /dev/null +++ b/rpki/gui/app/templates/app/app_base.html @@ -0,0 +1,31 @@ +{% extends "base.html" %} +{# this can be removed when django 1.4 is EOL, because it is the default behavior in 1.5 #} +{% load url from future %} +{% load app_extras %} + +{# This template defines the common structure for the rpki.gui.app application. #} + +{% block sidebar %} + +

      {{ request.session.handle }}

      + +{# common navigation #} + + + +{% endblock sidebar %} diff --git a/rpki/gui/app/templates/app/app_confirm_delete.html b/rpki/gui/app/templates/app/app_confirm_delete.html new file mode 100644 index 00000000..7c35a733 --- /dev/null +++ b/rpki/gui/app/templates/app/app_confirm_delete.html @@ -0,0 +1,21 @@ +{% extends "app/app_base.html" %} + +{% block content %} +
      +

      {{ form_title }}

      +
      + +
      +

      Warning!

      + Please confirm that you would like to delete this object. +
      + +
      + {% csrf_token %} + {{ form }} +
      + + Cancel +
      +
      +{% endblock content %} diff --git a/rpki/gui/app/templates/app/app_form.html b/rpki/gui/app/templates/app/app_form.html new file mode 100644 index 00000000..b6ab60a2 --- /dev/null +++ b/rpki/gui/app/templates/app/app_form.html @@ -0,0 +1,19 @@ +{% extends "app/app_base.html" %} + +{% block content %} + + +{# allow this template to be subclassed to fill in extra information, such as warnings #} +{% block form_info %}{% endblock form_info %} + +
      + {% csrf_token %} + {% include "app/bootstrap_form.html" %} +
      + + Cancel +
      +
      +{% endblock %} diff --git a/rpki/gui/app/templates/app/bootstrap_form.html b/rpki/gui/app/templates/app/bootstrap_form.html new file mode 100644 index 00000000..c6fd5424 --- /dev/null +++ b/rpki/gui/app/templates/app/bootstrap_form.html @@ -0,0 +1,26 @@ +{% if form.non_field_errors %} +
      + {{ form.non_field_errors }} +
      +{% endif %} + +{% for field in form %} + +{% if field.is_hidden %} +{{ field }} +{% else %} +
      + +
      + {{ field }} + {% if field.help_text %} + {{ field.help_text }} + {% endif %} + {% if field.errors %} + {{ field.errors }} + {% endif %} +
      +
      +{% endif %} + +{% endfor %} diff --git a/rpki/gui/app/templates/app/child_detail.html b/rpki/gui/app/templates/app/child_detail.html new file mode 100644 index 00000000..8178e179 --- /dev/null +++ b/rpki/gui/app/templates/app/child_detail.html @@ -0,0 +1,48 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} + + +
      +

      Valid until {{ object.valid_until }} +

      + +
      +
      + Addresses + {% if object.address_ranges.all %} +
        + {% for a in object.address_ranges.all %} +
      • {{ a.as_resource_range }}
      • + {% endfor %} +
      + {% else %} +

      none

      + {% endif %} +
      +
      + ASNs + {% if object.asns.all %} +
        + {% for a in object.asns.all %} +
      • {{ a.as_resource_range }}
      • + {% endfor %} +
      + {% else %} +

      none

      + {% endif %} +
      +
      + +{% block action %} + Edit + AS + Prefix + Export + Delete +{% endblock %} + +{% endblock %} diff --git a/rpki/gui/app/templates/app/client_detail.html b/rpki/gui/app/templates/app/client_detail.html new file mode 100644 index 00000000..3117e859 --- /dev/null +++ b/rpki/gui/app/templates/app/client_detail.html @@ -0,0 +1,25 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} + + + + + + + + + + + +
      Name{{ object.handle }}
      SIA{{ object.sia_base }}
      + +{% block action %} + Export + Delete +{% endblock action %} + +{% endblock content %} diff --git a/rpki/gui/app/templates/app/client_list.html b/rpki/gui/app/templates/app/client_list.html new file mode 100644 index 00000000..12987c53 --- /dev/null +++ b/rpki/gui/app/templates/app/client_list.html @@ -0,0 +1,22 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} + + + + + {% for client in object_list %} + + + + + {% endfor %} + +
      HandleAction
      {{ client.handle }} + +
      + Import +{% endblock content %} diff --git a/rpki/gui/app/templates/app/conf_empty.html b/rpki/gui/app/templates/app/conf_empty.html new file mode 100644 index 00000000..efe06f14 --- /dev/null +++ b/rpki/gui/app/templates/app/conf_empty.html @@ -0,0 +1,17 @@ +{% extends "base.html" %} +{% load url from future %} + +{% block content %} + +{% if request.user.is_superuser %} +
      +There are currently no resource holders on this system. +
      + Create +{% else %} +
      +Your account does not have permission to manage any resource handles on this server. Please contact your portal-gui adminstrator. +
      +{% endif %} + +{% endblock %} diff --git a/rpki/gui/app/templates/app/conf_list.html b/rpki/gui/app/templates/app/conf_list.html new file mode 100644 index 00000000..dce6d59e --- /dev/null +++ b/rpki/gui/app/templates/app/conf_list.html @@ -0,0 +1,17 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} + + +

      Please select a handle.

      + + + +{% endblock %} diff --git a/rpki/gui/app/templates/app/dashboard.html b/rpki/gui/app/templates/app/dashboard.html new file mode 100644 index 00000000..65dbb90f --- /dev/null +++ b/rpki/gui/app/templates/app/dashboard.html @@ -0,0 +1,230 @@ +{% extends "app/app_base.html" %} + +{# this can be removed when django 1.4 is EOL, because it is the default behavior in 1.5 #} +{% load url from future %} + +{% block sidebar_extra %} +
    • +
    • + {# #}export identity
    • +{% endblock sidebar_extra %} + +{% block content %} +
      +
      + + + + + + + + + + {% for object in asns %} + + + + + + {% endfor %} + + {% for object in prefixes %} + + + + + + {% endfor %} + + {% if prefixes_v6 %} + {% for object in prefixes_v6 %} + + + + + + {% endfor %} + {% endif %} +
      ResourceValid UntilParent
      {{ object }}{{ object.cert.not_after }} + {% if object.cert.parent %} + {{ object.cert.parent.handle }} + {% endif %} +
      {{ object.as_resource_range }}{{ object.cert.not_after }} + {% if object.cert.parent %} + {{ object.cert.parent.handle }} + {% endif %} +
      {{ object.as_resource_range }}{{ object.cert.not_after }} + {% if object.cert.parent %} + {{ object.cert.parent.handle }} + {% endif %} +
      + refresh +
      +
      +

      Unallocated Resources

      +

      The following resources have not been allocated to a child, nor appear in a ROA. + + {% if unused_asns %} +

      ASNs

      +
        + {% for asn in unused_asns %} +
      • AS{{ asn }} + {% endfor %} +
      + {% endif %} + + {% if unused_prefixes %} +

      IPv4

      + + + {% for addr in unused_prefixes %} + + + + + {% endfor %} +
      PrefixAction
      {{ addr }} + ROA +
      + {% endif %} + + {% if unused_prefixes_v6 %} +

      IPv6

      + + + {% for addr in unused_prefixes_v6 %} + + + + + {% endfor %} +
      Prefix
      {{ addr }} + ROA +
      + {% endif %} + +
      +
      + +
      +
      + + + + {% for roa in conf.roas %} + + + + + + + + {% endfor %} +
      PrefixMax LengthAS
      {{ roa.prefixes.all.0.as_roa_prefix }}{{ roa.prefixes.all.0.max_prefixlen }}{{ roa.asn }} + + + +
      + Create + Import + Export +
      + +
      + + + + {% for gbr in conf.ghostbusters %} + + + + + + + + {% endfor %} +
      Full NameOrganizationEmailTelephone
      {{ gbr.full_name }}{{ gbr.organization }}{{ gbr.email_address }}{{ gbr.telephone }} + + + +
      + Create +
      +
      + +
      +
      + + + + {% for child in conf.children %} + + + + + {% endfor %} +
      Handle
      {{ child.handle }} + +
      + +
      + +
      +
      +
      + + + + {% for parent in conf.parents %} + + + + + {% endfor %} +
      Handle
      {{ parent.handle }} + +
      + Import +
      +
      + +
      +
      + + + + {% for repo in conf.repositories %} + + + + + {% endfor %} +
      Handle
      {{ repo.handle }} + +
      + Import +
      +
      +{% endblock %} diff --git a/rpki/gui/app/templates/app/ghostbuster_confirm_delete.html b/rpki/gui/app/templates/app/ghostbuster_confirm_delete.html new file mode 100644 index 00000000..76b1d25a --- /dev/null +++ b/rpki/gui/app/templates/app/ghostbuster_confirm_delete.html @@ -0,0 +1,20 @@ +{% extends "app/ghostbuster_detail.html" %} + +{% block extra %} + +
      +

      + Please confirm that you really want to delete by clicking Delete. + +

      +
      + {% csrf_token %} + + Cancel +
      +
      +
      + +{% endblock %} + + diff --git a/rpki/gui/app/templates/app/ghostbusterrequest_detail.html b/rpki/gui/app/templates/app/ghostbusterrequest_detail.html new file mode 100644 index 00000000..296f0f16 --- /dev/null +++ b/rpki/gui/app/templates/app/ghostbusterrequest_detail.html @@ -0,0 +1,64 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} + + + + + + {% if object.honorific_prefix %} + + {% endif %} + + {% if object.organization %} + + {% endif %} + + {% if object.telephone %} + + {% endif %} + + {% if object.email_address %} + + {% endif %} + + {% if object.box %} + + {% endif %} + + {% if object.extended %} + + {% endif %} + + {% if object.street %} + + {% endif %} + + {% if object.city %} + + {% endif %} + + {% if object.region %} + + {% endif %} + + {% if object.code %} + + {% endif %} + + {% if object.country %} + + {% endif %} + +
      Full Name{{ object.full_name }}
      Honorific Prefix{{ object.honorific_prefix }}
      Organization{{ object.organization }}
      Telephone{{ object.telephone }}
      Email{{ object.email_address }}
      P.O. Box{{ object.box }}
      Extended Address{{ object.extended }}
      Street Address{{ object.street }}
      City{{ object.city }}
      Region{{ object.region }}
      Postal Code{{ object.code }}
      Country{{ object.country }}
      + +{% block action %} +{# the roarequest_confirm_delete template will override this section #} + Edit + Delete +{% endblock action %} + +{% endblock content %} diff --git a/rpki/gui/app/templates/app/import_resource_form.html b/rpki/gui/app/templates/app/import_resource_form.html new file mode 100644 index 00000000..e446d344 --- /dev/null +++ b/rpki/gui/app/templates/app/import_resource_form.html @@ -0,0 +1,9 @@ +{% extends "app/app_form.html" %} + +{% block form_info %} +
      + Warning! All existing resources of this type currently in the + database will be deleted and replaced with the contents of the CSV + file you are uploading. +
      +{% endblock form_info %} diff --git a/rpki/gui/app/templates/app/object_confirm_delete.html b/rpki/gui/app/templates/app/object_confirm_delete.html new file mode 100644 index 00000000..c4af9b26 --- /dev/null +++ b/rpki/gui/app/templates/app/object_confirm_delete.html @@ -0,0 +1,21 @@ +{% extends parent_template %} +{% comment %} +Since Django templates do not support multiple inheritance, we simluate it by +dynamically extending from the *_detail.html template for a concrete object +type. The *DeleteView classes should set a "parent_template" variable which is +string specifying the concrete template to inherit from. +{% endcomment %} +{% load url from future %} + +{% block action %} +
      +

      Warning!

      + Please confirm that you would like to delete this object +
      + +
      + {% csrf_token %} + + Cancel +
      +{% endblock %} diff --git a/rpki/gui/app/templates/app/parent_detail.html b/rpki/gui/app/templates/app/parent_detail.html new file mode 100644 index 00000000..4dd1842f --- /dev/null +++ b/rpki/gui/app/templates/app/parent_detail.html @@ -0,0 +1,67 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      service_uri{{ object.service_uri }}
      parent_handle{{ object.parent_handle }}
      child_handle{{ object.child_handle }}
      repository_type{{ object.repository_type }}
      referrer{{ object.referrer }}
      ta validity period{{ object.ta.getNotBefore }} - {{ object.ta.getNotAfter }}
      + +
      +
      +

      Delegated Addresses

      +
        + {% for c in object.certs.all %} + {% for a in c.address_ranges.all %} +
      • {{ a }}
      • + {% endfor %} + {% for a in c.address_ranges_v6.all %} +
      • {{ a }}
      • + {% endfor %} + {% endfor %} +
      +
      +
      +

      Delegated ASNs

      +
        + {% for c in object.certs.all %} + {% for a in c.asn_ranges.all %} +
      • {{ a }}
      • + {% endfor %} + {% endfor %} +
      +
      +
      + +{% block action %} + Export + Delete +{% endblock action %} + +{% endblock content %} diff --git a/rpki/gui/app/templates/app/pubclient_list.html b/rpki/gui/app/templates/app/pubclient_list.html new file mode 100644 index 00000000..1872e005 --- /dev/null +++ b/rpki/gui/app/templates/app/pubclient_list.html @@ -0,0 +1,10 @@ +{% extends "app/object_list.html" %} +{% load url from future %} + +{% block actions %} +
      + Import +
      +{% endblock actions %} + + diff --git a/rpki/gui/app/templates/app/repository_detail.html b/rpki/gui/app/templates/app/repository_detail.html new file mode 100644 index 00000000..92a43e54 --- /dev/null +++ b/rpki/gui/app/templates/app/repository_detail.html @@ -0,0 +1,19 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} + + + + + + + +
      SIA{{ object.sia_base }}
      + +{% block action %} + Delete +{% endblock action %} +{% endblock content %} diff --git a/rpki/gui/app/templates/app/resource_holder_list.html b/rpki/gui/app/templates/app/resource_holder_list.html new file mode 100644 index 00000000..6525e74d --- /dev/null +++ b/rpki/gui/app/templates/app/resource_holder_list.html @@ -0,0 +1,37 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} + + +

      +This page lists all of the resource holders that are currently managed by this server. +Note that this is distinct from the +list of web interface users. +

      + + + + + + + + + + {% for conf in object_list %} + + + + + {% endfor %} + +
      HandleAction
      {{ conf.handle }} + + +
      + + Create +{% endblock content %} +{# vim: set ft=htmldjango: #} diff --git a/rpki/gui/app/templates/app/roa_detail.html b/rpki/gui/app/templates/app/roa_detail.html new file mode 100644 index 00000000..ec76579d --- /dev/null +++ b/rpki/gui/app/templates/app/roa_detail.html @@ -0,0 +1,40 @@ +{% extends "app/app_base.html" %} +{% load url from future %} +{% load app_extras %} + +{% block content %} + + +
      +
      + + + + + + + +
      PrefixMax LengthAS
      {{ object.prefixes.all.0.as_roa_prefix }}{{ object.prefixes.all.0.max_prefixlen }}{{ object.asn }}
      +
      + +
      +

      Covered Routes

      +

      This table lists currently announced routes which are covered by prefixes included in this ROA. + + + {% for r in object.routes %} + + + + + + + {% endfor %} +
      PrefixASValidity
      {{ r.as_resource_range }}{{ r.asn }}{% validity_label r.status %}
      +

      +
      + + Delete +{% endblock content %} diff --git a/rpki/gui/app/templates/app/roarequest_confirm_delete.html b/rpki/gui/app/templates/app/roarequest_confirm_delete.html new file mode 100644 index 00000000..7dc3ec2b --- /dev/null +++ b/rpki/gui/app/templates/app/roarequest_confirm_delete.html @@ -0,0 +1,59 @@ +{% extends "app/app_base.html" %} +{% load url from future %} +{% load app_extras %} + +{% block content %} + + +
      +
      +
      +

      Please confirm that you would like to delete the + following ROA Request. The table to the right indicates how validation + status for matching routes may change. +

      + + + + + + + + + + + + + + +
      Prefix{{ object.prefixes.all.0.as_roa_prefix }}
      Max Length{{ object.prefixes.all.0.max_prefixlen }}
      AS{{ object.asn }}
      + +
      + {% csrf_token %} + + Cancel +
      +
      + +
      +

      Matching Routes

      + + + + + + + + {% for r in routes %} + + + + + + {% endfor %} +
      PrefixOrigin ASValidation Status
      {{ r.get_prefix_display }}{{ r.asn }}{% validity_label r.newstatus %}
      +
      +
      +{% endblock content %} diff --git a/rpki/gui/app/templates/app/roarequest_confirm_form.html b/rpki/gui/app/templates/app/roarequest_confirm_form.html new file mode 100644 index 00000000..446bb6a4 --- /dev/null +++ b/rpki/gui/app/templates/app/roarequest_confirm_form.html @@ -0,0 +1,60 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +
      +

      Confirm ROA Request

      +
      + +
      +
      +
      +

      Please confirm that you would like to create the following ROA. + The accompanying table indicates how the validation status may change as a result. +

      + + + + + + + + + + + + +
      ASPrefixMax Length
      {{ asn }}{{ prefix }}{{ max_prefixlen }}
      + +
      + {% csrf_token %} + {% include "app/bootstrap_form.html" %} + +
      + + Cancel +
      +
      +
      + +
      +

      Matched Routes

      + + + + + + + + {% for r in routes %} + + + + + + {% endfor %} +
      PrefixOrigin ASValidation Status
      {{ r.get_prefix_display }}{{ r.asn }}{{ r.status }}
      +
      + +
      +{% endblock content %} diff --git a/rpki/gui/app/templates/app/roarequest_confirm_multi_form.html b/rpki/gui/app/templates/app/roarequest_confirm_multi_form.html new file mode 100644 index 00000000..4a06a4aa --- /dev/null +++ b/rpki/gui/app/templates/app/roarequest_confirm_multi_form.html @@ -0,0 +1,66 @@ +{% extends "app/app_base.html" %} +{% load url from future %} +{% load app_extras %} + +{% block content %} +
      +

      Confirm ROA Requests

      +
      + +
      +
      +
      +

      Please confirm that you would like to create the following ROA(s). + The accompanying table indicates how the validation status may change as a result. +

      + + + + + + + + {% for roa in roas %} + + + + + + {% endfor %} +
      PrefixMax LengthAS
      {{ roa.prefix }}{{ roa.max_prefixlen }}{{ roa.asn }}
      + +
      + {% csrf_token %} + {{ formset.management_form }} + {% for form in formset %} + {% include "app/bootstrap_form.html" %} + {% endfor %} + +
      + + Cancel +
      +
      +
      + +
      +

      Matched Routes

      + + + + + + + + {% for r in routes %} + + + + + + {% endfor %} +
      PrefixOrigin ASValidation Status
      {{ r.get_prefix_display }}{{ r.asn }}{% validity_label r.newstatus %}
      +
      + +
      +{% endblock content %} diff --git a/rpki/gui/app/templates/app/roarequest_form.html b/rpki/gui/app/templates/app/roarequest_form.html new file mode 100644 index 00000000..3a29131d --- /dev/null +++ b/rpki/gui/app/templates/app/roarequest_form.html @@ -0,0 +1,50 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{# This form is used for creating a new ROA request #} + +{% block content %} +
      +

      Create ROA

      +
      + + + + +
      +
      +
      + {% csrf_token %} + {% include "app/bootstrap_form.html" %} +
      + + Cancel +
      +
      +
      + +
      + Routes matching your prefix: + + + +
      PrefixAS
      +
      +
      +{% endblock content %} diff --git a/rpki/gui/app/templates/app/roarequest_multi_form.html b/rpki/gui/app/templates/app/roarequest_multi_form.html new file mode 100644 index 00000000..06d07943 --- /dev/null +++ b/rpki/gui/app/templates/app/roarequest_multi_form.html @@ -0,0 +1,28 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} +
      +

      Create ROAs

      +
      + +
      + {% csrf_token %} + {{ formset.management_form }} + {% for form in formset %} +
      + {{ form.prefix }} + {{ form.max_prefixlen }} + {{ form.asn }} + + {% if form.errors %}{{ form.errors }}{% endif %} + {% if form.non_field_errors %}{{ form.non_field_errors }}{% endif %} +
      + {% endfor %} + +
      + + Cancel +
      +
      +{% endblock %} diff --git a/rpki/gui/app/templates/app/route_detail.html b/rpki/gui/app/templates/app/route_detail.html new file mode 100644 index 00000000..84add4a8 --- /dev/null +++ b/rpki/gui/app/templates/app/route_detail.html @@ -0,0 +1,58 @@ +{% extends "app/app_base.html" %} +{% load app_extras %} +{% load bootstrap_pager %} + +{# template for displaying the list of ROAs covering a specific route #} + +{% block content %} + + +
      +
      + + + + + + + + + + + +
      PrefixASValidity
      {{ object.as_resource_range }}{{ object.asn }}{% validity_label object.status %}
      +
      +
      + +
      +
      +

      The table below lists all ROAs which cover the route described above. + + + + + + + + + + + + + {% for pfx in roa_prefixes %} + + + + + + + + {% endfor %} + +
      PrefixMax LengthASNExpiresURI
      {{ pfx.as_resource_range }}{{ pfx.max_length }}{{ pfx.roas.all.0.asid }}{{ pfx.roas.all.0.not_after }}{{ pfx.roas.all.0.repo.uri }}
      + {% bootstrap_pager request roa_prefixes %} +

      +
      +{% endblock %} diff --git a/rpki/gui/app/templates/app/routes_view.html b/rpki/gui/app/templates/app/routes_view.html new file mode 100644 index 00000000..885f3fa9 --- /dev/null +++ b/rpki/gui/app/templates/app/routes_view.html @@ -0,0 +1,55 @@ +{% extends "app/app_base.html" %} +{% load url from future %} +{% load bootstrap_pager %} +{% load app_extras %} + +{% block sidebar_extra %} + +
    • IPv4: {{ timestamp.bgp_v4_import.isoformat }}
    • +
    • IPv6: {{ timestamp.bgp_v6_import.isoformat }}
    • + +
    • {{ timestamp.rcynic_import.isoformat }}
    • +{% endblock sidebar_extra %} + +{% block content %} + + + +

      +This view shows currently advertised routes for the prefixes listed in resource certs received from RPKI parents. + +

      + {% csrf_token %} + + + + + + + + + + + {% for r in routes %} + + + + + + + {% endfor %} + +
      PrefixOrigin ASValidation Status
      {{ r.get_prefix_display }}{{ r.asn }} + {% validity_label r.status %} + +
      +
      + +
      +
      + +{% bootstrap_pager request routes %} + +{% endblock content %} diff --git a/rpki/gui/app/templates/app/user_list.html b/rpki/gui/app/templates/app/user_list.html new file mode 100644 index 00000000..1b419ded --- /dev/null +++ b/rpki/gui/app/templates/app/user_list.html @@ -0,0 +1,37 @@ +{% extends "app/app_base.html" %} +{% load url from future %} + +{% block content %} + + +

      +This page lists all user accounts in the web interface. Note that this is distinct from the +list of resource holders. +

      + + + + + + + + + + + {% for user in object_list %} + + + + + + {% endfor %} + +
      UsernameEmail
      {{ user.username }}{{ user.email }} + + +
      + + Create +{% endblock content %} diff --git a/rpki/gui/app/templates/base.html b/rpki/gui/app/templates/base.html new file mode 100644 index 00000000..08d0c112 --- /dev/null +++ b/rpki/gui/app/templates/base.html @@ -0,0 +1,63 @@ +{% load url from future %} +{% load app_extras %} + + + + + + {% block title %}RPKI {% if request.session.handle %}: {{ request.session.handle }}{% endif %}{% endblock %} + {% block head %}{% endblock %} + + + + + + + + +
      + +
      + +
      + +
      +
      + {% block sidebar %}{% endblock %} +
      + +
      + {% if messages %} + {% for message in messages %} + {# this will break if there is more than one tag, but don't expect to use that feature #} +
      + {{ message }} +
      + {% endfor %} + {% endif %} + {% block content %}{% endblock %} +
      +
      + + + diff --git a/rpki/gui/app/templates/registration/login.html b/rpki/gui/app/templates/registration/login.html new file mode 100644 index 00000000..0d6fb6fd --- /dev/null +++ b/rpki/gui/app/templates/registration/login.html @@ -0,0 +1,25 @@ +{% extends "base.html" %} +{% load url from future %} + +{% block content %} + + +{% if form.errors %} +
      +

      Your username and password didn't match. Please try again.

      +
      +{% endif %} + +
      + {% csrf_token %} + {% include "app/bootstrap_form.html" %} + + +
      + +
      +
      + +{% endblock %} diff --git a/rpki/gui/app/templatetags/__init__.py b/rpki/gui/app/templatetags/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/rpki/gui/app/templatetags/app_extras.py b/rpki/gui/app/templatetags/app_extras.py new file mode 100644 index 00000000..2bde9bc2 --- /dev/null +++ b/rpki/gui/app/templatetags/app_extras.py @@ -0,0 +1,58 @@ +from django import template + +register = template.Library() + + +@register.simple_tag +def verbose_name(obj): + "Return the model class' verbose name." + return obj._meta.verbose_name.capitalize() + + +@register.simple_tag +def verbose_name_plural(qs): + "Return the verbose name for the model class." + return qs.model._meta.verbose_name_plural.capitalize() + +css = { + 'valid': 'label-success', + 'invalid': 'label-important' +} + + +@register.simple_tag +def validity_label(validity): + return '%s' % (css.get(validity, ''), validity) + + +@register.simple_tag +def severity_class(severity): + css = { + 0: 'label-info', + 1: 'label-warning', + 2: 'label-important', + } + return css.get(severity) + + +@register.simple_tag +def alert_count(conf): + qs = conf.alerts.filter(seen=False) + unread = len(qs) + if unread: + severity = max([x.severity for x in qs]) + css = { + 0: 'badge-info', + 1: 'badge-warning', + 2: 'badge-important' + } + css_class = css.get(severity) + else: + css_class = 'badge-default' + return u'%d' % (css_class, unread) + + +@register.simple_tag +def rpki_version(): + import rpki.version + return rpki.version.VERSION diff --git a/rpki/gui/app/templatetags/bootstrap_pager.py b/rpki/gui/app/templatetags/bootstrap_pager.py new file mode 100644 index 00000000..bae8445a --- /dev/null +++ b/rpki/gui/app/templatetags/bootstrap_pager.py @@ -0,0 +1,55 @@ +from django import template + +register = template.Library() + + +class BootstrapPagerNode(template.Node): + def __init__(self, request, pager_object): + self.request = template.Variable(request) + self.pager_object = 
template.Variable(pager_object) + + def render(self, context): + request = self.request.resolve(context) + pager_object = self.pager_object.resolve(context) + if pager_object.paginator.num_pages == 1: + return '' + r = ['') + return '\n'.join(r) + + +@register.tag +def bootstrap_pager(parser, token): + try: + tag_name, request, pager_object = token.split_contents() + except ValueError: + raise template.TemplateSyntaxError("%r tag requires two arguments" % token.contents.split()[0]) + return BootstrapPagerNode(request, pager_object) diff --git a/rpki/gui/app/timestamp.py b/rpki/gui/app/timestamp.py new file mode 100644 index 00000000..959f2025 --- /dev/null +++ b/rpki/gui/app/timestamp.py @@ -0,0 +1,25 @@ +# $Id$ +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# + +import models +from datetime import datetime + +def update(name): + "Set the timestamp value for the given name to the current time." + q = models.Timestamp.objects.filter(name=name) + obj = q[0] if q else models.Timestamp(name=name) + obj.ts = datetime.utcnow() + obj.save() diff --git a/rpki/gui/app/urls.py b/rpki/gui/app/urls.py new file mode 100644 index 00000000..92e90b0e --- /dev/null +++ b/rpki/gui/app/urls.py @@ -0,0 +1,81 @@ +# Copyright (C) 2010, 2011 SPARTA, Inc. 
dba Cobham Analytic Solutions +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from django.conf.urls import patterns, url +from rpki.gui.app import views + +urlpatterns = patterns( + '', + (r'^$', views.dashboard), + url(r'^alert/$', views.AlertListView.as_view(), name='alert-list'), + url(r'^alert/clear_all$', views.alert_clear_all, name='alert-clear-all'), + url(r'^alert/(?P\d+)/$', views.AlertDetailView.as_view(), + name='alert-detail'), + url(r'^alert/(?P\d+)/delete$', views.AlertDeleteView.as_view(), + name='alert-delete'), + (r'^conf/export$', views.conf_export), + (r'^conf/list$', views.conf_list), + (r'^conf/select$', views.conf_select), + url(r'^conf/export_asns$', views.export_asns, name='export-asns'), + url(r'^conf/export_prefixes$', views.export_prefixes, name='export-prefixes'), + url(r'^conf/import_asns$', views.import_asns, name='import-asns'), + url(r'^conf/import_prefixes$', views.import_prefixes, name='import-prefixes'), + (r'^parent/import$', views.parent_import), + (r'^parent/(?P\d+)/$', views.parent_detail), + (r'^parent/(?P\d+)/delete$', views.parent_delete), + (r'^parent/(?P\d+)/export$', views.parent_export), + (r'^child/import$', views.child_import), + (r'^child/(?P\d+)/$', views.child_detail), + 
(r'^child/(?P\d+)/add_address$', views.child_add_prefix), + (r'^child/(?P\d+)/add_asn$', views.child_add_asn), + (r'^child/(?P\d+)/delete$', views.child_delete), + (r'^child/(?P\d+)/edit$', views.child_edit), + (r'^child/(?P\d+)/export$', views.child_response), + url(r'^gbr/create$', views.ghostbuster_create, name='gbr-create'), + url(r'^gbr/(?P\d+)/$', views.GhostbusterDetailView.as_view(), name='gbr-detail'), + url(r'^gbr/(?P\d+)/edit$', views.ghostbuster_edit, name='gbr-edit'), + url(r'^gbr/(?P\d+)/delete$', views.ghostbuster_delete, name='gbr-delete'), + (r'^refresh$', views.refresh), + (r'^client/import$', views.client_import), + (r'^client/$', views.client_list), + (r'^client/(?P\d+)/$', views.client_detail), + (r'^client/(?P\d+)/delete$', views.client_delete), + url(r'^client/(?P\d+)/export$', views.client_export, name='client-export'), + (r'^repo/import$', views.repository_import), + (r'^repo/(?P\d+)/$', views.repository_detail), + (r'^repo/(?P\d+)/delete$', views.repository_delete), + (r'^resource_holder/$', views.resource_holder_list), + (r'^resource_holder/create$', views.resource_holder_create), + (r'^resource_holder/(?P\d+)/delete$', views.resource_holder_delete), + (r'^resource_holder/(?P\d+)/edit$', views.resource_holder_edit), + (r'^roa/(?P\d+)/$', views.roa_detail), + (r'^roa/create$', views.roa_create), + (r'^roa/create_multi$', views.roa_create_multi), + (r'^roa/confirm$', views.roa_create_confirm), + (r'^roa/confirm_multi$', views.roa_create_multi_confirm), + url(r'^roa/export$', views.roa_export, name='roa-export'), + url(r'^roa/import$', views.roa_import, name='roa-import'), + (r'^roa/(?P\d+)/delete$', views.roa_delete), + url(r'^roa/(?P\d+)/clone$', views.roa_clone, name="roa-clone"), + (r'^route/$', views.route_view), + (r'^route/(?P\d+)/$', views.route_detail), + url(r'^route/suggest$', views.route_suggest, name="suggest-roas"), + (r'^user/$', views.user_list), + (r'^user/create$', views.user_create), + (r'^user/(?P\d+)/delete$', 
views.user_delete), + (r'^user/(?P\d+)/edit$', views.user_edit), +) diff --git a/rpki/gui/app/views.py b/rpki/gui/app/views.py new file mode 100644 index 00000000..db4cf0c1 --- /dev/null +++ b/rpki/gui/app/views.py @@ -0,0 +1,1314 @@ +# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +This module contains the view functions implementing the web portal +interface. 
+ +""" + +__version__ = '$Id$' + +import os +import os.path +from tempfile import NamedTemporaryFile +import cStringIO +import csv +import logging + +from django.utils.decorators import method_decorator +from django.contrib.auth.decorators import login_required +from django.shortcuts import get_object_or_404, render, redirect +from django.utils.http import urlquote +from django import http +from django.core.urlresolvers import reverse, reverse_lazy +from django.contrib.auth.models import User +from django.views.generic import DetailView, ListView, DeleteView +from django.core.paginator import Paginator, InvalidPage +from django.forms.formsets import formset_factory, BaseFormSet +import django.db.models +from django.contrib import messages + +from rpki.irdb import Zookeeper, ChildASN, ChildNet, ROARequestPrefix +from rpki.gui.app import models, forms, glue, range_list +from rpki.resource_set import (resource_range_as, resource_range_ip, + roa_prefix_ipv4) +from rpki import sundial +import rpki.exceptions + +from rpki.gui.cacheview.models import ROA +from rpki.gui.routeview.models import RouteOrigin +from rpki.gui.decorators import tls_required + +logger = logging.getLogger(__name__) + + +def superuser_required(f): + """Decorator which returns HttpResponseForbidden if the user does + not have superuser permissions. + + """ + @login_required + def _wrapped(request, *args, **kwargs): + if not request.user.is_superuser: + return http.HttpResponseForbidden() + return f(request, *args, **kwargs) + return _wrapped + + +def handle_required(f): + """Decorator for view functions which require the user to be logged in and + a resource handle selected for the session. 
+ + """ + @login_required + @tls_required + def wrapped_fn(request, *args, **kwargs): + if 'handle' not in request.session: + if request.user.is_superuser: + conf = models.Conf.objects.all() + else: + conf = models.Conf.objects.filter(confacl__user=request.user) + + if conf.count() == 1: + request.session['handle'] = conf[0] + elif conf.count() == 0: + return render(request, 'app/conf_empty.html', {}) + else: + url = '%s?next=%s' % (reverse(conf_list), + urlquote(request.get_full_path())) + return http.HttpResponseRedirect(url) + + return f(request, *args, **kwargs) + return wrapped_fn + + +@handle_required +def generic_import(request, queryset, configure, form_class=None, + post_import_redirect=None): + """ + Generic view function for importing XML files used in the setup + process. + + queryset + queryset containing all objects of the type being imported + + configure + method on Zookeeper to invoke with the imported XML file + + form_class + specifies the form to use for import. If None, uses the generic + forms.ImportForm. + + post_import_redirect + if None (default), the user will be redirected to the detail page for + the imported object. Otherwise, the user will be redirected to the + specified URL. + + """ + conf = request.session['handle'] + if form_class is None: + form_class = forms.ImportForm + if request.method == 'POST': + form = form_class(request.POST, request.FILES) + if form.is_valid(): + tmpf = NamedTemporaryFile(prefix='import', suffix='.xml', + delete=False) + tmpf.write(form.cleaned_data['xml'].read()) + tmpf.close() + z = Zookeeper(handle=conf.handle) + handle = form.cleaned_data.get('handle') + # CharField uses an empty string for the empty value, rather than + # None. Convert to none in this case, since configure_child/parent + # expects it. + if handle == '': + handle = None + # configure_repository returns None, so can't use tuple expansion + # here. Unpack the tuple below if post_import_redirect is None. 
+ r = configure(z, tmpf.name, handle) + # force rpkid run now + z.synchronize_ca(poke=True) + os.remove(tmpf.name) + if post_import_redirect: + url = post_import_redirect + else: + _, handle = r + url = queryset.get(issuer=conf, + handle=handle).get_absolute_url() + return http.HttpResponseRedirect(url) + else: + form = form_class() + + return render(request, 'app/app_form.html', { + 'form': form, + 'form_title': 'Import ' + queryset.model._meta.verbose_name.capitalize(), + }) + + +@handle_required +def dashboard(request): + conf = request.session['handle'] + + used_asns = range_list.RangeList() + + # asns used in my roas + qs = models.ROARequest.objects.filter(issuer=conf) + roa_asns = set((obj.asn for obj in qs)) + used_asns.extend((resource_range_as(asn, asn) for asn in roa_asns)) + + # asns given to my children + child_asns = ChildASN.objects.filter(child__in=conf.children.all()) + used_asns.extend((resource_range_as(obj.start_as, obj.end_as) for obj in child_asns)) + + # my received asns + asns = models.ResourceRangeAS.objects.filter(cert__conf=conf) + my_asns = range_list.RangeList([resource_range_as(obj.min, obj.max) for obj in asns]) + + unused_asns = my_asns.difference(used_asns) + + used_prefixes = range_list.RangeList() + used_prefixes_v6 = range_list.RangeList() + + # prefixes used in my roas + for obj in models.ROARequestPrefix.objects.filter(roa_request__issuer=conf, + version='IPv4'): + used_prefixes.append(obj.as_resource_range()) + + for obj in models.ROARequestPrefix.objects.filter(roa_request__issuer=conf, + version='IPv6'): + used_prefixes_v6.append(obj.as_resource_range()) + + # prefixes given to my children + for obj in ChildNet.objects.filter(child__in=conf.children.all(), + version='IPv4'): + used_prefixes.append(obj.as_resource_range()) + + for obj in ChildNet.objects.filter(child__in=conf.children.all(), + version='IPv6'): + used_prefixes_v6.append(obj.as_resource_range()) + + # my received prefixes + prefixes = 
models.ResourceRangeAddressV4.objects.filter(cert__conf=conf).all() + prefixes_v6 = models.ResourceRangeAddressV6.objects.filter(cert__conf=conf).all() + my_prefixes = range_list.RangeList([obj.as_resource_range() for obj in prefixes]) + my_prefixes_v6 = range_list.RangeList([obj.as_resource_range() for obj in prefixes_v6]) + + unused_prefixes = my_prefixes.difference(used_prefixes) + # monkey-patch each object with a boolean value indicating whether or not + # it is a prefix. We have to do this here because in the template there is + # no way to catch the MustBePrefix exception. + for x in unused_prefixes: + try: + x.prefixlen() + x.is_prefix = True + except rpki.exceptions.MustBePrefix: + x.is_prefix = False + + unused_prefixes_v6 = my_prefixes_v6.difference(used_prefixes_v6) + for x in unused_prefixes_v6: + try: + x.prefixlen() + x.is_prefix = True + except rpki.exceptions.MustBePrefix: + x.is_prefix = False + + clients = models.Client.objects.all() if request.user.is_superuser else None + + return render(request, 'app/dashboard.html', { + 'conf': conf, + 'unused_asns': unused_asns, + 'unused_prefixes': unused_prefixes, + 'unused_prefixes_v6': unused_prefixes_v6, + 'asns': asns, + 'prefixes': prefixes, + 'prefixes_v6': prefixes_v6, + 'clients': clients, + }) + + +@login_required +def conf_list(request, **kwargs): + """Allow the user to select a handle.""" + log = request.META['wsgi.errors'] + next_url = request.GET.get('next', reverse(dashboard)) + if request.user.is_superuser: + qs = models.Conf.objects.all() + else: + qs = models.Conf.objects.filter(confacl__user=request.user) + return render(request, 'app/conf_list.html', { + 'conf_list': qs, + 'next_url': next_url + }) + + +@login_required +def conf_select(request): + """Change the handle for the current session.""" + if not 'handle' in request.GET: + return redirect(conf_list) + handle = request.GET['handle'] + next_url = request.GET.get('next', reverse(dashboard)) + if request.user.is_superuser: + 
request.session['handle'] = get_object_or_404(models.Conf, handle=handle) + else: + request.session['handle'] = get_object_or_404( + models.Conf, confacl__user=request.user, handle=handle + ) + return http.HttpResponseRedirect(next_url) + + +def serve_xml(content, basename, ext='xml'): + """ + Generate a HttpResponse object with the content type set to XML. + + `content` is a string. + + `basename` is the prefix to specify for the XML filename. + + `csv` is the type (default: xml) + + """ + resp = http.HttpResponse(content, mimetype='application/%s' % ext) + resp['Content-Disposition'] = 'attachment; filename=%s.%s' % (basename, ext) + return resp + + +@handle_required +def conf_export(request): + """Return the identity.xml for the current handle.""" + conf = request.session['handle'] + z = Zookeeper(handle=conf.handle) + xml = z.generate_identity() + return serve_xml(str(xml), '%s.identity' % conf.handle) + + +@handle_required +def export_asns(request): + """Export CSV file containing ASN allocations to children.""" + conf = request.session['handle'] + s = cStringIO.StringIO() + csv_writer = csv.writer(s, delimiter=' ') + for childasn in ChildASN.objects.filter(child__issuer=conf): + csv_writer.writerow([childasn.child.handle, str(childasn.as_resource_range())]) + return serve_xml(s.getvalue(), '%s.asns' % conf.handle, ext='csv') + + +@handle_required +def import_asns(request): + conf = request.session['handle'] + if request.method == 'POST': + form = forms.ImportCSVForm(request.POST, request.FILES) + if form.is_valid(): + f = NamedTemporaryFile(prefix='asns', suffix='.csv', delete=False) + f.write(request.FILES['csv'].read()) + f.close() + z = Zookeeper(handle=conf.handle) + z.load_asns(f.name) + z.run_rpkid_now() + os.unlink(f.name) + messages.success(request, 'Successfully imported AS delgations from CSV file.') + return redirect(dashboard) + else: + form = forms.ImportCSVForm() + return render(request, 'app/import_resource_form.html', { + 'form_title': 'Import 
CSV containing ASN delegations', + 'form': form, + 'cancel_url': reverse(dashboard) + }) + + +@handle_required +def export_prefixes(request): + """Export CSV file containing ASN allocations to children.""" + conf = request.session['handle'] + s = cStringIO.StringIO() + csv_writer = csv.writer(s, delimiter=' ') + for childnet in ChildNet.objects.filter(child__issuer=conf): + csv_writer.writerow([childnet.child.handle, str(childnet.as_resource_range())]) + return serve_xml(s.getvalue(), '%s.prefixes' % conf.handle, ext='csv') + + +@handle_required +def import_prefixes(request): + conf = request.session['handle'] + if request.method == 'POST': + form = forms.ImportCSVForm(request.POST, request.FILES) + if form.is_valid(): + f = NamedTemporaryFile(prefix='prefixes', suffix='.csv', delete=False) + f.write(request.FILES['csv'].read()) + f.close() + z = Zookeeper(handle=conf.handle) + z.load_prefixes(f.name) + z.run_rpkid_now() + os.unlink(f.name) + messages.success(request, 'Successfully imported prefix delegations from CSV file.') + return redirect(dashboard) + else: + form = forms.ImportCSVForm() + return render(request, 'app/import_resource_form.html', { + 'form_title': 'Import CSV containing Prefix delegations', + 'form': form, + 'cancel_url': reverse(dashboard) + }) + + +@handle_required +def parent_import(request): + conf = request.session['handle'] + return generic_import(request, conf.parents, Zookeeper.configure_parent) + + +@handle_required +def parent_detail(request, pk): + return render(request, 'app/parent_detail.html', { + 'object': get_object_or_404(request.session['handle'].parents, pk=pk)}) + + +@handle_required +def parent_delete(request, pk): + conf = request.session['handle'] + obj = get_object_or_404(conf.parents, pk=pk) # confirm permission + log = request.META['wsgi.errors'] + if request.method == 'POST': + form = forms.Empty(request.POST, request.FILES) + if form.is_valid(): + z = Zookeeper(handle=conf.handle, logstream=log) + 
z.delete_parent(obj.handle) + z.synchronize_ca() + return http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.Empty() + return render(request, 'app/object_confirm_delete.html', { + 'object': obj, + 'form': form, + 'parent_template': 'app/parent_detail.html' + }) + + +@handle_required +def parent_export(request, pk): + """Export XML repository request for a given parent.""" + conf = request.session['handle'] + parent = get_object_or_404(conf.parents, pk=pk) + z = Zookeeper(handle=conf.handle) + xml = z.generate_repository_request(parent) + return serve_xml(str(xml), '%s.repository' % parent.handle) + + +@handle_required +def child_import(request): + conf = request.session['handle'] + return generic_import(request, conf.children, Zookeeper.configure_child) + + +@handle_required +def child_add_prefix(request, pk): + logstream = request.META['wsgi.errors'] + conf = request.session['handle'] + child = get_object_or_404(conf.children, pk=pk) + if request.method == 'POST': + form = forms.AddNetForm(request.POST, child=child) + if form.is_valid(): + address_range = form.cleaned_data.get('address_range') + r = resource_range_ip.parse_str(address_range) + version = 'IPv%d' % r.version + child.address_ranges.create(start_ip=str(r.min), end_ip=str(r.max), + version=version) + Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() + return http.HttpResponseRedirect(child.get_absolute_url()) + else: + form = forms.AddNetForm(child=child) + return render(request, 'app/app_form.html', + {'object': child, 'form': form, 'form_title': 'Add Prefix'}) + + +@handle_required +def child_add_asn(request, pk): + logstream = request.META['wsgi.errors'] + conf = request.session['handle'] + child = get_object_or_404(conf.children, pk=pk) + if request.method == 'POST': + form = forms.AddASNForm(request.POST, child=child) + if form.is_valid(): + asns = form.cleaned_data.get('asns') + r = resource_range_as.parse_str(asns) + child.asns.create(start_as=r.min, 
end_as=r.max) + Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() + return http.HttpResponseRedirect(child.get_absolute_url()) + else: + form = forms.AddASNForm(child=child) + return render(request, 'app/app_form.html', + {'object': child, 'form': form, 'form_title': 'Add ASN'}) + + +@handle_required +def child_detail(request, pk): + child = get_object_or_404(request.session['handle'].children, pk=pk) + return render(request, 'app/child_detail.html', {'object': child}) + + +@handle_required +def child_edit(request, pk): + """Edit the end validity date for a resource handle's child.""" + log = request.META['wsgi.errors'] + conf = request.session['handle'] + child = get_object_or_404(conf.children.all(), pk=pk) + form_class = forms.ChildForm(child) + if request.method == 'POST': + form = form_class(request.POST, request.FILES) + if form.is_valid(): + child.valid_until = sundial.datetime.from_datetime(form.cleaned_data.get('valid_until')) + child.save() + # remove AS & prefixes that are not selected in the form + models.ChildASN.objects.filter(child=child).exclude(pk__in=form.cleaned_data.get('as_ranges')).delete() + models.ChildNet.objects.filter(child=child).exclude(pk__in=form.cleaned_data.get('address_ranges')).delete() + Zookeeper(handle=conf.handle, logstream=log).run_rpkid_now() + return http.HttpResponseRedirect(child.get_absolute_url()) + else: + form = form_class(initial={ + 'as_ranges': child.asns.all(), + 'address_ranges': child.address_ranges.all()}) + + return render(request, 'app/app_form.html', { + 'object': child, + 'form': form, + 'form_title': 'Edit Child: ' + child.handle, + }) + + +@handle_required +def child_response(request, pk): + """ + Export the XML file containing the output of the configure_child + to send back to the client. 
+ + """ + conf = request.session['handle'] + child = get_object_or_404(models.Child, issuer=conf, pk=pk) + z = Zookeeper(handle=conf.handle) + xml = z.generate_parental_response(child) + resp = serve_xml(str(xml), child.handle) + return resp + + +@handle_required +def child_delete(request, pk): + logstream = request.META['wsgi.errors'] + conf = request.session['handle'] + child = get_object_or_404(conf.children, pk=pk) + if request.method == 'POST': + form = forms.Empty(request.POST) + if form.is_valid(): + z = Zookeeper(handle=conf.handle, logstream=logstream) + z.delete_child(child.handle) + z.synchronize_ca() + return http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.Empty() + return render(request, 'app/object_confirm_delete.html', { + 'object': child, + 'form': form, + 'parent_template': 'app/child_detail.html' + }) + + +@handle_required +def roa_detail(request, pk): + conf = request.session['handle'] + obj = get_object_or_404(conf.roas, pk=pk) + return render(request, 'app/roa_detail.html', {'object': obj}) + + +def get_covered_routes(rng, max_prefixlen, asn): + """Returns a list of routeview.models.RouteOrigin objects which would + change validation status if a ROA were created with the parameters to this + function. + + A "newstatus" attribute is monkey-patched on the RouteOrigin objects which + can be used in the template. "status" remains the current validation + status of the object. + + """ + + # find all routes that match or are completed covered by the proposed new roa + qs = RouteOrigin.objects.filter( + prefix_min__gte=rng.min, + prefix_max__lte=rng.max + ) + routes = [] + for route in qs: + status = route.status + # tweak the validation status due to the presence of the + # new ROA. 
Don't need to check the prefix bounds here + # because all the matches routes will be covered by this + # new ROA + if status == 'unknown': + # if the route was previously unknown (no covering + # ROAs), then: + # if the AS matches, it is valid, otherwise invalid + if (route.asn != 0 and route.asn == asn and route.prefixlen <= max_prefixlen): + route.newstatus = 'valid' + else: + route.newstatus = 'invalid' + routes.append(route) + elif status == 'invalid': + # if the route was previously invalid, but this new ROA + # matches the ASN, it is now valid + if route.asn != 0 and route.asn == asn and route.prefixlen <= max_prefixlen: + route.newstatus = 'valid' + routes.append(route) + + return routes + + +@handle_required +def roa_create(request): + """Present the user with a form to create a ROA. + + Doesn't use the generic create_object() form because we need to + create both the ROARequest and ROARequestPrefix objects. + + """ + + conf = request.session['handle'] + if request.method == 'POST': + form = forms.ROARequest(request.POST, request.FILES, conf=conf) + if form.is_valid(): + asn = form.cleaned_data.get('asn') + rng = form._as_resource_range() # FIXME calling "private" method + max_prefixlen = int(form.cleaned_data.get('max_prefixlen')) + + routes = get_covered_routes(rng, max_prefixlen, asn) + + prefix = str(rng) + form = forms.ROARequestConfirm(initial={'asn': asn, + 'prefix': prefix, + 'max_prefixlen': max_prefixlen}) + return render(request, 'app/roarequest_confirm_form.html', + {'form': form, + 'asn': asn, + 'prefix': prefix, + 'max_prefixlen': max_prefixlen, + 'routes': routes}) + else: + # pull initial values from query parameters + d = {} + for s in ('asn', 'prefix'): + if s in request.GET: + d[s] = request.GET[s] + form = forms.ROARequest(initial=d) + + return render(request, 'app/roarequest_form.html', {'form': form}) + + +class ROARequestFormSet(BaseFormSet): + """There is no way to pass arbitrary keyword arguments to the form + constructor, so we 
have to override BaseFormSet to allow it. + + """ + def __init__(self, *args, **kwargs): + self.conf = kwargs.pop('conf') + super(ROARequestFormSet, self).__init__(*args, **kwargs) + + def _construct_forms(self): + self.forms = [] + for i in xrange(self.total_form_count()): + self.forms.append(self._construct_form(i, conf=self.conf)) + + +def split_with_default(s): + xs = s.split(',') + if len(xs) == 1: + return xs[0], None + return xs + + +@handle_required +def roa_create_multi(request): + """version of roa_create that uses a formset to allow entry of multiple + roas on a single page. + + ROAs can be specified in the GET query string, as such: + + ?roa=prefix,asn + + Mulitple ROAs may be specified: + + ?roa=prefix,asn+roa=prefix2,asn2 + + If an IP range is specified, it will be automatically split into multiple + prefixes: + + ?roa=1.1.1.1-2.2.2.2,42 + + The ASN may optionally be omitted. + + """ + + conf = request.session['handle'] + if request.method == 'GET': + init = [] + for x in request.GET.getlist('roa'): + rng, asn = split_with_default(x) + rng = resource_range_ip.parse_str(rng) + if rng.can_be_prefix: + init.append({'asn': asn, 'prefix': str(rng)}) + else: + v = [] + rng.chop_into_prefixes(v) + init.extend([{'asn': asn, 'prefix': str(p)} for p in v]) + formset = formset_factory(forms.ROARequest, formset=ROARequestFormSet, + can_delete=True)(initial=init, conf=conf) + elif request.method == 'POST': + formset = formset_factory(forms.ROARequest, formset=ROARequestFormSet, + extra=0, can_delete=True)(request.POST, request.FILES, conf=conf) + if formset.is_valid(): + routes = [] + v = [] + # as of Django 1.4.5 we still can't use formset.cleaned_data + # because deleted forms are not excluded, which causes an + # AttributeError to be raised. 
+ for form in formset: + if hasattr(form, 'cleaned_data') and form.cleaned_data: # exclude empty forms + asn = form.cleaned_data.get('asn') + rng = resource_range_ip.parse_str(form.cleaned_data.get('prefix')) + max_prefixlen = int(form.cleaned_data.get('max_prefixlen')) + # FIXME: This won't do the right thing in the event that a + # route is covered by multiple ROAs created in the form. + # You will see duplicate entries, each with a potentially + # different validation status. + routes.extend(get_covered_routes(rng, max_prefixlen, asn)) + v.append({'prefix': str(rng), 'max_prefixlen': max_prefixlen, + 'asn': asn}) + # if there were no rows, skip the confirmation step + if v: + formset = formset_factory(forms.ROARequestConfirm, extra=0)(initial=v) + return render(request, 'app/roarequest_confirm_multi_form.html', + {'routes': routes, 'formset': formset, 'roas': v}) + return render(request, 'app/roarequest_multi_form.html', + {'formset': formset}) + + +@handle_required +def roa_create_confirm(request): + """This function is called when the user confirms the creation of a ROA + request. It is responsible for updating the IRDB. + + """ + conf = request.session['handle'] + log = request.META['wsgi.errors'] + if request.method == 'POST': + form = forms.ROARequestConfirm(request.POST, request.FILES) + if form.is_valid(): + asn = form.cleaned_data.get('asn') + prefix = form.cleaned_data.get('prefix') + rng = resource_range_ip.parse_str(prefix) + max_prefixlen = form.cleaned_data.get('max_prefixlen') + # Always create ROA requests with a single prefix. + # https://trac.rpki.net/ticket/32 + roa = models.ROARequest.objects.create(issuer=conf, asn=asn) + v = 'IPv%d' % rng.version + roa.prefixes.create(version=v, prefix=str(rng.min), + prefixlen=rng.prefixlen(), + max_prefixlen=max_prefixlen) + Zookeeper(handle=conf.handle, logstream=log).run_rpkid_now() + return http.HttpResponseRedirect(reverse(dashboard)) + # What should happen when the submission form isn't valid? 
For now + # just fall through and redirect back to the ROA creation form + return http.HttpResponseRedirect(reverse(roa_create)) + + +@handle_required +def roa_create_multi_confirm(request): + """This function is called when the user confirms the creation of a ROA + request. It is responsible for updating the IRDB. + + """ + conf = request.session['handle'] + log = request.META['wsgi.errors'] + if request.method == 'POST': + formset = formset_factory(forms.ROARequestConfirm, extra=0)(request.POST, request.FILES) + if formset.is_valid(): + for cleaned_data in formset.cleaned_data: + asn = cleaned_data.get('asn') + prefix = cleaned_data.get('prefix') + rng = resource_range_ip.parse_str(prefix) + max_prefixlen = cleaned_data.get('max_prefixlen') + # Always create ROA requests with a single prefix. + # https://trac.rpki.net/ticket/32 + roa = models.ROARequest.objects.create(issuer=conf, asn=asn) + v = 'IPv%d' % rng.version + roa.prefixes.create(version=v, prefix=str(rng.min), + prefixlen=rng.prefixlen(), + max_prefixlen=max_prefixlen) + Zookeeper(handle=conf.handle, logstream=log).run_rpkid_now() + return redirect(dashboard) + # What should happen when the submission form isn't valid? For now + # just fall through and redirect back to the ROA creation form + return http.HttpResponseRedirect(reverse(roa_create_multi)) + + +@handle_required +def roa_delete(request, pk): + """Handles deletion of a single ROARequest object. + + Uses a form for double confirmation, displaying how the route + validation status may change as a result. 
+ + """ + + conf = request.session['handle'] + roa = get_object_or_404(conf.roas, pk=pk) + if request.method == 'POST': + roa.delete() + Zookeeper(handle=conf.handle).run_rpkid_now() + return redirect(reverse(dashboard)) + + ### Process GET ### + + # note: assumes we only generate one prefix per ROA + roa_prefix = roa.prefixes.all()[0] + rng = roa_prefix.as_resource_range() + + routes = [] + for route in roa.routes: + # select all roas which cover this route + # excluding the current roa + # note: we can't identify the exact ROA here, because we only know what + # was requested to rpkid + roas = route.roas.exclude( + asid=roa.asn, + prefixes__prefix_min=rng.min, + prefixes__prefix_max=rng.max, + prefixes__max_length=roa_prefix.max_prefixlen + ) + + # subselect exact match + if route.asn != 0 and roas.filter(asid=route.asn, + prefixes__max_length__gte=route.prefixlen).exists(): + route.newstatus = 'valid' + elif roas.exists(): + route.newstatus = 'invalid' + else: + route.newstatus = 'unknown' + # we may want to ignore routes for which there is no status change, + # but the user may want to see that nothing has changed explicitly + routes.append(route) + + return render(request, 'app/roarequest_confirm_delete.html', + {'object': roa, 'routes': routes}) + + +@handle_required +def roa_clone(request, pk): + conf = request.session['handle'] + roa = get_object_or_404(conf.roas, pk=pk) + return redirect( + reverse(roa_create_multi) + "?roa=" + str(roa.prefixes.all()[0].as_roa_prefix()) + ) + + +@handle_required +def roa_import(request): + """Import CSV containing ROA declarations.""" + if request.method == 'POST': + form = forms.ImportCSVForm(request.POST, request.FILES) + if form.is_valid(): + import tempfile + tmp = tempfile.NamedTemporaryFile(suffix='.csv', prefix='roas', delete=False) + tmp.write(request.FILES['csv'].read()) + tmp.close() + z = Zookeeper(handle=request.session['handle']) + z.load_roa_requests(tmp.name) + z.run_rpkid_now() + os.unlink(tmp.name) + 
messages.success(request, 'Successfully imported ROAs.') + return redirect(dashboard) + else: + form = forms.ImportCSVForm() + return render(request, 'app/import_resource_form.html', { + 'form_title': 'Import ROAs from CSV', + 'form': form, + 'cancel_url': reverse(dashboard) + }) + + +@handle_required +def roa_export(request): + """Export CSV containing ROA declarations.""" + # FIXME: remove when Zookeeper can do this + f = cStringIO.StringIO() + csv_writer = csv.writer(f, delimiter=' ') + conf = request.session['handle'] + # each roa prefix gets a unique group so rpkid will issue separate roas + for group, roapfx in enumerate(ROARequestPrefix.objects.filter(roa_request__issuer=conf)): + csv_writer.writerow([str(roapfx.as_roa_prefix()), roapfx.roa_request.asn, '%s-%d' % (conf.handle, group)]) + resp = http.HttpResponse(f.getvalue(), mimetype='application/csv') + resp['Content-Disposition'] = 'attachment; filename=roas.csv' + return resp + + +class GhostbusterDetailView(DetailView): + def get_queryset(self): + return self.request.session['handle'].ghostbusters + + +@handle_required +def ghostbuster_delete(request, pk): + conf = request.session['handle'] + logstream = request.META['wsgi.errors'] + obj = get_object_or_404(conf.ghostbusters, pk=pk) + if request.method == 'POST': + form = forms.Empty(request.POST, request.FILES) + if form.is_valid(): + obj.delete() + Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() + return http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.Empty(request.POST, request.FILES) + return render(request, 'app/object_confirm_delete.html', { + 'object': obj, + 'form': form, + 'parent_template': 'app/ghostbusterrequest_detail.html' + }) + + +@handle_required +def ghostbuster_create(request): + conf = request.session['handle'] + logstream = request.META['wsgi.errors'] + if request.method == 'POST': + form = forms.GhostbusterRequestForm(request.POST, request.FILES, + conf=conf) + if form.is_valid(): + obj = 
form.save(commit=False) + obj.vcard = glue.ghostbuster_to_vcard(obj) + obj.save() + Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() + return http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.GhostbusterRequestForm(conf=conf) + return render(request, 'app/app_form.html', + {'form': form, 'form_title': 'New Ghostbuster Request'}) + + +@handle_required +def ghostbuster_edit(request, pk): + conf = request.session['handle'] + obj = get_object_or_404(conf.ghostbusters, pk=pk) + logstream = request.META['wsgi.errors'] + if request.method == 'POST': + form = forms.GhostbusterRequestForm(request.POST, request.FILES, + conf=conf, instance=obj) + if form.is_valid(): + obj = form.save(commit=False) + obj.vcard = glue.ghostbuster_to_vcard(obj) + obj.save() + Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() + return http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.GhostbusterRequestForm(conf=conf, instance=obj) + return render(request, 'app/app_form.html', + {'form': form, 'form_title': 'Edit Ghostbuster Request'}) + + +@handle_required +def refresh(request): + """ + Query rpkid, update the db, and redirect back to the dashboard. + + """ + glue.list_received_resources(request.META['wsgi.errors'], + request.session['handle']) + return http.HttpResponseRedirect(reverse(dashboard)) + + +@handle_required +def route_view(request): + """ + Display a list of global routing table entries which match resources + listed in received certificates. 
+ + """ + conf = request.session['handle'] + count = request.GET.get('count', 25) + page = request.GET.get('page', 1) + + paginator = Paginator(conf.routes, count) + try: + routes = paginator.page(page) + except InvalidPage: + # page was empty, or page number was invalid + routes = [] + ts = dict((attr['name'], attr['ts']) for attr in models.Timestamp.objects.values()) + return render(request, 'app/routes_view.html', + {'routes': routes, 'timestamp': ts}) + + +def route_detail(request, pk): + """Show a list of ROAs that match a given IPv4 route.""" + route = get_object_or_404(models.RouteOrigin, pk=pk) + # when running rootd, viewing the 0.0.0.0/0 route will cause a fetch of all + # roas, so we paginate here, even though in the general case the number of + # objects will be small enough to fit a single page + count = request.GET.get('count', 25) + page = request.GET.get('page', 1) + paginator = Paginator(route.roa_prefixes.all(), count) + return render(request, 'app/route_detail.html', { + 'object': route, + 'roa_prefixes': paginator.page(page), + }) + + +def route_suggest(request): + """Handles POSTs from the route view and redirects to the ROA creation + page based on selected route objects. The form should contain elements of + the form "pk-NUM" where NUM is the RouteOrigin object id. + + """ + if request.method == 'POST': + routes = [] + for pk in request.POST.iterkeys(): + logger.debug(pk) + if pk.startswith("pk-"): + n = int(pk[3:]) + routes.append(n) + qs = RouteOrigin.objects.filter(pk__in=routes) + s = [] + for r in qs: + s.append('roa=%s/%d,%d' % (str(r.prefix_min), r.prefixlen, r.asn)) + p = '&'.join(s) + return redirect(reverse(roa_create_multi) + '?' 
+ p) + + +@handle_required +def repository_detail(request, pk): + conf = request.session['handle'] + return render(request, + 'app/repository_detail.html', + {'object': get_object_or_404(conf.repositories, pk=pk)}) + + +@handle_required +def repository_delete(request, pk): + log = request.META['wsgi.errors'] + conf = request.session['handle'] + # Ensure the repository being deleted belongs to the current user. + obj = get_object_or_404(models.Repository, issuer=conf, pk=pk) + if request.method == 'POST': + form = forms.Empty(request.POST, request.FILES) + if form.is_valid(): + z = Zookeeper(handle=conf.handle, logstream=log) + z.delete_repository(obj.handle) + z.synchronize_ca() + return http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.Empty() + return render(request, 'app/object_confirm_delete.html', { + 'object': obj, + 'form': form, + 'parent_template': + 'app/repository_detail.html', + }) + + +@handle_required +def repository_import(request): + """Import XML response file from repository operator.""" + return generic_import(request, + models.Repository.objects, + Zookeeper.configure_repository, + form_class=forms.ImportRepositoryForm, + post_import_redirect=reverse(dashboard)) + + +@superuser_required +def client_list(request): + """display a list of all repository client (irdb.models.Client)""" + + return render(request, 'app/client_list.html', { + 'object_list': models.Client.objects.all() + }) + + +@superuser_required +def client_detail(request, pk): + return render(request, 'app/client_detail.html', + {'object': get_object_or_404(models.Client, pk=pk)}) + + +@superuser_required +def client_delete(request, pk): + log = request.META['wsgi.errors'] + obj = get_object_or_404(models.Client, pk=pk) + if request.method == 'POST': + form = forms.Empty(request.POST, request.FILES) + if form.is_valid(): + z = Zookeeper(logstream=log) + z.delete_publication_client(obj.handle) + z.synchronize_pubd() + return 
http.HttpResponseRedirect(reverse(dashboard)) + else: + form = forms.Empty() + return render(request, 'app/object_confirm_delete.html', { + 'object': obj, + 'form': form, + 'parent_template': 'app/client_detail.html' + }) + + +@superuser_required +def client_import(request): + return generic_import(request, models.Client.objects, + Zookeeper.configure_publication_client, + form_class=forms.ImportClientForm, + post_import_redirect=reverse(dashboard)) + + +@superuser_required +def client_export(request, pk): + """Return the XML file resulting from a configure_publication_client + request. + + """ + client = get_object_or_404(models.Client, pk=pk) + z = Zookeeper() + xml = z.generate_repository_response(client) + return serve_xml(str(xml), '%s.repo' % z.handle) + + +### Routines for managing resource handles serviced by this server + +@superuser_required +def resource_holder_list(request): + """Display a list of all the RPKI handles managed by this server.""" + return render(request, 'app/resource_holder_list.html', { + 'object_list': models.Conf.objects.all() + }) + + +@superuser_required +def resource_holder_edit(request, pk): + """Display a list of all the RPKI handles managed by this server.""" + conf = get_object_or_404(models.Conf, pk=pk) + if request.method == 'POST': + form = forms.ResourceHolderForm(request.POST, request.FILES) + if form.is_valid(): + models.ConfACL.objects.filter(conf=conf).delete() + for user in form.cleaned_data.get('users'): + models.ConfACL.objects.create(user=user, conf=conf) + return redirect(resource_holder_list) + else: + users = [acl.user for acl in models.ConfACL.objects.filter(conf=conf).all()] + form = forms.ResourceHolderForm(initial={ + 'users': users + }) + return render(request, 'app/app_form.html', { + 'form_title': "Edit Resource Holder: " + conf.handle, + 'form': form, + 'cancel_url': reverse(resource_holder_list) + }) + + +@superuser_required +def resource_holder_delete(request, pk): + conf = 
get_object_or_404(models.Conf, pk=pk) + log = request.META['wsgi.errors'] + if request.method == 'POST': + form = forms.Empty(request.POST) + if form.is_valid(): + z = Zookeeper(handle=conf.handle, logstream=log) + z.delete_self() + z.synchronize_deleted_ca() + return redirect(resource_holder_list) + else: + form = forms.Empty() + return render(request, 'app/app_confirm_delete.html', { + 'form_title': 'Delete Resource Holder: ' + conf.handle, + 'form': form, + 'cancel_url': reverse(resource_holder_list) + }) + + +@superuser_required +def resource_holder_create(request): + log = request.META['wsgi.errors'] + if request.method == 'POST': + form = forms.ResourceHolderCreateForm(request.POST, request.FILES) + if form.is_valid(): + handle = form.cleaned_data.get('handle') + parent = form.cleaned_data.get('parent') + + zk_child = Zookeeper(handle=handle, logstream=log) + identity_xml = zk_child.initialize_resource_bpki() + if parent: + # FIXME etree_wrapper should allow us to deal with file objects + t = NamedTemporaryFile(delete=False) + t.close() + + identity_xml.save(t.name) + zk_parent = Zookeeper(handle=parent.handle, logstream=log) + parent_response, _ = zk_parent.configure_child(t.name) + parent_response.save(t.name) + zk_parent.synchronize_ca() + repo_req, _ = zk_child.configure_parent(t.name) + repo_req.save(t.name) + repo_resp, _ = zk_parent.configure_publication_client(t.name) + repo_resp.save(t.name) + zk_parent.synchronize_pubd() + zk_child.configure_repository(t.name) + os.remove(t.name) + zk_child.synchronize_ca() + return redirect(resource_holder_list) + else: + form = forms.ResourceHolderCreateForm() + return render(request, 'app/app_form.html', { + 'form': form, + 'form_title': 'Create Resource Holder', + 'cancel_url': reverse(resource_holder_list) + }) + + +### views for managing user logins to the web interface + +@superuser_required +def user_create(request): + if request.method == 'POST': + form = forms.UserCreateForm(request.POST, request.FILES) + 
if form.is_valid(): + username = form.cleaned_data.get('username') + pw = form.cleaned_data.get('password') + email = form.cleaned_data.get('email') + user = User.objects.create_user(username, email, pw) + for conf in form.cleaned_data.get('resource_holders'): + models.ConfACL.objects.create(user=user, conf=conf) + return redirect(user_list) + else: + form = forms.UserCreateForm() + + return render(request, 'app/app_form.html', { + 'form': form, + 'form_title': 'Create User', + 'cancel_url': reverse(user_list), + }) + + +@superuser_required +def user_list(request): + """Display a list of all the RPKI handles managed by this server.""" + return render(request, 'app/user_list.html', { + 'object_list': User.objects.all() + }) + + +@superuser_required +def user_delete(request, pk): + user = get_object_or_404(User, pk=pk) + if request.method == 'POST': + form = forms.Empty(request.POST, request.FILES) + if form.is_valid(): + user.delete() + return redirect(user_list) + else: + form = forms.Empty() + return render(request, 'app/app_confirm_delete.html', { + 'form_title': 'Delete User: ' + user.username, + 'form': form, + 'cancel_url': reverse(user_list) + }) + + +@superuser_required +def user_edit(request, pk): + user = get_object_or_404(User, pk=pk) + if request.method == 'POST': + form = forms.UserEditForm(request.POST) + if form.is_valid(): + pw = form.cleaned_data.get('pw') + if pw: + user.set_password(pw) + user.email = form.cleaned_data.get('email') + user.save() + models.ConfACL.objects.filter(user=user).delete() + handles = form.cleaned_data.get('resource_holders') + for conf in handles: + models.ConfACL.objects.create(user=user, conf=conf) + return redirect(user_list) + else: + form = forms.UserEditForm(initial={ + 'email': user.email, + 'resource_holders': models.Conf.objects.filter(confacl__user=user).all() + }) + return render(request, 'app/app_form.html', { + 'form': form, + 'form_title': 'Edit User: ' + user.username, + 'cancel_url': reverse(user_list) + }) 
+ + +class AlertListView(ListView): + # this nonsense is required to decorate CBVs + @method_decorator(handle_required) + def dispatch(self, request, *args, **kwargs): + return super(AlertListView, self).dispatch(request, *args, **kwargs) + + def get_queryset(self, **kwargs): + conf = self.request.session['handle'] + return conf.alerts.all() + + +class AlertDetailView(DetailView): + # this nonsense is required to decorate CBVs + @method_decorator(handle_required) + def dispatch(self, request, *args, **kwargs): + return super(AlertDetailView, self).dispatch(request, *args, **kwargs) + + def get_queryset(self, **kwargs): + conf = self.request.session['handle'] + return conf.alerts.all() + + def get_object(self, **kwargs): + obj = super(AlertDetailView, self).get_object(**kwargs) + # mark alert as read by the user + obj.seen = True + obj.save() + return obj + + +class AlertDeleteView(DeleteView): + success_url = reverse_lazy('alert-list') + + # this nonsense is required to decorate CBVs + @method_decorator(handle_required) + def dispatch(self, request, *args, **kwargs): + return super(AlertDeleteView, self).dispatch(request, *args, **kwargs) + + def get_queryset(self, **kwargs): + conf = self.request.session['handle'] + return conf.alerts.all() + + +@handle_required +def alert_clear_all(request): + """Clear all alerts associated with the current resource holder.""" + if request.method == 'POST': + form = forms.Empty(request.POST, request.FILES) + if form.is_valid(): + # delete alerts + request.session['handle'].clear_alerts() + return redirect('alert-list') + else: + form = forms.Empty() + return render(request, 'app/alert_confirm_clear.html', {'form': form}) diff --git a/rpki/gui/cacheview/__init__.py b/rpki/gui/cacheview/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/rpki/gui/cacheview/forms.py b/rpki/gui/cacheview/forms.py new file mode 100644 index 00000000..28b8ff24 --- /dev/null +++ b/rpki/gui/cacheview/forms.py @@ -0,0 +1,51 @@ +# 
Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from django import forms + +from rpki.gui.cacheview.misc import parse_ipaddr +from rpki.exceptions import BadIPResource +from rpki.resource_set import resource_range_as + + +class SearchForm(forms.Form): + asn = forms.CharField(required=False, help_text='AS or range', label='AS') + addr = forms.CharField(required=False, max_length=40, help_text='range/CIDR', label='IP Address') + + def clean(self): + asn = self.cleaned_data.get('asn') + addr = self.cleaned_data.get('addr') + if (asn and addr) or ((not asn) and (not addr)): + raise forms.ValidationError, 'Please specify either an AS or IP range, not both' + + if asn: + try: + resource_range_as.parse_str(asn) + except ValueError: + raise forms.ValidationError, 'invalid AS range' + + if addr: + try: + parse_ipaddr(addr) + except BadIPResource: + raise forms.ValidationError, 'invalid IP address range/prefix' + + return self.cleaned_data + + +class SearchForm2(forms.Form): + resource = forms.CharField(required=True) diff --git a/rpki/gui/cacheview/misc.py b/rpki/gui/cacheview/misc.py new file mode 100644 index 00000000..9a69645c --- /dev/null +++ 
b/rpki/gui/cacheview/misc.py @@ -0,0 +1,30 @@ +# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +from rpki.resource_set import resource_range_ipv4, resource_range_ipv6 +from rpki.exceptions import BadIPResource + +def parse_ipaddr(s): + # resource_set functions only accept str + if isinstance(s, unicode): + s = s.encode() + s = s.strip() + try: + r = resource_range_ipv4.parse_str(s) + return 4, r + except BadIPResource: + r = resource_range_ipv6.parse_str(s) + return 6, r + +# vim:sw=4 ts=8 expandtab diff --git a/rpki/gui/cacheview/models.py b/rpki/gui/cacheview/models.py new file mode 100644 index 00000000..c3ee8421 --- /dev/null +++ b/rpki/gui/cacheview/models.py @@ -0,0 +1,237 @@ +# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS.
IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from datetime import datetime +import time + +from django.db import models +from django.core.urlresolvers import reverse + +import rpki.resource_set +import rpki.gui.models + + +class TelephoneField(models.CharField): + def __init__(self, *args, **kwargs): + kwargs['max_length'] = 255 + models.CharField.__init__(self, *args, **kwargs) + + +class AddressRange(rpki.gui.models.PrefixV4): + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.cacheview.views.addressrange_detail', [str(self.pk)]) + + +class AddressRangeV6(rpki.gui.models.PrefixV6): + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.cacheview.views.addressrange_detail_v6', + [str(self.pk)]) + + +class ASRange(rpki.gui.models.ASN): + @models.permalink + def get_absolute_url(self): + return ('rpki.gui.cacheview.views.asrange_detail', [str(self.pk)]) + +kinds = list(enumerate(('good', 'warn', 'bad'))) +kinds_dict = dict((v, k) for k, v in kinds) + + +class ValidationLabel(models.Model): + """ + Represents a specific error condition defined in the rcynic XML + output file. + """ + label = models.CharField(max_length=79, db_index=True, unique=True) + status = models.CharField(max_length=255) + kind = models.PositiveSmallIntegerField(choices=kinds) + + def __unicode__(self): + return self.label + + +class RepositoryObject(models.Model): + """ + Represents a globally unique RPKI repository object, specified by its URI. 
+ """ + uri = models.URLField(unique=True, db_index=True) + +generations = list(enumerate(('current', 'backup'))) +generations_dict = dict((val, key) for (key, val) in generations) + + +class ValidationStatus(models.Model): + timestamp = models.DateTimeField() + generation = models.PositiveSmallIntegerField(choices=generations, null=True) + status = models.ForeignKey(ValidationLabel) + repo = models.ForeignKey(RepositoryObject, related_name='statuses') + + +class SignedObject(models.Model): + """ + Abstract class to hold common metadata for all signed objects. + The signing certificate is ommitted here in order to give a proper + value for the 'related_name' attribute. + """ + repo = models.ForeignKey(RepositoryObject, related_name='cert', unique=True) + + # on-disk file modification time + mtime = models.PositiveIntegerField(default=0) + + # SubjectName + name = models.CharField(max_length=255) + + # value from the SKI extension + keyid = models.CharField(max_length=60, db_index=True) + + # validity period from EE cert which signed object + not_before = models.DateTimeField() + not_after = models.DateTimeField() + + def mtime_as_datetime(self): + """ + convert the local timestamp to UTC and convert to a datetime object + """ + return datetime.utcfromtimestamp(self.mtime + time.timezone) + + def status_id(self): + """ + Returns a HTML class selector for the current object based on its validation status. + The selector is chosen based on the current generation only. If there is any bad status, + return bad, else if there are any warn status, return warn, else return good. + """ + for x in reversed(kinds): + if self.repo.statuses.filter(generation=generations_dict['current'], status__kind=x[0]): + return x[1] + return None # should not happen + + def __unicode__(self): + return u'%s' % self.name + + +class Cert(SignedObject): + """ + Object representing a resource certificate. 
+ """ + addresses = models.ManyToManyField(AddressRange, related_name='certs') + addresses_v6 = models.ManyToManyField(AddressRangeV6, related_name='certs') + asns = models.ManyToManyField(ASRange, related_name='certs') + issuer = models.ForeignKey('self', related_name='children', null=True) + sia = models.CharField(max_length=255) + + def get_absolute_url(self): + return reverse('cert-detail', args=[str(self.pk)]) + + def get_cert_chain(self): + """Return a list containing the complete certificate chain for this + certificate.""" + cert = self + x = [cert] + while cert != cert.issuer: + cert = cert.issuer + x.append(cert) + x.reverse() + return x + cert_chain = property(get_cert_chain) + + +class ROAPrefix(models.Model): + "Abstract base class for ROA mixin." + + max_length = models.PositiveSmallIntegerField() + + class Meta: + abstract = True + + def as_roa_prefix(self): + "Return value as a rpki.resource_set.roa_prefix_ip object." + rng = self.as_resource_range() + return self.roa_cls(rng.min, rng.prefixlen(), self.max_length) + + def __unicode__(self): + p = self.as_resource_range() + if p.prefixlen() == self.max_length: + return str(p) + return '%s-%s' % (str(p), self.max_length) + + +# ROAPrefix is declared first, so subclass picks up __unicode__ from it. +class ROAPrefixV4(ROAPrefix, rpki.gui.models.PrefixV4): + "One v4 prefix in a ROA." + + roa_cls = rpki.resource_set.roa_prefix_ipv4 + + @property + def routes(self): + """return all routes covered by this roa prefix""" + return RouteOrigin.objects.filter(prefix_min__gte=self.prefix_min, + prefix_max__lte=self.prefix_max) + + class Meta: + ordering = ('prefix_min',) + + +# ROAPrefix is declared first, so subclass picks up __unicode__ from it. +class ROAPrefixV6(ROAPrefix, rpki.gui.models.PrefixV6): + "One v6 prefix in a ROA." 
+ + roa_cls = rpki.resource_set.roa_prefix_ipv6 + + class Meta: + ordering = ('prefix_min',) + + +class ROA(SignedObject): + asid = models.PositiveIntegerField() + prefixes = models.ManyToManyField(ROAPrefixV4, related_name='roas') + prefixes_v6 = models.ManyToManyField(ROAPrefixV6, related_name='roas') + issuer = models.ForeignKey('Cert', related_name='roas') + + def get_absolute_url(self): + return reverse('roa-detail', args=[str(self.pk)]) + + class Meta: + ordering = ('asid',) + + def __unicode__(self): + return u'ROA for AS%d' % self.asid + + +class Ghostbuster(SignedObject): + full_name = models.CharField(max_length=40) + email_address = models.EmailField(blank=True, null=True) + organization = models.CharField(blank=True, null=True, max_length=255) + telephone = TelephoneField(blank=True, null=True) + issuer = models.ForeignKey('Cert', related_name='ghostbusters') + + def get_absolute_url(self): + # note that ghostbuster-detail is different from gbr-detail! sigh + return reverse('ghostbuster-detail', args=[str(self.pk)]) + + def __unicode__(self): + if self.full_name: + return self.full_name + if self.organization: + return self.organization + if self.email_address: + return self.email_address + return self.telephone + + +from rpki.gui.routeview.models import RouteOrigin diff --git a/rpki/gui/cacheview/templates/cacheview/addressrange_detail.html b/rpki/gui/cacheview/templates/cacheview/addressrange_detail.html new file mode 100644 index 00000000..76edc1ba --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/addressrange_detail.html @@ -0,0 +1,18 @@ +{% extends "cacheview/cacheview_base.html" %} + +{% block content %} +

      {% block title %}IP Range Detail{% endblock %}

      + +

      +IP Range: {{ object }} +

      + +

      Covered by the following resource certs:

      + +
        +{% for cert in object.certs.all %} +
      • {{ cert }}
      • +{% endfor %} +
      + +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/cacheview_base.html b/rpki/gui/cacheview/templates/cacheview/cacheview_base.html new file mode 100644 index 00000000..ec71d740 --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/cacheview_base.html @@ -0,0 +1,10 @@ +{% extends "base.html" %} +{% load url from future %} + +{% block sidebar %} +
      + {% csrf_token %} + + +
      +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/cert_detail.html b/rpki/gui/cacheview/templates/cacheview/cert_detail.html new file mode 100644 index 00000000..256e7780 --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/cert_detail.html @@ -0,0 +1,105 @@ +{% extends "cacheview/signedobject_detail.html" %} + +{% block title %} +Resource Certificate Detail +{% endblock %} + +{% block detail %} + +

      RFC3779 Resources

      + + + + + + + + + + + +
      AS RangesIP Ranges
      +
        + {% for asn in object.asns.all %} +
      • {{ asn }}
      • + {% endfor %} +
      +
      +
        + {% for rng in object.addresses.all %} +
      • {{ rng }}
      • + {% endfor %} +
      +
      + +
      +

      Issued Objects

      +
        + +{% if object.ghostbusters.all %} +
      • +

        Ghostbusters

        + + + + + + + +{% for g in object.ghostbusters.all %} + + + + + +{% endfor %} + +
        NameExpires
        {{ g }}{{ g.not_after }}
        +{% endif %} + +{% if object.roas.all %} +
      • +

        ROAs

        + + + + + + {% for roa in object.roas.all %} + {% for pfx in roa.prefixes.all %} + + + + + + + {% endfor %} + {% endfor %} + +
        #PrefixASExpires
        #{{ pfx }}{{ roa.asid }}{{ roa.not_after }}
        +{% endif %} + +{% if object.children.all %} +
      • +

        Children

        + + + + + + + {% for child in object.children.all %} + + + + + {% endfor %} + +
        NameExpires
        {{ child.name }}{{ child.not_after }}
        +{% endif %} + +
      + +
      + +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/ghostbuster_detail.html b/rpki/gui/cacheview/templates/cacheview/ghostbuster_detail.html new file mode 100644 index 00000000..4215f757 --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/ghostbuster_detail.html @@ -0,0 +1,13 @@ +{% extends "cacheview/signedobject_detail.html" %} + +{% block title %}Ghostbuster Detail{% endblock %} + +{% block detail %} +

      + + + + + +
      Full Name{{ object.full_name }}
      Organization{{ object.organization }}
      Email{{ object.email_address }}
      Telephone{{ object.telephone }}
      +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/global_summary.html b/rpki/gui/cacheview/templates/cacheview/global_summary.html new file mode 100644 index 00000000..0dbd0ffc --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/global_summary.html @@ -0,0 +1,26 @@ +{% extends "cacheview/cacheview_base.html" %} + +{% block content %} +

      + + + + + + + + + + + {% for r in roots %} + + + + + + {% endfor %} + +
      NameExpiresURI
      {{ r.name }}{{ r.not_after }}{{ r.repo.uri }}
      +{% endblock content %} diff --git a/rpki/gui/cacheview/templates/cacheview/query_result.html b/rpki/gui/cacheview/templates/cacheview/query_result.html new file mode 100644 index 00000000..0694c531 --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/query_result.html @@ -0,0 +1,21 @@ +{% extends "cacheview/cacheview_base.html" %} + +{% block content %} + +

      {% block title %}Query Results{% endblock %}

      + + + + {% for object in object_list %} + + + + + + + {% endfor %} +
      PrefixASValidUntil
      {{ object.0 }}{{ object.1.asid }}{{ object.1.ok }}{{ object.1.not_after }}
      + +

      new query

      + +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/roa_detail.html b/rpki/gui/cacheview/templates/cacheview/roa_detail.html new file mode 100644 index 00000000..39cc547b --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/roa_detail.html @@ -0,0 +1,18 @@ +{% extends "cacheview/signedobject_detail.html" %} + +{% block title %}ROA Detail{% endblock %} + +{% block detail %} +

      + + +
      AS{{ object.asid }}
      + +

      Prefixes

      + +
        +{% for pfx in object.prefixes.all %} +
      • {{ pfx }} +{% endfor %} +
      +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/search_form.html b/rpki/gui/cacheview/templates/cacheview/search_form.html new file mode 100644 index 00000000..1141615d --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/search_form.html @@ -0,0 +1,17 @@ +{% extends "cacheview/cacheview_base.html" %} + +{% block title %} +{{ search_type }} Search +{% endblock %} + +{% block content %} + +

      {{search_type}} Search

      + +
      + {% csrf_token %} + {{ form.as_p }} + +
      + +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/search_result.html b/rpki/gui/cacheview/templates/cacheview/search_result.html new file mode 100644 index 00000000..7cbf852e --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/search_result.html @@ -0,0 +1,42 @@ +{% extends "cacheview/cacheview_base.html" %} + +{% block content %} + + + +

      Matching Resource Certificates

      +{% if certs %} +
        +{% for cert in certs %} +
      • {{ cert }} +{% endfor %} +
      +{% else %} +

      none

      +{% endif %} + +

      Matching ROAs

      +{% if roas %} + + + + + + + +{% for roa in roas %} + + + + + +{% endfor %} + +
      #PrefixAS
      #{{ roa.prefixes.all.0 }}{{ roa.asid }}
      +{% else %} +

      none

      +{% endif %} + +{% endblock %} diff --git a/rpki/gui/cacheview/templates/cacheview/signedobject_detail.html b/rpki/gui/cacheview/templates/cacheview/signedobject_detail.html new file mode 100644 index 00000000..22ae3d27 --- /dev/null +++ b/rpki/gui/cacheview/templates/cacheview/signedobject_detail.html @@ -0,0 +1,58 @@ +{% extends "cacheview/cacheview_base.html" %} + +{% block content %} + + +

      Cert Info

      + + + + {% if object.sia %} + + {% endif %} + + +
      Subject Name{{ object.name }}
      SKI{{ object.keyid }}
      SIA{{ object.sia }}
      Not Before{{ object.not_before }}
      Not After{{ object.not_after }}
      + +

      Metadata

      + + + + +
      URI{{ object.repo.uri }}
      Last Modified{{ object.mtime_as_datetime|date:"DATETIME_FORMAT" }}
      + +

      Validation Status

      + + + + + + {% for status in object.repo.statuses.all %} + + {% endfor %} + +
      TimestampGenerationStatus
      {{ status.timestamp }}{{ status.get_generation_display }}{{ status.status.status }}
      + +

      X.509 Certificate Chain

      + + + + + + + +{% for cert in chain %} + + + + +{% endfor %} + + +
      DepthName
      {{ cert.0 }}{{ cert.1.name }}
      + +{% block detail %}{% endblock %} + +{% endblock %} diff --git a/rpki/gui/cacheview/tests.py b/rpki/gui/cacheview/tests.py new file mode 100644 index 00000000..2247054b --- /dev/null +++ b/rpki/gui/cacheview/tests.py @@ -0,0 +1,23 @@ +""" +This file demonstrates two different styles of tests (one doctest and one +unittest). These will both pass when you run "manage.py test". + +Replace these with more appropriate tests for your application. +""" + +from django.test import TestCase + +class SimpleTest(TestCase): + def test_basic_addition(self): + """ + Tests that 1 + 1 always equals 2. + """ + self.failUnlessEqual(1 + 1, 2) + +__test__ = {"doctest": """ +Another way to test that 1 + 1 is equal to 2. + +>>> 1 + 1 == 2 +True +"""} + diff --git a/rpki/gui/cacheview/urls.py b/rpki/gui/cacheview/urls.py new file mode 100644 index 00000000..cc03a587 --- /dev/null +++ b/rpki/gui/cacheview/urls.py @@ -0,0 +1,32 @@ +# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
__version__ = '$Id$'

from django.conf.urls import patterns, url
from rpki.gui.cacheview.views import (CertDetailView, RoaDetailView,
                                      GhostbusterDetailView)

# URL routes for the cacheview app.
#
# NOTE: the detail routes must capture a named group called "pk" -- that is
# the keyword Django's DetailView uses to look the object up.  The patch text
# had been garbled to "(?P[^/]+)", which is not even a valid regex; the
# "<pk>" group name is restored here.
urlpatterns = patterns(
    '',
    url(r'^search$', 'rpki.gui.cacheview.views.search_view',
        name='res-search'),
    url(r'^cert/(?P<pk>[^/]+)$', CertDetailView.as_view(), name='cert-detail'),
    url(r'^gbr/(?P<pk>[^/]+)$', GhostbusterDetailView.as_view(),
        name='ghostbuster-detail'),
    url(r'^roa/(?P<pk>[^/]+)$', RoaDetailView.as_view(), name='roa-detail'),
    (r'^$', 'rpki.gui.cacheview.views.global_summary'),
)

# vim:sw=4 ts=8 expandtab

# --- rpki/gui/cacheview/util.py (license header) follows in the patch ---
__version__ = '$Id$'

# FIX: this must be a one-element tuple.  The original bare parenthesised
# string ('import_rcynic_xml') evaluated to a plain string, so
# "from rpki.gui.cacheview.util import *" iterated the *characters* of the
# name and raised ImportError.
__all__ = ('import_rcynic_xml',)

default_logfile = '/var/rcynic/data/rcynic.xml'
default_root = '/var/rcynic/data'
object_accepted = None  # set by import_rcynic_xml()

import time
import vobject
import logging
import os
import stat
from socket import getfqdn
from cStringIO import StringIO

from django.db import transaction
import django.db.models

import rpki
import rpki.gui.app.timestamp
from rpki.gui.app.models import Conf, Alert
from rpki.gui.cacheview import models
from rpki.rcynic import rcynic_xml_iterator, label_iterator
from rpki.sundial import datetime
from rpki.irdb.zookeeper import Zookeeper

logger = logging.getLogger(__name__)


def rcynic_cert(cert, obj):
    """Copy certificate-specific fields from the rcynic object `cert` into
    the models.Cert instance `obj` and sync its AS/address resources.

    """
    obj.sia = cert.sia_directory_uri

    # object must be saved for the related manager methods below to work
    obj.save()

    # for the root cert, we can't set inst.issuer = inst until
    # after inst.save() has been called.
    if obj.issuer is None:
        obj.issuer = obj
        obj.save()

    # resources can change when a cert is updated
    obj.asns.clear()
    obj.addresses.clear()

    if cert.resources.asn.inherit:
        # FIXME: what happens when the parent's resources change and the
        # child cert is not reissued?
        obj.asns.add(*obj.issuer.asns.all())
    else:
        for asr in cert.resources.asn:
            logger.debug('processing %s' % asr)

            attrs = {'min': asr.min, 'max': asr.max}
            # reuse an existing ASRange row when one matches, otherwise
            # create a new one
            q = models.ASRange.objects.filter(**attrs)
            if not q:
                obj.asns.create(**attrs)
            else:
                obj.asns.add(q[0])

    # obj.issuer is None the first time we process the root cert in the
    # hierarchy, so we need to guard against dereference
    for cls, addr_obj, addrset, parentset in (
        models.AddressRange, obj.addresses, cert.resources.v4,
        obj.issuer.addresses.all() if obj.issuer else []
    ), (
        models.AddressRangeV6, obj.addresses_v6, cert.resources.v6,
        obj.issuer.addresses_v6.all() if obj.issuer else []
    ):
        if addrset.inherit:
            addr_obj.add(*parentset)
        else:
            for rng in addrset:
                logger.debug('processing %s' % rng)

                attrs = {'prefix_min': rng.min, 'prefix_max': rng.max}
                q = cls.objects.filter(**attrs)
                if not q:
                    addr_obj.create(**attrs)
                else:
                    addr_obj.add(q[0])


def rcynic_roa(roa, obj):
    """Copy ROA-specific fields (asID and both prefix families) from the
    rcynic object `roa` into the models.ROA instance `obj`.

    """
    obj.asid = roa.asID
    # object must be saved for the related manager methods below to work
    obj.save()
    obj.prefixes.clear()
    obj.prefixes_v6.clear()
    for pfxset in roa.prefix_sets:
        # the rcynic wrapper exposes the address family only via the
        # class name of the prefix set
        if pfxset.__class__.__name__ == 'roa_prefix_set_ipv6':
            roa_cls = models.ROAPrefixV6
            prefix_obj = obj.prefixes_v6
        else:
            roa_cls = models.ROAPrefixV4
            prefix_obj = obj.prefixes

        for pfx in pfxset:
            attrs = {'prefix_min': pfx.min(),
                     'prefix_max': pfx.max(),
                     'max_length': pfx.max_prefixlen}
            q = roa_cls.objects.filter(**attrs)
            if not q:
                prefix_obj.create(**attrs)
            else:
                prefix_obj.add(q[0])


def rcynic_gbr(gbr, obj):
    """Copy the vCard fields from the rcynic ghostbuster object `gbr` into
    the models.Ghostbuster instance `obj`.  Missing vCard attributes are
    stored as None.

    """
    vcard = vobject.readOne(gbr.vcard)
    obj.full_name = vcard.fn.value if hasattr(vcard, 'fn') else None
    obj.email_address = vcard.email.value if hasattr(vcard, 'email') else None
    obj.telephone = vcard.tel.value if hasattr(vcard, 'tel') else None
    obj.organization = vcard.org.value[0] if hasattr(vcard, 'org') else None
    obj.save()

# maps rcynic label name -> models.ValidationLabel, filled by process_labels()
LABEL_CACHE = {}

# dict
# Dict keeping a mapping of uri to (handle, old status, new status, repo
# object) for objects published by the local rpkid.  (The original comment
# said 3-tuple, but the code stores and unpacks a 4-tuple.)
uris = {}

# dispatch table: rcynic file_class name -> handler that copies the
# object-type-specific fields into the model instance
dispatch = {
    'rcynic_certificate': rcynic_cert,
    'rcynic_roa': rcynic_roa,
    'rcynic_ghostbuster': rcynic_gbr
}

# rcynic file_class name -> SignedObject subclass
model_class = {
    'rcynic_certificate': models.Cert,
    'rcynic_roa': models.ROA,
    'rcynic_ghostbuster': models.Ghostbuster
}


def save_status(repo, vs):
    """Record the validation status `vs` for the RepositoryObject `repo`,
    and (re)load the signed object into the database when it was accepted
    and its file changed on disk.

    """
    timestamp = datetime.fromXMLtime(vs.timestamp).to_sql()
    status = LABEL_CACHE[vs.status]
    g = models.generations_dict[vs.generation] if vs.generation else None
    repo.statuses.create(generation=g, timestamp=timestamp, status=status)

    # if this object is in our interest set, update with the current
    # validation status
    if repo.uri in uris:
        x, y, z, q = uris[repo.uri]
        valid = z or (status is object_accepted)  # don't clobber previous True value
        uris[repo.uri] = x, y, valid, repo

    if status is not object_accepted:
        return

    cls = model_class[vs.file_class.__name__]
    # find the instance of the signedobject subclass that is associated with
    # this repo instance (may be empty when not accepted)
    inst_qs = cls.objects.filter(repo=repo)

    logger.debug('processing %s' % vs.filename)

    if not inst_qs:
        inst = cls(repo=repo)
        logger.debug('object not found in db, creating new object cls=%s id=%s' % (
            cls,
            id(inst)
        ))
    else:
        inst = inst_qs[0]

    try:
        # determine if the object is changed/new
        mtime = os.stat(vs.filename)[stat.ST_MTIME]
    except OSError as e:
        logger.error('unable to stat %s: %s %s' % (
            vs.filename, type(e), e))
        # treat as if missing from rcynic.xml
        # use inst_qs rather than deleting inst so that we don't raise an
        # exception for newly created objects (inst_qs will be empty)
        inst_qs.delete()
        return

    if mtime != inst.mtime:
        inst.mtime = mtime
        try:
            obj = vs.obj  # causes object to be lazily loaded
        except Exception as e:
            # FIX: was the Python-2-only "except Exception, e" form; the
            # rest of this file already uses the "as" syntax
            logger.warning('Caught %s while processing %s: %s' % (
                type(e), vs.filename, e))
            return

        inst.not_before = obj.notBefore.to_sql()
        inst.not_after = obj.notAfter.to_sql()
        inst.name = obj.subject
        inst.keyid = obj.ski

        # look up signing cert
        if obj.issuer == obj.subject:
            # self-signed cert (TA)
            assert isinstance(inst, models.Cert)
            inst.issuer = None
        else:
            # if an object has moved in the repository, the entry for
            # the old location will still be in the database, but
            # without any object_accepted in its validation status
            qs = models.Cert.objects.filter(
                keyid=obj.aki,
                name=obj.issuer,
                repo__statuses__status=object_accepted
            )
            ncerts = len(qs)
            if ncerts == 0:
                logger.warning('unable to find signing cert with ski=%s (%s)' % (obj.aki, obj.issuer))
                return
            else:
                if ncerts > 1:
                    # multiple matching certs, all of which are valid
                    logger.warning('Found multiple certs matching ski=%s sn=%s' % (obj.aki, obj.issuer))
                    for c in qs:
                        logger.warning(c.repo.uri)
                # just use the first match
                inst.issuer = qs[0]

        try:
            # do object-specific tasks
            dispatch[vs.file_class.__name__](obj, inst)
        except:
            # deliberately broad: log context for any failure, then re-raise
            logger.error('caught exception while processing rcynic_object:\n'
                         'vs=' + repr(vs) + '\nobj=' + repr(obj))
            # .show() writes to stdout
            obj.show()
            raise

        logger.debug('object saved id=%s' % id(inst))
    else:
        logger.debug('object is unchanged')


@transaction.commit_on_success
def process_cache(root, xml_file):
    """Reload all validation statuses from the rcynic output `xml_file`
    (paths relative to `root`), then garbage-collect rows no longer
    referenced by the current run.

    """
    last_uri = None
    repo = None

    logger.info('clearing validation statuses')
    models.ValidationStatus.objects.all().delete()

    logger.info('updating validation status')
    for vs in rcynic_xml_iterator(root, xml_file):
        if vs.uri != last_uri:
            repo, created = models.RepositoryObject.objects.get_or_create(uri=vs.uri)
            last_uri = vs.uri
        save_status(repo, vs)

    # garbage collection
    # remove all objects which have no ValidationStatus references, which
    # means they did not appear in the last XML output
    logger.info('performing garbage collection')

    # Delete all objects that have zero validation status elements.
    models.RepositoryObject.objects.annotate(num_statuses=django.db.models.Count('statuses')).filter(num_statuses=0).delete()

    # Delete all SignedObject instances that were not accepted.  There may
    # exist rows for objects that were previously accepted.
    # See https://trac.rpki.net/ticket/588#comment:30
    #
    # We have to do this here rather than in save_status() because the
    # <validation_status/> elements are not guaranteed to be consecutive for
    # a given URI.  see https://trac.rpki.net/ticket/625#comment:5
    models.SignedObject.objects.exclude(repo__statuses__status=object_accepted).delete()

    # ROAPrefixV* objects are M2M so they are not automatically deleted when
    # their ROA object disappears
    models.ROAPrefixV4.objects.annotate(num_roas=django.db.models.Count('roas')).filter(num_roas=0).delete()
    models.ROAPrefixV6.objects.annotate(num_roas=django.db.models.Count('roas')).filter(num_roas=0).delete()
    logger.info('done with garbage collection')


@transaction.commit_on_success
def process_labels(xml_file):
    """Load the rcynic <label/> definitions from `xml_file` into
    models.ValidationLabel rows and populate LABEL_CACHE.

    """
    logger.info('updating labels...')

    for label, kind, desc in label_iterator(xml_file):
        logger.debug('label=%s kind=%s desc=%s' % (label, kind, desc))
        if kind:
            q = models.ValidationLabel.objects.filter(label=label)
            if not q:
                obj = models.ValidationLabel(label=label)
            else:
                obj = q[0]

            obj.kind = models.kinds_dict[kind]
            obj.status = desc
            obj.save()

            LABEL_CACHE[label] = obj
+ + """ + logger.info('querying for published objects') + + handles = [conf.handle for conf in Conf.objects.all()] + req = [rpki.left_right.list_published_objects_elt.make_pdu(action='list', self_handle=h, tag=h) for h in handles] + z = Zookeeper() + pdus = z.call_rpkid(*req) + for pdu in pdus: + if isinstance(pdu, rpki.left_right.list_published_objects_elt): + # Look up the object in the rcynic cache + qs = models.RepositoryObject.objects.filter(uri=pdu.uri) + if qs: + # get the current validity state + valid = qs[0].statuses.filter(status=object_accepted).exists() + uris[pdu.uri] = (pdu.self_handle, valid, False, None) + logger.debug('adding ' + pdu.uri) + else: + # this object is not in the cache. it was either published + # recently, or disappared previously. if it disappeared + # previously, it has already been alerted. in either case, we + # omit the uri from the list since we are interested only in + # objects which were valid and are no longer valid + pass + elif isinstance(pdu, rpki.left_right.report_error_elt): + logging.error('rpkid reported an error: %s' % pdu.error_code) + + +class Handle(object): + def __init__(self): + self.invalid = [] + self.missing = [] + + def add_invalid(self, v): + self.invalid.append(v) + + def add_missing(self, v): + self.missing.append(v) + + +def notify_invalid(): + """Send email alerts to the addresses registered in ghostbuster records for + any invalid objects that were published by users of this system. 
+ + """ + + logger.info('sending notifications for invalid objects') + + # group invalid objects by user + notify = {} + for uri, v in uris.iteritems(): + handle, old_status, new_status, obj = v + + if obj is None: + # object went missing + n = notify.get(handle, Handle()) + n.add_missing(uri) + # only select valid->invalid + elif old_status and not new_status: + n = notify.get(handle, Handle()) + n.add_invalid(obj) + + for handle, v in notify.iteritems(): + conf = Conf.objects.get(handle) + + msg = StringIO() + msg.write('This is an alert about problems with objects published by ' + 'the resource handle %s.\n\n' % handle) + + if v.invalid: + msg.write('The following objects were previously valid, but are ' + 'now invalid:\n') + + for o in v.invalid: + msg.write('\n') + msg.write(o.repo.uri) + msg.write('\n') + for s in o.statuses.all(): + msg.write('\t') + msg.write(s.status.label) + msg.write(': ') + msg.write(s.status.status) + msg.write('\n') + + if v.missing: + msg.write('The following objects were previously valid but are no ' + 'longer in the cache:\n') + + for o in v.missing: + msg.write(o) + msg.write('\n') + + msg.write("""-- +You are receiving this email because your address is published in a Ghostbuster +record, or is the default email address for this resource holder account on +%s.""" % getfqdn()) + + from_email = 'root@' + getfqdn() + subj = 'invalid RPKI object alert for resource handle %s' % conf.handle + conf.send_alert(subj, msg.getvalue(), from_email, severity=Alert.ERROR) + + +def import_rcynic_xml(root=default_root, logfile=default_logfile): + """Load the contents of rcynic.xml into the rpki.gui.cacheview database.""" + + global object_accepted + + start = time.time() + process_labels(logfile) + object_accepted = LABEL_CACHE['object_accepted'] + fetch_published_objects() + process_cache(root, logfile) + notify_invalid() + + rpki.gui.app.timestamp.update('rcynic_import') + + stop = time.time() + logger.info('elapsed time %d seconds.' 
% (stop - start)) diff --git a/rpki/gui/cacheview/views.py b/rpki/gui/cacheview/views.py new file mode 100644 index 00000000..94870eb2 --- /dev/null +++ b/rpki/gui/cacheview/views.py @@ -0,0 +1,172 @@ +# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from django.views.generic import DetailView +from django.shortcuts import render +from django.db.models import F + +from rpki.gui.cacheview import models, forms, misc +from rpki.resource_set import resource_range_as, resource_range_ip +from rpki.POW import IPAddress +from rpki.exceptions import BadIPResource + + +def cert_chain(obj): + """ + returns an iterator covering all certs from the root cert down to the EE. 
+ """ + chain = [obj] + while obj != obj.issuer: + obj = obj.issuer + chain.append(obj) + return zip(range(len(chain)), reversed(chain)) + + +class SignedObjectDetailView(DetailView): + def get_context_data(self, **kwargs): + context = super(SignedObjectDetailView, + self).get_context_data(**kwargs) + context['chain'] = cert_chain(self.object) + return context + + +class RoaDetailView(SignedObjectDetailView): + model = models.ROA + + +class CertDetailView(SignedObjectDetailView): + model = models.Cert + + +class GhostbusterDetailView(SignedObjectDetailView): + model = models.Ghostbuster + + +def search_view(request): + certs = None + roas = None + + if request.method == 'POST': + form = forms.SearchForm2(request.POST, request.FILES) + if form.is_valid(): + resource = form.cleaned_data.get('resource') + # try to determine the type of input given + try: + r = resource_range_as.parse_str(resource) + certs = models.Cert.objects.filter(asns__min__gte=r.min, + asns__max__lte=r.max) + roas = models.ROA.objects.filter(asid__gte=r.min, + asid__lte=r.max) + except: + try: + r = resource_range_ip.parse_str(resource) + if r.version == 4: + certs = models.Cert.objects.filter( + addresses__prefix_min__lte=r.min, + addresses__prefix_max__gte=r.max) + roas = models.ROA.objects.filter( + prefixes__prefix_min__lte=r.min, + prefixes__prefix_max__gte=r.max) + else: + certs = models.Cert.objects.filter( + addresses_v6__prefix_min__lte=r.min, + addresses_v6__prefix_max__gte=r.max) + roas = models.ROA.objects.filter( + prefixes_v6__prefix_min__lte=r.min, + prefixes_v6__prefix_max__gte=r.max) + except BadIPResource: + pass + + return render(request, 'cacheview/search_result.html', + {'resource': resource, 'certs': certs, 'roas': roas}) + + +def cmp_prefix(x, y): + r = cmp(x[0].family, y[0].family) + if r == 0: + r = cmp(x[2], y[2]) # integer address + if r == 0: + r = cmp(x[0].bits, y[0].bits) + if r == 0: + r = cmp(x[0].max_length, y[0].max_length) + if r == 0: + r = cmp(x[1].asid, 
y[1].asid) + return r + + +#def cmp_prefix(x,y): +# for attr in ('family', 'prefix', 'bits', 'max_length'): +# r = cmp(getattr(x[0], attr), getattr(y[0], attr)) +# if r: +# return r +# return cmp(x[1].asid, y[1].asid) + + +def query_view(request): + """ + Allow the user to search for an AS or prefix, and show all published ROA + information. + """ + + if request.method == 'POST': + form = forms.SearchForm(request.POST, request.FILES) + if form.is_valid(): + certs = None + roas = None + + addr = form.cleaned_data.get('addr') + asn = form.cleaned_data.get('asn') + + if addr: + family, r = misc.parse_ipaddr(addr) + prefixes = models.ROAPrefix.objects.filter(family=family, prefix=str(r.min)) + + prefix_list = [] + for pfx in prefixes: + for roa in pfx.roas.all(): + prefix_list.append((pfx, roa)) + elif asn: + r = resource_range_as.parse_str(asn) + roas = models.ROA.objects.filter(asid__gte=r.min, asid__lte=r.max) + + # display the results sorted by prefix + prefix_list = [] + for roa in roas: + for pfx in roa.prefixes.all(): + addr = IPAddress(pfx.prefix.encode()) + prefix_list.append((pfx, roa, addr)) + prefix_list.sort(cmp=cmp_prefix) + + return render('cacheview/query_result.html', + {'object_list': prefix_list}, request) + else: + form = forms.SearchForm() + + return render('cacheview/search_form.html', { + 'form': form, 'search_type': 'ROA '}, request) + + +def global_summary(request): + """Display a table summarizing the state of the global RPKI.""" + + roots = models.Cert.objects.filter(issuer=F('pk')) # self-signed + + return render(request, 'cacheview/global_summary.html', { + 'roots': roots + }) + +# vim:sw=4 ts=8 expandtab diff --git a/rpki/gui/decorators.py b/rpki/gui/decorators.py new file mode 100644 index 00000000..69d20c46 --- /dev/null +++ b/rpki/gui/decorators.py @@ -0,0 +1,31 @@ +# Copyright (C) 2013 SPARTA, Inc. 
# (license text as in the sibling files; see patch header)

__version__ = '$Id$'

from functools import wraps

from django import http


def tls_required(f):
    """Decorator which returns a 500 error if the connection is not secured
    with TLS (https).

    """
    # FIX: wrap with functools.wraps so the decorated view keeps its
    # __name__/__doc__ (useful for URL debugging and introspection)
    @wraps(f)
    def _tls_required(request, *args, **kwargs):
        if not request.is_secure():
            return http.HttpResponseServerError(
                'This resource may only be accessed securely via https',
                content_type='text/plain')
        return f(request, *args, **kwargs)
    return _tls_required

# --- rpki/gui/default_settings.py ---

"""
This module contains static configuration settings for the web portal.
"""

__version__ = '$Id$'

import os
import random
import string
import socket

import rpki.config
import rpki.autoconf

# Where to put static files.
STATIC_ROOT = rpki.autoconf.datarootdir + '/rpki/media'

# Must end with a slash!
STATIC_URL = '/media/'

# Where to email server errors.
ADMINS = (('Administrator', 'root@localhost'),)

LOGGING = {
    'version': 1,
    'formatters': {
        'verbose': {
            # see http://docs.python.org/2.7/library/logging.html#logging.LogRecord
            'format': '%(levelname)s %(asctime)s %(name)s %(message)s'
        },
    },
    'handlers': {
        'stderr': {
            'class': 'logging.StreamHandler',
            'level': 'DEBUG',
            'formatter': 'verbose',
        },
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler',
        },
    },
    'loggers': {
        'django': {
            'level': 'ERROR',
            'handlers': ['stderr', 'mail_admins'],
        },
        'rpki.gui': {
            'level': 'WARNING',
            'handlers': ['stderr'],
        },
    },
}

# Load the SQL authentication bits from the system rpki.conf.
rpki_config = rpki.config.parser(section='web_portal')

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': rpki_config.get('sql-database'),
        'USER': rpki_config.get('sql-username'),
        'PASSWORD': rpki_config.get('sql-password'),

        # Ensure the default storage engine is InnoDB since we need
        # foreign key support.  The Django documentation suggests
        # removing this after the syncdb is performed as an optimization,
        # but there isn't an easy way to do this automatically.

        'OPTIONS': {
            'init_command': 'SET storage_engine=INNODB',
        }
    }
}


def select_tz():
    "Find a supported timezone that looks like UTC"
    for tz in ('UTC', 'GMT', 'Etc/UTC', 'Etc/GMT'):
        if os.path.exists('/usr/share/zoneinfo/' + tz):
            return tz
    # Can't determine the proper timezone, fall back to UTC and let Django
    # report the error to the user.
    return 'UTC'

# Local time zone for this installation.  Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = select_tz()


def get_secret_key():
    """Retrieve the secret-key value from rpki.conf or generate a random value
    if it is not present.

    NOTE(review): when secret-key is absent from rpki.conf a fresh random key
    is generated on every process start, which invalidates existing sessions
    and cookies -- consider persisting the generated key.
    """
    d = string.letters + string.digits
    val = ''.join([random.choice(d) for _ in range(50)])
    return rpki_config.get('secret-key', val)

# Make this unique, and don't share it with anybody.
SECRET_KEY = get_secret_key()


# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
# for details on why you might need this.
def get_allowed_hosts():
    allowed_hosts = set(rpki_config.multiget("allowed-hosts"))
    allowed_hosts.add(socket.getfqdn())
    try:
        # netifaces is optional; when present, also allow every local
        # interface address
        import netifaces
        for interface in netifaces.interfaces():
            addresses = netifaces.ifaddresses(interface)
            for af in (netifaces.AF_INET, netifaces.AF_INET6):
                if af in addresses:
                    for address in addresses[af]:
                        if "addr" in address:
                            allowed_hosts.add(address["addr"])
    except ImportError:
        pass
    return list(allowed_hosts)

ALLOWED_HOSTS = get_allowed_hosts()

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    'django.template.loaders.eggs.Loader'
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware'
)

ROOT_URLCONF = 'rpki.gui.urls'

INSTALLED_APPS = (
    'django.contrib.auth',
    #'django.contrib.admin',
    #'django.contrib.admindocs',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.staticfiles',
    'rpki.irdb',
    'rpki.gui.app',
    'rpki.gui.cacheview',
    'rpki.gui.routeview',
    'south',
)

TEMPLATE_CONTEXT_PROCESSORS = (
    "django.contrib.auth.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.contrib.messages.context_processors.messages",
    "django.core.context_processors.request",
    "django.core.context_processors.static"
)

# Allow local site to override any setting above -- but if there's
# anything that local sites routinely need to modify, please consider
# putting that configuration into rpki.conf and just adding code here
# to read that configuration.
try:
    from local_settings import *
except ImportError:
    # FIX: was a bare "except: pass", which also silently swallowed syntax
    # and runtime errors *inside* local_settings.py; only a missing module
    # should be ignored
    pass

# --- rpki/gui/models.py (license header) follows in the patch ---
# (license text as in the sibling files; see patch header)

"""
Common classes for reuse in apps.
"""

__version__ = '$Id$'

from django.db import models

import rpki.resource_set
import rpki.POW
from south.modelsinspector import add_introspection_rules


class IPv6AddressField(models.Field):
    "Field large enough to hold a 128-bit unsigned integer."

    __metaclass__ = models.SubfieldBase

    def db_type(self, connection):
        return 'binary(16)'

    def to_python(self, value):
        if isinstance(value, rpki.POW.IPAddress):
            return value
        return rpki.POW.IPAddress.fromBytes(value)

    def get_db_prep_value(self, value, connection, prepared):
        """
        Note that we add a custom conversion to encode long values as hex
        strings in SQL statements.  See settings.get_conv() for details.

        """
        return value.toBytes()


class IPv4AddressField(models.Field):
    "Wrapper around rpki.POW.IPAddress."

    __metaclass__ = models.SubfieldBase

    def db_type(self, connection):
        return 'int UNSIGNED'

    def to_python(self, value):
        if isinstance(value, rpki.POW.IPAddress):
            return value
        return rpki.POW.IPAddress(value, version=4)

    def get_db_prep_value(self, value, connection, prepared):
        return long(value)

# Teach South how to introspect the custom fields above.
# FIX: the match patterns are regexes -- use raw strings so the "\." escapes
# are passed through literally instead of relying on Python's lenient
# handling of unknown escapes.
add_introspection_rules(
    [
        ([IPv4AddressField, IPv6AddressField], [], {})
    ],
    [r'^rpki\.gui\.models\.IPv4AddressField',
     r'^rpki\.gui\.models\.IPv6AddressField']
)


class Prefix(models.Model):
    """Common implementation for models with an IP address range.

    Expects that `range_cls` is set to the appropriate subclass of
    rpki.resource_set.resource_range_ip."""

    def as_resource_range(self):
        """
        Returns the prefix as a rpki.resource_set.resource_range_ip object.
        """
        return self.range_cls(self.prefix_min, self.prefix_max)

    @property
    def prefixlen(self):
        "Returns the prefix length for the prefix in this object."
        return self.as_resource_range().prefixlen()

    def get_prefix_display(self):
        "Return a string representation of this IP prefix."
        return str(self.as_resource_range())

    def __unicode__(self):
        """This method may be overridden by subclasses.  The default
        implementation calls get_prefix_display(). """
        return self.get_prefix_display()

    class Meta:
        abstract = True

        # default sort order reflects what "sh ip bgp" outputs
        ordering = ('prefix_min',)


class PrefixV4(Prefix):
    "IPv4 Prefix."

    range_cls = rpki.resource_set.resource_range_ipv4

    prefix_min = IPv4AddressField(db_index=True, null=False)
    prefix_max = IPv4AddressField(db_index=True, null=False)

    class Meta(Prefix.Meta):
        abstract = True


class PrefixV6(Prefix):
    "IPv6 Prefix."

    range_cls = rpki.resource_set.resource_range_ipv6

    prefix_min = IPv6AddressField(db_index=True, null=False)
    prefix_max = IPv6AddressField(db_index=True, null=False)

    class Meta(Prefix.Meta):
        abstract = True


class ASN(models.Model):
    """Represents a range of ASNs.

    This model is abstract, and is intended to be reused by applications."""

    min = models.PositiveIntegerField(null=False)
    max = models.PositiveIntegerField(null=False)

    class Meta:
        abstract = True
        ordering = ('min', 'max')

    def as_resource_range(self):
        return rpki.resource_set.resource_range_as(self.min, self.max)

    def __unicode__(self):
        return u'AS%s' % self.as_resource_range()

# vim:sw=4 ts=8 expandtab

# --- rpki/gui/routeview/api.py ---

__version__ = '$Id$'

import json
from django import http
from rpki.gui.routeview.models import RouteOrigin, RouteOriginV6
from rpki import resource_set
import rpki.exceptions
+ + By default, only returns up to 10 matching routes, but the client may + request a different limit with the 'count=' query string parameter. + + """ + hard_limit = 100 + + if request.method == 'GET' and 'prefix__in' in request.GET: + # find all routers covered by this prefix + match_prefix = request.GET.get('prefix__in') + # max number of items to return + limit = request.GET.get('count', 10) + if limit < 1 or limit > hard_limit: + return http.HttpResponseBadRequest('invalid value for count parameter') + + try: + if ':' in match_prefix: + # v6 + pfx = resource_set.resource_range_ipv6.parse_str(match_prefix) + manager = RouteOriginV6 + else: + # v4 + pfx = resource_set.resource_range_ipv4.parse_str(match_prefix) + manager = RouteOrigin + except (AssertionError, rpki.exceptions.BadIPResource), e: + return http.HttpResponseBadRequest(e) + + try: + qs = manager.objects.filter(prefix_min__gte=pfx.min, + prefix_max__lte=pfx.max)[:limit] + # FIXME - a REST API should really return the url of the resource, + # but since we are combining two separate tables, the .pk is not a + # unique identifier. + matches = [{'prefix': str(x.as_resource_range()), 'asn': x.asn} for x in qs] + except IndexError: + # no matches + matches = [] + + return http.HttpResponse(json.dumps(matches), content_type='text/javascript') + + return http.HttpResponseBadRequest() diff --git a/rpki/gui/routeview/models.py b/rpki/gui/routeview/models.py new file mode 100644 index 00000000..052860c4 --- /dev/null +++ b/rpki/gui/routeview/models.py @@ -0,0 +1,81 @@ +# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2012 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from django.db.models import PositiveIntegerField, permalink +import rpki.gui.models + + +class RouteOrigin(rpki.gui.models.PrefixV4): + "Represents an IPv4 BGP routing table entry." + + asn = PositiveIntegerField(help_text='origin AS', null=False) + + def __unicode__(self): + return u"AS%d's route origin for %s" % (self.asn, + self.get_prefix_display()) + + @property + def roas(self): + "Return a queryset of ROAs which cover this route." + return rpki.gui.cacheview.models.ROA.objects.filter( + prefixes__prefix_min__lte=self.prefix_min, + prefixes__prefix_max__gte=self.prefix_max + ) + + @property + def roa_prefixes(self): + "Return a queryset of ROA prefixes which cover this route." + return rpki.gui.cacheview.models.ROAPrefixV4.objects.filter( + prefix_min__lte=self.prefix_min, + prefix_max__gte=self.prefix_max + ) + + @property + def status(self): + "Returns the validation status of this route origin object." + roas = self.roas + # subselect exact match + if self.asn != 0 and roas.filter(asid=self.asn, prefixes__max_length__gte=self.prefixlen).exists(): + return 'valid' + elif roas.exists(): + return 'invalid' + return 'unknown' + + @permalink + def get_absolute_url(self): + return ('rpki.gui.app.views.route_detail', [str(self.pk)]) + + class Meta: + # sort by increasing mask length (/16 before /24) + ordering = ('prefix_min', '-prefix_max') + + +class RouteOriginV6(rpki.gui.models.PrefixV6): + "Represents an IPv6 BGP routing table entry." 
+ + asn = PositiveIntegerField(help_text='origin AS', null=False) + + def __unicode__(self): + return u"AS%d's route origin for %s" % (self.asn, + self.get_prefix_display()) + + class Meta: + ordering = ('prefix_min', '-prefix_max') + + +# this goes at the end of the file to avoid problems with circular imports +import rpki.gui.cacheview.models diff --git a/rpki/gui/routeview/util.py b/rpki/gui/routeview/util.py new file mode 100644 index 00000000..7884224c --- /dev/null +++ b/rpki/gui/routeview/util.py @@ -0,0 +1,236 @@ +# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 

__version__ = '$Id$'
# BUG FIX: without the trailing comma this was a plain string, not a tuple,
# so __all__ did not actually declare the public API.
__all__ = ('import_routeviews_dump',)

import itertools
import _mysql_exceptions
import os.path
import subprocess
import time
import logging
import urlparse
from urllib import urlretrieve, unquote

from django.db import transaction, connection

from rpki.resource_set import resource_range_ipv4, resource_range_ipv6
from rpki.exceptions import BadIPResource
import rpki.gui.app.timestamp

# globals
logger = logging.getLogger(__name__)

# Eventually this can be retrieved from rpki.conf
DEFAULT_URL = 'http://archive.routeviews.org/oix-route-views/oix-full-snapshot-latest.dat.bz2'

def parse_text(f):
    """Parse a "sh ip bgp"-style text snapshot from the open file *f* and
    bulk-load (asn, prefix_min, prefix_max) rows into the
    routeview_routeorigin table, building a staging table which is later
    swapped in place of the live one."""
    last_prefix = None
    cursor = connection.cursor()
    range_class = resource_range_ipv4
    table = 'routeview_routeorigin'
    sql = "INSERT INTO %s_new SET asn=%%s, prefix_min=%%s, prefix_max=%%s" % table

    try:
        logger.info('Dropping existing staging table...')
        cursor.execute('DROP TABLE IF EXISTS %s_new' % table)
    except _mysql_exceptions.Warning:
        pass

    logger.info('Creating staging table...')
    cursor.execute('CREATE TABLE %(table)s_new LIKE %(table)s' % {'table': table})

    logger.info('Disabling autocommit...')
    cursor.execute('SET autocommit=0')

    logger.info('Adding rows to table...')
    # skip the header lines of the dump
    for row in itertools.islice(f, 5, None):
        cols = row.split()

        # index -1 is i/e/? for igp/egp
        origin_as = cols[-2]
        # FIXME: skip AS_SETs
        if origin_as[0] == '{':
            continue

        prefix = cols[1]

        # validate the prefix since the "sh ip bgp" output is sometimes
        # corrupt by no space between the prefix and the next hop IP
        # address.
        net, bits = prefix.split('/')
        if len(bits) > 2:
            s = ['mask for %s looks fishy...' % prefix]
            prefix = '%s/%s' % (net, bits[0:2])
            s.append('assuming it should be %s' % prefix)
            logger.warning(' '.join(s))

        # the output may contain multiple paths to the same origin.
        # if this is the same prefix as the last entry, we don't need
        # to validate it again.
+ # + # prefixes are sorted, but the origin_as is not, so we keep a set to + # avoid duplicates, and insert into the db once we've seen all the + # origin_as values for a given prefix + if prefix != last_prefix: + # output routes for previous prefix + if last_prefix is not None: + try: + rng = range_class.parse_str(last_prefix) + rmin = long(rng.min) + rmax = long(rng.max) + cursor.executemany(sql, [(asn, rmin, rmax) for asn in asns]) + except BadIPResource: + logger.warning('skipping bad prefix: ' + last_prefix) + + asns = set() + last_prefix = prefix + + try: + asns.add(int(origin_as)) + except ValueError as err: + logger.warning('\n'.join( + ['unable to parse origin AS: ' + origin_as], + ['ValueError: ' + str(err)] + ['route entry was: ' + row], + )) + + logger.info('Committing...') + cursor.execute('COMMIT') + + try: + logger.info('Dropping old table...') + cursor.execute('DROP TABLE IF EXISTS %s_old' % table) + except _mysql_exceptions.Warning: + pass + + logger.info('Swapping staging table with live table...') + cursor.execute('RENAME TABLE %(table)s TO %(table)s_old, %(table)s_new TO %(table)s' % {'table': table}) + + transaction.commit_unless_managed() + + logger.info('Updating timestamp metadata...') + rpki.gui.app.timestamp.update('bgp_v4_import') + + +def parse_mrt(f): + # filter input through bgpdump + pipe = subprocess.Popen(['bgpdump', '-m', '-v', '-'], stdin=f, + stdout=subprocess.PIPE) + + last_prefix = None + last_as = None + for e in pipe.stdout.readlines(): + a = e.split('|') + prefix = a[5] + try: + origin_as = int(a[6].split()[-1]) + except ValueError: + # skip AS_SETs + continue + + if prefix != last_prefix: + last_prefix = prefix + elif last_as == origin_as: + continue + last_as = origin_as + + asns = PREFIXES.get(prefix) + if not asns: + asns = set() + PREFIXES[prefix] = asns + asns.add(origin_as) + + pipe.wait() + if pipe.returncode: + raise ProgException('bgpdump exited with code %d' % pipe.returncode) + + +class ProgException(Exception): 
+ pass + + +class UnknownInputType(ProgException): + pass + + +class PipeFailed(ProgException): + pass + + +def import_routeviews_dump(filename=DEFAULT_URL, filetype='auto'): + """Load the oix-full-snapshot-latest.bz2 from routeview.org into the + rpki.gui.routeview database. + + Arguments: + + filename [optional]: the full path to the downloaded file to parse + + filetype [optional]: 'text' or 'mrt' + + """ + start_time = time.time() + + if filename.startswith('http://'): + #get filename from the basename of the URL + u = urlparse.urlparse(filename) + bname = os.path.basename(unquote(u.path)) + tmpname = os.path.join('/tmp', bname) + + logger.info("Downloading %s to %s" % (filename, tmpname)) + if os.path.exists(tmpname): + os.remove(tmpname) + # filename is replaced with a local filename containing cached copy of + # URL + filename, headers = urlretrieve(filename, tmpname) + + if filetype == 'auto': + # try to determine input type from filename, based on the default + # filenames from archive.routeviews.org + bname = os.path.basename(filename) + if bname.startswith('oix-full-snapshot-latest'): + filetype = 'text' + elif bname.startswith('rib.'): + filetype = 'mrt' + else: + raise UnknownInputType('unable to automatically determine input file type') + logging.info('Detected import format as "%s"' % filetype) + + pipe = None + if filename.endswith('.bz2'): + bunzip = 'bunzip2' + logging.info('Decompressing input file on the fly...') + pipe = subprocess.Popen([bunzip, '--stdout', filename], + stdout=subprocess.PIPE) + input_file = pipe.stdout + else: + input_file = open(filename) + + try: + dispatch = {'text': parse_text, 'mrt': parse_mrt} + dispatch[filetype](input_file) + except KeyError: + raise UnknownInputType('"%s" is an unknown input file type' % filetype) + + if pipe: + logging.debug('Waiting for child to exit...') + pipe.wait() + if pipe.returncode: + raise PipeFailed('Child exited code %d' % pipe.returncode) + pipe = None + else: + input_file.close() + + 
logger.info('Elapsed time %d secs' % (time.time() - start_time)) diff --git a/rpki/gui/script_util.py b/rpki/gui/script_util.py new file mode 100644 index 00000000..c3a864fd --- /dev/null +++ b/rpki/gui/script_util.py @@ -0,0 +1,43 @@ +# Copyright (C) 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +This module contains utility functions for use in standalone scripts. +""" + +from django.conf import settings + +from rpki import config +from rpki import autoconf + +__version__ = '$Id$' + + +def setup(): + """ + Configure Django enough to use the ORM. + """ + cfg = config.parser(section='web_portal') + # INSTALLED_APPS doesn't seem necessary so long as you are only accessing + # existing tables. + settings.configure( + DATABASES={ + 'default': { + 'ENGINE': 'django.db.backends.mysql', + 'NAME': cfg.get('sql-database'), + 'USER': cfg.get('sql-username'), + 'PASSWORD': cfg.get('sql-password'), + } + }, + ) diff --git a/rpki/gui/urls.py b/rpki/gui/urls.py new file mode 100644 index 00000000..955092f5 --- /dev/null +++ b/rpki/gui/urls.py @@ -0,0 +1,36 @@ +# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions +# Copyright (C) 2012, 2013 SPARTA, Inc. 
a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +from django.conf.urls import patterns, include + +urlpatterns = patterns( + '', + + # Uncomment the admin/doc line below and add 'django.contrib.admindocs' + # to INSTALLED_APPS to enable admin documentation: + #(r'^admin/doc/', include('django.contrib.admindocs.urls')), + + # Uncomment the next line to enable the admin: + #(r'^admin/', include(admin.site.urls)), + + (r'^api/', include('rpki.gui.api.urls')), + (r'^cacheview/', include('rpki.gui.cacheview.urls')), + (r'^rpki/', include('rpki.gui.app.urls')), + + (r'^accounts/login/$', 'rpki.gui.views.login'), + (r'^accounts/logout/$', 'rpki.gui.views.logout', {'next_page': '/rpki/'}), +) diff --git a/rpki/gui/views.py b/rpki/gui/views.py new file mode 100644 index 00000000..404d6c7e --- /dev/null +++ b/rpki/gui/views.py @@ -0,0 +1,30 @@ +# Copyright (C) 2013 SPARTA, Inc. a Parsons Company +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +__version__ = '$Id$' + +import django.contrib.auth.views +from rpki.gui.decorators import tls_required + + +@tls_required +def login(request, *args, **kwargs): + "Wrapper around django.contrib.auth.views.login to force use of TLS." + return django.contrib.auth.views.login(request, *args, **kwargs) + + +@tls_required +def logout(request, *args, **kwargs): + "Wrapper around django.contrib.auth.views.logout to force use of TLS." + return django.contrib.auth.views.logout(request, *args, **kwargs) diff --git a/rpki/http.py b/rpki/http.py new file mode 100644 index 00000000..3c541f26 --- /dev/null +++ b/rpki/http.py @@ -0,0 +1,1070 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +HTTP utilities, both client and server. +""" + +import time +import socket +import asyncore +import asynchat +import urlparse +import sys +import random +import rpki.async +import rpki.sundial +import rpki.x509 +import rpki.exceptions +import rpki.log +import rpki.POW + +## @var rpki_content_type +# HTTP content type used for all RPKI messages. +rpki_content_type = "application/x-rpki" + +## @var debug_http +# Verbose chatter about HTTP streams. +debug_http = False + +## @var want_persistent_client +# Whether we want persistent HTTP client streams, when server also supports them. +want_persistent_client = False + +## @var want_persistent_server +# Whether we want persistent HTTP server streams, when client also supports them. +want_persistent_server = False + +## @var default_client_timeout +# Default HTTP client connection timeout. +default_client_timeout = rpki.sundial.timedelta(minutes = 5) + +## @var default_server_timeout +# Default HTTP server connection timeouts. Given our druthers, we'd +# prefer that the client close the connection, as this avoids the +# problem of client starting to reuse connection just as server closes +# it, so this should be longer than the client timeout. +default_server_timeout = rpki.sundial.timedelta(minutes = 10) + +## @var default_http_version +# Preferred HTTP version. +default_http_version = (1, 0) + +## @var default_tcp_port +# Default port for clients and servers that don't specify one. +default_tcp_port = 80 + +## @var enable_ipv6_servers +# Whether to enable IPv6 listeners. Enabled by default, as it should +# be harmless. Has no effect if kernel doesn't support IPv6. 
+enable_ipv6_servers = True + +## @var enable_ipv6_clients +# Whether to consider IPv6 addresses when making connections. +# Disabled by default, as IPv6 connectivity is still a bad joke in +# far too much of the world. +enable_ipv6_clients = False + +## @var have_ipv6 +# Whether the current machine claims to support IPv6. Note that just +# because the kernel supports it doesn't mean that the machine has +# usable IPv6 connectivity. I don't know of a simple portable way to +# probe for connectivity at runtime (the old test of "can you ping +# SRI-NIC.ARPA?" seems a bit dated...). Don't set this, it's set +# automatically by probing using the socket() system call at runtime. +try: + # pylint: disable=W0702,W0104 + socket.socket(socket.AF_INET6).close() + socket.IPPROTO_IPV6 + socket.IPV6_V6ONLY +except: + have_ipv6 = False +else: + have_ipv6 = True + +## @var use_adns + +# Whether to use rpki.adns code. This is still experimental, so it's +# not (yet) enabled by default. +use_adns = False +try: + import rpki.adns +except ImportError: + pass + +def supported_address_families(enable_ipv6): + """ + IP address families on which servers should listen, and to consider + when selecting addresses for client connections. + """ + if enable_ipv6 and have_ipv6: + return (socket.AF_INET, socket.AF_INET6) + else: + return (socket.AF_INET,) + +def localhost_addrinfo(): + """ + Return pseudo-getaddrinfo results for localhost. + """ + result = [(socket.AF_INET, "127.0.0.1")] + if enable_ipv6_clients and have_ipv6: + result.append((socket.AF_INET6, "::1")) + return result + +class http_message(object): + """ + Virtual class representing of one HTTP message. 
+ """ + + software_name = "ISC RPKI library" + + def __init__(self, version = None, body = None, headers = None): + self.version = version + self.body = body + self.headers = headers + self.normalize_headers() + + def normalize_headers(self, headers = None): + """ + Clean up (some of) the horrible messes that HTTP allows in its + headers. + """ + if headers is None: + headers = () if self.headers is None else self.headers.items() + translate_underscore = True + else: + translate_underscore = False + result = {} + for k, v in headers: + if translate_underscore: + k = k.replace("_", "-") + k = "-".join(s.capitalize() for s in k.split("-")) + v = v.strip() + if k in result: + result[k] += ", " + v + else: + result[k] = v + self.headers = result + + @classmethod + def parse_from_wire(cls, headers): + """ + Parse and normalize an incoming HTTP message. + """ + self = cls() + headers = headers.split("\r\n") + self.parse_first_line(*headers.pop(0).split(None, 2)) + for i in xrange(len(headers) - 2, -1, -1): + if headers[i + 1][0].isspace(): + headers[i] += headers[i + 1] + del headers[i + 1] + self.normalize_headers([h.split(":", 1) for h in headers]) + return self + + def format(self): + """ + Format an outgoing HTTP message. + """ + s = self.format_first_line() + if self.body is not None: + assert isinstance(self.body, str) + self.headers["Content-Length"] = len(self.body) + for kv in self.headers.iteritems(): + s += "%s: %s\r\n" % kv + s += "\r\n" + if self.body is not None: + s += self.body + return s + + def __str__(self): + return self.format() + + def parse_version(self, version): + """ + Parse HTTP version, raise an exception if we can't. + """ + if version[:5] != "HTTP/": + raise rpki.exceptions.HTTPBadVersion, "Couldn't parse version %s" % version + self.version = tuple(int(i) for i in version[5:].split(".")) + + @property + def persistent(self): + """ + Figure out whether this HTTP message encourages a persistent connection. 
+ """ + c = self.headers.get("Connection") + if self.version == (1, 1): + return c is None or "close" not in c.lower() + elif self.version == (1, 0): + return c is not None and "keep-alive" in c.lower() + else: + return False + +class http_request(http_message): + """ + HTTP request message. + """ + + def __init__(self, cmd = None, path = None, version = default_http_version, body = None, callback = None, errback = None, **headers): + assert cmd == "POST" or body is None + http_message.__init__(self, version = version, body = body, headers = headers) + self.cmd = cmd + self.path = path + self.callback = callback + self.errback = errback + self.retried = False + + def parse_first_line(self, cmd, path, version): + """ + Parse first line of HTTP request message. + """ + self.parse_version(version) + self.cmd = cmd + self.path = path + + def format_first_line(self): + """ + Format first line of HTTP request message, and set up the + User-Agent header. + """ + self.headers.setdefault("User-Agent", self.software_name) + return "%s %s HTTP/%d.%d\r\n" % (self.cmd, self.path, self.version[0], self.version[1]) + + def __repr__(self): + return rpki.log.log_repr(self, self.cmd, self.path) + +class http_response(http_message): + """ + HTTP response message. + """ + + def __init__(self, code = None, reason = None, version = default_http_version, body = None, **headers): + http_message.__init__(self, version = version, body = body, headers = headers) + self.code = code + self.reason = reason + + def parse_first_line(self, version, code, reason): + """ + Parse first line of HTTP response message. + """ + self.parse_version(version) + self.code = int(code) + self.reason = reason + + def format_first_line(self): + """ + Format first line of HTTP response message, and set up Date and + Server headers. 
+ """ + self.headers.setdefault("Date", time.strftime("%a, %d %b %Y %T GMT")) + self.headers.setdefault("Server", self.software_name) + return "HTTP/%d.%d %s %s\r\n" % (self.version[0], self.version[1], self.code, self.reason) + + def __repr__(self): + return rpki.log.log_repr(self, self.code, self.reason) + +def log_method(self, msg, logger = rpki.log.debug): + """ + Logging method used in several different classes. + """ + assert isinstance(logger, rpki.log.logger) + if debug_http or logger is not rpki.log.debug: + logger("%r: %s" % (self, msg)) + +def addr_to_string(addr): + """ + Convert socket addr tuple to printable string. Assumes 2-element + tuple is IPv4, 4-element tuple is IPv6, throws TypeError for + anything else. + """ + + if len(addr) == 2: + return "%s:%d" % (addr[0], addr[1]) + if len(addr) == 4: + return "%s.%d" % (addr[0], addr[1]) + raise TypeError + +class http_stream(asynchat.async_chat): + """ + Virtual class representing an HTTP message stream. + """ + + log = log_method + + def __repr__(self): + status = ["connected"] if self.connected else [] + try: + status.append(addr_to_string(self.addr)) + except TypeError: + pass + return rpki.log.log_repr(self, *status) + + def __init__(self, sock = None): + asynchat.async_chat.__init__(self, sock) + self.buffer = [] + self.timer = rpki.async.timer(self.handle_timeout) + self.restart() + + def restart(self): + """ + (Re)start HTTP message parser, reset timer. + """ + assert not self.buffer + self.chunk_handler = None + self.set_terminator("\r\n\r\n") + self.update_timeout() + + def update_timeout(self): + """ + Put this stream's timer in known good state: set it to the + stream's timeout value if we're doing timeouts, otherwise clear + it. + """ + if self.timeout is not None: + self.log("Setting timeout %s" % self.timeout) + self.timer.set(self.timeout) + else: + self.log("Clearing timeout") + self.timer.cancel() + + def collect_incoming_data(self, data): + """ + Buffer incoming data from asynchat. 
+ """ + self.buffer.append(data) + self.update_timeout() + + def get_buffer(self): + """ + Consume data buffered from asynchat. + """ + val = "".join(self.buffer) + self.buffer = [] + return val + + def found_terminator(self): + """ + Asynchat reported that it found whatever terminator we set, so + figure out what to do next. This can be messy, because we can be + in any of several different states: + + @li We might be handling chunked HTTP, in which case we have to + initialize the chunk decoder; + + @li We might have found the end of the message body, in which case + we can (finally) process it; or + + @li We might have just gotten to the end of the message headers, + in which case we have to parse them to figure out which of three + separate mechanisms (chunked, content-length, TCP close) is going + to tell us how to find the end of the message body. + """ + self.update_timeout() + if self.chunk_handler: + self.chunk_handler() + elif not isinstance(self.get_terminator(), str): + self.handle_body() + else: + self.msg = self.parse_type.parse_from_wire(self.get_buffer()) + if self.msg.version == (1, 1) and "chunked" in self.msg.headers.get("Transfer-Encoding", "").lower(): + self.msg.body = [] + self.chunk_handler = self.chunk_header + self.set_terminator("\r\n") + elif "Content-Length" in self.msg.headers: + self.set_terminator(int(self.msg.headers["Content-Length"])) + else: + self.handle_no_content_length() + + def chunk_header(self): + """ + Asynchat just handed us what should be the header of one chunk of + a chunked encoding stream. If this chunk has a body, set the + stream up to read it; otherwise, this is the last chunk, so start + the process of exiting the chunk decoder. 
+ """ + n = int(self.get_buffer().partition(";")[0], 16) + self.log("Chunk length %s" % n) + if n: + self.chunk_handler = self.chunk_body + self.set_terminator(n) + else: + self.msg.body = "".join(self.msg.body) + self.chunk_handler = self.chunk_discard_trailer + + def chunk_body(self): + """ + Asynchat just handed us what should be the body of a chunk of the + body of a chunked message (sic). Save it, and prepare to move on + to the next chunk. + """ + self.log("Chunk body") + self.msg.body += self.buffer + self.buffer = [] + self.chunk_handler = self.chunk_discard_crlf + self.set_terminator("\r\n") + + def chunk_discard_crlf(self): + """ + Consume the CRLF that terminates a chunk, reinitialize chunk + decoder to be ready for the next chunk. + """ + self.log("Chunk CRLF") + s = self.get_buffer() + assert s == "", "%r: Expected chunk CRLF, got '%s'" % (self, s) + self.chunk_handler = self.chunk_header + + def chunk_discard_trailer(self): + """ + Consume chunk trailer, which should be empty, then (finally!) exit + the chunk decoder and hand complete message off to the application. + """ + self.log("Chunk trailer") + s = self.get_buffer() + assert s == "", "%r: Expected end of chunk trailers, got '%s'" % (self, s) + self.chunk_handler = None + self.handle_message() + + def handle_body(self): + """ + Hand normal (not chunked) message off to the application. + """ + self.msg.body = self.get_buffer() + self.handle_message() + + def handle_error(self): + """ + Asynchat (or asyncore, or somebody) raised an exception. See + whether it's one we should just pass along, otherwise log a stack + trace and close the stream. + """ + self.timer.cancel() + etype = sys.exc_info()[0] + if etype in (SystemExit, rpki.async.ExitNow): + raise + rpki.log.traceback() + if etype is not rpki.exceptions.HTTPClientAborted: + self.log("Closing due to error", rpki.log.warn) + self.close() + + def handle_timeout(self): + """ + Inactivity timer expired, close connection with prejudice. 
+ """ + self.log("Timeout, closing") + self.close() + + def handle_close(self): + """ + Wrapper around asynchat connection close handler, so that we can + log the event, cancel timer, and so forth. + """ + self.log("Close event in HTTP stream handler") + self.timer.cancel() + asynchat.async_chat.handle_close(self) + +class http_server(http_stream): + """ + HTTP server stream. + """ + + ## @var parse_type + # Stream parser should look for incoming HTTP request messages. + parse_type = http_request + + ## @var timeout + # Use the default server timeout value set in the module header. + timeout = default_server_timeout + + def __init__(self, sock, handlers): + self.handlers = handlers + http_stream.__init__(self, sock = sock) + self.expect_close = not want_persistent_server + self.log("Starting") + + def handle_no_content_length(self): + """ + Handle an incoming message that used neither chunking nor a + Content-Length header (that is: this message will be the last one + in this server stream). No special action required. + """ + self.handle_message() + + def find_handler(self, path): + """ + Helper method to search self.handlers. + """ + for s, h in self.handlers: + if path.startswith(s): + return h + return None + + def handle_message(self): + """ + HTTP layer managed to deliver a complete HTTP request to + us, figure out what to do with it. Check the command and + Content-Type, look for a handler, and if everything looks right, + pass the message body, path, and a reply callback to the handler. 
+ """ + self.log("Received request %r" % self.msg) + if not self.msg.persistent: + self.expect_close = True + handler = self.find_handler(self.msg.path) + error = None + if self.msg.cmd != "POST": + error = 501, "No handler for method %s" % self.msg.cmd + elif self.msg.headers["Content-Type"] != rpki_content_type: + error = 415, "No handler for Content-Type %s" % self.headers["Content-Type"] + elif handler is None: + error = 404, "No handler for URL %s" % self.msg.path + if error is None: + try: + handler(self.msg.body, self.msg.path, self.send_reply) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + self.send_error(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) + else: + self.send_error(code = error[0], reason = error[1]) + + def send_error(self, code, reason): + """ + Send an error response to this request. + """ + self.send_message(code = code, reason = reason) + + def send_reply(self, code, body = None, reason = "OK"): + """ + Send a reply to this request. + """ + self.send_message(code = code, body = body, reason = reason) + + def send_message(self, code, reason = "OK", body = None): + """ + Queue up reply message. If both parties agree that connection is + persistant, and if no error occurred, restart this stream to + listen for next message; otherwise, queue up a close event for + this stream so it will shut down once the reply has been sent. + """ + self.log("Sending response %s %s" % (code, reason)) + if code >= 400: + self.expect_close = True + msg = http_response(code = code, reason = reason, body = body, + Content_Type = rpki_content_type, + Connection = "Close" if self.expect_close else "Keep-Alive") + self.push(msg.format()) + if self.expect_close: + self.log("Closing") + self.timer.cancel() + self.close_when_done() + else: + self.log("Listening for next message") + self.restart() + +class http_listener(asyncore.dispatcher): + """ + Listener for incoming HTTP connections. 
+ """ + + log = log_method + + def __repr__(self): + try: + status = (addr_to_string(self.addr),) + except TypeError: + status = () + return rpki.log.log_repr(self, *status) + + def __init__(self, handlers, addrinfo): + asyncore.dispatcher.__init__(self) + self.handlers = handlers + try: + af, socktype, proto, canonname, sockaddr = addrinfo # pylint: disable=W0612 + self.create_socket(af, socktype) + self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + try: + self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + except AttributeError: + pass + if have_ipv6 and af == socket.AF_INET6: + self.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) + self.bind(sockaddr) + self.listen(5) + except Exception, e: + self.log("Couldn't set up HTTP listener: %s" % e, rpki.log.warn) + rpki.log.traceback() + self.close() + for h in handlers: + self.log("Handling %s" % h[0]) + + def handle_accept(self): + """ + Asyncore says we have an incoming connection, spawn an http_server + stream for it and pass along all of our handler data. + """ + try: + s, c = self.accept() + self.log("Accepting connection from %s" % addr_to_string(c)) + http_server(sock = s, handlers = self.handlers) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + self.log("Unable to accept connection: %s" % e) + self.handle_error() + + def handle_error(self): + """ + Asyncore signaled an error, pass it along or log it. + """ + if sys.exc_info()[0] in (SystemExit, rpki.async.ExitNow): + raise + self.log("Error in HTTP listener", rpki.log.warn) + rpki.log.traceback() + +class http_client(http_stream): + """ + HTTP client stream. + """ + + ## @var parse_type + # Stream parser should look for incoming HTTP response messages. + parse_type = http_response + + ## @var timeout + # Use the default client timeout value set in the module header. + timeout = default_client_timeout + + ## @var state + # Application layer connection state. 
+ state = None + + def __init__(self, queue, hostport): + self.log("Creating new connection to %s" % addr_to_string(hostport)) + http_stream.__init__(self) + self.queue = queue + self.host = hostport[0] + self.port = hostport[1] + self.set_state("opening") + self.expect_close = not want_persistent_client + + def start(self): + """ + Create socket and request a connection. + """ + if not use_adns: + self.log("Not using ADNS") + self.gotaddrinfo([(socket.AF_INET, self.host)]) + elif self.host == "localhost": + self.log("Bypassing DNS for localhost") + self.gotaddrinfo(localhost_addrinfo()) + else: + families = supported_address_families(enable_ipv6_clients) + self.log("Starting ADNS lookup for %s in families %r" % (self.host, families)) + rpki.adns.getaddrinfo(self.gotaddrinfo, self.dns_error, self.host, families) + + def dns_error(self, e): + """ + Handle DNS lookup errors. For now, just whack the connection. + Undoubtedly we should do something better with diagnostics here. + """ + self.handle_error() + + def gotaddrinfo(self, addrinfo): + """ + Got address data from DNS, create socket and request connection. + """ + try: + self.af, self.address = random.choice(addrinfo) + self.log("Connecting to AF %s host %s port %s addr %s" % (self.af, self.host, self.port, self.address)) + self.create_socket(self.af, socket.SOCK_STREAM) + self.connect((self.address, self.port)) + if self.addr is None: + self.addr = (self.host, self.port) + self.update_timeout() + except (rpki.async.ExitNow, SystemExit): + raise + except Exception: + self.handle_error() + + def handle_connect(self): + """ + Asyncore says socket has connected. + """ + self.log("Socket connected") + self.set_state("idle") + assert self.queue.client is self + self.queue.send_request() + + def set_state(self, state): + """ + Set HTTP client connection state. 
+ """ + self.log("State transition %s => %s" % (self.state, state)) + self.state = state + + def handle_no_content_length(self): + """ + Handle response message that used neither chunking nor a + Content-Length header (that is: this message will be the last one + in this server stream). In this case we want to read until we + reach the end of the data stream. + """ + self.set_terminator(None) + + def send_request(self, msg): + """ + Queue up request message and kickstart connection. + """ + self.log("Sending request %r" % msg) + assert self.state == "idle", "%r: state should be idle, is %s" % (self, self.state) + self.set_state("request-sent") + msg.headers["Connection"] = "Close" if self.expect_close else "Keep-Alive" + self.push(msg.format()) + self.restart() + + def handle_message(self): + """ + Handle incoming HTTP response message. Make sure we're in a state + where we expect to see such a message (and allow the mysterious + empty messages that Apache sends during connection close, no idea + what that is supposed to be about). If everybody agrees that the + connection should stay open, put it into an idle state; otherwise, + arrange for the stream to shut down. 
+ """ + + self.log("Message received, state %s" % self.state) + + if not self.msg.persistent: + self.expect_close = True + + if self.state != "request-sent": + if self.state == "closing": + assert not self.msg.body + self.log("Ignoring empty response received while closing") + return + raise rpki.exceptions.HTTPUnexpectedState, "%r received message while in unexpected state %s" % (self, self.state) + + if self.expect_close: + self.log("Closing") + self.set_state("closing") + self.close_when_done() + else: + self.log("Idling") + self.set_state("idle") + self.update_timeout() + + if self.msg.code != 200: + errmsg = "HTTP request failed" + if self.msg.code is not None: + errmsg += " with status %s" % self.msg.code + if self.msg.reason: + errmsg += ", reason %s" % self.msg.reason + if self.msg.body: + errmsg += ", response %s" % self.msg.body + raise rpki.exceptions.HTTPRequestFailed(errmsg) + self.queue.return_result(self, self.msg, detach = self.expect_close) + + def handle_close(self): + """ + Asyncore signaled connection close. If we were waiting for that + to find the end of a response message, process the resulting + message now; if we were waiting for the response to a request we + sent, signal the error. + """ + http_stream.handle_close(self) + self.log("State %s" % self.state) + if self.get_terminator() is None: + self.handle_body() + elif self.state == "request-sent": + raise rpki.exceptions.HTTPClientAborted, "HTTP request aborted by close event" + else: + self.queue.detach(self) + + def handle_timeout(self): + """ + Connection idle timer has expired. Shut down connection in any + case, noisily if we weren't idle. 
+ """ + bad = self.state not in ("idle", "closing") + if bad: + self.log("Timeout while in state %s" % self.state, rpki.log.warn) + http_stream.handle_timeout(self) + if bad: + try: + raise rpki.exceptions.HTTPTimeout + except: # pylint: disable=W0702 + self.handle_error() + else: + self.queue.detach(self) + + def handle_error(self): + """ + Asyncore says something threw an exception. Log it, then shut + down the connection and pass back the exception. + """ + eclass, edata = sys.exc_info()[0:2] + self.log("Error on HTTP client connection %s:%s %s %s" % (self.host, self.port, eclass, edata), rpki.log.warn) + http_stream.handle_error(self) + self.queue.return_result(self, edata, detach = True) + +class http_queue(object): + """ + Queue of pending HTTP requests for a single destination. This class + is very tightly coupled to http_client; http_client handles the HTTP + stream itself, this class provides a slightly higher-level API. + """ + + log = log_method + + def __repr__(self): + return rpki.log.log_repr(self, addr_to_string(self.hostport)) + + def __init__(self, hostport): + self.hostport = hostport + self.client = None + self.log("Created") + self.queue = [] + + def request(self, *requests): + """ + Append http_request object(s) to this queue. + """ + self.log("Adding requests %r" % requests) + self.queue.extend(requests) + + def restart(self): + """ + Send next request for this queue, if we can. This may involve + starting a new http_client stream, reusing an existing idle + stream, or just ignoring this request if there's an active client + stream already; in the last case, handling of the response (or + exception, or timeout) for the query currently in progress will + call this method when it's time to kick out the next query. 
+ """ + try: + if self.client is None: + self.client = http_client(self, self.hostport) + self.log("Attached client %r" % self.client) + self.client.start() + elif self.client.state == "idle": + self.log("Sending request to existing client %r" % self.client) + self.send_request() + else: + self.log("Client %r exists in state %r" % (self.client, self.client.state)) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + self.return_result(self.client, e, detach = True) + + def send_request(self): + """ + Kick out the next query in this queue, if any. + """ + if self.queue: + self.client.send_request(self.queue[0]) + + def detach(self, client_): + """ + Detatch a client from this queue. Silently ignores attempting to + detach a client that is not attached to this queue, to simplify + handling of what otherwise would be a nasty set of race + conditions. + """ + if client_ is self.client: + self.log("Detaching client %r" % client_) + self.client = None + + def return_result(self, client, result, detach = False): # pylint: disable=W0621 + """ + Client stream has returned a result, which we need to pass along + to the original caller. Result may be either an HTTP response + message or an exception. In either case, once we're done + processing this result, kick off next message in the queue, if any. + """ + + if client is not self.client: + self.log("Wrong client trying to return result. THIS SHOULD NOT HAPPEN. Dropping result %r" % result, rpki.log.warn) + return + + if detach: + self.detach(client) + + try: + req = self.queue.pop(0) + self.log("Dequeuing request %r" % req) + except IndexError: + self.log("No caller. THIS SHOULD NOT HAPPEN. 
Dropping result %r" % result, rpki.log.warn) + return + + assert isinstance(result, http_response) or isinstance(result, Exception) + + if isinstance(result, http_response): + try: + self.log("Returning result %r to caller" % result) + req.callback(result.body) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + result = e + + if isinstance(result, Exception): + try: + self.log("Returning exception %r to caller: %s" % (result, result), rpki.log.warn) + req.errback(result) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception: + # + # If we get here, we may have lost the event chain. Not + # obvious what we can do about it at this point, but force a + # traceback so that it will be somewhat obvious that something + # really bad happened. + # + self.log("Exception in exception callback", rpki.log.warn) + rpki.log.traceback(True) + + self.log("Queue: %r" % self.queue) + + if self.queue: + self.restart() + +## @var client_queues +# Map of (host, port) tuples to http_queue objects. +client_queues = {} + +def client(msg, url, callback, errback): + """ + Open client HTTP connection, send a message, set up callbacks to + handle response. 
+ """ + + u = urlparse.urlparse(url) + + if (u.scheme not in ("", "http") or + u.username is not None or + u.password is not None or + u.params != "" or + u.query != "" or + u.fragment != ""): + raise rpki.exceptions.BadClientURL, "Unusable URL %s" % url + + if debug_http: + rpki.log.debug("Contacting %s" % url) + + request = http_request( + cmd = "POST", + path = u.path, + body = msg, + callback = callback, + errback = errback, + Host = u.hostname, + Content_Type = rpki_content_type) + + hostport = (u.hostname or "localhost", u.port or default_tcp_port) + + if debug_http: + rpki.log.debug("Created request %r for %s" % (request, addr_to_string(hostport))) + if hostport not in client_queues: + client_queues[hostport] = http_queue(hostport) + client_queues[hostport].request(request) + + # Defer connection attempt until after we've had time to process any + # pending I/O events, in case connections have closed. + + if debug_http: + rpki.log.debug("Scheduling connection startup for %r" % request) + rpki.async.event_defer(client_queues[hostport].restart) + +def server(handlers, port, host = ""): + """ + Run an HTTP server and wait (forever) for connections. + """ + + if not isinstance(handlers, (tuple, list)): + handlers = (("/", handlers),) + + # Yes, this is sick. So is getaddrinfo() returning duplicate + # records, which RedHat has the gall to claim is a feature. + ai = [] + for af in supported_address_families(enable_ipv6_servers): + try: + if host: + h = host + elif have_ipv6 and af == socket.AF_INET6: + h = "::" + else: + h = "0.0.0.0" + for a in socket.getaddrinfo(h, port, af, socket.SOCK_STREAM): + if a not in ai: + ai.append(a) + except socket.gaierror: + pass + + for a in ai: + http_listener(addrinfo = a, handlers = handlers) + + rpki.async.event_loop() + +class caller(object): + """ + Handle client-side mechanics for protocols based on HTTP, CMS, and + rpki.xml_utils. Calling sequence is intended to nest within + rpki.async.sync_wrapper. 
+ """ + + debug = False + + def __init__(self, proto, client_key, client_cert, server_ta, server_cert, url, debug = None): + self.proto = proto + self.client_key = client_key + self.client_cert = client_cert + self.server_ta = server_ta + self.server_cert = server_cert + self.url = url + self.cms_timestamp = None + if debug is not None: + self.debug = debug + + def __call__(self, cb, eb, *pdus): + + def done(r_der): + """ + Handle CMS-wrapped XML response message. + """ + try: + r_cms = self.proto.cms_msg(DER = r_der) + r_msg = r_cms.unwrap((self.server_ta, self.server_cert)) + self.cms_timestamp = r_cms.check_replay(self.cms_timestamp, self.url) + if self.debug: + print "" + print r_cms.pretty_print_content() + cb(r_msg) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + eb(e) + + q_msg = self.proto.msg.query(*pdus) + q_cms = self.proto.cms_msg() + q_der = q_cms.wrap(q_msg, self.client_key, self.client_cert) + if self.debug: + print "" + print q_cms.pretty_print_content() + + client(url = self.url, msg = q_der, callback = done, errback = eb) diff --git a/rpki/ipaddrs.py b/rpki/ipaddrs.py new file mode 100644 index 00000000..c1855302 --- /dev/null +++ b/rpki/ipaddrs.py @@ -0,0 +1,137 @@ +# $Id$ +# +# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Classes to represent IP addresses. These are mostly obsolete at this +point, having been replaced by the rpki.POW.IPAddress class, but there +may still be some code using these, so keep them for now for backwards +compatability. + +Given some of the other operations we need to perform on them, it's +most convenient to represent IP addresses as Python "long" values. +The classes in this module just wrap suitable read/write syntax around +the underlying "long" type. + +These classes also supply a "bits" attribute for use by other code +built on these classes; for the most part, IPv6 addresses really are +just IPv4 addresses with more bits, so we supply the number of bits +once, here, thus avoiding a lot of duplicate code elsewhere. +""" + +import socket, struct + +class v4addr(long): + """ + IPv4 address. 
+ + Derived from long, but supports IPv4 print syntax. + """ + + bits = 32 + ipversion = 4 + + def __new__(cls, x): + """ + Construct a v4addr object. + """ + if isinstance(x, unicode): + x = x.encode("ascii") + if isinstance(x, str): + return cls.from_bytes(socket.inet_pton(socket.AF_INET, ".".join(str(int(i)) for i in x.split(".")))) + else: + return long.__new__(cls, x) + + def to_bytes(self): + """ + Convert a v4addr object to a raw byte string. + """ + return struct.pack("!I", long(self)) + + @classmethod + def from_bytes(cls, x): + """ + Convert from a raw byte string to a v4addr object. + """ + return cls(struct.unpack("!I", x)[0]) + + def __str__(self): + """ + Convert a v4addr object to string format. + """ + return socket.inet_ntop(socket.AF_INET, self.to_bytes()) + +class v6addr(long): + """ + IPv6 address. + + Derived from long, but supports IPv6 print syntax. + """ + + bits = 128 + ipversion = 6 + + def __new__(cls, x): + """ + Construct a v6addr object. + """ + if isinstance(x, unicode): + x = x.encode("ascii") + if isinstance(x, str): + return cls.from_bytes(socket.inet_pton(socket.AF_INET6, x)) + else: + return long.__new__(cls, x) + + def to_bytes(self): + """ + Convert a v6addr object to a raw byte string. + """ + return struct.pack("!QQ", long(self) >> 64, long(self) & 0xFFFFFFFFFFFFFFFF) + + @classmethod + def from_bytes(cls, x): + """ + Convert from a raw byte string to a v6addr object. + """ + x = struct.unpack("!QQ", x) + return cls((x[0] << 64) | x[1]) + + def __str__(self): + """ + Convert a v6addr object to string format. + """ + return socket.inet_ntop(socket.AF_INET6, self.to_bytes()) + +def parse(s): + """ + Parse a string as either an IPv4 or IPv6 address, and return object of appropriate class. 
+ """ + if isinstance(s, unicode): + s = s.encode("ascii") + return v6addr(s) if ":" in s else v4addr(s) diff --git a/rpki/irdb/__init__.py b/rpki/irdb/__init__.py new file mode 100644 index 00000000..cc83387e --- /dev/null +++ b/rpki/irdb/__init__.py @@ -0,0 +1,26 @@ +# $Id$ +# +# Copyright (C) 2011-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Django really wants its models packaged up in a "models" module within a +Python package, so humor it. +""" + +# pylint: disable=W0401 + +from rpki.irdb.models import * +from rpki.irdb.zookeeper import Zookeeper +from rpki.irdb.router import DBContextRouter, database diff --git a/rpki/irdb/models.py b/rpki/irdb/models.py new file mode 100644 index 00000000..1ad9b4e3 --- /dev/null +++ b/rpki/irdb/models.py @@ -0,0 +1,646 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2011--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Internet Registry (IR) Database, Django-style. + +This is the back-end code's interface to the database. It's intended +to be usable by command line programs and other scripts, not just +Django GUI code, so be careful. +""" + +# pylint: disable=W0232 + +import django.db.models +import rpki.x509 +import rpki.sundial +import rpki.resource_set +import socket +import rpki.POW +from south.modelsinspector import add_introspection_rules + +## @var ip_version_choices +# Choice argument for fields implementing IP version numbers. + +ip_version_choices = ((4, "IPv4"), (6, "IPv6")) + +## @var ca_certificate_lifetime +# Lifetime for a BPKI CA certificate. + +ca_certificate_lifetime = rpki.sundial.timedelta(days = 3652) + +## @var crl_interval + +# Expected interval between BPKI CRL updates. This should be a little +# longer than the real regeneration cycle, so that the old CRL will +# not go stale while we're generating the new one. Eg, if we +# regenerate daily, an interval of 24 hours is too short, but 25 hours +# would be OK, as would 24 hours and 30 minutes. + +crl_interval = rpki.sundial.timedelta(hours = 25) + +## @var ee_certificate_lifetime +# Lifetime for a BPKI EE certificate. + +ee_certificate_lifetime = rpki.sundial.timedelta(days = 60) + +### + +# Field types + +class HandleField(django.db.models.CharField): + """ + A handle field type. 
+ """ + + description = 'A "handle" in one of the RPKI protocols' + + def __init__(self, *args, **kwargs): + kwargs["max_length"] = 120 + django.db.models.CharField.__init__(self, *args, **kwargs) + +class EnumField(django.db.models.PositiveSmallIntegerField): + """ + An enumeration type that uses strings in Python and small integers + in SQL. + """ + + description = "An enumeration type" + + __metaclass__ = django.db.models.SubfieldBase + + def __init__(self, *args, **kwargs): + if isinstance(kwargs.get("choices"), (tuple, list)) and isinstance(kwargs["choices"][0], str): + kwargs["choices"] = tuple(enumerate(kwargs["choices"], 1)) + django.db.models.PositiveSmallIntegerField.__init__(self, *args, **kwargs) + self.enum_i2s = dict(self.flatchoices) + self.enum_s2i = dict((v, k) for k, v in self.flatchoices) + + def to_python(self, value): + return self.enum_i2s.get(value, value) + + def get_prep_value(self, value): + return self.enum_s2i.get(value, value) + +class SundialField(django.db.models.DateTimeField): + """ + A field type for our customized datetime objects. + """ + __metaclass__ = django.db.models.SubfieldBase + + description = "A datetime type using our customized datetime objects" + + def to_python(self, value): + if isinstance(value, rpki.sundial.pydatetime.datetime): + return rpki.sundial.datetime.from_datetime( + django.db.models.DateTimeField.to_python(self, value)) + else: + return value + + def get_prep_value(self, value): + if isinstance(value, rpki.sundial.datetime): + return value.to_datetime() + else: + return value + + +class DERField(django.db.models.Field): + """ + Field types for DER objects. 
+ """ + + __metaclass__ = django.db.models.SubfieldBase + + def __init__(self, *args, **kwargs): + kwargs["serialize"] = False + kwargs["blank"] = True + kwargs["default"] = None + django.db.models.Field.__init__(self, *args, **kwargs) + + def db_type(self, connection): + if connection.settings_dict['ENGINE'] == "django.db.backends.posgresql": + return "bytea" + else: + return "BLOB" + + def to_python(self, value): + assert value is None or isinstance(value, (self.rpki_type, str)) + if isinstance(value, str): + return self.rpki_type(DER = value) + else: + return value + + def get_prep_value(self, value): + assert value is None or isinstance(value, (self.rpki_type, str)) + if isinstance(value, self.rpki_type): + return value.get_DER() + else: + return value + +class CertificateField(DERField): + description = "X.509 certificate" + rpki_type = rpki.x509.X509 + +class RSAKeyField(DERField): + description = "RSA keypair" + rpki_type = rpki.x509.RSA + +class CRLField(DERField): + description = "Certificate Revocation List" + rpki_type = rpki.x509.CRL + +class PKCS10Field(DERField): + description = "PKCS #10 certificate request" + rpki_type = rpki.x509.PKCS10 + +class SignedReferralField(DERField): + description = "CMS signed object containing XML" + rpki_type = rpki.x509.SignedReferral + + +# Custom managers + +class CertificateManager(django.db.models.Manager): + + def get_or_certify(self, **kwargs): + """ + Sort of like .get_or_create(), but for models containing + certificates which need to be generated based on other fields. + + Takes keyword arguments like .get(), checks for existing object. + If none, creates a new one; if found an existing object but some + of the non-key fields don't match, updates the existing object. + Runs certification method for new or updated objects. Returns a + tuple consisting of the object and a boolean indicating whether + anything has changed. 
+ """ + + changed = False + + try: + obj = self.get(**self._get_or_certify_keys(kwargs)) + + except self.model.DoesNotExist: + obj = self.model(**kwargs) + changed = True + + else: + for k in kwargs: + if getattr(obj, k) != kwargs[k]: + setattr(obj, k, kwargs[k]) + changed = True + + if changed: + obj.avow() + obj.save() + + return obj, changed + + def _get_or_certify_keys(self, kwargs): + assert len(self.model._meta.unique_together) == 1 + return dict((k, kwargs[k]) for k in self.model._meta.unique_together[0]) + +class ResourceHolderCAManager(CertificateManager): + def _get_or_certify_keys(self, kwargs): + return { "handle" : kwargs["handle"] } + +class ServerCAManager(CertificateManager): + def _get_or_certify_keys(self, kwargs): + return { "pk" : 1 } + +class ResourceHolderEEManager(CertificateManager): + def _get_or_certify_keys(self, kwargs): + return { "issuer" : kwargs["issuer"] } + +### + +class CA(django.db.models.Model): + certificate = CertificateField() + private_key = RSAKeyField() + latest_crl = CRLField() + + # Might want to bring these into line with what rpkid does. Current + # variables here were chosen to map easily to what OpenSSL command + # line tool was keeping on disk. 
+ + next_serial = django.db.models.BigIntegerField(default = 1) + next_crl_number = django.db.models.BigIntegerField(default = 1) + last_crl_update = SundialField() + next_crl_update = SundialField() + + class Meta: + abstract = True + + def avow(self): + if self.private_key is None: + self.private_key = rpki.x509.RSA.generate(quiet = True) + now = rpki.sundial.now() + notAfter = now + ca_certificate_lifetime + self.certificate = rpki.x509.X509.bpki_self_certify( + keypair = self.private_key, + subject_name = self.subject_name, + serial = self.next_serial, + now = now, + notAfter = notAfter) + self.next_serial += 1 + self.generate_crl() + return self.certificate + + def certify(self, subject_name, subject_key, validity_interval, is_ca, pathLenConstraint = None): + now = rpki.sundial.now() + notAfter = now + validity_interval + result = self.certificate.bpki_certify( + keypair = self.private_key, + subject_name = subject_name, + subject_key = subject_key, + serial = self.next_serial, + now = now, + notAfter = notAfter, + is_ca = is_ca, + pathLenConstraint = pathLenConstraint) + self.next_serial += 1 + return result + + def revoke(self, cert): + Revocation.objects.create( + issuer = self, + revoked = rpki.sundial.now(), + serial = cert.certificate.getSerial(), + expires = cert.certificate.getNotAfter() + crl_interval) + cert.delete() + self.generate_crl() + + def generate_crl(self): + now = rpki.sundial.now() + self.revocations.filter(expires__lt = now).delete() + revoked = [(r.serial, r.revoked) for r in self.revocations.all()] + self.latest_crl = rpki.x509.CRL.generate( + keypair = self.private_key, + issuer = self.certificate, + serial = self.next_crl_number, + thisUpdate = now, + nextUpdate = now + crl_interval, + revokedCertificates = revoked) + self.last_crl_update = now + self.next_crl_update = now + crl_interval + self.next_crl_number += 1 + +class ServerCA(CA): + objects = ServerCAManager() + + def __unicode__(self): + return "" + + @property + def 
subject_name(self): + if self.certificate is not None: + return self.certificate.getSubject() + else: + return rpki.x509.X501DN.from_cn("%s BPKI server CA" % socket.gethostname()) + +class ResourceHolderCA(CA): + handle = HandleField(unique = True) + objects = ResourceHolderCAManager() + + def __unicode__(self): + return self.handle + + @property + def subject_name(self): + if self.certificate is not None: + return self.certificate.getSubject() + else: + return rpki.x509.X501DN.from_cn("%s BPKI resource CA" % self.handle) + +class Certificate(django.db.models.Model): + + certificate = CertificateField() + objects = CertificateManager() + + class Meta: + abstract = True + unique_together = ("issuer", "handle") + + def revoke(self): + self.issuer.revoke(self) + +class CrossCertification(Certificate): + handle = HandleField() + ta = CertificateField() + + class Meta: + abstract = True + + def avow(self): + self.certificate = self.issuer.certify( + subject_name = self.ta.getSubject(), + subject_key = self.ta.getPublicKey(), + validity_interval = ee_certificate_lifetime, + is_ca = True, + pathLenConstraint = 0) + + def __unicode__(self): + return self.handle + +class HostedCA(Certificate): + issuer = django.db.models.ForeignKey(ServerCA) + hosted = django.db.models.OneToOneField(ResourceHolderCA, related_name = "hosted_by") + + def avow(self): + self.certificate = self.issuer.certify( + subject_name = self.hosted.certificate.getSubject(), + subject_key = self.hosted.certificate.getPublicKey(), + validity_interval = ee_certificate_lifetime, + is_ca = True, + pathLenConstraint = 1) + + class Meta: + unique_together = ("issuer", "hosted") + + def __unicode__(self): + return self.hosted.handle + +class Revocation(django.db.models.Model): + serial = django.db.models.BigIntegerField() + revoked = SundialField() + expires = SundialField() + + class Meta: + abstract = True + unique_together = ("issuer", "serial") + +class ServerRevocation(Revocation): + issuer = 
django.db.models.ForeignKey(ServerCA, related_name = "revocations") + +class ResourceHolderRevocation(Revocation): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "revocations") + +class EECertificate(Certificate): + private_key = RSAKeyField() + + class Meta: + abstract = True + + def avow(self): + if self.private_key is None: + self.private_key = rpki.x509.RSA.generate(quiet = True) + self.certificate = self.issuer.certify( + subject_name = self.subject_name, + subject_key = self.private_key.get_public(), + validity_interval = ee_certificate_lifetime, + is_ca = False) + +class ServerEE(EECertificate): + issuer = django.db.models.ForeignKey(ServerCA, related_name = "ee_certificates") + purpose = EnumField(choices = ("rpkid", "pubd", "irdbd", "irbe")) + + class Meta: + unique_together = ("issuer", "purpose") + + @property + def subject_name(self): + return rpki.x509.X501DN.from_cn("%s BPKI %s EE" % (socket.gethostname(), + self.get_purpose_display())) + +class Referral(EECertificate): + issuer = django.db.models.OneToOneField(ResourceHolderCA, related_name = "referral_certificate") + objects = ResourceHolderEEManager() + + @property + def subject_name(self): + return rpki.x509.X501DN.from_cn("%s BPKI Referral EE" % self.issuer.handle) + +class Turtle(django.db.models.Model): + service_uri = django.db.models.CharField(max_length = 255) + +class Rootd(EECertificate, Turtle): + issuer = django.db.models.OneToOneField(ResourceHolderCA, related_name = "rootd") + objects = ResourceHolderEEManager() + + @property + def subject_name(self): + return rpki.x509.X501DN.from_cn("%s BPKI rootd EE" % self.issuer.handle) + +class BSC(Certificate): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "bscs") + handle = HandleField() + pkcs10 = PKCS10Field() + + def avow(self): + self.certificate = self.issuer.certify( + subject_name = self.pkcs10.getSubject(), + subject_key = self.pkcs10.getPublicKey(), + validity_interval = 
ee_certificate_lifetime, + is_ca = False) + + def __unicode__(self): + return self.handle + +class ResourceSet(django.db.models.Model): + valid_until = SundialField() + + class Meta: + abstract = True + + @property + def resource_bag(self): + raw_asn, raw_net = self._select_resource_bag() + asns = rpki.resource_set.resource_set_as.from_django( + (a.start_as, a.end_as) for a in raw_asn) + ipv4 = rpki.resource_set.resource_set_ipv4.from_django( + (a.start_ip, a.end_ip) for a in raw_net if a.version == "IPv4") + ipv6 = rpki.resource_set.resource_set_ipv6.from_django( + (a.start_ip, a.end_ip) for a in raw_net if a.version == "IPv6") + return rpki.resource_set.resource_bag( + valid_until = self.valid_until, asn = asns, v4 = ipv4, v6 = ipv6) + + # Writing of .setter method deferred until something needs it. + +class ResourceSetASN(django.db.models.Model): + start_as = django.db.models.BigIntegerField() + end_as = django.db.models.BigIntegerField() + + class Meta: + abstract = True + + def as_resource_range(self): + return rpki.resource_set.resource_range_as(self.start_as, self.end_as) + +class ResourceSetNet(django.db.models.Model): + start_ip = django.db.models.CharField(max_length = 40) + end_ip = django.db.models.CharField(max_length = 40) + version = EnumField(choices = ip_version_choices) + + class Meta: + abstract = True + + def as_resource_range(self): + return rpki.resource_set.resource_range_ip.from_strings(self.start_ip, self.end_ip) + +class Child(CrossCertification, ResourceSet): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "children") + name = django.db.models.TextField(null = True, blank = True) + + def _select_resource_bag(self): + child_asn = rpki.irdb.ChildASN.objects.raw(""" + SELECT * + FROM irdb_childasn + WHERE child_id = %s + """, [self.id]) + child_net = list(rpki.irdb.ChildNet.objects.raw(""" + SELECT * + FROM irdb_childnet + WHERE child_id = %s + """, [self.id])) + return child_asn, child_net + + class Meta: + 
unique_together = ("issuer", "handle") + +class ChildASN(ResourceSetASN): + child = django.db.models.ForeignKey(Child, related_name = "asns") + + class Meta: + unique_together = ("child", "start_as", "end_as") + +class ChildNet(ResourceSetNet): + child = django.db.models.ForeignKey(Child, related_name = "address_ranges") + + class Meta: + unique_together = ("child", "start_ip", "end_ip", "version") + +class Parent(CrossCertification, Turtle): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "parents") + parent_handle = HandleField() + child_handle = HandleField() + repository_type = EnumField(choices = ("none", "offer", "referral")) + referrer = HandleField(null = True, blank = True) + referral_authorization = SignedReferralField(null = True, blank = True) + + # This shouldn't be necessary + class Meta: + unique_together = ("issuer", "handle") + +class ROARequest(django.db.models.Model): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "roa_requests") + asn = django.db.models.BigIntegerField() + + @property + def roa_prefix_bag(self): + prefixes = list(rpki.irdb.ROARequestPrefix.objects.raw(""" + SELECT * + FROM irdb_roarequestprefix + WHERE roa_request_id = %s + """, [self.id])) + v4 = rpki.resource_set.roa_prefix_set_ipv4.from_django( + (p.prefix, p.prefixlen, p.max_prefixlen) for p in prefixes if p.version == "IPv4") + v6 = rpki.resource_set.roa_prefix_set_ipv6.from_django( + (p.prefix, p.prefixlen, p.max_prefixlen) for p in prefixes if p.version == "IPv6") + return rpki.resource_set.roa_prefix_bag(v4 = v4, v6 = v6) + + # Writing of .setter method deferred until something needs it. 
+ +class ROARequestPrefix(django.db.models.Model): + roa_request = django.db.models.ForeignKey(ROARequest, related_name = "prefixes") + version = EnumField(choices = ip_version_choices) + prefix = django.db.models.CharField(max_length = 40) + prefixlen = django.db.models.PositiveSmallIntegerField() + max_prefixlen = django.db.models.PositiveSmallIntegerField() + + def as_roa_prefix(self): + if self.version == 'IPv4': + return rpki.resource_set.roa_prefix_ipv4(rpki.POW.IPAddress(self.prefix), self.prefixlen, self.max_prefixlen) + else: + return rpki.resource_set.roa_prefix_ipv6(rpki.POW.IPAddress(self.prefix), self.prefixlen, self.max_prefixlen) + + def as_resource_range(self): + return self.as_roa_prefix().to_resource_range() + + class Meta: + unique_together = ("roa_request", "version", "prefix", "prefixlen", "max_prefixlen") + +class GhostbusterRequest(django.db.models.Model): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "ghostbuster_requests") + parent = django.db.models.ForeignKey(Parent, related_name = "ghostbuster_requests", null = True) + vcard = django.db.models.TextField() + +class EECertificateRequest(ResourceSet): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "ee_certificate_requests") + pkcs10 = PKCS10Field() + gski = django.db.models.CharField(max_length = 27) + cn = django.db.models.CharField(max_length = 64) + sn = django.db.models.CharField(max_length = 64) + eku = django.db.models.TextField(null = True) + + def _select_resource_bag(self): + ee_asn = rpki.irdb.EECertificateRequestASN.objects.raw(""" + SELECT * + FROM irdb_eecertificaterequestasn + WHERE ee_certificate_request_id = %s + """, [self.id]) + ee_net = rpki.irdb.EECertificateRequestNet.objects.raw(""" + SELECT * + FROM irdb_eecertificaterequestnet + WHERE ee_certificate_request_id = %s + """, [self.id]) + return ee_asn, ee_net + + class Meta: + unique_together = ("issuer", "gski") + +class EECertificateRequestASN(ResourceSetASN): + 
ee_certificate_request = django.db.models.ForeignKey(EECertificateRequest, related_name = "asns") + + class Meta: + unique_together = ("ee_certificate_request", "start_as", "end_as") + +class EECertificateRequestNet(ResourceSetNet): + ee_certificate_request = django.db.models.ForeignKey(EECertificateRequest, related_name = "address_ranges") + + class Meta: + unique_together = ("ee_certificate_request", "start_ip", "end_ip", "version") + +class Repository(CrossCertification): + issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "repositories") + client_handle = HandleField() + service_uri = django.db.models.CharField(max_length = 255) + sia_base = django.db.models.TextField() + turtle = django.db.models.OneToOneField(Turtle, related_name = "repository") + + # This shouldn't be necessary + class Meta: + unique_together = ("issuer", "handle") + +class Client(CrossCertification): + issuer = django.db.models.ForeignKey(ServerCA, related_name = "clients") + sia_base = django.db.models.TextField() + parent_handle = HandleField() + + # This shouldn't be necessary + class Meta: + unique_together = ("issuer", "handle") + +# for Django South -- these are just simple subclasses +add_introspection_rules([], + ('^rpki\.irdb\.models\.CertificateField', + '^rpki\.irdb\.models\.CRLField', + '^rpki\.irdb\.models\.EnumField', + '^rpki\.irdb\.models\.HandleField', + '^rpki\.irdb\.models\.RSAKeyField', + '^rpki\.irdb\.models\.SignedReferralField', + '^rpki\.irdb\.models\.SundialField')) diff --git a/rpki/irdb/router.py b/rpki/irdb/router.py new file mode 100644 index 00000000..1f27d0c9 --- /dev/null +++ b/rpki/irdb/router.py @@ -0,0 +1,95 @@ +# $Id$ +# +# Copyright (C) 2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Django-style "Database router". + +For most programs, you don't need this. Django's normal mode of +behavior is to use a single SQL database for the IRDB, which is +normally what we want. For certain test scenarios, however, it's +useful to be able to use the same Django ORM models and managers with +multiple databases without having to complicate the interface by +passing database names everywhere. Using a database router +accomplishes this. +""" + +class DBContextRouter(object): + """ + A Django database router for use with multiple IRDBs. + + This router is designed to work in conjunction with the + rpki.irdb.database context handler (q.v.). + """ + + _app = "irdb" + + _database = None + + def db_for_read(self, model, **hints): + if model._meta.app_label == self._app: + return self._database + else: + return None + + def db_for_write(self, model, **hints): + if model._meta.app_label == self._app: + return self._database + else: + return None + + def allow_relation(self, obj1, obj2, **hints): + if self._database is None: + return None + elif obj1._meta.app_label == self._app and obj2._meta.app_label == self._app: + return True + else: + return None + + def allow_syncdb(self, db, model): + if db == self._database and model._meta.app_label == self._app: + return True + else: + return None + +class database(object): + """ + Context manager for use with DBContextRouter. 
Use thusly: + + with rpki.irdb.database("blarg"): + do_stuff() + + This binds IRDB operations to database blarg for the duration of + the call to do_stuff(), then restores the prior state. + """ + + def __init__(self, name, on_entry = None, on_exit = None): + if not isinstance(name, str): + raise ValueError("database name must be a string, not %r" % name) + self.name = name + self.on_entry = on_entry + self.on_exit = on_exit + + def __enter__(self): + if self.on_entry is not None: + self.on_entry() + self.former = DBContextRouter._database + DBContextRouter._database = self.name + + def __exit__(self, _type, value, traceback): + assert DBContextRouter._database is self.name + DBContextRouter._database = self.former + if self.on_exit is not None: + self.on_exit() diff --git a/rpki/irdb/zookeeper.py b/rpki/irdb/zookeeper.py new file mode 100644 index 00000000..f99dc9f0 --- /dev/null +++ b/rpki/irdb/zookeeper.py @@ -0,0 +1,1682 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Management code for the IRDB. 
+""" + +# pylint: disable=W0612 + +import os +import copy +import types +import rpki.config +import rpki.cli +import rpki.sundial +import rpki.log +import rpki.oids +import rpki.http +import rpki.resource_set +import rpki.relaxng +import rpki.exceptions +import rpki.left_right +import rpki.x509 +import rpki.async +import rpki.irdb +import django.db.transaction + +from lxml.etree import (Element, SubElement, ElementTree, + tostring as ElementToString) + +from rpki.csv_utils import csv_reader + + + +# XML namespace and protocol version for OOB setup protocol. The name +# is historical and may change before we propose this as the basis for +# a standard. + +myrpki_namespace = "http://www.hactrn.net/uris/rpki/myrpki/" +myrpki_version = "2" +myrpki_namespaceQName = "{" + myrpki_namespace + "}" + +# XML namespace and protocol version for router certificate requests. +# We probably ought to be pulling this sort of thing from the schema, +# with an assertion to make sure that we understand the current +# protocol version number, but just copy what we did for myrpki until +# I'm ready to rewrite the rpki.relaxng code. + +routercert_namespace = "http://www.hactrn.net/uris/rpki/router-certificate/" +routercert_version = "1" +routercert_namespaceQName = "{" + routercert_namespace + "}" + +myrpki_section = "myrpki" +irdbd_section = "irdbd" +rpkid_section = "rpkid" +pubd_section = "pubd" +rootd_section = "rootd" + +# A whole lot of exceptions + +class HandleNotSet(Exception): "Handle not set." +class MissingHandle(Exception): "Missing handle." +class CouldntTalkToDaemon(Exception): "Couldn't talk to daemon." +class BadXMLMessage(Exception): "Bad XML message." +class PastExpiration(Exception): "Expiration date has already passed." +class CantRunRootd(Exception): "Can't run rootd." + + + +def B64Element(e, tag, obj, **kwargs): + """ + Create an XML element containing Base64 encoded data taken from a + DER object. 
+ """ + + if e is None: + se = Element(tag, **kwargs) + else: + se = SubElement(e, tag, **kwargs) + if e is not None and e.text is None: + e.text = "\n" + se.text = "\n" + obj.get_Base64() + se.tail = "\n" + return se + +class PEM_writer(object): + """ + Write PEM files to disk, keeping track of which ones we've already + written and setting the file mode appropriately. + + Comparing the old file with what we're about to write serves no real + purpose except to calm users who find repeated messages about + writing the same file confusing. + """ + + def __init__(self, logstream = None): + self.wrote = set() + self.logstream = logstream + + def __call__(self, filename, obj, compare = True): + filename = os.path.realpath(filename) + if filename in self.wrote: + return + tempname = filename + pem = obj.get_PEM() + if not filename.startswith("/dev/"): + try: + if compare and pem == open(filename, "r").read(): + return + except: # pylint: disable=W0702 + pass + tempname += ".%s.tmp" % os.getpid() + mode = 0400 if filename.endswith(".key") else 0444 + if self.logstream is not None: + self.logstream.write("Writing %s\n" % filename) + f = os.fdopen(os.open(tempname, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, mode), "w") + f.write(pem) + f.close() + if tempname != filename: + os.rename(tempname, filename) + self.wrote.add(filename) + + + + +def etree_read(filename): + """ + Read an etree from a file, verifying then stripping XML namespace + cruft. + """ + + e = ElementTree(file = filename).getroot() + rpki.relaxng.myrpki.assertValid(e) + for i in e.getiterator(): + if i.tag.startswith(myrpki_namespaceQName): + i.tag = i.tag[len(myrpki_namespaceQName):] + else: + raise BadXMLMessage, "XML tag %r is not in namespace %r" % (i.tag, myrpki_namespace) + return e + + +class etree_wrapper(object): + """ + Wrapper for ETree objects so we can return them as function results + without requiring the caller to understand much about them. 
+ + """ + + def __init__(self, e, msg = None, debug = False): + self.msg = msg + e = copy.deepcopy(e) + e.set("version", myrpki_version) + for i in e.getiterator(): + if i.tag[0] != "{": + i.tag = myrpki_namespaceQName + i.tag + assert i.tag.startswith(myrpki_namespaceQName) + if debug: + print ElementToString(e) + rpki.relaxng.myrpki.assertValid(e) + self.etree = e + + def __str__(self): + return ElementToString(self.etree) + + def save(self, filename, logstream = None): + filename = os.path.realpath(filename) + tempname = filename + if not filename.startswith("/dev/"): + tempname += ".%s.tmp" % os.getpid() + ElementTree(self.etree).write(tempname) + if tempname != filename: + os.rename(tempname, filename) + if logstream is not None: + logstream.write("Wrote %s\n" % filename) + if self.msg is not None: + logstream.write(self.msg + "\n") + + @property + def file(self): + from cStringIO import StringIO + return StringIO(ElementToString(self.etree)) + + + +class Zookeeper(object): + + ## @var show_xml + # Whether to show XML for debugging + + show_xml = False + + def __init__(self, cfg = None, handle = None, logstream = None): + + if cfg is None: + cfg = rpki.config.parser() + + if handle is None: + handle = cfg.get("handle", section = myrpki_section) + + self.cfg = cfg + + self.logstream = logstream + + self.run_rpkid = cfg.getboolean("run_rpkid", section = myrpki_section) + self.run_pubd = cfg.getboolean("run_pubd", section = myrpki_section) + self.run_rootd = cfg.getboolean("run_rootd", section = myrpki_section) + + if self.run_rootd and (not self.run_pubd or not self.run_rpkid): + raise CantRunRootd, "Can't run rootd unless also running rpkid and pubd" + + self.default_repository = cfg.get("default_repository", "", section = myrpki_section) + self.pubd_contact_info = cfg.get("pubd_contact_info", "", section = myrpki_section) + + self.rsync_module = cfg.get("publication_rsync_module", section = myrpki_section) + self.rsync_server = 
cfg.get("publication_rsync_server", section = myrpki_section) + + self.reset_identity(handle) + + + def reset_identity(self, handle): + """ + Select handle of current resource holding entity. + """ + + if handle is None: + raise MissingHandle + self.handle = handle + + + def set_logstream(self, logstream): + """ + Set log stream for this Zookeeper. The log stream is a file-like + object, or None to suppress all logging. + """ + + self.logstream = logstream + + + def log(self, msg): + """ + Send some text to this Zookeeper's log stream, if one is set. + """ + + if self.logstream is not None: + self.logstream.write(msg) + self.logstream.write("\n") + + + @property + def resource_ca(self): + """ + Get ResourceHolderCA object associated with current handle. + """ + + if self.handle is None: + raise HandleNotSet + return rpki.irdb.ResourceHolderCA.objects.get(handle = self.handle) + + + @property + def server_ca(self): + """ + Get ServerCA object. + """ + + return rpki.irdb.ServerCA.objects.get() + + + @django.db.transaction.commit_on_success + def initialize_server_bpki(self): + """ + Initialize server BPKI portion of an RPKI installation. Reads the + configuration file and generates the initial BPKI server + certificates needed to start daemons. + """ + + if self.run_rpkid or self.run_pubd: + server_ca, created = rpki.irdb.ServerCA.objects.get_or_certify() + rpki.irdb.ServerEE.objects.get_or_certify(issuer = server_ca, purpose = "irbe") + + if self.run_rpkid: + rpki.irdb.ServerEE.objects.get_or_certify(issuer = server_ca, purpose = "rpkid") + rpki.irdb.ServerEE.objects.get_or_certify(issuer = server_ca, purpose = "irdbd") + + if self.run_pubd: + rpki.irdb.ServerEE.objects.get_or_certify(issuer = server_ca, purpose = "pubd") + + + @django.db.transaction.commit_on_success + def initialize_resource_bpki(self): + """ + Initialize the resource-holding BPKI for an RPKI installation. + Returns XML describing the resource holder. 
+ + This method is present primarily for backwards compatibility with + the old combined initialize() method which initialized both the + server BPKI and the default resource-holding BPKI in a single + method call. In the long run we want to replace this with + something that takes a handle as argument and creates the + resource-holding BPKI identity if needed. + """ + + resource_ca, created = rpki.irdb.ResourceHolderCA.objects.get_or_certify(handle = self.handle) + return self.generate_identity() + + + def initialize(self): + """ + Backwards compatibility wrapper: calls initialize_server_bpki() + and initialize_resource_bpki(), returns latter's result. + """ + + self.initialize_server_bpki() + return self.initialize_resource_bpki() + + + def generate_identity(self): + """ + Generate identity XML. Broken out of .initialize() because it's + easier for the GUI this way. + """ + + e = Element("identity", handle = self.handle) + B64Element(e, "bpki_ta", self.resource_ca.certificate) + return etree_wrapper(e, msg = 'This is the "identity" file you will need to send to your parent') + + + @django.db.transaction.commit_on_success + def delete_self(self): + """ + Delete the ResourceHolderCA object corresponding to the current handle. + This corresponds to deleting an rpkid object. + + This code assumes the normal Django cascade-on-delete behavior, + that is, we assume that deleting the ResourceHolderCA object + deletes all the subordinate objects that refer to it via foreign + key relationships. 
+ """ + + resource_ca = self.resource_ca + if resource_ca is not None: + resource_ca.delete() + else: + self.log("No such ResourceHolderCA \"%s\"" % self.handle) + + + @django.db.transaction.commit_on_success + def configure_rootd(self): + + assert self.run_rpkid and self.run_pubd and self.run_rootd + + rpki.irdb.Rootd.objects.get_or_certify( + issuer = self.resource_ca, + service_uri = "http://localhost:%s/" % self.cfg.get("rootd_server_port", section = myrpki_section)) + + return self.generate_rootd_repository_offer() + + + def generate_rootd_repository_offer(self): + """ + Generate repository offer for rootd. Split out of + configure_rootd() because that's easier for the GUI. + """ + + # The following assumes we'll set up the respository manually. + # Not sure this is a reasonable assumption, particularly if we + # ever fix rootd to use the publication protocol. + + try: + self.resource_ca.repositories.get(handle = self.handle) + return None + + except rpki.irdb.Repository.DoesNotExist: + e = Element("repository", type = "offer", handle = self.handle, parent_handle = self.handle) + B64Element(e, "bpki_client_ta", self.resource_ca.certificate) + return etree_wrapper(e, msg = 'This is the "repository offer" file for you to use if you want to publish in your own repository') + + + def write_bpki_files(self): + """ + Write out BPKI certificate, key, and CRL files for daemons that + need them. 
+ """ + + writer = PEM_writer(self.logstream) + + if self.run_rpkid: + rpkid = self.server_ca.ee_certificates.get(purpose = "rpkid") + writer(self.cfg.get("bpki-ta", section = rpkid_section), self.server_ca.certificate) + writer(self.cfg.get("rpkid-key", section = rpkid_section), rpkid.private_key) + writer(self.cfg.get("rpkid-cert", section = rpkid_section), rpkid.certificate) + writer(self.cfg.get("irdb-cert", section = rpkid_section), + self.server_ca.ee_certificates.get(purpose = "irdbd").certificate) + writer(self.cfg.get("irbe-cert", section = rpkid_section), + self.server_ca.ee_certificates.get(purpose = "irbe").certificate) + + if self.run_pubd: + pubd = self.server_ca.ee_certificates.get(purpose = "pubd") + writer(self.cfg.get("bpki-ta", section = pubd_section), self.server_ca.certificate) + writer(self.cfg.get("pubd-key", section = pubd_section), pubd.private_key) + writer(self.cfg.get("pubd-cert", section = pubd_section), pubd.certificate) + writer(self.cfg.get("irbe-cert", section = pubd_section), + self.server_ca.ee_certificates.get(purpose = "irbe").certificate) + + if self.run_rootd: + try: + rootd = rpki.irdb.ResourceHolderCA.objects.get(handle = self.handle).rootd + writer(self.cfg.get("bpki-ta", section = rootd_section), self.server_ca.certificate) + writer(self.cfg.get("rootd-bpki-crl", section = rootd_section), self.server_ca.latest_crl) + writer(self.cfg.get("rootd-bpki-key", section = rootd_section), rootd.private_key) + writer(self.cfg.get("rootd-bpki-cert", section = rootd_section), rootd.certificate) + writer(self.cfg.get("child-bpki-cert", section = rootd_section), rootd.issuer.certificate) + except rpki.irdb.ResourceHolderCA.DoesNotExist: + self.log("rootd enabled but resource holding entity not yet configured, skipping rootd setup") + except rpki.irdb.Rootd.DoesNotExist: + self.log("rootd enabled but not yet configured, skipping rootd setup") + + + @django.db.transaction.commit_on_success + def update_bpki(self): + """ + Update BPKI 
certificates. Assumes an existing RPKI installation. + + Basic plan here is to reissue all BPKI certificates we can, right + now. In the long run we might want to be more clever about only + touching ones that need maintenance, but this will do for a start. + + We also reissue CRLs for all CAs. + + Most likely this should be run under cron. + """ + + for model in (rpki.irdb.ServerCA, + rpki.irdb.ResourceHolderCA, + rpki.irdb.ServerEE, + rpki.irdb.Referral, + rpki.irdb.Rootd, + rpki.irdb.HostedCA, + rpki.irdb.BSC, + rpki.irdb.Child, + rpki.irdb.Parent, + rpki.irdb.Client, + rpki.irdb.Repository): + for obj in model.objects.all(): + self.log("Regenerating BPKI certificate %s" % obj.certificate.getSubject()) + obj.avow() + obj.save() + + self.log("Regenerating Server BPKI CRL") + self.server_ca.generate_crl() + self.server_ca.save() + + for ca in rpki.irdb.ResourceHolderCA.objects.all(): + self.log("Regenerating BPKI CRL for Resource Holder %s" % ca.handle) + ca.generate_crl() + ca.save() + + + @django.db.transaction.commit_on_success + def synchronize_bpki(self): + """ + Synchronize BPKI updates. This is separate from .update_bpki() + because this requires rpkid to be running and none of the other + BPKI update stuff does; there may be circumstances under which it + makes sense to do the rest of the BPKI update and allow this to + fail with a warning. 
+ """ + + if self.run_rpkid: + updates = [] + + updates.extend( + rpki.left_right.self_elt.make_pdu( + action = "set", + tag = "%s__self" % ca.handle, + self_handle = ca.handle, + bpki_cert = ca.certificate) + for ca in rpki.irdb.ResourceHolderCA.objects.all()) + + updates.extend( + rpki.left_right.bsc_elt.make_pdu( + action = "set", + tag = "%s__bsc__%s" % (bsc.issuer.handle, bsc.handle), + self_handle = bsc.issuer.handle, + bsc_handle = bsc.handle, + signing_cert = bsc.certificate, + signing_cert_crl = bsc.issuer.latest_crl) + for bsc in rpki.irdb.BSC.objects.all()) + + updates.extend( + rpki.left_right.repository_elt.make_pdu( + action = "set", + tag = "%s__repository__%s" % (repository.issuer.handle, repository.handle), + self_handle = repository.issuer.handle, + repository_handle = repository.handle, + bpki_cert = repository.certificate) + for repository in rpki.irdb.Repository.objects.all()) + + updates.extend( + rpki.left_right.parent_elt.make_pdu( + action = "set", + tag = "%s__parent__%s" % (parent.issuer.handle, parent.handle), + self_handle = parent.issuer.handle, + parent_handle = parent.handle, + bpki_cms_cert = parent.certificate) + for parent in rpki.irdb.Parent.objects.all()) + + updates.extend( + rpki.left_right.parent_elt.make_pdu( + action = "set", + tag = "%s__rootd" % rootd.issuer.handle, + self_handle = rootd.issuer.handle, + parent_handle = rootd.issuer.handle, + bpki_cms_cert = rootd.certificate) + for rootd in rpki.irdb.Rootd.objects.all()) + + updates.extend( + rpki.left_right.child_elt.make_pdu( + action = "set", + tag = "%s__child__%s" % (child.issuer.handle, child.handle), + self_handle = child.issuer.handle, + child_handle = child.handle, + bpki_cert = child.certificate) + for child in rpki.irdb.Child.objects.all()) + + if updates: + self.check_error_report(self.call_rpkid(updates)) + + if self.run_pubd: + updates = [] + + updates.append( + rpki.publication.config_elt.make_pdu( + action = "set", + bpki_crl = self.server_ca.latest_crl)) 
+ + updates.extend( + rpki.publication.client_elt.make_pdu( + action = "set", + client_handle = client.handle, + bpki_cert = client.certificate) + for client in self.server_ca.clients.all()) + + if updates: + self.check_error_report(self.call_pubd(updates)) + + + @django.db.transaction.commit_on_success + def configure_child(self, filename, child_handle = None, valid_until = None): + """ + Configure a new child of this RPKI entity, given the child's XML + identity file as an input. Extracts the child's data from the + XML, cross-certifies the child's resource-holding BPKI + certificate, and generates an XML file describing the relationship + between the child and this parent, including this parent's BPKI + data and up-down protocol service URI. + """ + + c = etree_read(filename) + + if child_handle is None: + child_handle = c.get("handle") + + if valid_until is None: + valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 365) + else: + valid_until = rpki.sundial.datetime.fromXMLtime(valid_until) + if valid_until < rpki.sundial.now(): + raise PastExpiration, "Specified new expiration time %s has passed" % valid_until + + self.log("Child calls itself %r, we call it %r" % (c.get("handle"), child_handle)) + + child, created = rpki.irdb.Child.objects.get_or_certify( + issuer = self.resource_ca, + handle = child_handle, + ta = rpki.x509.X509(Base64 = c.findtext("bpki_ta")), + valid_until = valid_until) + + return self.generate_parental_response(child), child_handle + + + @django.db.transaction.commit_on_success + def generate_parental_response(self, child): + """ + Generate parental response XML. Broken out of .configure_child() + for GUI. 
+ """ + + service_uri = "http://%s:%s/up-down/%s/%s" % ( + self.cfg.get("rpkid_server_host", section = myrpki_section), + self.cfg.get("rpkid_server_port", section = myrpki_section), + self.handle, child.handle) + + e = Element("parent", parent_handle = self.handle, child_handle = child.handle, + service_uri = service_uri, valid_until = str(child.valid_until)) + B64Element(e, "bpki_resource_ta", self.resource_ca.certificate) + B64Element(e, "bpki_child_ta", child.ta) + + try: + if self.default_repository: + repo = self.resource_ca.repositories.get(handle = self.default_repository) + else: + repo = self.resource_ca.repositories.get() + except rpki.irdb.Repository.DoesNotExist: + repo = None + + if repo is None: + self.log("Couldn't find any usable repositories, not giving referral") + + elif repo.handle == self.handle: + SubElement(e, "repository", type = "offer") + + else: + proposed_sia_base = repo.sia_base + child.handle + "/" + referral_cert, created = rpki.irdb.Referral.objects.get_or_certify(issuer = self.resource_ca) + auth = rpki.x509.SignedReferral() + auth.set_content(B64Element(None, myrpki_namespaceQName + "referral", child.ta, + version = myrpki_version, + authorized_sia_base = proposed_sia_base)) + auth.schema_check() + auth.sign(referral_cert.private_key, referral_cert.certificate, self.resource_ca.latest_crl) + + r = SubElement(e, "repository", type = "referral") + B64Element(r, "authorization", auth, referrer = repo.client_handle) + SubElement(r, "contact_info") + + return etree_wrapper(e, msg = "Send this file back to the child you just configured") + + + @django.db.transaction.commit_on_success + def delete_child(self, child_handle): + """ + Delete a child of this RPKI entity. 
+ """ + + self.resource_ca.children.get(handle = child_handle).delete() + + + @django.db.transaction.commit_on_success + def configure_parent(self, filename, parent_handle = None): + """ + Configure a new parent of this RPKI entity, given the output of + the parent's configure_child command as input. Reads the parent's + response XML, extracts the parent's BPKI and service URI + information, cross-certifies the parent's BPKI data into this + entity's BPKI, and checks for offers or referrals of publication + service. If a publication offer or referral is present, we + generate a request-for-service message to that repository, in case + the user wants to avail herself of the referral or offer. + """ + + p = etree_read(filename) + + if parent_handle is None: + parent_handle = p.get("parent_handle") + + r = p.find("repository") + + repository_type = "none" + referrer = None + referral_authorization = None + + if r is not None: + repository_type = r.get("type") + + if repository_type == "referral": + a = r.find("authorization") + referrer = a.get("referrer") + referral_authorization = rpki.x509.SignedReferral(Base64 = a.text) + + self.log("Parent calls itself %r, we call it %r" % (p.get("parent_handle"), parent_handle)) + self.log("Parent calls us %r" % p.get("child_handle")) + + parent, created = rpki.irdb.Parent.objects.get_or_certify( + issuer = self.resource_ca, + handle = parent_handle, + child_handle = p.get("child_handle"), + parent_handle = p.get("parent_handle"), + service_uri = p.get("service_uri"), + ta = rpki.x509.X509(Base64 = p.findtext("bpki_resource_ta")), + repository_type = repository_type, + referrer = referrer, + referral_authorization = referral_authorization) + + return self.generate_repository_request(parent), parent_handle + + + def generate_repository_request(self, parent): + """ + Generate repository request for a given parent. 
+ """ + + e = Element("repository", handle = self.handle, + parent_handle = parent.handle, type = parent.repository_type) + if parent.repository_type == "referral": + B64Element(e, "authorization", parent.referral_authorization, referrer = parent.referrer) + SubElement(e, "contact_info") + B64Element(e, "bpki_client_ta", self.resource_ca.certificate) + return etree_wrapper(e, msg = "This is the file to send to the repository operator") + + + @django.db.transaction.commit_on_success + def delete_parent(self, parent_handle): + """ + Delete a parent of this RPKI entity. + """ + + self.resource_ca.parents.get(handle = parent_handle).delete() + + + @django.db.transaction.commit_on_success + def delete_rootd(self): + """ + Delete rootd associated with this RPKI entity. + """ + + self.resource_ca.rootd.delete() + + + @django.db.transaction.commit_on_success + def configure_publication_client(self, filename, sia_base = None, flat = False): + """ + Configure publication server to know about a new client, given the + client's request-for-service message as input. Reads the client's + request for service, cross-certifies the client's BPKI data, and + generates a response message containing the repository's BPKI data + and service URI. 
+ """ + + client = etree_read(filename) + + client_ta = rpki.x509.X509(Base64 = client.findtext("bpki_client_ta")) + + if sia_base is None and flat: + self.log("Flat publication structure forced, homing client at top-level") + sia_base = "rsync://%s/%s/%s/" % (self.rsync_server, self.rsync_module, client.get("handle")) + + if sia_base is None and client.get("type") == "referral": + self.log("This looks like a referral, checking") + try: + auth = client.find("authorization") + referrer = self.server_ca.clients.get(handle = auth.get("referrer")) + referral_cms = rpki.x509.SignedReferral(Base64 = auth.text) + referral_xml = referral_cms.unwrap(ta = (referrer.certificate, self.server_ca.certificate)) + if rpki.x509.X509(Base64 = referral_xml.text) != client_ta: + raise BadXMLMessage, "Referral trust anchor does not match" + sia_base = referral_xml.get("authorized_sia_base") + except rpki.irdb.Client.DoesNotExist: + self.log("We have no record of the client (%s) alleged to have made this referral" % auth.get("referrer")) + + if sia_base is None and client.get("type") == "offer": + self.log("This looks like an offer, checking") + try: + parent = rpki.irdb.ResourceHolderCA.objects.get(children__ta__exact = client_ta) + if "/" in parent.repositories.get(ta = self.server_ca.certificate).client_handle: + self.log("Client's parent is not top-level, this is not a valid offer") + else: + self.log("Found client and its parent, nesting") + sia_base = "rsync://%s/%s/%s/%s/" % (self.rsync_server, self.rsync_module, + parent.handle, client.get("handle")) + except rpki.irdb.Repository.DoesNotExist: + self.log("Found client's parent, but repository isn't set, this shouldn't happen!") + except rpki.irdb.ResourceHolderCA.DoesNotExist: + try: + rpki.irdb.Rootd.objects.get(issuer__certificate__exact = client_ta) + except rpki.irdb.Rootd.DoesNotExist: + self.log("We don't host this client's parent, so we didn't make this offer") + else: + self.log("This client's parent is rootd") + + if 
sia_base is None: + self.log("Don't know where to nest this client, defaulting to top-level") + sia_base = "rsync://%s/%s/%s/" % (self.rsync_server, self.rsync_module, client.get("handle")) + + if not sia_base.startswith("rsync://"): + raise BadXMLMessage, "Malformed sia_base parameter %r, should start with 'rsync://'" % sia_base + + client_handle = "/".join(sia_base.rstrip("/").split("/")[4:]) + + parent_handle = client.get("parent_handle") + + self.log("Client calls itself %r, we call it %r" % (client.get("handle"), client_handle)) + self.log("Client says its parent handle is %r" % parent_handle) + + client, created = rpki.irdb.Client.objects.get_or_certify( + issuer = self.server_ca, + handle = client_handle, + parent_handle = parent_handle, + ta = client_ta, + sia_base = sia_base) + + return self.generate_repository_response(client), client_handle + + + def generate_repository_response(self, client): + """ + Generate repository response XML to a given client. + """ + + service_uri = "http://%s:%s/client/%s" % ( + self.cfg.get("pubd_server_host", section = myrpki_section), + self.cfg.get("pubd_server_port", section = myrpki_section), + client.handle) + + e = Element("repository", type = "confirmed", + client_handle = client.handle, + parent_handle = client.parent_handle, + sia_base = client.sia_base, + service_uri = service_uri) + + B64Element(e, "bpki_server_ta", self.server_ca.certificate) + B64Element(e, "bpki_client_ta", client.ta) + SubElement(e, "contact_info").text = self.pubd_contact_info + return etree_wrapper(e, msg = "Send this file back to the publication client you just configured") + + + @django.db.transaction.commit_on_success + def delete_publication_client(self, client_handle): + """ + Delete a publication client of this RPKI entity. 
+ """ + + self.server_ca.clients.get(handle = client_handle).delete() + + + @django.db.transaction.commit_on_success + def configure_repository(self, filename, parent_handle = None): + """ + Configure a publication repository for this RPKI entity, given the + repository's response to our request-for-service message as input. + Reads the repository's response, extracts and cross-certifies the + BPKI data and service URI, and links the repository data with the + corresponding parent data in our local database. + """ + + r = etree_read(filename) + + if parent_handle is None: + parent_handle = r.get("parent_handle") + + self.log("Repository calls us %r" % (r.get("client_handle"))) + self.log("Repository response associated with parent_handle %r" % parent_handle) + + try: + if parent_handle == self.handle: + turtle = self.resource_ca.rootd + else: + turtle = self.resource_ca.parents.get(handle = parent_handle) + + except (rpki.irdb.Parent.DoesNotExist, rpki.irdb.Rootd.DoesNotExist): + self.log("Could not find parent %r in our database" % parent_handle) + + else: + rpki.irdb.Repository.objects.get_or_certify( + issuer = self.resource_ca, + handle = parent_handle, + client_handle = r.get("client_handle"), + service_uri = r.get("service_uri"), + sia_base = r.get("sia_base"), + ta = rpki.x509.X509(Base64 = r.findtext("bpki_server_ta")), + turtle = turtle) + + + @django.db.transaction.commit_on_success + def delete_repository(self, repository_handle): + """ + Delete a repository of this RPKI entity. + """ + + self.resource_ca.repositories.get(handle = repository_handle).delete() + + + @django.db.transaction.commit_on_success + def renew_children(self, child_handle, valid_until = None): + """ + Update validity period for one child entity or, if child_handle is + None, for all child entities. 
+ """ + + if child_handle is None: + children = self.resource_ca.children.all() + else: + children = self.resource_ca.children.filter(handle = child_handle) + + if valid_until is None: + valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 365) + else: + valid_until = rpki.sundial.datetime.fromXMLtime(valid_until) + if valid_until < rpki.sundial.now(): + raise PastExpiration, "Specified new expiration time %s has passed" % valid_until + + self.log("New validity date %s" % valid_until) + + for child in children: + child.valid_until = valid_until + child.save() + + + @django.db.transaction.commit_on_success + def load_prefixes(self, filename, ignore_missing_children = False): + """ + Whack IRDB to match prefixes.csv. + """ + + grouped4 = {} + grouped6 = {} + + for handle, prefix in csv_reader(filename, columns = 2): + grouped = grouped6 if ":" in prefix else grouped4 + if handle not in grouped: + grouped[handle] = [] + grouped[handle].append(prefix) + + primary_keys = [] + + for version, grouped, rset in ((4, grouped4, rpki.resource_set.resource_set_ipv4), + (6, grouped6, rpki.resource_set.resource_set_ipv6)): + for handle, prefixes in grouped.iteritems(): + try: + child = self.resource_ca.children.get(handle = handle) + except rpki.irdb.Child.DoesNotExist: + if not ignore_missing_children: + raise + else: + for prefix in rset(",".join(prefixes)): + obj, created = rpki.irdb.ChildNet.objects.get_or_create( + child = child, + start_ip = str(prefix.min), + end_ip = str(prefix.max), + version = version) + primary_keys.append(obj.pk) + + q = rpki.irdb.ChildNet.objects + q = q.filter(child__issuer__exact = self.resource_ca) + q = q.exclude(pk__in = primary_keys) + q.delete() + + + @django.db.transaction.commit_on_success + def load_asns(self, filename, ignore_missing_children = False): + """ + Whack IRDB to match asns.csv. 
+ """ + + grouped = {} + + for handle, asn in csv_reader(filename, columns = 2): + if handle not in grouped: + grouped[handle] = [] + grouped[handle].append(asn) + + primary_keys = [] + + for handle, asns in grouped.iteritems(): + try: + child = self.resource_ca.children.get(handle = handle) + except rpki.irdb.Child.DoesNotExist: + if not ignore_missing_children: + raise + else: + for asn in rpki.resource_set.resource_set_as(",".join(asns)): + obj, created = rpki.irdb.ChildASN.objects.get_or_create( + child = child, + start_as = str(asn.min), + end_as = str(asn.max)) + primary_keys.append(obj.pk) + + q = rpki.irdb.ChildASN.objects + q = q.filter(child__issuer__exact = self.resource_ca) + q = q.exclude(pk__in = primary_keys) + q.delete() + + + @django.db.transaction.commit_on_success + def load_roa_requests(self, filename): + """ + Whack IRDB to match roa.csv. + """ + + grouped = {} + + # format: p/n-m asn group + for pnm, asn, group in csv_reader(filename, columns = 3): + key = (asn, group) + if key not in grouped: + grouped[key] = [] + grouped[key].append(pnm) + + # Deleting and recreating all the ROA requests is inefficient, + # but rpkid's current representation of ROA requests is wrong + # (see #32), so it's not worth a lot of effort here as we're + # just going to have to rewrite this soon anyway. + + self.resource_ca.roa_requests.all().delete() + + for key, pnms in grouped.iteritems(): + asn, group = key + + roa_request = self.resource_ca.roa_requests.create(asn = asn) + + for pnm in pnms: + if ":" in pnm: + p = rpki.resource_set.roa_prefix_ipv6.parse_str(pnm) + v = 6 + else: + p = rpki.resource_set.roa_prefix_ipv4.parse_str(pnm) + v = 4 + roa_request.prefixes.create( + version = v, + prefix = str(p.prefix), + prefixlen = int(p.prefixlen), + max_prefixlen = int(p.max_prefixlen)) + + + @django.db.transaction.commit_on_success + def load_ghostbuster_requests(self, filename, parent = None): + """ + Whack IRDB to match ghostbusters.vcard. 
+ + This accepts one or more vCards from a file. + """ + + self.resource_ca.ghostbuster_requests.filter(parent = parent).delete() + + vcard = [] + + for line in open(filename, "r"): + if not vcard and not line.upper().startswith("BEGIN:VCARD"): + continue + vcard.append(line) + if line.upper().startswith("END:VCARD"): + self.resource_ca.ghostbuster_requests.create(vcard = "".join(vcard), parent = parent) + vcard = [] + + + def call_rpkid(self, *pdus): + """ + Issue a call to rpkid, return result. + + Implementation is a little silly, constructs a wrapper object, + invokes it once, then throws it away. Hard to do better without + rewriting a bit of the HTTP code, as we want to be sure we're + using the current BPKI certificate and key objects. + """ + + url = "http://%s:%s/left-right" % ( + self.cfg.get("rpkid_server_host", section = myrpki_section), + self.cfg.get("rpkid_server_port", section = myrpki_section)) + + rpkid = self.server_ca.ee_certificates.get(purpose = "rpkid") + irbe = self.server_ca.ee_certificates.get(purpose = "irbe") + + if len(pdus) == 1 and isinstance(pdus[0], types.GeneratorType): + pdus = tuple(pdus[0]) + elif len(pdus) == 1 and isinstance(pdus[0], (tuple, list)): + pdus = pdus[0] + + call_rpkid = rpki.async.sync_wrapper(rpki.http.caller( + proto = rpki.left_right, + client_key = irbe.private_key, + client_cert = irbe.certificate, + server_ta = self.server_ca.certificate, + server_cert = rpkid.certificate, + url = url, + debug = self.show_xml)) + + return call_rpkid(*pdus) + + + def run_rpkid_now(self): + """ + Poke rpkid to immediately run the cron job for the current handle. + + This method is used by the GUI when a user has changed something in the + IRDB (ghostbuster, roa) which does not require a full synchronize() call, + to force the object to be immediately issued. 
+ """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.handle, run_now = "yes")) + + + def publish_world_now(self): + """ + Poke rpkid to (re)publish everything for the current handle. + """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.handle, publish_world_now = "yes")) + + + def reissue(self): + """ + Poke rpkid to reissue everything for the current handle. + """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.handle, reissue = "yes")) + + def rekey(self): + """ + Poke rpkid to rekey all RPKI certificates received for the current + handle. + """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.handle, rekey = "yes")) + + + def revoke(self): + """ + Poke rpkid to revoke old RPKI keys for the current handle. + """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.handle, revoke = "yes")) + + + def revoke_forgotten(self): + """ + Poke rpkid to revoke old forgotten RPKI keys for the current handle. + """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = self.handle, revoke_forgotten = "yes")) + + + def clear_all_sql_cms_replay_protection(self): + """ + Tell rpkid and pubd to clear replay protection for all SQL-based + entities. 
This is a fairly blunt instrument, but as we don't + expect this to be necessary except in the case of gross + misconfiguration, it should suffice + """ + + self.call_rpkid(rpki.left_right.self_elt.make_pdu(action = "set", self_handle = ca.handle, + clear_replay_protection = "yes") + for ca in rpki.irdb.ResourceHolderCA.objects.all()) + if self.run_pubd: + self.call_pubd(rpki.publication.client_elt.make_pdu(action = "set", + client_handle = client.handle, + clear_replay_protection = "yes") + for client in self.server_ca.clients.all()) + + + def call_pubd(self, *pdus): + """ + Issue a call to pubd, return result. + + Implementation is a little silly, constructs a wrapper object, + invokes it once, then throws it away. Hard to do better without + rewriting a bit of the HTTP code, as we want to be sure we're + using the current BPKI certificate and key objects. + """ + + url = "http://%s:%s/control" % ( + self.cfg.get("pubd_server_host", section = myrpki_section), + self.cfg.get("pubd_server_port", section = myrpki_section)) + + pubd = self.server_ca.ee_certificates.get(purpose = "pubd") + irbe = self.server_ca.ee_certificates.get(purpose = "irbe") + + if len(pdus) == 1 and isinstance(pdus[0], types.GeneratorType): + pdus = tuple(pdus[0]) + elif len(pdus) == 1 and isinstance(pdus[0], (tuple, list)): + pdus = pdus[0] + + call_pubd = rpki.async.sync_wrapper(rpki.http.caller( + proto = rpki.publication, + client_key = irbe.private_key, + client_cert = irbe.certificate, + server_ta = self.server_ca.certificate, + server_cert = pubd.certificate, + url = url, + debug = self.show_xml)) + + return call_pubd(*pdus) + + + def check_error_report(self, pdus): + """ + Check a response from rpkid or pubd for error_report PDUs, log and + throw exceptions as needed. 
+ """ + + if any(isinstance(pdu, (rpki.left_right.report_error_elt, rpki.publication.report_error_elt)) for pdu in pdus): + for pdu in pdus: + if isinstance(pdu, rpki.left_right.report_error_elt): + self.log("rpkid reported failure: %s" % pdu.error_code) + elif isinstance(pdu, rpki.publication.report_error_elt): + self.log("pubd reported failure: %s" % pdu.error_code) + else: + continue + if pdu.error_text: + self.log(pdu.error_text) + raise CouldntTalkToDaemon + + + @django.db.transaction.commit_on_success + def synchronize(self, *handles_to_poke): + """ + Configure RPKI daemons with the data built up by the other + commands in this program. Commands which modify the IRDB and want + to whack everything into sync should call this when they're done, + but be warned that this can be slow with a lot of CAs. + + Any arguments given are handles of CAs which should be poked with a + operation. + """ + + for ca in rpki.irdb.ResourceHolderCA.objects.all(): + self.synchronize_rpkid_one_ca_core(ca, ca.handle in handles_to_poke) + self.synchronize_pubd_core() + self.synchronize_rpkid_deleted_core() + + + @django.db.transaction.commit_on_success + def synchronize_ca(self, ca = None, poke = False): + """ + Synchronize one CA. Most commands which modify a CA should call + this. CA to synchronize defaults to the current resource CA. + """ + + if ca is None: + ca = self.resource_ca + self.synchronize_rpkid_one_ca_core(ca, poke) + + + @django.db.transaction.commit_on_success + def synchronize_deleted_ca(self): + """ + Delete CAs which are present in rpkid's database but not in the + IRDB. + """ + + self.synchronize_rpkid_deleted_core() + + + @django.db.transaction.commit_on_success + def synchronize_pubd(self): + """ + Synchronize pubd. Most commands which modify pubd should call this. + """ + + self.synchronize_pubd_core() + + + def synchronize_rpkid_one_ca_core(self, ca, poke = False): + """ + Synchronize one CA. This is the core synchronization code. 
Don't + call this directly, instead call one of the methods that calls + this inside a Django commit wrapper. + + This method configures rpkid with data built up by the other + commands in this program. Most commands which modify IRDB values + related to rpkid should call this when they're done. + + If poke is True, we append a left-right run_now operation for this + CA to the end of whatever other commands this method generates. + """ + + # We can use a single BSC for everything -- except BSC key + # rollovers. Drive off that bridge when we get to it. + + bsc_handle = "bsc" + + # A default RPKI CRL cycle time of six hours seems sane. One + # might make a case for a day instead, but we've been running with + # six hours for a while now and haven't seen a lot of whining. + + self_crl_interval = self.cfg.getint("self_crl_interval", 6 * 60 * 60, section = myrpki_section) + + # regen_margin now just controls how long before RPKI certificate + # expiration we should regenerate; it used to control the interval + # before RPKI CRL staleness at which to regenerate the CRL, but + # using the same timer value for both of these is hopeless. + # + # A default regeneration margin of two weeks gives enough time for + # humans to react. We add a two hour fudge factor in the hope + # that this will regenerate certificates just *before* the + # companion cron job warns of impending doom. + + self_regen_margin = self.cfg.getint("self_regen_margin", 14 * 24 * 60 * 60 + 2 * 60, section = myrpki_section) + + # See what rpkid already has on file for this entity. 
+ + rpkid_reply = self.call_rpkid( + rpki.left_right.self_elt.make_pdu( action = "get", tag = "self", self_handle = ca.handle), + rpki.left_right.bsc_elt.make_pdu( action = "list", tag = "bsc", self_handle = ca.handle), + rpki.left_right.repository_elt.make_pdu(action = "list", tag = "repository", self_handle = ca.handle), + rpki.left_right.parent_elt.make_pdu( action = "list", tag = "parent", self_handle = ca.handle), + rpki.left_right.child_elt.make_pdu( action = "list", tag = "child", self_handle = ca.handle)) + + self_pdu = rpkid_reply[0] + bsc_pdus = dict((x.bsc_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.bsc_elt)) + repository_pdus = dict((x.repository_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.repository_elt)) + parent_pdus = dict((x.parent_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.parent_elt)) + child_pdus = dict((x.child_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.child_elt)) + + rpkid_query = [] + + self_cert, created = rpki.irdb.HostedCA.objects.get_or_certify( + issuer = self.server_ca, + hosted = ca) + + # There should be exactly one object per hosted entity, by definition + + if (isinstance(self_pdu, rpki.left_right.report_error_elt) or + self_pdu.crl_interval != self_crl_interval or + self_pdu.regen_margin != self_regen_margin or + self_pdu.bpki_cert != self_cert.certificate): + rpkid_query.append(rpki.left_right.self_elt.make_pdu( + action = "create" if isinstance(self_pdu, rpki.left_right.report_error_elt) else "set", + tag = "self", + self_handle = ca.handle, + bpki_cert = ca.certificate, + crl_interval = self_crl_interval, + regen_margin = self_regen_margin)) + + # In general we only need one per . BSC objects + # are a little unusual in that the keypair and PKCS #10 + # subelement is generated by rpkid, so complete setup requires + # two round trips. 
+ + bsc_pdu = bsc_pdus.pop(bsc_handle, None) + + if bsc_pdu is None: + rpkid_query.append(rpki.left_right.bsc_elt.make_pdu( + action = "create", + tag = "bsc", + self_handle = ca.handle, + bsc_handle = bsc_handle, + generate_keypair = "yes")) + + elif bsc_pdu.pkcs10_request is None: + rpkid_query.append(rpki.left_right.bsc_elt.make_pdu( + action = "set", + tag = "bsc", + self_handle = ca.handle, + bsc_handle = bsc_handle, + generate_keypair = "yes")) + + rpkid_query.extend(rpki.left_right.bsc_elt.make_pdu( + action = "destroy", self_handle = ca.handle, bsc_handle = b) for b in bsc_pdus) + + # If we've already got actions queued up, run them now, so we + # can finish setting up the BSC before anything tries to use it. + + if rpkid_query: + rpkid_query.append(rpki.left_right.bsc_elt.make_pdu(action = "list", tag = "bsc", self_handle = ca.handle)) + rpkid_reply = self.call_rpkid(rpkid_query) + bsc_pdus = dict((x.bsc_handle, x) + for x in rpkid_reply + if isinstance(x, rpki.left_right.bsc_elt) and x.action == "list") + bsc_pdu = bsc_pdus.pop(bsc_handle, None) + self.check_error_report(rpkid_reply) + + rpkid_query = [] + + assert bsc_pdu.pkcs10_request is not None + + bsc, created = rpki.irdb.BSC.objects.get_or_certify( + issuer = ca, + handle = bsc_handle, + pkcs10 = bsc_pdu.pkcs10_request) + + if bsc_pdu.signing_cert != bsc.certificate or bsc_pdu.signing_cert_crl != ca.latest_crl: + rpkid_query.append(rpki.left_right.bsc_elt.make_pdu( + action = "set", + tag = "bsc", + self_handle = ca.handle, + bsc_handle = bsc_handle, + signing_cert = bsc.certificate, + signing_cert_crl = ca.latest_crl)) + + # At present we need one per , not because + # rpkid requires that, but because pubd does. 
pubd probably should + # be fixed to support a single client allowed to update multiple + # trees, but for the moment the easiest way forward is just to + # enforce a 1:1 mapping between and objects + + for repository in ca.repositories.all(): + + repository_pdu = repository_pdus.pop(repository.handle, None) + + if (repository_pdu is None or + repository_pdu.bsc_handle != bsc_handle or + repository_pdu.peer_contact_uri != repository.service_uri or + repository_pdu.bpki_cert != repository.certificate): + rpkid_query.append(rpki.left_right.repository_elt.make_pdu( + action = "create" if repository_pdu is None else "set", + tag = repository.handle, + self_handle = ca.handle, + repository_handle = repository.handle, + bsc_handle = bsc_handle, + peer_contact_uri = repository.service_uri, + bpki_cert = repository.certificate)) + + rpkid_query.extend(rpki.left_right.repository_elt.make_pdu( + action = "destroy", self_handle = ca.handle, repository_handle = r) for r in repository_pdus) + + # setup code currently assumes 1:1 mapping between + # and , and further assumes that the handles + # for an associated pair are the identical (that is: + # parent.repository_handle == parent.parent_handle). + # + # If no such repository exists, our choices are to ignore the + # parent entry or throw an error. For now, we ignore the parent. 
+ + for parent in ca.parents.all(): + + try: + + parent_pdu = parent_pdus.pop(parent.handle, None) + + if (parent_pdu is None or + parent_pdu.bsc_handle != bsc_handle or + parent_pdu.repository_handle != parent.handle or + parent_pdu.peer_contact_uri != parent.service_uri or + parent_pdu.sia_base != parent.repository.sia_base or + parent_pdu.sender_name != parent.child_handle or + parent_pdu.recipient_name != parent.parent_handle or + parent_pdu.bpki_cms_cert != parent.certificate): + rpkid_query.append(rpki.left_right.parent_elt.make_pdu( + action = "create" if parent_pdu is None else "set", + tag = parent.handle, + self_handle = ca.handle, + parent_handle = parent.handle, + bsc_handle = bsc_handle, + repository_handle = parent.handle, + peer_contact_uri = parent.service_uri, + sia_base = parent.repository.sia_base, + sender_name = parent.child_handle, + recipient_name = parent.parent_handle, + bpki_cms_cert = parent.certificate)) + + except rpki.irdb.Repository.DoesNotExist: + pass + + try: + + parent_pdu = parent_pdus.pop(ca.handle, None) + + if (parent_pdu is None or + parent_pdu.bsc_handle != bsc_handle or + parent_pdu.repository_handle != ca.handle or + parent_pdu.peer_contact_uri != ca.rootd.service_uri or + parent_pdu.sia_base != ca.rootd.repository.sia_base or + parent_pdu.sender_name != ca.handle or + parent_pdu.recipient_name != ca.handle or + parent_pdu.bpki_cms_cert != ca.rootd.certificate): + rpkid_query.append(rpki.left_right.parent_elt.make_pdu( + action = "create" if parent_pdu is None else "set", + tag = ca.handle, + self_handle = ca.handle, + parent_handle = ca.handle, + bsc_handle = bsc_handle, + repository_handle = ca.handle, + peer_contact_uri = ca.rootd.service_uri, + sia_base = ca.rootd.repository.sia_base, + sender_name = ca.handle, + recipient_name = ca.handle, + bpki_cms_cert = ca.rootd.certificate)) + + except rpki.irdb.Rootd.DoesNotExist: + pass + + rpkid_query.extend(rpki.left_right.parent_elt.make_pdu( + action = "destroy", 
self_handle = ca.handle, parent_handle = p) for p in parent_pdus) + + # Children are simpler than parents, because they call us, so no URL + # to construct and figuring out what certificate to use is their + # problem, not ours. + + for child in ca.children.all(): + + child_pdu = child_pdus.pop(child.handle, None) + + if (child_pdu is None or + child_pdu.bsc_handle != bsc_handle or + child_pdu.bpki_cert != child.certificate): + rpkid_query.append(rpki.left_right.child_elt.make_pdu( + action = "create" if child_pdu is None else "set", + tag = child.handle, + self_handle = ca.handle, + child_handle = child.handle, + bsc_handle = bsc_handle, + bpki_cert = child.certificate)) + + rpkid_query.extend(rpki.left_right.child_elt.make_pdu( + action = "destroy", self_handle = ca.handle, child_handle = c) for c in child_pdus) + + # If caller wants us to poke rpkid, add that to the very end of the message + + if poke: + rpkid_query.append(rpki.left_right.self_elt.make_pdu( + action = "set", self_handle = ca.handle, run_now = "yes")) + + # If we changed anything, ship updates off to rpkid + + if rpkid_query: + rpkid_reply = self.call_rpkid(rpkid_query) + bsc_pdus = dict((x.bsc_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.bsc_elt)) + if bsc_handle in bsc_pdus and bsc_pdus[bsc_handle].pkcs10_request: + bsc_req = bsc_pdus[bsc_handle].pkcs10_request + self.check_error_report(rpkid_reply) + + + def synchronize_pubd_core(self): + """ + Configure pubd with data built up by the other commands in this + program. This is the core synchronization code. Don't call this + directly, instead call a methods that calls this inside a Django + commit wrapper. + + This method configures pubd with data built up by the other + commands in this program. Commands which modify IRDB fields + related to pubd should call this when they're done. 
+ """ + + # If we're not running pubd, the rest of this is a waste of time + + if not self.run_pubd: + return + + # Make sure that pubd's BPKI CRL is up to date. + + self.call_pubd(rpki.publication.config_elt.make_pdu( + action = "set", + bpki_crl = self.server_ca.latest_crl)) + + # See what pubd already has on file + + pubd_reply = self.call_pubd(rpki.publication.client_elt.make_pdu(action = "list")) + client_pdus = dict((x.client_handle, x) for x in pubd_reply if isinstance(x, rpki.publication.client_elt)) + pubd_query = [] + + # Check all clients + + for client in self.server_ca.clients.all(): + + client_pdu = client_pdus.pop(client.handle, None) + + if (client_pdu is None or + client_pdu.base_uri != client.sia_base or + client_pdu.bpki_cert != client.certificate): + pubd_query.append(rpki.publication.client_elt.make_pdu( + action = "create" if client_pdu is None else "set", + client_handle = client.handle, + bpki_cert = client.certificate, + base_uri = client.sia_base)) + + # Delete any unknown clients + + pubd_query.extend(rpki.publication.client_elt.make_pdu( + action = "destroy", client_handle = p) for p in client_pdus) + + # If we changed anything, ship updates off to pubd + + if pubd_query: + pubd_reply = self.call_pubd(pubd_query) + self.check_error_report(pubd_reply) + + + def synchronize_rpkid_deleted_core(self): + """ + Remove any objects present in rpkid's database but not + present in the IRDB. This is the core synchronization code. + Don't call this directly, instead call a methods that calls this + inside a Django commit wrapper. 
+ """ + + rpkid_reply = self.call_rpkid(rpki.left_right.self_elt.make_pdu(action = "list")) + self.check_error_report(rpkid_reply) + + self_handles = set(s.self_handle for s in rpkid_reply) + ca_handles = set(ca.handle for ca in rpki.irdb.ResourceHolderCA.objects.all()) + assert ca_handles <= self_handles + + rpkid_query = [rpki.left_right.self_elt.make_pdu(action = "destroy", self_handle = handle) + for handle in (self_handles - ca_handles)] + + if rpkid_query: + rpkid_reply = self.call_rpkid(rpkid_query) + self.check_error_report(rpkid_reply) + + + @django.db.transaction.commit_on_success + def add_ee_certificate_request(self, pkcs10, resources): + """ + Check a PKCS #10 request to see if it complies with the + specification for a RPKI EE certificate; if it does, add an + EECertificateRequest for it to the IRDB. + + Not yet sure what we want for update and delete semantics here, so + for the moment this is straight addition. See methods like + .load_asns() and .load_prefixes() for other strategies. + """ + + pkcs10.check_valid_request_ee() + ee_request = self.resource_ca.ee_certificate_requests.create( + pkcs10 = pkcs10, + gski = pkcs10.gSKI(), + valid_until = resources.valid_until) + for range in resources.asn: + ee_request.asns.create(start_as = str(range.min), end_as = str(range.max)) + for range in resources.v4: + ee_request.address_ranges.create(start_ip = str(range.min), end_ip = str(range.max), version = 4) + for range in resources.v6: + ee_request.address_ranges.create(start_ip = str(range.min), end_ip = str(range.max), version = 6) + + + @django.db.transaction.commit_on_success + def add_router_certificate_request(self, router_certificate_request_xml, valid_until = None): + """ + Read XML file containing one or more router certificate requests, + attempt to add request(s) to IRDB. 
+ + Check each PKCS #10 request to see if it complies with the + specification for a router certificate; if it does, create an EE + certificate request for it along with the ASN resources and + router-ID supplied in the XML. + """ + + xml = ElementTree(file = router_certificate_request_xml).getroot() + rpki.relaxng.router_certificate.assertValid(xml) + + for req in xml.getiterator(routercert_namespaceQName + "router_certificate_request"): + + pkcs10 = rpki.x509.PKCS10(Base64 = req.text) + router_id = long(req.get("router_id")) + asns = rpki.resource_set.resource_set_as(req.get("asn")) + if not valid_until: + valid_until = req.get("valid_until") + + if valid_until and isinstance(valid_until, (str, unicode)): + valid_until = rpki.sundial.datetime.fromXMLtime(valid_until) + + if not valid_until: + valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 365) + elif valid_until < rpki.sundial.now(): + raise PastExpiration, "Specified expiration date %s has already passed" % valid_until + + pkcs10.check_valid_request_router() + + cn = "ROUTER-%08x" % asns[0].min + sn = "%08x" % router_id + + ee_request = self.resource_ca.ee_certificate_requests.create( + pkcs10 = pkcs10, + gski = pkcs10.gSKI(), + valid_until = valid_until, + cn = cn, + sn = sn, + eku = rpki.oids.id_kp_bgpsec_router) + + for range in asns: + ee_request.asns.create(start_as = str(range.min), end_as = str(range.max)) + + + @django.db.transaction.commit_on_success + def delete_router_certificate_request(self, gski): + """ + Delete a router certificate request from this RPKI entity. 
+ """ + + self.resource_ca.ee_certificate_requests.get(gski = gski).delete() diff --git a/rpki/irdbd.py b/rpki/irdbd.py new file mode 100644 index 00000000..41739dc4 --- /dev/null +++ b/rpki/irdbd.py @@ -0,0 +1,266 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +IR database daemon. 
+""" + +import sys +import os +import time +import argparse +import urlparse +import rpki.http +import rpki.config +import rpki.resource_set +import rpki.relaxng +import rpki.exceptions +import rpki.left_right +import rpki.log +import rpki.x509 +import rpki.daemonize + +class main(object): + + def handle_list_resources(self, q_pdu, r_msg): + child = rpki.irdb.Child.objects.get( + issuer__handle__exact = q_pdu.self_handle, + handle = q_pdu.child_handle) + resources = child.resource_bag + r_pdu = rpki.left_right.list_resources_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.child_handle = q_pdu.child_handle + r_pdu.valid_until = child.valid_until.strftime("%Y-%m-%dT%H:%M:%SZ") + r_pdu.asn = resources.asn + r_pdu.ipv4 = resources.v4 + r_pdu.ipv6 = resources.v6 + r_msg.append(r_pdu) + + def handle_list_roa_requests(self, q_pdu, r_msg): + for request in rpki.irdb.ROARequest.objects.raw(""" + SELECT irdb_roarequest.* + FROM irdb_roarequest, irdb_resourceholderca + WHERE irdb_roarequest.issuer_id = irdb_resourceholderca.id + AND irdb_resourceholderca.handle = %s + """, [q_pdu.self_handle]): + prefix_bag = request.roa_prefix_bag + r_pdu = rpki.left_right.list_roa_requests_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.asn = request.asn + r_pdu.ipv4 = prefix_bag.v4 + r_pdu.ipv6 = prefix_bag.v6 + r_msg.append(r_pdu) + + def handle_list_ghostbuster_requests(self, q_pdu, r_msg): + ghostbusters = rpki.irdb.GhostbusterRequest.objects.filter( + issuer__handle__exact = q_pdu.self_handle, + parent__handle__exact = q_pdu.parent_handle) + if ghostbusters.count() == 0: + ghostbusters = rpki.irdb.GhostbusterRequest.objects.filter( + issuer__handle__exact = q_pdu.self_handle, + parent = None) + for ghostbuster in ghostbusters: + r_pdu = rpki.left_right.list_ghostbuster_requests_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.parent_handle = q_pdu.parent_handle + r_pdu.vcard = ghostbuster.vcard + 
r_msg.append(r_pdu) + + def handle_list_ee_certificate_requests(self, q_pdu, r_msg): + for ee_req in rpki.irdb.EECertificateRequest.objects.filter(issuer__handle__exact = q_pdu.self_handle): + resources = ee_req.resource_bag + r_pdu = rpki.left_right.list_ee_certificate_requests_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.gski = ee_req.gski + r_pdu.valid_until = ee_req.valid_until.strftime("%Y-%m-%dT%H:%M:%SZ") + r_pdu.asn = resources.asn + r_pdu.ipv4 = resources.v4 + r_pdu.ipv6 = resources.v6 + r_pdu.cn = ee_req.cn + r_pdu.sn = ee_req.sn + r_pdu.eku = ee_req.eku + r_pdu.pkcs10 = ee_req.pkcs10 + r_msg.append(r_pdu) + + def handler(self, query, path, cb): + try: + q_pdu = None + r_msg = rpki.left_right.msg.reply() + from django.db import connection + connection.cursor() # Reconnect to mysqld if necessary + self.start_new_transaction() + serverCA = rpki.irdb.ServerCA.objects.get() + rpkid = serverCA.ee_certificates.get(purpose = "rpkid") + try: + q_cms = rpki.left_right.cms_msg(DER = query) + q_msg = q_cms.unwrap((serverCA.certificate, rpkid.certificate)) + self.cms_timestamp = q_cms.check_replay(self.cms_timestamp, path) + if not isinstance(q_msg, rpki.left_right.msg) or not q_msg.is_query(): + raise rpki.exceptions.BadQuery("Unexpected %r PDU" % q_msg) + for q_pdu in q_msg: + self.dispatch(q_pdu, r_msg) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + if q_pdu is None: + r_msg.append(rpki.left_right.report_error_elt.from_exception(e)) + else: + r_msg.append(rpki.left_right.report_error_elt.from_exception(e, q_pdu.self_handle, q_pdu.tag)) + irdbd = serverCA.ee_certificates.get(purpose = "irdbd") + cb(200, body = rpki.left_right.cms_msg().wrap(r_msg, irdbd.private_key, irdbd.certificate)) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + cb(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) + + def dispatch(self, 
q_pdu, r_msg): + try: + handler = self.dispatch_vector[type(q_pdu)] + except KeyError: + raise rpki.exceptions.BadQuery("Unexpected %r PDU" % q_pdu) + else: + handler(q_pdu, r_msg) + + def __init__(self, **kwargs): + + global rpki # pylint: disable=W0602 + + os.environ["TZ"] = "UTC" + time.tzset() + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("-c", "--config", + help = "override default location of configuration file") + parser.add_argument("-d", "--debug", action = "store_true", + help = "enable debugging mode") + parser.add_argument("-f", "--foreground", action = "store_true", + help = "do not daemonize") + parser.add_argument("--pidfile", + help = "override default location of pid file") + parser.add_argument("--profile", + help = "enable profiling, saving data to PROFILE") + args = parser.parse_args() + + rpki.log.init("irdbd", use_syslog = not args.debug) + + self.cfg = rpki.config.parser(args.config, "irdbd") + self.cfg.set_global_flags() + + if not args.foreground and not args.debug: + rpki.daemonize.daemon(pidfile = args.pidfile) + + if args.profile: + import cProfile + prof = cProfile.Profile() + try: + prof.runcall(self.main) + finally: + prof.dump_stats(args.profile) + rpki.log.info("Dumped profile data to %s" % args.profile) + else: + self.main() + + def main(self): + + global rpki # pylint: disable=W0602 + from django.conf import settings + + startup_msg = self.cfg.get("startup-message", "") + if startup_msg: + rpki.log.info(startup_msg) + + # Do -not- turn on DEBUG here except for short-lived tests, + # otherwise irdbd will eventually run out of memory and crash. + # + # If you must enable debugging, use django.db.reset_queries() to + # clear the query list manually, but it's probably better just to + # run with debugging disabled, since that's the expectation for + # production code. 
+ # + # https://docs.djangoproject.com/en/dev/faq/models/#why-is-django-leaking-memory + + settings.configure( + DATABASES = { + "default" : { + "ENGINE" : "django.db.backends.mysql", + "NAME" : self.cfg.get("sql-database"), + "USER" : self.cfg.get("sql-username"), + "PASSWORD" : self.cfg.get("sql-password"), + "HOST" : "", + "PORT" : "" }}, + INSTALLED_APPS = ("rpki.irdb",),) + + import rpki.irdb # pylint: disable=W0621 + + # Entirely too much fun with read-only access to transactional databases. + # + # http://stackoverflow.com/questions/3346124/how-do-i-force-django-to-ignore-any-caches-and-reload-data + # http://devblog.resolversystems.com/?p=439 + # http://groups.google.com/group/django-users/browse_thread/thread/e25cec400598c06d + # http://stackoverflow.com/questions/1028671/python-mysqldb-update-query-fails + # http://dev.mysql.com/doc/refman/5.0/en/set-transaction.html + # + # It turns out that MySQL is doing us a favor with this weird + # transactional behavior on read, because without it there's a + # race condition if multiple updates are committed to the IRDB + # while we're in the middle of processing a query. Note that + # proper transaction management by the committers doesn't protect + # us, this is a transactional problem on read. So we need to use + # explicit transaction management. Since irdbd is a read-only + # consumer of IRDB data, this means we need to commit an empty + # transaction at the beginning of processing each query, to reset + # the transaction isolation snapshot. 
+ + import django.db.transaction + self.start_new_transaction = django.db.transaction.commit_manually(django.db.transaction.commit) + + self.dispatch_vector = { + rpki.left_right.list_resources_elt : self.handle_list_resources, + rpki.left_right.list_roa_requests_elt : self.handle_list_roa_requests, + rpki.left_right.list_ghostbuster_requests_elt : self.handle_list_ghostbuster_requests, + rpki.left_right.list_ee_certificate_requests_elt : self.handle_list_ee_certificate_requests} + + try: + self.http_server_host = self.cfg.get("server-host", "") + self.http_server_port = self.cfg.getint("server-port") + except: + # + # Backwards compatibility, remove this eventually. + # + u = urlparse.urlparse(self.cfg.get("http-url")) + if (u.scheme not in ("", "http") or + u.username is not None or + u.password is not None or + u.params or u.query or u.fragment): + raise + self.http_server_host = u.hostname + self.http_server_port = int(u.port) + + self.cms_timestamp = None + + rpki.http.server( + host = self.http_server_host, + port = self.http_server_port, + handlers = self.handler) diff --git a/rpki/left_right.py b/rpki/left_right.py new file mode 100644 index 00000000..2d46cdfa --- /dev/null +++ b/rpki/left_right.py @@ -0,0 +1,1300 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL,
# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""
RPKI "left-right" protocol.
"""

import rpki.resource_set
import rpki.x509
import rpki.sql
import rpki.exceptions
import rpki.xml_utils
import rpki.http
import rpki.up_down
import rpki.relaxng
import rpki.sundial
import rpki.log
import rpki.publication
import rpki.async
import rpki.rpkid_tasks

## @var enforce_strict_up_down_xml_sender
# Enforce strict checking of XML "sender" field in up-down protocol

enforce_strict_up_down_xml_sender = False

class left_right_namespace(object):
  """
  XML namespace parameters for left-right protocol.
  """

  xmlns = "http://www.hactrn.net/uris/rpki/left-right-spec/"
  nsmap = { None : xmlns }

class data_elt(rpki.xml_utils.data_elt, rpki.sql.sql_persistent, left_right_namespace):
  """
  Virtual class for top-level left-right protocol data elements.

  Subclasses describe their SQL/XML mapping via element_name,
  attributes, elements, booleans, sql_template, and handles (a tuple
  of (tag, element-class) pairs used for _handle <-> _id translation).
  """

  handles = ()

  self_id = None
  self_handle = None

  @property
  @rpki.sql.cache_reference
  def self(self):
    """
    Fetch self object to which this object links.
    """
    return self_elt.sql_fetch(self.gctx, self.self_id)

  @property
  @rpki.sql.cache_reference
  def bsc(self):
    """
    Return BSC object to which this object links.
    """
    return bsc_elt.sql_fetch(self.gctx, self.bsc_id)

  def make_reply_clone_hook(self, r_pdu):
    """
    Set handles when cloning, including _id -> _handle translation.
    """
    if r_pdu.self_handle is None:
      r_pdu.self_handle = self.self_handle
    for tag, elt in self.handles:
      id_name = tag + "_id"
      handle_name = tag + "_handle"
      if getattr(r_pdu, handle_name, None) is None:
        # AttributeError here means the linked object (or its handle)
        # isn't available; leave the handle unset and carry on.
        try:
          setattr(r_pdu, handle_name, getattr(elt.sql_fetch(self.gctx, getattr(r_pdu, id_name)), handle_name))
        except AttributeError:
          continue

  @classmethod
  def serve_fetch_handle(cls, gctx, self_id, handle):
    """
    Find an object based on its handle.
    """
    return cls.sql_fetch_where1(gctx, cls.element_name + "_handle = %s AND self_id = %s", (handle, self_id))

  def serve_fetch_one_maybe(self):
    """
    Find the object on which a get, set, or destroy method should
    operate, or which would conflict with a create method.
    """
    where = "%s.%s_handle = %%s AND %s.self_id = self.self_id AND self.self_handle = %%s" % ((self.element_name,) * 3)
    args = (getattr(self, self.element_name + "_handle"), self.self_handle)
    return self.sql_fetch_where1(self.gctx, where, args, "self")

  def serve_fetch_all(self):
    """
    Find the objects on which a list method should operate.
    """
    where = "%s.self_id = self.self_id and self.self_handle = %%s" % self.element_name
    return self.sql_fetch_where(self.gctx, where, (self.self_handle,), "self")

  def serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb):
    """
    Hook to do _handle => _id translation before saving.

    self is always the object to be saved to SQL.  For create
    operations, self and q_pdu are the same object; for set
    operations, self is the pre-existing object from SQL and q_pdu is
    the set request received from the IRBE.
    """
    for tag, elt in self.handles:
      id_name = tag + "_id"
      if getattr(self, id_name, None) is None:
        x = elt.serve_fetch_handle(self.gctx, self.self_id, getattr(q_pdu, tag + "_handle"))
        if x is None:
          raise rpki.exceptions.HandleTranslationError, "Could not translate %r %s_handle" % (self, tag)
        setattr(self, id_name, getattr(x, id_name))
    cb()

class self_elt(data_elt):
  """
  <self/> element.
  """

  element_name = "self"
  attributes = ("action", "tag", "self_handle", "crl_interval", "regen_margin")
  elements = ("bpki_cert", "bpki_glue")
  booleans = ("rekey", "reissue", "revoke", "run_now", "publish_world_now", "revoke_forgotten",
              "clear_replay_protection")

  sql_template = rpki.sql.template(
    "self",
    "self_id",
    "self_handle",
    "use_hsm",
    "crl_interval",
    "regen_margin",
    ("bpki_cert", rpki.x509.X509),
    ("bpki_glue", rpki.x509.X509))

  handles = ()

  use_hsm = False
  crl_interval = None
  regen_margin = None
  bpki_cert = None
  bpki_glue = None
  cron_tasks = None

  def __repr__(self):
    return rpki.log.log_repr(self)

  @property
  def bscs(self):
    """
    Fetch all BSC objects that link to this self object.
    """
    return bsc_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,))

  @property
  def repositories(self):
    """
    Fetch all repository objects that link to this self object.
    """
    return repository_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,))

  @property
  def parents(self):
    """
    Fetch all parent objects that link to this self object.
    """
    return parent_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,))

  @property
  def children(self):
    """
    Fetch all child objects that link to this self object.
    """
    return child_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,))

  @property
  def roas(self):
    """
    Fetch all ROA objects that link to this self object.
+ """ + return rpki.rpkid.roa_obj.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) + + @property + def ghostbusters(self): + """ + Fetch all Ghostbuster record objects that link to this self object. + """ + return rpki.rpkid.ghostbuster_obj.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) + + @property + def ee_certificates(self): + """ + Fetch all EE certificate objects that link to this self object. + """ + return rpki.rpkid.ee_cert_obj.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) + + + def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Extra server actions for self_elt. + """ + rpki.log.trace() + actions = [] + if q_pdu.rekey: + actions.append(self.serve_rekey) + if q_pdu.revoke: + actions.append(self.serve_revoke) + if q_pdu.reissue: + actions.append(self.serve_reissue) + if q_pdu.revoke_forgotten: + actions.append(self.serve_revoke_forgotten) + if q_pdu.publish_world_now: + actions.append(self.serve_publish_world_now) + if q_pdu.run_now: + actions.append(self.serve_run_now) + if q_pdu.clear_replay_protection: + actions.append(self.serve_clear_replay_protection) + def loop(iterator, action): + action(iterator, eb) + rpki.async.iterator(actions, loop, cb) + + def serve_rekey(self, cb, eb): + """ + Handle a left-right rekey action for this self. + """ + rpki.log.trace() + def loop(iterator, parent): + parent.serve_rekey(iterator, eb) + rpki.async.iterator(self.parents, loop, cb) + + def serve_revoke(self, cb, eb): + """ + Handle a left-right revoke action for this self. + """ + rpki.log.trace() + def loop(iterator, parent): + parent.serve_revoke(iterator, eb) + rpki.async.iterator(self.parents, loop, cb) + + def serve_reissue(self, cb, eb): + """ + Handle a left-right reissue action for this self. 
+ """ + rpki.log.trace() + def loop(iterator, parent): + parent.serve_reissue(iterator, eb) + rpki.async.iterator(self.parents, loop, cb) + + def serve_revoke_forgotten(self, cb, eb): + """ + Handle a left-right revoke_forgotten action for this self. + """ + rpki.log.trace() + def loop(iterator, parent): + parent.serve_revoke_forgotten(iterator, eb) + rpki.async.iterator(self.parents, loop, cb) + + def serve_clear_replay_protection(self, cb, eb): + """ + Handle a left-right clear_replay_protection action for this self. + """ + rpki.log.trace() + def loop(iterator, obj): + obj.serve_clear_replay_protection(iterator, eb) + rpki.async.iterator(self.parents + self.children + self.repositories, loop, cb) + + def serve_destroy_hook(self, cb, eb): + """ + Extra cleanup actions when destroying a self_elt. + """ + rpki.log.trace() + def loop(iterator, parent): + parent.delete(iterator) + rpki.async.iterator(self.parents, loop, cb) + + + def serve_publish_world_now(self, cb, eb): + """ + Handle a left-right publish_world_now action for this self. + + The publication stuff needs refactoring, right now publication is + interleaved with local operations in a way that forces far too + many bounces through the task system for any complex update. The + whole thing ought to be rewritten to queue up outgoing publication + PDUs and only send them when we're all done or when we need to + force publication at a particular point in a multi-phase operation. + + Once that reorganization has been done, this method should be + rewritten to reuse the low-level publish() methods that each + object will have...but we're not there yet. So, for now, we just + do this via brute force. Think of it as a trial version to see + whether we've identified everything that needs to be republished + for this operation. 
+ """ + + def loop(iterator, parent): + q_msg = rpki.publication.msg.query() + for ca in parent.cas: + ca_detail = ca.active_ca_detail + if ca_detail is not None: + q_msg.append(rpki.publication.crl_elt.make_publish( + ca_detail.crl_uri, ca_detail.latest_crl)) + q_msg.append(rpki.publication.manifest_elt.make_publish( + ca_detail.manifest_uri, ca_detail.latest_manifest)) + q_msg.extend(rpki.publication.certificate_elt.make_publish( + c.uri, c.cert) for c in ca_detail.child_certs) + q_msg.extend(rpki.publication.roa_elt.make_publish( + r.uri, r.roa) for r in ca_detail.roas if r.roa is not None) + q_msg.extend(rpki.publication.ghostbuster_elt.make_publish( + g.uri, g.ghostbuster) for g in ca_detail.ghostbusters) + parent.repository.call_pubd(iterator, eb, q_msg) + + rpki.async.iterator(self.parents, loop, cb) + + def serve_run_now(self, cb, eb): + """ + Handle a left-right run_now action for this self. + """ + rpki.log.debug("Forced immediate run of periodic actions for self %s[%d]" % ( + self.self_handle, self.self_id)) + completion = rpki.rpkid_tasks.CompletionHandler(cb) + self.schedule_cron_tasks(completion) + assert completion.count > 0 + self.gctx.task_run() + + def serve_fetch_one_maybe(self): + """ + Find the self object upon which a get, set, or destroy action + should operate, or which would conflict with a create method. + """ + return self.serve_fetch_handle(self.gctx, None, self.self_handle) + + @classmethod + def serve_fetch_handle(cls, gctx, self_id, self_handle): + """ + Find a self object based on its self_handle. + """ + return cls.sql_fetch_where1(gctx, "self_handle = %s", self_handle) + + def serve_fetch_all(self): + """ + Find the self objects upon which a list action should operate. + This is different from the list action for all other objects, + where list only works within a given self_id context. + """ + return self.sql_fetch_all(self.gctx) + + def schedule_cron_tasks(self, completion): + """ + Schedule periodic tasks. 
+ """ + + if self.cron_tasks is None: + self.cron_tasks = tuple(task(self) for task in rpki.rpkid_tasks.task_classes) + + for task in self.cron_tasks: + self.gctx.task_add(task) + completion.register(task) + + def find_covering_ca_details(self, resources): + """ + Return all active ca_detail_objs for this which cover a + particular set of resources. + + If we expected there to be a large number of ca_detail_objs, we + could add index tables and write fancy SQL query to do this, but + for the expected common case where there are only one or two + active ca_detail_objs per , it's probably not worth it. In + any case, this is an optimization we can leave for later. + """ + + results = set() + for parent in self.parents: + for ca in parent.cas: + ca_detail = ca.active_ca_detail + if ca_detail is not None and ca_detail.covers(resources): + results.add(ca_detail) + return results + + +class bsc_elt(data_elt): + """ + (Business Signing Context) element. + """ + + element_name = "bsc" + attributes = ("action", "tag", "self_handle", "bsc_handle", "key_type", "hash_alg", "key_length") + elements = ("signing_cert", "signing_cert_crl", "pkcs10_request") + booleans = ("generate_keypair",) + + sql_template = rpki.sql.template( + "bsc", + "bsc_id", + "bsc_handle", + "self_id", + "hash_alg", + ("private_key_id", rpki.x509.RSA), + ("pkcs10_request", rpki.x509.PKCS10), + ("signing_cert", rpki.x509.X509), + ("signing_cert_crl", rpki.x509.CRL)) + + handles = (("self", self_elt),) + + private_key_id = None + pkcs10_request = None + signing_cert = None + signing_cert_crl = None + + def __repr__(self): + return rpki.log.log_repr(self, self.bsc_handle) + + @property + def repositories(self): + """ + Fetch all repository objects that link to this BSC object. + """ + return repository_elt.sql_fetch_where(self.gctx, "bsc_id = %s", (self.bsc_id,)) + + @property + def parents(self): + """ + Fetch all parent objects that link to this BSC object. 
+ """ + return parent_elt.sql_fetch_where(self.gctx, "bsc_id = %s", (self.bsc_id,)) + + @property + def children(self): + """ + Fetch all child objects that link to this BSC object. + """ + return child_elt.sql_fetch_where(self.gctx, "bsc_id = %s", (self.bsc_id,)) + + def serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Extra server actions for bsc_elt -- handle key generation. For + now this only allows RSA with SHA-256. + """ + if q_pdu.generate_keypair: + assert q_pdu.key_type in (None, "rsa") and q_pdu.hash_alg in (None, "sha256") + self.private_key_id = rpki.x509.RSA.generate(keylength = q_pdu.key_length or 2048) + self.pkcs10_request = rpki.x509.PKCS10.create(keypair = self.private_key_id) + r_pdu.pkcs10_request = self.pkcs10_request + data_elt.serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb) + +class repository_elt(data_elt): + """ + element. + """ + + element_name = "repository" + attributes = ("action", "tag", "self_handle", "repository_handle", "bsc_handle", "peer_contact_uri") + elements = ("bpki_cert", "bpki_glue") + booleans = ("clear_replay_protection",) + + sql_template = rpki.sql.template( + "repository", + "repository_id", + "repository_handle", + "self_id", + "bsc_id", + "peer_contact_uri", + ("bpki_cert", rpki.x509.X509), + ("bpki_glue", rpki.x509.X509), + ("last_cms_timestamp", rpki.sundial.datetime)) + + handles = (("self", self_elt), + ("bsc", bsc_elt)) + + bpki_cert = None + bpki_glue = None + last_cms_timestamp = None + + def __repr__(self): + return rpki.log.log_repr(self, self.repository_handle) + + @property + def parents(self): + """ + Fetch all parent objects that link to this repository object. + """ + return parent_elt.sql_fetch_where(self.gctx, "repository_id = %s", (self.repository_id,)) + + def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Extra server actions for repository_elt. 
+ """ + actions = [] + if q_pdu.clear_replay_protection: + actions.append(self.serve_clear_replay_protection) + def loop(iterator, action): + action(iterator, eb) + rpki.async.iterator(actions, loop, cb) + + def serve_clear_replay_protection(self, cb, eb): + """ + Handle a left-right clear_replay_protection action for this repository. + """ + self.last_cms_timestamp = None + self.sql_mark_dirty() + cb() + + @staticmethod + def default_pubd_handler(pdu): + """ + Default handler for publication response PDUs. + """ + pdu.raise_if_error() + + def call_pubd(self, callback, errback, q_msg, handlers = None): + """ + Send a message to publication daemon and return the response. + + As a convenience, attempting to send an empty message returns + immediate success without sending anything. + + Handlers is a dict of handler functions to process the response + PDUs. If the tag value in the response PDU appears in the dict, + the associated handler is called to process the PDU. If no tag + matches, default_pubd_handler() is called. A handler value of + False suppresses calling of the default handler. 
+ """ + + try: + rpki.log.trace() + + self.gctx.sql.sweep() + + if not q_msg: + return callback() + + if handlers is None: + handlers = {} + + for q_pdu in q_msg: + rpki.log.info("Sending %s %s to pubd" % (q_pdu.action, q_pdu.uri)) + + bsc = self.bsc + q_der = rpki.publication.cms_msg().wrap(q_msg, bsc.private_key_id, bsc.signing_cert, bsc.signing_cert_crl) + bpki_ta_path = (self.gctx.bpki_ta, self.self.bpki_cert, self.self.bpki_glue, self.bpki_cert, self.bpki_glue) + + def done(r_der): + try: + rpki.log.debug("Received response from pubd") + r_cms = rpki.publication.cms_msg(DER = r_der) + r_msg = r_cms.unwrap(bpki_ta_path) + r_cms.check_replay_sql(self, self.peer_contact_uri) + for r_pdu in r_msg: + handler = handlers.get(r_pdu.tag, self.default_pubd_handler) + if handler: + rpki.log.debug("Calling pubd handler %r" % handler) + handler(r_pdu) + if len(q_msg) != len(r_msg): + raise rpki.exceptions.BadPublicationReply, "Wrong number of response PDUs from pubd: sent %r, got %r" % (q_msg, r_msg) + callback() + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + errback(e) + + rpki.log.debug("Sending request to pubd") + rpki.http.client( + url = self.peer_contact_uri, + msg = q_der, + callback = done, + errback = errback) + + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + errback(e) + +class parent_elt(data_elt): + """ + element. 
+ """ + + element_name = "parent" + attributes = ("action", "tag", "self_handle", "parent_handle", "bsc_handle", "repository_handle", + "peer_contact_uri", "sia_base", "sender_name", "recipient_name") + elements = ("bpki_cms_cert", "bpki_cms_glue") + booleans = ("rekey", "reissue", "revoke", "revoke_forgotten", "clear_replay_protection") + + sql_template = rpki.sql.template( + "parent", + "parent_id", + "parent_handle", + "self_id", + "bsc_id", + "repository_id", + "peer_contact_uri", + "sia_base", + "sender_name", + "recipient_name", + ("bpki_cms_cert", rpki.x509.X509), + ("bpki_cms_glue", rpki.x509.X509), + ("last_cms_timestamp", rpki.sundial.datetime)) + + handles = (("self", self_elt), + ("bsc", bsc_elt), + ("repository", repository_elt)) + + bpki_cms_cert = None + bpki_cms_glue = None + last_cms_timestamp = None + + def __repr__(self): + return rpki.log.log_repr(self, self.parent_handle) + + @property + @rpki.sql.cache_reference + def repository(self): + """ + Fetch repository object to which this parent object links. + """ + return repository_elt.sql_fetch(self.gctx, self.repository_id) + + @property + def cas(self): + """ + Fetch all CA objects that link to this parent object. + """ + return rpki.rpkid.ca_obj.sql_fetch_where(self.gctx, "parent_id = %s", (self.parent_id,)) + + def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Extra server actions for parent_elt. + """ + actions = [] + if q_pdu.rekey: + actions.append(self.serve_rekey) + if q_pdu.revoke: + actions.append(self.serve_revoke) + if q_pdu.reissue: + actions.append(self.serve_reissue) + if q_pdu.revoke_forgotten: + actions.append(self.serve_revoke_forgotten) + if q_pdu.clear_replay_protection: + actions.append(self.serve_clear_replay_protection) + def loop(iterator, action): + action(iterator, eb) + rpki.async.iterator(actions, loop, cb) + + def serve_rekey(self, cb, eb): + """ + Handle a left-right rekey action for this parent. 
+ """ + def loop(iterator, ca): + ca.rekey(iterator, eb) + rpki.async.iterator(self.cas, loop, cb) + + def serve_revoke(self, cb, eb): + """ + Handle a left-right revoke action for this parent. + """ + def loop(iterator, ca): + ca.revoke(cb = iterator, eb = eb) + rpki.async.iterator(self.cas, loop, cb) + + def serve_reissue(self, cb, eb): + """ + Handle a left-right reissue action for this parent. + """ + def loop(iterator, ca): + ca.reissue(cb = iterator, eb = eb) + rpki.async.iterator(self.cas, loop, cb) + + def serve_clear_replay_protection(self, cb, eb): + """ + Handle a left-right clear_replay_protection action for this parent. + """ + self.last_cms_timestamp = None + self.sql_mark_dirty() + cb() + + + def get_skis(self, cb, eb): + """ + Fetch SKIs that this parent thinks we have. In theory this should + agree with our own database, but in practice stuff can happen, so + sometimes we need to know what our parent thinks. + + Result is a dictionary with the resource class name as key and a + set of SKIs as value. + """ + + def done(r_msg): + cb(dict((rc.class_name, set(c.cert.gSKI() for c in rc.certs)) + for rc in r_msg.payload.classes)) + + rpki.up_down.list_pdu.query(self, done, eb) + + + def revoke_skis(self, rc_name, skis_to_revoke, cb, eb): + """ + Revoke a set of SKIs within a particular resource class. + """ + + def loop(iterator, ski): + rpki.log.debug("Asking parent %r to revoke class %r, SKI %s" % (self, rc_name, ski)) + q_pdu = rpki.up_down.revoke_pdu() + q_pdu.class_name = rc_name + q_pdu.ski = ski + self.query_up_down(q_pdu, lambda r_pdu: iterator(), eb) + + rpki.async.iterator(skis_to_revoke, loop, cb) + + + def serve_revoke_forgotten(self, cb, eb): + """ + Handle a left-right revoke_forgotten action for this parent. + + This is a bit fiddly: we have to compare the result of an up-down + list query with what we have locally and identify the SKIs of any + certificates that have gone missing. 
This should never happen in + ordinary operation, but can arise if we have somehow lost a + private key, in which case there is nothing more we can do with + the issued cert, so we have to clear it. As this really is not + supposed to happen, we don't clear it automatically, instead we + require an explicit trigger. + """ + + def got_skis(skis_from_parent): + + def loop(iterator, item): + rc_name, skis_to_revoke = item + if rc_name in ca_map: + for ca_detail in ca_map[rc_name].issue_response_candidate_ca_details: + skis_to_revoke.discard(ca_detail.latest_ca_cert.gSKI()) + self.revoke_skis(rc_name, skis_to_revoke, iterator, eb) + + ca_map = dict((ca.parent_resource_class, ca) for ca in self.cas) + rpki.async.iterator(skis_from_parent.items(), loop, cb) + + self.get_skis(got_skis, eb) + + + def delete(self, cb, delete_parent = True): + """ + Delete all the CA stuff under this parent, and perhaps the parent + itself. + """ + + def loop(iterator, ca): + self.gctx.checkpoint() + ca.delete(self, iterator) + + def revoke(): + self.gctx.checkpoint() + self.serve_revoke_forgotten(done, fail) + + def fail(e): + rpki.log.warn("Trouble getting parent to revoke certificates, blundering onwards: %s" % e) + done() + + def done(): + self.gctx.checkpoint() + self.gctx.sql.sweep() + if delete_parent: + self.sql_delete() + cb() + + rpki.async.iterator(self.cas, loop, revoke) + + + def serve_destroy_hook(self, cb, eb): + """ + Extra server actions when destroying a parent_elt. + """ + + self.delete(cb, delete_parent = False) + + + def query_up_down(self, q_pdu, cb, eb): + """ + Client code for sending one up-down query PDU to this parent. 
+ """ + + rpki.log.trace() + + bsc = self.bsc + if bsc is None: + raise rpki.exceptions.BSCNotFound, "Could not find BSC %s" % self.bsc_id + + if bsc.signing_cert is None: + raise rpki.exceptions.BSCNotReady, "BSC %r[%s] is not yet usable" % (bsc.bsc_handle, bsc.bsc_id) + + q_msg = rpki.up_down.message_pdu.make_query( + payload = q_pdu, + sender = self.sender_name, + recipient = self.recipient_name) + + q_der = rpki.up_down.cms_msg().wrap(q_msg, bsc.private_key_id, + bsc.signing_cert, + bsc.signing_cert_crl) + + def unwrap(r_der): + try: + r_cms = rpki.up_down.cms_msg(DER = r_der) + r_msg = r_cms.unwrap((self.gctx.bpki_ta, + self.self.bpki_cert, + self.self.bpki_glue, + self.bpki_cms_cert, + self.bpki_cms_glue)) + r_cms.check_replay_sql(self, self.peer_contact_uri) + r_msg.payload.check_response() + except (SystemExit, rpki.async.ExitNow): + raise + except Exception, e: + eb(e) + else: + cb(r_msg) + + rpki.http.client( + msg = q_der, + url = self.peer_contact_uri, + callback = unwrap, + errback = eb) + +class child_elt(data_elt): + """ + element. + """ + + element_name = "child" + attributes = ("action", "tag", "self_handle", "child_handle", "bsc_handle") + elements = ("bpki_cert", "bpki_glue") + booleans = ("reissue", "clear_replay_protection") + + sql_template = rpki.sql.template( + "child", + "child_id", + "child_handle", + "self_id", + "bsc_id", + ("bpki_cert", rpki.x509.X509), + ("bpki_glue", rpki.x509.X509), + ("last_cms_timestamp", rpki.sundial.datetime)) + + handles = (("self", self_elt), + ("bsc", bsc_elt)) + + bpki_cert = None + bpki_glue = None + last_cms_timestamp = None + + def __repr__(self): + return rpki.log.log_repr(self, self.child_handle) + + def fetch_child_certs(self, ca_detail = None, ski = None, unique = False): + """ + Fetch all child_cert objects that link to this child object. 
+ """ + return rpki.rpkid.child_cert_obj.fetch(self.gctx, self, ca_detail, ski, unique) + + @property + def child_certs(self): + """ + Fetch all child_cert objects that link to this child object. + """ + return self.fetch_child_certs() + + @property + def parents(self): + """ + Fetch all parent objects that link to self object to which this child object links. + """ + return parent_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) + + def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Extra server actions for child_elt. + """ + actions = [] + if q_pdu.reissue: + actions.append(self.serve_reissue) + if q_pdu.clear_replay_protection: + actions.append(self.serve_clear_replay_protection) + def loop(iterator, action): + action(iterator, eb) + rpki.async.iterator(actions, loop, cb) + + def serve_reissue(self, cb, eb): + """ + Handle a left-right reissue action for this child. + """ + publisher = rpki.rpkid.publication_queue() + for child_cert in self.child_certs: + child_cert.reissue(child_cert.ca_detail, publisher, force = True) + publisher.call_pubd(cb, eb) + + def serve_clear_replay_protection(self, cb, eb): + """ + Handle a left-right clear_replay_protection action for this child. + """ + self.last_cms_timestamp = None + self.sql_mark_dirty() + cb() + + def ca_from_class_name(self, class_name): + """ + Fetch the CA corresponding to an up-down class_name. + """ + if not class_name.isdigit(): + raise rpki.exceptions.BadClassNameSyntax, "Bad class name %s" % class_name + ca = rpki.rpkid.ca_obj.sql_fetch(self.gctx, long(class_name)) + if ca is None: + raise rpki.exceptions.ClassNameUnknown, "Unknown class name %s" % class_name + parent = ca.parent + if self.self_id != parent.self_id: + raise rpki.exceptions.ClassNameMismatch( + "Class name mismatch: child.self_id = %d, parent.self_id = %d" % ( + self.self_id, parent.self_id)) + return ca + + def serve_destroy_hook(self, cb, eb): + """ + Extra server actions when destroying a child_elt. 
+ """ + publisher = rpki.rpkid.publication_queue() + for child_cert in self.child_certs: + child_cert.revoke(publisher = publisher, + generate_crl_and_manifest = True) + publisher.call_pubd(cb, eb) + + def serve_up_down(self, query, callback): + """ + Outer layer of server handling for one up-down PDU from this child. + """ + + rpki.log.trace() + + bsc = self.bsc + if bsc is None: + raise rpki.exceptions.BSCNotFound, "Could not find BSC %s" % self.bsc_id + q_cms = rpki.up_down.cms_msg(DER = query) + q_msg = q_cms.unwrap((self.gctx.bpki_ta, + self.self.bpki_cert, + self.self.bpki_glue, + self.bpki_cert, + self.bpki_glue)) + q_cms.check_replay_sql(self, "child", self.child_handle) + q_msg.payload.gctx = self.gctx + if enforce_strict_up_down_xml_sender and q_msg.sender != self.child_handle: + raise rpki.exceptions.BadSender, "Unexpected XML sender %s" % q_msg.sender + self.gctx.sql.sweep() + + def done(r_msg): + # + # Exceptions from this point on are problematic, as we have no + # sane way of reporting errors in the error reporting mechanism. + # May require refactoring, ignore the issue for now. + # + reply = rpki.up_down.cms_msg().wrap(r_msg, bsc.private_key_id, + bsc.signing_cert, bsc.signing_cert_crl) + callback(reply) + + try: + q_msg.serve_top_level(self, done) + except (rpki.async.ExitNow, SystemExit): + raise + except rpki.exceptions.NoActiveCA, data: + done(q_msg.serve_error(data)) + except Exception, e: + rpki.log.traceback() + done(q_msg.serve_error(e)) + +class list_resources_elt(rpki.xml_utils.base_elt, left_right_namespace): + """ + element. + """ + + element_name = "list_resources" + attributes = ("self_handle", "tag", "child_handle", "valid_until", "asn", "ipv4", "ipv6") + valid_until = None + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.child_handle, self.asn, self.ipv4, self.ipv6) + + def startElement(self, stack, name, attrs): + """ + Handle element. 
This requires special handling + due to the data types of some of the attributes. + """ + assert name == "list_resources", "Unexpected name %s, stack %s" % (name, stack) + self.read_attrs(attrs) + if isinstance(self.valid_until, str): + self.valid_until = rpki.sundial.datetime.fromXMLtime(self.valid_until) + if self.asn is not None: + self.asn = rpki.resource_set.resource_set_as(self.asn) + if self.ipv4 is not None: + self.ipv4 = rpki.resource_set.resource_set_ipv4(self.ipv4) + if self.ipv6 is not None: + self.ipv6 = rpki.resource_set.resource_set_ipv6(self.ipv6) + + def toXML(self): + """ + Generate element. This requires special + handling due to the data types of some of the attributes. + """ + elt = self.make_elt() + if isinstance(self.valid_until, int): + elt.set("valid_until", self.valid_until.toXMLtime()) + return elt + +class list_roa_requests_elt(rpki.xml_utils.base_elt, left_right_namespace): + """ + element. + """ + + element_name = "list_roa_requests" + attributes = ("self_handle", "tag", "asn", "ipv4", "ipv6") + + def startElement(self, stack, name, attrs): + """ + Handle element. This requires special handling + due to the data types of some of the attributes. + """ + assert name == "list_roa_requests", "Unexpected name %s, stack %s" % (name, stack) + self.read_attrs(attrs) + if self.ipv4 is not None: + self.ipv4 = rpki.resource_set.roa_prefix_set_ipv4(self.ipv4) + if self.ipv6 is not None: + self.ipv6 = rpki.resource_set.roa_prefix_set_ipv6(self.ipv6) + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.asn, self.ipv4, self.ipv6) + +class list_ghostbuster_requests_elt(rpki.xml_utils.text_elt, left_right_namespace): + """ + element. 
+ """ + + element_name = "list_ghostbuster_requests" + attributes = ("self_handle", "tag", "parent_handle") + text_attribute = "vcard" + + vcard = None + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.parent_handle) + +class list_ee_certificate_requests_elt(rpki.xml_utils.base_elt, left_right_namespace): + """ + element. + """ + + element_name = "list_ee_certificate_requests" + attributes = ("self_handle", "tag", "gski", "valid_until", "asn", "ipv4", "ipv6", "cn", "sn", "eku") + elements = ("pkcs10",) + + pkcs10 = None + valid_until = None + eku = None + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.gski, self.cn, self.sn, self.asn, self.ipv4, self.ipv6) + + def startElement(self, stack, name, attrs): + """ + Handle element. This requires special + handling due to the data types of some of the attributes. + """ + if name not in self.elements: + assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) + self.read_attrs(attrs) + if isinstance(self.valid_until, str): + self.valid_until = rpki.sundial.datetime.fromXMLtime(self.valid_until) + if self.asn is not None: + self.asn = rpki.resource_set.resource_set_as(self.asn) + if self.ipv4 is not None: + self.ipv4 = rpki.resource_set.resource_set_ipv4(self.ipv4) + if self.ipv6 is not None: + self.ipv6 = rpki.resource_set.resource_set_ipv6(self.ipv6) + if self.eku is not None: + self.eku = self.eku.split(",") + + def endElement(self, stack, name, text): + """ + Handle sub-element. + """ + assert len(self.elements) == 1 + if name == self.elements[0]: + self.pkcs10 = rpki.x509.PKCS10(Base64 = text) + else: + assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) + stack.pop() + + def toXML(self): + """ + Generate element. This requires special + handling due to the data types of some of the attributes. 
+ """ + if isinstance(self.eku, (tuple, list)): + self.eku = ",".join(self.eku) + elt = self.make_elt() + for i in self.elements: + self.make_b64elt(elt, i, getattr(self, i, None)) + if isinstance(self.valid_until, int): + elt.set("valid_until", self.valid_until.toXMLtime()) + return elt + +class list_published_objects_elt(rpki.xml_utils.text_elt, left_right_namespace): + """ + element. + """ + + element_name = "list_published_objects" + attributes = ("self_handle", "tag", "uri", "child_handle") + text_attribute = "obj" + + obj = None + child_handle = None + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.child_handle, self.uri) + + def serve_dispatch(self, r_msg, cb, eb): + """ + Handle a query. The method name is a + misnomer here, there's no action attribute and no dispatch, we + just dump every published object for the specified and return. + """ + for parent in self_elt.serve_fetch_handle(self.gctx, None, self.self_handle).parents: + for ca in parent.cas: + ca_detail = ca.active_ca_detail + if ca_detail is not None: + r_msg.append(self.make_reply(ca_detail.crl_uri, ca_detail.latest_crl)) + r_msg.append(self.make_reply(ca_detail.manifest_uri, ca_detail.latest_manifest)) + r_msg.extend(self.make_reply(c.uri, c.cert, c.child.child_handle) + for c in ca_detail.child_certs) + r_msg.extend(self.make_reply(r.uri, r.roa) + for r in ca_detail.roas if r.roa is not None) + r_msg.extend(self.make_reply(g.uri, g.ghostbuster) + for g in ca_detail.ghostbusters) + r_msg.extend(self.make_reply(c.uri, c.cert) + for c in ca_detail.ee_certificates) + cb() + + def make_reply(self, uri, obj, child_handle = None): + """ + Generate one reply PDU. + """ + r_pdu = self.make_pdu(tag = self.tag, self_handle = self.self_handle, + uri = uri, child_handle = child_handle) + r_pdu.obj = obj.get_Base64() + return r_pdu + +class list_received_resources_elt(rpki.xml_utils.base_elt, left_right_namespace): + """ + element. 
+ """ + + element_name = "list_received_resources" + attributes = ("self_handle", "tag", "parent_handle", + "notBefore", "notAfter", "uri", "sia_uri", "aia_uri", "asn", "ipv4", "ipv6") + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.parent_handle, self.uri, self.notAfter) + + def serve_dispatch(self, r_msg, cb, eb): + """ + Handle a query. The method name is a + misnomer here, there's no action attribute and no dispatch, we + just dump a bunch of data about every certificate issued to us by + one of our parents, then return. + """ + for parent in self_elt.serve_fetch_handle(self.gctx, None, self.self_handle).parents: + for ca in parent.cas: + ca_detail = ca.active_ca_detail + if ca_detail is not None and ca_detail.latest_ca_cert is not None: + r_msg.append(self.make_reply(parent.parent_handle, ca_detail.ca_cert_uri, ca_detail.latest_ca_cert)) + cb() + + def make_reply(self, parent_handle, uri, cert): + """ + Generate one reply PDU. + """ + resources = cert.get_3779resources() + return self.make_pdu( + tag = self.tag, + self_handle = self.self_handle, + parent_handle = parent_handle, + notBefore = str(cert.getNotBefore()), + notAfter = str(cert.getNotAfter()), + uri = uri, + sia_uri = cert.get_sia_directory_uri(), + aia_uri = cert.get_aia_uri(), + asn = resources.asn, + ipv4 = resources.v4, + ipv6 = resources.v6) + +class report_error_elt(rpki.xml_utils.text_elt, left_right_namespace): + """ + element. + """ + + element_name = "report_error" + attributes = ("tag", "self_handle", "error_code") + text_attribute = "error_text" + + error_text = None + + def __repr__(self): + return rpki.log.log_repr(self, self.self_handle, self.error_code) + + @classmethod + def from_exception(cls, e, self_handle = None, tag = None): + """ + Generate a element from an exception. 
+ """ + self = cls() + self.self_handle = self_handle + self.tag = tag + self.error_code = e.__class__.__name__ + self.error_text = str(e) + return self + +class msg(rpki.xml_utils.msg, left_right_namespace): + """ + Left-right PDU. + """ + + ## @var version + # Protocol version + version = 1 + + ## @var pdus + # Dispatch table of PDUs for this protocol. + pdus = dict((x.element_name, x) + for x in (self_elt, child_elt, parent_elt, bsc_elt, + repository_elt, list_resources_elt, + list_roa_requests_elt, list_ghostbuster_requests_elt, + list_ee_certificate_requests_elt, + list_published_objects_elt, + list_received_resources_elt, report_error_elt)) + + def serve_top_level(self, gctx, cb): + """ + Serve one msg PDU. + """ + + r_msg = self.__class__.reply() + + def loop(iterator, q_pdu): + + def fail(e): + if not isinstance(e, rpki.exceptions.NotFound): + rpki.log.traceback() + r_msg.append(report_error_elt.from_exception( + e, self_handle = q_pdu.self_handle, tag = q_pdu.tag)) + cb(r_msg) + + try: + q_pdu.gctx = gctx + q_pdu.serve_dispatch(r_msg, iterator, fail) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + fail(e) + + def done(): + cb(r_msg) + + rpki.async.iterator(self, loop, done) + +class sax_handler(rpki.xml_utils.sax_handler): + """ + SAX handler for Left-Right protocol. + """ + + pdu = msg + name = "msg" + version = "1" + +class cms_msg(rpki.x509.XML_CMS_object): + """ + Class to hold a CMS-signed left-right PDU. 
+ """ + + encoding = "us-ascii" + schema = rpki.relaxng.left_right + saxify = sax_handler.saxify diff --git a/rpki/log.py b/rpki/log.py new file mode 100644 index 00000000..c605331a --- /dev/null +++ b/rpki/log.py @@ -0,0 +1,199 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Logging facilities for RPKI libraries. +""" + +import syslog +import sys +import os +import time +import traceback as tb + +try: + have_setproctitle = False + if os.getenv("DISABLE_SETPROCTITLE") is None: + import setproctitle + have_setproctitle = True +except ImportError: + pass + +## @var enable_trace +# Whether call tracing is enabled. + +enable_trace = False + +## @var show_python_ids +# Whether __repr__() methods should show Python id numbers + +show_python_ids = False + +## @var enable_tracebacks +# Whether tracebacks are enabled globally. Individual classes and +# modules may choose to override this. + +enable_tracebacks = False + +## @var use_setproctitle +# Whether to use setproctitle (if available) to change name shown for +# this process in ps listings (etc). 
+ +use_setproctitle = True + +## @var proctitle_extra + +# Extra text to include in proctitle display. By default this is the +# tail of the current directory name, as this is often useful, but you +# can set it to something else if you like. If None or the empty +# string, the extra information field will be omitted from the proctitle. + +proctitle_extra = os.path.basename(os.getcwd()) + +def init(ident = "rpki", flags = syslog.LOG_PID, facility = syslog.LOG_DAEMON, use_syslog = None, log_file = sys.stderr, tag_log_lines = True): + """ + Initialize logging system. + """ + + # If caller didn't say whether to use syslog, use log file if user supplied one, otherwise use syslog + + if use_syslog is None: + use_syslog = log_file is sys.stderr + + logger.use_syslog = use_syslog + logger.tag_log_lines = tag_log_lines + + if use_syslog: + syslog.openlog(ident, flags, facility) + + else: + logger.tag = ident + logger.pid = os.getpid() + logger.log_file = log_file + + if ident and have_setproctitle and use_setproctitle: + if proctitle_extra: + setproctitle.setproctitle("%s (%s)" % (ident, proctitle_extra)) + else: + setproctitle.setproctitle(ident) + +class logger(object): + """ + Closure for logging. + """ + + use_syslog = True + tag = "" + pid = 0 + log_file = sys.stderr + + def __init__(self, priority): + self.priority = priority + + def __call__(self, message): + if self.use_syslog: + syslog.syslog(self.priority, message) + elif self.tag_log_lines: + self.log_file.write("%s %s[%d]: %s\n" % (time.strftime("%F %T"), self.tag, self.pid, message)) + self.log_file.flush() + else: + self.log_file.write(message + "\n") + self.log_file.flush() + +error = logger(syslog.LOG_ERR) +warn = logger(syslog.LOG_WARNING) +note = logger(syslog.LOG_NOTICE) +info = logger(syslog.LOG_INFO) +debug = logger(syslog.LOG_DEBUG) + + +def set_trace(enable): + """ + Enable or disable call tracing. 
+ """ + + global enable_trace + enable_trace = enable + +def trace(): + """ + Execution trace -- where are we now, and whence came we here? + """ + + if enable_trace: + bt = tb.extract_stack(limit = 3) + return debug("[%s() at %s:%d from %s:%d]" % (bt[1][2], bt[1][0], bt[1][1], bt[0][0], bt[0][1])) + +def traceback(do_it = None): + """ + Consolidated backtrace facility with a bit of extra info. Argument + specifies whether or not to log the traceback (some modules and + classes have their own controls for this, this lets us provide a + unified interface). If no argument is specified, we use the global + default value rpki.log.enable_tracebacks. + + Assertion failures generate backtraces unconditionally, on the + theory that (a) assertion failures are programming errors by + definition, and (b) it's often hard to figure out what's triggering + a particular assertion failure without the backtrace. + """ + + if do_it is None: + do_it = enable_tracebacks + + e = sys.exc_info()[1] + assert e is not None, "rpki.log.traceback() called without valid trace on stack! This should not happen." + + if do_it or isinstance(e, AssertionError): + bt = tb.extract_stack(limit = 3) + error("Exception caught in %s() at %s:%d called from %s:%d" % (bt[1][2], bt[1][0], bt[1][1], bt[0][0], bt[0][1])) + bt = tb.format_exc() + assert bt is not None, "Apparently I'm still not using the right test for null backtrace" + for line in bt.splitlines(): + warn(line) + +def log_repr(obj, *tokens): + """ + Constructor for __repr__() strings, handles suppression of Python + IDs as needed, includes self_handle when available. + """ + + # pylint: disable=W0702 + + words = ["%s.%s" % (obj.__class__.__module__, obj.__class__.__name__)] + try: + words.append("{%s}" % obj.self.self_handle) + except: + pass + + for token in tokens: + if token is not None: + try: + s = str(token) + except: + s = "???" 
+ debug("Failed to generate repr() string for object of type %r" % type(token)) + traceback() + if s: + words.append(s) + + if show_python_ids: + words.append(" at %#x" % id(obj)) + + return "<" + " ".join(words) + ">" diff --git a/rpki/myrpki.py b/rpki/myrpki.py new file mode 100644 index 00000000..c5c7990f --- /dev/null +++ b/rpki/myrpki.py @@ -0,0 +1,23 @@ +# $Id$ +# +# Copyright (C) 2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +This is a tombstone for a program that no longer exists. +""" + +if __name__ != "__main__": # sic -- don't break regression tests + import sys + sys.exit('"myrpki" is obsolete. Please use "rpkic" instead.') diff --git a/rpki/mysql_import.py b/rpki/mysql_import.py new file mode 100644 index 00000000..88d30357 --- /dev/null +++ b/rpki/mysql_import.py @@ -0,0 +1,65 @@ +# $Id$ +# +# Copyright (C) 2011-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Import wrapper for MySQLdb. + +MySQLdb is an independent package, not part of Python, and has some +minor version skew issues with respect to Python itself, which we want +to suppress so that they don't annoy the user. None of this is +particularly hard, but the maze of whacky incantations required to do +this in multiple version of Python on multiple platforms is somewhat +tedious, and turns out to cause other problems when combined with the +way we construct executable Python scripts containing a standard +header indicating the location of our config file. + +So it turns out to be easier just to put all of the import voodoo +here, and have other modules that need MySQLdb import the MySQL module +object from this module. Looks kind of strange, but seems to work. 
+""" + +# pylint: disable=W0611 + +from __future__ import with_statement + +import warnings + +if hasattr(warnings, "catch_warnings"): + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + import MySQLdb +else: + import MySQLdb + +import _mysql_exceptions + +warnings.simplefilter("error", _mysql_exceptions.Warning) + +import MySQLdb.converters diff --git a/rpki/oids.py b/rpki/oids.py new file mode 100644 index 00000000..a97df6a7 --- /dev/null +++ b/rpki/oids.py @@ -0,0 +1,101 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +OID database. + +This used to be fairly complicated, with multiple representations and +a collection of conversion functions, but now it is very simple: + +- We represent OIDs as Python strings, holding the dotted-decimal + form of an OID. Nothing but decimal digits and "." is legal. + This is compatible with the format that rpki.POW uses. + +- We define symbols in this module whose values are OIDs. + +That's pretty much it. 
There's a bit of code at the end which checks +the syntax of the defined strings and provides a pretty-print function +for the rare occasion when we need to print an OID, but other than +that this is just a collection of symbolic names for text strings. +""" + +ecdsa_with_SHA256 = "1.2.840.10045.4.3.2" +sha256WithRSAEncryption = "1.2.840.113549.1.1.11" +sha384WithRSAEncryption = "1.2.840.113549.1.1.12" +sha512WithRSAEncryption = "1.2.840.113549.1.1.13" +id_data = "1.2.840.113549.1.7.1" +id_smime = "1.2.840.113549.1.9.16" +id_ct = "1.2.840.113549.1.9.16.1" +id_ct_routeOriginAttestation = "1.2.840.113549.1.9.16.1.24" +id_ct_rpkiManifest = "1.2.840.113549.1.9.16.1.26" +id_ct_xml = "1.2.840.113549.1.9.16.1.28" +id_ct_rpkiGhostbusters = "1.2.840.113549.1.9.16.1.35" +authorityInfoAccess = "1.3.6.1.5.5.7.1.1" +sbgp_ipAddrBlock = "1.3.6.1.5.5.7.1.7" +sbgp_autonomousSysNum = "1.3.6.1.5.5.7.1.8" +subjectInfoAccess = "1.3.6.1.5.5.7.1.11" +id_kp_bgpsec_router = "1.3.6.1.5.5.7.3.30" +id_cp_ipAddr_asNumber = "1.3.6.1.5.5.7.14.2" +id_ad_caIssuers = "1.3.6.1.5.5.7.48.2" +id_ad_caRepository = "1.3.6.1.5.5.7.48.5" +id_ad_signedObjectRepository = "1.3.6.1.5.5.7.48.9" +id_ad_rpkiManifest = "1.3.6.1.5.5.7.48.10" +id_ad_signedObject = "1.3.6.1.5.5.7.48.11" +commonName = "2.5.4.3" +serialNumber = "2.5.4.5" +countryName = "2.5.4.6" +localityName = "2.5.4.7" +stateOrProvinceName = "2.5.4.8" +streetAddress = "2.5.4.9" +organizationName = "2.5.4.10" +organizationalUnitName = "2.5.4.11" +subjectKeyIdentifier = "2.5.29.14" +keyUsage = "2.5.29.15" +basicConstraints = "2.5.29.19" +cRLNumber = "2.5.29.20" +cRLDistributionPoints = "2.5.29.31" +certificatePolicies = "2.5.29.32" +authorityKeyIdentifier = "2.5.29.35" +extendedKeyUsage = "2.5.29.37" +id_sha256 = "2.16.840.1.101.3.4.2.1" + +# Make sure all symbols exported so far look like OIDs, and build a +# dictionary to use when pretty-printing. 
+ +_oid2name = {} + +for _sym in dir(): + if not _sym.startswith("_"): + _val = globals()[_sym] + if not isinstance(_val, str) or not all(_v.isdigit() for _v in _val.split(".")): + raise ValueError("Bad OID definition: %s = %r" % (_sym, _val)) + _oid2name[_val] = _sym.replace("_", "-") + +del _sym +del _val + +def oid2name(oid): + """ + Translate an OID into a string suitable for printing. + """ + + if not isinstance(oid, (str, unicode)) or not all(o.isdigit() for o in oid.split(".")): + raise ValueError("Parameter does not look like an OID string: " + repr(oid)) + + return _oid2name.get(oid, oid) diff --git a/rpki/old_irdbd.py b/rpki/old_irdbd.py new file mode 100644 index 00000000..41060344 --- /dev/null +++ b/rpki/old_irdbd.py @@ -0,0 +1,325 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +IR database daemon. + +This is the old (pre-Django) version of irdbd, still used by smoketest +and perhaps still useful as a minimal example. This does NOT work with +the GUI, rpkic, or any of the other more recent tools. 
+""" + +import sys +import os +import time +import argparse +import urlparse +import rpki.http +import rpki.config +import rpki.resource_set +import rpki.relaxng +import rpki.exceptions +import rpki.left_right +import rpki.log +import rpki.x509 + +from rpki.mysql_import import MySQLdb + +class main(object): + + + def handle_list_resources(self, q_pdu, r_msg): + + r_pdu = rpki.left_right.list_resources_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.child_handle = q_pdu.child_handle + + self.cur.execute( + """ + SELECT registrant_id, valid_until + FROM registrant + WHERE registry_handle = %s AND registrant_handle = %s + """, + (q_pdu.self_handle, q_pdu.child_handle)) + + if self.cur.rowcount != 1: + raise rpki.exceptions.NotInDatabase( + "This query should have produced a single exact match, something's messed up" + " (rowcount = %d, self_handle = %s, child_handle = %s)" + % (self.cur.rowcount, q_pdu.self_handle, q_pdu.child_handle)) + + registrant_id, valid_until = self.cur.fetchone() + + r_pdu.valid_until = valid_until.strftime("%Y-%m-%dT%H:%M:%SZ") + + r_pdu.asn = rpki.resource_set.resource_set_as.from_sql( + self.cur, + """ + SELECT start_as, end_as + FROM registrant_asn + WHERE registrant_id = %s + """, + (registrant_id,)) + + r_pdu.ipv4 = rpki.resource_set.resource_set_ipv4.from_sql( + self.cur, + """ + SELECT start_ip, end_ip + FROM registrant_net + WHERE registrant_id = %s AND version = 4 + """, + (registrant_id,)) + + r_pdu.ipv6 = rpki.resource_set.resource_set_ipv6.from_sql( + self.cur, + """ + SELECT start_ip, end_ip + FROM registrant_net + WHERE registrant_id = %s AND version = 6 + """, + (registrant_id,)) + + r_msg.append(r_pdu) + + + def handle_list_roa_requests(self, q_pdu, r_msg): + + self.cur.execute( + "SELECT roa_request_id, asn FROM roa_request WHERE self_handle = %s", + (q_pdu.self_handle,)) + + for roa_request_id, asn in self.cur.fetchall(): + + r_pdu = rpki.left_right.list_roa_requests_elt() + r_pdu.tag = 
q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.asn = asn + + r_pdu.ipv4 = rpki.resource_set.roa_prefix_set_ipv4.from_sql( + self.cur, + """ + SELECT prefix, prefixlen, max_prefixlen + FROM roa_request_prefix + WHERE roa_request_id = %s AND version = 4 + """, + (roa_request_id,)) + + r_pdu.ipv6 = rpki.resource_set.roa_prefix_set_ipv6.from_sql( + self.cur, + """ + SELECT prefix, prefixlen, max_prefixlen + FROM roa_request_prefix + WHERE roa_request_id = %s AND version = 6 + """, + (roa_request_id,)) + + r_msg.append(r_pdu) + + + def handle_list_ghostbuster_requests(self, q_pdu, r_msg): + + self.cur.execute( + """ + SELECT vcard + FROM ghostbuster_request + WHERE self_handle = %s AND parent_handle = %s + """, + (q_pdu.self_handle, q_pdu.parent_handle)) + + vcards = [result[0] for result in self.cur.fetchall()] + + if not vcards: + + self.cur.execute( + """ + SELECT vcard + FROM ghostbuster_request + WHERE self_handle = %s AND parent_handle IS NULL + """, + (q_pdu.self_handle,)) + + vcards = [result[0] for result in self.cur.fetchall()] + + for vcard in vcards: + r_pdu = rpki.left_right.list_ghostbuster_requests_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.parent_handle = q_pdu.parent_handle + r_pdu.vcard = vcard + r_msg.append(r_pdu) + + + def handle_list_ee_certificate_requests(self, q_pdu, r_msg): + + self.cur.execute( + """ + SELECT ee_certificate_id, pkcs10, gski, cn, sn, eku, valid_until + FROM ee_certificate + WHERE self_handle = %s + """, + (q_pdu.self_handle,)) + + for ee_certificate_id, pkcs10, gski, cn, sn, eku, valid_until in self.cur.fetchall(): + + r_pdu = rpki.left_right.list_ee_certificate_requests_elt() + r_pdu.tag = q_pdu.tag + r_pdu.self_handle = q_pdu.self_handle + r_pdu.valid_until = valid_until.strftime("%Y-%m-%dT%H:%M:%SZ") + r_pdu.pkcs10 = rpki.x509.PKCS10(DER = pkcs10) + r_pdu.gski = gski + r_pdu.cn = cn + r_pdu.sn = sn + r_pdu.eku = eku + + r_pdu.asn = rpki.resource_set.resource_set_as.from_sql( + 
self.cur, + """ + SELECT start_as, end_as + FROM ee_certificate_asn + WHERE ee_certificate_id = %s + """, + (ee_certificate_id,)) + + r_pdu.ipv4 = rpki.resource_set.resource_set_ipv4.from_sql( + self.cur, + """ + SELECT start_ip, end_ip + FROM ee_certificate_net + WHERE ee_certificate_id = %s AND version = 4 + """, + (ee_certificate_id,)) + + r_pdu.ipv6 = rpki.resource_set.resource_set_ipv6.from_sql( + self.cur, + """ + SELECT start_ip, end_ip + FROM ee_certificate_net + WHERE ee_certificate_id = %s AND version = 6 + """, + (ee_certificate_id,)) + + r_msg.append(r_pdu) + + + handle_dispatch = { + rpki.left_right.list_resources_elt : handle_list_resources, + rpki.left_right.list_roa_requests_elt : handle_list_roa_requests, + rpki.left_right.list_ghostbuster_requests_elt : handle_list_ghostbuster_requests, + rpki.left_right.list_ee_certificate_requests_elt : handle_list_ee_certificate_requests } + + def handler(self, query, path, cb): + try: + + self.db.ping(True) + + r_msg = rpki.left_right.msg.reply() + + try: + + q_msg = rpki.left_right.cms_msg(DER = query).unwrap((self.bpki_ta, self.rpkid_cert)) + + if not isinstance(q_msg, rpki.left_right.msg) or not q_msg.is_query(): + raise rpki.exceptions.BadQuery, "Unexpected %r PDU" % q_msg + + for q_pdu in q_msg: + + try: + + try: + h = self.handle_dispatch[type(q_pdu)] + except KeyError: + raise rpki.exceptions.BadQuery, "Unexpected %r PDU" % q_pdu + else: + h(self, q_pdu, r_msg) + + except (rpki.async.ExitNow, SystemExit): + raise + + except Exception, e: + rpki.log.traceback() + r_msg.append(rpki.left_right.report_error_elt.from_exception(e, q_pdu.self_handle, q_pdu.tag)) + + except (rpki.async.ExitNow, SystemExit): + raise + + except Exception, e: + rpki.log.traceback() + r_msg.append(rpki.left_right.report_error_elt.from_exception(e)) + + cb(200, body = rpki.left_right.cms_msg().wrap(r_msg, self.irdbd_key, self.irdbd_cert)) + + except (rpki.async.ExitNow, SystemExit): + raise + + except Exception, e: + 
rpki.log.traceback() + + # We only get here in cases where we couldn't or wouldn't generate + # , so just return HTTP failure. + + cb(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) + + + def __init__(self): + + os.environ["TZ"] = "UTC" + time.tzset() + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("-c", "--config", + help = "override default location of configuration file") + parser.add_argument("-d", "--debug", action = "store_true", + help = "enable debugging mode") + args = parser.parse_args() + + rpki.log.init("irdbd", use_syslog = not args.debug) + + self.cfg = rpki.config.parser(args.config, "irdbd") + + startup_msg = self.cfg.get("startup-message", "") + if startup_msg: + rpki.log.info(startup_msg) + + self.cfg.set_global_flags() + + self.db = MySQLdb.connect(user = self.cfg.get("sql-username"), + db = self.cfg.get("sql-database"), + passwd = self.cfg.get("sql-password")) + + self.cur = self.db.cursor() + self.db.autocommit(True) + + self.bpki_ta = rpki.x509.X509(Auto_update = self.cfg.get("bpki-ta")) + self.rpkid_cert = rpki.x509.X509(Auto_update = self.cfg.get("rpkid-cert")) + self.irdbd_cert = rpki.x509.X509(Auto_update = self.cfg.get("irdbd-cert")) + self.irdbd_key = rpki.x509.RSA( Auto_update = self.cfg.get("irdbd-key")) + + u = urlparse.urlparse(self.cfg.get("http-url")) + + assert u.scheme in ("", "http") and \ + u.username is None and \ + u.password is None and \ + u.params == "" and \ + u.query == "" and \ + u.fragment == "" + + rpki.http.server(host = u.hostname or "localhost", + port = u.port or 443, + handlers = ((u.path, self.handler),)) diff --git a/rpki/pubd.py b/rpki/pubd.py new file mode 100644 index 00000000..31f22ed4 --- /dev/null +++ b/rpki/pubd.py @@ -0,0 +1,174 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers 
("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +RPKI publication engine. +""" + +import os +import time +import argparse +import sys +import re +import rpki.resource_set +import rpki.up_down +import rpki.x509 +import rpki.sql +import rpki.http +import rpki.config +import rpki.exceptions +import rpki.relaxng +import rpki.log +import rpki.publication +import rpki.daemonize + +class main(object): + """ + Main program for pubd. 
+ """ + + def __init__(self): + + os.environ["TZ"] = "UTC" + time.tzset() + + self.irbe_cms_timestamp = None + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("-c", "--config", + help = "override default location of configuration file") + parser.add_argument("-d", "--debug", action = "store_true", + help = "enable debugging mode") + parser.add_argument("-f", "--foreground", action = "store_true", + help = "do not daemonize") + parser.add_argument("--pidfile", + help = "override default location of pid file") + parser.add_argument("--profile", + help = "enable profiling, saving data to PROFILE") + args = parser.parse_args() + + self.profile = args.profile + + rpki.log.init("pubd", use_syslog = not args.debug) + + self.cfg = rpki.config.parser(args.config, "pubd") + self.cfg.set_global_flags() + + if not args.foreground and not args.debug: + rpki.daemonize.daemon(pidfile = args.pidfile) + + if self.profile: + import cProfile + prof = cProfile.Profile() + try: + prof.runcall(self.main) + finally: + prof.dump_stats(self.profile) + rpki.log.info("Dumped profile data to %s" % self.profile) + else: + self.main() + + def main(self): + + if self.profile: + rpki.log.info("Running in profile mode with output to %s" % self.profile) + + self.sql = rpki.sql.session(self.cfg) + + self.bpki_ta = rpki.x509.X509(Auto_update = self.cfg.get("bpki-ta")) + self.irbe_cert = rpki.x509.X509(Auto_update = self.cfg.get("irbe-cert")) + self.pubd_cert = rpki.x509.X509(Auto_update = self.cfg.get("pubd-cert")) + self.pubd_key = rpki.x509.RSA( Auto_update = self.cfg.get("pubd-key")) + + self.http_server_host = self.cfg.get("server-host", "") + self.http_server_port = self.cfg.getint("server-port") + + self.publication_base = self.cfg.get("publication-base", "publication/") + + self.publication_multimodule = self.cfg.getboolean("publication-multimodule", False) + + rpki.http.server( + host = self.http_server_host, + port = self.http_server_port, + handlers = 
(("/control", self.control_handler), + ("/client/", self.client_handler))) + + def handler_common(self, query, client, cb, certs, crl = None): + """ + Common PDU handler code. + """ + + def done(r_msg): + reply = rpki.publication.cms_msg().wrap(r_msg, self.pubd_key, self.pubd_cert, crl) + self.sql.sweep() + cb(reply) + + q_cms = rpki.publication.cms_msg(DER = query) + q_msg = q_cms.unwrap(certs) + if client is None: + self.irbe_cms_timestamp = q_cms.check_replay(self.irbe_cms_timestamp, "control") + else: + q_cms.check_replay_sql(client, client.client_handle) + q_msg.serve_top_level(self, client, done) + + def control_handler(self, query, path, cb): + """ + Process one PDU from the IRBE. + """ + + def done(body): + cb(200, body = body) + + rpki.log.trace() + try: + self.handler_common(query, None, done, (self.bpki_ta, self.irbe_cert)) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + cb(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) + + client_url_regexp = re.compile("/client/([-A-Z0-9_/]+)$", re.I) + + def client_handler(self, query, path, cb): + """ + Process one PDU from a client. 
+ """ + + def done(body): + cb(200, body = body) + + rpki.log.trace() + try: + match = self.client_url_regexp.search(path) + if match is None: + raise rpki.exceptions.BadContactURL, "Bad path: %s" % path + client_handle = match.group(1) + client = rpki.publication.client_elt.sql_fetch_where1(self, "client_handle = %s", (client_handle,)) + if client is None: + raise rpki.exceptions.ClientNotFound, "Could not find client %s" % client_handle + config = rpki.publication.config_elt.fetch(self) + if config is None or config.bpki_crl is None: + raise rpki.exceptions.CMSCRLNotSet + self.handler_common(query, client, done, (self.bpki_ta, client.bpki_cert, client.bpki_glue), config.bpki_crl) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + cb(500, reason = "Could not process PDU: %s" % e) diff --git a/rpki/publication.py b/rpki/publication.py new file mode 100644 index 00000000..2462ae39 --- /dev/null +++ b/rpki/publication.py @@ -0,0 +1,466 @@ +# $Id$ +# +# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +RPKI "publication" protocol. +""" + +import os +import errno +import rpki.resource_set +import rpki.x509 +import rpki.sql +import rpki.exceptions +import rpki.xml_utils +import rpki.http +import rpki.up_down +import rpki.relaxng +import rpki.sundial +import rpki.log + +class publication_namespace(object): + """ + XML namespace parameters for publication protocol. + """ + + xmlns = "http://www.hactrn.net/uris/rpki/publication-spec/" + nsmap = { None : xmlns } + +class control_elt(rpki.xml_utils.data_elt, rpki.sql.sql_persistent, publication_namespace): + """ + Virtual class for control channel objects. + """ + + def serve_dispatch(self, r_msg, cb, eb): + """ + Action dispatch handler. This needs special handling because we + need to make sure that this PDU arrived via the control channel. + """ + if self.client is not None: + raise rpki.exceptions.BadQuery, "Control query received on client channel" + rpki.xml_utils.data_elt.serve_dispatch(self, r_msg, cb, eb) + +class config_elt(control_elt): + """ + element. 
This is a little weird because there should + never be more than one row in the SQL config table, but we have to + put the BPKI CRL somewhere and SQL is the least bad place available. + + So we reuse a lot of the SQL machinery, but we nail config_id at 1, + we don't expose it in the XML protocol, and we only support the get + and set actions. + """ + + attributes = ("action", "tag") + element_name = "config" + elements = ("bpki_crl",) + + sql_template = rpki.sql.template( + "config", + "config_id", + ("bpki_crl", rpki.x509.CRL)) + + wired_in_config_id = 1 + + def startElement(self, stack, name, attrs): + """ + StartElement() handler for config object. This requires special + handling because of the weird way we treat config_id. + """ + control_elt.startElement(self, stack, name, attrs) + self.config_id = self.wired_in_config_id + + @classmethod + def fetch(cls, gctx): + """ + Fetch the config object from SQL. This requires special handling + because of the weird way we treat config_id. + """ + return cls.sql_fetch(gctx, cls.wired_in_config_id) + + def serve_set(self, r_msg, cb, eb): + """ + Handle a set action. This requires special handling because + config doesn't support the create method. + """ + if self.sql_fetch(self.gctx, self.config_id) is None: + control_elt.serve_create(self, r_msg, cb, eb) + else: + control_elt.serve_set(self, r_msg, cb, eb) + + def serve_fetch_one_maybe(self): + """ + Find the config object on which a get or set method should + operate. + """ + return self.sql_fetch(self.gctx, self.config_id) + +class client_elt(control_elt): + """ + element. 
+ """ + + element_name = "client" + attributes = ("action", "tag", "client_handle", "base_uri") + elements = ("bpki_cert", "bpki_glue") + booleans = ("clear_replay_protection",) + + sql_template = rpki.sql.template( + "client", + "client_id", + "client_handle", + "base_uri", + ("bpki_cert", rpki.x509.X509), + ("bpki_glue", rpki.x509.X509), + ("last_cms_timestamp", rpki.sundial.datetime)) + + base_uri = None + bpki_cert = None + bpki_glue = None + last_cms_timestamp = None + + def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Extra server actions for client_elt. + """ + actions = [] + if q_pdu.clear_replay_protection: + actions.append(self.serve_clear_replay_protection) + def loop(iterator, action): + action(iterator, eb) + rpki.async.iterator(actions, loop, cb) + + def serve_clear_replay_protection(self, cb, eb): + """ + Handle a clear_replay_protection action for this client. + """ + self.last_cms_timestamp = None + self.sql_mark_dirty() + cb() + + def serve_fetch_one_maybe(self): + """ + Find the client object on which a get, set, or destroy method + should operate, or which would conflict with a create method. + """ + return self.sql_fetch_where1(self.gctx, "client_handle = %s", self.client_handle) + + def serve_fetch_all(self): + """ + Find client objects on which a list method should operate. + """ + return self.sql_fetch_all(self.gctx) + + def check_allowed_uri(self, uri): + """ + Make sure that a target URI is within this client's allowed URI space. + """ + if not uri.startswith(self.base_uri): + raise rpki.exceptions.ForbiddenURI + +class publication_object_elt(rpki.xml_utils.base_elt, publication_namespace): + """ + Virtual class for publishable objects. These have very similar + syntax, differences lie in underlying datatype and methods. 
XML + methods are a little different from the pattern used for objects + that support the create/set/get/list/destroy actions, but + publishable objects don't go in SQL either so these classes would be + different in any case. + """ + + attributes = ("action", "tag", "client_handle", "uri") + payload_type = None + payload = None + + def endElement(self, stack, name, text): + """ + Handle a publishable element element. + """ + assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) + if text: + self.payload = self.payload_type(Base64 = text) # pylint: disable=E1102 + stack.pop() + + def toXML(self): + """ + Generate XML element for publishable object. + """ + elt = self.make_elt() + if self.payload: + elt.text = self.payload.get_Base64() + return elt + + def serve_dispatch(self, r_msg, cb, eb): + """ + Action dispatch handler. + """ + # pylint: disable=E0203 + try: + if self.client is None: + raise rpki.exceptions.BadQuery, "Client query received on control channel" + dispatch = { "publish" : self.serve_publish, + "withdraw" : self.serve_withdraw } + if self.action not in dispatch: + raise rpki.exceptions.BadQuery, "Unexpected query: action %s" % self.action + self.client.check_allowed_uri(self.uri) + dispatch[self.action]() + r_pdu = self.__class__() + r_pdu.action = self.action + r_pdu.tag = self.tag + r_pdu.uri = self.uri + r_msg.append(r_pdu) + cb() + except rpki.exceptions.NoObjectAtURI, e: + # This can happen when we're cleaning up from a prior mess, so + # we generate a PDU then carry on. + r_msg.append(report_error_elt.from_exception(e, self.tag)) + cb() + + def serve_publish(self): + """ + Publish an object. 
+ """ + rpki.log.info("Publishing %s" % self.payload.tracking_data(self.uri)) + filename = self.uri_to_filename() + filename_tmp = filename + ".tmp" + dirname = os.path.dirname(filename) + if not os.path.isdir(dirname): + os.makedirs(dirname) + f = open(filename_tmp, "wb") + f.write(self.payload.get_DER()) + f.close() + os.rename(filename_tmp, filename) + + def serve_withdraw(self): + """ + Withdraw an object, then recursively delete empty directories. + """ + rpki.log.info("Withdrawing %s" % self.uri) + filename = self.uri_to_filename() + try: + os.remove(filename) + except OSError, e: + if e.errno == errno.ENOENT: + raise rpki.exceptions.NoObjectAtURI, "No object published at %s" % self.uri + else: + raise + min_path_len = len(self.gctx.publication_base.rstrip("/")) + dirname = os.path.dirname(filename) + while len(dirname) > min_path_len: + try: + os.rmdir(dirname) + except OSError: + break + else: + dirname = os.path.dirname(dirname) + + def uri_to_filename(self): + """ + Convert a URI to a local filename. + """ + if not self.uri.startswith("rsync://"): + raise rpki.exceptions.BadURISyntax, self.uri + path = self.uri.split("/")[3:] + if not self.gctx.publication_multimodule: + del path[0] + path.insert(0, self.gctx.publication_base.rstrip("/")) + filename = "/".join(path) + if "/../" in filename or filename.endswith("/.."): + raise rpki.exceptions.BadURISyntax, filename + return filename + + @classmethod + def make_publish(cls, uri, obj, tag = None): + """ + Construct a publication PDU. + """ + assert cls.payload_type is not None and type(obj) is cls.payload_type + return cls.make_pdu(action = "publish", uri = uri, payload = obj, tag = tag) + + @classmethod + def make_withdraw(cls, uri, obj, tag = None): + """ + Construct a withdrawal PDU. + """ + assert cls.payload_type is not None and type(obj) is cls.payload_type + return cls.make_pdu(action = "withdraw", uri = uri, tag = tag) + + def raise_if_error(self): + """ + No-op, since this is not a PDU. 
+ """ + pass + +class certificate_elt(publication_object_elt): + """ + element. + """ + + element_name = "certificate" + payload_type = rpki.x509.X509 + +class crl_elt(publication_object_elt): + """ + element. + """ + + element_name = "crl" + payload_type = rpki.x509.CRL + +class manifest_elt(publication_object_elt): + """ + element. + """ + + element_name = "manifest" + payload_type = rpki.x509.SignedManifest + +class roa_elt(publication_object_elt): + """ + element. + """ + + element_name = "roa" + payload_type = rpki.x509.ROA + +class ghostbuster_elt(publication_object_elt): + """ + element. + """ + + element_name = "ghostbuster" + payload_type = rpki.x509.Ghostbuster + +publication_object_elt.obj2elt = dict( + (e.payload_type, e) for e in + (certificate_elt, crl_elt, manifest_elt, roa_elt, ghostbuster_elt)) + +class report_error_elt(rpki.xml_utils.text_elt, publication_namespace): + """ + element. + """ + + element_name = "report_error" + attributes = ("tag", "error_code") + text_attribute = "error_text" + + error_text = None + + @classmethod + def from_exception(cls, e, tag = None): + """ + Generate a element from an exception. + """ + self = cls() + self.tag = tag + self.error_code = e.__class__.__name__ + self.error_text = str(e) + return self + + def __str__(self): + s = "" + if getattr(self, "tag", None) is not None: + s += "[%s] " % self.tag + s += self.error_code + if getattr(self, "error_text", None) is not None: + s += ": " + self.error_text + return s + + def raise_if_error(self): + """ + Raise exception associated with this PDU. + """ + t = rpki.exceptions.__dict__.get(self.error_code) + if isinstance(t, type) and issubclass(t, rpki.exceptions.RPKI_Exception): + raise t, getattr(self, "text", None) + else: + raise rpki.exceptions.BadPublicationReply, "Unexpected response from pubd: %s" % self + +class msg(rpki.xml_utils.msg, publication_namespace): + """ + Publication PDU. 
+ """ + + ## @var version + # Protocol version + version = 1 + + ## @var pdus + # Dispatch table of PDUs for this protocol. + pdus = dict((x.element_name, x) for x in + (config_elt, client_elt, certificate_elt, crl_elt, manifest_elt, roa_elt, ghostbuster_elt, report_error_elt)) + + def serve_top_level(self, gctx, client, cb): + """ + Serve one msg PDU. + """ + if not self.is_query(): + raise rpki.exceptions.BadQuery, "Message type is not query" + r_msg = self.__class__.reply() + + def loop(iterator, q_pdu): + + def fail(e): + if not isinstance(e, rpki.exceptions.NotFound): + rpki.log.traceback() + r_msg.append(report_error_elt.from_exception(e, q_pdu.tag)) + cb(r_msg) + + try: + q_pdu.gctx = gctx + q_pdu.client = client + q_pdu.serve_dispatch(r_msg, iterator, fail) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + fail(e) + + def done(): + cb(r_msg) + + rpki.async.iterator(self, loop, done) + +class sax_handler(rpki.xml_utils.sax_handler): + """ + SAX handler for publication protocol. + """ + + pdu = msg + name = "msg" + version = "1" + +class cms_msg(rpki.x509.XML_CMS_object): + """ + Class to hold a CMS-signed publication PDU. + """ + + encoding = "us-ascii" + schema = rpki.relaxng.publication + saxify = sax_handler.saxify diff --git a/rpki/rcynic.py b/rpki/rcynic.py new file mode 100644 index 00000000..73394fb8 --- /dev/null +++ b/rpki/rcynic.py @@ -0,0 +1,275 @@ +# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Iterator class to parse the output of an rcynic run. +""" + +__version__ = '$Id$' + +import os +import rpki.x509 +import rpki.exceptions +import rpki.resource_set +from xml.etree.ElementTree import ElementTree + +class UnknownObject(rpki.exceptions.RPKI_Exception): + """ + Unrecognized object in rcynic result cache. + """ + +class NotRsyncURI(rpki.exceptions.RPKI_Exception): + """ + URI is not an rsync URI. + """ + +class rcynic_object(object): + """ + An object read from rcynic cache. + """ + + def __init__(self, filename, **kwargs): + self.filename = filename + for k, v in kwargs.iteritems(): + setattr(self, k, v) + self.obj = self.obj_class(DER_file = filename) + + def __repr__(self): + return "<%s %s %s at 0x%x>" % (self.__class__.__name__, self.uri, self.resources, id(self)) + + def show_attrs(self, *attrs): + """ + Print a bunch of object attributes, quietly ignoring any that + might be missing. + """ + for a in attrs: + try: + print "%s: %s" % (a.capitalize(), getattr(self, a)) + except AttributeError: + pass + + def show(self): + """ + Print common object attributes. + """ + self.show_attrs("filename", "uri", "status", "timestamp") + +class rcynic_certificate(rcynic_object): + """ + A certificate from rcynic cache. 
+ """ + + obj_class = rpki.x509.X509 + + def __init__(self, filename, **kwargs): + rcynic_object.__init__(self, filename, **kwargs) + self.notBefore = self.obj.getNotBefore() + self.notAfter = self.obj.getNotAfter() + self.aia_uri = self.obj.get_aia_uri() + self.sia_directory_uri = self.obj.get_sia_directory_uri() + self.manifest_uri = self.obj.get_sia_manifest_uri() + self.resources = self.obj.get_3779resources() + self.is_ca = self.obj.is_CA() + self.serial = self.obj.getSerial() + self.issuer = self.obj.getIssuer() + self.subject = self.obj.getSubject() + self.ski = self.obj.hSKI() + self.aki = self.obj.hAKI() + + def show(self): + """ + Print certificate attributes. + """ + rcynic_object.show(self) + self.show_attrs("notBefore", "notAfter", "aia_uri", "sia_directory_uri", "resources") + +class rcynic_roa(rcynic_object): + """ + A ROA from rcynic cache. + """ + + obj_class = rpki.x509.ROA + + def __init__(self, filename, **kwargs): + rcynic_object.__init__(self, filename, **kwargs) + self.obj.extract() + self.asID = self.obj.get_POW().getASID() + self.prefix_sets = [] + v4, v6 = self.obj.get_POW().getPrefixes() + if v4: + self.prefix_sets.append(rpki.resource_set.roa_prefix_set_ipv4([ + rpki.resource_set.roa_prefix_ipv4(p[0], p[1], p[2]) for p in v4])) + if v6: + self.prefix_sets.append(rpki.resource_set.roa_prefix_set_ipv6([ + rpki.resource_set.roa_prefix_ipv6(p[0], p[1], p[2]) for p in v6])) + self.ee = rpki.x509.X509(POW = self.obj.get_POW().certs()[0]) + self.notBefore = self.ee.getNotBefore() + self.notAfter = self.ee.getNotAfter() + self.aia_uri = self.ee.get_aia_uri() + self.resources = self.ee.get_3779resources() + self.issuer = self.ee.getIssuer() + self.serial = self.ee.getSerial() + self.subject = self.ee.getSubject() + self.aki = self.ee.hAKI() + self.ski = self.ee.hSKI() + + def show(self): + """ + Print ROA attributes. 
+ """ + rcynic_object.show(self) + self.show_attrs("notBefore", "notAfter", "aia_uri", "resources", "asID") + if self.prefix_sets: + print "Prefixes:", ",".join(str(i) for i in self.prefix_sets) + +class rcynic_ghostbuster(rcynic_object): + """ + Ghostbuster record from the rcynic cache. + """ + + obj_class = rpki.x509.Ghostbuster + + def __init__(self, *args, **kwargs): + rcynic_object.__init__(self, *args, **kwargs) + self.obj.extract() + self.vcard = self.obj.get_content() + self.ee = rpki.x509.X509(POW = self.obj.get_POW().certs()[0]) + self.notBefore = self.ee.getNotBefore() + self.notAfter = self.ee.getNotAfter() + self.aia_uri = self.ee.get_aia_uri() + self.issuer = self.ee.getIssuer() + self.serial = self.ee.getSerial() + self.subject = self.ee.getSubject() + self.aki = self.ee.hAKI() + self.ski = self.ee.hSKI() + + def show(self): + rcynic_object.show(self) + self.show_attrs("notBefore", "notAfter", "vcard") + +file_name_classes = { + ".cer" : rcynic_certificate, + ".gbr" : rcynic_ghostbuster, + ".roa" : rcynic_roa } + +class rcynic_file_iterator(object): + """ + Iterate over files in an rcynic output tree, yielding a Python + representation of each object found. 
+ """ + + def __init__(self, rcynic_root, + authenticated_subdir = "authenticated"): + self.rcynic_dir = os.path.join(rcynic_root, authenticated_subdir) + + def __iter__(self): + for root, dirs, files in os.walk(self.rcynic_dir): # pylint: disable=W0612 + for filename in files: + filename = os.path.join(root, filename) + ext = os.path.splitext(filename)[1] + if ext in file_name_classes: + yield file_name_classes[ext](filename) + +class validation_status_element(object): + def __init__(self, *args, **kwargs): + self.attrs = [] + for k, v in kwargs.iteritems(): + setattr(self, k, v) + # attribute names are saved so that the __repr__ method can + # display the subset of attributes the user specified + self.attrs.append(k) + self._obj = None + + def get_obj(self): + if not self._obj: + self._obj = self.file_class(filename=self.filename, uri=self.uri) + return self._obj + + def __repr__(self): + v = [self.__class__.__name__, 'id=%s' % str(id(self))] + v.extend(['%s=%s' % (x, getattr(self, x)) for x in self.attrs]) + return '<%s>' % (' '.join(v),) + + obj = property(get_obj) + +class rcynic_xml_iterator(object): + """ + Iterate over validation_status entries in the XML output from an + rcynic run. Yields a tuple for each entry: + + timestamp, generation, status, object + + where URI, status, and timestamp are the corresponding values from + the XML element, OK is a boolean indicating whether validation was + considered succesful, and object is a Python representation of the + object in question. If OK is True, object will be from rcynic's + authenticated output tree; otherwise, object will be from rcynic's + unauthenticated output tree. + + Note that it is possible for the same URI to appear in more than one + validation_status element; in such cases, the succesful case (OK + True) should be the last entry (as rcynic will stop trying once it + gets a good copy), but there may be multiple failures, which might + or might not have different status codes. 
+ """ + + def __init__(self, rcynic_root, xml_file, + authenticated_old_subdir = "authenticated.old", + unauthenticated_subdir = "unauthenticated"): + self.rcynic_root = rcynic_root + self.xml_file = xml_file + self.authenticated_subdir = os.path.join(rcynic_root, 'authenticated') + self.authenticated_old_subdir = os.path.join(rcynic_root, authenticated_old_subdir) + self.unauthenticated_subdir = os.path.join(rcynic_root, unauthenticated_subdir) + + base_uri = "rsync://" + + def uri_to_filename(self, uri): + if uri.startswith(self.base_uri): + return uri[len(self.base_uri):] + else: + raise NotRsyncURI, "Not an rsync URI %r" % uri + + def __iter__(self): + for validation_status in ElementTree(file=self.xml_file).getroot().getiterator("validation_status"): + timestamp = validation_status.get("timestamp") + status = validation_status.get("status") + uri = validation_status.text.strip() + generation = validation_status.get("generation") + + # determine the path to this object + if status == 'object_accepted': + d = self.authenticated_subdir + elif generation == 'backup': + d = self.authenticated_old_subdir + else: + d = self.unauthenticated_subdir + + filename = os.path.join(d, self.uri_to_filename(uri)) + + ext = os.path.splitext(filename)[1] + if ext in file_name_classes: + yield validation_status_element(timestamp = timestamp, generation = generation, + uri=uri, status = status, filename = filename, + file_class = file_name_classes[ext]) + +def label_iterator(xml_file): + """ + Returns an iterator which contains all defined labels from an rcynic XML + output file. Each item is a tuple of the form + (label, kind, description). + """ + + for label in ElementTree(file=xml_file).find("labels"): + yield label.tag, label.get("kind"), label.text.strip() diff --git a/rpki/relaxng.py b/rpki/relaxng.py new file mode 100644 index 00000000..0d8c0d64 --- /dev/null +++ b/rpki/relaxng.py @@ -0,0 +1,2441 @@ +# Automatically generated, do not edit. 
+ +import lxml.etree + +## @var left_right +## Parsed RelaxNG left_right schema +left_right = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' + + + + + + + + 1 + + + + + + query + + + + + + + + reply + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1024 + + + + + + + + create + + + + + + set + + + + + + get + + + + + + list + + + + + + destroy + + + + + + + 512000 + + + + + + + 255 + [\-_A-Za-z0-9]+ + + + + + + 4096 + + + + + + 1024 + + + + + + 512000 + [\-,0-9]* + + + + + 512000 + [\-,0-9/.]* + + + + + 512000 + [\-,0-9/:a-fA-F]* + + + + + + + yes + + + + + yes + + + + + yes + + + + + yes + + + + + yes + + + + + yes + + + + + yes + + + + + + + + yes + no + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + yes + + + + rsa + + + + + sha256 + + + + + 2048 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + yes + + + + + yes + + + + + yes + + + + + yes + + + + + yes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + yes + + + + + yes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + yes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + .*Z + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 27 + 27 + + + + + .*Z + + + + + + + + + + + + + + + + + + + + + 64 + [\-0-9A-Za-z_ ]+ + + + + + + + 64 + [0-9A-Fa-f]+ + + + + + + + 512000 + [.,0-9]+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + .*Z + + + + + .*Z + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1024 + + + + + + + + + + + + + + 512000 + + + + + + +''')) + +## @var up_down +## Parsed RelaxNG up_down schema +up_down = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' + + + + + 512000 + [\-,0-9]* + + + + + 512000 + [\-,/.0-9]* + + + + + 512000 + [\-,/:0-9a-fA-F]* + + + + + 1 + 1024 + + + + + 27 + 1024 + + + + + 1 + 1024 + + + + + 10 + 4096 + + + + + 4 + 512000 + + + + + + + 1 + + + + + + + + + + + + + + list + + + + + + list_response + + + + + + issue + + + + + + issue_response + + + + + + revoke + + + + + + revoke_response + + + + + + error_response + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1024 + rsync://.+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 9999 + + + + + + + + + 1024 + + + + + + +''')) + +## @var publication +## Parsed RelaxNG publication schema +publication = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' + + + + + + + + 1 + + + + + + query + + + + + + + + reply + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1024 + + + + + + + + + + + 4096 + + + + + + + + + + + 255 + [\-_A-Za-z0-9/]+ + + + + + + + + + + + + + + set + + + + + + + + + + + set + + + + + + + + + + get + + + + + + + + + + get + + + + + + + + + + + + + + + + + yes + + + + + + + + + + + + + + + + + + + + + + + + create + + + + + + + + + + + + + create + + + + + + + + + + + set + + + + + + + + + + + + + set + + + + + + + + + + + 
get + + + + + + + + + + + get + + + + + + + + + + + + list + + + + + + + + + + list + + + + + + + + + + + + destroy + + + + + + + + + + + destroy + + + + + + + + + + + + publish + + + + + + + + + + + + publish + + + + + + + + + + + withdraw + + + + + + + + + + + withdraw + + + + + + + + + + + + publish + + + + + + + + + + + + publish + + + + + + + + + + + withdraw + + + + + + + + + + + withdraw + + + + + + + + + + + + publish + + + + + + + + + + + + publish + + + + + + + + + + + withdraw + + + + + + + + + + + withdraw + + + + + + + + + + + + publish + + + + + + + + + + + + publish + + + + + + + + + + + withdraw + + + + + + + + + + + withdraw + + + + + + + + + + + + publish + + + + + + + + + + + + publish + + + + + + + + + + + withdraw + + + + + + + + + + + withdraw + + + + + + + + + + + 1024 + + + + + + + + + + + + + 512000 + + + + + + +''')) + +## @var myrpki +## Parsed RelaxNG myrpki schema +myrpki = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' + + + + 2 + + + + 512000 + + + + + 255 + [\-_A-Za-z0-9]+ + + + + + 255 + [\-_A-Za-z0-9/]+ + + + + + 4096 + + + + + + + + 512000 + [\-,0-9]+ + + + + + 512000 + [\-,0-9/.]+ + + + + + 512000 + [\-,0-9/:a-fA-F]+ + + + + + .*Z + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + none + + + offer + + + + referral + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + confirmed + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +''')) + +## @var router_certificate +## Parsed RelaxNG router_certificate schema +router_certificate = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' + + + + 1 + + + + 512000 + + + + + + + + 512000 + [0-9][\-,0-9]* + + + + + 
.*Z + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +''')) + diff --git a/rpki/resource_set.py b/rpki/resource_set.py new file mode 100644 index 00000000..2ec19cab --- /dev/null +++ b/rpki/resource_set.py @@ -0,0 +1,1148 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +Classes dealing with sets of resources. + +The basic mechanics of a resource set are the same for any of the +resources we handle (ASNs, IPv4 addresses, or IPv6 addresses), so we +can provide the same operations on any of them, even though the +underlying details vary. + +We also provide some basic set operations (union, intersection, etc). +""" + +import re +import math +import rpki.exceptions +import rpki.POW + +## @var inherit_token +# Token used to indicate inheritance in read and print syntax. 
+ +inherit_token = "" + +re_asn_range = re.compile("^([0-9]+)-([0-9]+)$") +re_address_range = re.compile("^([0-9:.a-fA-F]+)-([0-9:.a-fA-F]+)$") +re_prefix_with_maxlen = re.compile("^([0-9:.a-fA-F]+)/([0-9]+)-([0-9]+)$") +re_prefix = re.compile("^([0-9:.a-fA-F]+)/([0-9]+)$") + +class resource_range(object): + """ + Generic resource range type. Assumes underlying type is some kind + of integer. + + This is a virtual class. You probably don't want to use this type + directly. + """ + + def __init__(self, range_min, range_max): + assert range_min.__class__ is range_max.__class__, \ + "Type mismatch, %r doesn't match %r" % (range_min.__class__, range_max.__class__) + assert range_min <= range_max, "Mis-ordered range: %s before %s" % (range_min, range_max) + self.min = range_min + self.max = range_max + + def __cmp__(self, other): + assert self.__class__ is other.__class__, \ + "Type mismatch, comparing %r with %r" % (self.__class__, other.__class__) + return cmp(self.min, other.min) or cmp(self.max, other.max) + +class resource_range_as(resource_range): + """ + Range of Autonomous System Numbers. + + Denotes a single ASN by a range whose min and max values are + identical. + """ + + ## @var datum_type + # Type of underlying data (min and max). + + datum_type = long + + def __init__(self, range_min, range_max): + resource_range.__init__(self, + long(range_min) if isinstance(range_min, int) else range_min, + long(range_max) if isinstance(range_max, int) else range_max) + + def __str__(self): + """ + Convert a resource_range_as to string format. + """ + if self.min == self.max: + return str(self.min) + else: + return str(self.min) + "-" + str(self.max) + + @classmethod + def parse_str(cls, x): + """ + Parse ASN resource range from text (eg, XML attributes). 
+ """ + r = re_asn_range.match(x) + if r: + return cls(long(r.group(1)), long(r.group(2))) + else: + return cls(long(x), long(x)) + + @classmethod + def from_strings(cls, a, b = None): + """ + Construct ASN range from strings. + """ + if b is None: + b = a + return cls(long(a), long(b)) + +class resource_range_ip(resource_range): + """ + Range of (generic) IP addresses. + + Prefixes are converted to ranges on input, and ranges that can be + represented as prefixes are written as prefixes on output. + + This is a virtual class. You probably don't want to use it + directly. + """ + + ## @var datum_type + # Type of underlying data (min and max). + + datum_type = rpki.POW.IPAddress + + def prefixlen(self): + """ + Determine whether a resource_range_ip can be expressed as a + prefix. Returns prefix length if it can, otherwise raises + MustBePrefix exception. + """ + mask = self.min ^ self.max + if self.min & mask != 0: + raise rpki.exceptions.MustBePrefix + prefixlen = self.min.bits + while mask & 1: + prefixlen -= 1 + mask >>= 1 + if mask: + raise rpki.exceptions.MustBePrefix + return prefixlen + + @property + def can_be_prefix(self): + """ + Boolean property indicating whether this range can be expressed as + a prefix. + + This just calls .prefixlen() to do the work, so that we can keep + the logic in one place. This property is useful primarily in + context where catching an exception isn't practical. + """ + try: + self.prefixlen() + return True + except rpki.exceptions.MustBePrefix: + return False + + def __str__(self): + """ + Convert a resource_range_ip to string format. + """ + try: + return str(self.min) + "/" + str(self.prefixlen()) + except rpki.exceptions.MustBePrefix: + return str(self.min) + "-" + str(self.max) + + @classmethod + def parse_str(cls, x): + """ + Parse IP address range or prefix from text (eg, XML attributes). 
+ """ + r = re_address_range.match(x) + if r: + return cls.from_strings(r.group(1), r.group(2)) + r = re_prefix.match(x) + if r: + a = rpki.POW.IPAddress(r.group(1)) + if cls is resource_range_ip and a.version == 4: + cls = resource_range_ipv4 + if cls is resource_range_ip and a.version == 6: + cls = resource_range_ipv6 + return cls.make_prefix(a, int(r.group(2))) + raise rpki.exceptions.BadIPResource, 'Bad IP resource "%s"' % (x) + + @classmethod + def make_prefix(cls, prefix, prefixlen): + """ + Construct a resource range corresponding to a prefix. + """ + assert isinstance(prefix, rpki.POW.IPAddress) and isinstance(prefixlen, (int, long)) + assert prefixlen >= 0 and prefixlen <= prefix.bits, "Nonsensical prefix length: %s" % prefixlen + mask = (1 << (prefix.bits - prefixlen)) - 1 + assert (prefix & mask) == 0, "Resource not in canonical form: %s/%s" % (prefix, prefixlen) + return cls(prefix, rpki.POW.IPAddress(prefix | mask)) + + def chop_into_prefixes(self, result): + """ + Chop up a resource_range_ip into ranges that can be represented as + prefixes. + """ + try: + self.prefixlen() + result.append(self) + except rpki.exceptions.MustBePrefix: + range_min = self.min + range_max = self.max + while range_max >= range_min: + bits = int(math.log(long(range_max - range_min + 1), 2)) + while True: + mask = ~(~0 << bits) + assert range_min + mask <= range_max + if range_min & mask == 0: + break + assert bits > 0 + bits -= 1 + result.append(self.make_prefix(range_min, range_min.bits - bits)) + range_min = range_min + mask + 1 + + @classmethod + def from_strings(cls, a, b = None): + """ + Construct IP address range from strings. 
+ """ + if b is None: + b = a + a = rpki.POW.IPAddress(a) + b = rpki.POW.IPAddress(b) + if a.version != b.version: + raise TypeError + if cls is resource_range_ip: + if a.version == 4: + return resource_range_ipv4(a, b) + if a.version == 6: + return resource_range_ipv6(a, b) + elif a.version == cls.version: + return cls(a, b) + else: + raise TypeError + +class resource_range_ipv4(resource_range_ip): + """ + Range of IPv4 addresses. + """ + + version = 4 + +class resource_range_ipv6(resource_range_ip): + """ + Range of IPv6 addresses. + """ + + version = 6 + +def _rsplit(rset, that): + """ + Utility function to split a resource range into two resource ranges. + """ + + this = rset.pop(0) + + assert type(this) is type(that), "type(this) [%r] is not type(that) [%r]" % (type(this), type(that)) + + assert type(this.min) is type(that.min), "type(this.min) [%r] is not type(that.min) [%r]" % (type(this.min), type(that.min)) + assert type(this.min) is type(this.max), "type(this.min) [%r] is not type(this.max) [%r]" % (type(this.min), type(this.max)) + assert type(that.min) is type(that.max), "type(that.min) [%r] is not type(that.max) [%r]" % (type(that.min), type(that.max)) + + if this.min < that.min: + rset.insert(0, type(this)(this.min, type(that.min)(that.min - 1))) + rset.insert(1, type(this)(that.min, this.max)) + + else: + assert this.max > that.max + rset.insert(0, type(this)(this.min, that.max)) + rset.insert(1, type(this)(type(that.max)(that.max + 1), this.max)) + +class resource_set(list): + """ + Generic resource set, a list subclass containing resource ranges. + + This is a virtual class. You probably don't want to use it + directly. + """ + + ## @var inherit + # Boolean indicating whether this resource_set uses RFC 3779 inheritance. + + inherit = False + + ## @var canonical + # Whether this resource_set is currently in canonical form. + + canonical = False + + def __init__(self, ini = None, allow_overlap = False): + """ + Initialize a resource_set. 
+ """ + list.__init__(self) + if isinstance(ini, (int, long)): + ini = str(ini) + if ini is inherit_token: + self.inherit = True + elif isinstance(ini, str) and len(ini): + self.extend(self.parse_str(s) for s in ini.split(",")) + elif isinstance(ini, list): + self.extend(ini) + elif ini is not None and ini != "": + raise ValueError("Unexpected initializer: %s" % str(ini)) + self.canonize(allow_overlap) + + def canonize(self, allow_overlap = False): + """ + Whack this resource_set into canonical form. + """ + assert not self.inherit or len(self) == 0 + if not self.canonical: + self.sort() + i = 0 + while i + 1 < len(self): + if allow_overlap and self[i].max + 1 >= self[i+1].min: + self[i] = type(self[i])(self[i].min, max(self[i].max, self[i+1].max)) + del self[i+1] + elif self[i].max + 1 == self[i+1].min: + self[i] = type(self[i])(self[i].min, self[i+1].max) + del self[i+1] + else: + i += 1 + for i in xrange(0, len(self) - 1): + if self[i].max >= self[i+1].min: + raise rpki.exceptions.ResourceOverlap("Resource overlap: %s %s" % (self[i], self[i+1])) + self.canonical = True + + def append(self, item): + """ + Wrapper around list.append() (q.v.) to reset canonical flag. + """ + list.append(self, item) + self.canonical = False + + def extend(self, item): + """ + Wrapper around list.extend() (q.v.) to reset canonical flag. + """ + list.extend(self, item) + self.canonical = False + + def __str__(self): + """ + Convert a resource_set to string format. + """ + if self.inherit: + return inherit_token + else: + return ",".join(str(x) for x in self) + + def _comm(self, other): + """ + Like comm(1), sort of. + + Returns a tuple of three resource sets: resources only in self, + resources only in other, and resources in both. Used (not very + efficiently) as the basis for most set operations on resource + sets. 
+ """ + + assert not self.inherit + assert type(self) is type(other), "Type mismatch %r %r" % (type(self), type(other)) + set1 = type(self)(self) # clone and whack into canonical form + set2 = type(other)(other) # ditto + only1, only2, both = [], [], [] + while set1 or set2: + if set1 and (not set2 or set1[0].max < set2[0].min): + only1.append(set1.pop(0)) + elif set2 and (not set1 or set2[0].max < set1[0].min): + only2.append(set2.pop(0)) + elif set1[0].min < set2[0].min: + _rsplit(set1, set2[0]) + elif set2[0].min < set1[0].min: + _rsplit(set2, set1[0]) + elif set1[0].max < set2[0].max: + _rsplit(set2, set1[0]) + elif set2[0].max < set1[0].max: + _rsplit(set1, set2[0]) + else: + assert set1[0].min == set2[0].min and set1[0].max == set2[0].max + both.append(set1.pop(0)) + set2.pop(0) + return type(self)(only1), type(self)(only2), type(self)(both) + + def union(self, other): + """ + Set union for resource sets. + """ + + assert not self.inherit + assert type(self) is type(other), "Type mismatch: %r %r" % (type(self), type(other)) + set1 = type(self)(self) # clone and whack into canonical form + set2 = type(other)(other) # ditto + result = [] + while set1 or set2: + if set1 and (not set2 or set1[0].max < set2[0].min): + result.append(set1.pop(0)) + elif set2 and (not set1 or set2[0].max < set1[0].min): + result.append(set2.pop(0)) + else: + this = set1.pop(0) + that = set2.pop(0) + assert type(this) is type(that) + range_min = min(this.min, that.min) + range_max = max(this.max, that.max) + result.append(type(this)(range_min, range_max)) + while set1 and set1[0].max <= range_max: + assert set1[0].min >= range_min + del set1[0] + while set2 and set2[0].max <= range_max: + assert set2[0].min >= range_min + del set2[0] + return type(self)(result) + + __or__ = union + + def intersection(self, other): + """ + Set intersection for resource sets. 
+ """ + return self._comm(other)[2] + + __and__ = intersection + + def difference(self, other): + """ + Set difference for resource sets. + """ + return self._comm(other)[0] + + __sub__ = difference + + def symmetric_difference(self, other): + """ + Set symmetric difference (XOR) for resource sets. + """ + com = self._comm(other) + return com[0] | com[1] + + __xor__ = symmetric_difference + + def contains(self, item): + """ + Set membership test for resource sets. + """ + assert not self.inherit + self.canonize() + if not self: + return False + if type(item) is type(self[0]): + range_min = item.min + range_max = item.max + else: + range_min = item + range_max = item + lo = 0 + hi = len(self) + while lo < hi: + mid = (lo + hi) / 2 + if self[mid].max < range_max: + lo = mid + 1 + else: + hi = mid + return lo < len(self) and self[lo].min <= range_min and self[lo].max >= range_max + + __contains__ = contains + + def issubset(self, other): + """ + Test whether self is a subset (possibly improper) of other. + """ + for i in self: + if not other.contains(i): + return False + return True + + __le__ = issubset + + def issuperset(self, other): + """ + Test whether self is a superset (possibly improper) of other. + """ + return other.issubset(self) + + __ge__ = issuperset + + def __lt__(self, other): + return not self.issuperset(other) + + def __gt__(self, other): + return not self.issubset(other) + + def __ne__(self, other): + """ + A set with the inherit bit set is always unequal to any other set, because + we can't know the answer here. This is also consistent with __nonzero__ + which returns True for inherit sets, and False for empty sets. + """ + return self.inherit or other.inherit or list.__ne__(self, other) + + def __eq__(self, other): + return not self.__ne__(other) + + def __nonzero__(self): + """ + Tests whether or not this set is empty. Note that sets with the inherit + bit set are considered non-empty, despite having zero length. 
+ """ + return self.inherit or len(self) + + @classmethod + def from_sql(cls, sql, query, args = None): + """ + Create resource set from an SQL query. + + sql is an object that supports execute() and fetchall() methods + like a DB API 2.0 cursor object. + + query is an SQL query that returns a sequence of (min, max) pairs. + """ + + sql.execute(query, args) + return cls(ini = [cls.range_type(cls.range_type.datum_type(b), + cls.range_type.datum_type(e)) + for (b, e) in sql.fetchall()]) + + @classmethod + def from_django(cls, iterable): + """ + Create resource set from a Django query. + + iterable is something which returns (min, max) pairs. + """ + + return cls(ini = [cls.range_type(cls.range_type.datum_type(b), + cls.range_type.datum_type(e)) + for (b, e) in iterable]) + + @classmethod + def parse_str(cls, s): + """ + Parse resource set from text string (eg, XML attributes). This is + a backwards compatability wrapper, real functionality is now part + of the range classes. + """ + return cls.range_type.parse_str(s) + +class resource_set_as(resource_set): + """ + Autonomous System Number resource set. + """ + + ## @var range_type + # Type of range underlying this type of resource_set. + + range_type = resource_range_as + +class resource_set_ip(resource_set): + """ + (Generic) IP address resource set. + + This is a virtual class. You probably don't want to use it + directly. + """ + + def to_roa_prefix_set(self): + """ + Convert from a resource set to a ROA prefix set. + """ + prefix_ranges = [] + for r in self: + r.chop_into_prefixes(prefix_ranges) + return self.roa_prefix_set_type([ + self.roa_prefix_set_type.prefix_type(r.min, r.prefixlen()) + for r in prefix_ranges]) + +class resource_set_ipv4(resource_set_ip): + """ + IPv4 address resource set. + """ + + ## @var range_type + # Type of range underlying this type of resource_set. + + range_type = resource_range_ipv4 + +class resource_set_ipv6(resource_set_ip): + """ + IPv6 address resource set. 
+ """ + + ## @var range_type + # Type of range underlying this type of resource_set. + + range_type = resource_range_ipv6 + +class resource_bag(object): + """ + Container to simplify passing around the usual triple of ASN, IPv4, + and IPv6 resource sets. + """ + + ## @var asn + # Set of Autonomous System Number resources. + + ## @var v4 + # Set of IPv4 resources. + + ## @var v6 + # Set of IPv6 resources. + + ## @var valid_until + # Expiration date of resources, for setting certificate notAfter field. + + def __init__(self, asn = None, v4 = None, v6 = None, valid_until = None): + self.asn = asn or resource_set_as() + self.v4 = v4 or resource_set_ipv4() + self.v6 = v6 or resource_set_ipv6() + self.valid_until = valid_until + + def oversized(self, other): + """ + True iff self is oversized with respect to other. + """ + return not self.asn.issubset(other.asn) or \ + not self.v4.issubset(other.v4) or \ + not self.v6.issubset(other.v6) + + def undersized(self, other): + """ + True iff self is undersized with respect to other. + """ + return not other.asn.issubset(self.asn) or \ + not other.v4.issubset(self.v4) or \ + not other.v6.issubset(self.v6) + + @classmethod + def from_inheritance(cls): + """ + Build a resource bag that just inherits everything from its + parent. + """ + self = cls() + self.asn = resource_set_as() + self.v4 = resource_set_ipv4() + self.v6 = resource_set_ipv6() + self.asn.inherit = True + self.v4.inherit = True + self.v6.inherit = True + return self + + @classmethod + def from_str(cls, text, allow_overlap = False): + """ + Parse a comma-separated text string into a resource_bag. Not + particularly efficient, fix that if and when it becomes an issue. + """ + asns = [] + v4s = [] + v6s = [] + for word in text.split(","): + if "." 
in word: + v4s.append(word) + elif ":" in word: + v6s.append(word) + else: + asns.append(word) + return cls(asn = resource_set_as(",".join(asns), allow_overlap) if asns else None, + v4 = resource_set_ipv4(",".join(v4s), allow_overlap) if v4s else None, + v6 = resource_set_ipv6(",".join(v6s), allow_overlap) if v6s else None) + + @classmethod + def from_POW_rfc3779(cls, resources): + """ + Build a resource_bag from data returned by + rpki.POW.X509.getRFC3779(). + + The conversion to long for v4 and v6 is (intended to be) + temporary: in the long run, we should be using rpki.POW.IPAddress + rather than long here. + """ + asn = inherit_token if resources[0] == "inherit" else [resource_range_as( r[0], r[1]) for r in resources[0] or ()] + v4 = inherit_token if resources[1] == "inherit" else [resource_range_ipv4(r[0], r[1]) for r in resources[1] or ()] + v6 = inherit_token if resources[2] == "inherit" else [resource_range_ipv6(r[0], r[1]) for r in resources[2] or ()] + return cls(resource_set_as(asn) if asn else None, + resource_set_ipv4(v4) if v4 else None, + resource_set_ipv6(v6) if v6 else None) + + def empty(self): + """ + True iff all resource sets in this bag are empty. + """ + return not self.asn and not self.v4 and not self.v6 + + def __nonzero__(self): + return not self.empty() + + def __eq__(self, other): + return self.asn == other.asn and \ + self.v4 == other.v4 and \ + self.v6 == other.v6 and \ + self.valid_until == other.valid_until + + def __ne__(self, other): + return not (self == other) + + def intersection(self, other): + """ + Compute intersection with another resource_bag. valid_until + attribute (if any) inherits from self. + """ + return self.__class__(self.asn & other.asn, + self.v4 & other.v4, + self.v6 & other.v6, + self.valid_until) + + __and__ = intersection + + def union(self, other): + """ + Compute union with another resource_bag. valid_until attribute + (if any) inherits from self. 
+ """ + return self.__class__(self.asn | other.asn, + self.v4 | other.v4, + self.v6 | other.v6, + self.valid_until) + + __or__ = union + + def difference(self, other): + """ + Compute difference against another resource_bag. valid_until + attribute (if any) inherits from self + """ + return self.__class__(self.asn - other.asn, + self.v4 - other.v4, + self.v6 - other.v6, + self.valid_until) + + __sub__ = difference + + def symmetric_difference(self, other): + """ + Compute symmetric difference against another resource_bag. + valid_until attribute (if any) inherits from self + """ + return self.__class__(self.asn ^ other.asn, + self.v4 ^ other.v4, + self.v6 ^ other.v6, + self.valid_until) + + __xor__ = symmetric_difference + + def __str__(self): + s = "" + if self.asn: + s += "ASN: %s" % self.asn + if self.v4: + if s: + s += ", " + s += "V4: %s" % self.v4 + if self.v6: + if s: + s += ", " + s += "V6: %s" % self.v6 + return s + + def __iter__(self): + for r in self.asn: + yield r + for r in self.v4: + yield r + for r in self.v6: + yield r + +# Sadly, there are enough differences between RFC 3779 and the data +# structures in the latest proposed ROA format that we can't just use +# the RFC 3779 code for ROAs. So we need a separate set of classes +# that are similar in concept but different in detail, with conversion +# functions. Such is life. I suppose it might be possible to do this +# with multiple inheritance, but that's probably more bother than it's +# worth. + +class roa_prefix(object): + """ + ROA prefix. This is similar to the resource_range_ip class, but + differs in that it only represents prefixes, never ranges, and + includes the maximum prefix length as an additional value. + + This is a virtual class, you probably don't want to use it directly. + """ + + ## @var prefix + # The prefix itself, an IP address with bits beyond the prefix + # length zeroed. + + ## @var prefixlen + # (Minimum) prefix length. + + ## @var max_prefixlen + # Maxmimum prefix length. 
+ + def __init__(self, prefix, prefixlen, max_prefixlen = None): + """ + Initialize a ROA prefix. max_prefixlen is optional and defaults + to prefixlen. max_prefixlen must not be smaller than prefixlen. + """ + if max_prefixlen is None: + max_prefixlen = prefixlen + assert max_prefixlen >= prefixlen, "Bad max_prefixlen: %d must not be shorter than %d" % (max_prefixlen, prefixlen) + self.prefix = prefix + self.prefixlen = prefixlen + self.max_prefixlen = max_prefixlen + + def __cmp__(self, other): + """ + Compare two ROA prefix objects. Comparision is based on prefix, + prefixlen, and max_prefixlen, in that order. + """ + assert self.__class__ is other.__class__ + return (cmp(self.prefix, other.prefix) or + cmp(self.prefixlen, other.prefixlen) or + cmp(self.max_prefixlen, other.max_prefixlen)) + + def __str__(self): + """ + Convert a ROA prefix to string format. + """ + if self.prefixlen == self.max_prefixlen: + return str(self.prefix) + "/" + str(self.prefixlen) + else: + return str(self.prefix) + "/" + str(self.prefixlen) + "-" + str(self.max_prefixlen) + + def to_resource_range(self): + """ + Convert this ROA prefix to the equivilent resource_range_ip + object. This is an irreversable transformation because it loses + the max_prefixlen attribute, nothing we can do about that. + """ + return self.range_type.make_prefix(self.prefix, self.prefixlen) + + def min(self): + """ + Return lowest address covered by prefix. + """ + return self.prefix + + def max(self): + """ + Return highest address covered by prefix. + """ + return self.prefix | ((1 << (self.prefix.bits - self.prefixlen)) - 1) + + def to_POW_roa_tuple(self): + """ + Convert a resource_range_ip to rpki.POW.ROA.setPrefixes() format. + """ + return self.prefix, self.prefixlen, self.max_prefixlen + + @classmethod + def parse_str(cls, x): + """ + Parse ROA prefix from text (eg, an XML attribute). 
+ """ + r = re_prefix_with_maxlen.match(x) + if r: + return cls(rpki.POW.IPAddress(r.group(1)), int(r.group(2)), int(r.group(3))) + r = re_prefix.match(x) + if r: + return cls(rpki.POW.IPAddress(r.group(1)), int(r.group(2))) + raise rpki.exceptions.BadROAPrefix, 'Bad ROA prefix "%s"' % (x) + +class roa_prefix_ipv4(roa_prefix): + """ + IPv4 ROA prefix. + """ + + ## @var range_type + # Type of corresponding resource_range_ip. + + range_type = resource_range_ipv4 + +class roa_prefix_ipv6(roa_prefix): + """ + IPv6 ROA prefix. + """ + + ## @var range_type + # Type of corresponding resource_range_ip. + + range_type = resource_range_ipv6 + +class roa_prefix_set(list): + """ + Set of ROA prefixes, analogous to the resource_set_ip class. + """ + + def __init__(self, ini = None): + """ + Initialize a ROA prefix set. + """ + list.__init__(self) + if isinstance(ini, str) and len(ini): + self.extend(self.parse_str(s) for s in ini.split(",")) + elif isinstance(ini, (list, tuple)): + self.extend(ini) + else: + assert ini is None or ini == "", "Unexpected initializer: %s" % str(ini) + self.sort() + + def __str__(self): + """ + Convert a ROA prefix set to string format. + """ + return ",".join(str(x) for x in self) + + @classmethod + def parse_str(cls, s): + """ + Parse ROA prefix from text (eg, an XML attribute). + This method is a backwards compatability shim. + """ + return cls.prefix_type.parse_str(s) + + def to_resource_set(self): + """ + Convert a ROA prefix set to a resource set. This is an + irreversable transformation. We have to compute a union here + because ROA prefix sets can include overlaps, while RFC 3779 + resource sets cannot. This is ugly, and there is almost certainly + a more efficient way to do this, but start by getting the output + right before worrying about making it fast or pretty. 
+ """ + r = self.resource_set_type() + s = self.resource_set_type() + s.append(None) + for p in self: + s[0] = p.to_resource_range() + r |= s + return r + + @classmethod + def from_sql(cls, sql, query, args = None): + """ + Create ROA prefix set from an SQL query. + + sql is an object that supports execute() and fetchall() methods + like a DB API 2.0 cursor object. + + query is an SQL query that returns a sequence of (prefix, + prefixlen, max_prefixlen) triples. + """ + + sql.execute(query, args) + return cls([cls.prefix_type(rpki.POW.IPAddress(x), int(y), int(z)) + for (x, y, z) in sql.fetchall()]) + + @classmethod + def from_django(cls, iterable): + """ + Create ROA prefix set from a Django query. + + iterable is something which returns (prefix, prefixlen, + max_prefixlen) triples. + """ + + return cls([cls.prefix_type(rpki.POW.IPAddress(x), int(y), int(z)) + for (x, y, z) in iterable]) + + def to_POW_roa_tuple(self): + """ + Convert ROA prefix set to form used by rpki.POW.ROA.setPrefixes(). + """ + if self: + return tuple(a.to_POW_roa_tuple() for a in self) + else: + return None + + +class roa_prefix_set_ipv4(roa_prefix_set): + """ + Set of IPv4 ROA prefixes. + """ + + ## @var prefix_type + # Type of underlying roa_prefix. + + prefix_type = roa_prefix_ipv4 + + ## @var resource_set_type + # Type of corresponding resource_set_ip class. + + resource_set_type = resource_set_ipv4 + +# Fix back link from resource_set to roa_prefix +resource_set_ipv4.roa_prefix_set_type = roa_prefix_set_ipv4 + +class roa_prefix_set_ipv6(roa_prefix_set): + """ + Set of IPv6 ROA prefixes. + """ + + ## @var prefix_type + # Type of underlying roa_prefix. + + prefix_type = roa_prefix_ipv6 + + ## @var resource_set_type + # Type of corresponding resource_set_ip class. 
+ + resource_set_type = resource_set_ipv6 + +# Fix back link from resource_set to roa_prefix +resource_set_ipv6.roa_prefix_set_type = roa_prefix_set_ipv6 + +class roa_prefix_bag(object): + """ + Container to simplify passing around the combination of an IPv4 ROA + prefix set and an IPv6 ROA prefix set. + """ + + ## @var v4 + # Set of IPv4 prefixes. + + ## @var v6 + # Set of IPv6 prefixes. + + def __init__(self, v4 = None, v6 = None): + self.v4 = v4 or roa_prefix_set_ipv4() + self.v6 = v6 or roa_prefix_set_ipv6() + + def __eq__(self, other): + return self.v4 == other.v4 and self.v6 == other.v6 + + def __ne__(self, other): + return not (self == other) + + +# Test suite for set operations. + +if __name__ == "__main__": + + def testprefix(v): + return " (%s)" % v.to_roa_prefix_set() if isinstance(v, resource_set_ip) else "" + + def test1(t, s1, s2): + if isinstance(s1, str) and isinstance(s2, str): + print "x: ", s1 + print "y: ", s2 + r1 = t(s1) + r2 = t(s2) + print "x: ", r1, testprefix(r1) + print "y: ", r2, testprefix(r2) + v1 = r1._comm(r2) + v2 = r2._comm(r1) + assert v1[0] == v2[1] and v1[1] == v2[0] and v1[2] == v2[2] + for i in r1: assert i in r1 and i.min in r1 and i.max in r1 + for i in r2: assert i in r2 and i.min in r2 and i.max in r2 + for i in v1[0]: assert i in r1 and i not in r2 + for i in v1[1]: assert i not in r1 and i in r2 + for i in v1[2]: assert i in r1 and i in r2 + v1 = r1 | r2 + v2 = r2 | r1 + assert v1 == v2 + print "x|y:", v1, testprefix(v1) + v1 = r1 - r2 + v2 = r2 - r1 + print "x-y:", v1, testprefix(v1) + print "y-x:", v2, testprefix(v2) + v1 = r1 ^ r2 + v2 = r2 ^ r1 + assert v1 == v2 + print "x^y:", v1, testprefix(v1) + v1 = r1 & r2 + v2 = r2 & r1 + assert v1 == v2 + print "x&y:", v1, testprefix(v1) + + def test2(t, s1, s2): + print "x: ", s1 + print "y: ", s2 + r1 = t(s1) + r2 = t(s2) + print "x: ", r1 + print "y: ", r2 + print "x>y:", (r1 > r2) + print "xy:", (r1 > r2) + print "x \ + os.stat(os.path.join(self.rpki_root_dir, 
self.rpki_subject_cert)).st_mtime + + def get_subject_cert(self): + filename = os.path.join(self.rpki_root_dir, self.rpki_subject_cert) + try: + x = rpki.x509.X509(Auto_file = filename) + rpki.log.debug("Read subject cert %s" % filename) + return x + except IOError: + return None + + def set_subject_cert(self, cert): + filename = os.path.join(self.rpki_root_dir, self.rpki_subject_cert) + rpki.log.debug("Writing subject cert %s, SKI %s" % (filename, cert.hSKI())) + f = open(filename, "wb") + f.write(cert.get_DER()) + f.close() + + def del_subject_cert(self): + filename = os.path.join(self.rpki_root_dir, self.rpki_subject_cert) + rpki.log.debug("Deleting subject cert %s" % filename) + os.remove(filename) + + def get_subject_pkcs10(self): + try: + x = rpki.x509.PKCS10(Auto_file = self.rpki_subject_pkcs10) + rpki.log.debug("Read subject PKCS #10 %s" % self.rpki_subject_pkcs10) + return x + except IOError: + return None + + def set_subject_pkcs10(self, pkcs10): + rpki.log.debug("Writing subject PKCS #10 %s" % self.rpki_subject_pkcs10) + f = open(self.rpki_subject_pkcs10, "wb") + f.write(pkcs10.get_DER()) + f.close() + + def del_subject_pkcs10(self): + rpki.log.debug("Deleting subject PKCS #10 %s" % self.rpki_subject_pkcs10) + try: + os.remove(self.rpki_subject_pkcs10) + except OSError: + pass + + def issue_subject_cert_maybe(self, new_pkcs10): + now = rpki.sundial.now() + subject_cert = self.get_subject_cert() + old_pkcs10 = self.get_subject_pkcs10() + if new_pkcs10 is not None and new_pkcs10 != old_pkcs10: + self.set_subject_pkcs10(new_pkcs10) + if subject_cert is not None: + rpki.log.debug("PKCS #10 changed, regenerating subject certificate") + self.revoke_subject_cert(now) + subject_cert = None + if subject_cert is not None and subject_cert.getNotAfter() <= now + self.rpki_subject_regen: + rpki.log.debug("Subject certificate has reached expiration threshold, regenerating") + self.revoke_subject_cert(now) + subject_cert = None + if subject_cert is not None and 
self.root_newer_than_subject(): + rpki.log.debug("Root certificate has changed, regenerating subject") + self.revoke_subject_cert(now) + subject_cert = None + self.get_root_cert() + if subject_cert is not None: + return subject_cert + pkcs10 = old_pkcs10 if new_pkcs10 is None else new_pkcs10 + if pkcs10 is None: + rpki.log.debug("No PKCS #10 request, can't generate subject certificate yet") + return None + resources = self.rpki_root_cert.get_3779resources() + notAfter = now + self.rpki_subject_lifetime + rpki.log.info("Generating subject cert %s with resources %s, expires %s" % ( + self.rpki_base_uri + self.rpki_subject_cert, resources, notAfter)) + req_key = pkcs10.getPublicKey() + req_sia = pkcs10.get_SIA() + self.next_serial_number() + subject_cert = self.rpki_root_cert.issue( + keypair = self.rpki_root_key, + subject_key = req_key, + serial = self.serial_number, + sia = req_sia, + aia = self.rpki_root_cert_uri, + crldp = self.rpki_base_uri + self.rpki_root_crl, + resources = resources, + notBefore = now, + notAfter = notAfter) + self.set_subject_cert(subject_cert) + self.generate_crl_and_manifest(now) + return subject_cert + + def generate_crl_and_manifest(self, now): + subject_cert = self.get_subject_cert() + self.next_serial_number() + self.next_crl_number() + while self.revoked and self.revoked[0][1] + 2 * self.rpki_subject_regen < now: + del self.revoked[0] + crl = rpki.x509.CRL.generate( + keypair = self.rpki_root_key, + issuer = self.rpki_root_cert, + serial = self.crl_number, + thisUpdate = now, + nextUpdate = now + self.rpki_subject_regen, + revokedCertificates = self.revoked) + rpki.log.debug("Writing CRL %s" % os.path.join(self.rpki_root_dir, self.rpki_root_crl)) + f = open(os.path.join(self.rpki_root_dir, self.rpki_root_crl), "wb") + f.write(crl.get_DER()) + f.close() + manifest_content = [(self.rpki_root_crl, crl)] + if subject_cert is not None: + manifest_content.append((self.rpki_subject_cert, subject_cert)) + manifest_resources = 
rpki.resource_set.resource_bag.from_inheritance() + manifest_keypair = rpki.x509.RSA.generate() + manifest_cert = self.rpki_root_cert.issue( + keypair = self.rpki_root_key, + subject_key = manifest_keypair.get_public(), + serial = self.serial_number, + sia = (None, None, self.rpki_base_uri + self.rpki_root_manifest), + aia = self.rpki_root_cert_uri, + crldp = self.rpki_base_uri + self.rpki_root_crl, + resources = manifest_resources, + notBefore = now, + notAfter = now + self.rpki_subject_lifetime, + is_ca = False) + manifest = rpki.x509.SignedManifest.build( + serial = self.crl_number, + thisUpdate = now, + nextUpdate = now + self.rpki_subject_regen, + names_and_objs = manifest_content, + keypair = manifest_keypair, + certs = manifest_cert) + rpki.log.debug("Writing manifest %s" % os.path.join(self.rpki_root_dir, self.rpki_root_manifest)) + f = open(os.path.join(self.rpki_root_dir, self.rpki_root_manifest), "wb") + f.write(manifest.get_DER()) + f.close() + + def revoke_subject_cert(self, now): + self.revoked.append((self.get_subject_cert().getSerial(), now)) + + def compose_response(self, r_msg, pkcs10 = None): + subject_cert = self.issue_subject_cert_maybe(pkcs10) + rc = rpki.up_down.class_elt() + rc.class_name = self.rpki_class_name + rc.cert_url = rpki.up_down.multi_uri(self.rpki_root_cert_uri) + rc.from_resource_bag(self.rpki_root_cert.get_3779resources()) + rc.issuer = self.rpki_root_cert + r_msg.payload.classes.append(rc) + if subject_cert is not None: + rc.certs.append(rpki.up_down.certificate_elt()) + rc.certs[0].cert_url = rpki.up_down.multi_uri(self.rpki_base_uri + self.rpki_subject_cert) + rc.certs[0].cert = subject_cert + + def up_down_handler(self, query, path, cb): + try: + q_cms = cms_msg(DER = query) + q_msg = q_cms.unwrap((self.bpki_ta, self.child_bpki_cert)) + self.cms_timestamp = q_cms.check_replay(self.cms_timestamp, path) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + return cb(400, reason = 
"Could not process PDU: %s" % e) + + def done(r_msg): + cb(200, body = cms_msg().wrap( + r_msg, self.rootd_bpki_key, self.rootd_bpki_cert, + self.rootd_bpki_crl if self.include_bpki_crl else None)) + + try: + q_msg.serve_top_level(None, done) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + try: + done(q_msg.serve_error(e)) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + cb(500, reason = "Could not process PDU: %s" % e) + + + def next_crl_number(self): + if self.crl_number is None: + try: + crl = rpki.x509.CRL(DER_file = os.path.join(self.rpki_root_dir, self.rpki_root_crl)) + self.crl_number = crl.getCRLNumber() + except: # pylint: disable=W0702 + self.crl_number = 0 + self.crl_number += 1 + return self.crl_number + + + def next_serial_number(self): + if self.serial_number is None: + subject_cert = self.get_subject_cert() + if subject_cert is not None: + self.serial_number = subject_cert.getSerial() + 1 + else: + self.serial_number = 0 + self.serial_number += 1 + return self.serial_number + + + def __init__(self): + + global rootd + rootd = self # Gross, but simpler than what we'd have to do otherwise + + self.rpki_root_cert = None + self.serial_number = None + self.crl_number = None + self.revoked = [] + self.cms_timestamp = None + + os.environ["TZ"] = "UTC" + time.tzset() + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("-c", "--config", + help = "override default location of configuration file") + parser.add_argument("-d", "--debug", action = "store_true", + help = "enable debugging mode") + parser.add_argument("-f", "--foreground", action = "store_true", + help = "do not daemonize") + parser.add_argument("--pidfile", + help = "override default location of pid file") + args = parser.parse_args() + + rpki.log.init("rootd", use_syslog = not args.debug) + + self.cfg = rpki.config.parser(args.config, "rootd") + 
self.cfg.set_global_flags() + + if not args.foreground and not args.debug: + rpki.daemonize.daemon(pidfile = args.pidfile) + + self.bpki_ta = rpki.x509.X509(Auto_update = self.cfg.get("bpki-ta")) + self.rootd_bpki_key = rpki.x509.RSA( Auto_update = self.cfg.get("rootd-bpki-key")) + self.rootd_bpki_cert = rpki.x509.X509(Auto_update = self.cfg.get("rootd-bpki-cert")) + self.rootd_bpki_crl = rpki.x509.CRL( Auto_update = self.cfg.get("rootd-bpki-crl")) + self.child_bpki_cert = rpki.x509.X509(Auto_update = self.cfg.get("child-bpki-cert")) + + self.http_server_host = self.cfg.get("server-host", "") + self.http_server_port = self.cfg.getint("server-port") + + self.rpki_class_name = self.cfg.get("rpki-class-name", "wombat") + + self.rpki_root_dir = self.cfg.get("rpki-root-dir") + self.rpki_base_uri = self.cfg.get("rpki-base-uri", "rsync://" + self.rpki_class_name + ".invalid/") + + self.rpki_root_key = rpki.x509.RSA(Auto_update = self.cfg.get("rpki-root-key")) + self.rpki_root_cert_file = self.cfg.get("rpki-root-cert") + self.rpki_root_cert_uri = self.cfg.get("rpki-root-cert-uri", self.rpki_base_uri + "root.cer") + + self.rpki_root_manifest = self.cfg.get("rpki-root-manifest", "root.mft") + self.rpki_root_crl = self.cfg.get("rpki-root-crl", "root.crl") + self.rpki_subject_cert = self.cfg.get("rpki-subject-cert", "child.cer") + self.rpki_subject_pkcs10 = self.cfg.get("rpki-subject-pkcs10", "child.pkcs10") + + self.rpki_subject_lifetime = rpki.sundial.timedelta.parse(self.cfg.get("rpki-subject-lifetime", "8w")) + self.rpki_subject_regen = rpki.sundial.timedelta.parse(self.cfg.get("rpki-subject-regen", self.rpki_subject_lifetime.convert_to_seconds() / 2)) + + self.include_bpki_crl = self.cfg.getboolean("include-bpki-crl", False) + + rpki.http.server(host = self.http_server_host, + port = self.http_server_port, + handlers = self.up_down_handler) diff --git a/rpki/rpkic.py b/rpki/rpkic.py new file mode 100644 index 00000000..d5339f5b --- /dev/null +++ b/rpki/rpkic.py @@ -0,0 
+1,877 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Command line configuration and control tool for rpkid et al. + +Type "help" at the internal prompt, or run the program with the --help option for +an overview of the available commands; type "help foo" for (more) detailed help +on the "foo" command. +""" + +# NB: As of this writing, I'm trying really hard to avoid having this +# program depend on a Django settings.py file. This may prove to be a +# waste of time in the long run, but for now, this means that one +# has to be careful about exactly how and when one imports Django +# modules, or anything that imports Django modules. Bottom line is +# that we don't import such modules until we need them. + +import os +import argparse +import sys +import time +import rpki.config +import rpki.sundial +import rpki.log +import rpki.http +import rpki.resource_set +import rpki.relaxng +import rpki.exceptions +import rpki.left_right +import rpki.x509 +import rpki.async +import rpki.version + +from rpki.cli import Cmd, BadCommandSyntax, parsecmd, cmdarg + +class BadPrefixSyntax(Exception): "Bad prefix syntax." 
+class CouldntTalkToDaemon(Exception): "Couldn't talk to daemon." +class BadXMLMessage(Exception): "Bad XML message." +class PastExpiration(Exception): "Expiration date has already passed." +class CantRunRootd(Exception): "Can't run rootd." + +module_doc = __doc__ + +class main(Cmd): + + prompt = "rpkic> " + + completedefault = Cmd.filename_complete + + # Top-level argparser, for stuff that one might want when starting + # up the interactive command loop. Not sure -i belongs here, but + # it's harmless so leave it here for the moment. + + top_argparser = argparse.ArgumentParser(add_help = False) + top_argparser.add_argument("-c", "--config", + help = "override default location of configuration file") + top_argparser.add_argument("-i", "--identity", "--handle", + help = "set initial entity handle") + top_argparser.add_argument("--profile", + help = "enable profiling, saving data to PROFILE") + + # Argparser for non-interactive commands (no command loop). + + full_argparser = argparse.ArgumentParser(parents = [top_argparser], + description = module_doc) + argsubparsers = full_argparser.add_subparsers(title = "Commands", metavar = "") + + def __init__(self): + os.environ["TZ"] = "UTC" + time.tzset() + + # Try parsing just the arguments that make sense if we're + # going to be running an interactive command loop. If that + # parses everything, we're interactive, otherwise, it's either + # a non-interactive command or a parse error, so we let the full + # parser sort that out for us. 
+ + args, argv = self.top_argparser.parse_known_args() + self.interactive = not argv + if not self.interactive: + args = self.full_argparser.parse_args() + + self.cfg_file = args.config + self.handle = args.identity + + if args.profile: + import cProfile + prof = cProfile.Profile() + try: + prof.runcall(self.main, args) + finally: + prof.dump_stats(args.profile) + print "Dumped profile data to %s" % args.profile + else: + self.main(args) + + def main(self, args): + rpki.log.init("rpkic", use_syslog = False) + self.read_config() + if self.interactive: + Cmd.__init__(self) + else: + args.func(self, args) + + def read_config(self): + global rpki # pylint: disable=W0602 + + try: + cfg = rpki.config.parser(self.cfg_file, "myrpki") + cfg.set_global_flags() + except IOError, e: + sys.exit("%s: %s" % (e.strerror, e.filename)) + + self.histfile = cfg.get("history_file", os.path.expanduser("~/.rpkic_history")) + self.autosync = cfg.getboolean("autosync", True, section = "rpkic") + + from django.conf import settings + + settings.configure( + DATABASES = { "default" : { + "ENGINE" : "django.db.backends.mysql", + "NAME" : cfg.get("sql-database", section = "irdbd"), + "USER" : cfg.get("sql-username", section = "irdbd"), + "PASSWORD" : cfg.get("sql-password", section = "irdbd"), + "HOST" : "", + "PORT" : "", + "OPTIONS" : { "init_command": "SET storage_engine=INNODB" }}}, + INSTALLED_APPS = ("rpki.irdb",), + ) + + import rpki.irdb # pylint: disable=W0621 + + try: + rpki.irdb.models.ca_certificate_lifetime = rpki.sundial.timedelta.parse( + cfg.get("bpki_ca_certificate_lifetime", section = "rpkic")) + except rpki.config.ConfigParser.Error: + pass + + try: + rpki.irdb.models.ee_certificate_lifetime = rpki.sundial.timedelta.parse( + cfg.get("bpki_ee_certificate_lifetime", section = "rpkic")) + except rpki.config.ConfigParser.Error: + pass + + try: + rpki.irdb.models.crl_interval = rpki.sundial.timedelta.parse( + cfg.get("bpki_crl_interval", section = "rpkic")) + except 
rpki.config.ConfigParser.Error: + pass + + import django.core.management + django.core.management.call_command("syncdb", verbosity = 0, load_initial_data = False) + + self.zoo = rpki.irdb.Zookeeper(cfg = cfg, handle = self.handle, logstream = sys.stdout) + + + def do_help(self, arg): + """ + List available commands with "help" or detailed help with "help cmd". + """ + + argv = arg.split() + + if not argv: + #return self.full_argparser.print_help() + return self.print_topics( + self.doc_header, + sorted(set(name[3:] for name in self.get_names() + if name.startswith("do_") + and getattr(self, name).__doc__)), + 15, 80) + + try: + return getattr(self, "help_" + argv[0])() + except AttributeError: + pass + + func = getattr(self, "do_" + argv[0], None) + + try: + return func.argparser.print_help() + except AttributeError: + pass + + try: + return self.stdout.write(func.__doc__ + "\n") + except AttributeError: + pass + + self.stdout.write((self.nohelp + "\n") % arg) + + + def irdb_handle_complete(self, manager, text, line, begidx, endidx): + return [obj.handle for obj in manager.all() if obj.handle and obj.handle.startswith(text)] + + + @parsecmd(argsubparsers, + cmdarg("handle", help = "new handle")) + def do_select_identity(self, args): + """ + Select an identity handle for use with later commands. + """ + + self.zoo.reset_identity(args.handle) + + def complete_select_identity(self, *args): + return self.irdb_handle_complete(rpki.irdb.ResourceHolderCA.objects, *args) + + + @parsecmd(argsubparsers) + def do_initialize(self, args): + """ + Initialize an RPKI installation. DEPRECATED. + + This command reads the configuration file, creates the BPKI and + EntityDB directories, generates the initial BPKI certificates, and + creates an XML file describing the resource-holding aspect of this + RPKI installation. 
+ """ + + rootd_case = self.zoo.run_rootd and self.zoo.handle == self.zoo.cfg.get("handle") + + r = self.zoo.initialize() + r.save("%s.identity.xml" % self.zoo.handle, + None if rootd_case else sys.stdout) + + if rootd_case: + r = self.zoo.configure_rootd() + if r is not None: + r.save("%s.%s.repository-request.xml" % (self.zoo.handle, self.zoo.handle), sys.stdout) + + self.zoo.write_bpki_files() + + + @parsecmd(argsubparsers, + cmdarg("handle", help = "handle of entity to create")) + def do_create_identity(self, args): + """ + Create a new resource-holding entity. + + Returns XML file describing the new resource holder. + + This command is idempotent: calling it for a resource holder which + already exists returns the existing identity. + """ + + self.zoo.reset_identity(args.handle) + + r = self.zoo.initialize_resource_bpki() + r.save("%s.identity.xml" % self.zoo.handle, sys.stdout) + + + @parsecmd(argsubparsers) + def do_initialize_server_bpki(self, args): + """ + Initialize server BPKI portion of an RPKI installation. + + Reads server configuration from configuration file and creates the + server BPKI objects needed to start daemons. + """ + + self.zoo.initialize_server_bpki() + self.zoo.write_bpki_files() + + + @parsecmd(argsubparsers) + def do_update_bpki(self, args): + """ + Update BPKI certificates. Assumes an existing RPKI installation. + + Basic plan here is to reissue all BPKI certificates we can, right + now. In the long run we might want to be more clever about only + touching ones that need maintenance, but this will do for a start. + + We also reissue CRLs for all CAs. + + Most likely this should be run under cron. 
+ """ + + self.zoo.update_bpki() + self.zoo.write_bpki_files() + try: + self.zoo.synchronize_bpki() + except Exception, e: + print "Couldn't push updated BPKI material into daemons: %s" % e + + + @parsecmd(argsubparsers, + cmdarg("--child_handle", help = "override default handle for new child"), + cmdarg("--valid_until", help = "override default validity interval"), + cmdarg("child_xml", help = "XML file containing child's identity")) + def do_configure_child(self, args): + """ + Configure a new child of this RPKI entity. + + This command extracts the child's data from an XML input file, + cross-certifies the child's resource-holding BPKI certificate, and + generates an XML output file describing the relationship between + the child and this parent, including this parent's BPKI data and + up-down protocol service URI. + """ + + r, child_handle = self.zoo.configure_child(args.child_xml, args.child_handle, args.valid_until) + r.save("%s.%s.parent-response.xml" % (self.zoo.handle, child_handle), sys.stdout) + self.zoo.synchronize_ca() + + + @parsecmd(argsubparsers, + cmdarg("child_handle", help = "handle of child to delete")) + def do_delete_child(self, args): + """ + Delete a child of this RPKI entity. + """ + + try: + self.zoo.delete_child(args.child_handle) + self.zoo.synchronize_ca() + except rpki.irdb.ResourceHolderCA.DoesNotExist: + print "No such resource holder \"%s\"" % self.zoo.handle + except rpki.irdb.Child.DoesNotExist: + print "No such child \"%s\"" % args.child_handle + + def complete_delete_child(self, *args): + return self.irdb_handle_complete(self.zoo.resource_ca.children, *args) + + + @parsecmd(argsubparsers, + cmdarg("--parent_handle", help = "override default handle for new parent"), + cmdarg("parent_xml", help = "XML file containing parent's response")) + def do_configure_parent(self, args): + """ + Configure a new parent of this RPKI entity. 
+ + This command reads the parent's response XML, extracts the + parent's BPKI and service URI information, cross-certifies the + parent's BPKI data into this entity's BPKI, and checks for offers + or referrals of publication service. If a publication offer or + referral is present, we generate a request-for-service message to + that repository, in case the user wants to avail herself of the + referral or offer. + + We do NOT attempt automatic synchronization with rpkid at the + completion of this command, because synchronization at this point + will usually fail due to the repository not being set up yet. If + you know what you are doing and for some reason really want to + synchronize here, run the synchronize command yourself. + """ + + r, parent_handle = self.zoo.configure_parent(args.parent_xml, args.parent_handle) + r.save("%s.%s.repository-request.xml" % (self.zoo.handle, parent_handle), sys.stdout) + + + @parsecmd(argsubparsers, + cmdarg("parent_handle", help = "handle of parent to delete")) + def do_delete_parent(self, args): + """ + Delete a parent of this RPKI entity. + """ + + try: + self.zoo.delete_parent(args.parent_handle) + self.zoo.synchronize_ca() + except rpki.irdb.ResourceHolderCA.DoesNotExist: + print "No such resource holder \"%s\"" % self.zoo.handle + except rpki.irdb.Parent.DoesNotExist: + print "No such parent \"%s\"" % args.parent_handle + + def complete_delete_parent(self, *args): + return self.irdb_handle_complete(self.zoo.resource_ca.parents, *args) + + + @parsecmd(argsubparsers) + def do_configure_root(self, args): + """ + Configure the current resource holding identity as a root. + + This configures rpkid to talk to rootd as (one of) its parent(s). + Returns repository request XML file like configure_parent does. 
+ """ + + r = self.zoo.configure_rootd() + if r is not None: + r.save("%s.%s.repository-request.xml" % (self.zoo.handle, self.zoo.handle), sys.stdout) + self.zoo.write_bpki_files() + + + @parsecmd(argsubparsers) + def do_delete_root(self, args): + """ + Delete local RPKI root as parent of the current entity. + + This tells the current rpkid identity () to stop talking to + rootd. + """ + + try: + self.zoo.delete_rootd() + self.zoo.synchronize_ca() + except rpki.irdb.ResourceHolderCA.DoesNotExist: + print "No such resource holder \"%s\"" % self.zoo.handle + except rpki.irdb.Rootd.DoesNotExist: + print "No associated rootd" + + + @parsecmd(argsubparsers, + cmdarg("--flat", help = "use flat publication scheme", action = "store_true"), + cmdarg("--sia_base", help = "override SIA base value"), + cmdarg("client_xml", help = "XML file containing client request")) + def do_configure_publication_client(self, args): + """ + Configure publication server to know about a new client. + + This command reads the client's request for service, + cross-certifies the client's BPKI data, and generates a response + message containing the repository's BPKI data and service URI. + """ + + r, client_handle = self.zoo.configure_publication_client(args.client_xml, args.sia_base, args.flat) + r.save("%s.repository-response.xml" % client_handle.replace("/", "."), sys.stdout) + try: + self.zoo.synchronize_pubd() + except rpki.irdb.Repository.DoesNotExist: + pass + + + @parsecmd(argsubparsers, + cmdarg("client_handle", help = "handle of client to delete")) + def do_delete_publication_client(self, args): + """ + Delete a publication client of this RPKI entity. 
+ """ + + try: + self.zoo.delete_publication_client(args.client_handle) + self.zoo.synchronize_pubd() + except rpki.irdb.ResourceHolderCA.DoesNotExist: + print "No such resource holder \"%s\"" % self.zoo.handle + except rpki.irdb.Client.DoesNotExist: + print "No such client \"%s\"" % args.client_handle + + def complete_delete_publication_client(self, *args): + return self.irdb_handle_complete(self.zoo.server_ca.clients, *args) + + + @parsecmd(argsubparsers, + cmdarg("--parent_handle", help = "override default parent handle"), + cmdarg("repository_xml", help = "XML file containing repository response")) + def do_configure_repository(self, args): + """ + Configure a publication repository for this RPKI entity. + + This command reads the repository's response to this entity's + request for publication service, extracts and cross-certifies the + BPKI data and service URI, and links the repository data with the + corresponding parent data in our local database. + """ + + self.zoo.configure_repository(args.repository_xml, args.parent_handle) + self.zoo.synchronize_ca() + + + @parsecmd(argsubparsers, + cmdarg("repository_handle", help = "handle of repository to delete")) + def do_delete_repository(self, args): + """ + Delete a repository of this RPKI entity. + """ + + try: + self.zoo.delete_repository(args.repository_handle) + self.zoo.synchronize_ca() + except rpki.irdb.ResourceHolderCA.DoesNotExist: + print "No such resource holder \"%s\"" % self.zoo.handle + except rpki.irdb.Repository.DoesNotExist: + print "No such repository \"%s\"" % args.repository_handle + + def complete_delete_repository(self, *args): + return self.irdb_handle_complete(self.zoo.resource_ca.repositories, *args) + + + @parsecmd(argsubparsers) + def do_delete_identity(self, args): + """ + Delete the current RPKI identity (rpkid object). 
+ """ + + try: + self.zoo.delete_self() + self.zoo.synchronize_deleted_ca() + except rpki.irdb.ResourceHolderCA.DoesNotExist: + print "No such resource holder \"%s\"" % self.zoo.handle + + + @parsecmd(argsubparsers, + cmdarg("--valid_until", help = "override default new validity interval"), + cmdarg("child_handle", help = "handle of child to renew")) + def do_renew_child(self, args): + """ + Update validity period for one child entity. + """ + + self.zoo.renew_children(args.child_handle, args.valid_until) + self.zoo.synchronize_ca() + if self.autosync: + self.zoo.run_rpkid_now() + + def complete_renew_child(self, *args): + return self.irdb_handle_complete(self.zoo.resource_ca.children, *args) + + + @parsecmd(argsubparsers, + cmdarg("--valid_until", help = "override default new validity interval")) + def do_renew_all_children(self, args): + """ + Update validity period for all child entities. + """ + + self.zoo.renew_children(None, args.valid_until) + self.zoo.synchronize_ca() + if self.autosync: + self.zoo.run_rpkid_now() + + + @parsecmd(argsubparsers, + cmdarg("prefixes_csv", help = "CSV file listing prefixes")) + def do_load_prefixes(self, args): + """ + Load prefixes into IRDB from CSV file. + """ + + self.zoo.load_prefixes(args.prefixes_csv, True) + if self.autosync: + self.zoo.run_rpkid_now() + + + @parsecmd(argsubparsers) + def do_show_child_resources(self, args): + """ + Show resources assigned to children. + """ + + for child in self.zoo.resource_ca.children.all(): + resources = child.resource_bag + print "Child:", child.handle + if resources.asn: + print " ASN:", resources.asn + if resources.v4: + print " IPv4:", resources.v4 + if resources.v6: + print " IPv6:", resources.v6 + + + @parsecmd(argsubparsers) + def do_show_roa_requests(self, args): + """ + Show ROA requests. 
+ """ + + for roa_request in self.zoo.resource_ca.roa_requests.all(): + prefixes = roa_request.roa_prefix_bag + print "ASN: ", roa_request.asn + if prefixes.v4: + print " IPv4:", prefixes.v4 + if prefixes.v6: + print " IPv6:", prefixes.v6 + + + @parsecmd(argsubparsers) + def do_show_ghostbuster_requests(self, args): + """ + Show Ghostbuster requests. + """ + + for ghostbuster_request in self.zoo.resource_ca.ghostbuster_requests.all(): + print "Parent:", ghostbuster_request.parent or "*" + print ghostbuster_request.vcard + + + @parsecmd(argsubparsers) + def do_show_received_resources(self, args): + """ + Show resources received by this entity from its parent(s). + """ + + for pdu in self.zoo.call_rpkid( + rpki.left_right.list_received_resources_elt.make_pdu(self_handle = self.zoo.handle)): + + print "Parent: ", pdu.parent_handle + print " notBefore:", pdu.notBefore + print " notAfter: ", pdu.notAfter + print " URI: ", pdu.uri + print " SIA URI: ", pdu.sia_uri + print " AIA URI: ", pdu.aia_uri + print " ASN: ", pdu.asn + print " IPv4: ", pdu.ipv4 + print " IPv6: ", pdu.ipv6 + + + @parsecmd(argsubparsers) + def do_show_published_objects(self, args): + """ + Show published objects. + """ + + for pdu in self.zoo.call_rpkid( + rpki.left_right.list_published_objects_elt.make_pdu(self_handle = self.zoo.handle)): + + track = rpki.x509.uri_dispatch(pdu.uri)(Base64 = pdu.obj).tracking_data(pdu.uri) + child = pdu.child_handle + + if child is None: + print track + else: + print track, child + + + @parsecmd(argsubparsers) + def do_show_bpki(self, args): + """ + Show this entity's BPKI objects. 
+ """ + + print "Self: ", self.zoo.resource_ca.handle + print " notBefore:", self.zoo.resource_ca.certificate.getNotBefore() + print " notAfter: ", self.zoo.resource_ca.certificate.getNotAfter() + print " Subject: ", self.zoo.resource_ca.certificate.getSubject() + print " SKI: ", self.zoo.resource_ca.certificate.hSKI() + for bsc in self.zoo.resource_ca.bscs.all(): + print "BSC: ", bsc.handle + print " notBefore:", bsc.certificate.getNotBefore() + print " notAfter: ", bsc.certificate.getNotAfter() + print " Subject: ", bsc.certificate.getSubject() + print " SKI: ", bsc.certificate.hSKI() + for parent in self.zoo.resource_ca.parents.all(): + print "Parent: ", parent.handle + print " notBefore:", parent.certificate.getNotBefore() + print " notAfter: ", parent.certificate.getNotAfter() + print " Subject: ", parent.certificate.getSubject() + print " SKI: ", parent.certificate.hSKI() + print " URL: ", parent.service_uri + for child in self.zoo.resource_ca.children.all(): + print "Child: ", child.handle + print " notBefore:", child.certificate.getNotBefore() + print " notAfter: ", child.certificate.getNotAfter() + print " Subject: ", child.certificate.getSubject() + print " SKI: ", child.certificate.hSKI() + for repository in self.zoo.resource_ca.repositories.all(): + print "Repository: ", repository.handle + print " notBefore:", repository.certificate.getNotBefore() + print " notAfter: ", repository.certificate.getNotAfter() + print " Subject: ", repository.certificate.getSubject() + print " SKI: ", repository.certificate.hSKI() + print " URL: ", repository.service_uri + + + @parsecmd(argsubparsers, + cmdarg("asns_csv", help = "CSV file listing ASNs")) + def do_load_asns(self, args): + """ + Load ASNs into IRDB from CSV file. 
+ """ + + self.zoo.load_asns(args.asns_csv, True) + if self.autosync: + self.zoo.run_rpkid_now() + + + @parsecmd(argsubparsers, + cmdarg("roa_requests_csv", help = "CSV file listing ROA requests")) + def do_load_roa_requests(self, args): + """ + Load ROA requests into IRDB from CSV file. + """ + + self.zoo.load_roa_requests(args.roa_requests_csv) + if self.autosync: + self.zoo.run_rpkid_now() + + + @parsecmd(argsubparsers, + cmdarg("ghostbuster_requests", help = "file listing Ghostbuster requests as a sequence of VCards")) + def do_load_ghostbuster_requests(self, args): + """ + Load Ghostbuster requests into IRDB from file. + """ + + self.zoo.load_ghostbuster_requests(args.ghostbuster_requests) + if self.autosync: + self.zoo.run_rpkid_now() + + + @parsecmd(argsubparsers, + cmdarg("--valid_until", help = "override default validity interval"), + cmdarg("router_certificate_request_xml", help = "file containing XML router certificate request")) + def do_add_router_certificate_request(self, args): + """ + Load router certificate request(s) into IRDB from XML file. + """ + + self.zoo.add_router_certificate_request(args.router_certificate_request_xml, args.valid_until) + if self.autosync: + self.zoo.run_rpkid_now() + + @parsecmd(argsubparsers, + cmdarg("gski", help = "g(SKI) of router certificate request to delete")) + def do_delete_router_certificate_request(self, args): + """ + Delete a router certificate request from the IRDB. 
+ """ + + try: + self.zoo.delete_router_certificate_request(args.gski) + if self.autosync: + self.zoo.run_rpkid_now() + except rpki.irdb.ResourceHolderCA.DoesNotExist: + print "No such resource holder \"%s\"" % self.zoo.handle + except rpki.irdb.EECertificateRequest.DoesNotExist: + print "No certificate request matching g(SKI) \"%s\"" % args.gski + + def complete_delete_router_certificate_request(self, text, line, begidx, endidx): + return [obj.gski for obj in self.zoo.resource_ca.ee_certificate_requests.all() + if obj.gski and obj.gski.startswith(text)] + + + @parsecmd(argsubparsers) + def do_show_router_certificate_requests(self, args): + """ + Show this entity's router certificate requests. + """ + + for req in self.zoo.resource_ca.ee_certificate_requests.all(): + print "%s %s %s %s" % (req.gski, req.valid_until, req.cn, req.sn) + + + # What about updates? Validity interval, change router-id, change + # ASNs. Not sure what this looks like yet, blunder ahead with the + # core code while mulling over the UI. + + + @parsecmd(argsubparsers) + def do_synchronize(self, args): + """ + Whack daemons to match IRDB. + + This command may be replaced by implicit synchronization embedded + in of other commands, haven't decided yet. + """ + + self.zoo.synchronize() + + + @parsecmd(argsubparsers) + def do_force_publication(self, args): + """ + Whack rpkid to force (re)publication of everything. + + This is not usually necessary, as rpkid automatically publishes + changes it makes, but this command can be useful occasionally when + a fault or configuration error has left rpkid holding data which + it has not been able to publish. + """ + + self.zoo.publish_world_now() + + + @parsecmd(argsubparsers) + def do_force_reissue(self, args): + """ + Whack rpkid to force reissuance of everything. 
+ + This is not usually necessary, as rpkid reissues automatically + objects automatically as needed, but this command can be useful + occasionally when a fault or configuration error has prevented + rpkid from reissuing when it should have. + """ + + self.zoo.reissue() + + + @parsecmd(argsubparsers) + def do_up_down_rekey(self, args): + """ + Initiate a "rekey" operation. + + This tells rpkid to generate new keys for each certificate issued + to it via the up-down protocol. + + Rekeying is the first stage of a key rollover operation. You will + need to follow it up later with a "revoke" operation to clean up + the old keys + """ + + self.zoo.rekey() + + + @parsecmd(argsubparsers) + def do_up_down_revoke(self, args): + """ + Initiate a "revoke" operation. + + This tells rpkid to clean up old keys formerly used by + certificates issued to it via the up-down protocol. + + This is the cleanup stage of a key rollover operation. + """ + + self.zoo.revoke() + + + @parsecmd(argsubparsers) + def do_revoke_forgotten(self, args): + """ + Initiate a "revoke_forgotten" operation. + + This tells rpkid to ask its parent to revoke certificates for + which rpkid does not know the private keys. + + This should never happen during ordinary operation, but can happen + if rpkid is misconfigured or its database has been damaged, so we + need a way to resynchronize rpkid with its parent in such cases. + We could do this automatically, but as we don't know the precise + cause of the failure we don't know if it's recoverable locally + (eg, from an SQL backup), so we require a manual trigger before + discarding possibly-useful certificates. + """ + + self.zoo.revoke_forgotten() + + + @parsecmd(argsubparsers) + def do_clear_all_sql_cms_replay_protection(self, args): + """ + Tell rpkid and pubd to clear replay protection. + + This clears the replay protection timestamps stored in SQL for all + entities known to rpkid and pubd. 
This is a fairly blunt + instrument, but as we don't expect this to be necessary except in + the case of gross misconfiguration, it should suffice + """ + + self.zoo.clear_all_sql_cms_replay_protection() + + + @parsecmd(argsubparsers) + def do_version(self, args): + """ + Show current software version number. + """ + + print rpki.version.VERSION + + + @parsecmd(argsubparsers) + def do_list_self_handles(self, args): + """ + List all handles in this rpkid instance. + """ + + for ca in rpki.irdb.ResourceHolderCA.objects.all(): + print ca.handle + diff --git a/rpki/rpkid.py b/rpki/rpkid.py new file mode 100644 index 00000000..d6163bee --- /dev/null +++ b/rpki/rpkid.py @@ -0,0 +1,2500 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +RPKI CA engine. 
+""" + +import os +import time +import argparse +import sys +import re +import random +import base64 +import rpki.resource_set +import rpki.up_down +import rpki.left_right +import rpki.x509 +import rpki.sql +import rpki.http +import rpki.config +import rpki.exceptions +import rpki.relaxng +import rpki.log +import rpki.async +import rpki.daemonize +import rpki.rpkid_tasks + +class main(object): + """ + Main program for rpkid. + """ + + def __init__(self): + + os.environ["TZ"] = "UTC" + time.tzset() + + self.irdbd_cms_timestamp = None + self.irbe_cms_timestamp = None + self.task_current = None + self.task_queue = [] + + parser = argparse.ArgumentParser(description = __doc__) + parser.add_argument("-c", "--config", + help = "override default location of configuration file") + parser.add_argument("-d", "--debug", action = "store_true", + help = "enable debugging mode") + parser.add_argument("-f", "--foreground", action = "store_true", + help = "do not daemonize") + parser.add_argument("--pidfile", + help = "override default location of pid file") + parser.add_argument("--profile", + help = "enable profiling, saving data to PROFILE") + args = parser.parse_args() + + self.profile = args.profile + + rpki.log.init("rpkid", use_syslog = not args.debug) + + self.cfg = rpki.config.parser(args.config, "rpkid") + self.cfg.set_global_flags() + + if not args.foreground and not args.debug: + rpki.daemonize.daemon(pidfile = args.pidfile) + + if self.profile: + import cProfile + prof = cProfile.Profile() + try: + prof.runcall(self.main) + finally: + prof.dump_stats(self.profile) + rpki.log.info("Dumped profile data to %s" % self.profile) + else: + self.main() + + def main(self): + + startup_msg = self.cfg.get("startup-message", "") + if startup_msg: + rpki.log.info(startup_msg) + + if self.profile: + rpki.log.info("Running in profile mode with output to %s" % self.profile) + + self.sql = rpki.sql.session(self.cfg) + + self.bpki_ta = rpki.x509.X509(Auto_update = 
self.cfg.get("bpki-ta")) + self.irdb_cert = rpki.x509.X509(Auto_update = self.cfg.get("irdb-cert")) + self.irbe_cert = rpki.x509.X509(Auto_update = self.cfg.get("irbe-cert")) + self.rpkid_cert = rpki.x509.X509(Auto_update = self.cfg.get("rpkid-cert")) + self.rpkid_key = rpki.x509.RSA( Auto_update = self.cfg.get("rpkid-key")) + + self.irdb_url = self.cfg.get("irdb-url") + + self.http_server_host = self.cfg.get("server-host", "") + self.http_server_port = self.cfg.getint("server-port") + + self.publication_kludge_base = self.cfg.get("publication-kludge-base", "publication/") + + # Icky hack to let Iain do some testing quickly, should go away + # once we sort out whether we can make this change permanent. + # + # OK, the stuff to add router certificate support makes enough + # other changes that we're going to need a migration program in + # any case, so might as well throw the switch here too, or at + # least find out if it (still) works as expected. + + self.merge_publication_directories = self.cfg.getboolean("merge_publication_directories", + True) + + self.use_internal_cron = self.cfg.getboolean("use-internal-cron", True) + + self.initial_delay = random.randint(self.cfg.getint("initial-delay-min", 10), + self.cfg.getint("initial-delay-max", 120)) + + # Should be much longer in production + self.cron_period = rpki.sundial.timedelta(seconds = self.cfg.getint("cron-period", 120)) + self.cron_keepalive = rpki.sundial.timedelta(seconds = self.cfg.getint("cron-keepalive", 0)) + if not self.cron_keepalive: + self.cron_keepalive = self.cron_period * 4 + self.cron_timeout = None + + self.start_cron() + + rpki.http.server( + host = self.http_server_host, + port = self.http_server_port, + handlers = (("/left-right", self.left_right_handler), + ("/up-down/", self.up_down_handler), + ("/cronjob", self.cronjob_handler))) + + + def start_cron(self): + """ + Start clock for rpkid's internal cron process. 
+ """ + + if self.use_internal_cron: + self.cron_timer = rpki.async.timer(handler = self.cron) + when = rpki.sundial.now() + rpki.sundial.timedelta(seconds = self.initial_delay) + rpki.log.debug("Scheduling initial cron pass at %s" % when) + self.cron_timer.set(when) + else: + rpki.log.debug("Not using internal clock, start_cron() call ignored") + + def irdb_query(self, callback, errback, *q_pdus, **kwargs): + """ + Perform an IRDB callback query. + """ + + rpki.log.trace() + + try: + q_types = tuple(type(q_pdu) for q_pdu in q_pdus) + + expected_pdu_count = kwargs.pop("expected_pdu_count", None) + assert len(kwargs) == 0 + + q_msg = rpki.left_right.msg.query() + q_msg.extend(q_pdus) + q_der = rpki.left_right.cms_msg().wrap(q_msg, self.rpkid_key, self.rpkid_cert) + + def unwrap(r_der): + try: + r_cms = rpki.left_right.cms_msg(DER = r_der) + r_msg = r_cms.unwrap((self.bpki_ta, self.irdb_cert)) + self.irdbd_cms_timestamp = r_cms.check_replay(self.irdbd_cms_timestamp, self.irdb_url) + if not r_msg.is_reply() or not all(type(r_pdu) in q_types for r_pdu in r_msg): + raise rpki.exceptions.BadIRDBReply( + "Unexpected response to IRDB query: %s" % r_cms.pretty_print_content()) + if expected_pdu_count is not None and len(r_msg) != expected_pdu_count: + assert isinstance(expected_pdu_count, (int, long)) + raise rpki.exceptions.BadIRDBReply( + "Expected exactly %d PDU%s from IRDB: %s" % ( + expected_pdu_count, "" if expected_pdu_count == 1 else "s", + r_cms.pretty_print_content())) + callback(r_msg) + except Exception, e: + errback(e) + + rpki.http.client( + url = self.irdb_url, + msg = q_der, + callback = unwrap, + errback = errback) + + except Exception, e: + errback(e) + + + def irdb_query_child_resources(self, self_handle, child_handle, callback, errback): + """ + Ask IRDB about a child's resources. 
+ """ + + rpki.log.trace() + + q_pdu = rpki.left_right.list_resources_elt() + q_pdu.self_handle = self_handle + q_pdu.child_handle = child_handle + + def done(r_msg): + callback(rpki.resource_set.resource_bag( + asn = r_msg[0].asn, + v4 = r_msg[0].ipv4, + v6 = r_msg[0].ipv6, + valid_until = r_msg[0].valid_until)) + + self.irdb_query(done, errback, q_pdu, expected_pdu_count = 1) + + def irdb_query_roa_requests(self, self_handle, callback, errback): + """ + Ask IRDB about self's ROA requests. + """ + + rpki.log.trace() + + q_pdu = rpki.left_right.list_roa_requests_elt() + q_pdu.self_handle = self_handle + + self.irdb_query(callback, errback, q_pdu) + + def irdb_query_ghostbuster_requests(self, self_handle, parent_handles, callback, errback): + """ + Ask IRDB about self's ghostbuster record requests. + """ + + rpki.log.trace() + + q_pdus = [] + + for parent_handle in parent_handles: + q_pdu = rpki.left_right.list_ghostbuster_requests_elt() + q_pdu.self_handle = self_handle + q_pdu.parent_handle = parent_handle + q_pdus.append(q_pdu) + + self.irdb_query(callback, errback, *q_pdus) + + def irdb_query_ee_certificate_requests(self, self_handle, callback, errback): + """ + Ask IRDB about self's EE certificate requests. + """ + + rpki.log.trace() + + q_pdu = rpki.left_right.list_ee_certificate_requests_elt() + q_pdu.self_handle = self_handle + + self.irdb_query(callback, errback, q_pdu) + + def left_right_handler(self, query, path, cb): + """ + Process one left-right PDU. 
+ """ + + rpki.log.trace() + + def done(r_msg): + reply = rpki.left_right.cms_msg().wrap(r_msg, self.rpkid_key, self.rpkid_cert) + self.sql.sweep() + cb(200, body = reply) + + try: + q_cms = rpki.left_right.cms_msg(DER = query) + q_msg = q_cms.unwrap((self.bpki_ta, self.irbe_cert)) + self.irbe_cms_timestamp = q_cms.check_replay(self.irbe_cms_timestamp, path) + if not q_msg.is_query(): + raise rpki.exceptions.BadQuery, "Message type is not query" + q_msg.serve_top_level(self, done) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + rpki.log.traceback() + cb(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) + + up_down_url_regexp = re.compile("/up-down/([-A-Z0-9_]+)/([-A-Z0-9_]+)$", re.I) + + def up_down_handler(self, query, path, cb): + """ + Process one up-down PDU. + """ + + rpki.log.trace() + + def done(reply): + self.sql.sweep() + cb(200, body = reply) + + try: + match = self.up_down_url_regexp.search(path) + if match is None: + raise rpki.exceptions.BadContactURL, "Bad URL path received in up_down_handler(): %s" % path + self_handle, child_handle = match.groups() + child = rpki.left_right.child_elt.sql_fetch_where1(self, "self.self_handle = %s AND child.child_handle = %s AND child.self_id = self.self_id", + (self_handle, child_handle), "self") + if child is None: + raise rpki.exceptions.ChildNotFound, "Could not find child %s of self %s in up_down_handler()" % (child_handle, self_handle) + child.serve_up_down(query, done) + except (rpki.async.ExitNow, SystemExit): + raise + except (rpki.exceptions.ChildNotFound, rpki.exceptions.BadContactURL), e: + rpki.log.warn(str(e)) + cb(400, reason = str(e)) + except Exception, e: + rpki.log.traceback() + cb(400, reason = "Could not process PDU: %s" % e) + + def checkpoint(self, force = False): + """ + Record that we were still alive when we got here, by resetting + keepalive timer. 
+ """ + if force or self.cron_timeout is not None: + self.cron_timeout = rpki.sundial.now() + self.cron_keepalive + + def task_add(self, task): + """ + Add a task to the scheduler task queue, unless it's already queued. + """ + if task not in self.task_queue: + rpki.log.debug("Adding %r to task queue" % task) + self.task_queue.append(task) + return True + else: + rpki.log.debug("Task %r was already in the task queue" % task) + return False + + def task_next(self): + """ + Pull next task from the task queue and put it the deferred event + queue (we don't want to run it directly, as that could eventually + blow out our call stack). + """ + try: + self.task_current = self.task_queue.pop(0) + except IndexError: + self.task_current = None + else: + rpki.async.event_defer(self.task_current) + + def task_run(self): + """ + Run first task on the task queue, unless one is running already. + """ + if self.task_current is None: + self.task_next() + + def cron(self, cb = None): + """ + Periodic tasks. + """ + + rpki.log.trace() + + now = rpki.sundial.now() + + rpki.log.debug("Starting cron run") + + def done(): + self.sql.sweep() + self.cron_timeout = None + rpki.log.info("Finished cron run started at %s" % now) + if cb is not None: + cb() + + completion = rpki.rpkid_tasks.CompletionHandler(done) + try: + selves = rpki.left_right.self_elt.sql_fetch_all(self) + except Exception, e: + rpki.log.warn("Error pulling self_elts from SQL, maybe SQL server is down? 
(%s)" % e) + else: + for s in selves: + s.schedule_cron_tasks(completion) + nothing_queued = completion.count == 0 + + assert self.use_internal_cron or self.cron_timeout is None + + if self.cron_timeout is not None and self.cron_timeout < now: + rpki.log.warn("cron keepalive threshold %s has expired, breaking lock" % self.cron_timeout) + self.cron_timeout = None + + if self.use_internal_cron: + when = now + self.cron_period + rpki.log.debug("Scheduling next cron run at %s" % when) + self.cron_timer.set(when) + + if self.cron_timeout is None: + self.checkpoint(self.use_internal_cron) + self.task_run() + + elif self.use_internal_cron: + rpki.log.warn("cron already running, keepalive will expire at %s" % self.cron_timeout) + + if nothing_queued: + done() + + def cronjob_handler(self, query, path, cb): + """ + External trigger for periodic tasks. This is somewhat obsolete + now that we have internal timers, but the test framework still + uses it. + """ + + def done(): + cb(200, body = "OK") + + if self.use_internal_cron: + cb(500, reason = "Running cron internally") + else: + rpki.log.debug("Starting externally triggered cron") + self.cron(done) + +class ca_obj(rpki.sql.sql_persistent): + """ + Internal CA object. + """ + + sql_template = rpki.sql.template( + "ca", + "ca_id", + "last_crl_sn", + ("next_crl_update", rpki.sundial.datetime), + "last_issued_sn", + "last_manifest_sn", + ("next_manifest_update", rpki.sundial.datetime), + "sia_uri", + "parent_id", + "parent_resource_class") + + last_crl_sn = 0 + last_issued_sn = 0 + last_manifest_sn = 0 + + def __repr__(self): + return rpki.log.log_repr(self, repr(self.parent), self.parent_resource_class) + + @property + @rpki.sql.cache_reference + def parent(self): + """ + Fetch parent object to which this CA object links. + """ + return rpki.left_right.parent_elt.sql_fetch(self.gctx, self.parent_id) + + @property + def ca_details(self): + """ + Fetch all ca_detail objects that link to this CA object. 
+ """ + return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s", (self.ca_id,)) + + @property + def pending_ca_details(self): + """ + Fetch the pending ca_details for this CA, if any. + """ + return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state = 'pending'", (self.ca_id,)) + + @property + def active_ca_detail(self): + """ + Fetch the active ca_detail for this CA, if any. + """ + return ca_detail_obj.sql_fetch_where1(self.gctx, "ca_id = %s AND state = 'active'", (self.ca_id,)) + + @property + def deprecated_ca_details(self): + """ + Fetch deprecated ca_details for this CA, if any. + """ + return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state = 'deprecated'", (self.ca_id,)) + + @property + def active_or_deprecated_ca_details(self): + """ + Fetch active and deprecated ca_details for this CA, if any. + """ + return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND (state = 'active' OR state = 'deprecated')", (self.ca_id,)) + + @property + def revoked_ca_details(self): + """ + Fetch revoked ca_details for this CA, if any. + """ + return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state = 'revoked'", (self.ca_id,)) + + @property + def issue_response_candidate_ca_details(self): + """ + Fetch ca_details which are candidates for consideration when + processing an up-down issue_response PDU. + """ + #return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND latest_ca_cert IS NOT NULL AND state != 'revoked'", (self.ca_id,)) + return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state != 'revoked'", (self.ca_id,)) + + def construct_sia_uri(self, parent, rc): + """ + Construct the sia_uri value for this CA given configured + information and the parent's up-down protocol list_response PDU. 
+ """ + + sia_uri = rc.suggested_sia_head and rc.suggested_sia_head.rsync() + if not sia_uri or not sia_uri.startswith(parent.sia_base): + sia_uri = parent.sia_base + if not sia_uri.endswith("/"): + raise rpki.exceptions.BadURISyntax, "SIA URI must end with a slash: %s" % sia_uri + # With luck this can go away sometime soon. + if self.gctx.merge_publication_directories: + return sia_uri + else: + return sia_uri + str(self.ca_id) + "/" + + def check_for_updates(self, parent, rc, cb, eb): + """ + Parent has signaled continued existance of a resource class we + already knew about, so we need to check for an updated + certificate, changes in resource coverage, revocation and reissue + with the same key, etc. + """ + + sia_uri = self.construct_sia_uri(parent, rc) + sia_uri_changed = self.sia_uri != sia_uri + if sia_uri_changed: + rpki.log.debug("SIA changed: was %s now %s" % (self.sia_uri, sia_uri)) + self.sia_uri = sia_uri + self.sql_mark_dirty() + + rc_resources = rc.to_resource_bag() + cert_map = dict((c.cert.get_SKI(), c) for c in rc.certs) + + def loop(iterator, ca_detail): + + self.gctx.checkpoint() + + rc_cert = cert_map.pop(ca_detail.public_key.get_SKI(), None) + + if rc_cert is None: + + rpki.log.warn("SKI %s in resource class %s is in database but missing from list_response to %s from %s, maybe parent certificate went away?" 
+ % (ca_detail.public_key.gSKI(), rc.class_name, parent.self.self_handle, parent.parent_handle)) + publisher = publication_queue() + ca_detail.delete(ca = ca_detail.ca, publisher = publisher) + return publisher.call_pubd(iterator, eb) + + else: + + if ca_detail.state == "active" and ca_detail.ca_cert_uri != rc_cert.cert_url.rsync(): + rpki.log.debug("AIA changed: was %s now %s" % (ca_detail.ca_cert_uri, rc_cert.cert_url.rsync())) + ca_detail.ca_cert_uri = rc_cert.cert_url.rsync() + ca_detail.sql_mark_dirty() + + if ca_detail.state in ("pending", "active"): + + if ca_detail.state == "pending": + current_resources = rpki.resource_set.resource_bag() + else: + current_resources = ca_detail.latest_ca_cert.get_3779resources() + + if (ca_detail.state == "pending" or + sia_uri_changed or + ca_detail.latest_ca_cert != rc_cert.cert or + ca_detail.latest_ca_cert.getNotAfter() != rc_resources.valid_until or + current_resources.undersized(rc_resources) or + current_resources.oversized(rc_resources)): + return ca_detail.update( + parent = parent, + ca = self, + rc = rc, + sia_uri_changed = sia_uri_changed, + old_resources = current_resources, + callback = iterator, + errback = eb) + + iterator() + + def done(): + if cert_map: + rpki.log.warn("Unknown certificate SKI%s %s in resource class %s in list_response " + "to %s from %s, maybe you want to \"revoke_forgotten\"?" 
+ % ("" if len(cert_map) == 1 else "s", + ", ".join(c.cert.gSKI() for c in cert_map.values()), + rc.class_name, parent.self.self_handle, parent.parent_handle)) + self.gctx.sql.sweep() + self.gctx.checkpoint() + cb() + + ca_details = self.issue_response_candidate_ca_details + + if True: + skis_parent = set(x.cert.gSKI() + for x in cert_map.itervalues()) + skis_me = set(x.latest_ca_cert.gSKI() + for x in ca_details + if x.latest_ca_cert is not None) + for ski in skis_parent & skis_me: + rpki.log.debug("Parent %s agrees that %s has SKI %s in resource class %s" + % (parent.parent_handle, parent.self.self_handle, ski, rc.class_name)) + for ski in skis_parent - skis_me: + rpki.log.debug("Parent %s thinks %s has SKI %s in resource class %s but I don't think so" + % (parent.parent_handle, parent.self.self_handle, ski, rc.class_name)) + for ski in skis_me - skis_parent: + rpki.log.debug("I think %s has SKI %s in resource class %s but parent %s doesn't think so" + % (parent.self.self_handle, ski, rc.class_name, parent.parent_handle)) + + if ca_details: + rpki.async.iterator(ca_details, loop, done) + else: + rpki.log.warn("Existing resource class %s to %s from %s with no certificates, rekeying" % + (rc.class_name, parent.self.self_handle, parent.parent_handle)) + self.gctx.checkpoint() + self.rekey(cb, eb) + + @classmethod + def create(cls, parent, rc, cb, eb): + """ + Parent has signaled existance of a new resource class, so we need + to create and set up a corresponding CA object. 
+ """ + + self = cls() + self.gctx = parent.gctx + self.parent_id = parent.parent_id + self.parent_resource_class = rc.class_name + self.sql_store() + try: + self.sia_uri = self.construct_sia_uri(parent, rc) + except rpki.exceptions.BadURISyntax: + self.sql_delete() + raise + ca_detail = ca_detail_obj.create(self) + + def done(issue_response): + c = issue_response.payload.classes[0].certs[0] + rpki.log.debug("CA %r received certificate %s" % (self, c.cert_url)) + ca_detail.activate( + ca = self, + cert = c.cert, + uri = c.cert_url, + callback = cb, + errback = eb) + + rpki.log.debug("Sending issue request to %r from %r" % (parent, self.create)) + rpki.up_down.issue_pdu.query(parent, self, ca_detail, done, eb) + + def delete(self, parent, callback): + """ + The list of current resource classes received from parent does not + include the class corresponding to this CA, so we need to delete + it (and its little dog too...). + + All certs published by this CA are now invalid, so need to + withdraw them, the CRL, and the manifest from the repository, + delete all child_cert and ca_detail records associated with this + CA, then finally delete this CA itself. + """ + + def lose(e): + rpki.log.traceback() + rpki.log.warn("Could not delete CA %r, skipping: %s" % (self, e)) + callback() + + def done(): + rpki.log.debug("Deleting %r" % self) + self.sql_delete() + callback() + + publisher = publication_queue() + for ca_detail in self.ca_details: + ca_detail.delete(ca = self, publisher = publisher, allow_failure = True) + publisher.call_pubd(done, lose) + + def next_serial_number(self): + """ + Allocate a certificate serial number. + """ + self.last_issued_sn += 1 + self.sql_mark_dirty() + return self.last_issued_sn + + def next_manifest_number(self): + """ + Allocate a manifest serial number. + """ + self.last_manifest_sn += 1 + self.sql_mark_dirty() + return self.last_manifest_sn + + def next_crl_number(self): + """ + Allocate a CRL serial number. 
+ """ + self.last_crl_sn += 1 + self.sql_mark_dirty() + return self.last_crl_sn + + def rekey(self, cb, eb): + """ + Initiate a rekey operation for this ca. Generate a new keypair. + Request cert from parent using new keypair. Mark result as our + active ca_detail. Reissue all child certs issued by this ca using + the new ca_detail. + """ + + rpki.log.trace() + + parent = self.parent + old_detail = self.active_ca_detail + new_detail = ca_detail_obj.create(self) + + def done(issue_response): + c = issue_response.payload.classes[0].certs[0] + rpki.log.debug("CA %r received certificate %s" % (self, c.cert_url)) + new_detail.activate( + ca = self, + cert = c.cert, + uri = c.cert_url, + predecessor = old_detail, + callback = cb, + errback = eb) + + rpki.log.debug("Sending issue request to %r from %r" % (parent, self.rekey)) + rpki.up_down.issue_pdu.query(parent, self, new_detail, done, eb) + + def revoke(self, cb, eb, revoke_all = False): + """ + Revoke deprecated ca_detail objects associated with this CA, or + all ca_details associated with this CA if revoke_all is set. + """ + + rpki.log.trace() + + def loop(iterator, ca_detail): + ca_detail.revoke(cb = iterator, eb = eb) + + ca_details = self.ca_details if revoke_all else self.deprecated_ca_details + + rpki.async.iterator(ca_details, loop, cb) + + def reissue(self, cb, eb): + """ + Reissue all current certificates issued by this CA. + """ + + ca_detail = self.active_ca_detail + if ca_detail: + ca_detail.reissue(cb, eb) + else: + cb() + +class ca_detail_obj(rpki.sql.sql_persistent): + """ + Internal CA detail object. 
+ """ + + sql_template = rpki.sql.template( + "ca_detail", + "ca_detail_id", + ("private_key_id", rpki.x509.RSA), + ("public_key", rpki.x509.PublicKey), + ("latest_ca_cert", rpki.x509.X509), + ("manifest_private_key_id", rpki.x509.RSA), + ("manifest_public_key", rpki.x509.PublicKey), + ("latest_manifest_cert", rpki.x509.X509), + ("latest_manifest", rpki.x509.SignedManifest), + ("latest_crl", rpki.x509.CRL), + ("crl_published", rpki.sundial.datetime), + ("manifest_published", rpki.sundial.datetime), + "state", + "ca_cert_uri", + "ca_id") + + crl_published = None + manifest_published = None + latest_ca_cert = None + latest_crl = None + latest_manifest = None + ca_cert_uri = None + + def __repr__(self): + return rpki.log.log_repr(self, repr(self.ca), self.state, self.ca_cert_uri) + + def sql_decode(self, vals): + """ + Extra assertions for SQL decode of a ca_detail_obj. + """ + rpki.sql.sql_persistent.sql_decode(self, vals) + assert self.public_key is None or self.private_key_id is None or self.public_key.get_DER() == self.private_key_id.get_public_DER() + assert self.manifest_public_key is None or self.manifest_private_key_id is None or self.manifest_public_key.get_DER() == self.manifest_private_key_id.get_public_DER() + + @property + @rpki.sql.cache_reference + def ca(self): + """ + Fetch CA object to which this ca_detail links. + """ + return ca_obj.sql_fetch(self.gctx, self.ca_id) + + def fetch_child_certs(self, child = None, ski = None, unique = False, unpublished = None): + """ + Fetch all child_cert objects that link to this ca_detail. + """ + return rpki.rpkid.child_cert_obj.fetch(self.gctx, child, self, ski, unique, unpublished) + + @property + def child_certs(self): + """ + Fetch all child_cert objects that link to this ca_detail. + """ + return self.fetch_child_certs() + + def unpublished_child_certs(self, when): + """ + Fetch all unpublished child_cert objects linked to this ca_detail + with attempted publication dates older than when. 
+ """ + return self.fetch_child_certs(unpublished = when) + + @property + def revoked_certs(self): + """ + Fetch all revoked_cert objects that link to this ca_detail. + """ + return revoked_cert_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,)) + + @property + def roas(self): + """ + Fetch all ROA objects that link to this ca_detail. + """ + return rpki.rpkid.roa_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,)) + + def unpublished_roas(self, when): + """ + Fetch all unpublished ROA objects linked to this ca_detail with + attempted publication dates older than when. + """ + return rpki.rpkid.roa_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s AND published IS NOT NULL and published < %s", (self.ca_detail_id, when)) + + @property + def ghostbusters(self): + """ + Fetch all Ghostbuster objects that link to this ca_detail. + """ + return rpki.rpkid.ghostbuster_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,)) + + @property + def ee_certificates(self): + """ + Fetch all EE certificate objects that link to this ca_detail. + """ + return rpki.rpkid.ee_cert_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,)) + + def unpublished_ghostbusters(self, when): + """ + Fetch all unpublished Ghostbusters objects linked to this + ca_detail with attempted publication dates older than when. + """ + return rpki.rpkid.ghostbuster_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s AND published IS NOT NULL and published < %s", (self.ca_detail_id, when)) + + @property + def crl_uri(self): + """ + Return publication URI for this ca_detail's CRL. + """ + return self.ca.sia_uri + self.crl_uri_tail + + @property + def crl_uri_tail(self): + """ + Return tail (filename portion) of publication URI for this ca_detail's CRL. + """ + return self.public_key.gSKI() + ".crl" + + @property + def manifest_uri(self): + """ + Return publication URI for this ca_detail's manifest. 
+ """ + return self.ca.sia_uri + self.public_key.gSKI() + ".mft" + + def has_expired(self): + """ + Return whether this ca_detail's certificate has expired. + """ + return self.latest_ca_cert.getNotAfter() <= rpki.sundial.now() + + def covers(self, target): + """ + Test whether this ca-detail covers a given set of resources. + """ + + assert not target.asn.inherit and not target.v4.inherit and not target.v6.inherit + me = self.latest_ca_cert.get_3779resources() + return target.asn <= me.asn and target.v4 <= me.v4 and target.v6 <= me.v6 + + def activate(self, ca, cert, uri, callback, errback, predecessor = None): + """ + Activate this ca_detail. + """ + + publisher = publication_queue() + + self.latest_ca_cert = cert + self.ca_cert_uri = uri.rsync() + self.generate_manifest_cert() + self.state = "active" + self.generate_crl(publisher = publisher) + self.generate_manifest(publisher = publisher) + self.sql_store() + + if predecessor is not None: + predecessor.state = "deprecated" + predecessor.sql_store() + for child_cert in predecessor.child_certs: + child_cert.reissue(ca_detail = self, publisher = publisher) + for roa in predecessor.roas: + roa.regenerate(publisher = publisher) + for ghostbuster in predecessor.ghostbusters: + ghostbuster.regenerate(publisher = publisher) + predecessor.generate_crl(publisher = publisher) + predecessor.generate_manifest(publisher = publisher) + + publisher.call_pubd(callback, errback) + + def delete(self, ca, publisher, allow_failure = False): + """ + Delete this ca_detail and all of the certs it issued. + + If allow_failure is true, we clean up as much as we can but don't + raise an exception. 
+ """ + + repository = ca.parent.repository + handler = False if allow_failure else None + for child_cert in self.child_certs: + publisher.withdraw(cls = rpki.publication.certificate_elt, + uri = child_cert.uri, + obj = child_cert.cert, + repository = repository, + handler = handler) + child_cert.sql_mark_deleted() + for roa in self.roas: + roa.revoke(publisher = publisher, allow_failure = allow_failure, fast = True) + for ghostbuster in self.ghostbusters: + ghostbuster.revoke(publisher = publisher, allow_failure = allow_failure, fast = True) + try: + latest_manifest = self.latest_manifest + except AttributeError: + latest_manifest = None + if latest_manifest is not None: + publisher.withdraw(cls = rpki.publication.manifest_elt, + uri = self.manifest_uri, + obj = self.latest_manifest, + repository = repository, + handler = handler) + try: + latest_crl = self.latest_crl + except AttributeError: + latest_crl = None + if latest_crl is not None: + publisher.withdraw(cls = rpki.publication.crl_elt, + uri = self.crl_uri, + obj = self.latest_crl, + repository = repository, + handler = handler) + self.gctx.sql.sweep() + for cert in self.revoked_certs: # + self.child_certs + rpki.log.debug("Deleting %r" % cert) + cert.sql_delete() + rpki.log.debug("Deleting %r" % self) + self.sql_delete() + + def revoke(self, cb, eb): + """ + Request revocation of all certificates whose SKI matches the key + for this ca_detail. + + Tasks: + + - Request revocation of old keypair by parent. + + - Revoke all child certs issued by the old keypair. + + - Generate a final CRL, signed with the old keypair, listing all + the revoked certs, with a next CRL time after the last cert or + CRL signed by the old keypair will have expired. + + - Generate a corresponding final manifest. + + - Destroy old keypairs. + + - Leave final CRL and manifest in place until their nextupdate + time has passed. 
+ """ + + ca = self.ca + parent = ca.parent + + def parent_revoked(r_msg): + + if r_msg.payload.ski != self.latest_ca_cert.gSKI(): + raise rpki.exceptions.SKIMismatch + + rpki.log.debug("Parent revoked %s, starting cleanup" % self.latest_ca_cert.gSKI()) + + crl_interval = rpki.sundial.timedelta(seconds = parent.self.crl_interval) + + nextUpdate = rpki.sundial.now() + + if self.latest_manifest is not None: + self.latest_manifest.extract_if_needed() + nextUpdate = nextUpdate.later(self.latest_manifest.getNextUpdate()) + + if self.latest_crl is not None: + nextUpdate = nextUpdate.later(self.latest_crl.getNextUpdate()) + + publisher = publication_queue() + + for child_cert in self.child_certs: + nextUpdate = nextUpdate.later(child_cert.cert.getNotAfter()) + child_cert.revoke(publisher = publisher) + + for roa in self.roas: + nextUpdate = nextUpdate.later(roa.cert.getNotAfter()) + roa.revoke(publisher = publisher) + + for ghostbuster in self.ghostbusters: + nextUpdate = nextUpdate.later(ghostbuster.cert.getNotAfter()) + ghostbuster.revoke(publisher = publisher) + + nextUpdate += crl_interval + self.generate_crl(publisher = publisher, nextUpdate = nextUpdate) + self.generate_manifest(publisher = publisher, nextUpdate = nextUpdate) + self.private_key_id = None + self.manifest_private_key_id = None + self.manifest_public_key = None + self.latest_manifest_cert = None + self.state = "revoked" + self.sql_mark_dirty() + publisher.call_pubd(cb, eb) + + rpki.log.debug("Asking parent to revoke CA certificate %s" % self.latest_ca_cert.gSKI()) + rpki.up_down.revoke_pdu.query(ca, self.latest_ca_cert.gSKI(), parent_revoked, eb) + + def update(self, parent, ca, rc, sia_uri_changed, old_resources, callback, errback): + """ + Need to get a new certificate for this ca_detail and perhaps frob + children of this ca_detail. 
+ """ + + def issued(issue_response): + c = issue_response.payload.classes[0].certs[0] + rpki.log.debug("CA %r received certificate %s" % (self, c.cert_url)) + + if self.state == "pending": + return self.activate( + ca = ca, + cert = c.cert, + uri = c.cert_url, + callback = callback, + errback = errback) + + validity_changed = self.latest_ca_cert is None or self.latest_ca_cert.getNotAfter() != c.cert.getNotAfter() + + publisher = publication_queue() + + if self.latest_ca_cert != c.cert: + self.latest_ca_cert = c.cert + self.sql_mark_dirty() + self.generate_manifest_cert() + self.generate_crl(publisher = publisher) + self.generate_manifest(publisher = publisher) + + new_resources = self.latest_ca_cert.get_3779resources() + + if sia_uri_changed or old_resources.oversized(new_resources): + for child_cert in self.child_certs: + child_resources = child_cert.cert.get_3779resources() + if sia_uri_changed or child_resources.oversized(new_resources): + child_cert.reissue( + ca_detail = self, + resources = child_resources & new_resources, + publisher = publisher) + + if sia_uri_changed or validity_changed or old_resources.oversized(new_resources): + for roa in self.roas: + roa.update(publisher = publisher, fast = True) + + if sia_uri_changed or validity_changed: + for ghostbuster in self.ghostbusters: + ghostbuster.update(publisher = publisher, fast = True) + + publisher.call_pubd(callback, errback) + + rpki.log.debug("Sending issue request to %r from %r" % (parent, self.update)) + rpki.up_down.issue_pdu.query(parent, ca, self, issued, errback) + + @classmethod + def create(cls, ca): + """ + Create a new ca_detail object for a specified CA. 
+ """ + self = cls() + self.gctx = ca.gctx + self.ca_id = ca.ca_id + self.state = "pending" + + self.private_key_id = rpki.x509.RSA.generate() + self.public_key = self.private_key_id.get_public() + + self.manifest_private_key_id = rpki.x509.RSA.generate() + self.manifest_public_key = self.manifest_private_key_id.get_public() + + self.sql_store() + return self + + def issue_ee(self, ca, resources, subject_key, sia, + cn = None, sn = None, notAfter = None, eku = None): + """ + Issue a new EE certificate. + """ + + if notAfter is None: + notAfter = self.latest_ca_cert.getNotAfter() + + return self.latest_ca_cert.issue( + keypair = self.private_key_id, + subject_key = subject_key, + serial = ca.next_serial_number(), + sia = sia, + aia = self.ca_cert_uri, + crldp = self.crl_uri, + resources = resources, + notAfter = notAfter, + is_ca = False, + cn = cn, + sn = sn, + eku = eku) + + def generate_manifest_cert(self): + """ + Generate a new manifest certificate for this ca_detail. + """ + + resources = rpki.resource_set.resource_bag.from_inheritance() + self.latest_manifest_cert = self.issue_ee( + ca = self.ca, + resources = resources, + subject_key = self.manifest_public_key, + sia = (None, None, self.manifest_uri)) + + def issue(self, ca, child, subject_key, sia, resources, publisher, child_cert = None): + """ + Issue a new certificate to a child. Optional child_cert argument + specifies an existing child_cert object to update in place; if not + specified, we create a new one. Returns the child_cert object + containing the newly issued cert. 
+ """ + + self.check_failed_publication(publisher) + + assert child_cert is None or child_cert.child_id == child.child_id + + cert = self.latest_ca_cert.issue( + keypair = self.private_key_id, + subject_key = subject_key, + serial = ca.next_serial_number(), + aia = self.ca_cert_uri, + crldp = self.crl_uri, + sia = sia, + resources = resources, + notAfter = resources.valid_until) + + if child_cert is None: + child_cert = rpki.rpkid.child_cert_obj( + gctx = child.gctx, + child_id = child.child_id, + ca_detail_id = self.ca_detail_id, + cert = cert) + rpki.log.debug("Created new child_cert %r" % child_cert) + else: + child_cert.cert = cert + del child_cert.ca_detail + child_cert.ca_detail_id = self.ca_detail_id + rpki.log.debug("Reusing existing child_cert %r" % child_cert) + + child_cert.ski = cert.get_SKI() + child_cert.published = rpki.sundial.now() + child_cert.sql_store() + publisher.publish( + cls = rpki.publication.certificate_elt, + uri = child_cert.uri, + obj = child_cert.cert, + repository = ca.parent.repository, + handler = child_cert.published_callback) + self.generate_manifest(publisher = publisher) + return child_cert + + def generate_crl(self, publisher, nextUpdate = None): + """ + Generate a new CRL for this ca_detail. At the moment this is + unconditional, that is, it is up to the caller to decide whether a + new CRL is needed. 
+ """ + + self.check_failed_publication(publisher) + + ca = self.ca + parent = ca.parent + crl_interval = rpki.sundial.timedelta(seconds = parent.self.crl_interval) + now = rpki.sundial.now() + + if nextUpdate is None: + nextUpdate = now + crl_interval + + certlist = [] + for revoked_cert in self.revoked_certs: + if now > revoked_cert.expires + crl_interval: + revoked_cert.sql_delete() + else: + certlist.append((revoked_cert.serial, revoked_cert.revoked)) + certlist.sort() + + self.latest_crl = rpki.x509.CRL.generate( + keypair = self.private_key_id, + issuer = self.latest_ca_cert, + serial = ca.next_crl_number(), + thisUpdate = now, + nextUpdate = nextUpdate, + revokedCertificates = certlist) + + self.crl_published = rpki.sundial.now() + self.sql_mark_dirty() + publisher.publish( + cls = rpki.publication.crl_elt, + uri = self.crl_uri, + obj = self.latest_crl, + repository = parent.repository, + handler = self.crl_published_callback) + + def crl_published_callback(self, pdu): + """ + Check result of CRL publication. + """ + pdu.raise_if_error() + self.crl_published = None + self.sql_mark_dirty() + + def generate_manifest(self, publisher, nextUpdate = None): + """ + Generate a new manifest for this ca_detail. 
+ """ + + self.check_failed_publication(publisher) + + ca = self.ca + parent = ca.parent + crl_interval = rpki.sundial.timedelta(seconds = parent.self.crl_interval) + now = rpki.sundial.now() + uri = self.manifest_uri + + if nextUpdate is None: + nextUpdate = now + crl_interval + + if self.latest_manifest_cert is None or self.latest_manifest_cert.getNotAfter() < nextUpdate: + rpki.log.debug("Generating EE certificate for %s" % uri) + self.generate_manifest_cert() + rpki.log.debug("Latest CA cert notAfter %s, new %s EE notAfter %s" % ( + self.latest_ca_cert.getNotAfter(), uri, self.latest_manifest_cert.getNotAfter())) + + rpki.log.debug("Constructing manifest object list for %s" % uri) + objs = [(self.crl_uri_tail, self.latest_crl)] + objs.extend((c.uri_tail, c.cert) for c in self.child_certs) + objs.extend((r.uri_tail, r.roa) for r in self.roas if r.roa is not None) + objs.extend((g.uri_tail, g.ghostbuster) for g in self.ghostbusters) + objs.extend((e.uri_tail, e.cert) for e in self.ee_certificates) + + rpki.log.debug("Building manifest object %s" % uri) + self.latest_manifest = rpki.x509.SignedManifest.build( + serial = ca.next_manifest_number(), + thisUpdate = now, + nextUpdate = nextUpdate, + names_and_objs = objs, + keypair = self.manifest_private_key_id, + certs = self.latest_manifest_cert) + + rpki.log.debug("Manifest generation took %s" % (rpki.sundial.now() - now)) + + self.manifest_published = rpki.sundial.now() + self.sql_mark_dirty() + publisher.publish(cls = rpki.publication.manifest_elt, + uri = uri, + obj = self.latest_manifest, + repository = parent.repository, + handler = self.manifest_published_callback) + + def manifest_published_callback(self, pdu): + """ + Check result of manifest publication. + """ + pdu.raise_if_error() + self.manifest_published = None + self.sql_mark_dirty() + + def reissue(self, cb, eb): + """ + Reissue all current certificates issued by this ca_detail. 
+ """ + + publisher = publication_queue() + self.check_failed_publication(publisher) + for roa in self.roas: + roa.regenerate(publisher, fast = True) + for ghostbuster in self.ghostbusters: + ghostbuster.regenerate(publisher, fast = True) + for ee_certificate in self.ee_certificates: + ee_certificate.reissue(publisher, force = True) + for child_cert in self.child_certs: + child_cert.reissue(self, publisher, force = True) + self.gctx.sql.sweep() + self.generate_manifest_cert() + self.sql_mark_dirty() + self.generate_crl(publisher = publisher) + self.generate_manifest(publisher = publisher) + self.gctx.sql.sweep() + publisher.call_pubd(cb, eb) + + def check_failed_publication(self, publisher, check_all = True): + """ + Check for failed publication of objects issued by this ca_detail. + + All publishable objects have timestamp fields recording time of + last attempted publication, and callback methods which clear these + timestamps once publication has succeeded. Our task here is to + look for objects issued by this ca_detail which have timestamps + set (indicating that they have not been published) and for which + the timestamps are not very recent (for some definition of very + recent -- intent is to allow a bit of slack in case pubd is just + being slow). In such cases, we want to retry publication. + + As an optimization, we can probably skip checking other products + if manifest and CRL have been published, thus saving ourselves + several complex SQL queries. Not sure yet whether this + optimization is worthwhile. + + For the moment we check everything without optimization, because + it simplifies testing. + + For the moment our definition of staleness is hardwired; this + should become configurable. 
+ """ + + rpki.log.debug("Checking for failed publication for %r" % self) + + stale = rpki.sundial.now() - rpki.sundial.timedelta(seconds = 60) + repository = self.ca.parent.repository + + if self.latest_crl is not None and \ + self.crl_published is not None and \ + self.crl_published < stale: + rpki.log.debug("Retrying publication for %s" % self.crl_uri) + publisher.publish(cls = rpki.publication.crl_elt, + uri = self.crl_uri, + obj = self.latest_crl, + repository = repository, + handler = self.crl_published_callback) + + if self.latest_manifest is not None and \ + self.manifest_published is not None and \ + self.manifest_published < stale: + rpki.log.debug("Retrying publication for %s" % self.manifest_uri) + publisher.publish(cls = rpki.publication.manifest_elt, + uri = self.manifest_uri, + obj = self.latest_manifest, + repository = repository, + handler = self.manifest_published_callback) + + if not check_all: + return + + # Might also be able to return here if manifest and CRL are up to + # date, but let's avoid premature optimization + + for child_cert in self.unpublished_child_certs(stale): + rpki.log.debug("Retrying publication for %s" % child_cert) + publisher.publish( + cls = rpki.publication.certificate_elt, + uri = child_cert.uri, + obj = child_cert.cert, + repository = repository, + handler = child_cert.published_callback) + + for roa in self.unpublished_roas(stale): + rpki.log.debug("Retrying publication for %s" % roa) + publisher.publish( + cls = rpki.publication.roa_elt, + uri = roa.uri, + obj = roa.roa, + repository = repository, + handler = roa.published_callback) + + for ghostbuster in self.unpublished_ghostbusters(stale): + rpki.log.debug("Retrying publication for %s" % ghostbuster) + publisher.publish( + cls = rpki.publication.ghostbuster_elt, + uri = ghostbuster.uri, + obj = ghostbuster.ghostbuster, + repository = repository, + handler = ghostbuster.published_callback) + +class child_cert_obj(rpki.sql.sql_persistent): + """ + Certificate that 
has been issued to a child. + """ + + sql_template = rpki.sql.template( + "child_cert", + "child_cert_id", + ("cert", rpki.x509.X509), + "child_id", + "ca_detail_id", + "ski", + ("published", rpki.sundial.datetime)) + + def __repr__(self): + args = [self] + try: + args.append(self.uri) + except: + pass + return rpki.log.log_repr(*args) + + def __init__(self, gctx = None, child_id = None, ca_detail_id = None, cert = None): + """ + Initialize a child_cert_obj. + """ + rpki.sql.sql_persistent.__init__(self) + self.gctx = gctx + self.child_id = child_id + self.ca_detail_id = ca_detail_id + self.cert = cert + self.published = None + if child_id or ca_detail_id or cert: + self.sql_mark_dirty() + + @property + @rpki.sql.cache_reference + def child(self): + """ + Fetch child object to which this child_cert object links. + """ + return rpki.left_right.child_elt.sql_fetch(self.gctx, self.child_id) + + @property + @rpki.sql.cache_reference + def ca_detail(self): + """ + Fetch ca_detail object to which this child_cert object links. + """ + return ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) + + @ca_detail.deleter + def ca_detail(self): + try: + del self._ca_detail + except AttributeError: + pass + + @property + def uri_tail(self): + """ + Return the tail (filename) portion of the URI for this child_cert. + """ + return self.cert.gSKI() + ".cer" + + @property + def uri(self): + """ + Return the publication URI for this child_cert. + """ + return self.ca_detail.ca.sia_uri + self.uri_tail + + def revoke(self, publisher, generate_crl_and_manifest = True): + """ + Revoke a child cert. 
+ """ + + ca_detail = self.ca_detail + ca = ca_detail.ca + rpki.log.debug("Revoking %r %r" % (self, self.uri)) + revoked_cert_obj.revoke(cert = self.cert, ca_detail = ca_detail) + publisher.withdraw( + cls = rpki.publication.certificate_elt, + uri = self.uri, + obj = self.cert, + repository = ca.parent.repository) + self.gctx.sql.sweep() + self.sql_delete() + if generate_crl_and_manifest: + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + + def reissue(self, ca_detail, publisher, resources = None, sia = None, force = False): + """ + Reissue an existing child cert, reusing the public key. If the + child cert we would generate is identical to the one we already + have, we just return the one we already have. If we have to + revoke the old child cert when generating the new one, we have to + generate a new child_cert_obj, so calling code that needs the + updated child_cert_obj must use the return value from this method. + """ + + ca = ca_detail.ca + child = self.child + + old_resources = self.cert.get_3779resources() + old_sia = self.cert.get_SIA() + old_aia = self.cert.get_AIA()[0] + old_ca_detail = self.ca_detail + + needed = False + + if resources is None: + resources = old_resources + + if sia is None: + sia = old_sia + + assert resources.valid_until is not None and old_resources.valid_until is not None + + if resources.asn != old_resources.asn or resources.v4 != old_resources.v4 or resources.v6 != old_resources.v6: + rpki.log.debug("Resources changed for %r: old %s new %s" % (self, old_resources, resources)) + needed = True + + if resources.valid_until != old_resources.valid_until: + rpki.log.debug("Validity changed for %r: old %s new %s" % ( + self, old_resources.valid_until, resources.valid_until)) + needed = True + + if sia != old_sia: + rpki.log.debug("SIA changed for %r: old %r new %r" % (self, old_sia, sia)) + needed = True + + if ca_detail != old_ca_detail: + rpki.log.debug("Issuer changed for %r: old %r 
new %r" % (self, old_ca_detail, ca_detail)) + needed = True + + if ca_detail.ca_cert_uri != old_aia: + rpki.log.debug("AIA changed for %r: old %r new %r" % (self, old_aia, ca_detail.ca_cert_uri)) + needed = True + + must_revoke = old_resources.oversized(resources) or old_resources.valid_until > resources.valid_until + if must_revoke: + rpki.log.debug("Must revoke any existing cert(s) for %r" % self) + needed = True + + if not needed and force: + rpki.log.debug("No change needed for %r, forcing reissuance anyway" % self) + needed = True + + if not needed: + rpki.log.debug("No change to %r" % self) + return self + + if must_revoke: + for x in child.fetch_child_certs(ca_detail = ca_detail, ski = self.ski): + rpki.log.debug("Revoking child_cert %r" % x) + x.revoke(publisher = publisher) + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + + child_cert = ca_detail.issue( + ca = ca, + child = child, + subject_key = self.cert.getPublicKey(), + sia = sia, + resources = resources, + child_cert = None if must_revoke else self, + publisher = publisher) + + rpki.log.debug("New child_cert %r uri %s" % (child_cert, child_cert.uri)) + + return child_cert + + @classmethod + def fetch(cls, gctx = None, child = None, ca_detail = None, ski = None, unique = False, unpublished = None): + """ + Fetch all child_cert objects matching a particular set of + parameters. This is a wrapper to consolidate various queries that + would otherwise be inline SQL WHERE expressions. In most cases + code calls this indirectly, through methods in other classes. 
+ """ + + args = [] + where = [] + + if child: + where.append("child_id = %s") + args.append(child.child_id) + + if ca_detail: + where.append("ca_detail_id = %s") + args.append(ca_detail.ca_detail_id) + + if ski: + where.append("ski = %s") + args.append(ski) + + if unpublished is not None: + where.append("published IS NOT NULL AND published < %s") + args.append(unpublished) + + where = " AND ".join(where) + + gctx = gctx or (child and child.gctx) or (ca_detail and ca_detail.gctx) or None + + if unique: + return cls.sql_fetch_where1(gctx, where, args) + else: + return cls.sql_fetch_where(gctx, where, args) + + def published_callback(self, pdu): + """ + Publication callback: check result and mark published. + """ + pdu.raise_if_error() + self.published = None + self.sql_mark_dirty() + +class revoked_cert_obj(rpki.sql.sql_persistent): + """ + Tombstone for a revoked certificate. + """ + + sql_template = rpki.sql.template( + "revoked_cert", + "revoked_cert_id", + "serial", + "ca_detail_id", + ("revoked", rpki.sundial.datetime), + ("expires", rpki.sundial.datetime)) + + def __repr__(self): + return rpki.log.log_repr(self, repr(self.ca_detail), self.serial, self.revoked) + + def __init__(self, gctx = None, serial = None, revoked = None, expires = None, ca_detail_id = None): + """ + Initialize a revoked_cert_obj. + """ + rpki.sql.sql_persistent.__init__(self) + self.gctx = gctx + self.serial = serial + self.revoked = revoked + self.expires = expires + self.ca_detail_id = ca_detail_id + if serial or revoked or expires or ca_detail_id: + self.sql_mark_dirty() + + @property + @rpki.sql.cache_reference + def ca_detail(self): + """ + Fetch ca_detail object to which this revoked_cert_obj links. + """ + return ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) + + @classmethod + def revoke(cls, cert, ca_detail): + """ + Revoke a certificate. 
+ """ + return cls( + serial = cert.getSerial(), + expires = cert.getNotAfter(), + revoked = rpki.sundial.now(), + gctx = ca_detail.gctx, + ca_detail_id = ca_detail.ca_detail_id) + +class roa_obj(rpki.sql.sql_persistent): + """ + Route Origin Authorization. + """ + + sql_template = rpki.sql.template( + "roa", + "roa_id", + "ca_detail_id", + "self_id", + "asn", + ("roa", rpki.x509.ROA), + ("cert", rpki.x509.X509), + ("published", rpki.sundial.datetime)) + + ca_detail_id = None + cert = None + roa = None + published = None + + @property + @rpki.sql.cache_reference + def self(self): + """ + Fetch self object to which this roa_obj links. + """ + return rpki.left_right.self_elt.sql_fetch(self.gctx, self.self_id) + + @property + @rpki.sql.cache_reference + def ca_detail(self): + """ + Fetch ca_detail object to which this roa_obj links. + """ + return rpki.rpkid.ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) + + @ca_detail.deleter + def ca_detail(self): + try: + del self._ca_detail + except AttributeError: + pass + + def sql_fetch_hook(self): + """ + Extra SQL fetch actions for roa_obj -- handle prefix lists. + """ + for version, datatype, attribute in ((4, rpki.resource_set.roa_prefix_set_ipv4, "ipv4"), + (6, rpki.resource_set.roa_prefix_set_ipv6, "ipv6")): + setattr(self, attribute, datatype.from_sql( + self.gctx.sql, + """ + SELECT prefix, prefixlen, max_prefixlen FROM roa_prefix + WHERE roa_id = %s AND version = %s + """, + (self.roa_id, version))) + + def sql_insert_hook(self): + """ + Extra SQL insert actions for roa_obj -- handle prefix lists. + """ + for version, prefix_set in ((4, self.ipv4), (6, self.ipv6)): + if prefix_set: + self.gctx.sql.executemany( + """ + INSERT roa_prefix (roa_id, prefix, prefixlen, max_prefixlen, version) + VALUES (%s, %s, %s, %s, %s) + """, + ((self.roa_id, x.prefix, x.prefixlen, x.max_prefixlen, version) + for x in prefix_set)) + + def sql_delete_hook(self): + """ + Extra SQL delete actions for roa_obj -- handle prefix lists. 
+ """ + self.gctx.sql.execute("DELETE FROM roa_prefix WHERE roa_id = %s", (self.roa_id,)) + + def __repr__(self): + args = [self, self.asn, self.ipv4, self.ipv6] + try: + args.append(self.uri) + except: + pass + return rpki.log.log_repr(*args) + + def __init__(self, gctx = None, self_id = None, asn = None, ipv4 = None, ipv6 = None): + rpki.sql.sql_persistent.__init__(self) + self.gctx = gctx + self.self_id = self_id + self.asn = asn + self.ipv4 = ipv4 + self.ipv6 = ipv6 + + # Defer marking new ROA as dirty until .generate() has a chance to + # finish setup, otherwise we get SQL consistency errors. + # + #if self_id or asn or ipv4 or ipv6: self.sql_mark_dirty() + + def update(self, publisher, fast = False): + """ + Bring this roa_obj's ROA up to date if necesssary. + """ + + v4 = self.ipv4.to_resource_set() if self.ipv4 is not None else rpki.resource_set.resource_set_ipv4() + v6 = self.ipv6.to_resource_set() if self.ipv6 is not None else rpki.resource_set.resource_set_ipv6() + + if self.roa is None: + rpki.log.debug("%r doesn't exist, generating" % self) + return self.generate(publisher = publisher, fast = fast) + + ca_detail = self.ca_detail + + if ca_detail is None: + rpki.log.debug("%r has no associated ca_detail, generating" % self) + return self.generate(publisher = publisher, fast = fast) + + if ca_detail.state != "active": + rpki.log.debug("ca_detail associated with %r not active (state %s), regenerating" % (self, ca_detail.state)) + return self.regenerate(publisher = publisher, fast = fast) + + regen_time = self.cert.getNotAfter() - rpki.sundial.timedelta(seconds = self.self.regen_margin) + + if rpki.sundial.now() > regen_time: + rpki.log.debug("%r past threshold %s, regenerating" % (self, regen_time)) + return self.regenerate(publisher = publisher, fast = fast) + + ca_resources = ca_detail.latest_ca_cert.get_3779resources() + ee_resources = self.cert.get_3779resources() + + if ee_resources.oversized(ca_resources): + rpki.log.debug("%r oversized with respect 
to CA, regenerating" % self) + return self.regenerate(publisher = publisher, fast = fast) + + if ee_resources.v4 != v4 or ee_resources.v6 != v6: + rpki.log.debug("%r resources do not match EE, regenerating" % self) + return self.regenerate(publisher = publisher, fast = fast) + + if self.cert.get_AIA()[0] != ca_detail.ca_cert_uri: + rpki.log.debug("%r AIA changed, regenerating" % self) + return self.regenerate(publisher = publisher, fast = fast) + + def generate(self, publisher, fast = False): + """ + Generate a ROA. + + At present we have no way of performing a direct lookup from a + desired set of resources to a covering certificate, so we have to + search. This could be quite slow if we have a lot of active + ca_detail objects. Punt on the issue for now, revisit if + profiling shows this as a hotspot. + + Once we have the right covering certificate, we generate the ROA + payload, generate a new EE certificate, use the EE certificate to + sign the ROA payload, publish the result, then throw away the + private key for the EE cert, all per the ROA specification. This + implies that generating a lot of ROAs will tend to thrash + /dev/random, but there is not much we can do about that. + + If fast is set, we leave generating the new manifest for our + caller to handle, presumably at the end of a bulk operation. + """ + + if self.ipv4 is None and self.ipv6 is None: + raise rpki.exceptions.EmptyROAPrefixList + + # Ugly and expensive search for covering ca_detail, there has to + # be a better way, but it would require the ability to test for + # resource subsets in SQL. 
+ + v4 = self.ipv4.to_resource_set() if self.ipv4 is not None else rpki.resource_set.resource_set_ipv4() + v6 = self.ipv6.to_resource_set() if self.ipv6 is not None else rpki.resource_set.resource_set_ipv6() + + ca_detail = self.ca_detail + if ca_detail is None or ca_detail.state != "active" or ca_detail.has_expired(): + rpki.log.debug("Searching for new ca_detail for ROA %r" % self) + ca_detail = None + for parent in self.self.parents: + for ca in parent.cas: + ca_detail = ca.active_ca_detail + assert ca_detail is None or ca_detail.state == "active" + if ca_detail is not None and not ca_detail.has_expired(): + resources = ca_detail.latest_ca_cert.get_3779resources() + if v4.issubset(resources.v4) and v6.issubset(resources.v6): + break + ca_detail = None + if ca_detail is not None: + break + else: + rpki.log.debug("Keeping old ca_detail for ROA %r" % self) + + if ca_detail is None: + raise rpki.exceptions.NoCoveringCertForROA, "Could not find a certificate covering %r" % self + + rpki.log.debug("Using new ca_detail %r for ROA %r, ca_detail_state %s" % ( + ca_detail, self, ca_detail.state)) + + ca = ca_detail.ca + resources = rpki.resource_set.resource_bag(v4 = v4, v6 = v6) + keypair = rpki.x509.RSA.generate() + + del self.ca_detail + self.ca_detail_id = ca_detail.ca_detail_id + self.cert = ca_detail.issue_ee( + ca = ca, + resources = resources, + subject_key = keypair.get_public(), + sia = (None, None, self.uri_from_key(keypair))) + self.roa = rpki.x509.ROA.build(self.asn, self.ipv4, self.ipv6, keypair, (self.cert,)) + self.published = rpki.sundial.now() + self.sql_store() + + rpki.log.debug("Generating %r URI %s" % (self, self.uri)) + publisher.publish( + cls = rpki.publication.roa_elt, + uri = self.uri, + obj = self.roa, + repository = ca.parent.repository, + handler = self.published_callback) + if not fast: + ca_detail.generate_manifest(publisher = publisher) + + + def published_callback(self, pdu): + """ + Check publication result. 
+ """ + pdu.raise_if_error() + self.published = None + self.sql_mark_dirty() + + def revoke(self, publisher, regenerate = False, allow_failure = False, fast = False): + """ + Withdraw ROA associated with this roa_obj. + + In order to preserve make-before-break properties without + duplicating code, this method also handles generating a + replacement ROA when requested. + + If allow_failure is set, failing to withdraw the ROA will not be + considered an error. + + If fast is set, SQL actions will be deferred, on the assumption + that our caller will handle regenerating CRL and manifest and + flushing the SQL cache. + """ + + ca_detail = self.ca_detail + cert = self.cert + roa = self.roa + uri = self.uri + + rpki.log.debug("%s %r, ca_detail %r state is %s" % ( + "Regenerating" if regenerate else "Not regenerating", + self, ca_detail, ca_detail.state)) + + if regenerate: + self.generate(publisher = publisher, fast = fast) + + rpki.log.debug("Withdrawing %r %s and revoking its EE cert" % (self, uri)) + rpki.rpkid.revoked_cert_obj.revoke(cert = cert, ca_detail = ca_detail) + publisher.withdraw(cls = rpki.publication.roa_elt, uri = uri, obj = roa, + repository = ca_detail.ca.parent.repository, + handler = False if allow_failure else None) + + if not regenerate: + self.sql_mark_deleted() + + if not fast: + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + self.gctx.sql.sweep() + + def regenerate(self, publisher, fast = False): + """ + Reissue ROA associated with this roa_obj. + """ + if self.ca_detail is None: + self.generate(publisher = publisher, fast = fast) + else: + self.revoke(publisher = publisher, regenerate = True, fast = fast) + + def uri_from_key(self, key): + """ + Return publication URI for a public key. + """ + return self.ca_detail.ca.sia_uri + key.gSKI() + ".roa" + + @property + def uri(self): + """ + Return the publication URI for this roa_obj's ROA. 
+ """ + return self.ca_detail.ca.sia_uri + self.uri_tail + + @property + def uri_tail(self): + """ + Return the tail (filename portion) of the publication URI for this + roa_obj's ROA. + """ + return self.cert.gSKI() + ".roa" + + +class ghostbuster_obj(rpki.sql.sql_persistent): + """ + Ghostbusters record. + """ + + sql_template = rpki.sql.template( + "ghostbuster", + "ghostbuster_id", + "ca_detail_id", + "self_id", + "vcard", + ("ghostbuster", rpki.x509.Ghostbuster), + ("cert", rpki.x509.X509), + ("published", rpki.sundial.datetime)) + + ca_detail_id = None + cert = None + ghostbuster = None + published = None + vcard = None + + def __repr__(self): + args = [self] + try: + args.extend(self.vcard.splitlines()[2:-1]) + except: + pass + try: + args.append(self.uri) + except: + pass + return rpki.log.log_repr(*args) + + @property + @rpki.sql.cache_reference + def self(self): + """ + Fetch self object to which this ghostbuster_obj links. + """ + return rpki.left_right.self_elt.sql_fetch(self.gctx, self.self_id) + + @property + @rpki.sql.cache_reference + def ca_detail(self): + """ + Fetch ca_detail object to which this ghostbuster_obj links. + """ + return rpki.rpkid.ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) + + def __init__(self, gctx = None, self_id = None, ca_detail_id = None, vcard = None): + rpki.sql.sql_persistent.__init__(self) + self.gctx = gctx + self.self_id = self_id + self.ca_detail_id = ca_detail_id + self.vcard = vcard + + # Defer marking new ghostbuster as dirty until .generate() has a chance to + # finish setup, otherwise we get SQL consistency errors. + + def update(self, publisher, fast = False): + """ + Bring this ghostbuster_obj up to date if necesssary. 
+ """ + + if self.ghostbuster is None: + rpki.log.debug("Ghostbuster record doesn't exist, generating") + return self.generate(publisher = publisher, fast = fast) + + regen_time = self.cert.getNotAfter() - rpki.sundial.timedelta(seconds = self.self.regen_margin) + + if rpki.sundial.now() > regen_time: + rpki.log.debug("%r past threshold %s, regenerating" % (self, regen_time)) + return self.regenerate(publisher = publisher, fast = fast) + + if self.cert.get_AIA()[0] != self.ca_detail.ca_cert_uri: + rpki.log.debug("%r AIA changed, regenerating" % self) + return self.regenerate(publisher = publisher, fast = fast) + + def generate(self, publisher, fast = False): + """ + Generate a Ghostbuster record + + Once we have the right covering certificate, we generate the + ghostbuster payload, generate a new EE certificate, use the EE + certificate to sign the ghostbuster payload, publish the result, + then throw away the private key for the EE cert. This is modeled + after the way we handle ROAs. + + If fast is set, we leave generating the new manifest for our + caller to handle, presumably at the end of a bulk operation. + """ + + ca_detail = self.ca_detail + ca = ca_detail.ca + + resources = rpki.resource_set.resource_bag.from_inheritance() + keypair = rpki.x509.RSA.generate() + + self.cert = ca_detail.issue_ee( + ca = ca, + resources = resources, + subject_key = keypair.get_public(), + sia = (None, None, self.uri_from_key(keypair))) + self.ghostbuster = rpki.x509.Ghostbuster.build(self.vcard, keypair, (self.cert,)) + self.published = rpki.sundial.now() + self.sql_store() + + rpki.log.debug("Generating Ghostbuster record %r" % self.uri) + publisher.publish( + cls = rpki.publication.ghostbuster_elt, + uri = self.uri, + obj = self.ghostbuster, + repository = ca.parent.repository, + handler = self.published_callback) + if not fast: + ca_detail.generate_manifest(publisher = publisher) + + def published_callback(self, pdu): + """ + Check publication result. 
+ """ + pdu.raise_if_error() + self.published = None + self.sql_mark_dirty() + + def revoke(self, publisher, regenerate = False, allow_failure = False, fast = False): + """ + Withdraw Ghostbuster associated with this ghostbuster_obj. + + In order to preserve make-before-break properties without + duplicating code, this method also handles generating a + replacement ghostbuster when requested. + + If allow_failure is set, failing to withdraw the ghostbuster will not be + considered an error. + + If fast is set, SQL actions will be deferred, on the assumption + that our caller will handle regenerating CRL and manifest and + flushing the SQL cache. + """ + + ca_detail = self.ca_detail + cert = self.cert + ghostbuster = self.ghostbuster + uri = self.uri + + rpki.log.debug("%s %r, ca_detail %r state is %s" % ( + "Regenerating" if regenerate else "Not regenerating", + self, ca_detail, ca_detail.state)) + + if regenerate: + self.generate(publisher = publisher, fast = fast) + + rpki.log.debug("Withdrawing %r %s and revoking its EE cert" % (self, uri)) + rpki.rpkid.revoked_cert_obj.revoke(cert = cert, ca_detail = ca_detail) + publisher.withdraw(cls = rpki.publication.ghostbuster_elt, uri = uri, obj = ghostbuster, + repository = ca_detail.ca.parent.repository, + handler = False if allow_failure else None) + + if not regenerate: + self.sql_mark_deleted() + + if not fast: + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + self.gctx.sql.sweep() + + def regenerate(self, publisher, fast = False): + """ + Reissue Ghostbuster associated with this ghostbuster_obj. + """ + if self.ghostbuster is None: + self.generate(publisher = publisher, fast = fast) + else: + self.revoke(publisher = publisher, regenerate = True, fast = fast) + + def uri_from_key(self, key): + """ + Return publication URI for a public key. 
+ """ + return self.ca_detail.ca.sia_uri + key.gSKI() + ".gbr" + + @property + def uri(self): + """ + Return the publication URI for this ghostbuster_obj's ghostbuster. + """ + return self.ca_detail.ca.sia_uri + self.uri_tail + + @property + def uri_tail(self): + """ + Return the tail (filename portion) of the publication URI for this + ghostbuster_obj's ghostbuster. + """ + return self.cert.gSKI() + ".gbr" + + +class ee_cert_obj(rpki.sql.sql_persistent): + """ + EE certificate (router certificate or generic). + """ + + sql_template = rpki.sql.template( + "ee_cert", + "ee_cert_id", + "self_id", + "ca_detail_id", + "ski", + ("cert", rpki.x509.X509), + ("published", rpki.sundial.datetime)) + + def __repr__(self): + return rpki.log.log_repr(self, self.cert.getSubject(), self.uri) + + def __init__(self, gctx = None, self_id = None, ca_detail_id = None, cert = None): + rpki.sql.sql_persistent.__init__(self) + self.gctx = gctx + self.self_id = self_id + self.ca_detail_id = ca_detail_id + self.cert = cert + self.ski = None if cert is None else cert.get_SKI() + self.published = None + if self_id or ca_detail_id or cert: + self.sql_mark_dirty() + + @property + @rpki.sql.cache_reference + def self(self): + """ + Fetch self object to which this ee_cert_obj links. + """ + return rpki.left_right.self_elt.sql_fetch(self.gctx, self.self_id) + + @property + @rpki.sql.cache_reference + def ca_detail(self): + """ + Fetch ca_detail object to which this ee_cert_obj links. + """ + return rpki.rpkid.ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) + + @ca_detail.deleter + def ca_detail(self): + try: + del self._ca_detail + except AttributeError: + pass + + @property + def gski(self): + """ + Calculate g(SKI), for ease of comparison with XML. + + Although, really, one has to ask why we don't just store g(SKI) + in rpkid.sql instead of ski.... 
+ """ + return base64.urlsafe_b64encode(self.ski).rstrip("=") + + @gski.setter + def gski(self, val): + self.ski = base64.urlsafe_b64decode(s + ("=" * ((4 - len(s)) % 4))) + + @property + def uri(self): + """ + Return the publication URI for this ee_cert_obj. + """ + return self.ca_detail.ca.sia_uri + self.uri_tail + + @property + def uri_tail(self): + """ + Return the tail (filename portion) of the publication URI for this + ee_cert_obj. + """ + return self.cert.gSKI() + ".cer" + + @classmethod + def create(cls, ca_detail, subject_name, subject_key, resources, publisher, eku = None): + """ + Generate a new certificate and stuff it in a new ee_cert_obj. + """ + + cn, sn = subject_name.extract_cn_and_sn() + ca = ca_detail.ca + + cert = ca_detail.issue_ee( + ca = ca, + subject_key = subject_key, + sia = None, + resources = resources, + notAfter = resources.valid_until, + cn = cn, + sn = sn, + eku = eku) + + self = cls( + gctx = ca_detail.gctx, + self_id = ca.parent.self.self_id, + ca_detail_id = ca_detail.ca_detail_id, + cert = cert) + + publisher.publish( + cls = rpki.publication.certificate_elt, + uri = self.uri, + obj = self.cert, + repository = ca.parent.repository, + handler = self.published_callback) + + self.sql_store() + + ca_detail.generate_manifest(publisher = publisher) + + rpki.log.debug("New ee_cert %r" % self) + + return self + + def revoke(self, publisher, generate_crl_and_manifest = True): + """ + Revoke and withdraw an EE certificate. 
+ """ + + ca_detail = self.ca_detail + ca = ca_detail.ca + rpki.log.debug("Revoking %r %r" % (self, self.uri)) + revoked_cert_obj.revoke(cert = self.cert, ca_detail = ca_detail) + publisher.withdraw(cls = rpki.publication.certificate_elt, + uri = self.uri, + obj = self.cert, + repository = ca.parent.repository) + self.gctx.sql.sweep() + self.sql_delete() + if generate_crl_and_manifest: + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + + def reissue(self, publisher, ca_detail = None, resources = None, force = False): + """ + Reissue an existing EE cert, reusing the public key. If the EE + cert we would generate is identical to the one we already have, we + just return; if we need to reissue, we reuse this ee_cert_obj and + just update its contents, as the publication URI will not have + changed. + """ + + needed = False + + old_cert = self.cert + + old_ca_detail = self.ca_detail + if ca_detail is None: + ca_detail = old_ca_detail + + assert ca_detail.ca is old_ca_detail.ca + + old_resources = old_cert.get_3779resources() + if resources is None: + resources = old_resources + + assert resources.valid_until is not None and old_resources.valid_until is not None + + assert ca_detail.covers(resources) + + if ca_detail != self.ca_detail: + rpki.log.debug("ca_detail changed for %r: old %r new %r" % ( + self, self.ca_detail, ca_detail)) + needed = True + + if ca_detail.ca_cert_uri != old_cert.get_AIA()[0]: + rpki.log.debug("AIA changed for %r: old %s new %s" % ( + self, old_cert.get_AIA()[0], ca_detail.ca_cert_uri)) + needed = True + + if resources.valid_until != old_resources.valid_until: + rpki.log.debug("Validity changed for %r: old %s new %s" % ( + self, old_resources.valid_until, resources.valid_until)) + needed = True + + if resources.asn != old_resources.asn or resources.v4 != old_resources.v4 or resources.v6 != old_resources.v6: + rpki.log.debug("Resources changed for %r: old %s new %s" % ( + self, old_resources, 
resources)) + needed = True + + must_revoke = (old_resources.oversized(resources) or + old_resources.valid_until > resources.valid_until) + if must_revoke: + rpki.log.debug("Must revoke existing cert(s) for %r" % self) + needed = True + + if not needed and force: + rpki.log.debug("No change needed for %r, forcing reissuance anyway" % self) + needed = True + + if not needed: + rpki.log.debug("No change to %r" % self) + return + + cn, sn = self.cert.getSubject().extract_cn_and_sn() + + self.cert = ca_detail.issue_ee( + ca = ca_detail.ca, + subject_key = self.cert.getPublicKey(), + eku = self.cert.get_EKU(), + sia = None, + resources = resources, + notAfter = resources.valid_until, + cn = cn, + sn = sn) + + self.sql_mark_dirty() + + publisher.publish( + cls = rpki.publication.certificate_elt, + uri = self.uri, + obj = self.cert, + repository = ca_detail.ca.parent.repository, + handler = self.published_callback) + + if must_revoke: + revoked_cert_obj.revoke(cert = old_cert.cert, ca_detail = old_ca_detail) + + self.gctx.sql.sweep() + + if must_revoke: + ca_detail.generate_crl(publisher = publisher) + self.gctx.sql.sweep() + + ca_detail.generate_manifest(publisher = publisher) + + def published_callback(self, pdu): + """ + Publication callback: check result and mark published. + """ + pdu.raise_if_error() + self.published = None + self.sql_mark_dirty() + + +class publication_queue(object): + """ + Utility to simplify publication from within rpkid. + + General idea here is to accumulate a collection of objects to be + published, in one or more repositories, each potentially with its + own completion callback. Eventually we want to publish everything + we've accumulated, at which point we need to iterate over the + collection and do repository.call_pubd() for each repository. 
+ """ + + replace = True + + def __init__(self): + self.clear() + + def clear(self): + self.repositories = {} + self.msgs = {} + self.handlers = {} + if self.replace: + self.uris = {} + + def _add(self, uri, obj, repository, handler, make_pdu): + rid = id(repository) + if rid not in self.repositories: + self.repositories[rid] = repository + self.msgs[rid] = rpki.publication.msg.query() + if self.replace and uri in self.uris: + rpki.log.debug("Removing publication duplicate <%s %r %r>" % (self.uris[uri].action, self.uris[uri].uri, self.uris[uri].payload)) + self.msgs[rid].remove(self.uris.pop(uri)) + pdu = make_pdu(uri = uri, obj = obj) + if handler is not None: + self.handlers[id(pdu)] = handler + pdu.tag = id(pdu) + self.msgs[rid].append(pdu) + if self.replace: + self.uris[uri] = pdu + + def publish(self, cls, uri, obj, repository, handler = None): + return self._add( uri, obj, repository, handler, cls.make_publish) + + def withdraw(self, cls, uri, obj, repository, handler = None): + return self._add( uri, obj, repository, handler, cls.make_withdraw) + + def call_pubd(self, cb, eb): + def loop(iterator, rid): + rpki.log.debug("Calling pubd[%r]" % self.repositories[rid]) + self.repositories[rid].call_pubd(iterator, eb, self.msgs[rid], self.handlers) + def done(): + self.clear() + cb() + rpki.async.iterator(self.repositories, loop, done) + + @property + def size(self): + return sum(len(self.msgs[rid]) for rid in self.repositories) + + def empty(self): + assert (not self.msgs) == (self.size == 0) + return not self.msgs diff --git a/rpki/rpkid_tasks.py b/rpki/rpkid_tasks.py new file mode 100644 index 00000000..04e1c0df --- /dev/null +++ b/rpki/rpkid_tasks.py @@ -0,0 +1,750 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2012--2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright 
notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR +# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA +# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +rpkid task objects. Split out from rpki.left_right and rpki.rpkid +because interactions with rpkid scheduler were getting too complicated. +""" + +import rpki.log +import rpki.rpkid +import rpki.async +import rpki.up_down +import rpki.sundial +import rpki.publication +import rpki.exceptions + +task_classes = () + +def queue_task(cls): + """ + Class decorator to add a new task class to task_classes. + """ + + global task_classes + task_classes += (cls,) + return cls + + +class CompletionHandler(object): + """ + Track one or more scheduled rpkid tasks and execute a callback when + the last of them terminates. + """ + + ## @var debug + # Debug logging. 
+ + debug = False + + def __init__(self, cb): + self.cb = cb + self.tasks = set() + + def register(self, task): + if self.debug: + rpki.log.debug("Completion handler %r registering task %r" % (self, task)) + self.tasks.add(task) + task.register_completion(self.done) + + def done(self, task): + try: + self.tasks.remove(task) + except KeyError: + rpki.log.warn("Completion handler %r called with unregistered task %r, blundering onwards" % (self, task)) + else: + if self.debug: + rpki.log.debug("Completion handler %r called with registered task %r" % (self, task)) + if not self.tasks: + if self.debug: + rpki.log.debug("Completion handler %r finished, calling %r" % (self, self.cb)) + self.cb() + + @property + def count(self): + return len(self.tasks) + + +class AbstractTask(object): + """ + Abstract base class for rpkid scheduler task objects. This just + handles the scheduler hooks, real work starts in self.start. + + NB: This assumes that the rpki.rpkid.rpkid.task_* methods have been + rewritten to expect instances of subclasses of this class, rather + than expecting thunks to be wrapped up in the older version of this + class. Rewrite, rewrite, remove this comment when done, OK! + """ + + ## @var timeslice + # How long before a task really should consider yielding the CPU to + # let something else run. 
+ + timeslice = rpki.sundial.timedelta(seconds = 15) + + def __init__(self, s, description = None): + self.self = s + self.description = description + self.completions = [] + self.continuation = None + self.due_date = None + self.clear() + + def __repr__(self): + return rpki.log.log_repr(self, self.description) + + def register_completion(self, completion): + self.completions.append(completion) + + def exit(self): + while self.completions: + self.completions.pop(0)(self) + self.clear() + self.due_date = None + self.self.gctx.task_next() + + def postpone(self, continuation): + self.continuation = continuation + self.due_date = None + self.self.gctx.task_add(self) + self.self.gctx.task_next() + + def __call__(self): + self.due_date = rpki.sundial.now() + self.timeslice + if self.continuation is None: + rpki.log.debug("Running task %r" % self) + self.clear() + self.start() + else: + rpki.log.debug("Restarting task %r at %r" % (self, self.continuation)) + continuation = self.continuation + self.continuation = None + continuation() + + @property + def overdue(self): + return rpki.sundial.now() > self.due_date + + def __getattr__(self, name): + return getattr(self.self, name) + + def start(self): + raise NotImplementedError + + def clear(self): + pass + + +@queue_task +class PollParentTask(AbstractTask): + """ + Run the regular client poll cycle with each of this self's + parents, in turn. 
+ """ + + def clear(self): + self.parent_iterator = None + self.parent = None + self.ca_map = None + self.class_iterator = None + + def start(self): + rpki.log.trace() + self.gctx.checkpoint() + rpki.log.debug("Self %s[%d] polling parents" % (self.self_handle, self.self_id)) + rpki.async.iterator(self.parents, self.parent_loop, self.exit) + + def parent_loop(self, parent_iterator, parent): + self.parent_iterator = parent_iterator + self.parent = parent + rpki.up_down.list_pdu.query(parent, self.got_list, self.list_failed) + + def got_list(self, r_msg): + self.ca_map = dict((ca.parent_resource_class, ca) for ca in self.parent.cas) + self.gctx.checkpoint() + rpki.async.iterator(r_msg.payload.classes, self.class_loop, self.class_done) + + def list_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't get resource class list from parent %r, skipping: %s (%r)" % ( + self.parent, e, e)) + self.parent_iterator() + + def class_loop(self, class_iterator, rc): + self.gctx.checkpoint() + self.class_iterator = class_iterator + try: + ca = self.ca_map.pop(rc.class_name) + except KeyError: + rpki.rpkid.ca_obj.create(self.parent, rc, class_iterator, self.class_create_failed) + else: + ca.check_for_updates(self.parent, rc, class_iterator, self.class_update_failed) + + def class_update_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't update class, skipping: %s" % e) + self.class_iterator() + + def class_create_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't create class, skipping: %s" % e) + self.class_iterator() + + def class_done(self): + rpki.async.iterator(self.ca_map.values(), self.ca_loop, self.ca_done) + + def ca_loop(self, iterator, ca): + self.gctx.checkpoint() + ca.delete(self.parent, iterator) + + def ca_done(self): + self.gctx.checkpoint() + self.gctx.sql.sweep() + self.parent_iterator() + + +@queue_task +class UpdateChildrenTask(AbstractTask): + """ + Check for updated IRDB data for all of this self's children and + issue 
new certs as necessary. Must handle changes both in + resources and in expiration date. + """ + + def clear(self): + self.now = None + self.rsn = None + self.publisher = None + self.iterator = None + self.child = None + self.child_certs = None + + def start(self): + rpki.log.trace() + self.gctx.checkpoint() + rpki.log.debug("Self %s[%d] updating children" % (self.self_handle, self.self_id)) + self.now = rpki.sundial.now() + self.rsn = self.now + rpki.sundial.timedelta(seconds = self.regen_margin) + self.publisher = rpki.rpkid.publication_queue() + rpki.async.iterator(self.children, self.loop, self.done) + + def loop(self, iterator, child): + self.gctx.checkpoint() + self.gctx.sql.sweep() + self.iterator = iterator + self.child = child + self.child_certs = child.child_certs + if self.overdue: + self.publisher.call_pubd(lambda: self.postpone(self.do_child), self.publication_failed) + else: + self.do_child() + + def do_child(self): + if self.child_certs: + self.gctx.irdb_query_child_resources(self.child.self.self_handle, self.child.child_handle, + self.got_resources, self.lose) + else: + self.iterator() + + def lose(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't update child %r, skipping: %s" % (self.child, e)) + self.iterator() + + def got_resources(self, irdb_resources): + try: + for child_cert in self.child_certs: + ca_detail = child_cert.ca_detail + ca = ca_detail.ca + if ca_detail.state == "active": + old_resources = child_cert.cert.get_3779resources() + new_resources = old_resources & irdb_resources & ca_detail.latest_ca_cert.get_3779resources() + old_aia = child_cert.cert.get_AIA()[0] + new_aia = ca_detail.ca_cert_uri + + if new_resources.empty(): + rpki.log.debug("Resources shrank to the null set, " + "revoking and withdrawing child %s certificate SKI %s" % ( + self.child.child_handle, child_cert.cert.gSKI())) + child_cert.revoke(publisher = self.publisher) + ca_detail.generate_crl(publisher = self.publisher) + ca_detail.generate_manifest(publisher 
= self.publisher) + + elif (old_resources != new_resources or + old_aia != new_aia or + (old_resources.valid_until < self.rsn and + irdb_resources.valid_until > self.now and + old_resources.valid_until != irdb_resources.valid_until)): + + rpki.log.debug("Need to reissue child %s certificate SKI %s" % ( + self.child.child_handle, child_cert.cert.gSKI())) + if old_resources != new_resources: + rpki.log.debug("Child %s SKI %s resources changed: old %s new %s" % ( + self.child.child_handle, child_cert.cert.gSKI(), old_resources, new_resources)) + if old_resources.valid_until != irdb_resources.valid_until: + rpki.log.debug("Child %s SKI %s validity changed: old %s new %s" % ( + self.child.child_handle, child_cert.cert.gSKI(), + old_resources.valid_until, irdb_resources.valid_until)) + + new_resources.valid_until = irdb_resources.valid_until + child_cert.reissue( + ca_detail = ca_detail, + resources = new_resources, + publisher = self.publisher) + + elif old_resources.valid_until < self.now: + rpki.log.debug("Child %s certificate SKI %s has expired: cert.valid_until %s, irdb.valid_until %s" + % (self.child.child_handle, child_cert.cert.gSKI(), + old_resources.valid_until, irdb_resources.valid_until)) + child_cert.sql_delete() + self.publisher.withdraw( + cls = rpki.publication.certificate_elt, + uri = child_cert.uri, + obj = child_cert.cert, + repository = ca.parent.repository) + ca_detail.generate_manifest(publisher = self.publisher) + + except (SystemExit, rpki.async.ExitNow): + raise + except Exception, e: + self.gctx.checkpoint() + self.lose(e) + else: + self.gctx.checkpoint() + self.gctx.sql.sweep() + self.iterator() + + def done(self): + self.gctx.checkpoint() + self.gctx.sql.sweep() + self.publisher.call_pubd(self.exit, self.publication_failed) + + def publication_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't publish for %s, skipping: %s" % (self.self_handle, e)) + self.gctx.checkpoint() + self.exit() + + +@queue_task +class 
UpdateROAsTask(AbstractTask): + """ + Generate or update ROAs for this self. + """ + + def clear(self): + self.orphans = None + self.updates = None + self.publisher = None + self.ca_details = None + self.count = None + + def start(self): + rpki.log.trace() + self.gctx.checkpoint() + self.gctx.sql.sweep() + rpki.log.debug("Self %s[%d] updating ROAs" % (self.self_handle, self.self_id)) + + rpki.log.debug("Issuing query for ROA requests") + self.gctx.irdb_query_roa_requests(self.self_handle, self.got_roa_requests, self.roa_requests_failed) + + def got_roa_requests(self, roa_requests): + self.gctx.checkpoint() + rpki.log.debug("Received response to query for ROA requests") + + if self.gctx.sql.dirty: + rpki.log.warn("Unexpected dirty SQL cache, flushing") + self.gctx.sql.sweep() + + roas = {} + seen = set() + self.orphans = [] + self.updates = [] + self.publisher = rpki.rpkid.publication_queue() + self.ca_details = set() + + for roa in self.roas: + k = (roa.asn, str(roa.ipv4), str(roa.ipv6)) + if k not in roas: + roas[k] = roa + elif (roa.roa is not None and roa.cert is not None and roa.ca_detail is not None and roa.ca_detail.state == "active" and + (roas[k].roa is None or roas[k].cert is None or roas[k].ca_detail is None or roas[k].ca_detail.state != "active")): + self.orphans.append(roas[k]) + roas[k] = roa + else: + self.orphans.append(roa) + + for roa_request in roa_requests: + k = (roa_request.asn, str(roa_request.ipv4), str(roa_request.ipv6)) + if k in seen: + rpki.log.warn("Skipping duplicate ROA request %r" % roa_request) + else: + seen.add(k) + roa = roas.pop(k, None) + if roa is None: + roa = rpki.rpkid.roa_obj(self.gctx, self.self_id, roa_request.asn, roa_request.ipv4, roa_request.ipv6) + rpki.log.debug("Created new %r" % roa) + else: + rpki.log.debug("Found existing %r" % roa) + self.updates.append(roa) + + self.orphans.extend(roas.itervalues()) + + if self.overdue: + self.postpone(self.begin_loop) + else: + self.begin_loop() + + def begin_loop(self): + 
self.count = 0 + rpki.async.iterator(self.updates, self.loop, self.done, pop_list = True) + + def loop(self, iterator, roa): + self.gctx.checkpoint() + try: + roa.update(publisher = self.publisher, fast = True) + self.ca_details.add(roa.ca_detail) + self.gctx.sql.sweep() + except (SystemExit, rpki.async.ExitNow): + raise + except rpki.exceptions.NoCoveringCertForROA: + rpki.log.warn("No covering certificate for %r, skipping" % roa) + except Exception, e: + rpki.log.traceback() + rpki.log.warn("Could not update %r, skipping: %s" % (roa, e)) + self.count += 1 + if self.overdue: + self.publish(lambda: self.postpone(iterator)) + else: + iterator() + + def publish(self, done): + if not self.publisher.empty(): + for ca_detail in self.ca_details: + rpki.log.debug("Generating new CRL for %r" % ca_detail) + ca_detail.generate_crl(publisher = self.publisher) + rpki.log.debug("Generating new manifest for %r" % ca_detail) + ca_detail.generate_manifest(publisher = self.publisher) + self.ca_details.clear() + self.gctx.sql.sweep() + self.gctx.checkpoint() + self.publisher.call_pubd(done, self.publication_failed) + + def publication_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't publish for %s, skipping: %s" % (self.self_handle, e)) + self.gctx.checkpoint() + self.exit() + + def done(self): + for roa in self.orphans: + try: + self.ca_details.add(roa.ca_detail) + roa.revoke(publisher = self.publisher, fast = True) + except (SystemExit, rpki.async.ExitNow): + raise + except Exception, e: + rpki.log.traceback() + rpki.log.warn("Could not revoke %r: %s" % (roa, e)) + self.gctx.sql.sweep() + self.gctx.checkpoint() + self.publish(self.exit) + + def roa_requests_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Could not fetch ROA requests for %s, skipping: %s" % (self.self_handle, e)) + self.exit() + + +@queue_task +class UpdateGhostbustersTask(AbstractTask): + """ + Generate or update Ghostbuster records for this self. 
+ + This was originally based on the ROA update code. It's possible + that both could benefit from refactoring, but at this point the + potential scaling issues for ROAs completely dominate structure of + the ROA code, and aren't relevant here unless someone is being + exceptionally silly. + """ + + def start(self): + rpki.log.trace() + self.gctx.checkpoint() + rpki.log.debug("Self %s[%d] updating Ghostbuster records" % (self.self_handle, self.self_id)) + + self.gctx.irdb_query_ghostbuster_requests(self.self_handle, + (p.parent_handle for p in self.parents), + self.got_ghostbuster_requests, + self.ghostbuster_requests_failed) + + def got_ghostbuster_requests(self, ghostbuster_requests): + + try: + self.gctx.checkpoint() + if self.gctx.sql.dirty: + rpki.log.warn("Unexpected dirty SQL cache, flushing") + self.gctx.sql.sweep() + + ghostbusters = {} + orphans = [] + publisher = rpki.rpkid.publication_queue() + ca_details = set() + seen = set() + + parents = dict((p.parent_handle, p) for p in self.parents) + + for ghostbuster in self.ghostbusters: + k = (ghostbuster.ca_detail_id, ghostbuster.vcard) + if ghostbuster.ca_detail.state != "active" or k in ghostbusters: + orphans.append(ghostbuster) + else: + ghostbusters[k] = ghostbuster + + for ghostbuster_request in ghostbuster_requests: + if ghostbuster_request.parent_handle not in parents: + rpki.log.warn("Unknown parent_handle %r in Ghostbuster request, skipping" % ghostbuster_request.parent_handle) + continue + k = (ghostbuster_request.parent_handle, ghostbuster_request.vcard) + if k in seen: + rpki.log.warn("Skipping duplicate Ghostbuster request %r" % ghostbuster_request) + continue + seen.add(k) + for ca in parents[ghostbuster_request.parent_handle].cas: + ca_detail = ca.active_ca_detail + if ca_detail is not None: + ghostbuster = ghostbusters.pop((ca_detail.ca_detail_id, ghostbuster_request.vcard), None) + if ghostbuster is None: + ghostbuster = rpki.rpkid.ghostbuster_obj(self.gctx, self.self_id, 
ca_detail.ca_detail_id, ghostbuster_request.vcard) + rpki.log.debug("Created new %r for %r" % (ghostbuster, ghostbuster_request.parent_handle)) + else: + rpki.log.debug("Found existing %r for %s" % (ghostbuster, ghostbuster_request.parent_handle)) + ghostbuster.update(publisher = publisher, fast = True) + ca_details.add(ca_detail) + + orphans.extend(ghostbusters.itervalues()) + for ghostbuster in orphans: + ca_details.add(ghostbuster.ca_detail) + ghostbuster.revoke(publisher = publisher, fast = True) + + for ca_detail in ca_details: + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + + self.gctx.sql.sweep() + + self.gctx.checkpoint() + publisher.call_pubd(self.exit, self.publication_failed) + + except (SystemExit, rpki.async.ExitNow): + raise + except Exception, e: + rpki.log.traceback() + rpki.log.warn("Could not update Ghostbuster records for %s, skipping: %s" % (self.self_handle, e)) + self.exit() + + def publication_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't publish Ghostbuster updates for %s, skipping: %s" % (self.self_handle, e)) + self.gctx.checkpoint() + self.exit() + + def ghostbuster_requests_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Could not fetch Ghostbuster record requests for %s, skipping: %s" % (self.self_handle, e)) + self.exit() + + +@queue_task +class UpdateEECertificatesTask(AbstractTask): + """ + Generate or update EE certificates for this self. + + Not yet sure what kind of scaling constraints this task might have, + so keeping it simple for initial version, we can optimize later. 
+ """ + + def start(self): + rpki.log.trace() + self.gctx.checkpoint() + rpki.log.debug("Self %s[%d] updating EE certificates" % (self.self_handle, self.self_id)) + + self.gctx.irdb_query_ee_certificate_requests(self.self_handle, + self.got_requests, + self.get_requests_failed) + + def got_requests(self, requests): + + try: + self.gctx.checkpoint() + if self.gctx.sql.dirty: + rpki.log.warn("Unexpected dirty SQL cache, flushing") + self.gctx.sql.sweep() + + publisher = rpki.rpkid.publication_queue() + + existing = dict() + for ee in self.ee_certificates: + gski = ee.gski + if gski not in existing: + existing[gski] = set() + existing[gski].add(ee) + + ca_details = set() + + for req in requests: + ees = existing.pop(req.gski, ()) + resources = rpki.resource_set.resource_bag( + asn = req.asn, + v4 = req.ipv4, + v6 = req.ipv6, + valid_until = req.valid_until) + covering = self.find_covering_ca_details(resources) + ca_details.update(covering) + + for ee in ees: + if ee.ca_detail in covering: + rpki.log.debug("Updating existing EE certificate for %s %s" % (req.gski, resources)) + ee.reissue( + resources = resources, + publisher = publisher) + covering.remove(ee.ca_detail) + else: + rpki.log.debug("Existing EE certificate for %s %s is no longer covered" % (req.gski, resources)) + ee.revoke(publisher = publisher) + + for ca_detail in covering: + rpki.log.debug("No existing EE certificate for %s %s" % (req.gski, resources)) + rpki.rpkid.ee_cert_obj.create( + ca_detail = ca_detail, + subject_name = rpki.x509.X501DN.from_cn(req.cn, req.sn), + subject_key = req.pkcs10.getPublicKey(), + resources = resources, + publisher = publisher, + eku = req.eku or None) + + # Anything left is an orphan + for ees in existing.values(): + for ee in ees: + ca_details.add(ee.ca_detail) + ee.revoke(publisher = publisher) + + self.gctx.sql.sweep() + + for ca_detail in ca_details: + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + + 
self.gctx.sql.sweep() + + self.gctx.checkpoint() + publisher.call_pubd(self.exit, self.publication_failed) + + except (SystemExit, rpki.async.ExitNow): + raise + except Exception, e: + rpki.log.traceback() + rpki.log.warn("Could not update EE certificates for %s, skipping: %s" % (self.self_handle, e)) + self.exit() + + def publication_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't publish EE certificate updates for %s, skipping: %s" % (self.self_handle, e)) + self.gctx.checkpoint() + self.exit() + + def get_requests_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Could not fetch EE certificate requests for %s, skipping: %s" % (self.self_handle, e)) + self.exit() + + +@queue_task +class RegenerateCRLsAndManifestsTask(AbstractTask): + """ + Generate new CRLs and manifests as necessary for all of this self's + CAs. Extracting nextUpdate from a manifest is hard at the moment + due to implementation silliness, so for now we generate a new + manifest whenever we generate a new CRL + + This code also cleans up tombstones left behind by revoked ca_detail + objects, since we're walking through the relevant portions of the + database anyway. 
+ """ + + def start(self): + rpki.log.trace() + self.gctx.checkpoint() + rpki.log.debug("Self %s[%d] regenerating CRLs and manifests" % (self.self_handle, self.self_id)) + + now = rpki.sundial.now() + crl_interval = rpki.sundial.timedelta(seconds = self.crl_interval) + regen_margin = max(self.gctx.cron_period * 2, crl_interval / 4) + publisher = rpki.rpkid.publication_queue() + + for parent in self.parents: + for ca in parent.cas: + try: + for ca_detail in ca.revoked_ca_details: + if now > ca_detail.latest_crl.getNextUpdate(): + ca_detail.delete(ca = ca, publisher = publisher) + for ca_detail in ca.active_or_deprecated_ca_details: + if now + regen_margin > ca_detail.latest_crl.getNextUpdate(): + ca_detail.generate_crl(publisher = publisher) + ca_detail.generate_manifest(publisher = publisher) + except (SystemExit, rpki.async.ExitNow): + raise + except Exception, e: + rpki.log.traceback() + rpki.log.warn("Couldn't regenerate CRLs and manifests for CA %r, skipping: %s" % (ca, e)) + + self.gctx.checkpoint() + self.gctx.sql.sweep() + publisher.call_pubd(self.exit, self.lose) + + def lose(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't publish updated CRLs and manifests for self %r, skipping: %s" % (self.self_handle, e)) + self.gctx.checkpoint() + self.exit() + + +@queue_task +class CheckFailedPublication(AbstractTask): + """ + Periodic check for objects we tried to publish but failed (eg, due + to pubd being down or unreachable). 
+ """ + + def start(self): + rpki.log.trace() + publisher = rpki.rpkid.publication_queue() + for parent in self.parents: + for ca in parent.cas: + ca_detail = ca.active_ca_detail + if ca_detail is not None: + ca_detail.check_failed_publication(publisher) + self.gctx.checkpoint() + self.gctx.sql.sweep() + publisher.call_pubd(self.exit, self.publication_failed) + + def publication_failed(self, e): + rpki.log.traceback() + rpki.log.warn("Couldn't publish for %s, skipping: %s" % (self.self_handle, e)) + self.gctx.checkpoint() + self.exit() diff --git a/rpki/sql.py b/rpki/sql.py new file mode 100644 index 00000000..c753278c --- /dev/null +++ b/rpki/sql.py @@ -0,0 +1,424 @@ +# $Id$ +# +# Copyright (C) 2009-2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +SQL interface code. +""" + +import weakref + +from rpki.mysql_import import (MySQLdb, _mysql_exceptions) + +import rpki.x509 +import rpki.resource_set +import rpki.sundial +import rpki.log + +class session(object): + """ + SQL session layer. + """ + + ## @var ping_threshold + # Timeout after which we should issue a ping command before the real + # one. Intent is to keep the MySQL connection alive without pinging + # before every single command. + + ping_threshold = rpki.sundial.timedelta(seconds = 60) + + def __init__(self, cfg): + + self.username = cfg.get("sql-username") + self.database = cfg.get("sql-database") + self.password = cfg.get("sql-password") + + self.conv = MySQLdb.converters.conversions.copy() + self.conv.update({ + rpki.sundial.datetime : MySQLdb.converters.DateTime2literal, + MySQLdb.converters.FIELD_TYPE.DATETIME : rpki.sundial.datetime.DateTime_or_None }) + + self.cache = weakref.WeakValueDictionary() + self.dirty = set() + + self.connect() + + def connect(self): + self.db = MySQLdb.connect(user = self.username, + db = self.database, + passwd = self.password, + conv = self.conv) + self.cur = self.db.cursor() + self.db.autocommit(True) + self.timestamp = rpki.sundial.now() + + def close(self): + if self.cur: + self.cur.close() + self.cur = None + if self.db: + self.db.close() + self.db = None + + def _wrap_execute(self, func, query, args): + try: + now = rpki.sundial.now() + if now > self.timestamp + self.ping_threshold: + self.db.ping(True) + self.timestamp = now + return func(query, args) + except _mysql_exceptions.MySQLError: + if self.dirty: + rpki.log.warn("MySQL exception with dirty objects in SQL cache!") + raise 
+ + def execute(self, query, args = None): + return self._wrap_execute(self.cur.execute, query, args) + + def executemany(self, query, args): + return self._wrap_execute(self.cur.executemany, query, args) + + def fetchall(self): + return self.cur.fetchall() + + def lastrowid(self): + return self.cur.lastrowid + + def cache_clear(self): + """ + Clear the SQL object cache. Shouldn't be necessary now that the + cache uses weak references, but should be harmless. + """ + rpki.log.debug("Clearing SQL cache") + self.assert_pristine() + self.cache.clear() + + def assert_pristine(self): + """ + Assert that there are no dirty objects in the cache. + """ + assert not self.dirty, "Dirty objects in SQL cache: %s" % self.dirty + + def sweep(self): + """ + Write any dirty objects out to SQL. + """ + for s in self.dirty.copy(): + #if s.sql_cache_debug: + rpki.log.debug("Sweeping (%s) %r" % ("deleting" if s.sql_deleted else "storing", s)) + if s.sql_deleted: + s.sql_delete() + else: + s.sql_store() + self.assert_pristine() + +class template(object): + """ + SQL template generator. + """ + + def __init__(self, table_name, index_column, *data_columns): + """ + Build a SQL template. 
+ """ + type_map = dict((x[0], x[1]) for x in data_columns if isinstance(x, tuple)) + data_columns = tuple(isinstance(x, tuple) and x[0] or x for x in data_columns) + columns = (index_column,) + data_columns + self.table = table_name + self.index = index_column + self.columns = columns + self.map = type_map + self.select = "SELECT %s FROM %s" % (", ".join("%s.%s" % (table_name, c) for c in columns), table_name) + self.insert = "INSERT %s (%s) VALUES (%s)" % (table_name, + ", ".join(data_columns), + ", ".join("%(" + s + ")s" for s in data_columns)) + self.update = "UPDATE %s SET %s WHERE %s = %%(%s)s" % (table_name, + ", ".join(s + " = %(" + s + ")s" for s in data_columns), + index_column, + index_column) + self.delete = "DELETE FROM %s WHERE %s = %%s" % (table_name, index_column) + +class sql_persistent(object): + """ + Mixin for persistent class that needs to be stored in SQL. + """ + + ## @var sql_in_db + # Whether this object is already in SQL or not. + + sql_in_db = False + + ## @var sql_deleted + # Whether our cached copy of this object has been deleted. + + sql_deleted = False + + ## @var sql_debug + # Enable logging of SQL actions + + sql_debug = False + + ## @var sql_cache_debug + # Enable debugging of SQL cache actions + + sql_cache_debug = False + + @classmethod + def sql_fetch(cls, gctx, id): # pylint: disable=W0622 + """ + Fetch one object from SQL, based on its primary key. + + Since in this one case we know that the primary index is also the + cache key, we check for a cache hit directly in the hope of + bypassing the SQL lookup entirely. + + This method is usually called via a one-line class-specific + wrapper. As a convenience, we also accept an id of None, and just + return None in this case. 
+ """ + + if id is None: + return None + assert isinstance(id, (int, long)), "id should be an integer, was %r" % type(id) + key = (cls, id) + if key in gctx.sql.cache: + return gctx.sql.cache[key] + else: + return cls.sql_fetch_where1(gctx, "%s = %%s" % cls.sql_template.index, (id,)) + + @classmethod + def sql_fetch_where1(cls, gctx, where, args = None, also_from = None): + """ + Fetch one object from SQL, based on an arbitrary SQL WHERE expression. + """ + results = cls.sql_fetch_where(gctx, where, args, also_from) + if len(results) == 0: + return None + elif len(results) == 1: + return results[0] + else: + raise rpki.exceptions.DBConsistancyError, \ + "Database contained multiple matches for %s where %s: %r" % \ + (cls.__name__, where % tuple(repr(a) for a in args), results) + + @classmethod + def sql_fetch_all(cls, gctx): + """ + Fetch all objects of this type from SQL. + """ + return cls.sql_fetch_where(gctx, None) + + @classmethod + def sql_fetch_where(cls, gctx, where, args = None, also_from = None): + """ + Fetch objects of this type matching an arbitrary SQL WHERE expression. + """ + if where is None: + assert args is None and also_from is None + if cls.sql_debug: + rpki.log.debug("sql_fetch_where(%r)" % cls.sql_template.select) + gctx.sql.execute(cls.sql_template.select) + else: + query = cls.sql_template.select + if also_from is not None: + query += "," + also_from + query += " WHERE " + where + if cls.sql_debug: + rpki.log.debug("sql_fetch_where(%r, %r)" % (query, args)) + gctx.sql.execute(query, args) + results = [] + for row in gctx.sql.fetchall(): + key = (cls, row[0]) + if key in gctx.sql.cache: + results.append(gctx.sql.cache[key]) + else: + results.append(cls.sql_init(gctx, row, key)) + return results + + @classmethod + def sql_init(cls, gctx, row, key): + """ + Initialize one Python object from the result of a SQL query. 
+ """ + self = cls() + self.gctx = gctx + self.sql_decode(dict(zip(cls.sql_template.columns, row))) + gctx.sql.cache[key] = self + self.sql_in_db = True + self.sql_fetch_hook() + return self + + def sql_mark_dirty(self): + """ + Mark this object as needing to be written back to SQL. + """ + if self.sql_cache_debug and not self.sql_is_dirty: + rpki.log.debug("Marking %r SQL dirty" % self) + self.gctx.sql.dirty.add(self) + + def sql_mark_clean(self): + """ + Mark this object as not needing to be written back to SQL. + """ + if self.sql_cache_debug and self.sql_is_dirty: + rpki.log.debug("Marking %r SQL clean" % self) + self.gctx.sql.dirty.discard(self) + + @property + def sql_is_dirty(self): + """ + Query whether this object needs to be written back to SQL. + """ + return self in self.gctx.sql.dirty + + def sql_mark_deleted(self): + """ + Mark this object as needing to be deleted in SQL. + """ + self.sql_deleted = True + self.sql_mark_dirty() + + def sql_store(self): + """ + Store this object to SQL. + """ + args = self.sql_encode() + if not self.sql_in_db: + if self.sql_debug: + rpki.log.debug("sql_store(%r, %r)" % (self.sql_template.insert, args)) + self.gctx.sql.execute(self.sql_template.insert, args) + setattr(self, self.sql_template.index, self.gctx.sql.lastrowid()) + self.gctx.sql.cache[(self.__class__, self.gctx.sql.lastrowid())] = self + self.sql_insert_hook() + else: + if self.sql_debug: + rpki.log.debug("sql_store(%r, %r)" % (self.sql_template.update, args)) + self.gctx.sql.execute(self.sql_template.update, args) + self.sql_update_hook() + key = (self.__class__, getattr(self, self.sql_template.index)) + assert key in self.gctx.sql.cache and self.gctx.sql.cache[key] == self + self.sql_mark_clean() + self.sql_in_db = True + + def sql_delete(self): + """ + Delete this object from SQL. 
+ """ + if self.sql_in_db: + id = getattr(self, self.sql_template.index) # pylint: disable=W0622 + if self.sql_debug: + rpki.log.debug("sql_delete(%r, %r)" % (self.sql_template.delete, id)) + self.sql_delete_hook() + self.gctx.sql.execute(self.sql_template.delete, id) + key = (self.__class__, id) + if self.gctx.sql.cache.get(key) == self: + del self.gctx.sql.cache[key] + self.sql_in_db = False + self.sql_mark_clean() + + def sql_encode(self): + """ + Convert object attributes into a dict for use with canned SQL + queries. This is a default version that assumes a one-to-one + mapping between column names in SQL and attribute names in Python. + If you need something fancier, override this. + """ + d = dict((a, getattr(self, a, None)) for a in self.sql_template.columns) + for i in self.sql_template.map: + if d.get(i) is not None: + d[i] = self.sql_template.map[i].to_sql(d[i]) + return d + + def sql_decode(self, vals): + """ + Initialize an object with values returned by self.sql_fetch(). + This is a default version that assumes a one-to-one mapping + between column names in SQL and attribute names in Python. If you + need something fancier, override this. + """ + for a in self.sql_template.columns: + if vals.get(a) is not None and a in self.sql_template.map: + setattr(self, a, self.sql_template.map[a].from_sql(vals[a])) + else: + setattr(self, a, vals[a]) + + def sql_fetch_hook(self): + """ + Customization hook. + """ + pass + + def sql_insert_hook(self): + """ + Customization hook. + """ + pass + + def sql_update_hook(self): + """ + Customization hook. + """ + self.sql_delete_hook() + self.sql_insert_hook() + + def sql_delete_hook(self): + """ + Customization hook. + """ + pass + + +def cache_reference(func): + """ + Decorator for use with property methods which just do an SQL lookup based on an ID. + Check for an existing reference to the object, just return that if we find it, + otherwise perform the SQL lookup. 
+ + Not 100% certain this is a good idea, but I //think// it should work well with the + current weak reference SQL cache, so long as we create no circular references. + So don't do that. + """ + + attr_name = "_" + func.__name__ + + def wrapped(self): + try: + value = getattr(self, attr_name) + assert value is not None + except AttributeError: + value = func(self) + if value is not None: + setattr(self, attr_name, value) + return value + + wrapped.__name__ = func.__name__ + wrapped.__doc__ = func.__doc__ + wrapped.__dict__.update(func.__dict__) + + return wrapped diff --git a/rpki/sql_schemas.py b/rpki/sql_schemas.py new file mode 100644 index 00000000..e57c7a7f --- /dev/null +++ b/rpki/sql_schemas.py @@ -0,0 +1,319 @@ +# Automatically generated, do not edit. + +## @var rpkid +## SQL schema rpkid +rpkid = '''-- $Id: rpkid.sql 5753 2014-04-05 19:24:26Z sra $ + +-- Copyright (C) 2009--2011 Internet Systems Consortium ("ISC") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. 
+-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- SQL objects needed by the RPKI engine (rpkid.py). + +-- DROP TABLE commands must be in correct (reverse dependency) order +-- to satisfy FOREIGN KEY constraints. + +DROP TABLE IF EXISTS ee_cert; +DROP TABLE IF EXISTS ghostbuster; +DROP TABLE IF EXISTS roa_prefix; +DROP TABLE IF EXISTS roa; +DROP TABLE IF EXISTS revoked_cert; +DROP TABLE IF EXISTS child_cert; +DROP TABLE IF EXISTS child; +DROP TABLE IF EXISTS ca_detail; +DROP TABLE IF EXISTS ca; +DROP TABLE IF EXISTS parent; +DROP TABLE IF EXISTS repository; +DROP TABLE IF EXISTS bsc; +DROP TABLE IF EXISTS self; + +CREATE TABLE self ( + self_id SERIAL NOT NULL, + self_handle VARCHAR(255) NOT NULL, + use_hsm BOOLEAN NOT NULL DEFAULT FALSE, + crl_interval BIGINT UNSIGNED, + regen_margin BIGINT UNSIGNED, + bpki_cert LONGBLOB, + bpki_glue LONGBLOB, + PRIMARY KEY (self_id), + UNIQUE (self_handle) +) ENGINE=InnoDB; + +CREATE TABLE bsc ( + bsc_id SERIAL NOT NULL, + bsc_handle VARCHAR(255) NOT NULL, + private_key_id LONGBLOB, + pkcs10_request LONGBLOB, + hash_alg ENUM ('sha256'), + signing_cert LONGBLOB, + signing_cert_crl LONGBLOB, + self_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (bsc_id), + CONSTRAINT bsc_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + UNIQUE (self_id, bsc_handle) +) ENGINE=InnoDB; + +CREATE TABLE repository ( + repository_id SERIAL NOT NULL, + repository_handle VARCHAR(255) NOT NULL, + peer_contact_uri TEXT, + bpki_cert LONGBLOB, + bpki_glue LONGBLOB, + 
last_cms_timestamp DATETIME, + bsc_id BIGINT UNSIGNED NOT NULL, + self_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (repository_id), + CONSTRAINT repository_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + CONSTRAINT repository_bsc_id + FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, + UNIQUE (self_id, repository_handle) +) ENGINE=InnoDB; + +CREATE TABLE parent ( + parent_id SERIAL NOT NULL, + parent_handle VARCHAR(255) NOT NULL, + bpki_cms_cert LONGBLOB, + bpki_cms_glue LONGBLOB, + peer_contact_uri TEXT, + sia_base TEXT, + sender_name TEXT, + recipient_name TEXT, + last_cms_timestamp DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + bsc_id BIGINT UNSIGNED NOT NULL, + repository_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (parent_id), + CONSTRAINT parent_repository_id + FOREIGN KEY (repository_id) REFERENCES repository (repository_id) ON DELETE CASCADE, + CONSTRAINT parent_bsc_id + FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, + CONSTRAINT parent_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + UNIQUE (self_id, parent_handle) +) ENGINE=InnoDB; + +CREATE TABLE ca ( + ca_id SERIAL NOT NULL, + last_crl_sn BIGINT UNSIGNED NOT NULL, + last_manifest_sn BIGINT UNSIGNED NOT NULL, + next_manifest_update DATETIME, + next_crl_update DATETIME, + last_issued_sn BIGINT UNSIGNED NOT NULL, + sia_uri TEXT, + parent_resource_class TEXT, + parent_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ca_id), + CONSTRAINT ca_parent_id + FOREIGN KEY (parent_id) REFERENCES parent (parent_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE ca_detail ( + ca_detail_id SERIAL NOT NULL, + public_key LONGBLOB, + private_key_id LONGBLOB, + latest_crl LONGBLOB, + crl_published DATETIME, + latest_ca_cert LONGBLOB, + manifest_private_key_id LONGBLOB, + manifest_public_key LONGBLOB, + latest_manifest_cert LONGBLOB, + latest_manifest LONGBLOB, + manifest_published DATETIME, + state ENUM ('pending', 'active', 
'deprecated', 'revoked') NOT NULL, + ca_cert_uri TEXT, + ca_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ca_detail_id), + CONSTRAINT ca_detail_ca_id + FOREIGN KEY (ca_id) REFERENCES ca (ca_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE child ( + child_id SERIAL NOT NULL, + child_handle VARCHAR(255) NOT NULL, + bpki_cert LONGBLOB, + bpki_glue LONGBLOB, + last_cms_timestamp DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + bsc_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (child_id), + CONSTRAINT child_bsc_id + FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, + CONSTRAINT child_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + UNIQUE (self_id, child_handle) +) ENGINE=InnoDB; + +CREATE TABLE child_cert ( + child_cert_id SERIAL NOT NULL, + cert LONGBLOB NOT NULL, + published DATETIME, + ski TINYBLOB NOT NULL, + child_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (child_cert_id), + CONSTRAINT child_cert_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE, + CONSTRAINT child_cert_child_id + FOREIGN KEY (child_id) REFERENCES child (child_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE revoked_cert ( + revoked_cert_id SERIAL NOT NULL, + serial BIGINT UNSIGNED NOT NULL, + revoked DATETIME NOT NULL, + expires DATETIME NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (revoked_cert_id), + CONSTRAINT revoked_cert_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE roa ( + roa_id SERIAL NOT NULL, + asn BIGINT UNSIGNED NOT NULL, + cert LONGBLOB NOT NULL, + roa LONGBLOB NOT NULL, + published DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (roa_id), + CONSTRAINT roa_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + CONSTRAINT roa_ca_detail_id + FOREIGN KEY (ca_detail_id) 
REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE roa_prefix ( + prefix VARCHAR(40) NOT NULL, + prefixlen TINYINT UNSIGNED NOT NULL, + max_prefixlen TINYINT UNSIGNED NOT NULL, + version TINYINT UNSIGNED NOT NULL, + roa_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (roa_id, prefix, prefixlen, max_prefixlen), + CONSTRAINT roa_prefix_roa_id + FOREIGN KEY (roa_id) REFERENCES roa (roa_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE ghostbuster ( + ghostbuster_id SERIAL NOT NULL, + vcard LONGBLOB NOT NULL, + cert LONGBLOB NOT NULL, + ghostbuster LONGBLOB NOT NULL, + published DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ghostbuster_id), + CONSTRAINT ghostbuster_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + CONSTRAINT ghostbuster_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE ee_cert ( + ee_cert_id SERIAL NOT NULL, + ski BINARY(20) NOT NULL, + cert LONGBLOB NOT NULL, + published DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ee_cert_id), + CONSTRAINT ee_cert_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + CONSTRAINT ee_cert_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +-- Local Variables: +-- indent-tabs-mode: nil +-- End: +''' + +## @var pubd +## SQL schema pubd +pubd = '''-- $Id: pubd.sql 3465 2010-10-07 00:59:39Z sra $ + +-- Copyright (C) 2009--2010 Internet Systems Consortium ("ISC") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. 
+-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- Copyright (C) 2008 American Registry for Internet Numbers ("ARIN") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- SQL objects needed by pubd.py. + +-- The config table is weird because we're really only using it +-- to store one BPKI CRL, but putting this here lets us use a lot of +-- existing machinery and the alternatives are whacky in other ways. 
+ +DROP TABLE IF EXISTS client; +DROP TABLE IF EXISTS config; + +CREATE TABLE config ( + config_id SERIAL NOT NULL, + bpki_crl LONGBLOB, + PRIMARY KEY (config_id) +) ENGINE=InnoDB; + +CREATE TABLE client ( + client_id SERIAL NOT NULL, + client_handle VARCHAR(255) NOT NULL, + base_uri TEXT, + bpki_cert LONGBLOB, + bpki_glue LONGBLOB, + last_cms_timestamp DATETIME, + PRIMARY KEY (client_id), + UNIQUE (client_handle) +) ENGINE=InnoDB; + +-- Local Variables: +-- indent-tabs-mode: nil +-- End: +''' + diff --git a/rpki/sundial.py b/rpki/sundial.py new file mode 100644 index 00000000..0825d61b --- /dev/null +++ b/rpki/sundial.py @@ -0,0 +1,289 @@ +# $Id$ +# +# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Unified RPKI date/time handling, based on the standard Python datetime module. + +Module name chosen to sidestep a nightmare of import-related errors +that occur with the more obvious module names. + +List of arithmetic methods that require result casting was derived by +inspection of the datetime module, to wit: + + >>> import datetime + >>> for t in (datetime.datetime, datetime.timedelta): + ... for k in t.__dict__.keys(): + ... if k.startswith("__"): + ... print "%s.%s()" % (t.__name__, k) +""" + +import datetime as pydatetime +import re + +def now(): + """ + Get current timestamp. + """ + return datetime.utcnow() + +class ParseFailure(Exception): + """ + Parse failure constructing timedelta. + """ + +class datetime(pydatetime.datetime): + """ + RPKI extensions to standard datetime.datetime class. All work here + is in UTC, so we use naive datetime objects. + """ + + def totimestamp(self): + """ + Convert to seconds from epoch (like time.time()). Conversion + method is a bit silly, but avoids time module timezone whackiness. + """ + return int(self.strftime("%s")) + + @classmethod + def fromXMLtime(cls, x): + """ + Convert from XML time representation. + """ + if x is None: + return None + else: + return cls.strptime(x, "%Y-%m-%dT%H:%M:%SZ") + + def toXMLtime(self): + """ + Convert to XML time representation. + """ + return self.strftime("%Y-%m-%dT%H:%M:%SZ") + + def __str__(self): + return self.toXMLtime() + + @classmethod + def from_datetime(cls, x): + """ + Convert a datetime.datetime object into this subclass. This is + whacky due to the weird constructors for datetime. 
+ """ + return cls.combine(x.date(), x.time()) + + def to_datetime(self): + """ + Convert to a datetime.datetime object. In most cases this + shouldn't be necessary, but convincing SQL interfaces to use + subclasses of datetime can be hard. + """ + return pydatetime.datetime(year = self.year, month = self.month, day = self.day, + hour = self.hour, minute = self.minute, second = self.second, + microsecond = 0, tzinfo = None) + + + @classmethod + def fromOpenSSL(cls, x): + """ + Convert from the format OpenSSL's command line tool uses into this + subclass. May require rewriting if we run into locale problems. + """ + if x.startswith("notBefore=") or x.startswith("notAfter="): + x = x.partition("=")[2] + return cls.strptime(x, "%b %d %H:%M:%S %Y GMT") + + @classmethod + def from_sql(cls, x): + """ + Convert from SQL storage format. + """ + return cls.from_datetime(x) + + def to_sql(self): + """ + Convert to SQL storage format. + """ + return self.to_datetime() + + def later(self, other): + """ + Return the later of two timestamps. + """ + return other if other > self else self + + def earlier(self, other): + """ + Return the earlier of two timestamps. + """ + return other if other < self else self + + def __add__(self, y): return _cast(pydatetime.datetime.__add__(self, y)) + def __radd__(self, y): return _cast(pydatetime.datetime.__radd__(self, y)) + def __rsub__(self, y): return _cast(pydatetime.datetime.__rsub__(self, y)) + def __sub__(self, y): return _cast(pydatetime.datetime.__sub__(self, y)) + + @classmethod + def DateTime_or_None(cls, s): + """ + MySQLdb converter. Parse as this class if we can, let the default + MySQLdb DateTime_or_None() converter deal with failure cases. 
+ """ + + for sep in " T": + d, _, t = s.partition(sep) + if t: + try: + return cls(*[int(x) for x in d.split("-") + t.split(":")]) + except: + break + + from rpki.mysql_import import MySQLdb + return MySQLdb.times.DateTime_or_None(s) + +class timedelta(pydatetime.timedelta): + """ + Timedelta with text parsing. This accepts two input formats: + + - A simple integer, indicating a number of seconds. + + - A string of the form "uY vW wD xH yM zS" where u, v, w, x, y, and z + are integers and Y, W, D, H, M, and S indicate years, weeks, days, + hours, minutes, and seconds. All of the fields are optional, but + at least one must be specified. Eg,"3D4H" means "three days plus + four hours". + + There is no "months" format, because the definition of a month is too + fuzzy to be useful (what day is six months from August 30th?) + + Similarly, the "years" conversion may produce surprising results, as + "one year" in conventional English does not refer to a fixed interval + but rather a fixed (and in some cases undefined) offset within the + Gregorian calendar (what day is one year from February 29th?) 1Y as + implemented by this code refers to a specific number of seconds. + If you mean 365 days or 52 weeks, say that instead. + """ + + ## @var regexp + # Hideously ugly regular expression to parse the complex text form. + # Tags are intended for use with re.MatchObject.groupdict() and map + # directly to the keywords expected by the timedelta constructor. + + regexp = re.compile("\\s*".join(("^", + "(?:(?P\\d+)Y)?", + "(?:(?P\\d+)W)?", + "(?:(?P\\d+)D)?", + "(?:(?P\\d+)H)?", + "(?:(?P\\d+)M)?", + "(?:(?P\\d+)S)?", + "$")), + re.I) + + ## @var years_to_seconds + # Conversion factor from years to seconds (value furnished by the + # "units" program). + + years_to_seconds = 31556926 + + @classmethod + def parse(cls, arg): + """ + Parse text into a timedelta object. 
+ """ + if not isinstance(arg, str): + return cls(seconds = arg) + elif arg.isdigit(): + return cls(seconds = int(arg)) + else: + match = cls.regexp.match(arg) + if match: + #return cls(**dict((k, int(v)) for (k, v) in match.groupdict().items() if v is not None)) + d = match.groupdict("0") + for k, v in d.iteritems(): + d[k] = int(v) + d["days"] += d.pop("weeks") * 7 + d["seconds"] += d.pop("years") * cls.years_to_seconds + return cls(**d) + else: + raise ParseFailure, "Couldn't parse timedelta %r" % (arg,) + + def convert_to_seconds(self): + """ + Convert a timedelta interval to seconds. + """ + return self.days * 24 * 60 * 60 + self.seconds + + @classmethod + def fromtimedelta(cls, x): + """ + Convert a datetime.timedelta object into this subclass. + """ + return cls(days = x.days, seconds = x.seconds, microseconds = x.microseconds) + + def __abs__(self): return _cast(pydatetime.timedelta.__abs__(self)) + def __add__(self, x): return _cast(pydatetime.timedelta.__add__(self, x)) + def __div__(self, x): return _cast(pydatetime.timedelta.__div__(self, x)) + def __floordiv__(self, x): return _cast(pydatetime.timedelta.__floordiv__(self, x)) + def __mul__(self, x): return _cast(pydatetime.timedelta.__mul__(self, x)) + def __neg__(self): return _cast(pydatetime.timedelta.__neg__(self)) + def __pos__(self): return _cast(pydatetime.timedelta.__pos__(self)) + def __radd__(self, x): return _cast(pydatetime.timedelta.__radd__(self, x)) + def __rdiv__(self, x): return _cast(pydatetime.timedelta.__rdiv__(self, x)) + def __rfloordiv__(self, x): return _cast(pydatetime.timedelta.__rfloordiv__(self, x)) + def __rmul__(self, x): return _cast(pydatetime.timedelta.__rmul__(self, x)) + def __rsub__(self, x): return _cast(pydatetime.timedelta.__rsub__(self, x)) + def __sub__(self, x): return _cast(pydatetime.timedelta.__sub__(self, x)) + +def _cast(x): + """ + Cast result of arithmetic operations back into correct subtype. 
+ """ + if isinstance(x, pydatetime.datetime): + return datetime.from_datetime(x) + if isinstance(x, pydatetime.timedelta): + return timedelta.fromtimedelta(x) + return x + +if __name__ == "__main__": + + def test(t): + print + print "str: ", t + print "repr: ", repr(t) + print "seconds since epoch:", t.strftime("%s") + print "XMLtime: ", t.toXMLtime() + print + + print + print "Testing time conversion routines" + test(now()) + test(now() + timedelta(days = 30)) + test(now() + timedelta.parse("3d5s")) + test(now() + timedelta.parse(" 3d 5s ")) + test(now() + timedelta.parse("1y3d5h")) diff --git a/rpki/up_down.py b/rpki/up_down.py new file mode 100644 index 00000000..d2ad85d3 --- /dev/null +++ b/rpki/up_down.py @@ -0,0 +1,732 @@ +# $Id$ +# +# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +RPKI "up-down" protocol. 
+""" + +import base64 +import lxml.etree +import rpki.resource_set +import rpki.x509 +import rpki.exceptions +import rpki.log +import rpki.xml_utils +import rpki.relaxng + +xmlns = "http://www.apnic.net/specs/rescerts/up-down/" + +nsmap = { None : xmlns } + +class base_elt(object): + """ + Generic PDU object. + + Virtual class, just provides some default methods. + """ + + def startElement(self, stack, name, attrs): + """ + Ignore startElement() if there's no specific handler. + + Some elements have no attributes and we only care about their + text content. + """ + pass + + def endElement(self, stack, name, text): + """ + Ignore endElement() if there's no specific handler. + + If we don't need to do anything else, just pop the stack. + """ + stack.pop() + + def make_elt(self, name, *attrs): + """ + Construct a element, copying over a set of attributes. + """ + elt = lxml.etree.Element("{%s}%s" % (xmlns, name), nsmap=nsmap) + for key in attrs: + val = getattr(self, key, None) + if val is not None: + elt.set(key, str(val)) + return elt + + def make_b64elt(self, elt, name, value): + """ + Construct a sub-element with Base64 text content. + """ + if value is not None and not value.empty(): + lxml.etree.SubElement(elt, "{%s}%s" % (xmlns, name), nsmap=nsmap).text = value.get_Base64() + + def serve_pdu(self, q_msg, r_msg, child, callback, errback): + """ + Default PDU handler to catch unexpected types. + """ + raise rpki.exceptions.BadQuery("Unexpected query type %s" % q_msg.type) + + def check_response(self): + """ + Placeholder for response checking. + """ + pass + +class multi_uri(list): + """ + Container for a set of URIs. + """ + + def __init__(self, ini): + """ + Initialize a set of URIs, which includes basic some syntax checking. 
+ """ + list.__init__(self) + if isinstance(ini, (list, tuple)): + self[:] = ini + elif isinstance(ini, str): + self[:] = ini.split(",") + for s in self: + if s.strip() != s or "://" not in s: + raise rpki.exceptions.BadURISyntax("Bad URI \"%s\"" % s) + else: + raise TypeError + + def __str__(self): + """ + Convert a multi_uri back to a string representation. + """ + return ",".join(self) + + def rsync(self): + """ + Find first rsync://... URI in self. + """ + for s in self: + if s.startswith("rsync://"): + return s + return None + +class certificate_elt(base_elt): + """ + Up-Down protocol representation of an issued certificate. + """ + + def startElement(self, stack, name, attrs): + """ + Handle attributes of element. + """ + assert name == "certificate", "Unexpected name %s, stack %s" % (name, stack) + self.cert_url = multi_uri(attrs["cert_url"]) + self.req_resource_set_as = rpki.resource_set.resource_set_as(attrs.get("req_resource_set_as")) + self.req_resource_set_ipv4 = rpki.resource_set.resource_set_ipv4(attrs.get("req_resource_set_ipv4")) + self.req_resource_set_ipv6 = rpki.resource_set.resource_set_ipv6(attrs.get("req_resource_set_ipv6")) + + def endElement(self, stack, name, text): + """ + Handle text content of a element. + """ + assert name == "certificate", "Unexpected name %s, stack %s" % (name, stack) + self.cert = rpki.x509.X509(Base64 = text) + stack.pop() + + def toXML(self): + """ + Generate a element. + """ + elt = self.make_elt("certificate", "cert_url", + "req_resource_set_as", "req_resource_set_ipv4", "req_resource_set_ipv6") + elt.text = self.cert.get_Base64() + return elt + +class class_elt(base_elt): + """ + Up-Down protocol representation of a resource class. + """ + + issuer = None + + def __init__(self): + """ + Initialize class_elt. + """ + base_elt.__init__(self) + self.certs = [] + + def startElement(self, stack, name, attrs): + """ + Handle elements and their children. 
+ """ + if name == "certificate": + cert = certificate_elt() + self.certs.append(cert) + stack.append(cert) + cert.startElement(stack, name, attrs) + elif name != "issuer": + assert name == "class", "Unexpected name %s, stack %s" % (name, stack) + self.class_name = attrs["class_name"] + self.cert_url = multi_uri(attrs["cert_url"]) + self.suggested_sia_head = attrs.get("suggested_sia_head") + self.resource_set_as = rpki.resource_set.resource_set_as(attrs["resource_set_as"]) + self.resource_set_ipv4 = rpki.resource_set.resource_set_ipv4(attrs["resource_set_ipv4"]) + self.resource_set_ipv6 = rpki.resource_set.resource_set_ipv6(attrs["resource_set_ipv6"]) + self.resource_set_notafter = rpki.sundial.datetime.fromXMLtime(attrs.get("resource_set_notafter")) + + def endElement(self, stack, name, text): + """ + Handle elements and their children. + """ + if name == "issuer": + self.issuer = rpki.x509.X509(Base64 = text) + else: + assert name == "class", "Unexpected name %s, stack %s" % (name, stack) + stack.pop() + + def toXML(self): + """ + Generate a element. + """ + elt = self.make_elt("class", "class_name", "cert_url", "resource_set_as", + "resource_set_ipv4", "resource_set_ipv6", + "resource_set_notafter", "suggested_sia_head") + elt.extend([i.toXML() for i in self.certs]) + self.make_b64elt(elt, "issuer", self.issuer) + return elt + + def to_resource_bag(self): + """ + Build a resource_bag from from this element. + """ + return rpki.resource_set.resource_bag(self.resource_set_as, + self.resource_set_ipv4, + self.resource_set_ipv6, + self.resource_set_notafter) + + def from_resource_bag(self, bag): + """ + Set resources of this class element from a resource_bag. + """ + self.resource_set_as = bag.asn + self.resource_set_ipv4 = bag.v4 + self.resource_set_ipv6 = bag.v6 + self.resource_set_notafter = bag.valid_until + +class list_pdu(base_elt): + """ + Up-Down protocol "list" PDU. 
+ """ + + def toXML(self): + """Generate (empty) payload of "list" PDU.""" + return [] + + def serve_pdu(self, q_msg, r_msg, child, callback, errback): + """ + Serve one "list" PDU. + """ + + def handle(irdb_resources): + + r_msg.payload = list_response_pdu() + + if irdb_resources.valid_until < rpki.sundial.now(): + rpki.log.debug("Child %s's resources expired %s" % (child.child_handle, irdb_resources.valid_until)) + else: + for parent in child.parents: + for ca in parent.cas: + ca_detail = ca.active_ca_detail + if not ca_detail: + rpki.log.debug("No active ca_detail, can't issue to %s" % child.child_handle) + continue + resources = ca_detail.latest_ca_cert.get_3779resources() & irdb_resources + if resources.empty(): + rpki.log.debug("No overlap between received resources and what child %s should get ([%s], [%s])" % (child.child_handle, ca_detail.latest_ca_cert.get_3779resources(), irdb_resources)) + continue + rc = class_elt() + rc.class_name = str(ca.ca_id) + rc.cert_url = multi_uri(ca_detail.ca_cert_uri) + rc.from_resource_bag(resources) + for child_cert in child.fetch_child_certs(ca_detail = ca_detail): + c = certificate_elt() + c.cert_url = multi_uri(child_cert.uri) + c.cert = child_cert.cert + rc.certs.append(c) + rc.issuer = ca_detail.latest_ca_cert + r_msg.payload.classes.append(rc) + + callback() + + self.gctx.irdb_query_child_resources(child.self.self_handle, child.child_handle, handle, errback) + + @classmethod + def query(cls, parent, cb, eb): + """ + Send a "list" query to parent. + """ + try: + rpki.log.info('Sending "list" request to parent %s' % parent.parent_handle) + parent.query_up_down(cls(), cb, eb) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + eb(e) + +class class_response_syntax(base_elt): + """ + Syntax for Up-Down protocol "list_response" and "issue_response" PDUs. + """ + + def __init__(self): + """ + Initialize class_response_syntax. 
+ """ + base_elt.__init__(self) + self.classes = [] + + def startElement(self, stack, name, attrs): + """ + Handle "list_response" and "issue_response" PDUs. + """ + assert name == "class", "Unexpected name %s, stack %s" % (name, stack) + c = class_elt() + self.classes.append(c) + stack.append(c) + c.startElement(stack, name, attrs) + + def toXML(self): + """Generate payload of "list_response" and "issue_response" PDUs.""" + return [c.toXML() for c in self.classes] + +class list_response_pdu(class_response_syntax): + """ + Up-Down protocol "list_response" PDU. + """ + pass + +class issue_pdu(base_elt): + """ + Up-Down protocol "issue" PDU. + """ + + def startElement(self, stack, name, attrs): + """ + Handle "issue" PDU. + """ + assert name == "request", "Unexpected name %s, stack %s" % (name, stack) + self.class_name = attrs["class_name"] + self.req_resource_set_as = rpki.resource_set.resource_set_as(attrs.get("req_resource_set_as")) + self.req_resource_set_ipv4 = rpki.resource_set.resource_set_ipv4(attrs.get("req_resource_set_ipv4")) + self.req_resource_set_ipv6 = rpki.resource_set.resource_set_ipv6(attrs.get("req_resource_set_ipv6")) + + def endElement(self, stack, name, text): + """ + Handle "issue" PDU. + """ + assert name == "request", "Unexpected name %s, stack %s" % (name, stack) + self.pkcs10 = rpki.x509.PKCS10(Base64 = text) + stack.pop() + + def toXML(self): + """ + Generate payload of "issue" PDU. + """ + elt = self.make_elt("request", "class_name", "req_resource_set_as", + "req_resource_set_ipv4", "req_resource_set_ipv6") + elt.text = self.pkcs10.get_Base64() + return [elt] + + def serve_pdu(self, q_msg, r_msg, child, callback, errback): + """ + Serve one issue request PDU. + """ + + # Subsetting not yet implemented, this is the one place where we + # have to handle it, by reporting that we're lame. 
+ + if self.req_resource_set_as or \ + self.req_resource_set_ipv4 or \ + self.req_resource_set_ipv6: + raise rpki.exceptions.NotImplementedYet("req_* attributes not implemented yet, sorry") + + # Check the request + self.pkcs10.check_valid_request_ca() + ca = child.ca_from_class_name(self.class_name) + ca_detail = ca.active_ca_detail + if ca_detail is None: + raise rpki.exceptions.NoActiveCA("No active CA for class %r" % self.class_name) + + # Check current cert, if any + + def got_resources(irdb_resources): + + if irdb_resources.valid_until < rpki.sundial.now(): + raise rpki.exceptions.IRDBExpired("IRDB entry for child %s expired %s" % ( + child.child_handle, irdb_resources.valid_until)) + + resources = irdb_resources & ca_detail.latest_ca_cert.get_3779resources() + resources.valid_until = irdb_resources.valid_until + req_key = self.pkcs10.getPublicKey() + req_sia = self.pkcs10.get_SIA() + child_cert = child.fetch_child_certs(ca_detail = ca_detail, ski = req_key.get_SKI(), unique = True) + + # Generate new cert or regenerate old one if necessary + + publisher = rpki.rpkid.publication_queue() + + if child_cert is None: + child_cert = ca_detail.issue( + ca = ca, + child = child, + subject_key = req_key, + sia = req_sia, + resources = resources, + publisher = publisher) + else: + child_cert = child_cert.reissue( + ca_detail = ca_detail, + sia = req_sia, + resources = resources, + publisher = publisher) + + def done(): + c = certificate_elt() + c.cert_url = multi_uri(child_cert.uri) + c.cert = child_cert.cert + rc = class_elt() + rc.class_name = self.class_name + rc.cert_url = multi_uri(ca_detail.ca_cert_uri) + rc.from_resource_bag(resources) + rc.certs.append(c) + rc.issuer = ca_detail.latest_ca_cert + r_msg.payload = issue_response_pdu() + r_msg.payload.classes.append(rc) + callback() + + self.gctx.sql.sweep() + assert child_cert and child_cert.sql_in_db + publisher.call_pubd(done, errback) + + self.gctx.irdb_query_child_resources(child.self.self_handle, 
child.child_handle, got_resources, errback) + + @classmethod + def query(cls, parent, ca, ca_detail, callback, errback): + """ + Send an "issue" request to parent associated with ca. + """ + assert ca_detail is not None and ca_detail.state in ("pending", "active") + self = cls() + self.class_name = ca.parent_resource_class + self.pkcs10 = rpki.x509.PKCS10.create( + keypair = ca_detail.private_key_id, + is_ca = True, + caRepository = ca.sia_uri, + rpkiManifest = ca_detail.manifest_uri) + rpki.log.info('Sending "issue" request to parent %s' % parent.parent_handle) + parent.query_up_down(self, callback, errback) + +class issue_response_pdu(class_response_syntax): + """ + Up-Down protocol "issue_response" PDU. + """ + + def check_response(self): + """ + Check whether this looks like a reasonable issue_response PDU. + XML schema should be tighter for this response. + """ + if len(self.classes) != 1 or len(self.classes[0].certs) != 1: + raise rpki.exceptions.BadIssueResponse + +class revoke_syntax(base_elt): + """ + Syntax for Up-Down protocol "revoke" and "revoke_response" PDUs. + """ + + def startElement(self, stack, name, attrs): + """Handle "revoke" PDU.""" + self.class_name = attrs["class_name"] + self.ski = attrs["ski"] + + def toXML(self): + """Generate payload of "revoke" PDU.""" + return [self.make_elt("key", "class_name", "ski")] + +class revoke_pdu(revoke_syntax): + """ + Up-Down protocol "revoke" PDU. + """ + + def get_SKI(self): + """ + Convert g(SKI) encoding from PDU back to raw SKI. + """ + return base64.urlsafe_b64decode(self.ski + "=") + + def serve_pdu(self, q_msg, r_msg, child, cb, eb): + """ + Serve one revoke request PDU. 
+ """ + + def done(): + r_msg.payload = revoke_response_pdu() + r_msg.payload.class_name = self.class_name + r_msg.payload.ski = self.ski + cb() + + ca = child.ca_from_class_name(self.class_name) + publisher = rpki.rpkid.publication_queue() + for ca_detail in ca.ca_details: + for child_cert in child.fetch_child_certs(ca_detail = ca_detail, ski = self.get_SKI()): + child_cert.revoke(publisher = publisher) + self.gctx.sql.sweep() + publisher.call_pubd(done, eb) + + @classmethod + def query(cls, ca, gski, cb, eb): + """ + Send a "revoke" request for certificate(s) named by gski to parent associated with ca. + """ + parent = ca.parent + self = cls() + self.class_name = ca.parent_resource_class + self.ski = gski + rpki.log.info('Sending "revoke" request for SKI %s to parent %s' % (gski, parent.parent_handle)) + parent.query_up_down(self, cb, eb) + +class revoke_response_pdu(revoke_syntax): + """ + Up-Down protocol "revoke_response" PDU. + """ + + pass + +class error_response_pdu(base_elt): + """ + Up-Down protocol "error_response" PDU. + """ + + codes = { + 1101 : "Already processing request", + 1102 : "Version number error", + 1103 : "Unrecognised request type", + 1201 : "Request - no such resource class", + 1202 : "Request - no resources allocated in resource class", + 1203 : "Request - badly formed certificate request", + 1301 : "Revoke - no such resource class", + 1302 : "Revoke - no such key", + 2001 : "Internal Server Error - Request not performed" } + + exceptions = { + rpki.exceptions.NoActiveCA : 1202, + (rpki.exceptions.ClassNameUnknown, revoke_pdu) : 1301, + rpki.exceptions.ClassNameUnknown : 1201, + (rpki.exceptions.NotInDatabase, revoke_pdu) : 1302 } + + def __init__(self, exception = None, request_payload = None): + """ + Initialize an error_response PDU from an exception object. 
+ """ + base_elt.__init__(self) + if exception is not None: + rpki.log.debug("Constructing up-down error response from exception %s" % exception) + exception_type = type(exception) + request_type = None if request_payload is None else type(request_payload) + rpki.log.debug("Constructing up-down error response: exception_type %s, request_type %s" % ( + exception_type, request_type)) + if False: + self.status = self.exceptions.get((exception_type, request_type), + self.exceptions.get(exception_type, + 2001)) + else: + self.status = self.exceptions.get((exception_type, request_type)) + if self.status is None: + rpki.log.debug("No request-type-specific match, trying exception match") + self.status = self.exceptions.get(exception_type) + if self.status is None: + rpki.log.debug("No exception match either, defaulting") + self.status = 2001 + self.description = str(exception) + rpki.log.debug("Chosen status code: %s" % self.status) + + def endElement(self, stack, name, text): + """ + Handle "error_response" PDU. + """ + if name == "status": + code = int(text) + if code not in self.codes: + raise rpki.exceptions.BadStatusCode("%s is not a known status code" % code) + self.status = code + elif name == "description": + self.description = text + else: + assert name == "message", "Unexpected name %s, stack %s" % (name, stack) + stack.pop() + stack[-1].endElement(stack, name, text) + + def toXML(self): + """ + Generate payload of "error_response" PDU. + """ + assert self.status in self.codes + elt = self.make_elt("status") + elt.text = str(self.status) + payload = [elt] + if self.description: + elt = self.make_elt("description") + elt.text = str(self.description) + elt.set("{http://www.w3.org/XML/1998/namespace}lang", "en-US") + payload.append(elt) + return payload + + def check_response(self): + """ + Handle an error response. For now, just raise an exception, + perhaps figure out something more clever to do later. 
+ """ + raise rpki.exceptions.UpstreamError(self.codes[self.status]) + +class message_pdu(base_elt): + """ + Up-Down protocol message wrapper PDU. + """ + + version = 1 + + name2type = { + "list" : list_pdu, + "list_response" : list_response_pdu, + "issue" : issue_pdu, + "issue_response" : issue_response_pdu, + "revoke" : revoke_pdu, + "revoke_response" : revoke_response_pdu, + "error_response" : error_response_pdu } + + type2name = dict((v, k) for k, v in name2type.items()) + + error_pdu_type = error_response_pdu + + def toXML(self): + """ + Generate payload of message PDU. + """ + elt = self.make_elt("message", "version", "sender", "recipient", "type") + elt.extend(self.payload.toXML()) + return elt + + def startElement(self, stack, name, attrs): + """ + Handle message PDU. + + Payload of the element varies depending on the "type" + attribute, so after some basic checks we have to instantiate the + right class object to handle whatever kind of PDU this is. + """ + assert name == "message", "Unexpected name %s, stack %s" % (name, stack) + assert self.version == int(attrs["version"]) + self.sender = attrs["sender"] + self.recipient = attrs["recipient"] + self.type = attrs["type"] + self.payload = self.name2type[attrs["type"]]() + stack.append(self.payload) + + def __str__(self): + """ + Convert a message PDU to a string. + """ + return lxml.etree.tostring(self.toXML(), pretty_print = True, encoding = "UTF-8") + + def serve_top_level(self, child, callback): + """ + Serve one message request PDU. 
+ """ + + r_msg = message_pdu() + r_msg.sender = self.recipient + r_msg.recipient = self.sender + + def done(): + r_msg.type = self.type2name[type(r_msg.payload)] + callback(r_msg) + + def lose(e): + rpki.log.traceback() + callback(self.serve_error(e)) + + try: + self.log_query(child) + self.payload.serve_pdu(self, r_msg, child, done, lose) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception, e: + lose(e) + + def log_query(self, child): + """ + Log query we're handling. Separate method so rootd can override. + """ + rpki.log.info("Serving %s query from child %s [sender %s, recipient %s]" % (self.type, child.child_handle, self.sender, self.recipient)) + + def serve_error(self, exception): + """ + Generate an error_response message PDU. + """ + r_msg = message_pdu() + r_msg.sender = self.recipient + r_msg.recipient = self.sender + r_msg.payload = self.error_pdu_type(exception, self.payload) + r_msg.type = self.type2name[type(r_msg.payload)] + return r_msg + + @classmethod + def make_query(cls, payload, sender, recipient): + """ + Construct one message PDU. + """ + assert not cls.type2name[type(payload)].endswith("_response") + if sender is None: + sender = "tweedledee" + if recipient is None: + recipient = "tweedledum" + self = cls() + self.sender = sender + self.recipient = recipient + self.payload = payload + self.type = self.type2name[type(payload)] + return self + +class sax_handler(rpki.xml_utils.sax_handler): + """ + SAX handler for Up-Down protocol. + """ + + pdu = message_pdu + name = "message" + version = "1" + +class cms_msg(rpki.x509.XML_CMS_object): + """ + Class to hold a CMS-signed up-down PDU. 
+ """ + + encoding = "UTF-8" + schema = rpki.relaxng.up_down + saxify = sax_handler.saxify + allow_extra_certs = True + allow_extra_crls = True diff --git a/rpki/x509.py b/rpki/x509.py new file mode 100644 index 00000000..fb1a5a2b --- /dev/null +++ b/rpki/x509.py @@ -0,0 +1,2031 @@ +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2013 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +One X.509 implementation to rule them all... + +...and in the darkness hide the twisty maze of partially-overlapping +X.509 support packages in Python. + +Once upon a time we were using four separate partially-overlapping +implementions of X.509 and related protocols. Over the years this has +collapsed down to one, but the interface module we built on top of the +previous mess has itself become heavily embedded in the code base. So +this is a bit more complicated (not to mention baroque) than one might +expect for a module that had grown in a saner fashion. We clean up +bits of it from time to time. Some day this may all make sense. 
+""" + +import rpki.POW +import base64 +import lxml.etree +import os +import subprocess +import email.mime.application +import email.utils +import mailbox +import time +import rpki.exceptions +import rpki.resource_set +import rpki.oids +import rpki.sundial +import rpki.log +import rpki.async +import rpki.relaxng + +def base64_with_linebreaks(der): + """ + Encode DER (really, anything) as Base64 text, with linebreaks to + keep the result (sort of) readable. + """ + b = base64.b64encode(der) + n = len(b) + return "\n" + "\n".join(b[i : min(i + 64, n)] for i in xrange(0, n, 64)) + "\n" + +def looks_like_PEM(text): + """ + Guess whether text looks like a PEM encoding. + """ + + i = text.find("-----BEGIN ") + return i >= 0 and text.find("\n-----END ", i) > i + +def first_rsync_uri(xia): + """ + Find first rsync URI in a sequence of AIA or SIA URIs. + Returns the URI if found, otherwise None. + """ + + if xia is not None: + for uri in xia: + if uri.startswith("rsync://"): + return uri + return None + +class X501DN(object): + """ + Class to hold an X.501 Distinguished Name. + + This is nothing like a complete implementation, just enough for our + purposes. See RFC 5280 4.1.2.4 for the ASN.1 details. In brief: + + - A DN is a SEQUENCE OF RDNs. + + - A RDN is a SET OF AttributeAndValues; in practice, multi-value + RDNs are rare, so an RDN is almost always a set with a single + element. + + - An AttributeAndValue is a SEQUENCE consisting of a OID and a + value, where a whole bunch of things including both syntax and + semantics of the value are determined by the OID. + + - The value is some kind of ASN.1 string; there are far too many + encoding options options, most of which are either strongly + discouraged or outright forbidden by the PKIX profile, but which + persist for historical reasons. The only ones PKIX actually + likes are PrintableString and UTF8String, but there are nuances + and special cases where some of the others are required. 
+ + The RPKI profile further restricts DNs to a single mandatory + CommonName attribute with a single optional SerialNumber attribute + (not to be confused with the certificate serial number). + + BPKI certificates should (we hope) follow the general PKIX guideline + but the ones we construct ourselves are likely to be relatively + simple. + """ + + def __str__(self): + return "".join("/" + "+".join("%s=%s" % (rpki.oids.oid2name(a[0]), a[1]) + for a in rdn) + for rdn in self.dn) + + def __cmp__(self, other): + return cmp(self.dn, other.dn) + + def __repr__(self): + return rpki.log.log_repr(self, str(self)) + + def _debug(self): + if False: + import traceback + for chunk in traceback.format_stack(limit = 5): + for line in chunk.splitlines(): + rpki.log.debug("== %s" % line) + rpki.log.debug("++ %r %r" % (self, self.dn)) + + @classmethod + def from_cn(cls, cn, sn = None): + assert isinstance(cn, (str, unicode)) + if isinstance(sn, (int, long)): + sn = "%08X" % sn + elif isinstance(sn, (str, unicode)): + assert all(c in "0123456789abcdefABCDEF" for c in sn) + sn = str(sn) + self = cls() + if sn is not None: + self.dn = (((rpki.oids.commonName, cn),), ((rpki.oids.serialNumber, sn),)) + else: + self.dn = (((rpki.oids.commonName, cn),),) + return self + + @classmethod + def from_POW(cls, t): + assert isinstance(t, tuple) + self = cls() + self.dn = t + return self + + def get_POW(self): + return self.dn + + def extract_cn_and_sn(self): + cn = None + sn = None + + for rdn in self.dn: + if len(rdn) == 1 and len(rdn[0]) == 2: + oid = rdn[0][0] + val = rdn[0][1] + if oid == rpki.oids.commonName and cn is None: + cn = val + continue + if oid == rpki.oids.serialNumber and sn is None: + sn = val + continue + raise rpki.exceptions.BadX510DN("Bad subject name: %s" % (self.dn,)) + + if cn is None: + raise rpki.exceptions.BadX510DN("Subject name is missing CN: %s" % (self.dn,)) + + return cn, sn + + +class DER_object(object): + """ + Virtual class to hold a generic DER object. 
+ """ + + ## @var formats + # Formats supported in this object. This is kind of redundant now + # that we're down to a single ASN.1 package and everything supports + # the same DER and POW formats, it's mostly historical baggage from + # the days when we had three different ASN.1 encoders, each with its + # own low-level Python object format. Clean up, some day. + formats = ("DER", "POW") + + ## @var POW_class + # Class of underlying POW object. Concrete subclasses must supply this. + POW_class = None + + ## Other attributes that self.clear() should whack. + other_clear = () + + ## @var DER + # DER value of this object + DER = None + + ## @var failure_threshold + # Rate-limiting interval between whines about Auto_update objects. + failure_threshold = rpki.sundial.timedelta(minutes = 5) + + def empty(self): + """ + Test whether this object is empty. + """ + return all(getattr(self, a, None) is None for a in self.formats) + + def clear(self): + """ + Make this object empty. + """ + for a in self.formats + self.other_clear: + setattr(self, a, None) + self.filename = None + self.timestamp = None + self.lastfail = None + + def __init__(self, **kw): + """ + Initialize a DER_object. + """ + self.clear() + if len(kw): + self.set(**kw) + + def set(self, **kw): + """ + Set this object by setting one of its known formats. + + This method only allows one to set one format at a time. + Subsequent calls will clear the object first. The point of all + this is to let the object's internal converters handle mustering + the object into whatever format you need at the moment. 
+ """ + + if len(kw) == 1: + name = kw.keys()[0] + if name in self.formats: + self.clear() + setattr(self, name, kw[name]) + return + if name == "PEM": + self.clear() + self._set_PEM(kw[name]) + return + if name == "Base64": + self.clear() + self.DER = base64.b64decode(kw[name]) + return + if name == "Auto_update": + self.filename = kw[name] + self.check_auto_update() + return + if name in ("PEM_file", "DER_file", "Auto_file"): + f = open(kw[name], "rb") + value = f.read() + f.close() + self.clear() + if name == "PEM_file" or (name == "Auto_file" and looks_like_PEM(value)): + self._set_PEM(value) + else: + self.DER = value + return + raise rpki.exceptions.DERObjectConversionError("Can't honor conversion request %r" % (kw,)) + + def check_auto_update(self): + """ + Check for updates to a DER object that auto-updates from a file. + """ + if self.filename is None: + return + try: + filename = self.filename + timestamp = os.stat(self.filename).st_mtime + if self.timestamp is None or self.timestamp < timestamp: + rpki.log.debug("Updating %s, timestamp %s" % (filename, rpki.sundial.datetime.fromtimestamp(timestamp))) + f = open(filename, "rb") + value = f.read() + f.close() + self.clear() + if looks_like_PEM(value): + self._set_PEM(value) + else: + self.DER = value + self.filename = filename + self.timestamp = timestamp + except (IOError, OSError), e: + now = rpki.sundial.now() + if self.lastfail is None or now > self.lastfail + self.failure_threshold: + rpki.log.warn("Could not auto_update %r (last failure %s): %s" % (self, self.lastfail, e)) + self.lastfail = now + else: + self.lastfail = None + + def check(self): + """ + Perform basic checks on a DER object. + """ + self.check_auto_update() + assert not self.empty() + + def _set_PEM(self, pem): + """ + Set the POW value of this object based on a PEM input value. + Subclasses may need to override this. 
+ """ + assert self.empty() + self.POW = self.POW_class.pemRead(pem) + + def get_DER(self): + """ + Get the DER value of this object. + Subclasses may need to override this method. + """ + self.check() + if self.DER: + return self.DER + if self.POW: + self.DER = self.POW.derWrite() + return self.get_DER() + raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") + + def get_POW(self): + """ + Get the rpki.POW value of this object. + Subclasses may need to override this method. + """ + self.check() + if not self.POW: # pylint: disable=E0203 + self.POW = self.POW_class.derRead(self.get_DER()) + return self.POW + + def get_Base64(self): + """ + Get the Base64 encoding of the DER value of this object. + """ + return base64_with_linebreaks(self.get_DER()) + + def get_PEM(self): + """ + Get the PEM representation of this object. + """ + return self.get_POW().pemWrite() + + def __cmp__(self, other): + """ + Compare two DER-encoded objects. + """ + if self is None and other is None: + return 0 + elif self is None: + return -1 + elif other is None: + return 1 + elif isinstance(other, str): + return cmp(self.get_DER(), other) + else: + return cmp(self.get_DER(), other.get_DER()) + + def hSKI(self): + """ + Return hexadecimal string representation of SKI for this object. + Only work for subclasses that implement get_SKI(). + """ + ski = self.get_SKI() + return ":".join(("%02X" % ord(i) for i in ski)) if ski else "" + + def gSKI(self): + """ + Calculate g(SKI) for this object. Only work for subclasses + that implement get_SKI(). + """ + return base64.urlsafe_b64encode(self.get_SKI()).rstrip("=") + + def hAKI(self): + """ + Return hexadecimal string representation of AKI for this + object. Only work for subclasses that implement get_AKI(). + """ + aki = self.get_AKI() + return ":".join(("%02X" % ord(i) for i in aki)) if aki else "" + + def gAKI(self): + """ + Calculate g(AKI) for this object. Only work for subclasses + that implement get_AKI(). 
+ """ + return base64.urlsafe_b64encode(self.get_AKI()).rstrip("=") + + def get_AKI(self): + """ + Get the AKI extension from this object, if supported. + """ + return self.get_POW().getAKI() + + def get_SKI(self): + """ + Get the SKI extension from this object, if supported. + """ + return self.get_POW().getSKI() + + def get_EKU(self): + """ + Get the Extended Key Usage extension from this object, if supported. + """ + return self.get_POW().getEKU() + + def get_SIA(self): + """ + Get the SIA extension from this object. Only works for subclasses + that support getSIA(). + """ + return self.get_POW().getSIA() + + def get_sia_directory_uri(self): + """ + Get SIA directory (id-ad-caRepository) URI from this object. + Only works for subclasses that support getSIA(). + """ + sia = self.get_POW().getSIA() + return None if sia is None else first_rsync_uri(sia[0]) + + def get_sia_manifest_uri(self): + """ + Get SIA manifest (id-ad-rpkiManifest) URI from this object. + Only works for subclasses that support getSIA(). + """ + sia = self.get_POW().getSIA() + return None if sia is None else first_rsync_uri(sia[1]) + + def get_sia_object_uri(self): + """ + Get SIA object (id-ad-signedObject) URI from this object. + Only works for subclasses that support getSIA(). + """ + sia = self.get_POW().getSIA() + return None if sia is None else first_rsync_uri(sia[2]) + + def get_AIA(self): + """ + Get the SIA extension from this object. Only works for subclasses + that support getAIA(). + """ + return self.get_POW().getAIA() + + def get_aia_uri(self): + """ + Get AIA (id-ad-caIssuers) URI from this object. + Only works for subclasses that support getAIA(). + """ + return first_rsync_uri(self.get_POW().getAIA()) + + def get_basicConstraints(self): + """ + Get the basicConstraints extension from this object. Only works + for subclasses that support getExtension(). 
+ """ + return self.get_POW().getBasicConstraints() + + def is_CA(self): + """ + Return True if and only if object has the basicConstraints + extension and its cA value is true. + """ + basicConstraints = self.get_basicConstraints() + return basicConstraints is not None and basicConstraints[0] + + def get_3779resources(self): + """ + Get RFC 3779 resources as rpki.resource_set objects. + """ + resources = rpki.resource_set.resource_bag.from_POW_rfc3779(self.get_POW().getRFC3779()) + try: + resources.valid_until = self.getNotAfter() + except AttributeError: + pass + return resources + + @classmethod + def from_sql(cls, x): + """ + Convert from SQL storage format. + """ + return cls(DER = x) + + def to_sql(self): + """ + Convert to SQL storage format. + """ + return self.get_DER() + + def dumpasn1(self): + """ + Pretty print an ASN.1 DER object using cryptlib dumpasn1 tool. + Use a temporary file rather than popen4() because dumpasn1 uses + seek() when decoding ASN.1 content nested in OCTET STRING values. + """ + + ret = None + fn = "dumpasn1.%d.tmp" % os.getpid() + try: + f = open(fn, "wb") + f.write(self.get_DER()) + f.close() + p = subprocess.Popen(("dumpasn1", "-a", fn), stdout = subprocess.PIPE, stderr = subprocess.STDOUT) + ret = "\n".join(x for x in p.communicate()[0].splitlines() if x.startswith(" ")) + except Exception, e: + ret = "[Could not run dumpasn1: %s]" % e + finally: + os.unlink(fn) + return ret + + def tracking_data(self, uri): + """ + Return a string containing data we want to log when tracking how + objects move through the RPKI system. Subclasses may wrap this to + provide more information, but should make sure to include at least + this information at the start of the tracking line. 
+ """ + try: + d = rpki.POW.Digest(rpki.POW.SHA1_DIGEST) + d.update(self.get_DER()) + return "%s %s %s" % (uri, self.creation_timestamp, + "".join(("%02X" % ord(b) for b in d.digest()))) + except: # pylint: disable=W0702 + return uri + + def __getstate__(self): + """ + Pickling protocol -- pickle the DER encoding. + """ + return self.get_DER() + + def __setstate__(self, state): + """ + Pickling protocol -- unpickle the DER encoding. + """ + self.set(DER = state) + +class X509(DER_object): + """ + X.509 certificates. + + This class is designed to hold all the different representations of + X.509 certs we're using and convert between them. X.509 support in + Python a nasty maze of half-cooked stuff (except perhaps for + cryptlib, which is just different). Users of this module should not + have to care about this implementation nightmare. + """ + + POW_class = rpki.POW.X509 + + def getIssuer(self): + """ + Get the issuer of this certificate. + """ + return X501DN.from_POW(self.get_POW().getIssuer()) + + def getSubject(self): + """ + Get the subject of this certificate. + """ + return X501DN.from_POW(self.get_POW().getSubject()) + + def getNotBefore(self): + """ + Get the inception time of this certificate. + """ + return self.get_POW().getNotBefore() + + def getNotAfter(self): + """ + Get the expiration time of this certificate. + """ + return self.get_POW().getNotAfter() + + def getSerial(self): + """ + Get the serial number of this certificate. + """ + return self.get_POW().getSerial() + + def getPublicKey(self): + """ + Extract the public key from this certificate. + """ + return PublicKey(POW = self.get_POW().getPublicKey()) + + def get_SKI(self): + """ + Get the SKI extension from this object. + """ + return self.get_POW().getSKI() + + def expired(self): + """ + Test whether this certificate has expired. 
+ """ + return self.getNotAfter() <= rpki.sundial.now() + + def issue(self, keypair, subject_key, serial, sia, aia, crldp, notAfter, + cn = None, resources = None, is_ca = True, notBefore = None, + sn = None, eku = None): + """ + Issue an RPKI certificate. + """ + + assert aia is not None and crldp is not None + + assert eku is None or not is_ca + + return self._issue( + keypair = keypair, + subject_key = subject_key, + serial = serial, + sia = sia, + aia = aia, + crldp = crldp, + notBefore = notBefore, + notAfter = notAfter, + cn = cn, + sn = sn, + resources = resources, + is_ca = is_ca, + aki = self.get_SKI(), + issuer_name = self.getSubject(), + eku = eku) + + + @classmethod + def self_certify(cls, keypair, subject_key, serial, sia, notAfter, + cn = None, resources = None, notBefore = None, + sn = None): + """ + Generate a self-certified RPKI certificate. + """ + + ski = subject_key.get_SKI() + + if cn is None: + cn = "".join(("%02X" % ord(i) for i in ski)) + + return cls._issue( + keypair = keypair, + subject_key = subject_key, + serial = serial, + sia = sia, + aia = None, + crldp = None, + notBefore = notBefore, + notAfter = notAfter, + cn = cn, + sn = sn, + resources = resources, + is_ca = True, + aki = ski, + issuer_name = X501DN.from_cn(cn, sn), + eku = None) + + + @classmethod + def _issue(cls, keypair, subject_key, serial, sia, aia, crldp, notAfter, + cn, sn, resources, is_ca, aki, issuer_name, notBefore, eku): + """ + Common code to issue an RPKI certificate. 
+ """ + + now = rpki.sundial.now() + ski = subject_key.get_SKI() + + if notBefore is None: + notBefore = now + + if cn is None: + cn = "".join(("%02X" % ord(i) for i in ski)) + + if now >= notAfter: + raise rpki.exceptions.PastNotAfter("notAfter value %s is already in the past" % notAfter) + + if notBefore >= notAfter: + raise rpki.exceptions.NullValidityInterval("notAfter value %s predates notBefore value %s" % + (notAfter, notBefore)) + + cert = rpki.POW.X509() + + cert.setVersion(2) + cert.setSerial(serial) + cert.setIssuer(issuer_name.get_POW()) + cert.setSubject(X501DN.from_cn(cn, sn).get_POW()) + cert.setNotBefore(notBefore) + cert.setNotAfter(notAfter) + cert.setPublicKey(subject_key.get_POW()) + cert.setSKI(ski) + cert.setAKI(aki) + cert.setCertificatePolicies((rpki.oids.id_cp_ipAddr_asNumber,)) + + if crldp is not None: + cert.setCRLDP((crldp,)) + + if aia is not None: + cert.setAIA((aia,)) + + if is_ca: + cert.setBasicConstraints(True, None) + cert.setKeyUsage(frozenset(("keyCertSign", "cRLSign"))) + + else: + cert.setKeyUsage(frozenset(("digitalSignature",))) + + assert sia is not None or not is_ca + + if sia is not None: + caRepository, rpkiManifest, signedObject = sia + cert.setSIA( + (caRepository,) if isinstance(caRepository, str) else caRepository, + (rpkiManifest,) if isinstance(rpkiManifest, str) else rpkiManifest, + (signedObject,) if isinstance(signedObject, str) else signedObject) + + if resources is not None: + cert.setRFC3779( + asn = ("inherit" if resources.asn.inherit else + ((r.min, r.max) for r in resources.asn)), + ipv4 = ("inherit" if resources.v4.inherit else + ((r.min, r.max) for r in resources.v4)), + ipv6 = ("inherit" if resources.v6.inherit else + ((r.min, r.max) for r in resources.v6))) + + if eku is not None: + assert not is_ca + cert.setEKU(eku) + + cert.sign(keypair.get_POW(), rpki.POW.SHA256_DIGEST) + + return cls(POW = cert) + + def bpki_cross_certify(self, keypair, source_cert, serial, notAfter, + now = None, 
pathLenConstraint = 0): + """ + Issue a BPKI certificate with values taking from an existing certificate. + """ + return self.bpki_certify( + keypair = keypair, + subject_name = source_cert.getSubject(), + subject_key = source_cert.getPublicKey(), + serial = serial, + notAfter = notAfter, + now = now, + pathLenConstraint = pathLenConstraint, + is_ca = True) + + @classmethod + def bpki_self_certify(cls, keypair, subject_name, serial, notAfter, + now = None, pathLenConstraint = None): + """ + Issue a self-signed BPKI CA certificate. + """ + return cls._bpki_certify( + keypair = keypair, + issuer_name = subject_name, + subject_name = subject_name, + subject_key = keypair.get_public(), + serial = serial, + now = now, + notAfter = notAfter, + pathLenConstraint = pathLenConstraint, + is_ca = True) + + def bpki_certify(self, keypair, subject_name, subject_key, serial, notAfter, is_ca, + now = None, pathLenConstraint = None): + """ + Issue a normal BPKI certificate. + """ + assert keypair.get_public() == self.getPublicKey() + return self._bpki_certify( + keypair = keypair, + issuer_name = self.getSubject(), + subject_name = subject_name, + subject_key = subject_key, + serial = serial, + now = now, + notAfter = notAfter, + pathLenConstraint = pathLenConstraint, + is_ca = is_ca) + + @classmethod + def _bpki_certify(cls, keypair, issuer_name, subject_name, subject_key, + serial, now, notAfter, pathLenConstraint, is_ca): + """ + Issue a BPKI certificate. This internal method does the real + work, after one of the wrapper methods has extracted the relevant + fields. 
+ """ + + if now is None: + now = rpki.sundial.now() + + issuer_key = keypair.get_public() + + assert (issuer_key == subject_key) == (issuer_name == subject_name) + assert is_ca or issuer_name != subject_name + assert is_ca or pathLenConstraint is None + assert pathLenConstraint is None or (isinstance(pathLenConstraint, (int, long)) and + pathLenConstraint >= 0) + + cert = rpki.POW.X509() + cert.setVersion(2) + cert.setSerial(serial) + cert.setIssuer(issuer_name.get_POW()) + cert.setSubject(subject_name.get_POW()) + cert.setNotBefore(now) + cert.setNotAfter(notAfter) + cert.setPublicKey(subject_key.get_POW()) + cert.setSKI(subject_key.get_POW().calculateSKI()) + if issuer_key != subject_key: + cert.setAKI(issuer_key.get_POW().calculateSKI()) + if is_ca: + cert.setBasicConstraints(True, pathLenConstraint) + cert.sign(keypair.get_POW(), rpki.POW.SHA256_DIGEST) + return cls(POW = cert) + + @classmethod + def normalize_chain(cls, chain): + """ + Normalize a chain of certificates into a tuple of X509 objects. + Given all the glue certificates needed for BPKI cross + certification, it's easiest to allow sloppy arguments to the CMS + validation methods and provide a single method that normalizes the + allowed cases. So this method allows X509, None, lists, and + tuples, and returns a tuple of X509 objects. + """ + if isinstance(chain, cls): + chain = (chain,) + return tuple(x for x in chain if x is not None) + + @property + def creation_timestamp(self): + """ + Time at which this object was created. + """ + return self.getNotBefore() + +class PKCS10(DER_object): + """ + Class to hold a PKCS #10 request. + """ + + POW_class = rpki.POW.PKCS10 + + ## @var expected_ca_keyUsage + # KeyUsage extension flags expected for CA requests. + + expected_ca_keyUsage = frozenset(("keyCertSign", "cRLSign")) + + ## @var allowed_extensions + # Extensions allowed by RPKI profile. 
+ + allowed_extensions = frozenset((rpki.oids.basicConstraints, + rpki.oids.keyUsage, + rpki.oids.subjectInfoAccess, + rpki.oids.extendedKeyUsage)) + + + def get_DER(self): + """ + Get the DER value of this certification request. + """ + self.check() + if self.DER: + return self.DER + if self.POW: + self.DER = self.POW.derWrite() + return self.get_DER() + raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") + + def get_POW(self): + """ + Get the rpki.POW value of this certification request. + """ + self.check() + if not self.POW: # pylint: disable=E0203 + self.POW = rpki.POW.PKCS10.derRead(self.get_DER()) + return self.POW + + def getSubject(self): + """ + Extract the subject name from this certification request. + """ + return X501DN.from_POW(self.get_POW().getSubject()) + + def getPublicKey(self): + """ + Extract the public key from this certification request. + """ + return PublicKey(POW = self.get_POW().getPublicKey()) + + def get_SKI(self): + """ + Compute SKI for public key from this certification request. + """ + return self.getPublicKey().get_SKI() + + + def check_valid_request_common(self): + """ + Common code for checking this certification requests to see + whether they conform to the RPKI certificate profile. + + Throws an exception if the request isn't valid, so if this method + returns at all, the request is ok. + + You probably don't want to call this directly, as it only performs + the checks that are common to all RPKI certificates. 
+ """ + + if not self.get_POW().verify(): + raise rpki.exceptions.BadPKCS10("PKCS #10 signature check failed") + + ver = self.get_POW().getVersion() + + if ver != 0: + raise rpki.exceptions.BadPKCS10("PKCS #10 request has bad version number %s" % ver) + + ku = self.get_POW().getKeyUsage() + + if ku is not None and self.expected_ca_keyUsage != ku: + raise rpki.exceptions.BadPKCS10("PKCS #10 keyUsage doesn't match profile: %r" % ku) + + forbidden_extensions = self.get_POW().getExtensionOIDs() - self.allowed_extensions + + if forbidden_extensions: + raise rpki.exceptions.BadExtension("Forbidden extension%s in PKCS #10 certificate request: %s" % ( + "" if len(forbidden_extensions) == 1 else "s", + ", ".join(forbidden_extensions))) + + + def check_valid_request_ca(self): + """ + Check this certification request to see whether it's a valid + request for an RPKI CA certificate. + + Throws an exception if the request isn't valid, so if this method + returns at all, the request is ok. + """ + + self.check_valid_request_common() + + alg = self.get_POW().getSignatureAlgorithm() + bc = self.get_POW().getBasicConstraints() + eku = self.get_POW().getEKU() + sias = self.get_POW().getSIA() + + if alg != rpki.oids.sha256WithRSAEncryption: + raise rpki.exceptions.BadPKCS10("PKCS #10 has bad signature algorithm for CA: %s" % alg) + + if bc is None or not bc[0] or bc[1] is not None: + raise rpki.exceptions.BadPKCS10("PKCS #10 CA bad basicConstraints") + + if eku is not None: + raise rpki.exceptions.BadPKCS10("PKCS #10 CA EKU not allowed") + + if sias is None: + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA missing") + + caRepository, rpkiManifest, signedObject = sias + + if signedObject: + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA must not have id-ad-signedObject") + + if not caRepository: + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA must have id-ad-caRepository") + + if not any(uri.startswith("rsync://") for uri in caRepository): + raise 
rpki.exceptions.BadPKCS10("PKCS #10 CA SIA id-ad-caRepository contains no rsync URIs") + + if any(uri.startswith("rsync://") and not uri.endswith("/") for uri in caRepository): + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA id-ad-caRepository does not end with slash") + + if not rpkiManifest: + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA must have id-ad-rpkiManifest") + + if not any(uri.startswith("rsync://") for uri in rpkiManifest): + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA id-ad-rpkiManifest contains no rsync URIs") + + if any(uri.startswith("rsync://") and uri.endswith("/") for uri in rpkiManifest): + raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA id-ad-rpkiManifest ends with slash") + + + def check_valid_request_ee(self): + """ + Check this certification request to see whether it's a valid + request for an RPKI EE certificate. + + Throws an exception if the request isn't valid, so if this method + returns at all, the request is ok. + + We're a bit less strict here than we are for either CA + certificates or BGPSEC router certificates, because the profile is + less tightly nailed down for unspecified-use RPKI EE certificates. + Future specific purposes may impose tighter constraints. + + Note that this method does NOT apply to so-called "infrastructure" + EE certificates (eg, the EE certificates embedded in manifests and + ROAs); those are constrained fairly tightly, but they're also + generated internally so we don't need to check them as user or + protocol input. 
+ """ + + self.check_valid_request_common() + + alg = self.get_POW().getSignatureAlgorithm() + bc = self.get_POW().getBasicConstraints() + sia = self.get_POW().getSIA() + + caRepository, rpkiManifest, signedObject = sia or (None, None, None) + + if alg not in (rpki.oids.sha256WithRSAEncryption, rpki.oids.ecdsa_with_SHA256): + raise rpki.exceptions.BadPKCS10("PKCS #10 has bad signature algorithm for EE: %s" % alg) + + if bc is not None and (bc[0] or bc[1] is not None): + raise rpki.exceptions.BadPKCS10("PKCS #10 EE has bad basicConstraints") + + if caRepository: + raise rpki.exceptions.BadPKCS10("PKCS #10 EE must not have id-ad-caRepository") + + if rpkiManifest: + raise rpki.exceptions.BadPKCS10("PKCS #10 EE must not have id-ad-rpkiManifest") + + if signedObject and not any(uri.startswith("rsync://") for uri in signedObject): + raise rpki.exceptions.BadPKCS10("PKCS #10 EE SIA id-ad-signedObject contains no rsync URIs") + + + def check_valid_request_router(self): + """ + Check this certification request to see whether it's a valid + request for a BGPSEC router certificate. + + Throws an exception if the request isn't valid, so if this method + returns at all, the request is ok. + + draft-ietf-sidr-bgpsec-pki-profiles 3.2 says follow RFC 6487 3 + except where explicitly overriden, and does not override for SIA. + But draft-ietf-sidr-bgpsec-pki-profiles also says that router + certificates don't get SIA, while RFC 6487 requires SIA. So what + do we do with SIA in PKCS #10 for router certificates? + + For the moment, ignore it, but make sure we don't include it in + the certificate when we get to the code that generates that. 
+ """ + + self.check_valid_request_ee() + + alg = self.get_POW().getSignatureAlgorithm() + eku = self.get_POW().getEKU() + + if alg != rpki.oids.ecdsa_with_SHA256: + raise rpki.exceptions.BadPKCS10("PKCS #10 has bad signature algorithm for router: %s" % alg) + + # Not really clear to me whether PKCS #10 should have EKU or not, so allow + # either, but insist that it be the right one if present. + + if eku is not None and rpki.oids.id_kp_bgpsec_router not in eku: + raise rpki.exceptions.BadPKCS10("PKCS #10 router must have EKU") + + + @classmethod + def create(cls, keypair, exts = None, is_ca = False, + caRepository = None, rpkiManifest = None, signedObject = None, + cn = None, sn = None, eku = None): + """ + Create a new request for a given keypair. + """ + + assert exts is None, "Old calling sequence to rpki.x509.PKCS10.create()" + + if cn is None: + cn = "".join(("%02X" % ord(i) for i in keypair.get_SKI())) + + if isinstance(caRepository, str): + caRepository = (caRepository,) + + if isinstance(rpkiManifest, str): + rpkiManifest = (rpkiManifest,) + + if isinstance(signedObject, str): + signedObject = (signedObject,) + + req = rpki.POW.PKCS10() + req.setVersion(0) + req.setSubject(X501DN.from_cn(cn, sn).get_POW()) + req.setPublicKey(keypair.get_POW()) + + if is_ca: + req.setBasicConstraints(True, None) + req.setKeyUsage(cls.expected_ca_keyUsage) + + if caRepository or rpkiManifest or signedObject: + req.setSIA(caRepository, rpkiManifest, signedObject) + + if eku: + req.setEKU(eku) + + req.sign(keypair.get_POW(), rpki.POW.SHA256_DIGEST) + return cls(POW = req) + +## @var generate_insecure_debug_only_rsa_key +# Debugging hack to let us save throwaway RSA keys from one debug +# session to the next. DO NOT USE THIS IN PRODUCTION. 
+ +generate_insecure_debug_only_rsa_key = None + +class insecure_debug_only_rsa_key_generator(object): + + def __init__(self, filename, keyno = 0): + try: + try: + import gdbm as dbm_du_jour + except ImportError: + import dbm as dbm_du_jour + self.keyno = long(keyno) + self.filename = filename + self.db = dbm_du_jour.open(filename, "c") + except: + rpki.log.warn("insecure_debug_only_rsa_key_generator initialization FAILED, hack inoperative") + raise + + def __call__(self): + k = str(self.keyno) + try: + v = rpki.POW.Asymmetric.derReadPrivate(self.db[k]) + except KeyError: + v = rpki.POW.Asymmetric.generateRSA(2048) + self.db[k] = v.derWritePrivate() + self.keyno += 1 + return v + + +class PrivateKey(DER_object): + """ + Class to hold a Public/Private key pair. + """ + + POW_class = rpki.POW.Asymmetric + + def get_DER(self): + """ + Get the DER value of this keypair. + """ + self.check() + if self.DER: + return self.DER + if self.POW: + self.DER = self.POW.derWritePrivate() + return self.get_DER() + raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") + + def get_POW(self): + """ + Get the rpki.POW value of this keypair. + """ + self.check() + if not self.POW: # pylint: disable=E0203 + self.POW = rpki.POW.Asymmetric.derReadPrivate(self.get_DER()) + return self.POW + + def get_PEM(self): + """ + Get the PEM representation of this keypair. + """ + return self.get_POW().pemWritePrivate() + + def _set_PEM(self, pem): + """ + Set the POW value of this keypair from a PEM string. + """ + assert self.empty() + self.POW = self.POW_class.pemReadPrivate(pem) + + def get_public_DER(self): + """ + Get the DER encoding of the public key from this keypair. + """ + return self.get_POW().derWritePublic() + + def get_SKI(self): + """ + Calculate the SKI of this keypair. + """ + return self.get_POW().calculateSKI() + + def get_public(self): + """ + Convert the public key of this keypair into a PublicKey object. 
+ """ + return PublicKey(DER = self.get_public_DER()) + +class PublicKey(DER_object): + """ + Class to hold a public key. + """ + + POW_class = rpki.POW.Asymmetric + + def get_DER(self): + """ + Get the DER value of this public key. + """ + self.check() + if self.DER: + return self.DER + if self.POW: + self.DER = self.POW.derWritePublic() + return self.get_DER() + raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") + + def get_POW(self): + """ + Get the rpki.POW value of this public key. + """ + self.check() + if not self.POW: # pylint: disable=E0203 + self.POW = rpki.POW.Asymmetric.derReadPublic(self.get_DER()) + return self.POW + + def get_PEM(self): + """ + Get the PEM representation of this public key. + """ + return self.get_POW().pemWritePublic() + + def _set_PEM(self, pem): + """ + Set the POW value of this public key from a PEM string. + """ + assert self.empty() + self.POW = self.POW_class.pemReadPublic(pem) + + def get_SKI(self): + """ + Calculate the SKI of this public key. + """ + return self.get_POW().calculateSKI() + +class KeyParams(DER_object): + """ + Wrapper for OpenSSL's asymmetric key parameter classes. + """ + + POW_class = rpki.POW.AsymmetricParams + + @classmethod + def generateEC(cls, curve = rpki.POW.EC_P256_CURVE): + return cls(POW = rpki.POW.AsymmetricParams.generateEC(curve = curve)) + +class RSA(PrivateKey): + """ + Class to hold an RSA key pair. + """ + + @classmethod + def generate(cls, keylength = 2048, quiet = False): + """ + Generate a new keypair. + """ + if not quiet: + rpki.log.debug("Generating new %d-bit RSA key" % keylength) + if generate_insecure_debug_only_rsa_key is not None: + return cls(POW = generate_insecure_debug_only_rsa_key()) + else: + return cls(POW = rpki.POW.Asymmetric.generateRSA(keylength)) + +class ECDSA(PrivateKey): + """ + Class to hold an ECDSA key pair. + """ + + @classmethod + def generate(cls, params = None, quiet = False): + """ + Generate a new keypair. 
+ """ + + if params is None: + if not quiet: + rpki.log.debug("Generating new ECDSA key parameters") + params = KeyParams.generateEC() + + assert isinstance(params, KeyParams) + + if not quiet: + rpki.log.debug("Generating new ECDSA key") + + return cls(POW = rpki.POW.Asymmetric.generateFromParams(params.get_POW())) + +class CMS_object(DER_object): + """ + Abstract class to hold a CMS object. + """ + + econtent_oid = rpki.oids.id_data + POW_class = rpki.POW.CMS + + ## @var dump_on_verify_failure + # Set this to True to get dumpasn1 dumps of ASN.1 on CMS verify failures. + + dump_on_verify_failure = True + + ## @var debug_cms_certs + # Set this to True to log a lot of chatter about CMS certificates. + + debug_cms_certs = False + + ## @var dump_using_dumpasn1 + # Set this to use external dumpasn1 program, which is prettier and + # more informative than OpenSSL's CMS text dump, but which won't + # work if the dumpasn1 program isn't installed. + + dump_using_dumpasn1 = False + + ## @var require_crls + # Set this to False to make CMS CRLs optional in the cases where we + # would otherwise require them. Some day this option should go away + # and CRLs should be uncondtionally mandatory in such cases. + + require_crls = False + + ## @var allow_extra_certs + # Set this to True to allow CMS messages to contain CA certificates. + + allow_extra_certs = False + + ## @var allow_extra_crls + # Set this to True to allow CMS messages to contain multiple CRLs. + + allow_extra_crls = False + + ## @var print_on_der_error + # Set this to True to log alleged DER when we have trouble parsing + # it, in case it's really a Perl backtrace or something. + + print_on_der_error = True + + def get_DER(self): + """ + Get the DER value of this CMS_object. 
+ """ + self.check() + if self.DER: + return self.DER + if self.POW: + self.DER = self.POW.derWrite() + return self.get_DER() + raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") + + def get_POW(self): + """ + Get the rpki.POW value of this CMS_object. + """ + self.check() + if not self.POW: # pylint: disable=E0203 + self.POW = self.POW_class.derRead(self.get_DER()) + return self.POW + + def get_signingTime(self): + """ + Extract signingTime from CMS signed attributes. + """ + return self.get_POW().signingTime() + + def verify(self, ta): + """ + Verify CMS wrapper and store inner content. + """ + + try: + cms = self.get_POW() + except (rpki.async.ExitNow, SystemExit): + raise + except Exception: + if self.print_on_der_error: + rpki.log.debug("Problem parsing DER CMS message, might not really be DER: %r" % + self.get_DER()) + raise rpki.exceptions.UnparsableCMSDER + + if cms.eContentType() != self.econtent_oid: + raise rpki.exceptions.WrongEContentType("Got CMS eContentType %s, expected %s" % ( + cms.eContentType(), self.econtent_oid)) + + certs = [X509(POW = x) for x in cms.certs()] + crls = [CRL(POW = c) for c in cms.crls()] + + if self.debug_cms_certs: + for x in certs: + rpki.log.debug("Received CMS cert issuer %s subject %s SKI %s" % ( + x.getIssuer(), x.getSubject(), x.hSKI())) + for c in crls: + rpki.log.debug("Received CMS CRL issuer %r" % (c.getIssuer(),)) + + store = rpki.POW.X509Store() + + now = rpki.sundial.now() + + trusted_ee = None + + for x in X509.normalize_chain(ta): + if self.debug_cms_certs: + rpki.log.debug("CMS trusted cert issuer %s subject %s SKI %s" % ( + x.getIssuer(), x.getSubject(), x.hSKI())) + if x.getNotAfter() < now: + raise rpki.exceptions.TrustedCMSCertHasExpired("Trusted CMS certificate has expired", + "%s (%s)" % (x.getSubject(), x.hSKI())) + if not x.is_CA(): + if trusted_ee is None: + trusted_ee = x + else: + raise rpki.exceptions.MultipleCMSEECert("Multiple CMS EE certificates", *("%s (%s)" % 
( + x.getSubject(), x.hSKI()) for x in ta if not x.is_CA())) + store.addTrust(x.get_POW()) + + if trusted_ee: + if self.debug_cms_certs: + rpki.log.debug("Trusted CMS EE cert issuer %s subject %s SKI %s" % ( + trusted_ee.getIssuer(), trusted_ee.getSubject(), trusted_ee.hSKI())) + if len(certs) > 1 or (len(certs) == 1 and + (certs[0].getSubject() != trusted_ee.getSubject() or + certs[0].getPublicKey() != trusted_ee.getPublicKey())): + raise rpki.exceptions.UnexpectedCMSCerts("Unexpected CMS certificates", *("%s (%s)" % ( + x.getSubject(), x.hSKI()) for x in certs)) + if crls: + raise rpki.exceptions.UnexpectedCMSCRLs("Unexpected CRLs", *("%s (%s)" % ( + c.getIssuer(), c.hAKI()) for c in crls)) + + else: + untrusted_ee = [x for x in certs if not x.is_CA()] + if len(untrusted_ee) < 1: + raise rpki.exceptions.MissingCMSEEcert + if len(untrusted_ee) > 1 or (not self.allow_extra_certs and len(certs) > len(untrusted_ee)): + raise rpki.exceptions.UnexpectedCMSCerts("Unexpected CMS certificates", *("%s (%s)" % ( + x.getSubject(), x.hSKI()) for x in certs)) + if len(crls) < 1: + if self.require_crls: + raise rpki.exceptions.MissingCMSCRL + else: + rpki.log.warn("MISSING CMS CRL! 
Ignoring per self.require_crls setting") + if len(crls) > 1 and not self.allow_extra_crls: + raise rpki.exceptions.UnexpectedCMSCRLs("Unexpected CRLs", *("%s (%s)" % ( + c.getIssuer(), c.hAKI()) for c in crls)) + + for x in certs: + if x.getNotAfter() < now: + raise rpki.exceptions.CMSCertHasExpired("CMS certificate has expired", "%s (%s)" % ( + x.getSubject(), x.hSKI())) + + for c in crls: + if c.getNextUpdate() < now: + rpki.log.warn("Stale BPKI CMS CRL (%s %s %s)" % (c.getNextUpdate(), c.getIssuer(), c.hAKI())) + + try: + content = cms.verify(store) + except (rpki.async.ExitNow, SystemExit): + raise + except Exception: + if self.dump_on_verify_failure: + if self.dump_using_dumpasn1: + dbg = self.dumpasn1() + else: + dbg = cms.pprint() + rpki.log.warn("CMS verification failed, dumping ASN.1 (%d octets):" % len(self.get_DER())) + for line in dbg.splitlines(): + rpki.log.warn(line) + raise rpki.exceptions.CMSVerificationFailed("CMS verification failed") + + return content + + def extract(self): + """ + Extract and store inner content from CMS wrapper without verifying + the CMS. + + DANGER WILL ROBINSON!!! + + Do not use this method on unvalidated data. Use the verify() + method instead. + + If you don't understand this warning, don't use this method. + """ + + try: + cms = self.get_POW() + except (rpki.async.ExitNow, SystemExit): + raise + except Exception: + raise rpki.exceptions.UnparsableCMSDER + + if cms.eContentType() != self.econtent_oid: + raise rpki.exceptions.WrongEContentType("Got CMS eContentType %s, expected %s" % ( + cms.eContentType(), self.econtent_oid)) + + return cms.verify(rpki.POW.X509Store(), None, + (rpki.POW.CMS_NOCRL | rpki.POW.CMS_NO_SIGNER_CERT_VERIFY | + rpki.POW.CMS_NO_ATTR_VERIFY | rpki.POW.CMS_NO_CONTENT_VERIFY)) + + + def sign(self, keypair, certs, crls = None, no_certs = False): + """ + Sign and wrap inner content. 
+ """ + + rpki.log.trace() + + if isinstance(certs, X509): + cert = certs + certs = () + else: + cert = certs[0] + certs = certs[1:] + + if crls is None: + crls = () + elif isinstance(crls, CRL): + crls = (crls,) + + if self.debug_cms_certs: + rpki.log.debug("Signing with cert issuer %s subject %s SKI %s" % ( + cert.getIssuer(), cert.getSubject(), cert.hSKI())) + for i, c in enumerate(certs): + rpki.log.debug("Additional cert %d issuer %s subject %s SKI %s" % ( + i, c.getIssuer(), c.getSubject(), c.hSKI())) + + self._sign(cert.get_POW(), + keypair.get_POW(), + [x.get_POW() for x in certs], + [c.get_POW() for c in crls], + rpki.POW.CMS_NOCERTS if no_certs else 0) + + @property + def creation_timestamp(self): + """ + Time at which this object was created. + """ + return self.get_signingTime() + + +class Wrapped_CMS_object(CMS_object): + """ + Abstract class to hold CMS objects wrapping non-DER content (eg, XML + or VCard). + + CMS-wrapped objects are a little different from the other DER_object + types because the signed object is CMS wrapping some other kind of + inner content. A Wrapped_CMS_object is the outer CMS wrapped object + so that the usual DER and PEM operations do the obvious things, and + the inner content is handle via separate methods. + """ + + other_clear = ("content",) + + def get_content(self): + """ + Get the inner content of this Wrapped_CMS_object. + """ + if self.content is None: + raise rpki.exceptions.CMSContentNotSet("Inner content of CMS object %r is not set" % self) + return self.content + + def set_content(self, content): + """ + Set the (inner) content of this Wrapped_CMS_object, clearing the wrapper. + """ + self.clear() + self.content = content + + def verify(self, ta): + """ + Verify CMS wrapper and store inner content. + """ + + self.decode(CMS_object.verify(self, ta)) + return self.get_content() + + def extract(self): + """ + Extract and store inner content from CMS wrapper without verifying + the CMS. + + DANGER WILL ROBINSON!!! 
+ + Do not use this method on unvalidated data. Use the verify() + method instead. + + If you don't understand this warning, don't use this method. + """ + + self.decode(CMS_object.extract(self)) + return self.get_content() + + def extract_if_needed(self): + """ + Extract inner content if needed. See caveats for .extract(), do + not use unless you really know what you are doing. + """ + + if self.content is None: + self.extract() + + def _sign(self, cert, keypair, certs, crls, flags): + """ + Internal method to call POW to do CMS signature. This is split + out from the .sign() API method to handle differences in how + different CMS-based POW classes handle the inner content. + """ + + cms = self.POW_class() + cms.sign(cert, keypair, self.encode(), certs, crls, self.econtent_oid, flags) + self.POW = cms + + +class DER_CMS_object(CMS_object): + """ + Abstract class for CMS-based objects with DER-encoded content + handled by C-level subclasses of rpki.POW.CMS. + """ + + def _sign(self, cert, keypair, certs, crls, flags): + self.get_POW().sign(cert, keypair, certs, crls, self.econtent_oid, flags) + + + def extract_if_needed(self): + """ + Extract inner content if needed. See caveats for .extract(), do + not use unless you really know what you are doing. + """ + + try: + self.get_POW().getVersion() + except rpki.POW.NotVerifiedError: + self.extract() + + +class SignedManifest(DER_CMS_object): + """ + Class to hold a signed manifest. + """ + + econtent_oid = rpki.oids.id_ct_rpkiManifest + POW_class = rpki.POW.Manifest + + def getThisUpdate(self): + """ + Get thisUpdate value from this manifest. + """ + return self.get_POW().getThisUpdate() + + def getNextUpdate(self): + """ + Get nextUpdate value from this manifest. + """ + return self.get_POW().getNextUpdate() + + @classmethod + def build(cls, serial, thisUpdate, nextUpdate, names_and_objs, keypair, certs, version = 0): + """ + Build a signed manifest. 
+ """ + + filelist = [] + for name, obj in names_and_objs: + d = rpki.POW.Digest(rpki.POW.SHA256_DIGEST) + d.update(obj.get_DER()) + filelist.append((name.rpartition("/")[2], d.digest())) + filelist.sort(key = lambda x: x[0]) + + obj = cls.POW_class() + obj.setVersion(version) + obj.setManifestNumber(serial) + obj.setThisUpdate(thisUpdate) + obj.setNextUpdate(nextUpdate) + obj.setAlgorithm(rpki.oids.id_sha256) + obj.addFiles(filelist) + + self = cls(POW = obj) + self.sign(keypair, certs) + return self + +class ROA(DER_CMS_object): + """ + Class to hold a signed ROA. + """ + + econtent_oid = rpki.oids.id_ct_routeOriginAttestation + POW_class = rpki.POW.ROA + + @classmethod + def build(cls, asn, ipv4, ipv6, keypair, certs, version = 0): + """ + Build a ROA. + """ + ipv4 = ipv4.to_POW_roa_tuple() if ipv4 else None + ipv6 = ipv6.to_POW_roa_tuple() if ipv6 else None + obj = cls.POW_class() + obj.setVersion(version) + obj.setASID(asn) + obj.setPrefixes(ipv4 = ipv4, ipv6 = ipv6) + self = cls(POW = obj) + self.sign(keypair, certs) + return self + + def tracking_data(self, uri): + """ + Return a string containing data we want to log when tracking how + objects move through the RPKI system. + """ + msg = DER_CMS_object.tracking_data(self, uri) + try: + self.extract_if_needed() + asn = self.get_POW().getASID() + text = [] + for prefixes in self.get_POW().getPrefixes(): + if prefixes is not None: + for prefix, prefixlen, maxprefixlen in prefixes: + if maxprefixlen is None or prefixlen == maxprefixlen: + text.append("%s/%s" % (prefix, prefixlen)) + else: + text.append("%s/%s-%s" % (prefix, prefixlen, maxprefixlen)) + text.sort() + msg = "%s %s %s" % (msg, asn, ",".join(text)) + except: # pylint: disable=W0702 + pass + return msg + +class DeadDrop(object): + """ + Dead-drop utility for storing copies of CMS messages for debugging or + audit. 
At the moment this uses Maildir mailbox format, as it has + approximately the right properties and a number of useful tools for + manipulating it already exist. + """ + + def __init__(self, name): + self.name = name + self.pid = os.getpid() + self.maildir = mailbox.Maildir(name, factory = None, create = True) + self.warned = False + + def dump(self, obj): + try: + now = time.time() + msg = email.mime.application.MIMEApplication(obj.get_DER(), "x-rpki") + msg["Date"] = email.utils.formatdate(now) + msg["Subject"] = "Process %s dump of %r" % (self.pid, obj) + msg["Message-ID"] = email.utils.make_msgid() + msg["X-RPKI-PID"] = str(self.pid) + msg["X-RPKI-Object"] = repr(obj) + msg["X-RPKI-Timestamp"] = "%f" % now + self.maildir.add(msg) + self.warned = False + except Exception, e: + if not self.warned: + rpki.log.warn("Could not write to mailbox %s: %s" % (self.name, e)) + self.warned = True + +class XML_CMS_object(Wrapped_CMS_object): + """ + Class to hold CMS-wrapped XML protocol data. + """ + + econtent_oid = rpki.oids.id_ct_xml + + ## @var dump_outbound_cms + # If set, we write all outbound XML-CMS PDUs to disk, for debugging. + # If set, value should be a DeadDrop object. + + dump_outbound_cms = None + + ## @var dump_inbound_cms + # If set, we write all inbound XML-CMS PDUs to disk, for debugging. + # If set, value should be a DeadDrop object. + + dump_inbound_cms = None + + ## @var check_inbound_schema + # If set, perform RelaxNG schema check on inbound messages. + + check_inbound_schema = True + + ## @var check_outbound_schema + # If set, perform RelaxNG schema check on outbound messages. + + check_outbound_schema = False + + def encode(self): + """ + Encode inner content for signing. + """ + return lxml.etree.tostring(self.get_content(), + pretty_print = True, + encoding = self.encoding, + xml_declaration = True) + + def decode(self, xml): + """ + Decode XML and set inner content. 
+ """ + self.content = lxml.etree.fromstring(xml) + + def pretty_print_content(self): + """ + Pretty print XML content of this message. + """ + return lxml.etree.tostring(self.get_content(), + pretty_print = True, + encoding = self.encoding, + xml_declaration = True) + + def schema_check(self): + """ + Handle XML RelaxNG schema check. + """ + try: + self.schema.assertValid(self.get_content()) + except lxml.etree.DocumentInvalid: + rpki.log.error("PDU failed schema check") + for line in self.pretty_print_content().splitlines(): + rpki.log.warn(line) + raise + + def dump_to_disk(self, prefix): + """ + Write DER of current message to disk, for debugging. + """ + f = open(prefix + rpki.sundial.now().isoformat() + "Z.cms", "wb") + f.write(self.get_DER()) + f.close() + + def wrap(self, msg, keypair, certs, crls = None): + """ + Wrap an XML PDU in CMS and return its DER encoding. + """ + rpki.log.trace() + if self.saxify is None: + self.set_content(msg) + else: + self.set_content(msg.toXML()) + if self.check_outbound_schema: + self.schema_check() + self.sign(keypair, certs, crls) + if self.dump_outbound_cms: + self.dump_outbound_cms.dump(self) + return self.get_DER() + + def unwrap(self, ta): + """ + Unwrap a CMS-wrapped XML PDU and return Python objects. + """ + if self.dump_inbound_cms: + self.dump_inbound_cms.dump(self) + self.verify(ta) + if self.check_inbound_schema: + self.schema_check() + if self.saxify is None: + return self.get_content() + else: + return self.saxify(self.get_content()) # pylint: disable=E1102 + + def check_replay(self, timestamp, *context): + """ + Check CMS signing-time in this object against a recorded + timestamp. Raises an exception if the recorded timestamp is more + recent, otherwise returns the new timestamp. 
+ """ + new_timestamp = self.get_signingTime() + if timestamp is not None and timestamp > new_timestamp: + if context: + context = " (" + " ".join(context) + ")" + raise rpki.exceptions.CMSReplay( + "CMS replay: last message %s, this message %s%s" % ( + timestamp, new_timestamp, context)) + return new_timestamp + + def check_replay_sql(self, obj, *context): + """ + Like .check_replay() but gets recorded timestamp from + "last_cms_timestamp" field of an SQL object and stores the new + timestamp back in that same field. + """ + obj.last_cms_timestamp = self.check_replay(obj.last_cms_timestamp, *context) + obj.sql_mark_dirty() + + ## @var saxify + # SAX handler hook. Subclasses can set this to a SAX handler, in + # which case .unwrap() will call it and return the result. + # Otherwise, .unwrap() just returns a verified element tree. + + saxify = None + +class SignedReferral(XML_CMS_object): + encoding = "us-ascii" + schema = rpki.relaxng.myrpki + saxify = None + +class Ghostbuster(Wrapped_CMS_object): + """ + Class to hold Ghostbusters record (CMS-wrapped VCard). This is + quite minimal because we treat the VCard as an opaque byte string + managed by the back-end. + """ + + econtent_oid = rpki.oids.id_ct_rpkiGhostbusters + + def encode(self): + """ + Encode inner content for signing. At the moment we're treating + the VCard as an opaque byte string, so no encoding needed here. + """ + return self.get_content() + + def decode(self, vcard): + """ + Decode XML and set inner content. At the moment we're treating + the VCard as an opaque byte string, so no encoding needed here. + """ + self.content = vcard + + @classmethod + def build(cls, vcard, keypair, certs): + """ + Build a Ghostbuster record. + """ + self = cls() + self.set_content(vcard) + self.sign(keypair, certs) + return self + + +class CRL(DER_object): + """ + Class to hold a Certificate Revocation List. + """ + + POW_class = rpki.POW.CRL + + def get_DER(self): + """ + Get the DER value of this CRL. 
+ """ + self.check() + if self.DER: + return self.DER + if self.POW: + self.DER = self.POW.derWrite() + return self.get_DER() + raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") + + def get_POW(self): + """ + Get the rpki.POW value of this CRL. + """ + self.check() + if not self.POW: # pylint: disable=E0203 + self.POW = rpki.POW.CRL.derRead(self.get_DER()) + return self.POW + + def getThisUpdate(self): + """ + Get thisUpdate value from this CRL. + """ + return self.get_POW().getThisUpdate() + + def getNextUpdate(self): + """ + Get nextUpdate value from this CRL. + """ + return self.get_POW().getNextUpdate() + + def getIssuer(self): + """ + Get issuer value of this CRL. + """ + return X501DN.from_POW(self.get_POW().getIssuer()) + + def getCRLNumber(self): + """ + Get CRL Number value for this CRL. + """ + return self.get_POW().getCRLNumber() + + @classmethod + def generate(cls, keypair, issuer, serial, thisUpdate, nextUpdate, revokedCertificates, version = 1): + """ + Generate a new CRL. + """ + crl = rpki.POW.CRL() + crl.setVersion(version) + crl.setIssuer(issuer.getSubject().get_POW()) + crl.setThisUpdate(thisUpdate) + crl.setNextUpdate(nextUpdate) + crl.setAKI(issuer.get_SKI()) + crl.setCRLNumber(serial) + crl.addRevocations(revokedCertificates) + crl.sign(keypair.get_POW()) + return cls(POW = crl) + + @property + def creation_timestamp(self): + """ + Time at which this object was created. + """ + return self.getThisUpdate() + +## @var uri_dispatch_map +# Map of known URI filename extensions and corresponding classes. + +uri_dispatch_map = { + ".cer" : X509, + ".crl" : CRL, + ".gbr" : Ghostbuster, + ".mft" : SignedManifest, + ".mnf" : SignedManifest, + ".roa" : ROA, + } + +def uri_dispatch(uri): + """ + Return the Python class object corresponding to a given URI. 
+ """ + return uri_dispatch_map[os.path.splitext(uri)[1]] diff --git a/rpki/xml_utils.py b/rpki/xml_utils.py new file mode 100644 index 00000000..f254fd11 --- /dev/null +++ b/rpki/xml_utils.py @@ -0,0 +1,494 @@ +# $Id$ +# +# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. +# +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +XML utilities. 
+""" + +import xml.sax +import lxml.sax +import lxml.etree +import rpki.exceptions + +class sax_handler(xml.sax.handler.ContentHandler): + """ + SAX handler for RPKI protocols. + + This class provides some basic amenities for parsing protocol XML of + the kind we use in the RPKI protocols, including whacking all the + protocol element text into US-ASCII, simplifying accumulation of + text fields, and hiding some of the fun relating to XML namespaces. + + General assumption: by the time this parsing code gets invoked, the + XML has already passed RelaxNG validation, so we only have to check + for errors that the schema can't catch, and we don't have to play as + many XML namespace games. + """ + + def __init__(self): + """ + Initialize SAX handler. + """ + xml.sax.handler.ContentHandler.__init__(self) + self.text = "" + self.stack = [] + + def startElementNS(self, name, qname, attrs): + """ + Redirect startElementNS() events to startElement(). + """ + return self.startElement(name[1], attrs) + + def endElementNS(self, name, qname): + """ + Redirect endElementNS() events to endElement(). + """ + return self.endElement(name[1]) + + def characters(self, content): + """ + Accumulate a chuck of element content (text). + """ + self.text += content + + def startElement(self, name, attrs): + """ + Handle startElement() events. + + We maintain a stack of nested elements under construction so that + we can feed events directly to the current element rather than + having to pass them through all the nesting elements. + + If the stack is empty, this event is for the outermost element, so + we call a virtual method to create the corresponding object and + that's the object we'll be returning as our final result. 
+ """ + + a = dict() + for k, v in attrs.items(): + if isinstance(k, tuple): + if k == ("http://www.w3.org/XML/1998/namespace", "lang"): + k = "xml:lang" + else: + assert k[0] is None + k = k[1] + a[k.encode("ascii")] = v.encode("ascii") + if len(self.stack) == 0: + assert not hasattr(self, "result") + self.result = self.create_top_level(name, a) + self.stack.append(self.result) + self.stack[-1].startElement(self.stack, name, a) + + def endElement(self, name): + """ + Handle endElement() events. Mostly this means handling any + accumulated element text. + """ + text = self.text.encode("ascii").strip() + self.text = "" + self.stack[-1].endElement(self.stack, name, text) + + @classmethod + def saxify(cls, elt): + """ + Create a one-off SAX parser, parse an ETree, return the result. + """ + self = cls() + lxml.sax.saxify(elt, self) + return self.result + + def create_top_level(self, name, attrs): + """ + Handle top-level PDU for this protocol. + """ + assert name == self.name and attrs["version"] == self.version + return self.pdu() + +class base_elt(object): + """ + Virtual base class for XML message elements. The left-right and + publication protocols use this. At least for now, the up-down + protocol does not, due to different design assumptions. + """ + + ## @var attributes + # XML attributes for this element. + attributes = () + + ## @var elements + # XML elements contained by this element. + elements = () + + ## @var booleans + # Boolean attributes (value "yes" or "no") for this element. + booleans = () + + def startElement(self, stack, name, attrs): + """ + Default startElement() handler: just process attributes. + """ + if name not in self.elements: + assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) + self.read_attrs(attrs) + + def endElement(self, stack, name, text): + """ + Default endElement() handler: just pop the stack. 
+ """ + assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) + stack.pop() + + def toXML(self): + """ + Default toXML() element generator. + """ + return self.make_elt() + + def read_attrs(self, attrs): + """ + Template-driven attribute reader. + """ + for key in self.attributes: + val = attrs.get(key, None) + if isinstance(val, str) and val.isdigit() and not key.endswith("_handle"): + val = long(val) + setattr(self, key, val) + for key in self.booleans: + setattr(self, key, attrs.get(key, False)) + + def make_elt(self): + """ + XML element constructor. + """ + elt = lxml.etree.Element("{%s}%s" % (self.xmlns, self.element_name), nsmap = self.nsmap) + for key in self.attributes: + val = getattr(self, key, None) + if val is not None: + elt.set(key, str(val)) + for key in self.booleans: + if getattr(self, key, False): + elt.set(key, "yes") + return elt + + def make_b64elt(self, elt, name, value): + """ + Constructor for Base64-encoded subelement. + """ + if value is not None and not value.empty(): + lxml.etree.SubElement(elt, "{%s}%s" % (self.xmlns, name), nsmap = self.nsmap).text = value.get_Base64() + + def __str__(self): + """ + Convert a base_elt object to string format. + """ + return lxml.etree.tostring(self.toXML(), pretty_print = True, encoding = "us-ascii") + + @classmethod + def make_pdu(cls, **kargs): + """ + Generic PDU constructor. + """ + self = cls() + for k, v in kargs.items(): + if isinstance(v, bool): + v = 1 if v else 0 + setattr(self, k, v) + return self + +class text_elt(base_elt): + """ + Virtual base class for XML message elements that contain text. + """ + + ## @var text_attribute + # Name of the class attribute that holds the text value. + text_attribute = None + + def endElement(self, stack, name, text): + """ + Extract text from parsed XML. + """ + base_elt.endElement(self, stack, name, text) + setattr(self, self.text_attribute, text) + + def toXML(self): + """ + Insert text into generated XML. 
+ """ + elt = self.make_elt() + elt.text = getattr(self, self.text_attribute) or None + return elt + +class data_elt(base_elt): + """ + Virtual base class for PDUs that map to SQL objects. These objects + all implement the create/set/get/list/destroy action attribute. + """ + + def endElement(self, stack, name, text): + """ + Default endElement handler for SQL-based objects. This assumes + that sub-elements are Base64-encoded using the sql_template + mechanism. + """ + if name in self.elements: + elt_type = self.sql_template.map.get(name) + assert elt_type is not None, "Couldn't find element type for %s, stack %s" % (name, stack) + setattr(self, name, elt_type(Base64 = text)) + else: + assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) + stack.pop() + + def toXML(self): + """ + Default element generator for SQL-based objects. This assumes + that sub-elements are Base64-encoded DER objects. + """ + elt = self.make_elt() + for i in self.elements: + self.make_b64elt(elt, i, getattr(self, i, None)) + return elt + + def make_reply(self, r_pdu = None): + """ + Construct a reply PDU. + """ + if r_pdu is None: + r_pdu = self.__class__() + self.make_reply_clone_hook(r_pdu) + handle_name = self.element_name + "_handle" + setattr(r_pdu, handle_name, getattr(self, handle_name, None)) + else: + self.make_reply_clone_hook(r_pdu) + for b in r_pdu.booleans: + setattr(r_pdu, b, False) + r_pdu.action = self.action + r_pdu.tag = self.tag + return r_pdu + + def make_reply_clone_hook(self, r_pdu): + """ + Overridable hook. + """ + pass + + def serve_fetch_one(self): + """ + Find the object on which a get, set, or destroy method should + operate. + """ + r = self.serve_fetch_one_maybe() + if r is None: + raise rpki.exceptions.NotFound + return r + + def serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Overridable hook. + """ + cb() + + def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): + """ + Overridable hook. 
+ """ + cb() + + def serve_create(self, r_msg, cb, eb): + """ + Handle a create action. + """ + + r_pdu = self.make_reply() + + def one(): + self.sql_store() + setattr(r_pdu, self.sql_template.index, getattr(self, self.sql_template.index)) + self.serve_post_save_hook(self, r_pdu, two, eb) + + def two(): + r_msg.append(r_pdu) + cb() + + oops = self.serve_fetch_one_maybe() + if oops is not None: + raise rpki.exceptions.DuplicateObject, "Object already exists: %r[%r] %r[%r]" % (self, getattr(self, self.element_name + "_handle"), + oops, getattr(oops, oops.element_name + "_handle")) + + self.serve_pre_save_hook(self, r_pdu, one, eb) + + def serve_set(self, r_msg, cb, eb): + """ + Handle a set action. + """ + + db_pdu = self.serve_fetch_one() + r_pdu = self.make_reply() + for a in db_pdu.sql_template.columns[1:]: + v = getattr(self, a, None) + if v is not None: + setattr(db_pdu, a, v) + db_pdu.sql_mark_dirty() + + def one(): + db_pdu.sql_store() + db_pdu.serve_post_save_hook(self, r_pdu, two, eb) + + def two(): + r_msg.append(r_pdu) + cb() + + db_pdu.serve_pre_save_hook(self, r_pdu, one, eb) + + def serve_get(self, r_msg, cb, eb): + """ + Handle a get action. + """ + r_pdu = self.serve_fetch_one() + self.make_reply(r_pdu) + r_msg.append(r_pdu) + cb() + + def serve_list(self, r_msg, cb, eb): + """ + Handle a list action for non-self objects. + """ + for r_pdu in self.serve_fetch_all(): + self.make_reply(r_pdu) + r_msg.append(r_pdu) + cb() + + def serve_destroy_hook(self, cb, eb): + """ + Overridable hook. + """ + cb() + + def serve_destroy(self, r_msg, cb, eb): + """ + Handle a destroy action. + """ + def done(): + db_pdu.sql_delete() + r_msg.append(self.make_reply()) + cb() + db_pdu = self.serve_fetch_one() + db_pdu.serve_destroy_hook(done, eb) + + def serve_dispatch(self, r_msg, cb, eb): + """ + Action dispatch handler. 
+ """ + dispatch = { "create" : self.serve_create, + "set" : self.serve_set, + "get" : self.serve_get, + "list" : self.serve_list, + "destroy" : self.serve_destroy } + if self.action not in dispatch: + raise rpki.exceptions.BadQuery, "Unexpected query: action %s" % self.action + dispatch[self.action](r_msg, cb, eb) + + def unimplemented_control(self, *controls): + """ + Uniform handling for unimplemented control operations. + """ + unimplemented = [x for x in controls if getattr(self, x, False)] + if unimplemented: + raise rpki.exceptions.NotImplementedYet, "Unimplemented control %s" % ", ".join(unimplemented) + +class msg(list): + """ + Generic top-level PDU. + """ + + def startElement(self, stack, name, attrs): + """ + Handle top-level PDU. + """ + if name == "msg": + assert self.version == int(attrs["version"]) + self.type = attrs["type"] + else: + elt = self.pdus[name]() + self.append(elt) + stack.append(elt) + elt.startElement(stack, name, attrs) + + def endElement(self, stack, name, text): + """ + Handle top-level PDU. + """ + assert name == "msg", "Unexpected name %s, stack %s" % (name, stack) + assert len(stack) == 1 + stack.pop() + + def __str__(self): + """ + Convert msg object to string. + """ + return lxml.etree.tostring(self.toXML(), pretty_print = True, encoding = "us-ascii") + + def toXML(self): + """ + Generate top-level PDU. + """ + elt = lxml.etree.Element("{%s}msg" % (self.xmlns), nsmap = self.nsmap, version = str(self.version), type = self.type) + elt.extend([i.toXML() for i in self]) + return elt + + @classmethod + def query(cls, *args): + """ + Create a query PDU. + """ + self = cls(args) + self.type = "query" + return self + + @classmethod + def reply(cls, *args): + """ + Create a reply PDU. + """ + self = cls(args) + self.type = "reply" + return self + + def is_query(self): + """ + Is this msg a query? + """ + return self.type == "query" + + def is_reply(self): + """ + Is this msg a reply? 
+ """ + return self.type == "reply" diff --git a/rpkid/Doxyfile b/rpkid/Doxyfile deleted file mode 100644 index 33f39f4b..00000000 --- a/rpkid/Doxyfile +++ /dev/null @@ -1,1705 +0,0 @@ -# Doxyfile 1.7.3 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project. -# -# All text after a hash (#) is considered a comment and will be ignored. -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" "). - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "RPKI Engine" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = 1.0 - -# Using the PROJECT_BRIEF tag one can provide an optional one line description for a project that appears at the top of each page and should give viewer a quick idea about the purpose of the project. Keep the description short. - -PROJECT_BRIEF = - -# With the PROJECT_LOGO tag one can specify an logo or icon that is -# included in the documentation. 
The maximum height of the logo should not -# exceed 55 pixels and the maximum width should not exceed 200 pixels. -# Doxygen will copy the logo to the output directory. - -PROJECT_LOGO = - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = doc - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrillic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. 
- -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. 
- -FULL_PATH_NAMES = NO - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful if your file system -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = NO - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. 
This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 8 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = YES - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. 
Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it -# parses. With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this -# tag. The format is ext=language, where ext is a file extension, and language -# is one of the parsers supported by doxygen: IDL, Java, Javascript, CSharp, C, -# C++, D, PHP, Objective-C, Python, Fortran, VHDL, C, C++. For instance to make -# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C -# (default is Fortran), use: inc=Fortran f=C. Note that for custom extensions -# you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also makes the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = NO - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. -# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. 
- -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. 
-# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penalty. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will roughly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = YES - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = YES - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. 
-# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespaces are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. 
- -INTERNAL_DOCS = NO - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = YES - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the FORCE_LOCAL_INCLUDES tag is set to YES then Doxygen -# will list include files with double quotes in the documentation -# rather than with sharp brackets. - -FORCE_LOCAL_INCLUDES = NO - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = YES - -# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen -# will sort the (brief and detailed) documentation of class members so that -# constructors and destructors are listed first. 
If set to NO (the default) -# the constructors will appear in the respective orders defined by -# SORT_MEMBER_DOCS and SORT_BRIEF_DOCS. -# This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO -# and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO. - -SORT_MEMBERS_CTORS_1ST = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper type resolution of all parameters of a function it will reject a -# match between the prototype and the implementation of a member function even if there is only one candidate or it is obvious which candidate to choose by doing a simple string match. By disabling STRICT_PROTO_MATCHING doxygen -# will still accept a match between prototype and implementation in such cases. - -STRICT_PROTO_MATCHING = NO - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = YES - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. 
This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = YES - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or macro consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and macros in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = NO - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. -# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. 
- -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = "perl -e '@a = split(q( ), qx(svn stat -v $ARGV[0])); shift @a until $a[0] =~ /^[0-9]+$/ or @a == 0; shift @a; print shift(@a), qq(\n)'" - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed -# by doxygen. The layout file controls the global structure of the generated -# output files in an output format independent way. The create the layout file -# that represents doxygen's defaults, run doxygen with the -l option. -# You can optionally specify a file name after the option, if omitted -# DoxygenLayout.xml will be used as the name of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = YES - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. 
- -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# The WARN_NO_PARAMDOC option can be enabled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. 
- -INPUT = doc \ - rpki \ - rpki/gui \ - rpki/gui/app \ - rpki/gui/app/templates \ - rpki/gui/app/templates/registration \ - rpki/gui/app/templates/rpkigui \ - rpki/gui/cacheview \ - rpki/gui/cacheview/templates \ - rpki/gui/cacheview/templates/cacheview \ - rpki/gui/templates \ - rpkid.py \ - pubd.py \ - irdbd.py \ - rootd.py \ - irbe_cli.py - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.d *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh -# *.hxx *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.dox *.py -# *.f90 *.f *.for *.vhd *.vhdl - -FILE_PATTERNS = *.py - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = NO - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix file system feature) are excluded -# from the input. 
- -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = . \ - examples - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = $(IMAGE_PATH) - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. 
Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty or if -# non of the patterns match the file name, INPUT_FILTER is applied. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file -# pattern. A pattern will override the setting for FILTER_PATTERN (if any) -# and it is also possible to disable source filtering for a specific pattern -# using *.ext= (so without naming a filter). This option only has effect when -# FILTER_SOURCE_FILES is enabled. - -FILTER_SOURCE_PATTERNS = - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. 
- -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. 
- -VERBATIM_HEADERS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = YES - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = html - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. 
If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. -# Doxygen will adjust the colors in the stylesheet and background images -# according to this color. Hue is specified as an angle on a colorwheel, -# see http://en.wikipedia.org/wiki/Hue for more information. -# For instance the value 0 represents red, 60 is yellow, 120 is green, -# 180 is cyan, 240 is blue, 300 purple, and 360 is red again. -# The allowed range is 0 to 359. - -HTML_COLORSTYLE_HUE = 220 - -# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of -# the colors in the HTML output. For a value of 0 the output will use -# grayscales only. A value of 255 will produce the most vivid colors. - -HTML_COLORSTYLE_SAT = 100 - -# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to -# the luminance component of the colors in the HTML output. Values below -# 100 gradually make the output lighter, whereas values above 100 make -# the output darker. The value divided by 100 is the actual gamma applied, -# so 80 represents a gamma of 0.8, The value 220 represents a gamma of 2.2, -# and 100 does not change the gamma. 
- -HTML_COLORSTYLE_GAMMA = 80 - -# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML -# page will contain the date and time when the page was generated. Setting -# this to NO can help when comparing the output of multiple runs. - -HTML_TIMESTAMP = YES - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = NO - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html -# for more information. - -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. 
This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify -# the documentation publisher. This should be a reverse domain-name style -# string, e.g. com.mycompany.MyDocSet.documentation. - -DOCSET_PUBLISHER_ID = org.doxygen.Publisher - -# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. - -DOCSET_PUBLISHER_NAME = Publisher - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = NO - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = NO - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. 
- -BINARY_TOC = NO - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = NO - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and -# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated -# that can be used as input for Qt's qhelpgenerator to generate a -# Qt Compressed Help (.qch) of the generated HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to -# add. For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the -# custom filter to add. For more information please see -# -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this -# project's -# filter section matches. -# -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. 
- -QHG_LOCATION = - -# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files -# will be generated, which together with the HTML files, form an Eclipse help -# plugin. To install this plugin and make it available under the help contents -# menu in Eclipse, the contents of the directory containing the HTML and XML -# files needs to be copied into the plugins directory of eclipse. The name of -# the directory within the plugins directory should be the same as -# the ECLIPSE_DOC_ID value. After copying Eclipse needs to be restarted before -# the help appears. - -GENERATE_ECLIPSEHELP = NO - -# A unique identifier for the eclipse help plugin. When installing the plugin -# the directory name containing the HTML and XML files should also have -# this name. - -ECLIPSE_DOC_ID = org.doxygen.Project - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [0,1..20]) -# that doxygen will group on one line in the generated HTML documentation. -# Note that a value of 0 will completely suppress the enum values from appearing in the overview section. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to YES, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser). -# Windows users are probably better off using the HTML help feature. - -GENERATE_TREEVIEW = NO - -# By enabling USE_INLINE_TREES, doxygen will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list. 
- -USE_INLINE_TREES = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open -# links to external symbols imported via tag files in a separate window. - -EXT_LINKS_IN_WINDOW = NO - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -# Use the FORMULA_TRANPARENT tag to determine whether or not the images -# generated for formulas are transparent PNGs. Transparent PNGs are -# not supported properly for IE 6.0, but are supported on all modern browsers. -# Note that when changing this option you need to delete any form_*.png files -# in the HTML output before the changes have effect. - -FORMULA_TRANSPARENT = YES - -# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax -# (see http://www.mathjax.org) which uses client side Javascript for the -# rendering instead of using prerendered bitmaps. Use this if you do not -# have LaTeX installed or if you want to formulas look prettier in the HTML -# output. When enabled you also need to install MathJax separately and -# configure the path to it using the MATHJAX_RELPATH option. - -USE_MATHJAX = NO - -# When MathJax is enabled you need to specify the location relative to the -# HTML output directory using the MATHJAX_RELPATH option. The destination -# directory should contain the MathJax.js script. For instance, if the mathjax -# directory is located at the same level as the HTML output directory, then -# MATHJAX_RELPATH should be ../mathjax. 
The default value points to the mathjax.org site, so you can quickly see the result without installing -# MathJax, but it is strongly recommended to install a local copy of MathJax -# before deployment. - -MATHJAX_RELPATH = http://www.mathjax.org/mathjax - -# When the SEARCHENGINE tag is enabled doxygen will generate a search box -# for the HTML output. The underlying search engine uses javascript -# and DHTML and should work on any modern browser. Note that when using -# HTML help (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets -# (GENERATE_DOCSET) there is already a search function so this one should -# typically be disabled. For large projects the javascript based search engine -# can be slow, then enabling SERVER_BASED_SEARCH may provide a better solution. - -SEARCHENGINE = YES - -# When the SERVER_BASED_SEARCH tag is enabled the search engine will be -# implemented using a PHP enabled web server instead of at the web client -# using Javascript. Doxygen will generate the search PHP script and index -# file to put on the web server. The advantage of the server -# based approach is that it scales better to large projects and allows -# full text search. The disadvantages are that it is more difficult to setup -# and does not have live searching capabilities. - -SERVER_BASED_SEARCH = NO - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. 
If left blank `latex' will be used as the default command name. -# Note that when enabling USE_PDFLATEX this option is only used for -# generating bitmaps for formulas in the HTML output, but not in the -# Makefile that is written to the output directory. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = YES - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = letter - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = YES - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = YES - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. 
-# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = YES - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = YES - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include -# source code with syntax highlighting in the LaTeX output. -# Note that which sources are shown also depends on other settings -# such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. 
a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = YES - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. 
- -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. 
- -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = NO - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. 
- -INCLUDE_PATH = - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition that overrules the definition found in the source code. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all references to function-like macros -# that are alone on a line, have an all uppercase name, and do not end with a -# semicolon, because these will confuse the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... 
-# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option also works with HAVE_DOT disabled, but it is recommended to -# install and use dot, since it yields more powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. 
Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = YES - -# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is -# allowed to run in parallel. When set to 0 (the default) doxygen will -# base this on the number of processors available in the system. You can set it -# explicitly to a value larger than 0 to get control over the balance -# between CPU load and processing speed. - -DOT_NUM_THREADS = 0 - -# By default doxygen will write a font called Helvetica to the output -# directory and reference it in all dot files that doxygen generates. -# When you want a differently looking font you can specify the font name -# using DOT_FONTNAME. You need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). 
If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = YES - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. 
Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = YES - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will generate a graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, svg, gif or svg. -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = doc - -# The MSCFILE_DIRS tag can be used to specify one or more directories that -# contain msc files that are included in the documentation (see the -# \mscfile command). - -MSCFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. 
If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 0 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. 
- -GENERATE_LEGEND = NO - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = NO diff --git a/rpkid/Makefile.in b/rpkid/Makefile.in deleted file mode 100644 index d36a3163..00000000 --- a/rpkid/Makefile.in +++ /dev/null @@ -1,339 +0,0 @@ -# $Id$ - -PYTHON = @PYTHON@ -TRANG = @TRANG@ - -CFLAGS = @CFLAGS@ -LDFLAGS = @LDFLAGS@ @POW_LDFLAGS@ -LIBS = @LIBS@ - -INSTALL = @INSTALL@ -m 555 - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -sysconfdir = @sysconfdir@ - -abs_builddir = @abs_builddir@ -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir= @abs_top_builddir@ -srcdir = @srcdir@ - -WSGI_DAEMON_PROCESS = @WSGI_DAEMON_PROCESS@ -WSGI_PROCESS_GROUP = @WSGI_PROCESS_GROUP@ -RCYNIC_HTML_DIR = @RCYNIC_HTML_DIR@ -APACHE_VERSION = @APACHE_VERSION@ -WSGI_PYTHON_EGG_CACHE_DIR = @WSGI_PYTHON_EGG_CACHE_DIR@ -WSGI_PYTHON_EGG_CACHE_USER = @WSGI_PYTHON_EGG_CACHE_USER@ - -RPKID_INSTALL_TARGETS = @RPKID_INSTALL_TARGETS@ - -SETUP_PY_INSTALL_LAYOUT = @SETUP_PY_INSTALL_LAYOUT@ - -SETUP_PY_ROOT = `${PYTHON} -c 'import sys; print "--root " + sys.argv[1] if sys.argv[1] else ""' '${DESTDIR}'` - -POW_SO = rpki/POW/_POW.so - -all:: rpki/autoconf.py setup_autoconf.py rpki/relaxng.py myrpki.rng rpki/sql_schemas.py ${POW_SO} build/stamp - -.FORCE: - -${POW_SO}: .FORCE setup_autoconf.py - ${PYTHON} setup.py build_ext --inplace - -build/stamp: .FORCE setup_autoconf.py - ${PYTHON} setup.py build - touch $@ - -clean:: - rm -rf ${POW_SO} build dist - -RNGS = left-right-schema.rng up-down-schema.rng publication-schema.rng myrpki.rng router-certificate-schema.rng - -rpki/relaxng.py: ${abs_top_srcdir}/buildtools/make-relaxng.py ${RNGS} - ${PYTHON} 
${abs_top_srcdir}/buildtools/make-relaxng.py ${RNGS} >$@.tmp - mv $@.tmp $@ - -left-right-schema.rng: left-right-schema.rnc - ${TRANG} left-right-schema.rnc left-right-schema.rng - -up-down-schema.rng: up-down-schema.rnc - ${TRANG} up-down-schema.rnc up-down-schema.rng - -publication-schema.rng: publication-schema.rnc - ${TRANG} publication-schema.rnc publication-schema.rng - -myrpki.rng: myrpki.rnc - ${TRANG} myrpki.rnc myrpki.rng - -router-certificate-schema.rng: router-certificate-schema.rnc - ${TRANG} router-certificate-schema.rnc router-certificate-schema.rng - -rpki/sql_schemas.py: ${abs_top_srcdir}/buildtools/make-sql-schemas.py rpkid.sql pubd.sql - ${PYTHON} ${abs_top_srcdir}/buildtools/make-sql-schemas.py >$@.tmp - mv $@.tmp $@ - -clean:: - find . -type f -name '*.py[co]' -delete - cd tests; $(MAKE) $@ - -install:: ${RPKID_INSTALL_TARGETS} - -install-always:: all - ${PYTHON} setup.py install ${SETUP_PY_ROOT} ${SETUP_PY_INSTALL_LAYOUT} --record installed - @echo - @echo "== Default configuration file location is ${sysconfdir}/rpki.conf ==" - @echo - ${INSTALL} examples/rpki.conf ${DESTDIR}${sysconfdir}/rpki.conf.sample - ${INSTALL} -d ${DESTDIR}${datarootdir}/rpki/publication - -uninstall deinstall:: - -${libexecdir}/rpkigui-apache-conf-gen --remove --verbose - xargs rm -fv $@ - -doc/rpkid.dot: rpkid.sql - sh ${abs_top_srcdir}/buildtools/graphviz-sql.sh $? >$@ - -.SUFFIXES: .dot .png .pdf .eps - -.dot.pdf: - dot -Tps2 $? | ps2pdf - $@ - -.dot.eps: - dot -o $@ -Teps $? - -.dot.png: - dot -o $@ -Tpng $? 
- -dot: doc/pubd.dot doc/rpkid.dot - -eps: doc/pubd.eps doc/rpkid.eps doc/rpkid-bpki.eps doc/pubd-bpki.eps - -png: doc/pubd.png doc/rpkid.png doc/rpkid-bpki.png doc/pubd-bpki.png - -pdf: doc/pubd.pdf doc/rpkid.pdf doc/rpkid-bpki.pdf doc/pubd-bpki.pdf - -docclean: - rm -rf doc/html doc/latex doc/xml - rm -f doc/*.eps doc/*.pdf doc/*.png - rm -f doc/pubd.dot doc/rpkid.dot - -html: dot eps png - TZ='' IMAGE_PATH=${abs_builddir}/doc doxygen - -docs: dot eps png html pdf - -## - -distclean:: clean docclean - cd tests; ${MAKE} $@ - rm -f TAGS Makefile - -all:: examples/rpki.conf - -examples/rpki.conf: rpki/autoconf.py rpki-confgen rpki-confgen.xml - ${PYTHON} rpki-confgen \ - --read-xml rpki-confgen.xml \ - --autoconf \ - --set myrpki::handle=`hostname -f | sed 's/[.]/_/g'` \ - --set myrpki::rpkid_server_host=`hostname -f` \ - --set myrpki::pubd_server_host=`hostname -f` \ - --pwgen myrpki::shared_sql_password \ - --pwgen web_portal::secret-key \ - --write-conf $@ - -clean:: - rm -f examples/rpki.conf - -rpki/autoconf.py: Makefile - @echo 'Generating $@'; \ - (echo '# Automatically generated. 
DO NOT EDIT.'; \ - echo ; \ - echo 'bindir = "${bindir}"'; \ - echo 'datarootdir = "${datarootdir}"'; \ - echo 'localstatedir = "${localstatedir}"'; \ - echo 'sbindir = "${sbindir}"'; \ - echo 'sharedstatedir = "${sharedstatedir}"'; \ - echo 'sysconfdir = "${sysconfdir}"'; \ - echo 'libexecdir = "${libexecdir}"'; \ - echo ; \ - echo 'WSGI_DAEMON_PROCESS = "${WSGI_DAEMON_PROCESS}"'; \ - echo 'WSGI_PROCESS_GROUP = "${WSGI_PROCESS_GROUP}"'; \ - echo 'RCYNIC_HTML_DIR = "${RCYNIC_HTML_DIR}"'; \ - echo 'APACHE_VERSION = "${APACHE_VERSION}"'; \ - echo 'WSGI_PYTHON_EGG_CACHE_DIR = "${WSGI_PYTHON_EGG_CACHE_DIR}"'; \ - echo 'WSGI_PYTHON_EGG_CACHE_USER = "${WSGI_PYTHON_EGG_CACHE_USER}"'; \ - ) > $@ - -clean:: - rm -f rpki/autoconf.py - -setup_autoconf.py: rpki/autoconf.py - @echo 'Generating $@'; \ - (cat rpki/autoconf.py; \ - echo ; \ - echo 'CFLAGS = """${CFLAGS}"""'; \ - echo 'LDFLAGS = """${LDFLAGS}"""'; \ - echo 'LIBS = """${LIBS}"""'; \ - ) > $@ - -clean:: - rm -f setup_autoconf.py setup_autoconf.pyc - -install-postconf: \ - install-user install-egg-cache install-conf install-apache install-mysql install-django install-bpki install-cron - -# This should create user "rpkid" and group "rpkid", but as we have -# not yet tested our ability to run in such a configuration, this -# would be a little premature. Can steal Makefile code for this from -# rcynic when we're ready to do something with it. - -install-user: - @true - -# This is only necessary on some platforms (currently FreeBSD, -# due to shortcomings in the way that Python eggs are installed -# as system libraries). - -install-egg-cache: - @if test 'X${WSGI_PYTHON_EGG_CACHE_DIR}' != X && test ! 
-d '${WSGI_PYTHON_EGG_CACHE_DIR}'; then \ - mkdir -v '${WSGI_PYTHON_EGG_CACHE_DIR}'; \ - if test 'X${WSGI_PYTHON_EGG_CACHE_USER}' != X; then \ - chown '${WSGI_PYTHON_EGG_CACHE_USER}' '${WSGI_PYTHON_EGG_CACHE_DIR}'; \ - fi; \ - fi - -# We used to play the FreeBSD game of copying rpki.conf.sample to -# rpki.conf on install and removing rpki.conf if it's identical to -# rpki.conf.sample in uninstall, but that turns out to work poorly -# with generated passwords. So now we copy rpki.conf.sample if and -# only if rpki.conf does not exist, and we leave removal of rpki.conf -# for the user to deal with. This in turn leaves us with a different -# problem of how to upgrade rpki.conf, but at least in the FreeBSD -# universe we're supposed to leave that problem for the user. - -install-conf: - if test -f ${DESTDIR}${sysconfdir}/rpki.conf; \ - then \ - true; \ - else \ - cp -p ${DESTDIR}${sysconfdir}/rpki.conf.sample ${DESTDIR}${sysconfdir}/rpki.conf; \ - fi - -uninstall deinstall:: -# if cmp -s ${DESTDIR}${sysconfdir}/rpki.conf ${DESTDIR}${sysconfdir}/rpki.conf.sample; then rm -f ${DESTDIR}${sysconfdir}/rpki.conf; else true; fi - rm -f ${DESTDIR}${sysconfdir}/rpki.conf.sample - -install-apache: - ${libexecdir}/rpkigui-apache-conf-gen --install --verbose - -install-mysql: - ${sbindir}/rpki-sql-setup - -install-django: - ${sbindir}/rpki-manage syncdb --noinput - ${sbindir}/rpki-manage migrate app - -install-bpki: - ${sbindir}/rpkic initialize_server_bpki - -# This needs to set up crontab entries for rpkigui-check-expired, -# rpkigui-import-routes, and rpkic update_bpki. They probably don't -# want run under the same user IDs either, so what with having to use -# /usr/bin/crontab on some platforms, this should be entirely too -# entertaining. Might be ok to run them all as user rpkid eventually. 
-# -# We really should be running exactly the same cron setup/teardown -# code here as we do in platform-specific post-installation scripts, -# but for now we just use crontab(1) here on all platforms. - -install-cron: install-cron-using-crontab - -uninstall deinstall:: uninstall-cron-using-crontab - -# Code for setting up and tearing down cron jobs using the crontab(1) -# program. We don't use this on all platforms, but we do use it on -# more than one, so it's broken out here as common code. -# -# CRONTAB_USER really should be rpkid, but we don't have the rest of -# the package set up for that yet, so run it as root for now. - -CRONTAB_USER = root - -install-cron-using-crontab: - @crontab -l -u ${CRONTAB_USER} 2>/dev/null | \ - awk -v t=`hexdump -n 2 -e '"%u\n"' /dev/urandom` ' \ - BEGIN { \ - cmd["${libexecdir}/rpkigui-import-routes"] = sprintf("%2u */2 * * *", t % 60); \ - cmd["${libexecdir}/rpkigui-check-expired"] = "@daily "; \ - cmd["${sbindir}/rpkic update_bpki" ] = "30 3 * * * "; \ - } \ - { \ - print; \ - for (i in cmd) \ - if ($$0 ~ i) \ - found[i] = $$0; \ - } \ - END { \ - for (i in cmd) \ - if (!found[i]) \ - print cmd[i] "\texec " i; \ - }' | \ - crontab -u ${CRONTAB_USER} - - -uninstall-cron-using-crontab: - @crontab -l -u ${CRONTAB_USER} 2>/dev/null | \ - awk ' \ - BEGIN { \ - empty = 1; \ - } \ - $$0 !~ "${libexecdir}/rpkigui-import-routes" && \ - $$0 !~ "${libexecdir}/rpkigui-check-expired" && \ - $$0 !~ "${sbindir}/rpkic update_bpki" { \ - empty = 0; \ - print | "/usr/bin/crontab -u ${CRONTAB_USER} -"; \ - } \ - END { \ - if (empty) \ - system("/usr/bin/crontab -u ${CRONTAB_USER} -r"); \ - }' diff --git a/rpkid/README b/rpkid/README deleted file mode 100644 index 91d372de..00000000 --- a/rpkid/README +++ /dev/null @@ -1,11 +0,0 @@ -$Id$ -*- Text -*- - -RPKI CA tools. 
- -See: - -- The primary documentation at http://trac.rpki.net/ - -- The PDF manual in ../doc/manual.pdf, or - -- The flat text pages ../doc/doc.RPKI.CA* diff --git a/rpkid/doc/README b/rpkid/doc/README deleted file mode 100644 index 33902d7e..00000000 --- a/rpkid/doc/README +++ /dev/null @@ -1,16 +0,0 @@ -$Id$ - -Internals documentation for the RPKI CA tools. - -Once upon a time this included the hand-written documentation for the -CA tools, but that is now part of the overall package documentation. -What's left here is just what Doxygen generates from the source code -and a few Graphviz diagrams. - -At the moment the control for the stuff generated here is still -../Makefile, that may change at some point. - -We no longer generate the documentation here automatically, as it's -kind of large and we're not sure anybody else cares about it, so if -you want this manual you'll have to install Doxygen and build it -yourself. diff --git a/rpkid/doc/mainpage.py b/rpkid/doc/mainpage.py deleted file mode 100644 index 4570547b..00000000 --- a/rpkid/doc/mainpage.py +++ /dev/null @@ -1,71 +0,0 @@ -## @file -# @details -# Doxygen documentation source, expressed as Python comments to make Doxygen happy. -# -# $Id$ -# -# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -## @mainpage RPKI Engine Reference Manual -# -# This collection of Python modules implements an RPKI CA engine. -# -# See http://trac.rpki.net/ for the RPKI tools package documentation. -# -# The documentation you're reading is generated automatically by -# Doxygen from comments and documentation in -# the code. -# -# At one point this manual also included documentation for the CA -# tools, but that has been integrated into the overall package -# documentation. This manual is now just the CA tools internals. 
- -## @page sql-schemas SQL database schemas -# -# @li @subpage rpkid-sql "rpkid database schema" -# @li @subpage pubd-sql "pubd database schema" - -## @page rpkid-sql rpkid SQL schema -# -# @image html rpkid.png "Diagram of rpkid.sql" -# @image latex rpkid.eps "Diagram of rpkid.sql" height=\textheight -# -# @verbinclude rpkid.sql - -## @page pubd-sql pubd SQL Schema -# -# @image html pubd.png "Diagram of pubd.sql" -# @image latex pubd.eps "Diagram of pubd.sql" width=\textwidth -# -# @verbinclude pubd.sql - -# Local Variables: -# mode:python -# compile-command: "cd ../.. && ./config.status && cd rpkid && make docs" -# End: diff --git a/rpkid/doc/pubd-bpki.dot b/rpkid/doc/pubd-bpki.dot deleted file mode 100644 index 44ad8a90..00000000 --- a/rpkid/doc/pubd-bpki.dot +++ /dev/null @@ -1,42 +0,0 @@ -// $Id$ - -// Color code: -// Black: Operating entity -// Red: Cross-certified client -// -// Shape code: -// Octagon: TA -// Diamond: CA -// Record: EE - -digraph bpki_pubd { - splines = true; - size = "14,14"; - node [ fontname = Times, fontsize = 9 ]; - - // Operating entity - node [ color = black, fontcolor = black, shape = record ]; - TA [ shape = octagon, label = "BPKI TA" ]; - pubd [ label = "pubd|{HTTPS server|CMS}" ]; - ctl [ label = "Control|{HTTPS client|CMS}" ]; - - // Clients - node [ color = red, fontcolor = red, shape = diamond ]; - Alice_CA; - Bob_CA; - node [ color = red, fontcolor = red, shape = record ]; - Alice_EE [ label = "Alice\nEE|{HTTPS client|CMS}" ]; - Bob_EE [ label = "Bob\nEE|{HTTPS client|CMS}" ]; - - edge [ color = black, style = dotted ]; - TA -> pubd; - TA -> ctl; - - edge [ color = black, style = solid ]; - TA -> Alice_CA; - TA -> Bob_CA; - - edge [ color = red, style = solid ]; - Alice_CA -> Alice_EE; - Bob_CA -> Bob_EE; -} diff --git a/rpkid/doc/rpkid-bpki.dot b/rpkid/doc/rpkid-bpki.dot deleted file mode 100644 index 651591cb..00000000 --- a/rpkid/doc/rpkid-bpki.dot +++ /dev/null @@ -1,76 +0,0 @@ -// $Id$ - -// Color code: -// Black: 
Hosting entity -// Blue: Hosted entity -// Red: Cross-certified peer -// -// Shape code: -// Octagon: TA -// Diamond: CA -// Record: EE - -digraph bpki_rpkid { - splines = true; - size = "14,14"; - node [ fontname = Times, fontsize = 9 ]; - - // Hosting entity - node [ color = black, shape = record ]; - TA [ shape = octagon, label = "BPKI TA" ]; - rpkid [ label = "rpkid|{HTTPS server|HTTPS left-right client|CMS left-right}" ]; - irdbd [ label = "irdbd|{HTTPS left-right server|CMS left-right}" ]; - irbe [ label = "IRBE|{HTTPS left-right client|CMS left-right}" ]; - - // Hosted entities - node [ color = blue, fontcolor = blue ]; - Alice_CA [ shape = diamond ]; - Alice_EE [ label = "Alice\nBSC EE|{HTTPS up-down client|CMS up-down}" ]; - Ellen_CA [ shape = diamond ]; - Ellen_EE [ label = "Ellen\nBSC EE|{HTTPS up-down client|CMS up-down}" ]; - - // Peers - node [ color = red, fontcolor = red, shape = diamond ]; - Bob_CA; - Carol_CA; - Dave_CA; - Frank_CA; - Ginny_CA; - Harry_CA; - node [ shape = record ]; - Bob_EE [ label = "Bob\nEE|{HTTPS up-down|CMS up-down}" ]; - Carol_EE [ label = "Carol\nEE|{HTTPS up-down|CMS up-down}" ]; - Dave_EE [ label = "Dave\nEE|{HTTPS up-down|CMS up-down}" ]; - Frank_EE [ label = "Frank\nEE|{HTTPS up-down|CMS up-down}" ]; - Ginny_EE [ label = "Ginny\nEE|{HTTPS up-down|CMS up-down}" ]; - Harry_EE [ label = "Bob\nEE|{HTTPS up-down|CMS up-down}" ]; - - edge [ color = black, style = solid ]; - TA -> Alice_CA; - TA -> Ellen_CA; - - edge [ color = black, style = dotted ]; - TA -> rpkid; - TA -> irdbd; - TA -> irbe; - - edge [ color = blue, style = solid ]; - Alice_CA -> Bob_CA; - Alice_CA -> Carol_CA; - Alice_CA -> Dave_CA; - Ellen_CA -> Frank_CA; - Ellen_CA -> Ginny_CA; - Ellen_CA -> Harry_CA; - - edge [ color = blue, style = dotted ]; - Alice_CA -> Alice_EE; - Ellen_CA -> Ellen_EE; - - edge [ color = red, style = solid ]; - Bob_CA -> Bob_EE; - Carol_CA -> Carol_EE; - Dave_CA -> Dave_EE; - Frank_CA -> Frank_EE; - Ginny_CA -> Ginny_EE; - Harry_CA 
-> Harry_EE; -} diff --git a/rpkid/examples/asns.csv b/rpkid/examples/asns.csv deleted file mode 100644 index 9d742740..00000000 --- a/rpkid/examples/asns.csv +++ /dev/null @@ -1,5 +0,0 @@ -# $Id$ -# -# Syntax: -# -Alice 64533 diff --git a/rpkid/examples/prefixes.csv b/rpkid/examples/prefixes.csv deleted file mode 100644 index ece18d32..00000000 --- a/rpkid/examples/prefixes.csv +++ /dev/null @@ -1,8 +0,0 @@ -# $Id$ -# -# Syntax: / -# or: - -# -Alice 192.0.2.0/27 -Bob 192.0.2.44-192.0.2.100 -Bob 10.0.0.0/8 diff --git a/rpkid/examples/roas.csv b/rpkid/examples/roas.csv deleted file mode 100644 index e4ec3074..00000000 --- a/rpkid/examples/roas.csv +++ /dev/null @@ -1,5 +0,0 @@ -# $Id$ -# -# Syntax: /- -# -10.3.0.44/32 666 Mom diff --git a/rpkid/examples/rsyncd.conf b/rpkid/examples/rsyncd.conf deleted file mode 100644 index faf1dd0d..00000000 --- a/rpkid/examples/rsyncd.conf +++ /dev/null @@ -1,53 +0,0 @@ -# $Id$ -# -# Sample rsyncd.conf file for use with pubd. You may need to -# customize this for the conventions on your system. See the rsync -# and rsyncd.conf manual pages for a complete explanation of how to -# configure rsyncd, this is just a simple configuration to get you -# started. -# -# There are two parameters in the following which you should set to -# appropriate values for your system: -# -# "myname" is the rsync module name to configure, as in -# "rsync://rpki.example.org/rpki/"; see the publication_rsync_module -# parameter in rpki.conf -# -# "/some/where/publication" is the absolute pathname of the directory -# where you told pubd to place its outputs; see the -# publication_base_directory parameter in rpki.conf. -# -# You may need to adjust other parameters for your system environment. 
-# -# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -pid file = /var/run/rsyncd.pid -uid = nobody -gid = nobody - -[rpki] - use chroot = no - read only = yes - transfer logging = yes - path = /some/where/publication - comment = RPKI Testbed - -[root] - # This one is only relevant if you're running rootd. - use chroot = no - read only = yes - transfer logging = yes - path = /some/where/publication.root - comment = RPKI Testbed Root diff --git a/rpkid/ext/POW.c b/rpkid/ext/POW.c deleted file mode 100644 index b5d9ccaf..00000000 --- a/rpkid/ext/POW.c +++ /dev/null @@ -1,9253 +0,0 @@ -/* - * This module started out as the core of Peter Shannon's "Python - * OpenSSL Wrappers" package, an excellent but somewhat dated package - * which I encountered while looking for some halfway sane way to cram - * RFC 3779 certificate support code into Python. - * - * At this point enough of the code has been added or rewritten that - * it's unclear (either way) whether this code properly qualifies as a - * derivative work. Given that both Peter's original code and all of - * subsequent changes to it were done under something equivalent to a - * BSD license, this may not matter very much, but the following - * attempts to give proper credit to all concerned. 
- * - **** - * - * Copyright (C) 2009--2013 Internet Systems Consortium ("ISC") - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH - * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY - * AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, - * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM - * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE - * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR - * PERFORMANCE OF THIS SOFTWARE. - * - **** - * - * Portions copyright (C) 2006--2008 American Registry for Internet - * Numbers ("ARIN") - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH - * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY - * AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, - * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM - * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE - * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR - * PERFORMANCE OF THIS SOFTWARE. - * - **** - * - * Portions Copyright (c) 2001, 2002, Peter Shannon - * All rights reserved. 
- * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * - * * Redistributions of source code must retain the above - * copyright notice, this list of conditions and the following - * disclaimer. - * - * * Redistributions in binary form must reproduce the above - * copyright notice, this list of conditions and the following - * disclaimer in the documentation and/or other materials - * provided with the distribution. - * - * * The name of the contributors may be used to endorse or promote - * products derived from this software without specific prior - * written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS - * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -/* $Id$ */ - -#define PY_SSIZE_T_CLEAN 1 -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include - -#include -#include -#include -#include - -/* - * GCC attribute to let us tell GCC not to whine about unused formal - * parameters when we're in maximal warning mode. 
- */ -#ifdef __GNUC__ -#define GCC_UNUSED __attribute__((unused)) -#else -define GCC_UNUSED -#endif - -/* - * Maximum size of a raw IP (v4 or v6) address, in bytes. - */ -#define RAW_IPADDR_BUFLEN 16 - -/* - * Maximum size of an ASN.1 Integer converted from a Python Long, in bytes. - */ -#define MAX_ASN1_INTEGER_LEN 20 - -/* Digests */ -#define MD5_DIGEST 2 -#define SHA_DIGEST 3 -#define SHA1_DIGEST 4 -#define SHA256_DIGEST 6 -#define SHA384_DIGEST 7 -#define SHA512_DIGEST 8 - -/* Object format */ -#define SHORTNAME_FORMAT 1 -#define LONGNAME_FORMAT 2 -#define OIDNAME_FORMAT 3 - -/* AsymmetricParam EC curves */ -#define EC_P256_CURVE NID_X9_62_prime256v1 - -/* Object check functions */ -#define POW_X509_Check(op) PyObject_TypeCheck(op, &POW_X509_Type) -#define POW_X509Store_Check(op) PyObject_TypeCheck(op, &POW_X509Store_Type) -#define POW_X509StoreCTX_Check(op) PyObject_TypeCheck(op, &POW_X509StoreCTX_Type) -#define POW_CRL_Check(op) PyObject_TypeCheck(op, &POW_CRL_Type) -#define POW_Asymmetric_Check(op) PyObject_TypeCheck(op, &POW_Asymmetric_Type) -#define POW_AsymmetricParams_Check(op) PyObject_TypeCheck(op, &POW_AsymmetricParams_Type) -#define POW_Digest_Check(op) PyObject_TypeCheck(op, &POW_Digest_Type) -#define POW_CMS_Check(op) PyObject_TypeCheck(op, &POW_CMS_Type) -#define POW_IPAddress_Check(op) PyObject_TypeCheck(op, &POW_IPAddress_Type) -#define POW_ROA_Check(op) PyObject_TypeCheck(op, &POW_ROA_Type) -#define POW_Manifest_Check(op) PyObject_TypeCheck(op, &POW_Manifest_Type) -#define POW_ROA_Check(op) PyObject_TypeCheck(op, &POW_ROA_Type) - -static char pow_module__doc__ [] = - "Python interface to RFC-3779-enabled OpenSSL. This code is intended\n" - "to support the rpki.net toolset.\n" - "\n" - "This code started out life as Peter Shannon's excellent \"Python OpenSSL\n" - "Wrappers\" package. 
It has been extensively modified since then, to add\n" - "support for things needed for the RPKI protocols, to upgrade the code\n" - "to use modern (circa Python 2.7) classes, and to remove code not\n" - "needed for RPKI.\n" - ; - -#define LAME_DISCLAIMER_IN_ALL_CLASS_DOCUMENTATION \ - "The documentation for this class used to provide a nice example of how\n" \ - "to use the class. Sadly, most of what was in that example is now\n" \ - "obsolete due to recent or impending API changes. Once the new API is\n" \ - "stable, this documentation should be rewritten to provide such examples.\n" - -/* - * Handle NIDs we wish OpenSSL knew about. This is carefully (we - * hope) written to do nothing at all for any NID that OpenSSL knows - * about; the intent is just to add definitions for things OpenSSL - * doesn't know about yet. Of necessity, this is a bit gross, since - * it confounds runtime static variables with predefined macro names, - * but we try to put all the magic associated with this in one place. - */ - -#ifndef NID_rpkiManifest -static int NID_rpkiManifest; -#endif - -#ifndef NID_signedObject -static int NID_signedObject; -#endif - -static const struct { - int *nid; - const char *oid; - const char *sn; - const char *ln; -} missing_nids[] = { - -#ifndef NID_rpkiManifest - {&NID_rpkiManifest, "1.3.6.1.5.5.7.48.10", "id-ad-rpkiManifest", "RPKI Manifest"}, -#endif - -#ifndef NID_signedObject - {&NID_signedObject, "1.3.6.1.5.5.7.48.11", "id-ad-signedObject", "Signed Object"} -#endif - -}; - -/* - * IP versions. 
- */ - -typedef struct ipaddress_version { - unsigned version; - unsigned afi; - unsigned af; - unsigned length; -} ipaddress_version; - -static const ipaddress_version ipaddress_version_4 = { - 4, IANA_AFI_IPV4, AF_INET, 4 -}; - -static const ipaddress_version ipaddress_version_6 = { - 6, IANA_AFI_IPV6, AF_INET6, 16 -}; - -static const ipaddress_version * const ipaddress_versions[] = { - &ipaddress_version_4, &ipaddress_version_6 -}; - -/* - * Names of bits in the KeyUsage BitString (RFC 5280 4.2.1.3). - */ - -static const char * const key_usage_bit_names[] = { - "digitalSignature", /* (0) */ - "nonRepudiation", /* (1) */ - "keyEncipherment", /* (2) */ - "dataEncipherment", /* (3) */ - "keyAgreement", /* (4) */ - "keyCertSign", /* (5) */ - "cRLSign", /* (6) */ - "encipherOnly", /* (7) */ - "decipherOnly", /* (8) */ - NULL -}; - -/* - * Exception objects. - */ - -static PyObject - *ErrorObject, - *OpenSSLErrorObject, - *POWErrorObject, - *NotVerifiedErrorObject; - -/* - * Constructor for customized datetime class. - */ - -static PyObject *custom_datetime; - -/* - * "ex_data" index for pointer we want to attach to X509_STORE_CTX so - * we can extract it in callbacks. - */ - -static int x509_store_ctx_ex_data_idx = -1; - -/* - * Declarations of type objects (definitions come later). - */ - -static PyTypeObject - POW_X509_Type, - POW_X509Store_Type, - POW_X509StoreCTX_Type, - POW_CRL_Type, - POW_Asymmetric_Type, - POW_AsymmetricParams_Type, - POW_Digest_Type, - POW_CMS_Type, - POW_IPAddress_Type, - POW_ROA_Type, - POW_Manifest_Type, - POW_ROA_Type, - POW_PKCS10_Type; - -/* - * Object internals. 
- */ - -typedef struct { - PyObject_HEAD - unsigned char address[16]; - const struct ipaddress_version *type; -} ipaddress_object; - -typedef struct { - PyObject_HEAD - X509 *x509; -} x509_object; - -typedef struct { - PyObject_HEAD - X509_STORE *store; - PyObject *ctxclass; -} x509_store_object; - -typedef struct { - PyObject_HEAD - X509_STORE_CTX *ctx; - x509_store_object *store; -} x509_store_ctx_object; - -typedef struct { - PyObject_HEAD - X509_CRL *crl; -} crl_object; - -typedef struct { - PyObject_HEAD - EVP_PKEY *pkey; -} asymmetric_object; - -typedef struct { - PyObject_HEAD - EVP_PKEY *pkey; -} asymmetric_params_object; - -typedef struct { - PyObject_HEAD - EVP_MD_CTX digest_ctx; - int digest_type; -} digest_object; - -typedef struct { - PyObject_HEAD - CMS_ContentInfo *cms; -} cms_object; - -typedef struct { - cms_object cms; /* Subclass of CMS */ - ROA *roa; -} roa_object; - -typedef struct { - cms_object cms; /* Subclass of CMS */ - Manifest *manifest; -} manifest_object; - -typedef struct { - PyObject_HEAD - X509_REQ *pkcs10; - X509_EXTENSIONS *exts; -} pkcs10_object; - - - -/* - * Utility functions. - */ - -/* - * Minimal intervention debug-by-printf() hack, use only for good. - */ - -#if 0 -#define KVETCH(_msg_) write(2, _msg_ "\n", sizeof(_msg_)) -#else -#define KVETCH(_msg_) ((void) 0) -#endif - -#if 0 -#define ENTERING(_name_) KVETCH("Entering " #_name_ "()") -#else -#define ENTERING(_name_) ((void) 0) -#endif - -/* - * Error handling macros. All of macros assume that there's a cleanup - * label named "error" which these macros can use as a goto target. 
- */ - -#define lose(_msg_) \ - do { \ - PyErr_SetString(POWErrorObject, (_msg_)); \ - goto error; \ - } while (0) - -#define lose_no_memory() \ - do { \ - PyErr_NoMemory(); \ - goto error; \ - } while (0) - -#define lose_type_error(_msg_) \ - do { \ - PyErr_SetString(PyExc_TypeError, (_msg_)); \ - goto error; \ - } while (0) - -#define lose_value_error(_msg_) \ - do { \ - PyErr_SetString(PyExc_ValueError, (_msg_)); \ - goto error; \ - } while (0) - -#define lose_openssl_error(_msg_) \ - do { \ - set_openssl_exception(OpenSSLErrorObject, (_msg_), 0); \ - goto error; \ - } while (0) - -#define lose_not_verified(_msg_) \ - do { \ - PyErr_SetString(NotVerifiedErrorObject, (_msg_)); \ - goto error; \ - } while (0) - -#define assert_no_unhandled_openssl_errors() \ - do { \ - if (ERR_peek_error()) { \ - set_openssl_exception(OpenSSLErrorObject, NULL, __LINE__); \ - goto error; \ - } \ - } while (0) - -#define POW_assert(_cond_) \ - do { \ - if (!(_cond_)) { \ - (void) PyErr_Format(POWErrorObject, \ - "Assertion %s failed at " __FILE__ ":%d", \ - #_cond_, __LINE__); \ - goto error; \ - } \ - } while (0) - -/* - * Consolidate some tedious EVP-related switch statements. - */ - -static const EVP_MD * -evp_digest_factory(int digest_type) -{ - switch (digest_type) { - case MD5_DIGEST: return EVP_md5(); - case SHA_DIGEST: return EVP_sha(); - case SHA1_DIGEST: return EVP_sha1(); - case SHA256_DIGEST: return EVP_sha256(); - case SHA384_DIGEST: return EVP_sha384(); - case SHA512_DIGEST: return EVP_sha512(); - default: return NULL; - } -} - -/* - * Raise an exception with data pulled from the OpenSSL error stack. - * Exception value is a tuple with some internal structure. - * - * If a string error message is supplied, that string is the first - * element of the exception value tuple. 
- * - * If a non-zero line number is supplied, a string listing this as an - * unhandled exception detected at that line will be the next element - * of the exception value tuple (or the first, if no error message was - * supplied). - * - * Remainder of exception value tuple is zero or more tuples, each - * representing one error from the stack. - * - * Each error tuple contains six slots: - * - the numeric error code - * - string translation of numeric error code ("reason") - * - name of library in which error occurred - * - name of function in which error occurred - * - name of file in which error occurred - * - line number in file where error occurred - */ - -static void -set_openssl_exception(PyObject *error_class, const char *msg, const int unhandled_line) -{ - PyObject *errtuple = NULL; - PyObject *errlist = NULL; - unsigned long err; - const char *file; - int line; - - if ((errlist = PyList_New(0)) == NULL) - return; - - if (msg) { - PyObject *s = PyString_FromString(msg); - (void) PyList_Append(errlist, s); - Py_XDECREF(s); - } - - if (unhandled_line) { - PyObject *s = PyString_FromFormat("Unhandled OpenSSL error at " __FILE__ ":%d!", unhandled_line); - (void) PyList_Append(errlist, s); - Py_XDECREF(s); - } - - while ((err = ERR_get_error_line(&file, &line)) != 0) { - PyObject *t = Py_BuildValue("(issssi)", - err, - ERR_reason_error_string(err), - ERR_lib_error_string(err), - ERR_func_error_string(err), - file, - line); - (void) PyList_Append(errlist, t); - Py_XDECREF(t); - } - - if ((errtuple = PyList_AsTuple(errlist)) != NULL) - PyErr_SetObject(error_class, errtuple); - - Py_XDECREF(errtuple); - Py_XDECREF(errlist); -} - -static X509_NAME * -x509_object_helper_set_name(PyObject *dn_obj) -{ - PyObject *rdn_obj = NULL; - PyObject *pair_obj = NULL; - PyObject *type_obj = NULL; - PyObject *value_obj = NULL; - X509_NAME *name = NULL; - char *type_str, *value_str; - int asn1_type, i, j; - - if ((name = X509_NAME_new()) == NULL) - lose_no_memory(); - - for (i = 
0; i < PySequence_Size(dn_obj); i++) { - - if ((rdn_obj = PySequence_GetItem(dn_obj, i)) == NULL) - goto error; - - if (!PySequence_Check(rdn_obj) || PySequence_Size(rdn_obj) == 0) - lose_type_error("each RDN must be a sequence with at least one element"); - - for (j = 0; j < PySequence_Size(rdn_obj); j++) { - - if ((pair_obj = PySequence_GetItem(rdn_obj, j)) == NULL) - goto error; - - if (!PySequence_Check(pair_obj) || PySequence_Size(pair_obj) != 2) - lose_type_error("each name entry must be a two-element sequence"); - - if ((type_obj = PySequence_GetItem(pair_obj, 0)) == NULL || - (type_str = PyString_AsString(type_obj)) == NULL || - (value_obj = PySequence_GetItem(pair_obj, 1)) == NULL || - (value_str = PyString_AsString(value_obj)) == NULL) - goto error; - - if ((asn1_type = ASN1_PRINTABLE_type((unsigned char *) value_str, -1)) != V_ASN1_PRINTABLESTRING) - asn1_type = V_ASN1_UTF8STRING; - - if (!X509_NAME_add_entry_by_txt(name, type_str, asn1_type, - (unsigned char *) value_str, - strlen((char *) value_str), - -1, (j ? -1 : 0))) - lose("Unable to add name entry"); - - Py_XDECREF(pair_obj); - Py_XDECREF(type_obj); - Py_XDECREF(value_obj); - pair_obj = type_obj = value_obj = NULL; - } - - Py_XDECREF(rdn_obj); - rdn_obj = NULL; - } - - return name; - - error: - X509_NAME_free(name); - Py_XDECREF(rdn_obj); - Py_XDECREF(pair_obj); - Py_XDECREF(type_obj); - Py_XDECREF(value_obj); - return NULL; -} - -static PyObject * -x509_object_helper_get_name(X509_NAME *name, int format) -{ - X509_NAME_ENTRY *entry = NULL; - PyObject *result = NULL; - PyObject *rdn = NULL; - PyObject *item = NULL; - const char *oid = NULL; - char oidbuf[512]; - int i, set = -1; - - /* - * Overall theory here: multi-value RDNs are very rare in the wild. - * We should support them, so we don't throw an exception if handed - * one in a BPKI certificate, but with minimal effort. What we care - * about here is optimizing for the common case of single-valued RDNs. 
- */ - - if ((result = PyTuple_New(X509_NAME_entry_count(name))) == NULL) - goto error; - - for (i = 0; i < X509_NAME_entry_count(name); i++) { - - if ((entry = X509_NAME_get_entry(name, i)) == NULL) - lose("Couldn't get certificate name"); - - if (entry->set < 0 || entry->set < set || entry->set > set + 1) - lose("X509_NAME->set value out of expected range"); - - switch (format) { - case SHORTNAME_FORMAT: - oid = OBJ_nid2sn(OBJ_obj2nid(entry->object)); - break; - case LONGNAME_FORMAT: - oid = OBJ_nid2ln(OBJ_obj2nid(entry->object)); - break; - case OIDNAME_FORMAT: - oid = NULL; - break; - default: - lose("Unknown name format"); - } - - if (oid == NULL) { - if (OBJ_obj2txt(oidbuf, sizeof(oidbuf), entry->object, 1) <= 0) - lose_openssl_error("Couldn't translate OID"); - oid = oidbuf; - } - - if (entry->set > set) { - - set++; - if ((item = Py_BuildValue("((ss#))", oid, ASN1_STRING_data(entry->value), - (Py_ssize_t) ASN1_STRING_length(entry->value))) == NULL) - goto error; - PyTuple_SET_ITEM(result, set, item); - item = NULL; - - } else { - - if ((rdn = PyTuple_GetItem(result, set)) == NULL) - goto error; - (void) _PyTuple_Resize(&rdn, PyTuple_Size(rdn) + 1); - PyTuple_SET_ITEM(result, set, rdn); - if (rdn == NULL) - goto error; - if ((item = Py_BuildValue("(ss#)", oid, ASN1_STRING_data(entry->value), - (Py_ssize_t) ASN1_STRING_length(entry->value))) == NULL) - goto error; - PyTuple_SetItem(rdn, PyTuple_Size(rdn) - 1, item); - rdn = item = NULL; - - } - } - - if (++set != PyTuple_Size(result)) { - if (set < 0 || set > PyTuple_Size(result)) - lose("Impossible set count for DN, something went horribly wrong"); - _PyTuple_Resize(&result, set); - } - - return result; - - error: - Py_XDECREF(item); - Py_XDECREF(result); - return NULL; -} - -static STACK_OF(X509) * -x509_helper_iterable_to_stack(PyObject *iterable) -{ - STACK_OF(X509) *stack = NULL; - PyObject *iterator = NULL; - PyObject *item = NULL; - - if ((stack = sk_X509_new_null()) == NULL) - lose_no_memory(); - - if 
(iterable != Py_None) { - - if ((iterator = PyObject_GetIter(iterable)) == NULL) - goto error; - - while ((item = PyIter_Next(iterator)) != NULL) { - - if (!POW_X509_Check(item)) - lose_type_error("Inapropriate type"); - - if (!sk_X509_push(stack, ((x509_object *) item)->x509)) - lose("Couldn't add X509 object to stack"); - - Py_XDECREF(item); - item = NULL; - } - } - - Py_XDECREF(iterator); - return stack; - - error: - Py_XDECREF(iterator); - Py_XDECREF(item); - sk_X509_free(stack); - return NULL; -} - -/* - * Pull items off an OpenSSL STACK and put them into a Python tuple. - * Assumes that handler is stealing the OpenSSL references to the - * items in the STACK, so shifts consumed frames off the stack so that - * the appropriate _pop_free() destructor can clean up on failures. - * This is OK because all current uses of this function are processing - * the result of OpenSSL xxx_get1_xxx() methods which we have to free - * in any case. - */ - -static x509_object *x509_object_new_helper(PyTypeObject *, X509 *); -static crl_object *crl_object_new_helper (PyTypeObject *, X509_CRL *); - -static PyObject * -stack_to_tuple_helper(_STACK *sk, PyObject *(*handler)(void *)) -{ - PyObject *result = NULL; - PyObject *obj = NULL; - int i; - - if ((result = PyTuple_New(sk_num(sk))) == NULL) - goto error; - - for (i = 0; sk_num(sk); i++) { - if ((obj = handler(sk_value(sk, 0))) == NULL) - goto error; - sk_shift(sk); - if (PyTuple_SetItem(result, i, obj) != 0) - goto error; - obj = NULL; - } - - return result; - - error: - - Py_XDECREF(obj); - return NULL; -} - -static PyObject * -stack_to_tuple_helper_get_x509(void *cert) -{ - x509_object *obj; - - ENTERING(stack_to_tuple_helper_get_x509); - - if ((obj = x509_object_new_helper(NULL, cert)) == NULL) - return NULL; - - return (PyObject *) obj; -} - -static PyObject * -stack_to_tuple_helper_get_crl(void *crl) -{ - crl_object *obj; - - ENTERING(stack_to_tuple_helper_get_crl); - - if ((obj = crl_object_new_helper(NULL, crl)) == 
NULL) - return NULL; - - return (PyObject *) obj; -} - -/* - * Time conversion functions. Obvious mapping into Python data types - * is datetime, or, rather, our customized rpki.sundial.datetime. - * - * Unsuprisingly, it's easiest for us to map between GeneralizedTime - * (as restricted by RFC 5280) and datetime. Conversion between - * GeneralizedTime and UTCTime is handled automatically according to - * the RFC 5280 rules for those ASN.1 types where it's required. - */ - -static PyObject * -ASN1_TIME_to_Python(ASN1_TIME *t) -{ - ASN1_GENERALIZEDTIME *g = NULL; - PyObject *result = NULL; - int year, month, day, hour, minute, second; - - if ((g = ASN1_TIME_to_generalizedtime(t, NULL)) == NULL) - lose_openssl_error("Couldn't convert ASN.1 TIME"); - - if (sscanf((char *) g->data, "%4d%2d%2d%2d%2d%2dZ", - &year, &month, &day, &hour, &minute, &second) != 6) - lose("Couldn't scan ASN.1 TIME value"); - - if (custom_datetime != NULL && custom_datetime != Py_None) - result = PyObject_CallFunction(custom_datetime, "iiiiii", - year, month, day, hour, minute, second); - else - result = PyDateTime_FromDateAndTime(year, month, day, hour, minute, second, 0); - - error: - ASN1_GENERALIZEDTIME_free(g); - return result; -} - -static ASN1_TIME * -Python_to_ASN1_TIME(PyObject *arg, const int object_requires_utctime) -{ - char buf[sizeof("20010401123456Z") + 1]; - ASN1_TIME *result = NULL; - const char *s = NULL; - int ok; - - if (PyDateTime_Check(arg)) { - if (snprintf(buf, sizeof(buf), "%4d%02d%02d%02d%02d%02dZ", - PyDateTime_GET_YEAR(arg), - PyDateTime_GET_MONTH(arg), - PyDateTime_GET_DAY(arg), - PyDateTime_DATE_GET_HOUR(arg), - PyDateTime_DATE_GET_MINUTE(arg), - PyDateTime_DATE_GET_SECOND(arg)) >= (int) sizeof(buf)) - lose("Internal error -- GeneralizedTime buffer too small"); - s = buf; - } - - if (s == NULL && (s = PyString_AsString(arg)) == NULL) - goto error; - - if (strlen(s) < 10) - lose_type_error("String is too short to parse as a valid ASN.1 TIME"); - - if ((result = 
ASN1_TIME_new()) == NULL) - lose_no_memory(); - - if (object_requires_utctime && - ((s[0] == '1' && s[1] == '9' && s[2] > '4') || - (s[0] == '2' && s[1] == '0' && s[2] < '5'))) - ok = ASN1_UTCTIME_set_string(result, s + 2); - else - ok = ASN1_GENERALIZEDTIME_set_string(result, s); - - if (ok) - return result; - - error: - ASN1_TIME_free(result); - return NULL; -} - -/* - * Extract a Python string from a memory BIO. - */ -static PyObject * -BIO_to_PyString_helper(BIO *bio) -{ - char *ptr = NULL; - Py_ssize_t len = 0; - - if ((len = BIO_get_mem_data(bio, &ptr)) == 0) - lose_openssl_error("Unable to get BIO data"); - - return Py_BuildValue("s#", ptr, len); - - error: - return NULL; -} - -static PyObject * -read_from_string_helper(PyObject *(*object_read_helper)(PyTypeObject *, BIO *), - PyTypeObject *type, - PyObject *args) -{ - PyObject *result = NULL; - char *src = NULL; - BIO *bio = NULL; - Py_ssize_t len = 0; - - if (!PyArg_ParseTuple(args, "s#", &src, &len)) - goto error; - - if ((bio = BIO_new_mem_buf(src, len)) == NULL) - lose_no_memory(); - - result = object_read_helper(type, bio); - - error: - BIO_free(bio); - return result; -} - -static PyObject * -read_from_file_helper(PyObject *(*object_read_helper)(PyTypeObject *, BIO *), - PyTypeObject *type, - PyObject *args) -{ - const char *filename = NULL; - PyObject *result = NULL; - BIO *bio = NULL; - - if (!PyArg_ParseTuple(args, "s", &filename)) - goto error; - - if ((bio = BIO_new_file(filename, "rb")) == NULL) - lose_openssl_error("Could not open file"); - - result = object_read_helper(type, bio); - - error: - BIO_free(bio); - return result; -} - -/* - * Simplify entries in method definition tables. See the "Common - * Object Structures" section of the API manual for available flags. 
- */ -#define Define_Method(__python_name__, __c_name__, __flags__) \ - { #__python_name__, (PyCFunction) __c_name__, __flags__, __c_name__##__doc__ } - -#define Define_Class_Method(__python_name__, __c_name__, __flags__) \ - Define_Method(__python_name__, __c_name__, (__flags__) | METH_CLASS) - -/* - * Convert an ASN1_INTEGER into a Python integer or long. - */ -static PyObject * -ASN1_INTEGER_to_PyLong(ASN1_INTEGER *arg) -{ - PyObject *result = NULL; - PyObject *obj = NULL; - - if ((obj = _PyLong_FromByteArray(ASN1_STRING_data(arg), - ASN1_STRING_length(arg), - 0, 0)) != NULL) - result = PyNumber_Int(obj); - - Py_XDECREF(obj); - return result; -} - -/* - * Convert a Python long to an ASN1_INTEGER. - * This is just nasty, do not read on a full stomach. - * - * Maximum size of integer to be converted here is taken from RFC 5280 - * 4.1.2.2, which sets a maximum of 20 octets for an X.509 certificate - * serial number. - * - * In theory we could use _PyLong_NumBits() to determine the length of - * the long before converting, and raise OverflowError if it's too big. - * Hmm. - */ -static ASN1_INTEGER * -PyLong_to_ASN1_INTEGER(PyObject *arg) -{ - PyObject *obj = NULL; - ASN1_INTEGER *a = NULL; - unsigned char buf[MAX_ASN1_INTEGER_LEN]; - size_t len; - - memset(buf, 0, sizeof(buf)); - - /* - * Make sure argument is a PyLong small enough that its length (in - * bits!) doesn't overflow a size_t (which is a mis-use of size_t, - * but take that up with whoever wrote _PyLong_NumBits()...). - */ - if ((obj = PyNumber_Long(arg)) == NULL || - (len = _PyLong_NumBits(obj)) == (size_t) -1) - goto error; - - /* - * Next make sure it's a non-negative integer small enough to fit in - * our buffer. If we really thought we needed to support larger - * integers we could allocate this dynamically, but we don't, so - * it's not worth the overhead. 
- * - * Paranoia: We can't convert len to bytes yet, because that - * requires rounding up and we don't know yet that we have enough - * headroom to do that arithmetic without overflowing a size_t. - */ - if (_PyLong_Sign(obj) < 0 || (len / 8) + 1 > sizeof(buf)) { - PyErr_SetObject(PyExc_OverflowError, obj); - goto error; - } - - /* - * Now that we know we're dealing with a sane number of bits, - * convert it to bytes. - */ - len = (len + 7) / 8; - - /* - * Extract that many bytes. - */ - if (_PyLong_AsByteArray((PyLongObject *) obj, buf, len, 0, 0) < 0) - goto error; - - /* - * We're done with the PyLong now. - */ - Py_XDECREF(obj); - obj = NULL; - - /* - * Generate the ASN1_INTEGER and return it. - */ - if ((a = ASN1_INTEGER_new()) == NULL || - (a->length < (int) len + 1 && (a->data = OPENSSL_realloc(a->data, len + 1)) == NULL)) - lose_no_memory(); - - a->type = V_ASN1_INTEGER; - a->length = len; - a->data[len] = 0; - memcpy(a->data, buf, len); - - return a; - - error: - Py_XDECREF(obj); - ASN1_INTEGER_free(a); - return NULL; -} - -/* - * Handle missing NIDs. - */ - -static int -create_missing_nids(void) -{ - int i; - - for (i = 0; i < (int) (sizeof(missing_nids) / sizeof(*missing_nids)); i++) - if ((*missing_nids[i].nid = OBJ_txt2nid(missing_nids[i].oid)) == NID_undef && - (*missing_nids[i].nid = OBJ_create(missing_nids[i].oid, - missing_nids[i].sn, - missing_nids[i].ln)) == NID_undef) - return 0; - - return 1; -} - -static PyObject * -ASN1_OBJECT_to_PyString(const ASN1_OBJECT *oid) -{ - PyObject *result = NULL; - char buf[512]; - - ENTERING(ASN1_OBJECT_to_PyString); - - if (OBJ_obj2txt(buf, sizeof(buf), oid, 1) <= 0) - lose_openssl_error("Couldn't translate OID"); - - result = PyString_FromString(buf); - - error: - return result; -} - - - -/* - * Extension functions. 
Calling sequence here is a little weird, - * because it turns out that the simplest way to avoid massive - * duplication of code between classes is to work directly with - * X509_EXTENSIONS objects. - */ - -static PyObject * -extension_get_key_usage(X509_EXTENSIONS **exts) -{ - ASN1_BIT_STRING *ext = NULL; - PyObject *result = NULL; - PyObject *token = NULL; - int bit = -1; - - ENTERING(extension_get_key_usage); - - if (!exts) - goto error; - - if ((ext = X509V3_get_d2i(*exts, NID_key_usage, NULL, NULL)) == NULL) - Py_RETURN_NONE; - - if ((result = PyFrozenSet_New(NULL)) == NULL) - goto error; - - for (bit = 0; key_usage_bit_names[bit] != NULL; bit++) { - if (ASN1_BIT_STRING_get_bit(ext, bit) && - ((token = PyString_FromString(key_usage_bit_names[bit])) == NULL || - PySet_Add(result, token) < 0)) - goto error; - Py_XDECREF(token); - token = NULL; - } - - ASN1_BIT_STRING_free(ext); - return result; - - error: - ASN1_BIT_STRING_free(ext); - Py_XDECREF(token); - Py_XDECREF(result); - return NULL; -} - -static PyObject * -extension_set_key_usage(X509_EXTENSIONS **exts, PyObject *args) -{ - ASN1_BIT_STRING *ext = NULL; - PyObject *iterable = NULL; - PyObject *critical = Py_True; - PyObject *iterator = NULL; - PyObject *item = NULL; - const char *token; - int bit = -1; - int ok = 0; - - ENTERING(extension_set_key_usage); - - if (!exts) - goto error; - - if ((ext = ASN1_BIT_STRING_new()) == NULL) - lose_no_memory(); - - if (!PyArg_ParseTuple(args, "O|O", &iterable, &critical) || - (iterator = PyObject_GetIter(iterable)) == NULL) - goto error; - - while ((item = PyIter_Next(iterator)) != NULL) { - - if ((token = PyString_AsString(item)) == NULL) - goto error; - - for (bit = 0; key_usage_bit_names[bit] != NULL; bit++) - if (!strcmp(token, key_usage_bit_names[bit])) - break; - - if (key_usage_bit_names[bit] == NULL) - lose("Unrecognized KeyUsage token"); - - if (!ASN1_BIT_STRING_set_bit(ext, bit, 1)) - lose_no_memory(); - - Py_XDECREF(item); - item = NULL; - } - - if 
(!X509V3_add1_i2d(exts, NID_key_usage, ext, - PyObject_IsTrue(critical), - X509V3_ADD_REPLACE)) - lose_openssl_error("Couldn't add KeyUsage extension to OpenSSL object"); - - ok = 1; - - error: /* Fall through */ - ASN1_BIT_STRING_free(ext); - Py_XDECREF(iterator); - Py_XDECREF(item); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static PyObject * -extension_get_basic_constraints(X509_EXTENSIONS **exts) -{ - BASIC_CONSTRAINTS *ext = NULL; - PyObject *result = NULL; - - ENTERING(extension_get_basic_constraints); - - if (!exts) - goto error; - - if ((ext = X509V3_get_d2i(*exts, NID_basic_constraints, NULL, NULL)) == NULL) - Py_RETURN_NONE; - - if (ext->pathlen == NULL) - result = Py_BuildValue("(NO)", PyBool_FromLong(ext->ca), Py_None); - else - result = Py_BuildValue("(Nl)", PyBool_FromLong(ext->ca), ASN1_INTEGER_get(ext->pathlen)); - - error: - BASIC_CONSTRAINTS_free(ext); - return result; -} - -static PyObject * -extension_set_basic_constraints(X509_EXTENSIONS **exts, PyObject *args) -{ - BASIC_CONSTRAINTS *ext = NULL; - PyObject *is_ca = NULL; - PyObject *pathlen_obj = Py_None; - PyObject *critical = Py_True; - long pathlen = -1; - int ok = 0; - - ENTERING(extension_set_basic_constraints); - - if (!exts) - goto error; - - if (!PyArg_ParseTuple(args, "O|OO", &is_ca, &pathlen_obj, &critical)) - goto error; - - if (pathlen_obj != Py_None && (pathlen = PyInt_AsLong(pathlen_obj)) < 0) - lose_type_error("Bad pathLenConstraint value"); - - if ((ext = BASIC_CONSTRAINTS_new()) == NULL) - lose_no_memory(); - - ext->ca = PyObject_IsTrue(is_ca) ? 
0xFF : 0; - - if (pathlen_obj != Py_None && - ((ext->pathlen == NULL && (ext->pathlen = ASN1_INTEGER_new()) == NULL) || - !ASN1_INTEGER_set(ext->pathlen, pathlen))) - lose_no_memory(); - - if (!X509V3_add1_i2d(exts, NID_basic_constraints, ext, - PyObject_IsTrue(critical), X509V3_ADD_REPLACE)) - lose_openssl_error("Couldn't add BasicConstraints extension to OpenSSL object"); - - ok = 1; - - error: - BASIC_CONSTRAINTS_free(ext); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static PyObject * -extension_get_sia(X509_EXTENSIONS **exts) -{ - AUTHORITY_INFO_ACCESS *ext = NULL; - PyObject *result = NULL; - PyObject *result_caRepository = NULL; - PyObject *result_rpkiManifest = NULL; - PyObject *result_signedObject = NULL; - int n_caRepository = 0; - int n_rpkiManifest = 0; - int n_signedObject = 0; - const char *uri; - PyObject *obj; - int i, nid; - - ENTERING(pkcs10_object_get_sia); - - if (!exts) - goto error; - - if ((ext = X509V3_get_d2i(*exts, NID_sinfo_access, NULL, NULL)) == NULL) - Py_RETURN_NONE; - - /* - * Easiest to do this in two passes, first pass just counts URIs. 
- */ - - for (i = 0; i < sk_ACCESS_DESCRIPTION_num(ext); i++) { - ACCESS_DESCRIPTION *a = sk_ACCESS_DESCRIPTION_value(ext, i); - if (a->location->type != GEN_URI) - continue; - nid = OBJ_obj2nid(a->method); - if (nid == NID_caRepository) { - n_caRepository++; - continue; - } - if (nid == NID_rpkiManifest) { - n_rpkiManifest++; - continue; - } - if (nid == NID_signedObject) { - n_signedObject++; - continue; - } - } - - if (((result_caRepository = PyTuple_New(n_caRepository)) == NULL) || - ((result_rpkiManifest = PyTuple_New(n_rpkiManifest)) == NULL) || - ((result_signedObject = PyTuple_New(n_signedObject)) == NULL)) - goto error; - - n_caRepository = n_rpkiManifest = n_signedObject = 0; - - for (i = 0; i < sk_ACCESS_DESCRIPTION_num(ext); i++) { - ACCESS_DESCRIPTION *a = sk_ACCESS_DESCRIPTION_value(ext, i); - if (a->location->type != GEN_URI) - continue; - nid = OBJ_obj2nid(a->method); - uri = (char *) ASN1_STRING_data(a->location->d.uniformResourceIdentifier); - if (nid == NID_caRepository) { - if ((obj = PyString_FromString(uri)) == NULL) - goto error; - PyTuple_SET_ITEM(result_caRepository, n_caRepository++, obj); - continue; - } - if (nid == NID_rpkiManifest) { - if ((obj = PyString_FromString(uri)) == NULL) - goto error; - PyTuple_SET_ITEM(result_rpkiManifest, n_rpkiManifest++, obj); - continue; - } - if (nid == NID_signedObject) { - if ((obj = PyString_FromString(uri)) == NULL) - goto error; - PyTuple_SET_ITEM(result_signedObject, n_signedObject++, obj); - continue; - } - } - - result = Py_BuildValue("(OOO)", - result_caRepository, - result_rpkiManifest, - result_signedObject); - - error: - AUTHORITY_INFO_ACCESS_free(ext); - Py_XDECREF(result_caRepository); - Py_XDECREF(result_rpkiManifest); - Py_XDECREF(result_signedObject); - return result; -} - -static PyObject * -extension_set_sia(X509_EXTENSIONS **exts, PyObject *args, PyObject *kwds) -{ - static char *kwlist[] = {"caRepository", "rpkiManifest", "signedObject", NULL}; - AUTHORITY_INFO_ACCESS *ext = NULL; - 
PyObject *caRepository = Py_None; - PyObject *rpkiManifest = Py_None; - PyObject *signedObject = Py_None; - PyObject *iterator = NULL; - ASN1_OBJECT *oid = NULL; - PyObject **pobj = NULL; - PyObject *item = NULL; - ACCESS_DESCRIPTION *a = NULL; - int i, nid = NID_undef, ok = 0; - Py_ssize_t urilen; - char *uri; - - ENTERING(extension_set_sia); - - if (!exts) - goto error; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOO", kwlist, - &caRepository, &rpkiManifest, &signedObject)) - goto error; - - if ((ext = AUTHORITY_INFO_ACCESS_new()) == NULL) - lose_no_memory(); - - /* - * This is going to want refactoring, because it's ugly, because we - * want to reuse code for AIA, and because it'd be nice to support a - * single URI as an abbreviation for a collection containing one URI. - */ - - for (i = 0; i < 3; i++) { - switch (i) { - case 0: pobj = &caRepository; nid = NID_caRepository; break; - case 1: pobj = &rpkiManifest; nid = NID_rpkiManifest; break; - case 2: pobj = &signedObject; nid = NID_signedObject; break; - } - - if (*pobj == Py_None) - continue; - - if ((oid = OBJ_nid2obj(nid)) == NULL) - lose_openssl_error("Couldn't find SIA accessMethod OID"); - - if ((iterator = PyObject_GetIter(*pobj)) == NULL) - goto error; - - while ((item = PyIter_Next(iterator)) != NULL) { - - if (PyString_AsStringAndSize(item, &uri, &urilen) < 0) - goto error; - - if ((a = ACCESS_DESCRIPTION_new()) == NULL || - (a->method = OBJ_dup(oid)) == NULL || - (a->location->d.uniformResourceIdentifier = ASN1_IA5STRING_new()) == NULL || - !ASN1_OCTET_STRING_set(a->location->d.uniformResourceIdentifier, (unsigned char *) uri, urilen)) - lose_no_memory(); - - a->location->type = GEN_URI; - - if (!sk_ACCESS_DESCRIPTION_push(ext, a)) - lose_no_memory(); - - a = NULL; - Py_XDECREF(item); - item = NULL; - } - - Py_XDECREF(iterator); - iterator = NULL; - } - - if (!X509V3_add1_i2d(exts, NID_sinfo_access, ext, 0, X509V3_ADD_REPLACE)) - lose_openssl_error("Couldn't add SIA extension to OpenSSL 
object"); - - ok = 1; - - error: - AUTHORITY_INFO_ACCESS_free(ext); - ACCESS_DESCRIPTION_free(a); - Py_XDECREF(item); - Py_XDECREF(iterator); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static PyObject * -extension_get_eku(X509_EXTENSIONS **exts) -{ - EXTENDED_KEY_USAGE *ext = NULL; - PyObject *result = NULL; - PyObject *oid = NULL; - int i; - - ENTERING(extension_get_eku); - - if (!exts) - goto error; - - if ((ext = X509V3_get_d2i(*exts, NID_ext_key_usage, NULL, NULL)) == NULL) - Py_RETURN_NONE; - - if ((result = PyFrozenSet_New(NULL)) == NULL) - goto error; - - for (i = 0; i < sk_ASN1_OBJECT_num(ext); i++) { - if ((oid = ASN1_OBJECT_to_PyString(sk_ASN1_OBJECT_value(ext, i))) == NULL || - PySet_Add(result, oid) < 0) - goto error; - Py_XDECREF(oid); - oid = NULL; - } - - sk_ASN1_OBJECT_pop_free(ext, ASN1_OBJECT_free); - return result; - - error: - sk_ASN1_OBJECT_pop_free(ext, ASN1_OBJECT_free); - Py_XDECREF(oid); - Py_XDECREF(result); - return NULL; -} - -static PyObject * -extension_set_eku(X509_EXTENSIONS **exts, PyObject *args) -{ - EXTENDED_KEY_USAGE *ext = NULL; - PyObject *iterable = NULL; - PyObject *critical = Py_False; - PyObject *iterator = NULL; - PyObject *item = NULL; - ASN1_OBJECT *obj = NULL; - const char *txt; - int ok = 0; - - ENTERING(extension_set_eku); - - if (!exts) - goto error; - - if ((ext = sk_ASN1_OBJECT_new_null()) == NULL) - lose_no_memory(); - - if (!PyArg_ParseTuple(args, "O|O", &iterable, &critical) || - (iterator = PyObject_GetIter(iterable)) == NULL) - goto error; - - while ((item = PyIter_Next(iterator)) != NULL) { - - if ((txt = PyString_AsString(item)) == NULL) - goto error; - - if ((obj = OBJ_txt2obj(txt, 1)) == NULL) - lose("Couldn't parse OID"); - - if (!sk_ASN1_OBJECT_push(ext, obj)) - lose_no_memory(); - - obj = NULL; - Py_XDECREF(item); - item = NULL; - } - - if (sk_ASN1_OBJECT_num(ext) < 1) - lose("Empty ExtendedKeyUsage extension"); - - if (!X509V3_add1_i2d(exts, NID_ext_key_usage, ext, - 
PyObject_IsTrue(critical), - X509V3_ADD_REPLACE)) - lose_openssl_error("Couldn't add ExtendedKeyUsage extension to OpenSSL object"); - - ok = 1; - - error: /* Fall through */ - sk_ASN1_OBJECT_pop_free(ext, ASN1_OBJECT_free); - Py_XDECREF(item); - Py_XDECREF(iterator); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static PyObject * -extension_get_ski(X509_EXTENSIONS **exts) -{ - ASN1_OCTET_STRING *ext = NULL; - PyObject *result = NULL; - - ENTERING(extension_get_ski); - - if (!exts) - goto error; - - if ((ext = X509V3_get_d2i(*exts, NID_subject_key_identifier, NULL, NULL)) == NULL) - Py_RETURN_NONE; - - result = Py_BuildValue("s#", ASN1_STRING_data(ext), - (Py_ssize_t) ASN1_STRING_length(ext)); - - error: /* Fall through */ - ASN1_OCTET_STRING_free(ext); - return result; -} - -static PyObject * -extension_set_ski(X509_EXTENSIONS **exts, PyObject *args) -{ - ASN1_OCTET_STRING *ext = NULL; - const unsigned char *buf = NULL; - Py_ssize_t len; - int ok = 0; - - ENTERING(extension_set_ski); - - if (!exts) - goto error; - - if (!PyArg_ParseTuple(args, "s#", &buf, &len)) - goto error; - - if ((ext = ASN1_OCTET_STRING_new()) == NULL || - !ASN1_OCTET_STRING_set(ext, buf, len)) - lose_no_memory(); - - /* - * RFC 5280 says this MUST be non-critical. 
- */ - - if (!X509V3_add1_i2d(exts, NID_subject_key_identifier, - ext, 0, X509V3_ADD_REPLACE)) - lose_openssl_error("Couldn't add SKI extension to OpenSSL object"); - - ok = 1; - - error: - ASN1_OCTET_STRING_free(ext); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static PyObject * -extension_get_aki(X509_EXTENSIONS **exts) -{ - AUTHORITY_KEYID *ext = NULL; - PyObject *result = NULL; - - ENTERING(extension_get_aki); - - if (!exts) - goto error; - - if ((ext = X509V3_get_d2i(*exts, NID_authority_key_identifier, NULL, NULL)) == NULL) - Py_RETURN_NONE; - - result = Py_BuildValue("s#", ASN1_STRING_data(ext->keyid), - (Py_ssize_t) ASN1_STRING_length(ext->keyid)); - - error: /* Fall through */ - AUTHORITY_KEYID_free(ext); - return result; -} - -static PyObject * -extension_set_aki(X509_EXTENSIONS **exts, PyObject *args) -{ - AUTHORITY_KEYID *ext = NULL; - const unsigned char *buf = NULL; - Py_ssize_t len; - int ok = 0; - - ENTERING(extension_set_aki); - - assert (exts); - - if (!PyArg_ParseTuple(args, "s#", &buf, &len)) - goto error; - - if ((ext = AUTHORITY_KEYID_new()) == NULL || - (ext->keyid == NULL && (ext->keyid = ASN1_OCTET_STRING_new()) == NULL) || - !ASN1_OCTET_STRING_set(ext->keyid, buf, len)) - lose_no_memory(); - - /* - * RFC 5280 says this MUST be non-critical. - */ - - if (!X509V3_add1_i2d(exts, NID_authority_key_identifier, - ext, 0, X509V3_ADD_REPLACE)) - lose_openssl_error("Couldn't add AKI extension to OpenSSL object"); - - ok = 1; - - error: - AUTHORITY_KEYID_free(ext); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - - - -/* - * IPAddress object. 
 */

/*
 * Construct a new IPAddress.
 *
 * The initializer may be another IPAddress (cloned verbatim), a text
 * string (parsed with inet_pton()), or anything PyNumber_Long() can
 * convert (unpacked big-endian into the address buffer).  The
 * optional "version" argument selects the IP version; when omitted
 * for a string initializer, we guess 6 if the text contains a colon,
 * otherwise 4.
 */
static PyObject *
ipaddress_object_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
  static char *kwlist[] = {"initializer", "version", NULL};
  ipaddress_object *self = NULL;
  PyObject *init = NULL;
  PyObject *pylong = NULL;
  int version = 0;
  const char *s = NULL;
  int v;

  ENTERING(ipaddress_object_new);

  if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|i", kwlist, &init, &version) ||
      (self = (ipaddress_object *) type->tp_alloc(type, 0)) == NULL)
    goto error;

  /* Copy constructor: clone address bytes and version descriptor. */
  if (POW_IPAddress_Check(init)) {
    ipaddress_object *src = (ipaddress_object *) init;
    memcpy(self->address, src->address, sizeof(self->address));
    self->type = src->type;
    return (PyObject *) self;
  }

  /*
   * A non-string initializer is not an error here: clear the pending
   * exception and fall through to the numeric conversion below.
   */
  if ((s = PyString_AsString(init)) == NULL)
    PyErr_Clear();
  else if (version == 0)
    version = strchr(s, ':') ? 6 : 4;

  self->type = NULL;

  /* Look up the per-version descriptor matching the requested version. */
  for (v = 0; v < (int) (sizeof(ipaddress_versions)/sizeof(*ipaddress_versions)); v++)
    if ((unsigned) version == ipaddress_versions[v]->version)
      self->type = ipaddress_versions[v];

  if (self->type == NULL)
    lose("Unknown IP version number");

  /* String initializer: let inet_pton() do the parsing. */
  if (s != NULL) {
    if (inet_pton(self->type->af, s, self->address) <= 0)
      lose("Couldn't parse IP address");
    return (PyObject *) self;
  }

  /* Numeric initializer: unpack the long big-endian and unsigned. */
  if ((pylong = PyNumber_Long(init)) != NULL) {
    if (_PyLong_AsByteArray((PyLongObject *) pylong, self->address, self->type->length, 0, 0) < 0)
      goto error;
    Py_XDECREF(pylong);
    return (PyObject *) self;
  }

  lose_type_error("Couldn't convert initializer to IPAddress");

 error:
  Py_XDECREF(self);
  Py_XDECREF(pylong);
  return NULL;
}

/*
 * str(): render the address in the usual presentation form.
 */
static PyObject *
ipaddress_object_str(ipaddress_object *self)
{
  /* Buffer sized for the longest textual form (IPv6 with embedded IPv4). */
  char addrstr[sizeof("aaaa:bbbb:cccc:dddd:eeee:ffff:255.255.255.255") + 1];

  ENTERING(ipaddress_object_str);

  if (!inet_ntop(self->type->af, self->address, addrstr, sizeof(addrstr)))
    lose("Couldn't convert IP address");

  return PyString_FromString(addrstr);

 error:
  return NULL;
}

/*
 * repr(): "<type-name address at 0xaddr>".
 */
static PyObject *
ipaddress_object_repr(ipaddress_object *self)
{
  char addrstr[sizeof("aaaa:bbbb:cccc:dddd:eeee:ffff:255.255.255.255") + 1];

  ENTERING(ipaddress_object_repr);

  if (!inet_ntop(self->type->af, self->address, addrstr, sizeof(addrstr)))
    lose("Couldn't convert IP address");

  return PyString_FromFormat("<%s object %s at %p>",
                             self->ob_type->tp_name, addrstr, self);

 error:
  return NULL;
}

/*
 * tp_compare: compare by numeric value, converting both operands
 * with PyNumber_Long() first so IPAddress compares against plain
 * integers too.
 */
static int
ipaddress_object_compare(PyObject *arg1, PyObject *arg2)
{
  PyObject *obj1 = PyNumber_Long(arg1);
  PyObject *obj2 = PyNumber_Long(arg2);
  int cmp = -1;

  ENTERING(ipaddress_object_compare);

  if (obj1 != NULL && obj2 != NULL)
    cmp = PyObject_Compare(obj1, obj2);

  Py_XDECREF(obj1);
  Py_XDECREF(obj2);
  return cmp;
}

/*
 * tp_richcompare: same numeric-value comparison as tp_compare above.
 */
static PyObject *
ipaddress_object_richcompare(PyObject *arg1, PyObject *arg2, int op)
{
  PyObject *obj1 = PyNumber_Long(arg1);
  PyObject *obj2 = PyNumber_Long(arg2);
  PyObject *result = NULL;

  ENTERING(ipaddress_object_richcompare);

  if (obj1 != NULL && obj2 != NULL)
    result = PyObject_RichCompare(obj1, obj2, op);

  Py_XDECREF(obj1);
  Py_XDECREF(obj2);
  return result;
}

/*
 * tp_hash: XOR each address byte into one of the four byte lanes of
 * a long, selected by byte position (i & 3).
 */
static long
ipaddress_object_hash(ipaddress_object *self)
{
  unsigned long h = 0;
  int i;

  ENTERING(ipaddress_object_hash);

  for (i = 0; (unsigned) i < self->type->length; i++)
    h ^= self->address[i] << ((i & 3) << 3);

  /* CPython reserves -1 as the tp_hash error value, so remap it. */
  return (long) h == -1 ?
0 : (long) h; -} - -static char ipaddress_object_from_bytes__doc__[] = - "Construct an IPAddress object from a sequence of bytes.\n" - "\n" - "Argument must be a Python string of exactly 4 or 16 bytes.\n" - ; - -static PyObject * -ipaddress_object_from_bytes(PyTypeObject *type, PyObject *args) -{ - ipaddress_object *result = NULL; - char *bytes = NULL; - Py_ssize_t len; - int v; - - ENTERING(ipaddress_object_from_bytes); - - if (!PyArg_ParseTuple(args, "s#", &bytes, &len)) - goto error; - - if ((result = (ipaddress_object *) type->tp_alloc(type, 0)) == NULL) - goto error; - - result->type = NULL; - - for (v = 0; v < (int) (sizeof(ipaddress_versions)/sizeof(*ipaddress_versions)); v++) - if (len == ipaddress_versions[v]->length) - result->type = ipaddress_versions[v]; - - if (result->type == NULL) - lose("Unknown IP version number"); - - memcpy(result->address, bytes, len); - return (PyObject *) result; - - error: - Py_XDECREF(result); - return NULL; -} - -static char ipaddress_object_to_bytes__doc__[] = - "Return the binary value of this IPAddress as a Python string\n" - "of exactly 4 or 16 bytes.\n" - ; - -static PyObject * -ipaddress_object_to_bytes(ipaddress_object *self) -{ - ENTERING(ipaddress_object_from_bytes); - return PyString_FromStringAndSize((char *) self->address, self->type->length); -} - -static PyObject * -ipaddress_object_get_bits(ipaddress_object *self, GCC_UNUSED void *closure) -{ - ENTERING(ipaddress_object_get_bits); - return PyInt_FromLong(self->type->length * 8); -} - -static PyObject * -ipaddress_object_get_version(ipaddress_object *self, GCC_UNUSED void *closure) -{ - ENTERING(ipaddress_object_get_version); - return PyInt_FromLong(self->type->version); -} - -static PyObject * -ipaddress_object_number_binary_helper(binaryfunc function, PyObject *arg1, PyObject *arg2) -{ - ipaddress_object *addr = NULL; - ipaddress_object *addr1 = NULL; - ipaddress_object *addr2 = NULL; - ipaddress_object *result = NULL; - PyObject *obj1 = NULL; - PyObject 
*obj2 = NULL; - PyObject *obj3 = NULL; - PyObject *obj4 = NULL; - - if (POW_IPAddress_Check(arg1)) - addr1 = (ipaddress_object *) arg1; - - if (POW_IPAddress_Check(arg2)) - addr2 = (ipaddress_object *) arg2; - - if ((addr1 == NULL && addr2 == NULL) || - (addr1 != NULL && addr2 != NULL && addr1->type != addr2->type) || - (obj1 = PyNumber_Long(arg1)) == NULL || - (obj2 = PyNumber_Long(arg2)) == NULL) { - result = (ipaddress_object *) Py_NotImplemented; - Py_INCREF(result); - goto error; - } - - if ((obj3 = function(obj1, obj2)) == NULL) - goto error; - - if ((obj4 = PyNumber_Long(obj3)) == NULL) - lose("Couldn't convert result"); - - addr = addr1 != NULL ? addr1 : addr2; - - if ((result = (ipaddress_object *) addr->ob_type->tp_alloc(addr->ob_type, 0)) == NULL) - goto error; - - result->type = addr->type; - - if (_PyLong_AsByteArray((PyLongObject *) obj4, result->address, result->type->length, 0, 0) < 0) { - Py_XDECREF(result); - result = NULL; - } - - error: /* Fall through */ - Py_XDECREF(obj1); - Py_XDECREF(obj2); - Py_XDECREF(obj3); - Py_XDECREF(obj4); - - return (PyObject *) result; -} - -static PyObject * -ipaddress_object_number_long(PyObject *arg) -{ - ipaddress_object *addr = (ipaddress_object *) arg; - - ENTERING(ipaddress_object_number_long); - - if (!POW_IPAddress_Check(arg)) - return Py_INCREF(Py_NotImplemented), Py_NotImplemented; - - return _PyLong_FromByteArray(addr->address, addr->type->length, 0, 0); -} - -static PyObject * -ipaddress_object_number_int(PyObject *arg) -{ - ENTERING(ipaddress_object_number_int); - return ipaddress_object_number_long(arg); -} - -static PyObject * -ipaddress_object_number_add(PyObject *arg1, PyObject *arg2) -{ - ENTERING(ipaddress_object_number_add); - return ipaddress_object_number_binary_helper(PyNumber_Add, arg1, arg2); -} - -static PyObject * -ipaddress_object_number_subtract(PyObject *arg1, PyObject *arg2) -{ - ENTERING(ipaddress_object_number_subtract); - return 
ipaddress_object_number_binary_helper(PyNumber_Subtract, arg1, arg2); -} - -static PyObject * -ipaddress_object_number_lshift(PyObject *arg1, PyObject *arg2) -{ - ENTERING(ipaddress_object_number_lshift); - return ipaddress_object_number_binary_helper(PyNumber_Lshift, arg1, arg2); -} - -static PyObject * -ipaddress_object_number_rshift(PyObject *arg1, PyObject *arg2) -{ - ENTERING(ipaddress_object_number_rshift); - return ipaddress_object_number_binary_helper(PyNumber_Rshift, arg1, arg2); -} - -static PyObject * -ipaddress_object_number_and(PyObject *arg1, PyObject *arg2) -{ - ENTERING(ipaddress_object_number_and); - return ipaddress_object_number_binary_helper(PyNumber_And, arg1, arg2); -} - -static PyObject * -ipaddress_object_number_xor(PyObject *arg1, PyObject *arg2) -{ - ENTERING(ipaddress_object_number_xor); - return ipaddress_object_number_binary_helper(PyNumber_Xor, arg1, arg2); -} - -static PyObject * -ipaddress_object_number_or(PyObject *arg1, PyObject *arg2) -{ - ENTERING(ipaddress_object_number_or); - return ipaddress_object_number_binary_helper(PyNumber_Or, arg1, arg2); -} - -static int -ipaddress_object_number_nonzero(ipaddress_object *self) -{ - int i; - - ENTERING(ipaddress_object_number_nonzero); - - for (i = 0; (unsigned) i < self->type->length; i++) - if (self->address[i] != 0) - return 1; - return 0; -} - -static PyObject * -ipaddress_object_number_invert(ipaddress_object *self) -{ - ipaddress_object *result = NULL; - int i; - - ENTERING(ipaddress_object_number_invert); - - if ((result = (ipaddress_object *) self->ob_type->tp_alloc(self->ob_type, 0)) == NULL) - goto error; - - result->type = self->type; - - for (i = 0; (unsigned) i < self->type->length; i++) - result->address[i] = ~self->address[i]; - - error: /* Fall through */ - return (PyObject *) result; -} - -static char ipaddress_object_copy__doc__[] = - "" - ; - -static PyObject * -ipaddress_object_copy(ipaddress_object *self, GCC_UNUSED PyObject *args) -{ - ipaddress_object *result = 
NULL; - - ENTERING(ipaddress_object_copy); - - if ((result = (ipaddress_object *) self->ob_type->tp_alloc(self->ob_type, 0)) == NULL) - goto error; - - memcpy(result->address, self->address, sizeof(result->address)); - result->type = self->type; - - error: - return (PyObject *) result; -} - -static struct PyMethodDef ipaddress_object_methods[] = { - Define_Method(__copy__, ipaddress_object_copy, METH_VARARGS), - Define_Method(__deepcopy__, ipaddress_object_copy, METH_VARARGS), - Define_Method(toBytes, ipaddress_object_to_bytes, METH_NOARGS), - Define_Class_Method(fromBytes, ipaddress_object_from_bytes, METH_VARARGS), - {NULL} -}; - -static PyGetSetDef ipaddress_object_getsetters[] = { - {"bits", (getter) ipaddress_object_get_bits}, - {"version", (getter) ipaddress_object_get_version}, - {NULL} -}; - -static PyNumberMethods ipaddress_NumberMethods = { - ipaddress_object_number_add, /* nb_add */ - ipaddress_object_number_subtract, /* nb_subtract */ - 0, /* nb_multiply */ - 0, /* nb_divide */ - 0, /* nb_remainder */ - 0, /* nb_divmod */ - 0, /* nb_power */ - 0, /* nb_negative */ - 0, /* nb_positive */ - 0, /* nb_absolute */ - (inquiry) ipaddress_object_number_nonzero, /* nb_nonzero */ - (unaryfunc) ipaddress_object_number_invert, /* nb_invert */ - ipaddress_object_number_lshift, /* nb_lshift */ - ipaddress_object_number_rshift, /* nb_rshift */ - ipaddress_object_number_and, /* nb_and */ - ipaddress_object_number_xor, /* nb_xor */ - ipaddress_object_number_or, /* nb_or */ - 0, /* nb_coerce */ - ipaddress_object_number_int, /* nb_int */ - ipaddress_object_number_long, /* nb_long */ - 0, /* nb_float */ - 0, /* nb_oct */ - 0, /* nb_hex */ - 0, /* nb_inplace_add */ - 0, /* nb_inplace_subtract */ - 0, /* nb_inplace_multiply */ - 0, /* nb_inplace_divide */ - 0, /* nb_inplace_remainder */ - 0, /* nb_inplace_power */ - 0, /* nb_inplace_lshift */ - 0, /* nb_inplace_rshift */ - 0, /* nb_inplace_and */ - 0, /* nb_inplace_xor */ - 0, /* nb_inplace_or */ - 0, /* nb_floor_divide */ 
- 0, /* nb_true_divide */ - 0, /* nb_inplace_floor_divide */ - 0, /* nb_inplace_true_divide */ - 0, /* nb_index */ -}; - -static PyTypeObject POW_IPAddress_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ - "rpki.POW.IPAddress", /* tp_name */ - sizeof(ipaddress_object), /* tp_basicsize */ - 0, /* tp_itemsize */ - 0, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - ipaddress_object_compare, /* tp_compare */ - (reprfunc) ipaddress_object_repr, /* tp_repr */ - &ipaddress_NumberMethods, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - (hashfunc) ipaddress_object_hash, /* tp_hash */ - 0, /* tp_call */ - (reprfunc) ipaddress_object_str, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_CHECKTYPES, /* tp_flags */ - 0, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - ipaddress_object_richcompare, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - ipaddress_object_methods, /* tp_methods */ - 0, /* tp_members */ - ipaddress_object_getsetters, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - ipaddress_object_new, /* tp_new */ -}; - - - -/* - * X509 object. 
- */ - -static x509_object * -x509_object_new_helper(PyTypeObject *type, X509 *x) -{ - x509_object *self; - - if (type == NULL) - type = &POW_X509_Type; - - if ((self = (x509_object *) type->tp_alloc(type, 0)) == NULL) - return NULL; - - self->x509 = x; - return self; -} - -static PyObject * -x509_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds) -{ - x509_object *self = NULL; - X509 *x = NULL; - - ENTERING(x509_object_new); - - if ((x = X509_new()) == NULL) - lose_no_memory(); - - if ((self = x509_object_new_helper(type, x)) == NULL) - goto error; - - return (PyObject *) self; - - error: - X509_free(x); - return NULL; -} - -static void -x509_object_dealloc(x509_object *self) -{ - ENTERING(x509_object_dealloc); - X509_free(self->x509); - self->ob_type->tp_free((PyObject*) self); -} - -static PyObject * -x509_object_pem_read_helper(PyTypeObject *type, BIO *bio) -{ - x509_object *self = NULL; - - ENTERING(x509_object_pem_read_helper); - - if ((self = (x509_object *) x509_object_new(type, NULL, NULL)) == NULL) - goto error; - - if (!PEM_read_bio_X509(bio, &self->x509, NULL, NULL)) - lose_openssl_error("Couldn't load PEM encoded certificate"); - - return (PyObject *) self; - - error: - - Py_XDECREF(self); - return NULL; -} - -static PyObject * -x509_object_der_read_helper(PyTypeObject *type, BIO *bio) -{ - x509_object *self; - - ENTERING(x509_object_der_read_helper); - - if ((self = (x509_object *) x509_object_new(type, NULL, NULL)) == NULL) - goto error; - - if (!d2i_X509_bio(bio, &self->x509)) - lose_openssl_error("Couldn't load DER encoded certificate"); - - return (PyObject *) self; - - error: - Py_XDECREF(self); - return NULL; -} - -static char x509_object_pem_read__doc__[] = - "Read a PEM-encoded X.509 object from a string.\n" - ; - -static PyObject * -x509_object_pem_read(PyTypeObject *type, PyObject *args) -{ - ENTERING(x509_object_pem_read); - return read_from_string_helper(x509_object_pem_read_helper, type, args); -} - 
-static char x509_object_pem_read_file__doc__[] = - "Read a PEM-encoded X.509 object from a file.\n" - ; - -static PyObject * -x509_object_pem_read_file(PyTypeObject *type, PyObject *args) -{ - ENTERING(x509_object_pem_read_file); - return read_from_file_helper(x509_object_pem_read_helper, type, args); -} - -static char x509_object_der_read__doc__[] = - "Read a DER-encoded X.509 object from a string.\n" - ; - -static PyObject * -x509_object_der_read(PyTypeObject *type, PyObject *args) -{ - ENTERING(x509_object_der_read); - return read_from_string_helper(x509_object_der_read_helper, type, args); -} - -static char x509_object_der_read_file__doc__[] = - "Read a DER-encoded X.509 object from a file.\n" - ; - -static PyObject * -x509_object_der_read_file(PyTypeObject *type, PyObject *args) -{ - ENTERING(x509_object_der_read_file); - return read_from_file_helper(x509_object_der_read_helper, type, args); -} - -static char x509_object_pem_write__doc__[] = - "Return the PEM encoding of this certificate, as a string.\n" - ; - -static PyObject * -x509_object_pem_write(x509_object *self) -{ - PyObject *result = NULL; - BIO *bio = NULL; - - ENTERING(x509_object_pem_write); - - if ((bio = BIO_new(BIO_s_mem())) == NULL) - lose_no_memory(); - - if (!PEM_write_bio_X509(bio, self->x509)) - lose_openssl_error("Unable to write certificate"); - - result = BIO_to_PyString_helper(bio); - - error: /* Fall through */ - BIO_free(bio); - return result; -} - -static char x509_object_der_write__doc__[] = - "Return the DER encoding of this certificate, as a string.\n" - ; - -static PyObject * -x509_object_der_write(x509_object *self) -{ - PyObject *result = NULL; - BIO *bio = NULL; - - ENTERING(x509_object_der_write); - - if ((bio = BIO_new(BIO_s_mem())) == NULL) - lose_no_memory(); - - if (!i2d_X509_bio(bio, self->x509)) - lose_openssl_error("Unable to write certificate"); - - result = BIO_to_PyString_helper(bio); - - error: /* Fall through */ - BIO_free(bio); - return result; -} - -static 
X509_EXTENSIONS ** -x509_object_extension_helper(x509_object *self) -{ - if (self && self->x509 && self->x509->cert_info) - return &self->x509->cert_info->extensions; - PyErr_SetString(PyExc_ValueError, "Can't find X509_EXTENSIONS in X509 object"); - return NULL; -} - -static char x509_object_get_public_key__doc__[] = - "Return the public key from this certificate object,\n" - "as an Asymmetric object.\n" - ; - -static PyObject * -x509_object_get_public_key(x509_object *self) -{ - PyTypeObject *type = &POW_Asymmetric_Type; - asymmetric_object *asym = NULL; - - ENTERING(x509_object_get_public_key); - - if ((asym = (asymmetric_object *) type->tp_alloc(type, 0)) == NULL) - goto error; - - if ((asym->pkey = X509_get_pubkey(self->x509)) == NULL) - lose_openssl_error("Couldn't extract public key from certificate"); - - return (PyObject *) asym; - - error: - Py_XDECREF(asym); - return NULL; -} - -static char x509_object_set_public_key__doc__[] = - "Set the public key of this certificate object.\n" - "\n" - "The \"key\" parameter should be an instance of the Asymmetric class,\n" - "containing a public key.\n" - ; - -static PyObject * -x509_object_set_public_key(x509_object *self, PyObject *args) -{ - asymmetric_object *asym; - - ENTERING(x509_object_set_public_key); - - if (!PyArg_ParseTuple(args, "O!", &POW_Asymmetric_Type, &asym)) - goto error; - - if (!X509_set_pubkey(self->x509, asym->pkey)) - lose_openssl_error("Couldn't set certificate's public key"); - - Py_RETURN_NONE; - - error: - return NULL; -} - -static char x509_object_sign__doc__[] = - "Sign a certificate with a private key.\n" - "\n" - "The \"key\" parameter should be an instance of the Asymmetric class,\n" - "containing a private key.\n" - "\n" - "The optional \"digest\" parameter indicates which digest to compute and\n" - "sign, and should be one of the following:\n" - "\n" - "* MD5_DIGEST\n" - "* SHA_DIGEST\n" - "* SHA1_DIGEST\n" - "* SHA256_DIGEST\n" - "* SHA384_DIGEST\n" - "* SHA512_DIGEST\n" - "\n" - 
"The default digest algorithm is SHA-256.\n" - ; - -static PyObject * -x509_object_sign(x509_object *self, PyObject *args) -{ - asymmetric_object *asym; - int digest_type = SHA256_DIGEST; - const EVP_MD *digest_method = NULL; - - ENTERING(x509_object_sign); - - if (!PyArg_ParseTuple(args, "O!|i", &POW_Asymmetric_Type, &asym, &digest_type)) - goto error; - - if ((digest_method = evp_digest_factory(digest_type)) == NULL) - lose("Unsupported digest algorithm"); - - if (!X509_sign(self->x509, asym->pkey, digest_method)) - lose_openssl_error("Couldn't sign certificate"); - - Py_RETURN_NONE; - - error: - return NULL; -} - -static char x509_object_get_version__doc__[] = - "Return version number of this certificate.\n" - ; - -static PyObject * -x509_object_get_version(x509_object *self) -{ - ENTERING(x509_object_get_version); - return Py_BuildValue("l", X509_get_version(self->x509)); -} - -static char x509_object_set_version__doc__[] = - "Set version number of this certificate.\n" - "\n" - "The \"version\" parameter should be an integer.\n" - ; - -static PyObject * -x509_object_set_version(x509_object *self, PyObject *args) -{ - long version = 0; - - ENTERING(x509_object_set_version); - - if (!PyArg_ParseTuple(args, "l", &version)) - goto error; - - if (!X509_set_version(self->x509, version)) - lose("Couldn't set certificate version"); - - Py_RETURN_NONE; - - error: - - return NULL; -} - -static char x509_object_get_serial__doc__[] = - "Return the serial number of this certificate.\n" - ; - -static PyObject * -x509_object_get_serial(x509_object *self) -{ - ENTERING(x509_object_get_serial); - return Py_BuildValue("N", ASN1_INTEGER_to_PyLong(X509_get_serialNumber(self->x509))); -} - -static char x509_object_set_serial__doc__[] = - "Set the serial number of this certificate.\n" - "\n" - "The \"serial\" parameter should ba an integer.\n" - ; - -static PyObject * -x509_object_set_serial(x509_object *self, PyObject *args) -{ - ASN1_INTEGER *a_serial = NULL; - PyObject *p_serial 
= NULL; - int ok = 0; - - ENTERING(x509_object_set_serial); - - if (!PyArg_ParseTuple(args, "O", &p_serial) || - (a_serial = PyLong_to_ASN1_INTEGER(p_serial)) == NULL) - goto error; - - if (!X509_set_serialNumber(self->x509, a_serial)) - lose_no_memory(); - - ok = 1; - - error: - ASN1_INTEGER_free(a_serial); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static char x509_object_get_issuer__doc__[] = - "Return this certificate's issuer name, represented as a tuple.\n" - "\n" - "Each element of this tuple is another tuple representing one\n" - "\"Relative Distinguished Name\" (RDN), each element of which in turn\n" - "is yet another tuple representing one AttributeTypeAndValue pair.\n" - "\n" - "In practice, RDNs containing multiple attributes are rare, thus the RDN\n" - "tuples will usually be exactly one element long, but using the\n" - "tuple-of-tuples-of-tuples format lets us represent the general case.\n" - "\n" - "The AttributeTypeANdValue pairs are two-element tuples, the first\n" - "element of which is a string representing an Object Identifier (OID),\n" - "the second of which contains the attribute value.\n" - "\n" - "This method takes an optional \"format\" parameter which controls\n" - "the format in which OIDs are returned. 
Allowed values are:\n" - "\n" - " * SHORTNAME_FORMAT (the OpenSSL \"short name\" for this OID)\n" - " * LONGNAME_FORMAT (the OpenSSL \"long name\" for this OID)\n" - " * OIDNAME_FORMAT (the OID in dotted decimal numeric format)\n" - "\n" - "The default is OIDNAME_FORMAT.\n" - "\n" - "See RFC 5280 section 4.1.2.4 for details of the ASN.1 structure.\n" - ; - -static PyObject * -x509_object_get_issuer(x509_object *self, PyObject *args) -{ - PyObject *result = NULL; - int format = OIDNAME_FORMAT; - - ENTERING(x509_object_get_issuer); - - if (!PyArg_ParseTuple(args, "|i", &format)) - goto error; - - result = x509_object_helper_get_name(X509_get_issuer_name(self->x509), - format); - - error: /* Fall through */ - return result; -} - -static char x509_object_get_subject__doc__[] = - "Return this certificate's subject name, as a tuple.\n" - "\n" - "See the documentation for the \"getIssuer\" method for details on the\n" - "structure of the return value and use of the optional \"format\"\n" - "parameter.\n" - ; - -static PyObject * -x509_object_get_subject(x509_object *self, PyObject *args) -{ - PyObject *result = NULL; - int format = OIDNAME_FORMAT; - - ENTERING(x509_object_get_subject); - - if (!PyArg_ParseTuple(args, "|i", &format)) - goto error; - - result = x509_object_helper_get_name(X509_get_subject_name(self->x509), - format); - - error: /* Fall through */ - return result; -} - -static char x509_object_set_subject__doc__[] = - "Set this certificate's subject name.\n" - "\n" - "The \"name\" parameter should be in the same format as the return\n" - "value from the \"getIssuer\" method.\n" - ; - -static PyObject * -x509_object_set_subject(x509_object *self, PyObject *args) -{ - PyObject *name_sequence = NULL; - X509_NAME *name = NULL; - - ENTERING(x509_object_set_subject); - - if (!PyArg_ParseTuple(args, "O", &name_sequence)) - goto error; - - if (!PySequence_Check(name_sequence)) - lose_type_error("Inapropriate type"); - - if ((name = 
x509_object_helper_set_name(name_sequence)) == NULL) - goto error; - - if (!X509_set_subject_name(self->x509, name)) - lose("Unable to set subject name"); - - X509_NAME_free(name); - - Py_RETURN_NONE; - - error: - X509_NAME_free(name); - return NULL; -} - -static char x509_object_set_issuer__doc__[] = - "Set this certificate's issuer name.\n" - "\n" - "The \"name\" parameter should be in the same format as the return\n" - "value from the \"getIssuer\" method.\n" - ; - -static PyObject * -x509_object_set_issuer(x509_object *self, PyObject *args) -{ - PyObject *name_sequence = NULL; - X509_NAME *name = NULL; - - ENTERING(x509_object_set_issuer); - - if (!PyArg_ParseTuple(args, "O", &name_sequence)) - goto error; - - if (!PySequence_Check(name_sequence)) - lose_type_error("Inapropriate type"); - - if ((name = x509_object_helper_set_name(name_sequence)) == NULL) - goto error; - - if (!X509_set_issuer_name(self->x509, name)) - lose("Unable to set issuer name"); - - X509_NAME_free(name); - - Py_RETURN_NONE; - - error: - X509_NAME_free(name); - return NULL; -} - -static char x509_object_get_not_before__doc__[] = - "Return this certificate's \"notBefore\" value as a datetime.\n" - ; - -static PyObject * -x509_object_get_not_before (x509_object *self) -{ - ENTERING(x509_object_get_not_before); - return ASN1_TIME_to_Python(X509_get_notBefore(self->x509)); -} - -static char x509_object_get_not_after__doc__[] = - "Return this certificate's \"notAfter\" value as a datetime.\n" - ; - -static PyObject * -x509_object_get_not_after (x509_object *self) -{ - ENTERING(x509_object_get_not_after); - return ASN1_TIME_to_Python(X509_get_notAfter(self->x509)); -} - -static char x509_object_set_not_after__doc__[] = - "Set this certificate's \"notAfter\" value.\n" - "\n" - "The \"time\" parameter should be a datetime object.\n" - ; - -static PyObject * -x509_object_set_not_after (x509_object *self, PyObject *args) -{ - PyObject *o = NULL; - ASN1_TIME *t = NULL; - - 
  /*
   * NOTE(review): this excerpt opens inside x509_object_set_not_after();
   * its docstring and function header are above this chunk.
   */
  ENTERING(x509_object_set_not_after);

  if (!PyArg_ParseTuple(args, "O", &o))
    goto error;

  if ((t = Python_to_ASN1_TIME(o, 1)) == NULL)
    lose("Couldn't convert notAfter string");

  if (!X509_set_notAfter(self->x509, t))
    lose("Couldn't set notAfter");

  /*
   * X509_set_notAfter() copies the ASN1_TIME rather than taking
   * ownership, so we free our copy on both success and error paths.
   */
  ASN1_TIME_free(t);
  Py_RETURN_NONE;

 error:
  ASN1_TIME_free(t);
  return NULL;
}

static char x509_object_set_not_before__doc__[] =
  "Set this certificate's \"notBefore\" value.\n"
  "\n"
  "The \"time\" parameter should be a datetime object.\n"
  ;

/*
 * Mirror image of x509_object_set_not_after(): convert a Python
 * datetime to an ASN1_TIME and install it as the notBefore value.
 * The ASN1_TIME is freed on both paths for the same reason as above.
 */
static PyObject *
x509_object_set_not_before (x509_object *self, PyObject *args)
{
  PyObject *o = NULL;
  ASN1_TIME *t = NULL;

  ENTERING(x509_object_set_not_before);

  if (!PyArg_ParseTuple(args, "O", &o))
    goto error;

  if ((t = Python_to_ASN1_TIME(o, 1)) == NULL)
    lose("Couldn't convert notBefore string");

  if (!X509_set_notBefore(self->x509, t))
    lose("Couldn't set notBefore");

  ASN1_TIME_free(t);
  Py_RETURN_NONE;

 error:
  ASN1_TIME_free(t);
  return NULL;
}

static char x509_object_clear_extensions__doc__[] =
  "Clear all extensions attached to this certificate.\n"
  ;

/*
 * Repeatedly delete extension #0 until none remain.  X509_delete_ext()
 * only unlinks the extension from the certificate, so we must free each
 * one ourselves.
 */
static PyObject *
x509_object_clear_extensions(x509_object *self)
{
  X509_EXTENSION *ext;

  ENTERING(x509_object_clear_extensions);

  while ((ext = X509_delete_ext(self->x509, 0)) != NULL)
    X509_EXTENSION_free(ext);

  Py_RETURN_NONE;
}

/*
 * The remaining accessors in this group are thin wrappers: all of the
 * real extension logic lives in the shared extension_{get,set}_*()
 * helpers, reached via x509_object_extension_helper().
 */

static char x509_object_get_ski__doc__[] =
  "Return the Subject Key Identifier (SKI) value for this\n"
  "certificate, or None if the certificate has no SKI extension.\n"
  ;

static PyObject *
x509_object_get_ski(x509_object *self)
{
  return extension_get_ski(x509_object_extension_helper(self));
}

static char x509_object_set_ski__doc__[] =
  "Set the Subject Key Identifier (SKI) value for this certificate.\n"
  ;

static PyObject *
x509_object_set_ski(x509_object *self, PyObject *args)
{
  return extension_set_ski(x509_object_extension_helper(self), args);
}

static char x509_object_get_aki__doc__[] =
  "Return the Authority Key Identifier (AKI) keyid value for this\n"
  "certificate, or None if the certificate has no AKI extension or has an\n"
  "AKI extension with no keyIdentifier value.\n"
  ;

static PyObject *
x509_object_get_aki(x509_object *self)
{
  return extension_get_aki(x509_object_extension_helper(self));
}

static char x509_object_set_aki__doc__[] =
  "Set the Authority Key Identifier (AKI) value for this certificate.\n"
  "\n"
  "We only support the keyIdentifier method, as that's the only form\n"
  "which is legal for RPKI certificates.\n"
  ;

static PyObject *
x509_object_set_aki(x509_object *self, PyObject *args)
{
  return extension_set_aki(x509_object_extension_helper(self), args);
}

static char x509_object_get_key_usage__doc__[] =
  "Return a FrozenSet of strings representing the KeyUsage\n"
  "settings for this certificate, or None if the certificate has no\n"
  "KeyUsage extension. The bits have the same names as in RFC 5280.\n"
  ;

static PyObject *
x509_object_get_key_usage(x509_object *self)
{
  return extension_get_key_usage(x509_object_extension_helper(self));
}

static char x509_object_set_key_usage__doc__[] =
  "Set the KeyUsage extension for this certificate.\n"
  "\n"
  "Argument \"iterable\" should be an iterable object which returns zero or more\n"
  "strings naming bits to be enabled. The bits have the same names as in RFC 5280.\n"
  "\n"
  "Optional argument \"critical\" is a boolean indicating whether the extension\n"
  "should be marked as critical or not. RFC 5280 4.2.1.3 says this extension SHOULD\n"
  "be marked as critical when used, so the default is True.\n"
  ;

static PyObject *
x509_object_set_key_usage(x509_object *self, PyObject *args)
{
  return extension_set_key_usage(x509_object_extension_helper(self), args);
}

static char x509_object_get_eku__doc__[] =
  "Return a FrozenSet of object identifiers representing the\n"
  "ExtendedKeyUsage settings for this certificate, or None if\n"
  "the certificate has no ExtendedKeyUsage extension.\n"
  ;

static PyObject *
x509_object_get_eku(x509_object *self)
{
  return extension_get_eku(x509_object_extension_helper(self));
}

static char x509_object_set_eku__doc__[] =
  "Set the ExtendedKeyUsage extension for this certificate.\n"
  "\n"
  "Argument \"iterable\" should be an iterable object which returns one or more\n"
  "object identifiers.\n"
  "\n"
  "Optional argument \"critical\" is a boolean indicating whether the extension\n"
  "should be marked as critical or not. RFC 6487 4.8.5 says this extension\n"
  "MUST NOT be marked as non-critical when used, so the default is False.\n"
  ;

static PyObject *
x509_object_set_eku(x509_object *self, PyObject *args)
{
  return extension_set_eku(x509_object_extension_helper(self), args);
}
static char x509_object_get_rfc3779__doc__[] =
  "Return this certificate's RFC 3779 resources.\n"
  "\n"
  "Return value is a three-element tuple: the first element is the ASN\n"
  "resources, the second is the IPv4 resources, the third is the IPv6\n"
  "resources. Each of these elements in turn is either the string\n"
  "\"inherit\" or a tuple representing a set of ranges of ASNs or IP\n"
  "addresses.\n"
  "\n"
  "Each range is a two-element tuple, respectively representing the low\n"
  "and high ends of the range, inclusive. ASN ranges are represented by\n"
  "pairs of integers, IP address ranges are represented by pairs of\n"
  "IPAddress objects.\n"
  ;

/*
 * Decode the RFC 3779 ASIdentifiers and IPAddrBlocks extensions into
 * Python tuples.
 *
 * Reference-counting scheme: each sub-object is held in a local
 * (asn_result, ipv4_result, ipv6_result, range, range_b, range_e) until
 * ownership is transferred.  Py_BuildValue("(NN)") and
 * PyTuple_SET_ITEM() steal references, so the locals are reset to NULL
 * immediately after each transfer; the final Py_BuildValue uses "O"
 * format (which takes new references), so the unconditional
 * Py_XDECREFs in the shared cleanup block are balanced on both the
 * success and error paths.
 */
static PyObject *
x509_object_get_rfc3779(x509_object *self)
{
  PyObject *result = NULL;
  PyObject *asn_result = NULL;
  PyObject *ipv4_result = NULL;
  PyObject *ipv6_result = NULL;
  PyObject *range = NULL;
  PyObject *range_b = NULL;
  PyObject *range_e = NULL;
  ASIdentifiers *asid = NULL;
  IPAddrBlocks *addr = NULL;
  int i, j;

  ENTERING(x509_object_get_rfc3779);

  /* ASN resources: either "inherit" or a tuple of (low, high) pairs. */
  if ((asid = X509_get_ext_d2i(self->x509, NID_sbgp_autonomousSysNum, NULL, NULL)) != NULL &&
      asid->asnum != NULL) {
    switch (asid->asnum->type) {

    case ASIdentifierChoice_inherit:
      if ((asn_result = PyString_FromString("inherit")) == NULL)
        goto error;
      break;

    case ASIdentifierChoice_asIdsOrRanges:

      if ((asn_result = PyTuple_New(sk_ASIdOrRange_num(asid->asnum->u.asIdsOrRanges))) == NULL)
        goto error;

      for (i = 0; i < sk_ASIdOrRange_num(asid->asnum->u.asIdsOrRanges); i++) {
        ASIdOrRange *aor = sk_ASIdOrRange_value(asid->asnum->u.asIdsOrRanges, i);
        ASN1_INTEGER *b = NULL;
        ASN1_INTEGER *e = NULL;

        switch (aor->type) {

        case ASIdOrRange_id:
          /* Single ASN is reported as a degenerate (n, n) range. */
          b = e = aor->u.id;
          break;

        case ASIdOrRange_range:
          b = aor->u.range->min;
          e = aor->u.range->max;
          break;

        default:
          lose_type_error("Unexpected asIdsOrRanges type");
        }

        if (ASN1_STRING_type(b) == V_ASN1_NEG_INTEGER ||
            ASN1_STRING_type(e) == V_ASN1_NEG_INTEGER)
          lose_type_error("I don't believe in negative ASNs");

        if ((range_b = ASN1_INTEGER_to_PyLong(b)) == NULL ||
            (range_e = ASN1_INTEGER_to_PyLong(e)) == NULL ||
            (range = Py_BuildValue("(NN)", range_b, range_e)) == NULL)
          goto error;

        PyTuple_SET_ITEM(asn_result, i, range);
        range = range_b = range_e = NULL;
      }

      break;

    default:
      lose_type_error("Unexpected ASIdentifierChoice type");
    }
  }

  /* IP resources: one result object per address family. */
  if ((addr = X509_get_ext_d2i(self->x509, NID_sbgp_ipAddrBlock, NULL, NULL)) != NULL) {
    for (i = 0; i < sk_IPAddressFamily_num(addr); i++) {
      IPAddressFamily *f = sk_IPAddressFamily_value(addr, i);
      const struct ipaddress_version *ip_type = NULL;
      const unsigned int afi = v3_addr_get_afi(f);
      PyObject **result_obj = NULL;
      int addr_len = 0;

      switch (afi) {
      case IANA_AFI_IPV4: result_obj = &ipv4_result; ip_type = &ipaddress_version_4; break;
      case IANA_AFI_IPV6: result_obj = &ipv6_result; ip_type = &ipaddress_version_6; break;
      default: lose_type_error("Unknown AFI");
      }

      if (*result_obj != NULL)
        lose_type_error("Duplicate IPAddressFamily");

      if (f->addressFamily->length > 2)
        lose_type_error("Unsupported SAFI");

      switch (f->ipAddressChoice->type) {

      case IPAddressChoice_inherit:
        if ((*result_obj = PyString_FromString("inherit")) == NULL)
          goto error;
        continue;

      case IPAddressChoice_addressesOrRanges:
        break;

      default:
        lose_type_error("Unexpected IPAddressChoice type");
      }

      if ((*result_obj = PyTuple_New(sk_IPAddressOrRange_num(f->ipAddressChoice->u.addressesOrRanges))) == NULL)
        goto error;

      for (j = 0; j < sk_IPAddressOrRange_num(f->ipAddressChoice->u.addressesOrRanges); j++) {
        IPAddressOrRange *aor = sk_IPAddressOrRange_value(f->ipAddressChoice->u.addressesOrRanges, j);
        ipaddress_object *addr_b = NULL;
        ipaddress_object *addr_e = NULL;

        /*
         * Allocate bare IPAddress objects and fill their address
         * buffers in place from the encoded prefix/range.
         */
        if ((range_b = POW_IPAddress_Type.tp_alloc(&POW_IPAddress_Type, 0)) == NULL ||
            (range_e = POW_IPAddress_Type.tp_alloc(&POW_IPAddress_Type, 0)) == NULL)
          goto error;

        addr_b = (ipaddress_object *) range_b;
        addr_e = (ipaddress_object *) range_e;

        if ((addr_len = v3_addr_get_range(aor, afi, addr_b->address, addr_e->address,
                                          sizeof(addr_b->address))) == 0)
          lose_type_error("Couldn't unpack IP addresses from BIT STRINGs");

        addr_b->type = addr_e->type = ip_type;

        if ((range = Py_BuildValue("(NN)", range_b, range_e)) == NULL)
          goto error;

        PyTuple_SET_ITEM(*result_obj, j, range);
        range = range_b = range_e = NULL;
      }
    }
  }

  result = Py_BuildValue("(OOO)",
                         (asn_result == NULL ? Py_None : asn_result),
                         (ipv4_result == NULL ? Py_None : ipv4_result),
                         (ipv6_result == NULL ? Py_None : ipv6_result));

 error:                          /* Fall through */
  ASIdentifiers_free(asid);
  sk_IPAddressFamily_pop_free(addr, IPAddressFamily_free);
  Py_XDECREF(range_b);
  Py_XDECREF(range_e);
  Py_XDECREF(range);
  Py_XDECREF(asn_result);
  Py_XDECREF(ipv4_result);
  Py_XDECREF(ipv6_result);

  return result;
}

static char x509_object_set_rfc3779__doc__[] =
  "Set this certificate's RFC 3779 resources.\n"
  "\n"
  "This method takes three arguments: \"asn\", \"ipv4\", and \"ipv6\".\n"
  "\n"
  "Each of these arguments can be:\n"
  "\n"
  "* None, to omit this kind of resource;\n"
  "\n"
  "* The string \"inherit\", to specify RFC 3779 resource inheritance; or\n"
  "\n"
  "* An iterable object which returns range pairs of the appropriate type.\n"
  "\n"
  "Range pairs are as returned by the .getRFC3779() method.\n"
  ;

/*
 * Inverse of getRFC3779(): build ASIdentifiers and IPAddrBlocks
 * extensions from Python arguments and attach them to the certificate.
 *
 * Ownership notes: v3_asid_add_id_or_range() takes ownership of asid_b
 * and asid_e on success (hence the reset to NULL afterwards); the
 * asid/addr structures themselves are copied by X509_add1_ext_i2d(),
 * so the error/cleanup block frees them unconditionally.
 */
static PyObject *
x509_object_set_rfc3779(x509_object *self, PyObject *args, PyObject *kwds)
{
  static char *kwlist[] = {"asn", "ipv4", "ipv6", NULL};
  PyObject *asn_arg = Py_None;
  PyObject *ipv4_arg = Py_None;
  PyObject *ipv6_arg = Py_None;
  PyObject *iterator = NULL;
  PyObject *item = NULL;
  PyObject *fast = NULL;
  ASIdentifiers *asid = NULL;
  IPAddrBlocks *addr = NULL;
  ASN1_INTEGER *asid_b = NULL;
  ASN1_INTEGER *asid_e = NULL;
  ipaddress_object *addr_b = NULL;
  ipaddress_object *addr_e = NULL;
  int empty = 0;

  ENTERING(x509_object_set_rfc3779);

  if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOO", kwlist, &asn_arg, &ipv4_arg, &ipv6_arg))
    goto error;

  if (asn_arg != Py_None) {

    empty = 1;

    if ((asid = ASIdentifiers_new()) == NULL)
      lose_no_memory();

    if (PyString_Check(asn_arg)) {

      /* Only legal string value is "inherit". */
      if (strcmp(PyString_AsString(asn_arg), "inherit"))
        lose_type_error("ASID must be an iterable that returns range pairs, or the string \"inherit\"");

      if (!v3_asid_add_inherit(asid, V3_ASID_ASNUM))
        lose_no_memory();

      empty = 0;

    } else {

      if ((iterator = PyObject_GetIter(asn_arg)) == NULL)
        goto error;

      while ((item = PyIter_Next(iterator)) != NULL) {

        if ((fast = PySequence_Fast(item, "ASN range must be a sequence")) == NULL)
          goto error;

        if (PySequence_Fast_GET_SIZE(fast) != 2)
          lose_type_error("ASN range must be two-element sequence");

        if ((asid_b = PyLong_to_ASN1_INTEGER(PySequence_Fast_GET_ITEM(fast, 0))) == NULL)
          goto error;

        /*
         * If low == high, leave asid_e NULL so this is encoded as a
         * single id rather than a range; -1 from the comparison means
         * a Python exception is already set.
         */
        switch (PyObject_RichCompareBool(PySequence_Fast_GET_ITEM(fast, 0),
                                         PySequence_Fast_GET_ITEM(fast, 1), Py_EQ)) {
        case 0:
          if ((asid_e = PyLong_to_ASN1_INTEGER(PySequence_Fast_GET_ITEM(fast, 1))) == NULL)
            goto error;
          break;
        case 1:
          break;
        default:
          goto error;
        }

        if (!v3_asid_add_id_or_range(asid, V3_ASID_ASNUM, asid_b, asid_e))
          lose_openssl_error("Couldn't add range to ASID");

        asid_b = asid_e = NULL;
        Py_XDECREF(item);
        Py_XDECREF(fast);
        item = fast = NULL;
        empty = 0;
      }

      Py_XDECREF(iterator);
      iterator = NULL;
    }

    if (!empty && (!v3_asid_canonize(asid) ||
                   !X509_add1_ext_i2d(self->x509, NID_sbgp_autonomousSysNum,
                                      asid, 1, X509V3_ADD_REPLACE)))
      lose_openssl_error("Couldn't add ASID extension to certificate");
  }

  if (ipv4_arg != Py_None || ipv6_arg != Py_None) {
    int v;

    empty = 1;

    if ((addr = sk_IPAddressFamily_new_null()) == NULL)
      lose_no_memory();

    /*
     * Cheap trick to let us inline all of this instead of being
     * forced to use a separate function. Refactor, some day.
     *
     * NOTE(review): if only one of ipv4/ipv6 is supplied, the other
     * (still Py_None) falls through to PyObject_GetIter(Py_None) below
     * and raises TypeError -- confirm callers always pass both, or
     * whether a Py_None guard is wanted inside this loop.
     */

    for (v = 0; v < (int) (sizeof(ipaddress_versions)/sizeof(*ipaddress_versions)); v++) {
      const struct ipaddress_version *ip_type = ipaddress_versions[v];
      PyObject **argp;

      switch (ip_type->version) {
      case 4: argp = &ipv4_arg; break;
      case 6: argp = &ipv6_arg; break;
      default: continue;          /* Never happens */
      }

      if (PyString_Check(*argp)) {

        if (strcmp(PyString_AsString(*argp), "inherit"))
          lose_type_error("Argument must be an iterable that returns range pairs, or the string \"inherit\"");

        if (!v3_addr_add_inherit(addr, ip_type->afi, NULL))
          lose_no_memory();

        empty = 0;

      } else {

        if ((iterator = PyObject_GetIter(*argp)) == NULL)
          goto error;

        while ((item = PyIter_Next(iterator)) != NULL) {

          if ((fast = PySequence_Fast(item, "Address range must be a sequence")) == NULL)
            goto error;

          if (PySequence_Fast_GET_SIZE(fast) != 2 ||
              !POW_IPAddress_Check(PySequence_Fast_GET_ITEM(fast, 0)) ||
              !POW_IPAddress_Check(PySequence_Fast_GET_ITEM(fast, 1)))
            lose_type_error("Address range must be two-element sequence of IPAddress objects");

          /* Borrowed references: fast still owns these items. */
          addr_b = (ipaddress_object *) PySequence_Fast_GET_ITEM(fast, 0);
          addr_e = (ipaddress_object *) PySequence_Fast_GET_ITEM(fast, 1);

          if (addr_b->type != ip_type ||
              addr_e->type != ip_type ||
              memcmp(addr_b->address, addr_e->address, ip_type->length) > 0)
            lose("Address range must be two-element sequence of IPAddress objects in ascending order");

          if (!v3_addr_add_range(addr, ip_type->afi, NULL, addr_b->address, addr_e->address))
            lose_openssl_error("Couldn't add range to IPAddrBlock");

          Py_XDECREF(item);
          Py_XDECREF(fast);
          item = fast = NULL;
          addr_b = addr_e = NULL;
          empty = 0;
        }

        Py_XDECREF(iterator);
        iterator = NULL;
      }
    }

    if (!empty && (!v3_addr_canonize(addr) ||
                   !X509_add1_ext_i2d(self->x509, NID_sbgp_ipAddrBlock,
                                      addr, 1, X509V3_ADD_REPLACE)))
      lose_openssl_error("Couldn't add IPAddrBlock extension to certificate");
  }

  Py_RETURN_NONE;

 error:
  ASN1_INTEGER_free(asid_b);
  ASN1_INTEGER_free(asid_e);
  ASIdentifiers_free(asid);
  sk_IPAddressFamily_pop_free(addr, IPAddressFamily_free);
  Py_XDECREF(iterator);
  Py_XDECREF(item);
  Py_XDECREF(fast);
  return NULL;
}

static char x509_object_get_basic_constraints__doc__[] =
  "Return BasicConstraints for this certificate.\n"
  "\n"
  "If this certificate has no BasicConstraints extension, this method\n"
  "returns None.\n"
  "\n"
  "Otherwise, this method returns a two-element tuple. The first element\n"
  "of the tuple is a boolean representing the extension's cA value; the\n"
  "second element of the tuple is either an integer representing the\n"
  "pathLenConstraint value or None if there is no pathLenConstraint.\n"
  ;

/* Thin wrapper around the shared extension helper. */
static PyObject *
x509_object_get_basic_constraints(x509_object *self)
{
  return extension_get_basic_constraints(x509_object_extension_helper(self));
}
static char x509_object_set_basic_constraints__doc__[] =
  "Set BasicConstraints for this certificate.\n"
  "\n"
  "First argument \"ca\" is a boolean indicating whether the certificate\n"
  "is a CA certificate or not.\n"
  "\n"
  "Optional second argument \"pathLenConstraint\" is a non-negative integer\n"
  "specifying the pathLenConstraint value for this certificate; this value\n"
  "may only be set for CA certificates."
  "\n"
  "Optional third argument \"critical\" specifies whether the extension\n"
  "should be marked as critical. RFC 5280 4.2.1.9 requires that CA\n"
  "certificates mark this extension as critical, so the default is True.\n"
  ;

/* Thin wrapper around the shared extension helper. */
static PyObject *
x509_object_set_basic_constraints(x509_object *self, PyObject *args)
{
  return extension_set_basic_constraints(x509_object_extension_helper(self), args);
}

static char x509_object_get_sia__doc__[] =
  "Get SIA values for this certificate.\n"
  "\n"
  "If the certificate has no SIA extension, this method returns None.\n"
  "\n"
  "Otherwise, it returns a tuple containing three values:\n"
  "caRepository URIs, rpkiManifest URIs, and signedObject URIs.\n"
  "Each of these values is a tuple of strings, representing an ordered\n"
  "sequence of URIs. Any or all of these sequences may be empty.\n"
  "\n"
  "Any other accessMethods are ignored, as are any non-URI\n"
  "accessLocations.\n"
  ;

/* Thin wrapper around the shared extension helper. */
static PyObject *
x509_object_get_sia(x509_object *self)
{
  return extension_get_sia(x509_object_extension_helper(self));
}

static char x509_object_set_sia__doc__[] =
  "Set SIA values for this certificate. Takes three arguments:\n"
  "\"caRepository\", \"rpkiManifest\", and \"signedObject\".\n"
  "Each of these should be an iterable which returns URIs.\n"
  "\n"
  "None is acceptable as an alternate way of specifying an empty\n"
  "collection of URIs for a particular argument.\n"
  ;

/* Thin wrapper around the shared extension helper (keyword-capable). */
static PyObject *
x509_object_set_sia(x509_object *self, PyObject *args, PyObject *kwds)
{
  return extension_set_sia(x509_object_extension_helper(self), args, kwds);
}

static char x509_object_get_aia__doc__[] =
  "Get this certificate's AIA values.\n"
  "\n"
  "If the certificate has no AIA extension, this method returns None.\n"
  "\n"
  "Otherwise, this returns a sequence of caIssuers URIs.\n"
  "\n"
  "Any other accessMethods are ignored, as are any non-URI\n"
  "accessLocations.\n"
  ;

/*
 * Extract caIssuers URIs from the AIA extension.
 *
 * Two-pass algorithm: the first loop counts matching accessDescriptions
 * so the result tuple can be allocated at its exact size, the second
 * loop fills it in.  PyTuple_SET_ITEM steals the reference to each
 * string.
 */
static PyObject *
x509_object_get_aia(x509_object *self)
{
  AUTHORITY_INFO_ACCESS *ext = NULL;
  PyObject *result = NULL;
  const char *uri;
  PyObject *obj;
  int i, n = 0;

  ENTERING(x509_object_get_aia);

  if ((ext = X509_get_ext_d2i(self->x509, NID_info_access, NULL, NULL)) == NULL)
    Py_RETURN_NONE;

  for (i = 0; i < sk_ACCESS_DESCRIPTION_num(ext); i++) {
    ACCESS_DESCRIPTION *a = sk_ACCESS_DESCRIPTION_value(ext, i);
    if (a->location->type == GEN_URI &&
        OBJ_obj2nid(a->method) == NID_ad_ca_issuers)
      n++;
  }

  if (((result = PyTuple_New(n)) == NULL))
    goto error;

  n = 0;

  for (i = 0; i < sk_ACCESS_DESCRIPTION_num(ext); i++) {
    ACCESS_DESCRIPTION *a = sk_ACCESS_DESCRIPTION_value(ext, i);
    if (a->location->type == GEN_URI && OBJ_obj2nid(a->method) == NID_ad_ca_issuers) {
      uri = (char *) ASN1_STRING_data(a->location->d.uniformResourceIdentifier);
      if ((obj = PyString_FromString(uri)) == NULL)
        goto error;
      PyTuple_SET_ITEM(result, n++, obj);
    }
  }

  AUTHORITY_INFO_ACCESS_free(ext);
  return result;

 error:
  AUTHORITY_INFO_ACCESS_free(ext);
  Py_XDECREF(result);
  return NULL;
}

static char x509_object_set_aia__doc__[] =
  "Set AIA URIs for this certificate.\n"
  "\n"
  "Argument is a iterable which returns caIssuers URIs.\n"
  ;

/*
 * Replace the certificate's AIA extension with one caIssuers
 * accessDescription per URI supplied by the iterable.
 *
 * The ACCESS_DESCRIPTION under construction ("a") is reset to NULL once
 * pushed onto the stack, so the shared cleanup block frees at most the
 * one partially-built entry.  X509_add1_ext_i2d() copies the extension,
 * so "ext" is freed unconditionally.
 */
static PyObject *
x509_object_set_aia(x509_object *self, PyObject *args)
{
  AUTHORITY_INFO_ACCESS *ext = NULL;
  PyObject *caIssuers = NULL;
  PyObject *iterator = NULL;
  ASN1_OBJECT *oid = NULL;
  PyObject *item = NULL;
  ACCESS_DESCRIPTION *a = NULL;
  int ok = 0;
  Py_ssize_t urilen;
  char *uri;

  ENTERING(x509_object_set_aia);

  if (!PyArg_ParseTuple(args, "O", &caIssuers))
    goto error;

  if ((ext = AUTHORITY_INFO_ACCESS_new()) == NULL)
    lose_no_memory();

  if ((oid = OBJ_nid2obj(NID_ad_ca_issuers)) == NULL)
    lose_openssl_error("Couldn't find AIA accessMethod OID");

  if ((iterator = PyObject_GetIter(caIssuers)) == NULL)
    goto error;

  while ((item = PyIter_Next(iterator)) != NULL) {

    if (PyString_AsStringAndSize(item, &uri, &urilen) < 0)
      goto error;

    if ((a = ACCESS_DESCRIPTION_new()) == NULL ||
        (a->method = OBJ_dup(oid)) == NULL ||
        (a->location->d.uniformResourceIdentifier = ASN1_IA5STRING_new()) == NULL ||
        !ASN1_OCTET_STRING_set(a->location->d.uniformResourceIdentifier, (unsigned char *) uri, urilen))
      lose_no_memory();

    a->location->type = GEN_URI;

    if (!sk_ACCESS_DESCRIPTION_push(ext, a))
      lose_no_memory();

    a = NULL;                   /* Stack owns it now. */
    Py_XDECREF(item);
    item = NULL;
  }

  Py_XDECREF(iterator);
  iterator = NULL;

  if (!X509_add1_ext_i2d(self->x509, NID_info_access, ext, 0, X509V3_ADD_REPLACE))
    lose_openssl_error("Couldn't add AIA extension to certificate");

  ok = 1;

 error:
  AUTHORITY_INFO_ACCESS_free(ext);
  ACCESS_DESCRIPTION_free(a);
  Py_XDECREF(item);
  Py_XDECREF(iterator);

  if (ok)
    Py_RETURN_NONE;
  else
    return NULL;
}
static char x509_object_get_crldp__doc__[] =
  "Get CRL Distribution Point (CRLDP) values for this certificate.\n"
  "\n"
  "If the certificate has no CRLDP extension, this method returns None.\n"
  "\n"
  "Otherwise, it returns a sequence of URIs representing distributionPoint\n"
  "fullName values found in the first Distribution Point. Other CRLDP\n"
  "fields are ignored, as are subsequent Distribution Points and any non-URI\n"
  "fullName values.\n"
  ;

/*
 * Extract fullName URIs from the first DistributionPoint.  Same
 * two-pass count-then-fill pattern as x509_object_get_aia().
 * dp->distpoint->type == 0 selects the fullName form of
 * DistributionPointName.
 */
static PyObject *
x509_object_get_crldp(x509_object *self)
{
  CRL_DIST_POINTS *ext = NULL;
  DIST_POINT *dp = NULL;
  PyObject *result = NULL;
  const char *uri;
  PyObject *obj;
  int i, n = 0;

  ENTERING(x509_object_get_crldp);

  if ((ext = X509_get_ext_d2i(self->x509, NID_crl_distribution_points, NULL, NULL)) == NULL ||
      (dp = sk_DIST_POINT_value(ext, 0)) == NULL ||
      dp->distpoint == NULL ||
      dp->distpoint->type != 0)
    Py_RETURN_NONE;

  for (i = 0; i < sk_GENERAL_NAME_num(dp->distpoint->name.fullname); i++) {
    GENERAL_NAME *gn = sk_GENERAL_NAME_value(dp->distpoint->name.fullname, i);
    if (gn->type == GEN_URI)
      n++;
  }

  if (((result = PyTuple_New(n)) == NULL))
    goto error;

  n = 0;

  for (i = 0; i < sk_GENERAL_NAME_num(dp->distpoint->name.fullname); i++) {
    GENERAL_NAME *gn = sk_GENERAL_NAME_value(dp->distpoint->name.fullname, i);
    if (gn->type == GEN_URI) {
      uri = (char *) ASN1_STRING_data(gn->d.uniformResourceIdentifier);
      if ((obj = PyString_FromString(uri)) == NULL)
        goto error;
      PyTuple_SET_ITEM(result, n++, obj);
    }
  }

  sk_DIST_POINT_pop_free(ext, DIST_POINT_free);
  return result;

 error:
  sk_DIST_POINT_pop_free(ext, DIST_POINT_free);
  Py_XDECREF(result);
  return NULL;
}

static char x509_object_set_crldp__doc__[] =
  "Set CRLDP values for this certificate.\n"
  "\n"
  "Argument is a iterable which returns distributionPoint fullName URIs.\n"
  ;

/*
 * Build a CRLDP extension with a single DistributionPoint whose
 * fullName holds one GENERAL_NAME URI per item of the iterable.
 *
 * Ownership: "gn" is reset to NULL once pushed into the fullname stack,
 * and "dp" once pushed into "ext", so the shared cleanup block frees
 * only whatever is still half-built when an error occurs.
 */
static PyObject *
x509_object_set_crldp(x509_object *self, PyObject *args)
{
  CRL_DIST_POINTS *ext = NULL;
  PyObject *fullNames = NULL;
  PyObject *iterator = NULL;
  PyObject *item = NULL;
  DIST_POINT *dp = NULL;
  GENERAL_NAME *gn = NULL;
  Py_ssize_t urilen;
  char *uri;
  int ok = 0;

  ENTERING(x509_object_set_crldp);

  if (!PyArg_ParseTuple(args, "O", &fullNames))
    goto error;

  if ((ext = sk_DIST_POINT_new_null()) == NULL ||
      (dp = DIST_POINT_new()) == NULL ||
      (dp->distpoint = DIST_POINT_NAME_new()) == NULL ||
      (dp->distpoint->name.fullname = sk_GENERAL_NAME_new_null()) == NULL)
    lose_no_memory();

  dp->distpoint->type = 0;      /* fullName form */

  if ((iterator = PyObject_GetIter(fullNames)) == NULL)
    goto error;

  while ((item = PyIter_Next(iterator)) != NULL) {

    if (PyString_AsStringAndSize(item, &uri, &urilen) < 0)
      goto error;

    if ((gn = GENERAL_NAME_new()) == NULL ||
        (gn->d.uniformResourceIdentifier = ASN1_IA5STRING_new()) == NULL ||
        !ASN1_OCTET_STRING_set(gn->d.uniformResourceIdentifier, (unsigned char *) uri, urilen))
      lose_no_memory();

    gn->type = GEN_URI;

    if (!sk_GENERAL_NAME_push(dp->distpoint->name.fullname, gn))
      lose_no_memory();

    gn = NULL;
    Py_XDECREF(item);
    item = NULL;
  }

  Py_XDECREF(iterator);
  iterator = NULL;

  if (!sk_DIST_POINT_push(ext, dp))
    lose_no_memory();

  dp = NULL;

  if (!X509_add1_ext_i2d(self->x509, NID_crl_distribution_points, ext, 0, X509V3_ADD_REPLACE))
    lose_openssl_error("Couldn't add CRLDP extension to certificate");

  ok = 1;

 error:
  sk_DIST_POINT_pop_free(ext, DIST_POINT_free);
  DIST_POINT_free(dp);
  GENERAL_NAME_free(gn);
  Py_XDECREF(item);
  Py_XDECREF(iterator);

  if (ok)
    Py_RETURN_NONE;
  else
    return NULL;
}

static char x509_object_get_certificate_policies__doc__[] =
  "Get Certificate Policies values for this certificate.\n"
  "\n"
  "If this certificate has no Certificate Policies extension, this method\n"
  "returns None.\n"
  "\n"
  "Otherwise, this method returns a sequence of Object Identifiers.\n"
  "\n"
  "Policy qualifiers, if any, are ignored.\n"
  ;

/* Return the policy OIDs as a tuple of strings, dropping qualifiers. */
static PyObject *
x509_object_get_certificate_policies(x509_object *self)
{
  CERTIFICATEPOLICIES *ext = NULL;
  PyObject *result = NULL;
  PyObject *obj;
  int i;

  ENTERING(x509_object_get_certificate_policies);

  if ((ext = X509_get_ext_d2i(self->x509, NID_certificate_policies, NULL, NULL)) == NULL)
    Py_RETURN_NONE;

  if (((result = PyTuple_New(sk_POLICYINFO_num(ext))) == NULL))
    goto error;

  for (i = 0; i < sk_POLICYINFO_num(ext); i++) {
    POLICYINFO *p = sk_POLICYINFO_value(ext, i);

    if ((obj = ASN1_OBJECT_to_PyString(p->policyid)) == NULL)
      goto error;

    PyTuple_SET_ITEM(result, i, obj);
  }

  sk_POLICYINFO_pop_free(ext, POLICYINFO_free);
  return result;

 error:
  sk_POLICYINFO_pop_free(ext, POLICYINFO_free);
  Py_XDECREF(result);
  return NULL;
}

static char x509_object_set_certificate_policies__doc__[] =
  "Set Certificate Policies for this certificate.\n"
  "\n"
  "Argument is a iterable which returns policy OIDs.\n"
  "\n"
  "Policy qualifier are not supported.\n"
  "\n"
  "The extension will be marked as critical, since there's not much point\n"
  "in using this extension without making it critical.\n"
  ;

/*
 * Build a CertificatePolicies extension from an iterable of dotted-
 * decimal OID strings.  Note the critical flag (fourth argument 1) in
 * the X509_add1_ext_i2d() call, per the docstring above.
 */
static PyObject *
x509_object_set_certificate_policies(x509_object *self, PyObject *args)
{
  CERTIFICATEPOLICIES *ext = NULL;
  PyObject *policies = NULL;
  PyObject *iterator = NULL;
  POLICYINFO *pol = NULL;
  PyObject *item = NULL;
  const char *oid;
  int ok = 0;

  ENTERING(x509_object_set_certificate_policies);

  if (!PyArg_ParseTuple(args, "O", &policies))
    goto error;

  if ((ext = sk_POLICYINFO_new_null()) == NULL)
    lose_no_memory();

  if ((iterator = PyObject_GetIter(policies)) == NULL)
    goto error;

  while ((item = PyIter_Next(iterator)) != NULL) {

    if ((oid = PyString_AsString(item)) == NULL)
      goto error;

    if ((pol = POLICYINFO_new()) == NULL)
      lose_no_memory();

    if ((pol->policyid = OBJ_txt2obj(oid, 1)) == NULL)
      lose("Couldn't parse OID");

    if (!sk_POLICYINFO_push(ext, pol))
      lose_no_memory();

    pol = NULL;                 /* Stack owns it now. */
    Py_XDECREF(item);
    item = NULL;
  }

  Py_XDECREF(iterator);
  iterator = NULL;

  if (!X509_add1_ext_i2d(self->x509, NID_certificate_policies, ext, 1, X509V3_ADD_REPLACE))
    lose_openssl_error("Couldn't add CERTIFICATE_POLICIES extension to certificate");

  ok = 1;

 error:
  POLICYINFO_free(pol);
  sk_POLICYINFO_pop_free(ext, POLICYINFO_free);
  Py_XDECREF(item);
  Py_XDECREF(iterator);

  if (ok)
    Py_RETURN_NONE;
  else
    return NULL;
}

/*
 * May want EKU handlers eventually, skip for now.
 */

static char x509_object_pprint__doc__[] =
  "Return a pretty-printed rendition of this certificate.\n"
  ;

/*
 * Render the certificate through X509_print() into a memory BIO and
 * hand the accumulated text back as a Python string.
 */
static PyObject *
x509_object_pprint(x509_object *self)
{
  PyObject *result = NULL;
  BIO *bio = NULL;

  ENTERING(x509_object_pprint);

  if ((bio = BIO_new(BIO_s_mem())) == NULL)
    lose_no_memory();

  if (!X509_print(bio, self->x509))
    lose_openssl_error("Unable to pretty-print certificate");

  result = BIO_to_PyString_helper(bio);

 error:                          /* Fall through */
  BIO_free(bio);
  return result;
}
/*
 * Method table for the X509 class.  Define_Method()/Define_Class_Method()
 * pair each Python-visible name with its C implementation, its calling
 * convention, and the matching *__doc__ string.
 */
static struct PyMethodDef x509_object_methods[] = {
  Define_Method(pemWrite,               x509_object_pem_write,                  METH_NOARGS),
  Define_Method(derWrite,               x509_object_der_write,                  METH_NOARGS),
  Define_Method(sign,                   x509_object_sign,                       METH_VARARGS),
  Define_Method(getPublicKey,           x509_object_get_public_key,             METH_NOARGS),
  Define_Method(setPublicKey,           x509_object_set_public_key,             METH_VARARGS),
  Define_Method(getVersion,             x509_object_get_version,                METH_NOARGS),
  Define_Method(setVersion,             x509_object_set_version,                METH_VARARGS),
  Define_Method(getSerial,              x509_object_get_serial,                 METH_NOARGS),
  Define_Method(setSerial,              x509_object_set_serial,                 METH_VARARGS),
  Define_Method(getIssuer,              x509_object_get_issuer,                 METH_VARARGS),
  Define_Method(setIssuer,              x509_object_set_issuer,                 METH_VARARGS),
  Define_Method(getSubject,             x509_object_get_subject,                METH_VARARGS),
  Define_Method(setSubject,             x509_object_set_subject,                METH_VARARGS),
  Define_Method(getNotBefore,           x509_object_get_not_before,             METH_NOARGS),
  Define_Method(getNotAfter,            x509_object_get_not_after,              METH_NOARGS),
  Define_Method(setNotAfter,            x509_object_set_not_after,              METH_VARARGS),
  Define_Method(setNotBefore,           x509_object_set_not_before,             METH_VARARGS),
  Define_Method(clearExtensions,        x509_object_clear_extensions,           METH_NOARGS),
  Define_Method(pprint,                 x509_object_pprint,                     METH_NOARGS),
  Define_Method(getSKI,                 x509_object_get_ski,                    METH_NOARGS),
  Define_Method(setSKI,                 x509_object_set_ski,                    METH_VARARGS),
  Define_Method(getAKI,                 x509_object_get_aki,                    METH_NOARGS),
  Define_Method(setAKI,                 x509_object_set_aki,                    METH_VARARGS),
  Define_Method(getKeyUsage,            x509_object_get_key_usage,              METH_NOARGS),
  Define_Method(setKeyUsage,            x509_object_set_key_usage,              METH_VARARGS),
  Define_Method(getEKU,                 x509_object_get_eku,                    METH_NOARGS),
  Define_Method(setEKU,                 x509_object_set_eku,                    METH_VARARGS),
  Define_Method(getRFC3779,             x509_object_get_rfc3779,                METH_NOARGS),
  Define_Method(setRFC3779,             x509_object_set_rfc3779,                METH_KEYWORDS),
  Define_Method(getBasicConstraints,    x509_object_get_basic_constraints,      METH_NOARGS),
  Define_Method(setBasicConstraints,    x509_object_set_basic_constraints,      METH_VARARGS),
  Define_Method(getSIA,                 x509_object_get_sia,                    METH_NOARGS),
  Define_Method(setSIA,                 x509_object_set_sia,                    METH_KEYWORDS),
  Define_Method(getAIA,                 x509_object_get_aia,                    METH_NOARGS),
  Define_Method(setAIA,                 x509_object_set_aia,                    METH_VARARGS),
  Define_Method(getCRLDP,               x509_object_get_crldp,                  METH_NOARGS),
  Define_Method(setCRLDP,               x509_object_set_crldp,                  METH_VARARGS),
  Define_Method(getCertificatePolicies, x509_object_get_certificate_policies,   METH_NOARGS),
  Define_Method(setCertificatePolicies, x509_object_set_certificate_policies,   METH_VARARGS),
  Define_Class_Method(pemRead,          x509_object_pem_read,                   METH_VARARGS),
  Define_Class_Method(pemReadFile,      x509_object_pem_read_file,              METH_VARARGS),
  Define_Class_Method(derRead,          x509_object_der_read,                   METH_VARARGS),
  Define_Class_Method(derReadFile,      x509_object_der_read_file,              METH_VARARGS),
  {NULL}                        /* Sentinel */
};

static char POW_X509_Type__doc__[] =
  "This class represents an X.509 certificate.\n"
  "\n"
  LAME_DISCLAIMER_IN_ALL_CLASS_DOCUMENTATION
  ;

/*
 * Python 2 type object for the X509 class.  Only dealloc, doc, methods,
 * and tp_new are non-default; Py_TPFLAGS_BASETYPE allows Python-level
 * subclassing.
 */
static PyTypeObject POW_X509_Type = {
  PyObject_HEAD_INIT(0)
  0,                                        /* ob_size */
  "rpki.POW.X509",                          /* tp_name */
  sizeof(x509_object),                      /* tp_basicsize */
  0,                                        /* tp_itemsize */
  (destructor)x509_object_dealloc,          /* tp_dealloc */
  0,                                        /* tp_print */
  0,                                        /* tp_getattr */
  0,                                        /* tp_setattr */
  0,                                        /* tp_compare */
  0,                                        /* tp_repr */
  0,                                        /* tp_as_number */
  0,                                        /* tp_as_sequence */
  0,                                        /* tp_as_mapping */
  0,                                        /* tp_hash */
  0,                                        /* tp_call */
  0,                                        /* tp_str */
  0,                                        /* tp_getattro */
  0,                                        /* tp_setattro */
  0,                                        /* tp_as_buffer */
  Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
  POW_X509_Type__doc__,                     /* tp_doc */
  0,                                        /* tp_traverse */
  0,                                        /* tp_clear */
  0,                                        /* tp_richcompare */
  0,                                        /* tp_weaklistoffset */
  0,                                        /* tp_iter */
  0,                                        /* tp_iternext */
  x509_object_methods,                      /* tp_methods */
  0,                                        /* tp_members */
  0,                                        /* tp_getset */
  0,                                        /* tp_base */
  0,                                        /* tp_dict */
  0,                                        /* tp_descr_get */
  0,                                        /* tp_descr_set */
  0,                                        /* tp_dictoffset */
  0,                                        /* tp_init */
  0,                                        /* tp_alloc */
  x509_object_new,                          /* tp_new */
};
/*
 * X509Store object.
 */

/*
 * Allocate an X509Store: a wrapper around an OpenSSL X509_STORE plus a
 * Python callable (ctxclass) used by .verify() to manufacture
 * verification contexts.  The default context class is X509StoreCTX.
 */
static PyObject *
x509_store_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds)
{
  x509_store_object *self = NULL;

  ENTERING(x509_store_object_new);

  if ((self = (x509_store_object *) type->tp_alloc(type, 0)) == NULL)
    goto error;

  if ((self->store = X509_STORE_new()) == NULL)
    lose_no_memory();

  self->ctxclass = (PyObject *) &POW_X509StoreCTX_Type;
  Py_XINCREF(self->ctxclass);
  return (PyObject *) self;

 error:
  Py_XDECREF(self);
  return NULL;
}

/* Release the underlying store, the context-class reference, and self. */
static void
x509_store_object_dealloc(x509_store_object *self)
{
  ENTERING(x509_store_object_dealloc);
  X509_STORE_free(self->store);
  Py_XDECREF(self->ctxclass);
  self->ob_type->tp_free((PyObject*) self);
}

static char x509_store_object_add_trust__doc__[] =
  "Add a trusted certificate to this certificate store object.\n"
  "\n"
  "The \"certificate\" parameter should be an instance of the X509 class.\n"
  ;

/*
 * NOTE(review): the return value of X509_STORE_add_cert() is ignored
 * here, so failures (e.g. duplicates) are silently dropped -- confirm
 * this is intentional.
 */
static PyObject *
x509_store_object_add_trust(x509_store_object *self, PyObject *args)
{
  x509_object *x509 = NULL;

  ENTERING(x509_store_object_add_trust);

  if (!PyArg_ParseTuple(args, "O!", &POW_X509_Type, &x509))
    goto error;

  X509_STORE_add_cert(self->store, x509->x509);

  Py_RETURN_NONE;

 error:

  return NULL;
}

static char x509_store_object_add_crl__doc__[] =
  "Add a CRL to this certificate store object.\n"
  "\n"
  "The \"crl\" parameter should be an instance of the CRL class.\n"
  ;

/*
 * Same pattern as add_trust(); the X509_STORE_add_crl() return value is
 * likewise ignored.
 */
static PyObject *
x509_store_object_add_crl(x509_store_object *self, PyObject *args)
{
  crl_object *crl = NULL;

  ENTERING(x509_store_object_add_crl);

  if (!PyArg_ParseTuple(args, "O!", &POW_CRL_Type, &crl))
    goto error;

  X509_STORE_add_crl(self->store, crl->crl);

  Py_RETURN_NONE;

 error:

  return NULL;
}

static char x509_store_object_set_flags__doc__[] =
  "Set validation flags for this X509Store.\n"
  "\n"
  "Argument is an integer containing bit flags to set.\n"
  ;

/* Set X509_V_FLAG_* bits on the store's verification parameters. */
static PyObject *
x509_store_object_set_flags (x509_store_object *self, PyObject *args)
{
  unsigned long flags;

  if (!PyArg_ParseTuple(args, "k", &flags))
    goto error;

  if (!X509_VERIFY_PARAM_set_flags(self->store->param, flags))
    lose_openssl_error("X509_VERIFY_PARAM_set_flags() failed");

  Py_RETURN_NONE;

 error:
  return NULL;
}

static char x509_store_object_clear_flags__doc__[] =
  "Clear validation flags for this X509Store.\n"
  "\n"
  "Argument is an integer containing bit flags to clear.\n"
  ;

/* Inverse of set_flags(): clear the given X509_V_FLAG_* bits. */
static PyObject *
x509_store_object_clear_flags (x509_store_object *self, PyObject *args)
{
  unsigned long flags;

  if (!PyArg_ParseTuple(args, "k", &flags))
    goto error;

  if (!X509_VERIFY_PARAM_clear_flags(self->store->param, flags))
    lose_openssl_error("X509_VERIFY_PARAM_clear_flags() failed");

  Py_RETURN_NONE;

 error:
  return NULL;
}
The default is X509StoreCTX.\n" - ; - -static PyObject * -x509_store_object_set_context_class (x509_store_object *self, PyObject *args) -{ - PyObject *ctxclass = (PyObject *) &POW_X509StoreCTX_Type; - - if (!PyArg_ParseTuple(args, "|O", &ctxclass)) - goto error; - - if (!PyCallable_Check(ctxclass)) - lose("Context class must be callable"); - - Py_XDECREF(self->ctxclass); - self->ctxclass = ctxclass; - Py_XINCREF(self->ctxclass); - - Py_RETURN_NONE; - - error: - return NULL; -} - -static char x509_store_object_verify__doc__[] = - "Verify an X509 certificate object using this certificate store.\n" - "\n" - "Optional second argument is an iterable that supplies untrusted certificates\n" - "to be considered when building a chain to the trust anchor.\n" - "\n" - "This method returns an instance of the store's verification context class.\n" - ; - -static PyObject * -x509_store_object_verify(x509_store_object *self, PyObject *args) -{ - x509_store_ctx_object *ctx = NULL; - STACK_OF(X509) *stack = NULL; - x509_object *x509 = NULL; - PyObject *chain = Py_None; - int ok; - - if (!PyArg_ParseTuple(args, "O!|O", &POW_X509_Type, &x509, &chain)) - goto error; - - if ((ctx = (x509_store_ctx_object *) PyObject_CallFunctionObjArgs(self->ctxclass, self, NULL)) == NULL) - goto error; - - if (!POW_X509StoreCTX_Check(ctx)) - lose_type_error("Returned context is not a X509StoreCTX"); - - if (ctx->ctx == NULL) - lose("Uninitialized X509StoreCTX"); - - if (chain != Py_None && (stack = x509_helper_iterable_to_stack(chain)) == NULL) - goto error; - - Py_XINCREF(x509); - Py_XINCREF(chain); - X509_STORE_CTX_set_cert(ctx->ctx, x509->x509); - X509_STORE_CTX_set_chain(ctx->ctx, stack); - - ok = X509_verify_cert(ctx->ctx); - - X509_STORE_CTX_set_chain(ctx->ctx, NULL); - X509_STORE_CTX_set_cert(ctx->ctx, NULL); - Py_XDECREF(chain); - Py_XDECREF(x509); - - sk_X509_free(stack); - - if (PyErr_Occurred()) - goto error; - - if (ok < 0) - lose_openssl_error("X509_verify_cert() raised an exception"); - - 
return (PyObject *) ctx; - - error: /* fall through */ - Py_XDECREF(ctx); - return NULL; -} - -static struct PyMethodDef x509_store_object_methods[] = { - Define_Method(addTrust, x509_store_object_add_trust, METH_VARARGS), - Define_Method(addCrl, x509_store_object_add_crl, METH_VARARGS), - Define_Method(setContextClass,x509_store_object_set_context_class, METH_VARARGS), - Define_Method(setFlags, x509_store_object_set_flags, METH_VARARGS), - Define_Method(clearFlags, x509_store_object_clear_flags, METH_VARARGS), - Define_Method(verify, x509_store_object_verify, METH_VARARGS), - {NULL} -}; - -static char POW_X509Store_Type__doc__[] = - "This class holds the OpenSSL certificate store objects used in CMS\n" - "and certificate verification.\n" - "\n" - LAME_DISCLAIMER_IN_ALL_CLASS_DOCUMENTATION - ; - -static PyTypeObject POW_X509Store_Type = { - PyObject_HEAD_INIT(0) - 0, /* ob_size */ - "rpki.POW.X509Store", /* tp_name */ - sizeof(x509_store_object), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)x509_store_object_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - POW_X509Store_Type__doc__, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - x509_store_object_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - x509_store_object_new, /* tp_new */ -}; - - - -/* - * X509StoreCTX object. 
- */ - -static int -x509_store_ctx_object_verify_cb(int ok, X509_STORE_CTX *ctx) -{ - static char method_name[] = "verify_callback"; - PyObject *self = X509_STORE_CTX_get_ex_data(ctx, x509_store_ctx_ex_data_idx); - PyObject *result = NULL; - - if (self == NULL || !PyObject_HasAttrString(self, method_name)) - return ok; - - if ((result = PyObject_CallMethod(self, method_name, "i", ok)) == NULL) - return -1; - - ok = PyObject_IsTrue(result); - Py_XDECREF(result); - return ok; -} - -static PyObject * -x509_store_ctx_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds) -{ - x509_store_ctx_object *self = NULL; - - ENTERING(x509_store_ctx_object_new); - - if ((self = (x509_store_ctx_object *) type->tp_alloc(type, 0)) == NULL) - goto error; - - self->ctx = NULL; - self->store = NULL; - return (PyObject *) self; - - error: - Py_XDECREF(self); - return NULL; -} - -static int -x509_store_ctx_object_init(x509_store_ctx_object *self, PyObject *args, GCC_UNUSED PyObject *kwds) -{ - x509_store_object *store = NULL; - - if (!PyArg_ParseTuple(args, "|O!", &POW_X509Store_Type, &store)) - goto error; - - if ((self->ctx = X509_STORE_CTX_new()) == NULL) - lose_no_memory(); - - if (!X509_STORE_CTX_init(self->ctx, store ? 
store->store : NULL, NULL, NULL)) - lose_openssl_error("Couldn't initialize X509_STORE_CTX"); - - if (!X509_STORE_CTX_set_ex_data(self->ctx, x509_store_ctx_ex_data_idx, self)) - lose_openssl_error("Couldn't set X509_STORE_CTX ex_data"); - - Py_XDECREF(self->store); - self->store = store; - Py_XINCREF(self->store); - - X509_VERIFY_PARAM_set_flags(self->ctx->param, X509_V_FLAG_X509_STRICT); - X509_STORE_CTX_set_verify_cb(self->ctx, x509_store_ctx_object_verify_cb); - return 0; - - error: - return -1; -} - -static void -x509_store_ctx_object_dealloc(x509_store_ctx_object *self) -{ - ENTERING(x509_store_ctx_object_dealloc); - X509_STORE_CTX_free(self->ctx); - Py_XDECREF(self->store); - self->ob_type->tp_free((PyObject*) self); -} - -static PyObject * -x509_store_ctx_object_get_store (x509_store_ctx_object *self, GCC_UNUSED void *closure) -{ - return Py_BuildValue("O", self->store == NULL ? Py_None : (PyObject *) self->store); -} - -static char x509_store_ctx_object_get_error__doc__[] = - "Extract verification error code from this X509StoreCTX.\n" - ; - -static PyObject* -x509_store_ctx_object_get_error (x509_store_ctx_object *self) -{ - return Py_BuildValue("i", X509_STORE_CTX_get_error(self->ctx)); -} - -static char x509_store_ctx_object_get_error_string__doc__[] = - "Extract verification error code from this X509StoreCTX.\n" - ; - -static PyObject* -x509_store_ctx_object_get_error_string (x509_store_ctx_object *self) -{ - return Py_BuildValue("s", X509_verify_cert_error_string(X509_STORE_CTX_get_error(self->ctx))); -} - -static char x509_store_ctx_object_get_error_depth__doc__[] = - "Extract verification error depth from this X509StoreCTX.\n" - ; - -static PyObject* -x509_store_ctx_object_get_error_depth (x509_store_ctx_object *self) -{ - return Py_BuildValue("i", X509_STORE_CTX_get_error_depth(self->ctx)); -} - -static char x509_store_ctx_object_get_current_certificate__doc__[] = - "Extract the certificate which caused the current validation error,\n" - "or None if 
no certificate is relevant.\n" - ; - -static PyObject * -x509_store_ctx_object_get_current_certificate (x509_store_ctx_object *self) -{ - X509 *x = X509_STORE_CTX_get_current_cert(self->ctx); - x509_object *obj = NULL; - - if (x == NULL) - Py_RETURN_NONE; - - if ((x = X509_dup(x)) == NULL) - lose_no_memory(); - - if ((obj = x509_object_new_helper(NULL, x)) == NULL) - goto error; - - return (PyObject *) obj; - - error: - Py_XDECREF(obj); - X509_free(x); - return NULL; -} - -static char x509_store_ctx_object_get_chain__doc__[] = - "Extract certificate chain from X509StoreCTX. If validation\n" - "completed succesfully, this is the complete validation chain;\n" - "otherwise, the returned chain may be invalid or incomplete.\n" - ; - -static PyObject * -x509_store_ctx_object_get_chain (x509_store_ctx_object *self) -{ - STACK_OF(X509) *chain = NULL; - PyObject *result = NULL; - - if ((chain = X509_STORE_CTX_get1_chain(self->ctx)) == NULL) - lose_openssl_error("X509_STORE_CTX_get1_chain() failed"); - - result = stack_to_tuple_helper(CHECKED_PTR_OF(STACK_OF(X509), chain), - stack_to_tuple_helper_get_x509); - - error: /* fall through */ - sk_X509_pop_free(chain, X509_free); - return result; -} - -/* - * For some reason, there are no methods for the policy mechanism for - * X509_STORE, only for X509_STORE_CTX. Presumably we can whack these - * anyway using the X509_VERIFY_PARAM_*() calls, the question is - * whether there's a good reason for this omission. - * - * For the moment, I'm just going to leave the policy stuff - * unimplemented, until we figure out whether it belongs in X509Store - * or X509StoreCTX. 
- */ - -#define IMPLEMENT_X509StoreCTX_POLICY 0 - -#if IMPLEMENT_X509StoreCTX_POLICY - -static char x509_store_ctx_object_set_policy__doc__[] = - "Set this X509StoreCTX to require a specified certificate policy.\n" - ; - -static PyObject* -x509_store_ctx_object_set_policy (x509_store_ctx_object *self, PyObject *args) -{ - ASN1_OBJECT *policy = NULL; - char *oid = NULL; - - if (!PyArg_ParseTuple(args, "s", &oid)) - goto error; - - if ((policy = OBJ_txt2obj(oid, 1)) == NULL) - lose_openssl_error("Couldn't parse OID"); - - if (!X509_VERIFY_PARAM_set_flags(self->ctx->param, X509_V_FLAG_POLICY_CHECK | X509_V_FLAG_EXPLICIT_POLICY)) - lose_openssl_error("Couldn't set policy flags"); - - if (!X509_VERIFY_PARAM_add0_policy(self->ctx->param, policy)) - lose_openssl_error("Couldn't set policy"); - - Py_RETURN_NONE; - - error: - ASN1_OBJECT_free(policy); - return NULL; -} - -#endif /* IMPLEMENT_X509StoreCTX_POLICY */ - -/* - * See (omnibus) man page for X509_STORE_CTX_get_error() for other - * query methods we might want to expose. Someday we might want to - * support X509_V_FLAG_USE_CHECK_TIME too. - */ - -static struct PyMethodDef x509_store_ctx_object_methods[] = { - Define_Method(getError, x509_store_ctx_object_get_error, METH_NOARGS), - Define_Method(getErrorString, x509_store_ctx_object_get_error_string, METH_NOARGS), - Define_Method(getErrorDepth, x509_store_ctx_object_get_error_depth, METH_NOARGS), - Define_Method(getCurrentCertificate, x509_store_ctx_object_get_current_certificate, METH_NOARGS), - Define_Method(getChain, x509_store_ctx_object_get_chain, METH_NOARGS), - -#if IMPLEMENT_X509StoreCTX_POLICY - Define_Method(setPolicy, x509_store_ctx_object_set_policy, METH_VARARGS), -#endif - {NULL} -}; - -static PyGetSetDef x509_store_ctx_object_getsetters[] = { - {"store", (getter) x509_store_ctx_object_get_store}, - {NULL} -}; - -static char POW_X509StoreCTX_Type__doc__[] = - "This class holds the state of an OpenSSL certificate verification\n" - "operation. 
Ordinarily, the user will never have cause to instantiate\n" - "this class directly, instead, an object of this class will be returned\n" - "by X509Store.verify().\n" - "\n" - "If you need to see OpenSSL's verification callbacks, you can do so\n" - "by subclassing X509StoreCTX and attaching your subclass to an X509Store\n" - "object using X509Store.setContextClass(). Your subclass should provide\n" - "a .verify_callback() method, wich should expect to receive one argument:\n" - "the integer \"ok\" value passed by OpenSSL's verification callbacks.\n" - "\n" - "The return value from your .verify_callback() method will be is interpreted\n" - "as a boolean value: anything which evaluates to True will be result in a\n" - "return value of 1 to OpenSSL, while anything which evaluates to False will\n" - "result in a return value of 0 to OpenSSL.\n" - ; - -static PyTypeObject POW_X509StoreCTX_Type = { - PyObject_HEAD_INIT(0) - 0, /* ob_size */ - "rpki.POW.X509StoreCTX", /* tp_name */ - sizeof(x509_store_ctx_object), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)x509_store_ctx_object_dealloc,/* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - POW_X509StoreCTX_Type__doc__, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - x509_store_ctx_object_methods, /* tp_methods */ - 0, /* tp_members */ - x509_store_ctx_object_getsetters, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc) x509_store_ctx_object_init, /* tp_init */ - 0, /* tp_alloc */ - 
x509_store_ctx_object_new, /* tp_new */ -}; - - - -/* - * CRL object. - */ - -static crl_object * -crl_object_new_helper(PyTypeObject *type, X509_CRL *crl) -{ - crl_object *self = NULL; - - if (type == NULL) - type = &POW_CRL_Type; - - if ((self = (crl_object *) type->tp_alloc(type, 0)) == NULL) - return NULL; - - self->crl = crl; - return self; -} - -static PyObject * -crl_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds) -{ - crl_object *self = NULL; - X509_CRL *crl = NULL; - - ENTERING(crl_object_new); - - if ((crl = X509_CRL_new()) == NULL) - lose_no_memory(); - - if ((self = crl_object_new_helper(type, crl)) == NULL) - goto error; - - return (PyObject *) self; - - error: - X509_CRL_free(crl); - return NULL; -} - -static void -crl_object_dealloc(crl_object *self) -{ - ENTERING(crl_object_dealloc); - X509_CRL_free(self->crl); - self->ob_type->tp_free((PyObject*) self); -} - -static PyObject * -crl_object_pem_read_helper(PyTypeObject *type, BIO *bio) -{ - crl_object *self; - - ENTERING(crl_object_pem_read_helper); - - if ((self = (crl_object *) crl_object_new(type, NULL, NULL)) == NULL) - goto error; - - if (!PEM_read_bio_X509_CRL(bio, &self->crl, NULL, NULL)) - lose_openssl_error("Couldn't PEM encoded load CRL"); - - return (PyObject *) self; - - error: - Py_XDECREF(self); - return NULL; -} - -static PyObject * -crl_object_der_read_helper(PyTypeObject *type, BIO *bio) -{ - crl_object *self; - - ENTERING(crl_object_der_read_helper); - - if ((self = (crl_object *) crl_object_new(type, NULL, NULL)) == NULL) - goto error; - - if (!d2i_X509_CRL_bio(bio, &self->crl)) - lose_openssl_error("Couldn't load DER encoded CRL"); - - return (PyObject *) self; - - error: - Py_XDECREF(self); - return NULL; -} - -static char crl_object_pem_read__doc__[] = - "Read a PEM-encoded CRL object from a string.\n" - ; - -static PyObject * -crl_object_pem_read(PyTypeObject *type, PyObject *args) -{ - ENTERING(crl_object_pem_read); - return 
read_from_string_helper(crl_object_pem_read_helper, type, args); -} - -static char crl_object_pem_read_file__doc__[] = - "Read a PEM-encoded CRL object from a file.\n" - ; - -static PyObject * -crl_object_pem_read_file(PyTypeObject *type, PyObject *args) -{ - ENTERING(crl_object_pem_read_file); - return read_from_file_helper(crl_object_pem_read_helper, type, args); -} - -static char crl_object_der_read__doc__[] = - "Read a DER-encoded CRL object from a string.\n" - ; - -static PyObject * -crl_object_der_read(PyTypeObject *type, PyObject *args) -{ - ENTERING(crl_object_der_read); - return read_from_string_helper(crl_object_der_read_helper, type, args); -} - -static char crl_object_der_read_file__doc__[] = - "Read a DER-encoded CRL object from a file.\n" - ; - -static PyObject * -crl_object_der_read_file(PyTypeObject *type, PyObject *args) -{ - ENTERING(crl_object_der_read_file); - return read_from_file_helper(crl_object_der_read_helper, type, args); -} - -static X509_EXTENSIONS ** -crl_object_extension_helper(crl_object *self) -{ - if (self && self->crl && self->crl->crl) - return &self->crl->crl->extensions; - PyErr_SetString(PyExc_ValueError, "Can't find X509_EXTENSIONS in CRL object"); - return NULL; -} - -static char crl_object_get_version__doc__[] = - "return the version number of this CRL.\n" - ; - -static PyObject * -crl_object_get_version(crl_object *self) -{ - ENTERING(crl_object_get_version); - return Py_BuildValue("l", X509_CRL_get_version(self->crl)); -} - -static char crl_object_set_version__doc__[] = - "Set the version number of this CRL.\n" - "\n" - "The \"version\" parameter should be a positive integer.\n" - ; - -static PyObject * -crl_object_set_version(crl_object *self, PyObject *args) -{ - long version = 0; - - ENTERING(crl_object_set_version); - - if (!PyArg_ParseTuple(args, "i", &version)) - goto error; - - if (!X509_CRL_set_version(self->crl, version)) - lose_no_memory(); - - Py_RETURN_NONE; - - error: - return NULL; -} - -static char 
crl_object_get_issuer__doc__[] = - "Return issuer name of this CRL.\n" - "\n" - "See the \"getIssuer()\" method of the X509 class for more details.\n" - ; - -static PyObject * -crl_object_get_issuer(crl_object *self, PyObject *args) -{ - PyObject *result = NULL; - int format = OIDNAME_FORMAT; - - ENTERING(crl_object_get_issuer); - - if (!PyArg_ParseTuple(args, "|i", &format)) - goto error; - - result = x509_object_helper_get_name(X509_CRL_get_issuer(self->crl), format); - - error: /* Fall through */ - return result; -} - -static char crl_object_set_issuer__doc__[] = - "Set this CRL's issuer name.\n" - "\n" - "See the \"setIssuer()\" method of the X509 class for details.\n" - ; - -static PyObject * -crl_object_set_issuer(crl_object *self, PyObject *args) -{ - PyObject *name_sequence = NULL; - X509_NAME *name = NULL; - - ENTERING(crl_object_set_issuer); - - if (!PyArg_ParseTuple(args, "O", &name_sequence)) - goto error; - - if (!PySequence_Check(name_sequence)) - lose_type_error("Inapropriate type"); - - if ((name = x509_object_helper_set_name(name_sequence)) == NULL) - goto error; - - if (!X509_CRL_set_issuer_name(self->crl, name)) - lose_openssl_error("Unable to set issuer name"); - - X509_NAME_free(name); - - Py_RETURN_NONE; - - error: - X509_NAME_free(name); - return NULL; -} - -/* - * NB: OpenSSL is confused about the name of this field, probably for - * backwards compatability with some ancient mistake. What RFC 5280 - * calls "thisUpdate", OpenSSL calls "lastUpdate". 
- */ - -static char crl_object_set_this_update__doc__[] = - "Set this CRL's \"thisUpdate\" value.\n" - "\n" - "The \"time\" parameter should be a datetime object.\n" - ; - -static PyObject * -crl_object_set_this_update (crl_object *self, PyObject *args) -{ - PyObject *o = NULL; - ASN1_TIME *t = NULL; - - ENTERING(crl_object_set_this_update); - - if (!PyArg_ParseTuple(args, "O", &o)) - goto error; - - if ((t = Python_to_ASN1_TIME(o, 1)) == NULL) - lose("Couldn't convert thisUpdate string"); - - if (!X509_CRL_set_lastUpdate(self->crl, t)) /* sic */ - lose("Couldn't set thisUpdate"); - - ASN1_TIME_free(t); - Py_RETURN_NONE; - - error: - ASN1_TIME_free(t); - return NULL; -} - -static char crl_object_get_this_update__doc__[] = - "Return this CRL's \"thisUpdate\" value as a datetime.\n" - ; - -static PyObject * -crl_object_get_this_update (crl_object *self) -{ - ENTERING(crl_object_get_this_update); - return ASN1_TIME_to_Python(X509_CRL_get_lastUpdate(self->crl)); /* sic */ -} - -static char crl_object_set_next_update__doc__[] = - "Set this CRL's \"nextUpdate\" value.\n" - "\n" - "The \"time\" parameter should be a datetime object.\n" - ; - -static PyObject * -crl_object_set_next_update (crl_object *self, PyObject *args) -{ - PyObject *o = NULL; - ASN1_TIME *t = NULL; - - ENTERING(crl_object_set_next_update); - - if (!PyArg_ParseTuple(args, "O", &o)) - goto error; - - if ((t = Python_to_ASN1_TIME(o, 1)) == NULL) - lose("Couldn't parse nextUpdate string"); - - if (!X509_CRL_set_nextUpdate(self->crl, t)) - lose("Couldn't set nextUpdate"); - - ASN1_TIME_free(t); - Py_RETURN_NONE; - - error: - ASN1_TIME_free(t); - return NULL; -} - -static char crl_object_get_next_update__doc__[] = - "Returns this CRL's \"nextUpdate\" value as a datetime.\n" - ; - -static PyObject * -crl_object_get_next_update (crl_object *self) -{ - ENTERING(crl_object_get_next_update); - return ASN1_TIME_to_Python(X509_CRL_get_nextUpdate(self->crl)); -} - -static char crl_object_add_revocations__doc__[] = 
- "This method adds a collection of revocations to this CRL.\n" - "\n" - "The \"iterable\" parameter should be an iterable object which returns\n" - "two-element sequences. The first element of each pair should be the\n" - "revoked serial number (an integer), the second element should be the\n" - "revocation date (a datetime object).\n" - ; - -static PyObject * -crl_object_add_revocations(crl_object *self, PyObject *args) -{ - PyObject *iterable = NULL; - PyObject *iterator = NULL; - PyObject *item = NULL; - PyObject *fast = NULL; - X509_REVOKED *revoked = NULL; - ASN1_INTEGER *serial = NULL; - ASN1_TIME *date = NULL; - int ok = 0; - - ENTERING(crl_object_add_revocations); - - if (!PyArg_ParseTuple(args, "O", &iterable) || - (iterator = PyObject_GetIter(iterable)) == NULL) - goto error; - - while ((item = PyIter_Next(iterator)) != NULL) { - - if ((fast = PySequence_Fast(item, "Revocation entry must be a sequence")) == NULL) - goto error; - - if (PySequence_Fast_GET_SIZE(fast) != 2) - lose_type_error("Revocation entry must be two-element sequence"); - - if ((serial = PyLong_to_ASN1_INTEGER(PySequence_Fast_GET_ITEM(fast, 0))) == NULL || - (date = Python_to_ASN1_TIME(PySequence_Fast_GET_ITEM(fast, 1), 1)) == NULL) - goto error; - - if ((revoked = X509_REVOKED_new()) == NULL || - !X509_REVOKED_set_serialNumber(revoked, serial) || - !X509_REVOKED_set_revocationDate(revoked, date)) - lose_no_memory(); - - ASN1_INTEGER_free(serial); - serial = NULL; - - ASN1_TIME_free(date); - date = NULL; - - if (!X509_CRL_add0_revoked(self->crl, revoked)) - lose_no_memory(); - - revoked = NULL; - Py_XDECREF(item); - Py_XDECREF(fast); - item = fast = NULL; - } - - if (!X509_CRL_sort(self->crl)) - lose_openssl_error("Couldn't sort CRL"); - - ok = 1; - - error: - Py_XDECREF(iterator); - Py_XDECREF(item); - Py_XDECREF(fast); - X509_REVOKED_free(revoked); - ASN1_INTEGER_free(serial); - ASN1_TIME_free(date); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static char 
crl_object_get_revoked__doc__[] = - "Return a sequence of two-element tuples representing the sequence of\n" - "revoked certificates listed in this CRL.\n" - "\n" - "The first element of each pair is the serialNumber of the revoked\n" - "certificate, the second element is the revocationDate.\n" - ; - -static PyObject * -crl_object_get_revoked(crl_object *self) -{ - STACK_OF(X509_REVOKED) *revoked = NULL; - X509_REVOKED *r = NULL; - PyObject *result = NULL; - PyObject *item = NULL; - PyObject *serial = NULL; - PyObject *date = NULL; - int i; - - ENTERING(crl_object_get_revoked); - - if ((revoked = X509_CRL_get_REVOKED(self->crl)) == NULL) - lose("Inexplicable NULL revocation list pointer"); - - if ((result = PyTuple_New(sk_X509_REVOKED_num(revoked))) == NULL) - goto error; - - for (i = 0; i < sk_X509_REVOKED_num(revoked); i++) { - r = sk_X509_REVOKED_value(revoked, i); - - if ((serial = ASN1_INTEGER_to_PyLong(r->serialNumber)) == NULL || - (date = ASN1_TIME_to_Python(r->revocationDate)) == NULL || - (item = Py_BuildValue("(NN)", serial, date)) == NULL) - goto error; - - PyTuple_SET_ITEM(result, i, item); - item = serial = date = NULL; - } - - return result; - - error: - Py_XDECREF(result); - Py_XDECREF(item); - Py_XDECREF(serial); - Py_XDECREF(date); - return NULL; -} - -static char crl_object_clear_extensions__doc__[] = - "Clear all extensions attached to this CRL.\n" - ; - -static PyObject * -crl_object_clear_extensions(crl_object *self) -{ - X509_EXTENSION *ext; - - ENTERING(crl_object_clear_extensions); - - while ((ext = X509_CRL_delete_ext(self->crl, 0)) != NULL) - X509_EXTENSION_free(ext); - - Py_RETURN_NONE; -} - -static char crl_object_sign__doc__[] = - "Sign this CRL with a private key.\n" - "\n" - "The \"key\" parameter should be an instance of the Asymmetric class,\n" - "containing a private key.\n" - "\n" - "The optional \"digest\" parameter indicates which digest to compute and\n" - "sign, and should be one of the following:\n" - "\n" - "* MD5_DIGEST\n" 
- "* SHA_DIGEST\n" - "* SHA1_DIGEST\n" - "* SHA256_DIGEST\n" - "* SHA384_DIGEST\n" - "* SHA512_DIGEST\n" - "\n" - "The default digest algorithm is SHA-256.\n" - ; - -static PyObject * -crl_object_sign(crl_object *self, PyObject *args) -{ - asymmetric_object *asym; - int digest_type = SHA256_DIGEST; - const EVP_MD *digest_method = NULL; - - ENTERING(crl_object_sign); - - if (!PyArg_ParseTuple(args, "O!|i", &POW_Asymmetric_Type, &asym, &digest_type)) - goto error; - - if ((digest_method = evp_digest_factory(digest_type)) == NULL) - lose("Unsupported digest algorithm"); - - if (!X509_CRL_sign(self->crl, asym->pkey, digest_method)) - lose_openssl_error("Couldn't sign CRL"); - - Py_RETURN_NONE; - - error: - return NULL; -} - -static char crl_object_verify__doc__[] = - "Verify this CRL's signature.\n" - "\n" - "The check is performed using OpenSSL's X509_CRL_verify() function.\n" - "\n" - "The \"key\" parameter should be an instance of the Asymmetric class\n" - "containing the public key of the purported signer.\n" - ; - -static PyObject * -crl_object_verify(crl_object *self, PyObject *args) -{ - asymmetric_object *asym; - - ENTERING(crl_object_verify); - - if (!PyArg_ParseTuple(args, "O!", &POW_Asymmetric_Type, &asym)) - goto error; - - return PyBool_FromLong(X509_CRL_verify(self->crl, asym->pkey)); - - error: - return NULL; -} - -static char crl_object_pem_write__doc__[] = - "Return the PEM encoding of this CRL, as a string.\n" - ; - -static PyObject * -crl_object_pem_write(crl_object *self) -{ - PyObject *result = NULL; - BIO *bio = NULL; - - ENTERING(crl_object_pem_write); - - if ((bio = BIO_new(BIO_s_mem())) == NULL) - lose_no_memory(); - - if (!PEM_write_bio_X509_CRL(bio, self->crl)) - lose_openssl_error("Unable to write CRL"); - - result = BIO_to_PyString_helper(bio); - - error: /* Fall through */ - BIO_free(bio); - return result; -} - -static char crl_object_der_write__doc__[] = - "Return the DER encoding of this CRL, as a string.\n" - ; - -static PyObject * 
-crl_object_der_write(crl_object *self) -{ - PyObject *result = NULL; - BIO *bio = NULL; - - ENTERING(crl_object_der_write); - - if ((bio = BIO_new(BIO_s_mem())) == NULL) - lose_no_memory(); - - if (!i2d_X509_CRL_bio(bio, self->crl)) - lose_openssl_error("Unable to write CRL"); - - result = BIO_to_PyString_helper(bio); - - error: /* Fall through */ - BIO_free(bio); - return result; -} - -static char crl_object_get_aki__doc__[] = - "Return the Authority Key Identifier (AKI) keyid value for\n" - "this CRL, or None if the CRL has no AKI extension\n" - "or has an AKI extension with no keyIdentifier value.\n" - ; - -static PyObject * -crl_object_get_aki(crl_object *self) -{ - return extension_get_aki(crl_object_extension_helper(self)); -} - -static char crl_object_set_aki__doc__[] = - "Set the Authority Key Identifier (AKI) value for this\n" - "CRL. We only support the keyIdentifier method, as that's\n" - "the only form which is legal for RPKI certificates.\n" - ; - -static PyObject * -crl_object_set_aki(crl_object *self, PyObject *args) -{ - return extension_set_aki(crl_object_extension_helper(self), args); -} - -static char crl_object_get_crl_number__doc__[] = - "Return the CRL Number extension value from this CRL, an integer.\n" - ; - -static PyObject * -crl_object_get_crl_number(crl_object *self) -{ - ASN1_INTEGER *ext = X509_CRL_get_ext_d2i(self->crl, NID_crl_number, NULL, NULL); - PyObject *result = NULL; - - ENTERING(crl_object_get_crl_number); - - if (ext == NULL) - Py_RETURN_NONE; - - result = Py_BuildValue("N", ASN1_INTEGER_to_PyLong(ext)); - ASN1_INTEGER_free(ext); - return result; -} - -static char crl_object_set_crl_number__doc__[] = - "Set the CRL Number extension value in this CRL.\n" - "\n" - "The \"number\" parameter should be an integer.\n" - ; - -static PyObject * -crl_object_set_crl_number(crl_object *self, PyObject *args) -{ - ASN1_INTEGER *ext = NULL; - PyObject *crl_number = NULL; - - ENTERING(crl_object_set_crl_number); - - if 
(!PyArg_ParseTuple(args, "O", &crl_number) || - (ext = PyLong_to_ASN1_INTEGER(crl_number)) == NULL) - goto error; - - if (!X509_CRL_add1_ext_i2d(self->crl, NID_crl_number, ext, 0, X509V3_ADD_REPLACE)) - lose_openssl_error("Couldn't add CRL Number extension to CRL"); - - ASN1_INTEGER_free(ext); - Py_RETURN_NONE; - - error: - ASN1_INTEGER_free(ext); - return NULL; -} - -static char crl_object_pprint__doc__[] = - "Return a pretty-printed rendition of this CRL.\n" - ; - -static PyObject * -crl_object_pprint(crl_object *self) -{ - PyObject *result = NULL; - BIO *bio = NULL; - - ENTERING(crl_object_pprint); - - if ((bio = BIO_new(BIO_s_mem())) == NULL) - lose_no_memory(); - - if (!X509_CRL_print(bio, self->crl)) - lose_openssl_error("Unable to pretty-print CRL"); - - result = BIO_to_PyString_helper(bio); - - error: /* Fall through */ - BIO_free(bio); - return result; -} - -static struct PyMethodDef crl_object_methods[] = { - Define_Method(sign, crl_object_sign, METH_VARARGS), - Define_Method(verify, crl_object_verify, METH_VARARGS), - Define_Method(getVersion, crl_object_get_version, METH_NOARGS), - Define_Method(setVersion, crl_object_set_version, METH_VARARGS), - Define_Method(getIssuer, crl_object_get_issuer, METH_VARARGS), - Define_Method(setIssuer, crl_object_set_issuer, METH_VARARGS), - Define_Method(getThisUpdate, crl_object_get_this_update, METH_NOARGS), - Define_Method(setThisUpdate, crl_object_set_this_update, METH_VARARGS), - Define_Method(getNextUpdate, crl_object_get_next_update, METH_NOARGS), - Define_Method(setNextUpdate, crl_object_set_next_update, METH_VARARGS), - Define_Method(getRevoked, crl_object_get_revoked, METH_NOARGS), - Define_Method(addRevocations, crl_object_add_revocations, METH_VARARGS), - Define_Method(clearExtensions, crl_object_clear_extensions, METH_NOARGS), - Define_Method(pemWrite, crl_object_pem_write, METH_NOARGS), - Define_Method(derWrite, crl_object_der_write, METH_NOARGS), - Define_Method(pprint, crl_object_pprint, METH_NOARGS), - 
Define_Method(getAKI, crl_object_get_aki, METH_NOARGS), - Define_Method(setAKI, crl_object_set_aki, METH_VARARGS), - Define_Method(getCRLNumber, crl_object_get_crl_number, METH_NOARGS), - Define_Method(setCRLNumber, crl_object_set_crl_number, METH_VARARGS), - Define_Class_Method(pemRead, crl_object_pem_read, METH_VARARGS), - Define_Class_Method(pemReadFile, crl_object_pem_read_file, METH_VARARGS), - Define_Class_Method(derRead, crl_object_der_read, METH_VARARGS), - Define_Class_Method(derReadFile, crl_object_der_read_file, METH_VARARGS), - {NULL} -}; - -static char POW_CRL_Type__doc__[] = - "Container for OpenSSL's X509 CRL management facilities.\n" - ; - -static PyTypeObject POW_CRL_Type = { - PyObject_HEAD_INIT(0) - 0, /* ob_size */ - "rpki.POW.CRL", /* tp_name */ - sizeof(crl_object), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)crl_object_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - POW_CRL_Type__doc__, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - crl_object_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - crl_object_new, /* tp_new */ -}; - - - -/* - * Asymmetric object. 
 */

/*
 * Allocate a new, empty Asymmetric object.  The underlying EVP_PKEY
 * is filled in later by the read/generate constructors.
 */
static PyObject *
asymmetric_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds)
{
  asymmetric_object *self = NULL;

  ENTERING(asymmetric_object_new);

  if ((self = (asymmetric_object *) type->tp_alloc(type, 0)) == NULL)
    goto error;

  self->pkey = NULL;

  return (PyObject *) self;

 error:

  Py_XDECREF(self);
  return NULL;
}

/*
 * __init__() for Asymmetric: accepts no arguments (see comment below
 * about the old RSA-generation arguments).  Returns 0 on success,
 * -1 with a Python exception set on failure.
 */
static int
asymmetric_object_init(asymmetric_object *self, PyObject *args, PyObject *kwds)
{
  static char *kwlist[] = {NULL};

  ENTERING(asymmetric_object_init);

  if (!PyArg_ParseTupleAndKeywords(args, kwds, "", kwlist))
    goto error;

  /*
   * We used to take arguments to generate an RSA key, but that's
   * now in the .generateRSA() class method.
   */

  return 0;

 error:
  return -1;
}

/* Destructor: release the EVP_PKEY (NULL-safe) and free the object. */
static void
asymmetric_object_dealloc(asymmetric_object *self)
{
  ENTERING(asymmetric_object_dealloc);
  EVP_PKEY_free(self->pkey);
  self->ob_type->tp_free((PyObject*) self);
}

/*
 * Common code for the two PEM private-key readers below: read a
 * (possibly passphrase-protected) private key from an already-open
 * BIO.  Returns a new Asymmetric object, or NULL with a Python
 * exception set.
 */
static PyObject *
asymmetric_object_pem_read_private_helper(PyTypeObject *type, BIO *bio, char *pass)
{
  asymmetric_object *self = NULL;

  ENTERING(asymmetric_object_pem_read_private_helper);

  if ((self = (asymmetric_object *) asymmetric_object_new(type, NULL, NULL)) == NULL)
    goto error;

  if (!PEM_read_bio_PrivateKey(bio, &self->pkey, NULL, pass))
    lose_openssl_error("Couldn't load private key");

  return (PyObject *) self;

 error:
  Py_XDECREF(self);
  return NULL;
}

/*
 * We can't use the generic read_from_*_helper() functions here
 * because of the optional PEM password, so we just code the two PEM
 * read cases for private keys directly.  Other than the passphrase,
 * code is pretty much the same as the generic functions.
 *
 * It turns out that OpenSSL is moving away from its old raw PKCS #1.5
 * private key format in favor of PKCS #8.  This makes sense, but it
 * leaves us with a minor mess to track.  Many OpenSSL functions that
 * originally expected PKCS #1.5 now also accept PKCS #8, so there's
 * no tearing hurry about this, but at some point we might want to
 * switch to writing PKCS #8.  It looks like this would be relatively
 * straightforward: see functions i2d_PKCS8PrivateKey_bio() and
 * PEM_write_bio_PKCS8PrivateKey(), and note that PKCS #8 supports
 * encrypted private keys in DER format, so the DER methods should
 * take a passphrase argument as the PEM methods do.
 */

static char asymmetric_object_pem_read_private__doc__[] =
  "Read a PEM-encoded private key from a string.\n"
  "\n"
  "Optional second argument is a passphrase for the key.\n"
  ;

/* Class method: wrap the string argument in a memory BIO and delegate. */
static PyObject *
asymmetric_object_pem_read_private(PyTypeObject *type, PyObject *args)
{
  PyObject *result = NULL;
  char *pass = NULL;
  char *src = NULL;
  BIO *bio = NULL;
  Py_ssize_t len = 0;

  ENTERING(asymmetric_object_pem_read_private);

  if (!PyArg_ParseTuple(args, "s#|s", &src, &len, &pass))
    goto error;

  if ((bio = BIO_new_mem_buf(src, len)) == NULL)
    lose_no_memory();

  result = asymmetric_object_pem_read_private_helper(type, bio, pass);

 error:
  BIO_free(bio);
  return result;
}

static char asymmetric_object_pem_read_private_file__doc__[] =
  "Read a PEM-encoded private key from a file.\n"
  "\n"
  "Optional second argument is a passphrase for the key.\n"
  ;

/* Class method: open the named file as a BIO and delegate. */
static PyObject *
asymmetric_object_pem_read_private_file(PyTypeObject *type, PyObject *args)
{
  const char *filename = NULL;
  PyObject *result = NULL;
  char *pass = NULL;
  BIO *bio = NULL;

  ENTERING(asymmetric_object_pem_read_private_file);

  if (!PyArg_ParseTuple(args, "s|s", &filename, &pass))
    goto error;

  if ((bio = BIO_new_file(filename, "rb")) == NULL)
    lose_openssl_error("Could not open file");

  result = asymmetric_object_pem_read_private_helper(type, bio, pass);

 error:
  BIO_free(bio);
  return result;
}

static PyObject *
asymmetric_object_der_read_private_helper(PyTypeObject *type, BIO *bio)
{
  asymmetric_object *self = NULL;

  ENTERING(asymmetric_object_der_read_private_helper);

  if ((self = (asymmetric_object *) asymmetric_object_new(type, NULL, NULL)) == NULL)
    goto error;

  if (!d2i_PrivateKey_bio(bio, &self->pkey))
    lose_openssl_error("Couldn't load private key");

  return (PyObject *) self;

 error:

  Py_XDECREF(self);
  return NULL;
}

static char asymmetric_object_der_read_private__doc__[] =
  "Read a DER-encoded private key from a string.\n"
  ;

/* Class method: DER private key from a string, via the generic helper. */
static PyObject *
asymmetric_object_der_read_private(PyTypeObject *type, PyObject *args)
{
  ENTERING(asymmetric_object_der_read_private);
  return read_from_string_helper(asymmetric_object_der_read_private_helper, type, args);
}

static char asymmetric_object_der_read_private_file__doc__[] =
  "Read a DER-encoded private key from a file.\n"
  ;

/* Class method: DER private key from a file, via the generic helper. */
static PyObject *
asymmetric_object_der_read_private_file(PyTypeObject *type, PyObject *args)
{
  ENTERING(asymmetric_object_der_read_private_file);
  return read_from_file_helper(asymmetric_object_der_read_private_helper, type, args);
}

/*
 * BIO-level reader for PEM public keys; shared by the string and file
 * class methods below.  Returns a new Asymmetric object or NULL with
 * a Python exception set.
 */
static PyObject *
asymmetric_object_pem_read_public_helper(PyTypeObject *type, BIO *bio)
{
  asymmetric_object *self = NULL;

  ENTERING(asymmetric_object_pem_read_public_helper);

  if ((self = (asymmetric_object *) asymmetric_object_new(type, NULL, NULL)) == NULL)
    goto error;

  if (!PEM_read_bio_PUBKEY(bio, &self->pkey, NULL, NULL))
    lose_openssl_error("Couldn't load public key");

  return (PyObject *) self;

 error:
  Py_XDECREF(self);
  return NULL;
}

/* BIO-level reader for DER (SubjectPublicKeyInfo) public keys. */
static PyObject *
asymmetric_object_der_read_public_helper(PyTypeObject *type, BIO *bio)
{
  asymmetric_object *self = NULL;

  ENTERING(asymmetric_object_der_read_public_helper);

  if ((self = (asymmetric_object *) asymmetric_object_new(type, NULL, NULL)) == NULL)
    goto error;

  if (!d2i_PUBKEY_bio(bio, &self->pkey))
    lose_openssl_error("Couldn't load public key");

  return (PyObject *) self;

 error:

  Py_XDECREF(self);
  return NULL;
}

static char asymmetric_object_pem_read_public__doc__[] =
  "Read a PEM-encoded public key from a string.\n"
  ;

static PyObject *
asymmetric_object_pem_read_public(PyTypeObject *type, PyObject *args)
{
  ENTERING(asymmetric_object_pem_read_public);
  return read_from_string_helper(asymmetric_object_pem_read_public_helper, type, args);
}

static char asymmetric_object_pem_read_public_file__doc__[] =
  "Read a PEM-encoded public key from a file.\n"
  ;

static PyObject *
asymmetric_object_pem_read_public_file(PyTypeObject *type, PyObject *args)
{
  ENTERING(asymmetric_object_pem_read_public_file);
  return read_from_file_helper(asymmetric_object_pem_read_public_helper, type, args);
}

static char asymmetric_object_der_read_public__doc__[] =
  "Read a DER-encoded public key from a string.\n"
  ;

static PyObject *
asymmetric_object_der_read_public(PyTypeObject *type, PyObject *args)
{
  ENTERING(asymmetric_object_der_read_public);
  return read_from_string_helper(asymmetric_object_der_read_public_helper, type, args);
}

static char asymmetric_object_der_read_public_file__doc__[] =
  "Read a DER-encoded public key from a file.\n"
  ;

static PyObject *
asymmetric_object_der_read_public_file(PyTypeObject *type, PyObject *args)
{
  ENTERING(asymmetric_object_der_read_public_file);
  return read_from_file_helper(asymmetric_object_der_read_public_helper, type, args);
}

static char asymmetric_object_pem_write_private__doc__[] =
  "Return the PEM encoding of an \"Asymmetric\" private key.\n"
  "\n"
  "This method takes an optional parameter \"passphrase\" which, if\n"
  "specified, will be used to encrypt the private key with AES-256-CBC.\n"
  "\n"
  "If you don't specify a passphrase, the key will not be encrypted.\n"
  ;

static PyObject *
asymmetric_object_pem_write_private(asymmetric_object *self, PyObject
*args)
{
  PyObject *result = NULL;
  char *passphrase = NULL;
  const EVP_CIPHER *evp_method = NULL;
  BIO *bio = NULL;

  ENTERING(asymmetric_object_pem_write_private);

  if (!PyArg_ParseTuple(args, "|s", &passphrase))
    goto error;

  if ((bio = BIO_new(BIO_s_mem())) == NULL)
    lose_no_memory();

  /* Encrypt only when a passphrase was supplied. */
  if (passphrase)
    evp_method = EVP_aes_256_cbc();

  /*
   * Passphrase is passed as the callback-data argument with a NULL
   * callback, which OpenSSL treats as a NUL-terminated password.
   */
  if (!PEM_write_bio_PrivateKey(bio, self->pkey, evp_method, NULL, 0, NULL, passphrase))
    lose_openssl_error("Unable to write key");

  result = BIO_to_PyString_helper(bio);

 error:                          /* Fall through */
  BIO_free(bio);
  return result;
}

static char asymmetric_object_pem_write_public__doc__[] =
  "Return the PEM encoding of an \"Asymmetric\" public key.\n"
  ;

/* Serialize the public half of this key as PEM into a Python string. */
static PyObject *
asymmetric_object_pem_write_public(asymmetric_object *self)
{
  PyObject *result = NULL;
  BIO *bio = NULL;

  ENTERING(asymmetric_object_pem_write_public);

  if ((bio = BIO_new(BIO_s_mem())) == NULL)
    lose_no_memory();

  if (!PEM_write_bio_PUBKEY(bio, self->pkey))
    lose_openssl_error("Unable to write key");

  result = BIO_to_PyString_helper(bio);

 error:                          /* Fall through */
  BIO_free(bio);
  return result;
}

static char asymmetric_object_der_write_private__doc__[] =
  "Return the DER encoding of an \"Asymmetric\" private key.\n"
  ;

/* Serialize the private key as DER into a Python string. */
static PyObject *
asymmetric_object_der_write_private(asymmetric_object *self)
{
  PyObject *result = NULL;
  BIO *bio = NULL;

  ENTERING(asymmetric_object_der_write_private);

  if ((bio = BIO_new(BIO_s_mem())) == NULL)
    lose_no_memory();

  if (!i2d_PrivateKey_bio(bio, self->pkey))
    lose_openssl_error("Unable to write private key");

  result = BIO_to_PyString_helper(bio);

 error:                          /* Fall through */
  BIO_free(bio);
  return result;
}

static char asymmetric_object_der_write_public__doc__[] =
  "Return the DER encoding of an \"Asymmetric\" public key.\n"
  ;

/* Serialize the public half of this key as DER into a Python string. */
static PyObject *
asymmetric_object_der_write_public(asymmetric_object *self)
{
  PyObject *result = NULL;
  BIO *bio = NULL;

  ENTERING(asymmetric_object_der_write_public);

  if ((bio = BIO_new(BIO_s_mem())) == NULL)
    lose_no_memory();

  if (!i2d_PUBKEY_bio(bio, self->pkey))
    lose_openssl_error("Unable to write public key");

  result = BIO_to_PyString_helper(bio);

 error:                          /* Fall through */
  BIO_free(bio);
  return result;
}

static char asymmetric_object_generate_rsa__doc__[] =
  "Generate a new RSA keypair.\n"
  "\n"
  "Optional argument key_size is the desired key size, in bits;\n"
  "if not specified, the default is 2048."
  ;

/*
 * Class method: generate a fresh RSA keypair via the EVP_PKEY_CTX
 * keygen interface.  Returns a new Asymmetric object or NULL with a
 * Python exception set.
 */
static PyObject *
asymmetric_object_generate_rsa(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
  static char *kwlist[] = {"key_size", NULL};
  asymmetric_object *self = NULL;
  EVP_PKEY_CTX *ctx = NULL;
  int key_size = 2048;
  int ok = 0;

  ENTERING(asymmetric_object_generate_rsa);

  if (!PyArg_ParseTupleAndKeywords(args, kwds, "|i", kwlist, &key_size))
    goto error;

  if ((self = (asymmetric_object *) asymmetric_object_new(type, NULL, NULL)) == NULL)
    goto error;

  /*
   * Explictly setting RSA_F4 would be tedious, as it requires messing
   * about with bignums, and F4 is the default, so we leave it alone.
   * In case this ever changes, the required sequence would be:
   * BN_new(), BN_set_word(), EVP_PKEY_CTX_set_rsa_keygen_pubexp(),
   * BN_free().
 */

  if ((ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_RSA, NULL)) == NULL ||
      EVP_PKEY_keygen_init(ctx) <= 0 ||
      EVP_PKEY_CTX_set_rsa_keygen_bits(ctx, key_size) <= 0 ||
      EVP_PKEY_keygen(ctx, &self->pkey) <= 0)
    lose_openssl_error("Couldn't generate new RSA key");

  ok = 1;

 error:
  EVP_PKEY_CTX_free(ctx);

  if (ok)
    return (PyObject *) self;

  Py_XDECREF(self);
  return NULL;
}

static char asymmetric_object_generate_from_params__doc__[] =
  "Generate a new keypair using an AsymmetricParams object.\n"
  ;

/*
 * Class method: generate a keypair (EC/DH/DSA) from a previously
 * generated AsymmetricParams object.
 */
static PyObject *
asymmetric_object_generate_from_params(PyTypeObject *type, PyObject *args)
{
  asymmetric_params_object *params = NULL;
  asymmetric_object *self = NULL;
  EVP_PKEY_CTX *ctx = NULL;
  int ok = 0;

  ENTERING(asymmetric_object_generate_from_params);

  if (!PyArg_ParseTuple(args, "O!", &POW_AsymmetricParams_Type, &params))
    goto error;

  if ((self = (asymmetric_object *) asymmetric_object_new(type, NULL, NULL)) == NULL)
    goto error;

  if ((ctx = EVP_PKEY_CTX_new(params->pkey, NULL)) == NULL ||
      EVP_PKEY_keygen_init(ctx) <= 0 ||
      EVP_PKEY_keygen(ctx, &self->pkey) <= 0)
    lose_openssl_error("Couldn't generate new key");

  ok = 1;

 error:
  EVP_PKEY_CTX_free(ctx);

  if (ok)
    return (PyObject *) self;

  Py_XDECREF(self);
  return NULL;
}

static char asymmetric_object_calculate_ski__doc__[] =
  "Calculate SKI value for this key.\n"
  "\n"
  "The SKI is the SHA-1 hash of key's SubjectPublicKey value.\n"
  ;

/*
 * Hash the key's SubjectPublicKey BIT STRING contents with SHA-1 and
 * return the digest as a Python string (RFC 5280 method 1 SKI).
 */
static PyObject *
asymmetric_object_calculate_ski(asymmetric_object *self)
{
  PyObject *result = NULL;
  X509_PUBKEY *pubkey = NULL;
  unsigned char digest[EVP_MAX_MD_SIZE];
  unsigned digest_length;

  ENTERING(asymmetric_object_calculate_ski);

  if (!X509_PUBKEY_set(&pubkey, self->pkey))
    lose_openssl_error("Couldn't extract public key");

  if (!EVP_Digest(pubkey->public_key->data, pubkey->public_key->length,
                  digest, &digest_length, EVP_sha1(), NULL))
    lose_openssl_error("Couldn't calculate SHA-1 digest of public key");

  result = PyString_FromStringAndSize((char *) digest, digest_length);

 error:
  X509_PUBKEY_free(pubkey);
  return result;
}

/* Method table for the Asymmetric class. */
static struct PyMethodDef asymmetric_object_methods[] = {
  Define_Method(pemWritePrivate, asymmetric_object_pem_write_private, METH_VARARGS),
  Define_Method(pemWritePublic, asymmetric_object_pem_write_public, METH_NOARGS),
  Define_Method(derWritePrivate, asymmetric_object_der_write_private, METH_NOARGS),
  Define_Method(derWritePublic, asymmetric_object_der_write_public, METH_NOARGS),
  Define_Method(calculateSKI, asymmetric_object_calculate_ski, METH_NOARGS),
  Define_Class_Method(pemReadPublic, asymmetric_object_pem_read_public, METH_VARARGS),
  Define_Class_Method(pemReadPublicFile, asymmetric_object_pem_read_public_file, METH_VARARGS),
  Define_Class_Method(derReadPublic, asymmetric_object_der_read_public, METH_VARARGS),
  Define_Class_Method(derReadPublicFile, asymmetric_object_der_read_public_file, METH_VARARGS),
  Define_Class_Method(pemReadPrivate, asymmetric_object_pem_read_private, METH_VARARGS),
  Define_Class_Method(pemReadPrivateFile, asymmetric_object_pem_read_private_file, METH_VARARGS),
  Define_Class_Method(derReadPrivate, asymmetric_object_der_read_private, METH_VARARGS),
  Define_Class_Method(derReadPrivateFile, asymmetric_object_der_read_private_file, METH_VARARGS),
  Define_Class_Method(generateRSA, asymmetric_object_generate_rsa, METH_KEYWORDS),
  Define_Class_Method(generateFromParams, asymmetric_object_generate_from_params, METH_VARARGS),
  {NULL}                                    /* Sentinel */
};

static char POW_Asymmetric_Type__doc__[] =
  "Container for OpenSSL's EVP_PKEY asymmetric key classes.\n"
  "\n"
  LAME_DISCLAIMER_IN_ALL_CLASS_DOCUMENTATION
  ;

/* Python type object for the Asymmetric class. */
static PyTypeObject POW_Asymmetric_Type = {
  PyObject_HEAD_INIT(0)
  0,                                        /* ob_size */
  "rpki.POW.Asymmetric",                    /* tp_name */
  sizeof(asymmetric_object),                /* tp_basicsize */
  0,                                        /* tp_itemsize */
  (destructor)asymmetric_object_dealloc,    /* tp_dealloc */
  0,                                        /* tp_print
*/
  0,                                        /* tp_getattr */
  0,                                        /* tp_setattr */
  0,                                        /* tp_compare */
  0,                                        /* tp_repr */
  0,                                        /* tp_as_number */
  0,                                        /* tp_as_sequence */
  0,                                        /* tp_as_mapping */
  0,                                        /* tp_hash */
  0,                                        /* tp_call */
  0,                                        /* tp_str */
  0,                                        /* tp_getattro */
  0,                                        /* tp_setattro */
  0,                                        /* tp_as_buffer */
  Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
  POW_Asymmetric_Type__doc__,               /* tp_doc */
  0,                                        /* tp_traverse */
  0,                                        /* tp_clear */
  0,                                        /* tp_richcompare */
  0,                                        /* tp_weaklistoffset */
  0,                                        /* tp_iter */
  0,                                        /* tp_iternext */
  asymmetric_object_methods,                /* tp_methods */
  0,                                        /* tp_members */
  0,                                        /* tp_getset */
  0,                                        /* tp_base */
  0,                                        /* tp_dict */
  0,                                        /* tp_descr_get */
  0,                                        /* tp_descr_set */
  0,                                        /* tp_dictoffset */
  (initproc) asymmetric_object_init,        /* tp_init */
  0,                                        /* tp_alloc */
  asymmetric_object_new,                    /* tp_new */
};



/*
 * AsymmetricParams object.
 */

/* Allocate a new, empty AsymmetricParams object (EVP_PKEY set later). */
static PyObject *
asymmetric_params_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds)
{
  asymmetric_params_object *self = NULL;

  ENTERING(asymmetric_params_object_new);

  if ((self = (asymmetric_params_object *) type->tp_alloc(type, 0)) == NULL)
    goto error;

  self->pkey = NULL;

  return (PyObject *) self;

 error:

  Py_XDECREF(self);
  return NULL;
}

/* __init__() for AsymmetricParams: accepts no arguments. */
static int
asymmetric_params_object_init(asymmetric_params_object *self, PyObject *args, PyObject *kwds)
{
  static char *kwlist[] = {NULL};

  ENTERING(asymmetric_params_object_init);

  if (!PyArg_ParseTupleAndKeywords(args, kwds, "", kwlist))
    goto error;

  return 0;

 error:
  return -1;
}

/* Destructor: release the parameter EVP_PKEY (NULL-safe) and free. */
static void
asymmetric_params_object_dealloc(asymmetric_params_object *self)
{
  ENTERING(asymmetric_params_object_dealloc);
  EVP_PKEY_free(self->pkey);
  self->ob_type->tp_free((PyObject*) self);
}

/*
 * BIO-level reader for PEM key parameters, shared by the string and
 * file class methods below.
 */
static PyObject *
asymmetric_params_object_pem_read_helper(PyTypeObject *type, BIO *bio)
{
  asymmetric_params_object *self = NULL;

  ENTERING(asymmetric_params_object_pem_read_helper);

  if ((self = (asymmetric_params_object *) asymmetric_params_object_new(type, NULL, NULL)) == NULL)
    goto error;

  if (!PEM_read_bio_Parameters(bio, &self->pkey))
    lose_openssl_error("Couldn't load PEM encoded key parameters");

  return (PyObject *) self;

 error:

  Py_XDECREF(self);
  return NULL;
}

static char asymmetric_params_object_pem_read__doc__[] =
  "Read PEM-encoded key parameters from a string.\n"
  ;

static PyObject *
asymmetric_params_object_pem_read(PyTypeObject *type, PyObject *args)
{
  ENTERING(asymmetric_params_object_pem_read);
  return read_from_string_helper(asymmetric_params_object_pem_read_helper, type, args);
}

static char asymmetric_params_object_pem_read_file__doc__[] =
  "Read PEM-encoded key parameters from a file.\n"
  ;

static PyObject *
asymmetric_params_object_pem_read_file(PyTypeObject *type, PyObject *args)
{
  ENTERING(asymmetric_params_object_pem_read_file);
  return read_from_file_helper(asymmetric_params_object_pem_read_helper, type, args);
}

static char asymmetric_params_object_pem_write__doc__[] =
  "Return the PEM encoding of this set of key parameters, as a string.\n"
  ;

/* Serialize these key parameters as PEM into a Python string. */
static PyObject *
asymmetric_params_object_pem_write(asymmetric_params_object *self)
{
  PyObject *result = NULL;
  BIO *bio = NULL;

  ENTERING(asymmetric_params_object_pem_write);

  if ((bio = BIO_new(BIO_s_mem())) == NULL)
    lose_no_memory();

  if (PEM_write_bio_Parameters(bio, self->pkey) <= 0)
    lose_openssl_error("Unable to write key parameters");

  result = BIO_to_PyString_helper(bio);

 error:                          /* Fall through */
  BIO_free(bio);
  return result;
}

static char asymmetric_params_object_generate_ec__doc__[] =
  "Generate a new set of EC parameters.\n"
  "\n"
  "Optional argument curve is a numeric code representing the curve to use;\n"
  "if not specified, the default is P-256."
  ;

/*
 * Class method: generate EC parameters for the given named curve
 * (default NID_X9_62_prime256v1, i.e. P-256).
 */
static PyObject *
asymmetric_params_object_generate_ec(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
  static char *kwlist[] = {"curve", NULL};
  asymmetric_params_object *self = NULL;
  EVP_PKEY_CTX *ctx = NULL;
  int curve = NID_X9_62_prime256v1;
  int ok = 0;

  ENTERING(asymmetric_params_object_generate_ec);

  if (!PyArg_ParseTupleAndKeywords(args, kwds, "|i", kwlist, &curve))
    goto error;

  if ((self = (asymmetric_params_object *) asymmetric_params_object_new(type, NULL, NULL)) == NULL)
    goto error;

  if ((ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_EC, NULL)) == NULL ||
      EVP_PKEY_paramgen_init(ctx) <= 0 ||
      EVP_PKEY_CTX_set_ec_paramgen_curve_nid(ctx, curve) <= 0 ||
      EVP_PKEY_paramgen(ctx, &self->pkey) <= 0)
    lose_openssl_error("Couldn't generate key parameters");

  ok = 1;

 error:
  EVP_PKEY_CTX_free(ctx);

  if (ok)
    return (PyObject *) self;

  Py_XDECREF(self);
  return NULL;
}

static char asymmetric_params_object_generate_dh__doc__[] =
  "Generate a new set of DH parameters.\n"
  "\n"
  "Optional argument prime_length is length of the DH prime parameter\n"
  "to use, in bits; if not specified, the default is 2048 bits.\n"
  "\n"
  "Be warned that generating DH parameters with a 2048-bit prime may\n"
  "take a ridiculously long time.\n"
  ;

/* Class method: generate DH parameters (slow for large primes). */
static PyObject *
asymmetric_params_object_generate_dh(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
  static char *kwlist[] = {"prime_length", NULL};
  asymmetric_params_object *self = NULL;
  EVP_PKEY_CTX *ctx = NULL;
  int prime_length = 2048;
  int ok = 0;

  ENTERING(asymmetric_params_object_generate_dh);

  if (!PyArg_ParseTupleAndKeywords(args, kwds, "|i", kwlist, &prime_length))
    goto error;

  if ((self = (asymmetric_params_object *) asymmetric_params_object_new(type, NULL, NULL)) == NULL)
    goto error;

  if ((ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_DH, NULL)) == NULL ||
      EVP_PKEY_paramgen_init(ctx) <= 0 ||
      EVP_PKEY_CTX_set_dh_paramgen_prime_len(ctx, prime_length) <= 0 ||
      EVP_PKEY_paramgen(ctx, &self->pkey) <= 0)
    lose_openssl_error("Couldn't generate key parameters");

  ok = 1;

 error:
  EVP_PKEY_CTX_free(ctx);

  if (ok)
    return (PyObject *) self;

  Py_XDECREF(self);
  return NULL;
}

static char asymmetric_params_object_generate_dsa__doc__[] =
  "Generate a new set of DSA parameters.\n"
  "\n"
  "Optional argument key_length is the length of the key to generate, in bits;\n"
  "if not specified, the default is 2048 bits."
  ;

/* Class method: generate DSA parameters for the given key length. */
static PyObject *
asymmetric_params_object_generate_dsa(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
  static char *kwlist[] = {"key_length", NULL};
  asymmetric_params_object *self = NULL;
  EVP_PKEY_CTX *ctx = NULL;
  int key_length = 2048;
  int ok = 0;

  ENTERING(asymmetric_params_object_generate_dsa);

  if (!PyArg_ParseTupleAndKeywords(args, kwds, "|i", kwlist, &key_length))
    goto error;

  if ((self = (asymmetric_params_object *) asymmetric_params_object_new(type, NULL, NULL)) == NULL)
    goto error;

  if ((ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_DSA, NULL)) == NULL ||
      EVP_PKEY_paramgen_init(ctx) <= 0 ||
      EVP_PKEY_CTX_set_dsa_paramgen_bits(ctx, key_length) <= 0 ||
      EVP_PKEY_paramgen(ctx, &self->pkey) <= 0)
    lose_openssl_error("Couldn't generate key parameters");

  ok = 1;

 error:
  EVP_PKEY_CTX_free(ctx);

  if (ok)
    return (PyObject *) self;

  Py_XDECREF(self);
  return NULL;
}

/* Method table for the AsymmetricParams class. */
static struct PyMethodDef asymmetric_params_object_methods[] = {
  Define_Method(pemWrite, asymmetric_params_object_pem_write, METH_NOARGS),
  Define_Class_Method(pemRead, asymmetric_params_object_pem_read, METH_VARARGS),
  Define_Class_Method(pemReadFile, asymmetric_params_object_pem_read_file, METH_VARARGS),
  Define_Class_Method(generateEC, asymmetric_params_object_generate_ec, METH_KEYWORDS),
  Define_Class_Method(generateDH, asymmetric_params_object_generate_dh, METH_KEYWORDS),
  Define_Class_Method(generateDSA, asymmetric_params_object_generate_dsa,
METH_KEYWORDS),
  {NULL}                                    /* Sentinel */
};

static char POW_AsymmetricParams_Type__doc__[] =
  "Container for OpenSSL's EVP_PKEY asymmetric key parameter classes.\n"
  "\n"
  LAME_DISCLAIMER_IN_ALL_CLASS_DOCUMENTATION
  ;

/* Python type object for the AsymmetricParams class. */
static PyTypeObject POW_AsymmetricParams_Type = {
  PyObject_HEAD_INIT(0)
  0,                                        /* ob_size */
  "rpki.POW.AsymmetricParams",              /* tp_name */
  sizeof(asymmetric_params_object),         /* tp_basicsize */
  0,                                        /* tp_itemsize */
  (destructor)asymmetric_params_object_dealloc, /* tp_dealloc */
  0,                                        /* tp_print */
  0,                                        /* tp_getattr */
  0,                                        /* tp_setattr */
  0,                                        /* tp_compare */
  0,                                        /* tp_repr */
  0,                                        /* tp_as_number */
  0,                                        /* tp_as_sequence */
  0,                                        /* tp_as_mapping */
  0,                                        /* tp_hash */
  0,                                        /* tp_call */
  0,                                        /* tp_str */
  0,                                        /* tp_getattro */
  0,                                        /* tp_setattro */
  0,                                        /* tp_as_buffer */
  Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
  POW_AsymmetricParams_Type__doc__,         /* tp_doc */
  0,                                        /* tp_traverse */
  0,                                        /* tp_clear */
  0,                                        /* tp_richcompare */
  0,                                        /* tp_weaklistoffset */
  0,                                        /* tp_iter */
  0,                                        /* tp_iternext */
  asymmetric_params_object_methods,         /* tp_methods */
  0,                                        /* tp_members */
  0,                                        /* tp_getset */
  0,                                        /* tp_base */
  0,                                        /* tp_dict */
  0,                                        /* tp_descr_get */
  0,                                        /* tp_descr_set */
  0,                                        /* tp_dictoffset */
  (initproc) asymmetric_params_object_init, /* tp_init */
  0,                                        /* tp_alloc */
  asymmetric_params_object_new,             /* tp_new */
};



/*
 * Digest object.
 */

/* Allocate a new Digest object; digest_type is set by __init__(). */
static PyObject *
digest_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds)
{
  digest_object *self = NULL;

  ENTERING(digest_object_new);

  if ((self = (digest_object *) type->tp_alloc(type, 0)) == NULL)
    goto error;

  self->digest_type = 0;

  return (PyObject *) self;

 error:
  return NULL;
}

/*
 * __init__() for Digest: takes the numeric digest_type code, maps it
 * to an EVP_MD via evp_digest_factory(), and initializes the context.
 */
static int
digest_object_init(digest_object *self, PyObject *args, PyObject *kwds)
{
  static char *kwlist[] = {"digest_type", NULL};
  const EVP_MD *digest_method = NULL;
  int digest_type = 0;

  ENTERING(digest_object_init);

  if (!PyArg_ParseTupleAndKeywords(args, kwds, "i", kwlist, &digest_type))
    goto error;

  if ((digest_method = evp_digest_factory(digest_type)) == NULL)
    lose("Unsupported digest algorithm");

  self->digest_type = digest_type;
  if (!EVP_DigestInit(&self->digest_ctx, digest_method))
    lose_openssl_error("Couldn't initialize digest");

  return 0;

 error:
  return -1;
}

/* Destructor: scrub the digest context and free the object. */
static void
digest_object_dealloc(digest_object *self)
{
  ENTERING(digest_object_dealloc);
  EVP_MD_CTX_cleanup(&self->digest_ctx);
  self->ob_type->tp_free((PyObject*) self);
}

static char digest_object_update__doc__[] =
  "Add data to this digest.\n"
  "\n"
  "the \"data\" parameter should be a string containing the data to be added.\n"
  ;

/* Feed more data into the running digest. */
static PyObject *
digest_object_update(digest_object *self, PyObject *args)
{
  char *data = NULL;
  Py_ssize_t len = 0;

  ENTERING(digest_object_update);

  if (!PyArg_ParseTuple(args, "s#", &data, &len))
    goto error;

  if (!EVP_DigestUpdate(&self->digest_ctx, data, len))
    lose_openssl_error("EVP_DigestUpdate() failed");

  Py_RETURN_NONE;

 error:
  return NULL;
}

static char digest_object_copy__doc__[] =
  "Return a copy of this Digest object.\n"
  ;

/* Clone this Digest, including the accumulated context state. */
static PyObject *
digest_object_copy(digest_object *self)
{
  digest_object *new = NULL;

  ENTERING(digest_object_copy);

  if ((new = (digest_object *)
digest_object_new(&POW_Digest_Type, NULL, NULL)) == NULL) - goto error; - - new->digest_type = self->digest_type; - if (!EVP_MD_CTX_copy(&new->digest_ctx, &self->digest_ctx)) - lose_openssl_error("Couldn't copy digest"); - - return (PyObject*) new; - - error: - - Py_XDECREF(new); - return NULL; -} - -static char digest_object_digest__doc__[] = - "Return the digest of all the data which this Digest object has processed.\n" - "\n" - "This method can be called at any time and will not effect the internal\n" - "state of the Digest object.\n" - ; - -/* - * Do we really need to do this copy? Nice general operation, but does - * anything we're doing for RPKI care? - */ - -static PyObject * -digest_object_digest(digest_object *self) -{ - unsigned char digest_text[EVP_MAX_MD_SIZE]; - EVP_MD_CTX ctx; - unsigned digest_len = 0; - - ENTERING(digest_object_digest); - - if (!EVP_MD_CTX_copy(&ctx, &self->digest_ctx)) - lose_openssl_error("Couldn't copy digest"); - - EVP_DigestFinal(&ctx, digest_text, &digest_len); - - EVP_MD_CTX_cleanup(&ctx); - - return Py_BuildValue("s#", digest_text, (Py_ssize_t) digest_len); - - error: - return NULL; -} - -static struct PyMethodDef digest_object_methods[] = { - Define_Method(update, digest_object_update, METH_VARARGS), - Define_Method(digest, digest_object_digest, METH_NOARGS), - Define_Method(copy, digest_object_copy, METH_NOARGS), - {NULL} -}; - -static char POW_Digest_Type__doc__[] = - "This class provides access to the digest functionality of OpenSSL.\n" - "It emulates the digest modules in the Python Standard Library, but\n" - "does not currently support the \"hexdigest\" method.\n" - "\n" - "The constructor takes one parameter, the kind of Digest object to create.\n" - "This should be one of the following:\n" - "\n" - " * MD5_DIGEST\n" - " * SHA_DIGEST\n" - " * SHA1_DIGEST\n" - " * SHA256_DIGEST\n" - " * SHA384_DIGEST\n" - " * SHA512_DIGEST\n" - ; - -static PyTypeObject POW_Digest_Type = { - PyObject_HEAD_INIT(0) - 0, /* ob_size */ - 
  "rpki.POW.Digest",                        /* tp_name */
  sizeof(digest_object),                    /* tp_basicsize */
  0,                                        /* tp_itemsize */
  (destructor)digest_object_dealloc,        /* tp_dealloc */
  0,                                        /* tp_print */
  0,                                        /* tp_getattr */
  0,                                        /* tp_setattr */
  0,                                        /* tp_compare */
  0,                                        /* tp_repr */
  0,                                        /* tp_as_number */
  0,                                        /* tp_as_sequence */
  0,                                        /* tp_as_mapping */
  0,                                        /* tp_hash */
  0,                                        /* tp_call */
  0,                                        /* tp_str */
  0,                                        /* tp_getattro */
  0,                                        /* tp_setattro */
  0,                                        /* tp_as_buffer */
  Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
  POW_Digest_Type__doc__,                   /* tp_doc */
  0,                                        /* tp_traverse */
  0,                                        /* tp_clear */
  0,                                        /* tp_richcompare */
  0,                                        /* tp_weaklistoffset */
  0,                                        /* tp_iter */
  0,                                        /* tp_iternext */
  digest_object_methods,                    /* tp_methods */
  0,                                        /* tp_members */
  0,                                        /* tp_getset */
  0,                                        /* tp_base */
  0,                                        /* tp_dict */
  0,                                        /* tp_descr_get */
  0,                                        /* tp_descr_set */
  0,                                        /* tp_dictoffset */
  (initproc) digest_object_init,            /* tp_init */
  0,                                        /* tp_alloc */
  digest_object_new,                        /* tp_new */
};



/*
 * CMS object.
 */

/* Allocate a new, empty CMS object; self->cms is filled in by readers. */
static PyObject *
cms_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds)
{
  cms_object *self;

  ENTERING(cms_object_new);

  if ((self = (cms_object *) type->tp_alloc(type, 0)) != NULL)
    return (PyObject *) self;

  Py_XDECREF(self);
  return NULL;
}

/* Destructor: release the CMS_ContentInfo (NULL-safe) and free. */
static void
cms_object_dealloc(cms_object *self)
{
  ENTERING(cms_object_dealloc);
  CMS_ContentInfo_free(self->cms);
  self->ob_type->tp_free((PyObject*) self);
}

/* BIO-level reader for PEM CMS messages (shared by string/file readers). */
static PyObject *
cms_object_pem_read_helper(PyTypeObject *type, BIO *bio)
{
  cms_object *self;

  ENTERING(cms_object_pem_read_helper);

  if ((self = (cms_object *) type->tp_new(type, NULL, NULL)) == NULL)
    goto error;

  if (!PEM_read_bio_CMS(bio, &self->cms, NULL, NULL))
    lose_openssl_error("Couldn't load PEM encoded CMS message");

  return (PyObject *) self;

 error:
  Py_XDECREF(self);
  return NULL;
}

/* BIO-level reader for DER CMS messages (shared by string/file readers). */
static PyObject *
cms_object_der_read_helper(PyTypeObject *type, BIO *bio)
{
  cms_object *self;

  ENTERING(cms_object_der_read_helper);

  if ((self = (cms_object *) type->tp_new(type, NULL, NULL)) == NULL)
    goto error;

  if (!d2i_CMS_bio(bio, &self->cms))
    lose_openssl_error("Couldn't load DER encoded CMS message");

  return (PyObject *) self;

 error:
  Py_XDECREF(self);
  return NULL;
}

static char cms_object_pem_read__doc__[] =
  "Read a PEM-encoded CMS object from a string.\n"
  ;

static PyObject *
cms_object_pem_read(PyTypeObject *type, PyObject *args)
{
  ENTERING(cms_object_pem_read);
  return read_from_string_helper(cms_object_pem_read_helper, type, args);
}

static char cms_object_pem_read_file__doc__[] =
  "Read a PEM-encoded CMS object from a file.\n"
  ;

static PyObject *
cms_object_pem_read_file(PyTypeObject *type, PyObject *args)
{
  ENTERING(cms_object_pem_read_file);
  return read_from_file_helper(cms_object_pem_read_helper, type, args);
}

static char cms_object_der_read__doc__[] =
  "Read a DER-encoded CMS object from a
string.\n" - ; - -static PyObject * -cms_object_der_read(PyTypeObject *type, PyObject *args) -{ - ENTERING(cms_object_der_read); - return read_from_string_helper(cms_object_der_read_helper, type, args); -} - -static char cms_object_der_read_file__doc__[] = - "Read a DER-encoded CMS object from a file.\n" - ; - -static PyObject * -cms_object_der_read_file(PyTypeObject *type, PyObject *args) -{ - ENTERING(cms_object_der_read_file); - return read_from_file_helper(cms_object_der_read_helper, type, args); -} - -static char cms_object_pem_write__doc__[] = - "Return the DER encoding of this CMS message.\n" - ; - -static PyObject * -cms_object_pem_write(cms_object *self) -{ - PyObject *result = NULL; - BIO *bio = NULL; - - ENTERING(cms_object_pem_write); - - if ((bio = BIO_new(BIO_s_mem())) == NULL) - lose_no_memory(); - - if (!PEM_write_bio_CMS(bio, self->cms)) - lose_openssl_error("Unable to write CMS object"); - - result = BIO_to_PyString_helper(bio); - - error: /* Fall through */ - BIO_free(bio); - return result; -} - -static char cms_object_der_write__doc__[] = - "Return the DER encoding of this CMS message.\n" - ; - -static PyObject * -cms_object_der_write(cms_object *self) -{ - PyObject *result = NULL; - BIO *bio = NULL; - - ENTERING(cms_object_der_write); - - if ((bio = BIO_new(BIO_s_mem())) == NULL) - lose_no_memory(); - - if (!i2d_CMS_bio(bio, self->cms)) - lose_openssl_error("Unable to write CMS object"); - - result = BIO_to_PyString_helper(bio); - - error: /* Fall through */ - BIO_free(bio); - return result; -} - -static int -cms_object_sign_helper(cms_object *self, - BIO *bio, - x509_object *signcert, - asymmetric_object *signkey, - PyObject *x509_iterable, - PyObject *crl_iterable, - char *oid, - unsigned flags) -{ - STACK_OF(X509) *x509_stack = NULL; - ASN1_OBJECT *econtent_type = NULL; - CMS_ContentInfo *cms = NULL; - PyObject *iterator = NULL; - PyObject *item = NULL; - int ok = 0; - - ENTERING(cms_object_sign_helper); - - 
assert_no_unhandled_openssl_errors(); - - flags &= CMS_NOCERTS | CMS_NOATTR; - flags |= CMS_BINARY | CMS_NOSMIMECAP | CMS_PARTIAL | CMS_USE_KEYID; - - if ((x509_stack = x509_helper_iterable_to_stack(x509_iterable)) == NULL) - goto error; - - assert_no_unhandled_openssl_errors(); - - if (oid && (econtent_type = OBJ_txt2obj(oid, 1)) == NULL) - lose_openssl_error("Couldn't parse OID"); - - assert_no_unhandled_openssl_errors(); - - if ((cms = CMS_sign(NULL, NULL, x509_stack, bio, flags)) == NULL) - lose_openssl_error("Couldn't create CMS message"); - - assert_no_unhandled_openssl_errors(); - - if (econtent_type) - CMS_set1_eContentType(cms, econtent_type); - - assert_no_unhandled_openssl_errors(); - - if (!CMS_add1_signer(cms, signcert->x509, signkey->pkey, EVP_sha256(), flags)) - lose_openssl_error("Couldn't sign CMS message"); - - assert_no_unhandled_openssl_errors(); - - if (crl_iterable != Py_None) { - - if ((iterator = PyObject_GetIter(crl_iterable)) == NULL) - goto error; - - while ((item = PyIter_Next(iterator)) != NULL) { - - if (!POW_CRL_Check(item)) - lose_type_error("Inappropriate type"); - - if (!CMS_add1_crl(cms, ((crl_object *) item)->crl)) - lose_openssl_error("Couldn't add CRL to CMS"); - - assert_no_unhandled_openssl_errors(); - - Py_XDECREF(item); - item = NULL; - } - } - - if (!CMS_final(cms, bio, NULL, flags)) - lose_openssl_error("Couldn't finalize CMS signatures"); - - assert_no_unhandled_openssl_errors(); - - CMS_ContentInfo_free(self->cms); - self->cms = cms; - cms = NULL; - - ok = 1; - - error: /* fall through */ - CMS_ContentInfo_free(cms); - sk_X509_free(x509_stack); - ASN1_OBJECT_free(econtent_type); - Py_XDECREF(iterator); - Py_XDECREF(item); - - return ok; -} - -static char cms_object_sign__doc__[] = - "Sign this CMS message message with a private key.\n" - "\n" - "The \"signcert\" parameter should be the certificate against which the\n" - "message will eventually be verified, an X509 object.\n" - "\n" - "The \"key\" parameter should be 
the private key with which to sign the\n" - "message, an Asymmetric object.\n" - "\n" - "The \"data\" parameter should be the message to be signed, a string.\n" - "\n" - "The optional \"certs\" parameter should be an iterable supplying X509 objects\n" - "to be included in the signed message.\n" - "\n" - "The optional \"crls\" parameter should be an iterable supplying CRL objects\n" - "to be included in the signed message.\n" - "\n" - "The optional \"eContentType\" parameter should be an Object Identifier\n" - "to use as the eContentType value in the signed message.\n" - "\n" - "The optional \"flags\" parameters should be an integer holding a bitmask,\n" - "and can include the following flags:\n" - "\n" - " * CMS_NOCERTS\n" - " * CMS_NOATTR\n" - ; - -static PyObject * -cms_object_sign(cms_object *self, PyObject *args) -{ - asymmetric_object *signkey = NULL; - x509_object *signcert = NULL; - PyObject *x509_iterable = Py_None; - PyObject *crl_iterable = Py_None; - char *buf = NULL, *oid = NULL; - Py_ssize_t len; - unsigned flags = 0; - BIO *bio = NULL; - int ok = 0; - - ENTERING(cms_object_sign); - - if (!PyArg_ParseTuple(args, "O!O!s#|OOsI", - &POW_X509_Type, &signcert, - &POW_Asymmetric_Type, &signkey, - &buf, &len, - &x509_iterable, - &crl_iterable, - &oid, - &flags)) - goto error; - - assert_no_unhandled_openssl_errors(); - - if ((bio = BIO_new_mem_buf(buf, len)) == NULL) - lose_no_memory(); - - assert_no_unhandled_openssl_errors(); - - if (!cms_object_sign_helper(self, bio, signcert, signkey, - x509_iterable, crl_iterable, oid, flags)) - lose_openssl_error("Couldn't sign CMS object"); - - assert_no_unhandled_openssl_errors(); - - ok = 1; - - error: - BIO_free(bio); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static BIO * -cms_object_verify_helper(cms_object *self, PyObject *args, PyObject *kwds) -{ - static char *kwlist[] = {"store", "certs", "flags", NULL}; - x509_store_object *store = NULL; - PyObject *certs_iterable = Py_None; - STACK_OF(X509) 
*certs_stack = NULL; - unsigned flags = 0, ok = 0; - BIO *bio = NULL; - - ENTERING(cms_object_verify_helper); - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O!|OI", kwlist, - &POW_X509Store_Type, &store, &certs_iterable, &flags)) - goto error; - - if ((bio = BIO_new(BIO_s_mem())) == NULL) - lose_no_memory(); - - assert_no_unhandled_openssl_errors(); - - flags &= (CMS_NOINTERN | CMS_NOCRL | CMS_NO_SIGNER_CERT_VERIFY | - CMS_NO_ATTR_VERIFY | CMS_NO_CONTENT_VERIFY); - - if (certs_iterable != Py_None && - (certs_stack = x509_helper_iterable_to_stack(certs_iterable)) == NULL) - goto error; - - assert_no_unhandled_openssl_errors(); - - if (CMS_verify(self->cms, certs_stack, store->store, NULL, bio, flags) <= 0) - lose_openssl_error("Couldn't verify CMS message"); - - assert_no_unhandled_openssl_errors(); - - ok = 1; - - error: /* fall through */ - sk_X509_free(certs_stack); - - if (ok) - return bio; - - BIO_free(bio); - return NULL; -} - -static char cms_object_verify__doc__[] = - "Verify this CMS message against a trusted certificate store.\n" - "\n" - "The \"store\" parameter is an X509Store object, the trusted certificate\n" - "store to use in verification.\n" - "\n" - "The optional \"certs\" parameter is a set of certificates to search\n" - "for the signer's certificate.\n" - "\n" - "The optional \"flags\" parameter is an integer of bit flags,\n" - "containing zero or more of the following:\n" - "\n" - " * CMS_NOINTERN\n" - " * CMS_NOCRL\n" - " * CMS_NO_SIGNER_CERT_VERIFY\n" - " * CMS_NO_ATTR_VERIFY\n" - " * CMS_NO_CONTENT_VERIFY\n" - ; - -static PyObject * -cms_object_verify(cms_object *self, PyObject *args, PyObject *kwds) -{ - PyObject *result = NULL; - BIO *bio = NULL; - - ENTERING(cms_object_verify); - - if ((bio = cms_object_verify_helper(self, args, kwds)) != NULL) - result = BIO_to_PyString_helper(bio); - - BIO_free(bio); - return result; -} - -static char cms_object_eContentType__doc__[] = - "Return the eContentType OID of this CMS message.\n" - ; - 
-static PyObject * -cms_object_eContentType(cms_object *self) -{ - const ASN1_OBJECT *oid = NULL; - PyObject *result = NULL; - - ENTERING(cms_object_eContentType); - - if ((oid = CMS_get0_eContentType(self->cms)) == NULL) - lose_openssl_error("Couldn't extract eContentType from CMS message"); - - assert_no_unhandled_openssl_errors(); - - result = ASN1_OBJECT_to_PyString(oid); - - error: - return result; -} - -static char cms_object_signingTime__doc__[] = - "Return the signingTime of this CMS message.\n" - ; - -static PyObject * -cms_object_signingTime(cms_object *self) -{ - PyObject *result = NULL; - STACK_OF(CMS_SignerInfo) *sis = NULL; - CMS_SignerInfo *si = NULL; - X509_ATTRIBUTE *xa = NULL; - ASN1_TYPE *so = NULL; - int i; - - ENTERING(cms_object_signingTime); - - if ((sis = CMS_get0_SignerInfos(self->cms)) == NULL) - lose_openssl_error("Couldn't extract signerInfos from CMS message[1]"); - - if (sk_CMS_SignerInfo_num(sis) != 1) - lose_openssl_error("Couldn't extract signerInfos from CMS message[2]"); - - si = sk_CMS_SignerInfo_value(sis, 0); - - if ((i = CMS_signed_get_attr_by_NID(si, NID_pkcs9_signingTime, -1)) < 0) - lose_openssl_error("Couldn't extract signerInfos from CMS message[3]"); - - if ((xa = CMS_signed_get_attr(si, i)) == NULL) - lose_openssl_error("Couldn't extract signerInfos from CMS message[4]"); - - if (xa->single) - lose("Couldn't extract signerInfos from CMS message[5]"); - - if (sk_ASN1_TYPE_num(xa->value.set) != 1) - lose("Couldn't extract signerInfos from CMS message[6]"); - - if ((so = sk_ASN1_TYPE_value(xa->value.set, 0)) == NULL) - lose("Couldn't extract signerInfos from CMS message[7]"); - - switch (so->type) { - case V_ASN1_UTCTIME: - result = ASN1_TIME_to_Python(so->value.utctime); - break; - case V_ASN1_GENERALIZEDTIME: - result = ASN1_TIME_to_Python(so->value.generalizedtime); - break; - default: - lose("Couldn't extract signerInfos from CMS message[8]"); - } - - error: - return result; -} - -static char cms_object_pprint__doc__[] 
= - "Return a pretty-printed representation of this CMS message.\n" - ; - -static PyObject * -cms_object_pprint(cms_object *self) -{ - BIO *bio = NULL; - PyObject *result = NULL; - - ENTERING(cms_object_pprint); - - if ((bio = BIO_new(BIO_s_mem())) == NULL) - lose_no_memory(); - - if (!CMS_ContentInfo_print_ctx(bio, self->cms, 0, NULL)) - lose_openssl_error("Unable to pretty-print CMS object"); - - result = BIO_to_PyString_helper(bio); - - error: - BIO_free(bio); - return result; -} - -static char cms_object_certs__doc__[] = - "Return any certificates embedded in this CMS message, as a\n" - "tuple of X509 objects. This tuple will be empty if the message\n" - "wrapper contains no certificates.\n" - ; - -static PyObject * -cms_object_certs(cms_object *self) -{ - STACK_OF(X509) *certs = NULL; - PyObject *result = NULL; - - ENTERING(cms_object_certs); - - if ((certs = CMS_get1_certs(self->cms)) != NULL) - result = stack_to_tuple_helper(CHECKED_PTR_OF(STACK_OF(X509), certs), - stack_to_tuple_helper_get_x509); - else if (!ERR_peek_error()) - result = Py_BuildValue("()"); - else - lose_openssl_error("Couldn't extract certs from CMS message"); - - error: /* fall through */ - sk_X509_pop_free(certs, X509_free); - return result; -} - -static char cms_object_crls__doc__[] = - "Return any CRLs embedded in this CMS message, as a tuple of\n" - "CRL objects. 
This tuple will be empty if the message contains no CRLs.\n" - ; - -static PyObject * -cms_object_crls(cms_object *self) -{ - STACK_OF(X509_CRL) *crls = NULL; - PyObject *result = NULL; - - ENTERING(cms_object_crls); - - if ((crls = CMS_get1_crls(self->cms)) != NULL) - result = stack_to_tuple_helper(CHECKED_PTR_OF(STACK_OF(X509_CRL), crls), - stack_to_tuple_helper_get_crl); - else if (!ERR_peek_error()) - result = Py_BuildValue("()"); - else - lose_openssl_error("Couldn't extract CRLs from CMS message"); - - error: /* fall through */ - sk_X509_CRL_pop_free(crls, X509_CRL_free); - return result; -} - -static struct PyMethodDef cms_object_methods[] = { - Define_Method(pemWrite, cms_object_pem_write, METH_NOARGS), - Define_Method(derWrite, cms_object_der_write, METH_NOARGS), - Define_Method(sign, cms_object_sign, METH_VARARGS), - Define_Method(verify, cms_object_verify, METH_KEYWORDS), - Define_Method(eContentType, cms_object_eContentType, METH_NOARGS), - Define_Method(signingTime, cms_object_signingTime, METH_NOARGS), - Define_Method(pprint, cms_object_pprint, METH_NOARGS), - Define_Method(certs, cms_object_certs, METH_NOARGS), - Define_Method(crls, cms_object_crls, METH_NOARGS), - Define_Class_Method(pemRead, cms_object_pem_read, METH_VARARGS), - Define_Class_Method(pemReadFile, cms_object_pem_read_file, METH_VARARGS), - Define_Class_Method(derRead, cms_object_der_read, METH_VARARGS), - Define_Class_Method(derReadFile, cms_object_der_read_file, METH_VARARGS), - {NULL} -}; - -static char POW_CMS_Type__doc__[] = - "Wrapper for OpenSSL's CMS class. 
At present this only handes signed\n" - "objects, as those are the only kind of CMS objects used in RPKI.\n" - ; - -static PyTypeObject POW_CMS_Type = { - PyObject_HEAD_INIT(0) - 0, /* ob_size */ - "rpki.POW.CMS", /* tp_name */ - sizeof(cms_object), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)cms_object_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - POW_CMS_Type__doc__, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - cms_object_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - cms_object_new, /* tp_new */ -}; - - - -/* - * Manifest object. - */ - -static PyObject * -manifest_object_new(PyTypeObject *type, PyObject *args, PyObject *kwds) -{ - manifest_object *self = NULL; - - ENTERING(manifest_object_new); - - if ((self = (manifest_object *) cms_object_new(type, args, kwds)) != NULL && - (self->manifest = Manifest_new()) != NULL) - return (PyObject *) self; - - Py_XDECREF(self); - return NULL; -} - -static void -manifest_object_dealloc(manifest_object *self) -{ - ENTERING(manifest_object_dealloc); - Manifest_free(self->manifest); - cms_object_dealloc(&self->cms); -} - -static char manifest_object_verify__doc__[] = - "Verify this manifest. 
See the CMS class's .verify() method for details.\n" - ; - -static PyObject * -manifest_object_verify(manifest_object *self, PyObject *args, PyObject *kwds) -{ - BIO *bio = NULL; - int ok = 0; - - ENTERING(manifest_object_verify); - - if ((bio = cms_object_verify_helper(&self->cms, args, kwds)) == NULL) - goto error; - - if (!ASN1_item_d2i_bio(ASN1_ITEM_rptr(Manifest), bio, &self->manifest)) - lose_openssl_error("Couldn't decode manifest"); - - ok = 1; - - error: - BIO_free(bio); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static PyObject * -manifest_object_der_read_helper(PyTypeObject *type, BIO *bio) -{ - manifest_object *self; - - ENTERING(manifest_object_der_read_helper); - - if ((self = (manifest_object *) cms_object_der_read_helper(type, bio)) != NULL) - self->manifest = NULL; - - return (PyObject *) self; -} - -static char manifest_object_der_read__doc__[] = - "Read a DER-encoded manifest object from a string.\n" - ; - -static PyObject * -manifest_object_der_read(PyTypeObject *type, PyObject *args) -{ - ENTERING(manifest_object_der_read); - return read_from_string_helper(manifest_object_der_read_helper, type, args); -} - -static char manifest_object_der_read_file__doc__[] = - "Read a DER-encoded manifest object from a file.\n" - ; - -static PyObject * -manifest_object_der_read_file(PyTypeObject *type, PyObject *args) -{ - ENTERING(manifest_object_der_read_file); - return read_from_file_helper(manifest_object_der_read_helper, type, args); -} - -static PyObject * -manifest_object_pem_read_helper(PyTypeObject *type, BIO *bio) -{ - manifest_object *self; - - ENTERING(manifest_object_pem_read_helper); - - if ((self = (manifest_object *) cms_object_pem_read_helper(type, bio)) != NULL) - self->manifest = NULL; - - return (PyObject *) self; -} - -static char manifest_object_pem_read__doc__[] = - "Read a PEM-encoded manifest object from a string.\n" - ; - -static PyObject * -manifest_object_pem_read(PyTypeObject *type, PyObject *args) -{ - 
ENTERING(manifest_object_pem_read); - return read_from_string_helper(manifest_object_pem_read_helper, type, args); -} - -static char manifest_object_pem_read_file__doc__[] = - "Read a PEM-encoded manifest object from a file.\n" - ; - -static PyObject * -manifest_object_pem_read_file(PyTypeObject *type, PyObject *args) -{ - ENTERING(manifest_object_pem_read_file); - return read_from_file_helper(manifest_object_pem_read_helper, type, args); -} - -static char manifest_object_get_version__doc__[] = - "Return the version number of this manifest.\n" - ; - -static PyObject * -manifest_object_get_version(manifest_object *self) -{ - ENTERING(manifest_object_get_version); - - if (self->manifest == NULL) - lose_not_verified("Can't report version of unverified manifest"); - - if (self->manifest->version) - return Py_BuildValue("N", ASN1_INTEGER_to_PyLong(self->manifest->version)); - else - return PyInt_FromLong(0); - - error: - return NULL; -} - -static char manifest_object_set_version__doc__[] = - "Set the version number of this manifest.\n" - "\n" - "The \"version\" parameter should be a non-negative integer.\n" - "\n" - "As of this writing, zero is both the default and the only defined version.\n" - "Attempting to set any version number other than zero will fail, as we\n" - "don't understand how to write other versions, by definition.\n" - ; - -static PyObject * -manifest_object_set_version(manifest_object *self, PyObject *args) -{ - int version = 0; - - ENTERING(manifest_object_set_version); - - if (!PyArg_ParseTuple(args, "|i", &version)) - goto error; - - if (version != 0) - lose("RFC 6486 only defines RPKI manifest version zero"); - - if (self->manifest == NULL) - lose_not_verified("Can't set version of unverified manifest"); - - ASN1_INTEGER_free(self->manifest->version); - self->manifest->version = NULL; - - Py_RETURN_NONE; - - error: - return NULL; -} - -static char manifest_object_get_manifest_number__doc__[] = - "Return the manifestNumber of this manifest.\n" - ; - 
-static PyObject * -manifest_object_get_manifest_number(manifest_object *self) -{ - ENTERING(manifest_object_get_manifest_number); - - if (self->manifest == NULL) - lose_not_verified("Can't get manifestNumber of unverified manifest"); - - return Py_BuildValue("N", ASN1_INTEGER_to_PyLong(self->manifest->manifestNumber)); - - error: - return NULL; -} - -static char manifest_object_set_manifest_number__doc__[] = - "Set the manifestNumber of this manifest.\n" - "\n" - "The \"manifestNumber\" parameter should be a non-negative integer.\n" - ; - -static PyObject * -manifest_object_set_manifest_number(manifest_object *self, PyObject *args) -{ - PyObject *manifestNumber = NULL; - PyObject *zero = NULL; - int ok = 0; - - ENTERING(manifest_object_set_manifest_number); - - if (!PyArg_ParseTuple(args, "O", &manifestNumber)) - goto error; - - if ((zero = PyInt_FromLong(0)) == NULL) - goto error; - - switch (PyObject_RichCompareBool(manifestNumber, zero, Py_GE)) { - case -1: - goto error; - case 0: - lose("Negative manifest number is not allowed"); - } - - if (self->manifest == NULL) - lose_not_verified("Can't set manifestNumber of unverified manifest"); - - ASN1_INTEGER_free(self->manifest->manifestNumber); - - if ((self->manifest->manifestNumber = PyLong_to_ASN1_INTEGER(manifestNumber)) == NULL) - goto error; - - ok = 1; - - error: - Py_XDECREF(zero); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static char manifest_object_set_this_update__doc__[] = - "Set this manifest's \"thisUpdate\" value.\n" - "\n" - "The \"time\" parameter should be a datetime object.\n" - ; - -static PyObject * -manifest_object_set_this_update (manifest_object *self, PyObject *args) -{ - ASN1_TIME *t = NULL; - PyObject *o = NULL; - - ENTERING(manifest_object_set_this_update); - - if (!PyArg_ParseTuple(args, "O", &o)) - goto error; - - if (self->manifest == NULL) - lose_not_verified("Can't set thisUpdate value of unverified manifest"); - - if ((t = Python_to_ASN1_TIME(o, 0)) == NULL) - 
lose("Couldn't convert thisUpdate string"); - - ASN1_TIME_free(self->manifest->thisUpdate); - self->manifest->thisUpdate = t; - Py_RETURN_NONE; - - error: - ASN1_TIME_free(t); - return NULL; -} - -static char manifest_object_get_this_update__doc__[] = - "Return this manifest's \"thisUpdate\" value as a datetime.\n" - ; - -static PyObject * -manifest_object_get_this_update (manifest_object *self) -{ - ENTERING(manifest_object_get_this_update); - - if (self->manifest == NULL) - lose_not_verified("Can't get thisUpdate value of unverified manifest"); - - return ASN1_TIME_to_Python(self->manifest->thisUpdate); - - error: - return NULL; -} - -static char manifest_object_set_next_update__doc__[] = - "Set this manifest's \"nextUpdate\" value.\n" - "\n" - "The \"time\" parameter should be a datetime object.\n" - ; - -static PyObject * -manifest_object_set_next_update (manifest_object *self, PyObject *args) -{ - ASN1_TIME *t = NULL; - PyObject *o = NULL; - - ENTERING(manifest_object_set_next_update); - - if (!PyArg_ParseTuple(args, "O", &o)) - goto error; - - if (self->manifest == NULL) - lose_not_verified("Can't set nextUpdate value of unverified manifest"); - - if ((t = Python_to_ASN1_TIME(o, 0)) == NULL) - lose("Couldn't parse nextUpdate string"); - - ASN1_TIME_free(self->manifest->nextUpdate); - self->manifest->nextUpdate = t; - Py_RETURN_NONE; - - error: - ASN1_TIME_free(t); - return NULL; -} - -static char manifest_object_get_next_update__doc__[] = - "Return this manifest's \"nextUpdate\" value as a datetime.\n" - ; - -static PyObject * -manifest_object_get_next_update (manifest_object *self) -{ - ENTERING(manifest_object_get_next_update); - - if (self->manifest == NULL) - lose_not_verified("Can't extract nextUpdate value of unverified manifest"); - - return ASN1_TIME_to_Python(self->manifest->nextUpdate); - - error: - return NULL; -} - -static char manifest_object_get_algorithm__doc__[] = - "Return this manifest's fileHashAlg OID.\n" - ; - -static PyObject * 
-manifest_object_get_algorithm(manifest_object *self) -{ - PyObject *result = NULL; - - ENTERING(manifest_object_get_algorithm); - - if (self->manifest == NULL) - lose_not_verified("Can't extract algorithm OID of unverified manifest"); - - result = ASN1_OBJECT_to_PyString(self->manifest->fileHashAlg); - - error: - return result; -} - -static char manifest_object_set_algorithm__doc__[] = - "Set this manifest's fileHashAlg OID.\n" - ; - -static PyObject * -manifest_object_set_algorithm(manifest_object *self, PyObject *args) -{ - ASN1_OBJECT *oid = NULL; - const char *s = NULL; - - ENTERING(manifest_object_set_algorithm); - - if (!PyArg_ParseTuple(args, "s", &s)) - goto error; - - if (self->manifest == NULL) - lose_not_verified("Can't set algorithm OID for unverified manifest"); - - if ((oid = OBJ_txt2obj(s, 1)) == NULL) - lose_no_memory(); - - ASN1_OBJECT_free(self->manifest->fileHashAlg); - self->manifest->fileHashAlg = oid; - Py_RETURN_NONE; - - error: - ASN1_OBJECT_free(oid); - return NULL; -} - -static char manifest_object_add_files__doc__[] = - "Add a collection of pairs to this manifest.\n" - "\n" - "The \"iterable\" parameter should be an iterable object supplying\n" - "returning two-element sequences; the first element of each sequence\n" - "should be the filename (a text string), the second element should be the\n" - "hash (a binary string).\n" - ; - -static PyObject * -manifest_object_add_files(manifest_object *self, PyObject *args) -{ - PyObject *iterable = NULL; - PyObject *iterator = NULL; - PyObject *item = NULL; - PyObject *fast = NULL; - FileAndHash *fah = NULL; - char *file = NULL; - char *hash = NULL; - Py_ssize_t filelen, hashlen; - int ok = 0; - - ENTERING(manifest_object_add_files); - - if (self->manifest == NULL) - lose_not_verified("Can't add files to unverified manifest"); - - if (!PyArg_ParseTuple(args, "O", &iterable) || - (iterator = PyObject_GetIter(iterable)) == NULL) - goto error; - - while ((item = PyIter_Next(iterator)) != NULL) { - - 
if ((fast = PySequence_Fast(item, "FileAndHash entry must be a sequence")) == NULL) - goto error; - - if (PySequence_Fast_GET_SIZE(fast) != 2) - lose_type_error("FileAndHash entry must be two-element sequence"); - - if (PyString_AsStringAndSize(PySequence_Fast_GET_ITEM(fast, 0), &file, &filelen) < 0 || - PyString_AsStringAndSize(PySequence_Fast_GET_ITEM(fast, 1), &hash, &hashlen) < 0) - goto error; - - if ((fah = FileAndHash_new()) == NULL || - !ASN1_OCTET_STRING_set(fah->file, (unsigned char *) file, filelen) || - !ASN1_BIT_STRING_set(fah->hash, (unsigned char *) hash, hashlen) || - !sk_FileAndHash_push(self->manifest->fileList, fah)) - lose_no_memory(); - - fah->hash->flags &= ~7; - fah->hash->flags |= ASN1_STRING_FLAG_BITS_LEFT; - - fah = NULL; - Py_XDECREF(item); - Py_XDECREF(fast); - item = fast = NULL; - } - - ok = 1; - - error: - Py_XDECREF(iterator); - Py_XDECREF(item); - Py_XDECREF(fast); - FileAndHash_free(fah); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static char manifest_object_get_files__doc__[] = - "Return a tuple of pairs representing the contents of\n" - "this manifest.\n" - ; - -static PyObject * -manifest_object_get_files(manifest_object *self) -{ - PyObject *result = NULL; - PyObject *item = NULL; - int i; - - ENTERING(manifest_object_get_files); - - if (self->manifest == NULL) - lose_not_verified("Can't get files from unverified manifest"); - - if (self->manifest->fileList == NULL) - lose("Inexplicable NULL manifest fileList pointer"); - - if ((result = PyTuple_New(sk_FileAndHash_num(self->manifest->fileList))) == NULL) - goto error; - - for (i = 0; i < sk_FileAndHash_num(self->manifest->fileList); i++) { - FileAndHash *fah = sk_FileAndHash_value(self->manifest->fileList, i); - - item = Py_BuildValue("(s#s#)", - ASN1_STRING_data(fah->file), - (Py_ssize_t) ASN1_STRING_length(fah->file), - ASN1_STRING_data(fah->hash), - (Py_ssize_t) ASN1_STRING_length(fah->hash)); - if (item == NULL) - goto error; - - PyTuple_SET_ITEM(result, i, 
item); - item = NULL; - } - - return result; - - error: - Py_XDECREF(result); - Py_XDECREF(item); - return NULL; -} - -static char manifest_object_sign__doc__[] = - "Sign this manifest. See the CMS class's .sign() method for details.\n" - ; - -static PyObject * -manifest_object_sign(manifest_object *self, PyObject *args) -{ - asymmetric_object *signkey = NULL; - x509_object *signcert = NULL; - PyObject *x509_iterable = Py_None; - PyObject *crl_iterable = Py_None; - char *oid = NULL; - unsigned flags = 0; - BIO *bio = NULL; - int ok = 0; - - ENTERING(manifest_object_sign); - - if (!PyArg_ParseTuple(args, "O!O!|OOsI", - &POW_X509_Type, &signcert, - &POW_Asymmetric_Type, &signkey, - &x509_iterable, - &crl_iterable, - &oid, - &flags)) - goto error; - - if ((bio = BIO_new(BIO_s_mem())) == NULL) - lose_no_memory(); - - assert_no_unhandled_openssl_errors(); - - if (!ASN1_item_i2d_bio(ASN1_ITEM_rptr(Manifest), bio, self->manifest)) - lose_openssl_error("Couldn't encode manifest"); - - assert_no_unhandled_openssl_errors(); - - if (!cms_object_sign_helper(&self->cms, bio, signcert, signkey, - x509_iterable, crl_iterable, oid, flags)) - lose_openssl_error("Couldn't sign manifest"); - - assert_no_unhandled_openssl_errors(); - - ok = 1; - - error: - BIO_free(bio); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static struct PyMethodDef manifest_object_methods[] = { - Define_Method(getVersion, manifest_object_get_version, METH_NOARGS), - Define_Method(setVersion, manifest_object_set_version, METH_VARARGS), - Define_Method(getManifestNumber, manifest_object_get_manifest_number, METH_NOARGS), - Define_Method(setManifestNumber, manifest_object_set_manifest_number, METH_VARARGS), - Define_Method(getThisUpdate, manifest_object_get_this_update, METH_NOARGS), - Define_Method(setThisUpdate, manifest_object_set_this_update, METH_VARARGS), - Define_Method(getNextUpdate, manifest_object_get_next_update, METH_NOARGS), - Define_Method(setNextUpdate, 
manifest_object_set_next_update, METH_VARARGS), - Define_Method(getAlgorithm, manifest_object_get_algorithm, METH_NOARGS), - Define_Method(setAlgorithm, manifest_object_set_algorithm, METH_VARARGS), - Define_Method(getFiles, manifest_object_get_files, METH_NOARGS), - Define_Method(addFiles, manifest_object_add_files, METH_VARARGS), - Define_Method(sign, manifest_object_sign, METH_VARARGS), - Define_Method(verify, manifest_object_verify, METH_KEYWORDS), - Define_Class_Method(pemRead, manifest_object_pem_read, METH_VARARGS), - Define_Class_Method(pemReadFile, manifest_object_pem_read_file, METH_VARARGS), - Define_Class_Method(derRead, manifest_object_der_read, METH_VARARGS), - Define_Class_Method(derReadFile, manifest_object_der_read_file, METH_VARARGS), - {NULL} -}; - -static char POW_Manifest_Type__doc__[] = - "This class provides access to RPKI manifest payload.\n" - "Most methods are inherited from or share code with the CMS class.\n" - ; - -static PyTypeObject POW_Manifest_Type = { - PyObject_HEAD_INIT(0) - 0, /* ob_size */ - "rpki.POW.Manifest", /* tp_name */ - sizeof(manifest_object), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)manifest_object_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - POW_Manifest_Type__doc__, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - manifest_object_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - &POW_CMS_Type, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ 
- manifest_object_new, /* tp_new */ -}; - - - -/* - * ROA object. - */ - -static PyObject * -roa_object_new(PyTypeObject *type, PyObject *args, PyObject *kwds) -{ - roa_object *self = NULL; - - ENTERING(roa_object_new); - - if ((self = (roa_object *) cms_object_new(type, args, kwds)) != NULL && - (self->roa = ROA_new()) != NULL) - return (PyObject *) self; - - Py_XDECREF(self); - return NULL; -} - -static void -roa_object_dealloc(roa_object *self) -{ - ENTERING(roa_object_dealloc); - ROA_free(self->roa); - cms_object_dealloc(&self->cms); -} - -static char roa_object_verify__doc__[] = - "Verify this ROA. See CMS.verify() for details.\n" - ; - -static PyObject * -roa_object_verify(roa_object *self, PyObject *args, PyObject *kwds) -{ - BIO *bio = NULL; - int ok = 0; - - ENTERING(roa_object_verify); - - if ((bio = cms_object_verify_helper(&self->cms, args, kwds)) == NULL) - goto error; - - if (!ASN1_item_d2i_bio(ASN1_ITEM_rptr(ROA), bio, &self->roa)) - lose_openssl_error("Couldn't decode ROA"); - - ok = 1; - - error: - BIO_free(bio); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static PyObject * -roa_object_pem_read_helper(PyTypeObject *type, BIO *bio) -{ - roa_object *self; - - ENTERING(roa_object_pem_read_helper); - - if ((self = (roa_object *) cms_object_pem_read_helper(type, bio)) != NULL) - self->roa = NULL; - - return (PyObject *) self; -} - -static PyObject * -roa_object_der_read_helper(PyTypeObject *type, BIO *bio) -{ - roa_object *self; - - ENTERING(roa_object_der_read_helper); - - if ((self = (roa_object *) cms_object_der_read_helper(type, bio)) != NULL) - self->roa = NULL; - - return (PyObject *) self; -} - -static char roa_object_pem_read__doc__[] = - "Read a PEM-encoded ROA object from a string.\n" - ; - -static PyObject * -roa_object_pem_read(PyTypeObject *type, PyObject *args) -{ - ENTERING(roa_object_pem_read); - return read_from_string_helper(roa_object_pem_read_helper, type, args); -} - -static char roa_object_pem_read_file__doc__[] = - 
"Read a PEM-encoded ROA object from a file.\n" - ; - -static PyObject * -roa_object_pem_read_file(PyTypeObject *type, PyObject *args) -{ - ENTERING(roa_object_pem_read_file); - return read_from_file_helper(roa_object_pem_read_helper, type, args); -} - -static char roa_object_der_read__doc__[] = - "Read a DER-encoded ROA object from a string.\n" - ; - -static PyObject * -roa_object_der_read(PyTypeObject *type, PyObject *args) -{ - ENTERING(roa_object_der_read); - return read_from_string_helper(roa_object_der_read_helper, type, args); -} - -static char roa_object_der_read_file__doc__[] = - "Read a DER-encoded ROA object from a file.\n" - ; - -static PyObject * -roa_object_der_read_file(PyTypeObject *type, PyObject *args) -{ - ENTERING(roa_object_der_read_file); - return read_from_file_helper(roa_object_der_read_helper, type, args); -} - -static char roa_object_get_version__doc__[] = - "Return the version number of this ROA.\n" - ; - -static PyObject * -roa_object_get_version(roa_object *self) -{ - ENTERING(roa_object_get_version); - - if (self->roa == NULL) - lose_not_verified("Can't get version of unverified ROA"); - - if (self->roa->version) - return Py_BuildValue("N", ASN1_INTEGER_to_PyLong(self->roa->version)); - else - return PyInt_FromLong(0); - - error: - return NULL; -} - -static char roa_object_set_version__doc__[] = - "Set the version number of this ROA.\n" - "\n" - "The \"version\" parameter should be a non-negative integer.\n" - "\n" - "As of this writing, zero is both the default and the only defined version.\n" - "Attempting to set any version number other than zero will fail, as we\n" - "don't understand how to write other versions, by definition.\n" - ; - -static PyObject * -roa_object_set_version(roa_object *self, PyObject *args) -{ - int version = 0; - - ENTERING(roa_object_set_version); - - if (self->roa == NULL) - lose_not_verified("Can't set version of unverified ROA"); - - if (!PyArg_ParseTuple(args, "|i", &version)) - goto error; - - if 
(version != 0) - lose("RFC 6482 only defines ROA version zero"); - - ASN1_INTEGER_free(self->roa->version); - self->roa->version = NULL; - - Py_RETURN_NONE; - - error: - return NULL; -} - -static char roa_object_get_asid__doc__[] = - "Return the Autonomous System ID of this ROA.\n" - ; - -static PyObject * -roa_object_get_asid(roa_object *self) -{ - ENTERING(roa_object_get_asid); - - if (self->roa == NULL) - lose_not_verified("Can't get ASN of unverified ROA"); - - return Py_BuildValue("N", ASN1_INTEGER_to_PyLong(self->roa->asID)); - - error: - return NULL; -} - -static char roa_object_set_asid__doc__[] = - "Sets the Autonomous System ID of this ROA.\n" - "\n" - "The \"asID\" parameter should be a non-negative integer.\n" - ; - -static PyObject * -roa_object_set_asid(roa_object *self, PyObject *args) -{ - PyObject *asID = NULL; - PyObject *zero = NULL; - int ok = 0; - - ENTERING(roa_object_set_asid); - - if (self->roa == NULL) - lose_not_verified("Can't set ASN of unverified ROA"); - - if (!PyArg_ParseTuple(args, "O", &asID)) - goto error; - - if ((zero = PyInt_FromLong(0)) == NULL) - goto error; - - switch (PyObject_RichCompareBool(asID, zero, Py_GE)) { - case -1: - goto error; - case 0: - lose("Negative asID is not allowed"); - } - - ASN1_INTEGER_free(self->roa->asID); - - if ((self->roa->asID = PyLong_to_ASN1_INTEGER(asID)) == NULL) - goto error; - - ok = 1; - - error: - Py_XDECREF(zero); - - if (ok) - Py_RETURN_NONE; - else - return NULL; -} - -static char roa_object_get_prefixes__doc__[] = - "Return this ROA's prefix list. 
This is a two-element\n"
  "tuple: the first element is the IPv4 prefix set, the second is the\n"
  "IPv6 prefix set.\n"
  "\n"
  "Each prefix set is either None, if there are no prefixes for this IP\n"
  "version, or a sequence of three-element tuple representing ROA prefix\n"
  "entries.\n"
  "\n"
  "Each ROA prefix entry consists of the prefix itself (an IPAddress),\n"
  "the prefix length (an integer), and the maxPrefixLen value, which is\n"
  "either an integer or None depending on whether the maxPrefixLen value\n"
  "is set for this prefix.\n"
  ;

/*
 * .getPrefixes(): decode the ROA's ipAddrBlocks into the two-tuple of
 * per-AFI prefix tuples described in the docstring above.  Each
 * ROAIPAddress BIT STRING is unpacked into a freshly allocated IPAddress
 * object; unused trailing bits are masked off defensively.
 */
static PyObject *
roa_object_get_prefixes(roa_object *self)
{
  PyObject *result = NULL;
  PyObject *ipv4_result = NULL;
  PyObject *ipv6_result = NULL;
  PyObject *item = NULL;
  ipaddress_object *addr = NULL;
  int i, j;

  ENTERING(roa_object_get_prefixes);

  if (self->roa == NULL)
    lose_not_verified("Can't get prefixes from unverified ROA");

  for (i = 0; i < sk_ROAIPAddressFamily_num(self->roa->ipAddrBlocks); i++) {
    ROAIPAddressFamily *fam = sk_ROAIPAddressFamily_value(self->roa->ipAddrBlocks, i);
    /* AFI is a two-octet big-endian value at the front of addressFamily. */
    const unsigned afi = (fam->addressFamily->data[0] << 8) | (fam->addressFamily->data[1]);
    const ipaddress_version *ip_type = NULL;
    PyObject **resultp = NULL;

    switch (afi) {
    case IANA_AFI_IPV4: resultp = &ipv4_result; ip_type = &ipaddress_version_4; break;
    case IANA_AFI_IPV6: resultp = &ipv6_result; ip_type = &ipaddress_version_6; break;
    default: lose_type_error("Unknown AFI");
    }

    /* A third octet would be a SAFI, which RPKI does not use. */
    if (fam->addressFamily->length > 2)
      lose_type_error("Unsupported SAFI");

    if (*resultp != NULL)
      lose_type_error("Duplicate ROAIPAddressFamily");

    if ((*resultp = PyTuple_New(sk_ROAIPAddress_num(fam->addresses))) == NULL)
      goto error;

    for (j = 0; j < sk_ROAIPAddress_num(fam->addresses); j++) {
      ROAIPAddress *a = sk_ROAIPAddress_value(fam->addresses, j);
      /* Prefix length = total bits minus the BIT STRING's unused-bit count. */
      unsigned prefixlen = ((a->IPAddress)->length * 8 - ((a->IPAddress)->flags & 7));

      if ((addr = (ipaddress_object *) POW_IPAddress_Type.tp_alloc(&POW_IPAddress_Type, 0)) == NULL)
        goto error;

      addr->type = ip_type;

      memset(addr->address, 0, sizeof(addr->address));

      if ((unsigned) a->IPAddress->length > addr->type->length)
        lose("ROAIPAddress BIT STRING too long for AFI");

      if (a->IPAddress->length > 0) {
        memcpy(addr->address, a->IPAddress->data, a->IPAddress->length);

        /* Zero any unused bits in the final octet. */
        if ((a->IPAddress->flags & 7) != 0) {
          unsigned char mask = 0xFF >> (8 - (a->IPAddress->flags & 7));
          addr->address[a->IPAddress->length - 1] &= ~mask;
        }
      }

      /* "N" steals the reference to addr on success. */
      if (a->maxLength == NULL)
        item = Py_BuildValue("(NIO)", addr, prefixlen, Py_None);
      else
        item = Py_BuildValue("(NIl)", addr, prefixlen, ASN1_INTEGER_get(a->maxLength));

      if (item == NULL)
        goto error;

      PyTuple_SET_ITEM(*resultp, j, item);
      item = NULL;
      addr = NULL;
    }
  }

  result = Py_BuildValue("(OO)",
                         (ipv4_result == NULL ? Py_None : ipv4_result),
                         (ipv6_result == NULL ? Py_None : ipv6_result));

 error:                          /* Fall through */
  Py_XDECREF(addr);
  Py_XDECREF(item);
  Py_XDECREF(ipv4_result);
  Py_XDECREF(ipv6_result);

  return result;
}

static char roa_object_set_prefixes__doc__[] =
  "Set this ROA's prefix list.\n"
  "\n"
  "This method takes two arguments, \"ipv4\" and \"ipv6\". Each of these\n"
  "is either None, if no prefixes should be set for this IP version, or\n"
  "an iterable object returning ROA prefix entries in the same format as\n"
  "returned by the .getPrefixes() method. 
The maxPrefixLen value may be\n"
  "omitted (that is, the ROA prefix entry tuple may be of length two\n"
  "rather than of length three); this will be taken as equivalent to\n"
  "specifying a maxPrefixLen value of None.\n"
  ;

/*
 * .setPrefixes(ipv4, ipv6): rebuild the ROA's ipAddrBlocks from Python
 * iterables of (IPAddress, prefixlen[, maxPrefixLen]) tuples.  The old
 * stack is only replaced once the new one has been fully constructed,
 * so a failure part-way through leaves the ROA unchanged.
 */
static PyObject *
roa_object_set_prefixes(roa_object *self, PyObject *args, PyObject *kwds)
{
  static char *kwlist[] = {"ipv4", "ipv6", NULL};
  STACK_OF(ROAIPAddressFamily) *prefixes = NULL;
  ROAIPAddressFamily *fam = NULL;
  ROAIPAddress *a = NULL;
  PyObject *ipv4_arg = Py_None;
  PyObject *ipv6_arg = Py_None;
  PyObject *iterator = NULL;
  PyObject *item = NULL;
  PyObject *fast = NULL;
  int ok = 0;
  int v;

  ENTERING(roa_object_set_prefixes);

  if (self->roa == NULL)
    lose_not_verified("Can't set prefixes of unverified ROA");

  if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OO", kwlist, &ipv4_arg, &ipv6_arg))
    goto error;

  if ((prefixes = sk_ROAIPAddressFamily_new_null()) == NULL)
    lose_no_memory();

  /* One pass per supported IP version, in table order (IPv4 then IPv6). */
  for (v = 0; v < (int) (sizeof(ipaddress_versions)/sizeof(*ipaddress_versions)); v++) {
    const struct ipaddress_version *ip_type = ipaddress_versions[v];
    unsigned char afibuf[2];
    PyObject **argp;

    switch (ip_type->version) {
    case 4: argp = &ipv4_arg; break;
    case 6: argp = &ipv6_arg; break;
    default: continue;
    }

    if (*argp == Py_None)
      continue;

    /* Two-octet big-endian AFI, no SAFI. */
    afibuf[0] = (ip_type->afi >> 8) & 0xFF;
    afibuf[1] = (ip_type->afi ) & 0xFF;

    if ((iterator = PyObject_GetIter(*argp)) == NULL)
      goto error;

    while ((item = PyIter_Next(iterator)) != NULL) {
      unsigned prefixlen, maxprefixlen, bitlen, bytelen;
      ipaddress_object *addr = NULL;
      PyObject *maxlenobj = Py_None;

      if ((fast = PySequence_Fast(item, "ROA prefix must be a sequence")) == NULL)
        goto error;

      switch (PySequence_Fast_GET_SIZE(fast)) {
      case 3:
        maxlenobj = PySequence_Fast_GET_ITEM(fast, 2);
        /* Fall through */
      case 2:
        if (!POW_IPAddress_Check(PySequence_Fast_GET_ITEM(fast, 0)))
          lose_type_error("First element of ROA prefix must be an IPAddress object");
        addr = (ipaddress_object *) PySequence_Fast_GET_ITEM(fast, 0);
        prefixlen = (unsigned) PyInt_AsLong(PySequence_Fast_GET_ITEM(fast, 1));
        if (PyErr_Occurred())
          goto error;
        break;
      default:
        lose_type_error("ROA prefix must be a two- or three-element sequence");
      }

      /* Omitted/None maxPrefixLen means "same as prefixlen" (and the
       * maxLength field is then left out of the encoding entirely). */
      if (maxlenobj == Py_None) {
        maxprefixlen = prefixlen;
      } else {
        maxprefixlen = (unsigned) PyInt_AsLong(maxlenobj);
        if (PyErr_Occurred())
          goto error;
      }

      if (addr->type != ip_type)
        lose_type_error("Bad ROA prefix");

      if (prefixlen > addr->type->length * 8)
        lose("Bad prefix length");

      if (maxprefixlen > addr->type->length * 8 || maxprefixlen < prefixlen)
        lose("Bad maxLength value");

      bytelen = (prefixlen + 7) / 8;
      bitlen = prefixlen % 8;

      if ((a = ROAIPAddress_new()) == NULL ||
          (a->IPAddress == NULL && (a->IPAddress = ASN1_BIT_STRING_new()) == NULL) ||
          !ASN1_BIT_STRING_set(a->IPAddress, addr->address, bytelen))
        lose_no_memory();

      /* Record the count of unused bits in the final octet and zero them,
       * as DER requires for BIT STRING. */
      a->IPAddress->flags &= ~7;
      a->IPAddress->flags |= ASN1_STRING_FLAG_BITS_LEFT;
      if (bitlen > 0) {
        a->IPAddress->data[bytelen - 1] &= ~(0xFF >> bitlen);
        a->IPAddress->flags |= 8 - bitlen;
      }

      if (prefixlen != maxprefixlen &&
          ((a->maxLength = ASN1_INTEGER_new()) == NULL ||
           !ASN1_INTEGER_set(a->maxLength, maxprefixlen)))
        lose_no_memory();

      /* Lazily create the per-AFI family on first prefix for this version. */
      if (fam == NULL &&
          ((fam = ROAIPAddressFamily_new()) == NULL ||
           !sk_ROAIPAddressFamily_push(prefixes, fam) ||
           !ASN1_OCTET_STRING_set(fam->addressFamily, afibuf, sizeof(afibuf))))
        lose_no_memory();

      if (!sk_ROAIPAddress_push(fam->addresses, a))
        lose_no_memory();

      a = NULL;
      Py_XDECREF(item);
      Py_XDECREF(fast);
      item = fast = NULL;
    }

    fam = NULL;
    Py_XDECREF(iterator);
    iterator = NULL;
  }

  /* Commit: swap the fully built stack in, free the old one. */
  sk_ROAIPAddressFamily_pop_free(self->roa->ipAddrBlocks, ROAIPAddressFamily_free);
  self->roa->ipAddrBlocks = prefixes;
  prefixes = NULL;

  ok = 1;

 error:
  sk_ROAIPAddressFamily_pop_free(prefixes, ROAIPAddressFamily_free);
ROAIPAddressFamily_free(fam);
  ROAIPAddress_free(a);
  Py_XDECREF(iterator);
  Py_XDECREF(item);
  Py_XDECREF(fast);

  if (ok)
    Py_RETURN_NONE;
  else
    return NULL;
}

static char roa_object_sign__doc__[] =
  "Sign this ROA. See CMS.sign() for details.\n"
  ;

/*
 * .sign(): DER-encode the ROA eContent into a memory BIO, then hand it
 * to the shared CMS signing helper to produce the signed object.
 */
static PyObject *
roa_object_sign(roa_object *self, PyObject *args)
{
  asymmetric_object *signkey = NULL;
  x509_object *signcert = NULL;
  PyObject *x509_iterable = Py_None;
  PyObject *crl_iterable = Py_None;
  char *oid = NULL;
  unsigned flags = 0;
  BIO *bio = NULL;
  int ok = 0;

  ENTERING(roa_object_sign);

  if (!PyArg_ParseTuple(args, "O!O!|OOsI",
                        &POW_X509_Type, &signcert,
                        &POW_Asymmetric_Type, &signkey,
                        &x509_iterable,
                        &crl_iterable,
                        &oid,
                        &flags))
    goto error;

  if ((bio = BIO_new(BIO_s_mem())) == NULL)
    lose_no_memory();

  assert_no_unhandled_openssl_errors();

  if (!ASN1_item_i2d_bio(ASN1_ITEM_rptr(ROA), bio, self->roa))
    lose_openssl_error("Couldn't encode ROA");

  assert_no_unhandled_openssl_errors();

  if (!cms_object_sign_helper(&self->cms, bio, signcert, signkey,
                              x509_iterable, crl_iterable, oid, flags))
    lose_openssl_error("Couldn't sign ROA");

  assert_no_unhandled_openssl_errors();

  ok = 1;

 error:
  BIO_free(bio);

  if (ok)
    Py_RETURN_NONE;
  else
    return NULL;
}

/*
 * Method table for the ROA class.  verify/pemRead and the CMS machinery
 * are shared with the CMS base class (see tp_base below).
 */
static struct PyMethodDef roa_object_methods[] = {
  Define_Method(getVersion, roa_object_get_version, METH_NOARGS),
  Define_Method(setVersion, roa_object_set_version, METH_VARARGS),
  Define_Method(getASID, roa_object_get_asid, METH_NOARGS),
  Define_Method(setASID, roa_object_set_asid, METH_VARARGS),
  Define_Method(getPrefixes, roa_object_get_prefixes, METH_NOARGS),
  Define_Method(setPrefixes, roa_object_set_prefixes, METH_KEYWORDS),
  Define_Method(sign, roa_object_sign, METH_VARARGS),
  Define_Method(verify, roa_object_verify, METH_KEYWORDS),
  Define_Class_Method(pemRead, roa_object_pem_read, METH_VARARGS),
  Define_Class_Method(pemReadFile, 
roa_object_pem_read_file, METH_VARARGS),
  Define_Class_Method(derRead, roa_object_der_read, METH_VARARGS),
  Define_Class_Method(derReadFile, roa_object_der_read_file, METH_VARARGS),
  {NULL}
};

static char POW_ROA_Type__doc__[] =
  "This class provides access to RPKI ROA payload.\n"
  "Most methods are inherited from or share code with the CMS class.\n"
  ;

/*
 * Python type object for the ROA class.  Inherits from POW_CMS_Type
 * (tp_base), so CMS methods not overridden above are available too.
 */
static PyTypeObject POW_ROA_Type = {
  PyObject_HEAD_INIT(0)
  0,                                        /* ob_size */
  "rpki.POW.ROA",                           /* tp_name */
  sizeof(roa_object),                       /* tp_basicsize */
  0,                                        /* tp_itemsize */
  (destructor)roa_object_dealloc,           /* tp_dealloc */
  0,                                        /* tp_print */
  0,                                        /* tp_getattr */
  0,                                        /* tp_setattr */
  0,                                        /* tp_compare */
  0,                                        /* tp_repr */
  0,                                        /* tp_as_number */
  0,                                        /* tp_as_sequence */
  0,                                        /* tp_as_mapping */
  0,                                        /* tp_hash */
  0,                                        /* tp_call */
  0,                                        /* tp_str */
  0,                                        /* tp_getattro */
  0,                                        /* tp_setattro */
  0,                                        /* tp_as_buffer */
  Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
  POW_ROA_Type__doc__,                      /* tp_doc */
  0,                                        /* tp_traverse */
  0,                                        /* tp_clear */
  0,                                        /* tp_richcompare */
  0,                                        /* tp_weaklistoffset */
  0,                                        /* tp_iter */
  0,                                        /* tp_iternext */
  roa_object_methods,                       /* tp_methods */
  0,                                        /* tp_members */
  0,                                        /* tp_getset */
  &POW_CMS_Type,                            /* tp_base */
  0,                                        /* tp_dict */
  0,                                        /* tp_descr_get */
  0,                                        /* tp_descr_set */
  0,                                        /* tp_dictoffset */
  0,                                        /* tp_init */
  0,                                        /* tp_alloc */
  roa_object_new,                           /* tp_new */
};



/*
 * PKCS10 object. 
- */ - -static PyObject * -pkcs10_object_new(PyTypeObject *type, GCC_UNUSED PyObject *args, GCC_UNUSED PyObject *kwds) -{ - pkcs10_object *self; - - ENTERING(pkcs10_object_new); - - if ((self = (pkcs10_object *) type->tp_alloc(type, 0)) != NULL && - (self->pkcs10 = X509_REQ_new()) != NULL && - (self->exts = sk_X509_EXTENSION_new_null()) != NULL) - return (PyObject *) self; - - Py_XDECREF(self); - return NULL; -} - -static void -pkcs10_object_dealloc(pkcs10_object *self) -{ - ENTERING(pkcs10_object_dealloc); - X509_REQ_free(self->pkcs10); - sk_X509_EXTENSION_pop_free(self->exts, X509_EXTENSION_free); - self->ob_type->tp_free((PyObject*) self); -} - -static PyObject * -pkcs10_object_pem_read_helper(PyTypeObject *type, BIO *bio) -{ - pkcs10_object *self = NULL; - - ENTERING(pkcs10_object_pem_read_helper); - - assert_no_unhandled_openssl_errors(); - - if ((self = (pkcs10_object *) pkcs10_object_new(type, NULL, NULL)) == NULL) - goto error; - - assert_no_unhandled_openssl_errors(); - - if (!PEM_read_bio_X509_REQ(bio, &self->pkcs10, NULL, NULL)) - lose_openssl_error("Couldn't load PEM encoded PKCS#10 request"); - - sk_X509_EXTENSION_pop_free(self->exts, X509_EXTENSION_free); - self->exts = X509_REQ_get_extensions(self->pkcs10); - - assert_no_unhandled_openssl_errors(); - - return (PyObject *) self; - - error: - - Py_XDECREF(self); - return NULL; -} - -static PyObject * -pkcs10_object_der_read_helper(PyTypeObject *type, BIO *bio) -{ - pkcs10_object *self = NULL; - - ENTERING(pkcs10_object_der_read_helper); - - assert_no_unhandled_openssl_errors(); - - if ((self = (pkcs10_object *) pkcs10_object_new(type, NULL, NULL)) == NULL) - goto error; - - assert_no_unhandled_openssl_errors(); - - if (!d2i_X509_REQ_bio(bio, &self->pkcs10)) - lose_openssl_error("Couldn't load DER encoded PKCS#10 request"); - - sk_X509_EXTENSION_pop_free(self->exts, X509_EXTENSION_free); - self->exts = X509_REQ_get_extensions(self->pkcs10); - - assert_no_unhandled_openssl_errors(); - - return (PyObject 
*) self;

 error:
  Py_XDECREF(self);
  return NULL;
}

static char pkcs10_object_pem_read__doc__[] =
  "Read a PEM-encoded PKCS#10 object from a string.\n"
  ;

/* Class method: construct a PKCS10 object from a PEM string. */
static PyObject *
pkcs10_object_pem_read(PyTypeObject *type, PyObject *args)
{
  ENTERING(pkcs10_object_pem_read);
  return read_from_string_helper(pkcs10_object_pem_read_helper, type, args);
}

static char pkcs10_object_pem_read_file__doc__[] =
  "Read a PEM-encoded PKCS#10 object from a file.\n"
  ;

/* Class method: construct a PKCS10 object from a PEM file. */
static PyObject *
pkcs10_object_pem_read_file(PyTypeObject *type, PyObject *args)
{
  ENTERING(pkcs10_object_pem_read_file);
  return read_from_file_helper(pkcs10_object_pem_read_helper, type, args);
}

static char pkcs10_object_der_read__doc__[] =
  "Read a DER-encoded PKCS#10 object from a string.\n"
  ;

/* Class method: construct a PKCS10 object from a DER string. */
static PyObject *
pkcs10_object_der_read(PyTypeObject *type, PyObject *args)
{
  ENTERING(pkcs10_object_der_read);
  return read_from_string_helper(pkcs10_object_der_read_helper, type, args);
}

static char pkcs10_object_der_read_file__doc__[] =
  "Read a DER-encoded PKCS#10 object from a file.\n"
  ;

/* Class method: construct a PKCS10 object from a DER file. */
static PyObject *
pkcs10_object_der_read_file(PyTypeObject *type, PyObject *args)
{
  ENTERING(pkcs10_object_der_read_file);
  return read_from_file_helper(pkcs10_object_der_read_helper, type, args);
}

static char pkcs10_object_pem_write__doc__[] =
  "Returns the PEM encoding of this PKCS#10 object.\n"
  ;

/* .pemWrite(): serialize the request as PEM into a Python string. */
static PyObject *
pkcs10_object_pem_write(pkcs10_object *self)
{
  PyObject *result = NULL;
  BIO *bio = NULL;

  ENTERING(pkcs10_object_pem_write);

  if ((bio = BIO_new(BIO_s_mem())) == NULL)
    lose_no_memory();

  if (!PEM_write_bio_X509_REQ(bio, self->pkcs10))
    lose_openssl_error("Unable to write PKCS#10 request");

  result = BIO_to_PyString_helper(bio);

 error:                          /* Fall through */
  BIO_free(bio);
  return result;
}

static char pkcs10_object_der_write__doc__[] =
  "Return the DER encoding of this PKCS#10 object.\n"
  ;

static PyObject * 
pkcs10_object_der_write(pkcs10_object *self)
{
  PyObject *result = NULL;
  BIO *bio = NULL;

  ENTERING(pkcs10_object_der_write);

  if ((bio = BIO_new(BIO_s_mem())) == NULL)
    lose_no_memory();

  if (!i2d_X509_REQ_bio(bio, self->pkcs10))
    lose_openssl_error("Unable to write PKCS#10 request");

  result = BIO_to_PyString_helper(bio);

 error:                          /* Fall through */
  BIO_free(bio);
  return result;
}

/*
 * Adapter so the generic extension_get_*/extension_set_* helpers can
 * reach this object's cached extension stack.
 */
static X509_EXTENSIONS **
pkcs10_object_extension_helper(pkcs10_object *self)
{
  return &self->exts;
}

static char pkcs10_object_get_public_key__doc__[] =
  "Return the public key from this PKCS#10 request, as an Asymmetric\n"
  "object.\n"
  ;

/* .getPublicKey(): wrap X509_REQ_get_pubkey() in an Asymmetric object. */
static PyObject *
pkcs10_object_get_public_key(pkcs10_object *self)
{
  PyTypeObject *type = &POW_Asymmetric_Type;
  asymmetric_object *asym = NULL;

  ENTERING(pkcs10_object_get_public_key);

  if ((asym = (asymmetric_object *) type->tp_alloc(type, 0)) == NULL)
    goto error;

  if ((asym->pkey = X509_REQ_get_pubkey(self->pkcs10)) == NULL)
    lose_openssl_error("Couldn't extract public key from PKCS#10 request");

  return (PyObject *) asym;

 error:
  Py_XDECREF(asym);
  return NULL;
}

static char pkcs10_object_set_public_key__doc__[] =
  "Set the public key for this PKCS#10 request.\n"
  "\n"
  "The \"key\" parameter should be an instance of the Asymmetric class,\n"
  "containing a public key.\n"
  ;

/*
 * .setPublicKey(): install the given Asymmetric key into the request.
 * NOTE(review): the error message below looks like a copy/paste slip --
 * presumably meant something like "Couldn't set PKCS#10 request's
 * public key"; confirm before changing user-visible text.
 */
static PyObject *
pkcs10_object_set_public_key(pkcs10_object *self, PyObject *args)
{
  asymmetric_object *asym;

  ENTERING(pkcs10_object_set_public_key);

  if (!PyArg_ParseTuple(args, "O!", &POW_Asymmetric_Type, &asym))
    goto error;

  if (!X509_REQ_set_pubkey(self->pkcs10, asym->pkey))
    lose_openssl_error("Couldn't set certificate's PKCS#10 request");

  Py_RETURN_NONE;

 error:
  return NULL;
}

static char pkcs10_object_sign__doc__[] =
  "Sign a PKCS#10 request with a private key.\n"
  "\n"
  "The \"key\" parameter should be an instance of the Asymmetric class,\n"
  "containing 
a private key.\n"
  "\n"
  "The optional \"digest\" parameter indicates which digest to compute and\n"
  "sign, and should be one of the following:\n"
  "\n"
  "* MD5_DIGEST\n"
  "* SHA_DIGEST\n"
  "* SHA1_DIGEST\n"
  "* SHA256_DIGEST\n"
  "* SHA384_DIGEST\n"
  "* SHA512_DIGEST\n"
  "\n"
  "The default digest algorithm is SHA-256.\n"
  ;

/*
 * .sign(): strip any stale extension-request attributes, splice in the
 * cached extension stack (if non-empty), then self-sign the request.
 */
static PyObject *
pkcs10_object_sign(pkcs10_object *self, PyObject *args)
{
  asymmetric_object *asym;
  int loc, digest_type = SHA256_DIGEST;
  const EVP_MD *digest_method = NULL;

  ENTERING(pkcs10_object_sign);

  if (!PyArg_ParseTuple(args, "O!|i", &POW_Asymmetric_Type, &asym, &digest_type))
    goto error;

  if ((digest_method = evp_digest_factory(digest_type)) == NULL)
    lose("Unsupported digest algorithm");

  /* Remove all existing extension-request attributes so the cached
   * extension stack is the single source of truth. */
  while ((loc = X509_REQ_get_attr_by_NID(self->pkcs10, NID_ext_req, -1)) >= 0)
    X509_ATTRIBUTE_free(X509_REQ_delete_attr(self->pkcs10, loc));

  if (sk_X509_EXTENSION_num(self->exts) > 0 &&
      !X509_REQ_add_extensions(self->pkcs10, self->exts))
    lose_openssl_error("Couldn't add extensions block to PKCS#10 request");

  if (!X509_REQ_sign(self->pkcs10, asym->pkey, digest_method))
    lose_openssl_error("Couldn't sign PKCS#10 request");

  Py_RETURN_NONE;

 error:
  return NULL;
}

static char pkcs10_object_verify__doc__[] =
  "Verify a PKCS#10 request.\n"
  "\n"
  "This calls OpenSSL's X509_REQ_verify() method to check the request's\n"
  "self-signature.\n"
  ;

/*
 * .verify(): returns True/False from X509_REQ_verify(); negative return
 * (internal error) raises instead.
 */
static PyObject *
pkcs10_object_verify(pkcs10_object *self)
{
  EVP_PKEY *pkey = NULL;
  int status;

  ENTERING(pkcs10_object_verify);

  if ((pkey = X509_REQ_get_pubkey(self->pkcs10)) == NULL)
    lose_openssl_error("Couldn't extract public key from PKCS#10 for verification");

  if ((status = X509_REQ_verify(self->pkcs10, pkey)) < 0)
    lose_openssl_error("Couldn't verify PKCS#10 signature");

  EVP_PKEY_free(pkey);
  return PyBool_FromLong(status);

 error:
  EVP_PKEY_free(pkey);
  return NULL;
}

static char 
pkcs10_object_get_version__doc__[] =
  "Return the version number of this PKCS#10 request.\n"
  ;

/* .getVersion(): straight passthrough to X509_REQ_get_version(). */
static PyObject *
pkcs10_object_get_version(pkcs10_object *self)
{
  ENTERING(pkcs10_object_get_version);
  return Py_BuildValue("l", X509_REQ_get_version(self->pkcs10));
}

static char pkcs10_object_set_version__doc__[] =
  "Set the version number of this PKCS#10 request.\n"
  "\n"
  "The \"version\" parameter should be an integer, but the only defined\n"
  "value is zero, so this field is optional and defaults to zero.\n"
;

/* .setVersion(): only zero is accepted, per the RPKI profile. */
static PyObject *
pkcs10_object_set_version(pkcs10_object *self, PyObject *args)
{
  long version = 0;

  ENTERING(pkcs10_object_set_version);

  if (!PyArg_ParseTuple(args, "|l", &version))
    goto error;

  if (version != 0)
    lose("RFC 6487 6.1.1 forbids non-zero values for this field");

  if (!X509_REQ_set_version(self->pkcs10, version))
    lose("Couldn't set certificate version");

  Py_RETURN_NONE;

 error:

  return NULL;
}

static char pkcs10_object_get_subject__doc__[] =
  "Return this PKCS #10 request's subject name.\n"
  "\n"
  "See the X509.getIssuer() method for details of the return value and\n"
  "use of the optional \"format\" parameter.\n"
  ;

/* .getSubject(): convert subject X509_NAME via the shared name helper. */
static PyObject *
pkcs10_object_get_subject(pkcs10_object *self, PyObject *args)
{
  PyObject *result = NULL;
  int format = OIDNAME_FORMAT;

  ENTERING(pkcs10_object_get_subject);

  if (!PyArg_ParseTuple(args, "|i", &format))
    goto error;

  result = x509_object_helper_get_name(X509_REQ_get_subject_name(self->pkcs10),
                                       format);

 error:                          /* Fall through */
  return result;
}

static char pkcs10_object_set_subject__doc__[] =
  "Set this PKCS#10 request's subject name.\n"
  "\n"
  "The \"name\" parameter should be in the same format as the return\n"
  "value from the \"getSubject\" method.\n"
  ;

/*
 * .setSubject(): build an X509_NAME from the Python sequence and install
 * it (X509_REQ_set_subject_name copies, so we free our own).
 */
static PyObject *
pkcs10_object_set_subject(pkcs10_object *self, PyObject *args)
{
  PyObject *name_sequence = NULL;
  X509_NAME *name = NULL;

  
ENTERING(pkcs10_object_set_subject);

  if (!PyArg_ParseTuple(args, "O", &name_sequence))
    goto error;

  /* NOTE(review): docstring-visible typo "Inapropriate" in this message. */
  if (!PySequence_Check(name_sequence))
    lose_type_error("Inapropriate type");

  if ((name = x509_object_helper_set_name(name_sequence)) == NULL)
    goto error;

  if (!X509_REQ_set_subject_name(self->pkcs10, name))
    lose("Unable to set subject name");

  X509_NAME_free(name);

  Py_RETURN_NONE;

 error:
  X509_NAME_free(name);
  return NULL;
}

static char pkcs10_object_get_key_usage__doc__[] =
  "Return a FrozenSet of strings representing the KeyUsage settings for\n"
  "this PKCS#10 request, or None if the request has no KeyUsage\n"
  "extension. The bits have the same names as in RFC 5280.\n"
  ;

/* .getKeyUsage(): delegates to the shared KeyUsage extension helper. */
static PyObject *
pkcs10_object_get_key_usage(pkcs10_object *self)
{
  return extension_get_key_usage(pkcs10_object_extension_helper(self));
}

static char pkcs10_object_set_key_usage__doc__[] =
  "Set the KeyUsage extension for this PKCS#10 request.\n"
  "\n"
  "Argument \"iterable\" should be an iterable object which returns zero or more\n"
  "strings naming bits to be enabled. The bits have the same names as in RFC 5280.\n"
  "\n"
  "Optional argument \"critical\" is a boolean indicating whether the extension\n"
  "should be marked as critical or not. 
RFC 5280 4.2.1.3 says this extension SHOULD\n"
  "be marked as critical when used, so the default is True.\n"
  ;

/* .setKeyUsage(): delegates to the shared KeyUsage extension helper. */
static PyObject *
pkcs10_object_set_key_usage(pkcs10_object *self, PyObject *args)
{
  return extension_set_key_usage(pkcs10_object_extension_helper(self), args);
}

static char pkcs10_object_get_eku__doc__[] =
  "Return a FrozenSet of object identifiers representing the\n"
  "ExtendedKeyUsage settings for this PKCS #10 requst, or None if\n"
  "the request has no ExtendedKeyUsage extension.\n"
  ;
/* NOTE(review): "requst" typo in the docstring above. */

/* .getEKU(): delegates to the shared ExtendedKeyUsage helper. */
static PyObject *
pkcs10_object_get_eku(pkcs10_object *self)
{
  return extension_get_eku(pkcs10_object_extension_helper(self));
}

static char pkcs10_object_set_eku__doc__[] =
  "Set the ExtendedKeyUsage extension for this PKCS #10 request.\n"
  "\n"
  "Argument \"iterable\" should be an iterable object which returns one or more\n"
  "object identifiers.\n"
  "\n"
  "Optional argument \"critical\" is a boolean indicating whether the extension\n"
  "should be marked as critical or not. RFC 6487 4.8.5 says this extension\n"
  "MUST NOT be marked as non-critical when used, so the default is False.\n"
  ;

/* .setEKU(): delegates to the shared ExtendedKeyUsage helper. */
static PyObject *
pkcs10_object_set_eku(pkcs10_object *self, PyObject *args)
{
  return extension_set_eku(pkcs10_object_extension_helper(self), args);
}

static char pkcs10_object_get_basic_constraints__doc__[] =
  "Return BasicConstraints value for this PKCS#10 request.\n"
  "\n"
  "If this request has no BasicConstraints extension, this method returns\n"
  "None.\n"
  "\n"
  "Otherwise, this method returns a two-element tuple. 
The first element\n"
  "of the tuple is a boolean representing the extension's cA value; the\n"
  "second element of the tuple is either an integer representing\n"
  "thepathLenConstraint value or None if there is no pathLenConstraint.\n"
  ;
/* NOTE(review): "thepathLenConstraint" typo (missing space) above. */

/* .getBasicConstraints(): delegates to the shared extension helper. */
static PyObject *
pkcs10_object_get_basic_constraints(pkcs10_object *self)
{
  return extension_get_basic_constraints(pkcs10_object_extension_helper(self));
}

static char pkcs10_object_set_basic_constraints__doc__[] =
  "Set BasicConstraints value for this PKCS#10 request.\n"
  "\n"
  "First argument \"ca\" is a boolean indicating whether the request\n"
  "is for a CA certificate or not.\n"
  "\n"
  "Optional second argument \"pathLenConstraint\" is None or a\n"
  "non-negative integer specifying the pathLenConstraint value for this\n"
  "certificate. Per RFC 5280, this value may only be set to an integer\n"
  "value for CA certificates."
  "\n"
  "Optional third argument \"critical\" specifies whether the extension\n"
  "should be marked as critical. 
RFC 5280 4.2.1.9 requires that CA\n"
  "certificates mark this extension as critical, so the default is True.\n"
  ;

/* .setBasicConstraints(): delegates to the shared extension helper. */
static PyObject *
pkcs10_object_set_basic_constraints(pkcs10_object *self, PyObject *args)
{
  return extension_set_basic_constraints(pkcs10_object_extension_helper(self), args);
}

static char pkcs10_object_get_sia__doc__[] =
  "Return the SIA values for this PKCS#10 request.\n"
  "\n"
  "If this request has no SIA extension, this method returns None.\n"
  "\n"
  "Otherwise, this returns a tuple containing three sequences:\n"
  "caRepository URIs, rpkiManifest URIs, and signedObject URIs.\n"
  "Any other accessMethods are ignored, as are any non-URI\n"
  "accessLocations.\n"
  ;

/* .getSIA(): delegates to the shared SubjectInfoAccess helper. */
static PyObject *
pkcs10_object_get_sia(pkcs10_object *self)
{
  return extension_get_sia(pkcs10_object_extension_helper(self));
}

static char pkcs10_object_set_sia__doc__[] =
  "Set SIA values for this PKCS#10 request.\n"
  "\n"
  "Takes three arguments: caRepository, rpkiManifest, and signedObject.\n"
  "\n"
  "Each of these should be an iterable which returns URIs.\n"
  "\n"
  "None is acceptable as an alternate way of specifying an empty\n"
  "collection of URIs for a particular argument.\n"
  ;

/* .setSIA(): delegates to the shared SubjectInfoAccess helper. */
static PyObject *
pkcs10_object_set_sia(pkcs10_object *self, PyObject *args, PyObject *kwds)
{
  return extension_set_sia(pkcs10_object_extension_helper(self), args, kwds);
}

static char pkcs10_object_get_signature_algorithm__doc__[] =
  "Return this PKCS #10 reqeuest's signature algorithm OID.\n"
  ;
/* NOTE(review): "reqeuest's" typo in the docstring above. */

/*
 * .getSignatureAlgorithm(): read the OID straight out of sig_alg.
 * Touches X509_REQ internals directly; will need X509_REQ_get0_signature()
 * or similar if OpenSSL structures ever go opaque.
 */
static PyObject *
pkcs10_object_get_signature_algorithm(pkcs10_object *self)
{
  ASN1_OBJECT *oid = NULL;

  ENTERING(pkcs10_object_get_signature_algorithm);

  X509_ALGOR_get0(&oid, NULL, NULL, self->pkcs10->sig_alg);

  return ASN1_OBJECT_to_PyString(oid);
}

static char pkcs10_object_get_extension_oids__doc__[] =
  "Return the set of extension OIDs used in this request. 
This is mostly\n"
  "useful for enforcing restrictions on what extensions are allowed to be\n"
  "present, eg, to conform with the RPKI profile.\n"
  ;

/*
 * .getExtensionOIDs(): collect every extension's OID into a frozenset of
 * dotted-decimal strings.
 */
static PyObject *
pkcs10_object_get_extension_oids(pkcs10_object *self)
{
  PyObject *result = NULL;
  PyObject *oid = NULL;
  int i;

  ENTERING(pkcs10_object_get_extension_oids);

  if ((result = PyFrozenSet_New(NULL)) == NULL)
    goto error;

  for (i = 0; i < sk_X509_EXTENSION_num(self->exts); i++) {
    X509_EXTENSION *ext = sk_X509_EXTENSION_value(self->exts, i);
    if ((oid = ASN1_OBJECT_to_PyString(ext->object)) == NULL ||
        PySet_Add(result, oid) < 0)
      goto error;
    Py_XDECREF(oid);
    oid = NULL;
  }

  return result;

 error:
  Py_XDECREF(result);
  Py_XDECREF(oid);
  return NULL;
}

/*
 * May want EKU handlers eventually, skip for now.
 */

static char pkcs10_object_pprint__doc__[] =
  "Return a pretty-printed rendition of this PKCS#10 request.\n"
  ;

/* .pprint(): human-readable dump via X509_REQ_print(). */
static PyObject *
pkcs10_object_pprint(pkcs10_object *self)
{
  PyObject *result = NULL;
  BIO *bio = NULL;

  ENTERING(pkcs10_object_pprint);

  if ((bio = BIO_new(BIO_s_mem())) == NULL)
    lose_no_memory();

  if (!X509_REQ_print(bio, self->pkcs10))
    lose_openssl_error("Unable to pretty-print PKCS#10 request");

  result = BIO_to_PyString_helper(bio);

 error:                          /* Fall through */
  BIO_free(bio);
  return result;
}

/* Method table for the PKCS10 class. */
static struct PyMethodDef pkcs10_object_methods[] = {
  Define_Method(pemWrite, pkcs10_object_pem_write, METH_NOARGS),
  Define_Method(derWrite, pkcs10_object_der_write, METH_NOARGS),
  Define_Method(sign, pkcs10_object_sign, METH_VARARGS),
  Define_Method(verify, pkcs10_object_verify, METH_NOARGS),
  Define_Method(getPublicKey, pkcs10_object_get_public_key, METH_NOARGS),
  Define_Method(setPublicKey, pkcs10_object_set_public_key, METH_VARARGS),
  Define_Method(getVersion, pkcs10_object_get_version, METH_NOARGS),
  Define_Method(setVersion, pkcs10_object_set_version, METH_VARARGS),
  Define_Method(getSubject, 
pkcs10_object_get_subject, METH_VARARGS),
  Define_Method(setSubject, pkcs10_object_set_subject, METH_VARARGS),
  Define_Method(pprint, pkcs10_object_pprint, METH_NOARGS),
  Define_Method(getKeyUsage, pkcs10_object_get_key_usage, METH_NOARGS),
  Define_Method(setKeyUsage, pkcs10_object_set_key_usage, METH_VARARGS),
  Define_Method(getEKU, pkcs10_object_get_eku, METH_NOARGS),
  Define_Method(setEKU, pkcs10_object_set_eku, METH_VARARGS),
  Define_Method(getBasicConstraints, pkcs10_object_get_basic_constraints, METH_NOARGS),
  Define_Method(setBasicConstraints, pkcs10_object_set_basic_constraints, METH_VARARGS),
  Define_Method(getSIA, pkcs10_object_get_sia, METH_NOARGS),
  Define_Method(setSIA, pkcs10_object_set_sia, METH_KEYWORDS),
  Define_Method(getSignatureAlgorithm, pkcs10_object_get_signature_algorithm, METH_NOARGS),
  Define_Method(getExtensionOIDs, pkcs10_object_get_extension_oids, METH_NOARGS),
  Define_Class_Method(pemRead, pkcs10_object_pem_read, METH_VARARGS),
  Define_Class_Method(pemReadFile, pkcs10_object_pem_read_file, METH_VARARGS),
  Define_Class_Method(derRead, pkcs10_object_der_read, METH_VARARGS),
  Define_Class_Method(derReadFile, pkcs10_object_der_read_file, METH_VARARGS),
  {NULL}
};

static char POW_PKCS10_Type__doc__[] =
  "This class represents a PKCS#10 request.\n"
  "\n"
  LAME_DISCLAIMER_IN_ALL_CLASS_DOCUMENTATION
  ;

/* Python type object for the PKCS10 class (no base beyond object). */
static PyTypeObject POW_PKCS10_Type = {
  PyObject_HEAD_INIT(0)
  0,                                        /* ob_size */
  "rpki.POW.PKCS10",                        /* tp_name */
  sizeof(pkcs10_object),                    /* tp_basicsize */
  0,                                        /* tp_itemsize */
  (destructor)pkcs10_object_dealloc,        /* tp_dealloc */
  0,                                        /* tp_print */
  0,                                        /* tp_getattr */
  0,                                        /* tp_setattr */
  0,                                        /* tp_compare */
  0,                                        /* tp_repr */
  0,                                        /* tp_as_number */
  0,                                        /* tp_as_sequence */
  0,                                        /* tp_as_mapping */
  0,                                        /* tp_hash */
  0,                                        /* tp_call */
  0,                                        /* tp_str */
  0,                                        /* tp_getattro */
  0,                                        /* tp_setattro */
  0,                                        /* tp_as_buffer */
  Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
  
POW_PKCS10_Type__doc__,                     /* tp_doc */
  0,                                        /* tp_traverse */
  0,                                        /* tp_clear */
  0,                                        /* tp_richcompare */
  0,                                        /* tp_weaklistoffset */
  0,                                        /* tp_iter */
  0,                                        /* tp_iternext */
  pkcs10_object_methods,                    /* tp_methods */
  0,                                        /* tp_members */
  0,                                        /* tp_getset */
  0,                                        /* tp_base */
  0,                                        /* tp_dict */
  0,                                        /* tp_descr_get */
  0,                                        /* tp_descr_set */
  0,                                        /* tp_dictoffset */
  0,                                        /* tp_init */
  0,                                        /* tp_alloc */
  pkcs10_object_new,                        /* tp_new */
};



/*
 * Module functions.
 */

static char pow_module_add_object__doc__[] =
  "Add new a new object identifier to OpenSSL's internal database.\n"
  "\n"
  "The \"oid\" should be an ASN.1 object identifer, represented as a string\n"
  "in dotted-decimal format.\n"
  "\n"
  "The \"shortName\" parameter should be the OpenSSL \"short name\" to use.\n"
  "\n"
  "The \"longName\" parameter should be the OpenSSL \"long name\" to use.\n"
  ;
/* NOTE(review): "Add new a new" and "identifer" typos in the docstring. */

/* addObject(): register an OID with OpenSSL via OBJ_create(). */
static PyObject *
pow_module_add_object(GCC_UNUSED PyObject *self, PyObject *args)
{
  char *oid = NULL, *sn = NULL, *ln = NULL;

  ENTERING(pow_module_add_object);

  if (!PyArg_ParseTuple(args, "sss", &oid, &sn, &ln))
    goto error;

  if (!OBJ_create(oid, sn, ln))
    lose_openssl_error("Unable to add object");

  Py_RETURN_NONE;

 error:

  return NULL;
}

static char pow_module_get_error__doc__[] =
  "Pop one error off OpenSSL's global error stack and returns it as a string.\n"
  "\n"
  "Returns None if the error stack is empty.\n"
  ;

/* getError(): one ERR_get_error() entry, rendered as a string. */
static PyObject *
pow_module_get_error(GCC_UNUSED PyObject *self)
{
  unsigned long error = ERR_get_error();
  char buf[256];

  ENTERING(pow_module_get_error);

  if (!error)
    Py_RETURN_NONE;

  ERR_error_string_n(error, buf, sizeof(buf));
  return Py_BuildValue("s", buf);
}

static char pow_module_clear_error__doc__[] =
  "Remove all errors from OpenSSL's global error stack.\n"
  ;

/* clearError(): drain the OpenSSL error stack. */
static PyObject *
pow_module_clear_error(GCC_UNUSED PyObject *self)
{
  ENTERING(pow_module_clear_error);
  ERR_clear_error();
  
Py_RETURN_NONE; -} - -static char pow_module_seed__doc__[] = - "Add data to OpenSSL's pseudo-random number generator state.\n" - "\n" - "The \"data\" parameter is the seed to add. Entropy of the data is\n" - "assumed to be equal to the length of the data.\n" - ; - -static PyObject * -pow_module_seed(GCC_UNUSED PyObject *self, PyObject *args) -{ - char *data = NULL; - Py_ssize_t datalen = 0; - - ENTERING(pow_module_seed); - - if (!PyArg_ParseTuple(args, "s#", &data, &datalen)) - goto error; - - RAND_seed(data, datalen); - - Py_RETURN_NONE; - - error: - - return NULL; -} - -static char pow_module_add__doc__[] = - "Add data to OpenSSL's pseudo-random number generator state.\n" - "\n" - "The \"data\" parameter is the data to add.\n" - "\n" - "The \"entropy\" parameter should be an estimate of the number of\n" - "random bytes in the data parameter.\n" - ; - -static PyObject * -pow_module_add(GCC_UNUSED PyObject *self, PyObject *args) -{ - char *data = NULL; - Py_ssize_t datalen = 0; - double entropy = 0; - - ENTERING(pow_module_add); - - if (!PyArg_ParseTuple(args, "s#d", &data, &datalen, &entropy)) - goto error; - - RAND_add(data, datalen, entropy); - - Py_RETURN_NONE; - - error: - return NULL; -} - -static char pow_module_write_random_file__doc__[] = - "Write the current state of OpenSSL's pseduo-random number generator to\n" - "a file.\n" - "\n" - "The \"filename\" parameter is the name of the file to write.\n" - ; - -static PyObject * -pow_module_write_random_file(GCC_UNUSED PyObject *self, PyObject *args) -{ - char *filename = NULL; - - ENTERING(pow_module_write_random_file); - - if (!PyArg_ParseTuple(args, "s", &filename)) - goto error; - - if (RAND_write_file(filename) == -1) - lose("Couldn't write random file"); - - Py_RETURN_NONE; - - error: - return NULL; -} - -static char pow_module_read_random_file__doc__[] = - "Restore the state of OpenSSLs pseudo-random number generator from\n" - "data previously saved to a file.\n" - "\n" - "The \"filename\" parameter is 
the name of the file to read.\n" - ; - -static PyObject * -pow_module_read_random_file(GCC_UNUSED PyObject *self, PyObject *args) -{ - char *file = NULL; - int len = -1; - - ENTERING(pow_module_read_random_file); - - if (!PyArg_ParseTuple(args, "s|i", &file, &len)) - goto error; - - if (!RAND_load_file(file, len)) - lose("Couldn't load random file"); - - Py_RETURN_NONE; - - error: - return NULL; -} - -static char pow_module_custom_datetime__doc__[] = - "Set constructor callback for customized datetime class.\n" - ; - -static PyObject * -pow_module_custom_datetime(GCC_UNUSED PyObject *self, PyObject *args) -{ - PyObject *cb = NULL; - - ENTERING(pow_module_custom_datetime); - - if (!PyArg_ParseTuple(args, "O", &cb)) - goto error; - - Py_XINCREF(cb); - Py_XDECREF(custom_datetime); - custom_datetime = cb; - - Py_RETURN_NONE; - - error: - return NULL; -} - - -static struct PyMethodDef pow_module_methods[] = { - Define_Method(getError, pow_module_get_error, METH_NOARGS), - Define_Method(clearError, pow_module_clear_error, METH_NOARGS), - Define_Method(seed, pow_module_seed, METH_VARARGS), - Define_Method(add, pow_module_add, METH_VARARGS), - Define_Method(readRandomFile, pow_module_read_random_file, METH_VARARGS), - Define_Method(writeRandomFile, pow_module_write_random_file, METH_VARARGS), - Define_Method(addObject, pow_module_add_object, METH_VARARGS), - Define_Method(customDatetime, pow_module_custom_datetime, METH_VARARGS), - {NULL} -}; - - - -/* - * Module initialization. - */ - -void -init_POW(void) -{ - PyObject *m = Py_InitModule3("_POW", pow_module_methods, pow_module__doc__); - int OpenSSL_ok = 1; - - /* - * Python encourages us to use these functions instead of the ones - * in libc, and OpenSSL allows us to do this. The result seems to - * work, and, in theory, gives Python's memory allocator a better - * idea of how much memory we're really using. Not sure why it - * cares, but let's try to be nice about it. 
- * - * Note that this must be done BEFORE anything in OpenSSL uses - * dynamic memory, and that this will probably fail in horrible ways - * without the build-time code (-Bsymbolic, etc) which isolates our - * copy of the OpenSSL code from any system shared libraries. - * Enough other things already fail in horrible ways without that - * isolation that adding one more doesn't make much difference, but - * if you tinker with the build script and start seeing nasty - * memory-related issues, this might be the cause. - */ - CRYPTO_set_mem_functions(PyMem_Malloc, PyMem_Realloc, PyMem_Free); - - /* - * Import the DateTime API - */ - - PyDateTime_IMPORT; - -#define Define_Class(__type__) \ - do { \ - char *__name__ = strrchr(__type__.tp_name, '.'); \ - if (PyType_Ready(&__type__) == 0 && __name__ != NULL) { \ - Py_INCREF(&__type__); \ - PyModule_AddObject(m, __name__+1, (PyObject *) &__type__); \ - } \ - } while (0) - - Define_Class(POW_X509_Type); - Define_Class(POW_X509Store_Type); - Define_Class(POW_X509StoreCTX_Type); - Define_Class(POW_CRL_Type); - Define_Class(POW_Asymmetric_Type); - Define_Class(POW_AsymmetricParams_Type); - Define_Class(POW_Digest_Type); - Define_Class(POW_CMS_Type); - Define_Class(POW_IPAddress_Type); - Define_Class(POW_Manifest_Type); - Define_Class(POW_ROA_Type); - Define_Class(POW_PKCS10_Type); - -#undef Define_Class - -#define Define_Exception(__name__, __parent__) \ - PyModule_AddObject(m, #__name__, ((__name__##Object) \ - = PyErr_NewException("rpki.POW." 
#__name__, __parent__, NULL))) - - Define_Exception(Error, NULL); - Define_Exception(OpenSSLError, ErrorObject); - Define_Exception(POWError, ErrorObject); - Define_Exception(NotVerifiedError, ErrorObject); - -#undef Define_Exception - -#define Define_Integer_Constant(__name__) \ - PyModule_AddIntConstant(m, #__name__, __name__) - - /* Object format types */ - Define_Integer_Constant(LONGNAME_FORMAT); - Define_Integer_Constant(SHORTNAME_FORMAT); - Define_Integer_Constant(OIDNAME_FORMAT); - - /* Message digests */ - Define_Integer_Constant(MD5_DIGEST); - Define_Integer_Constant(SHA_DIGEST); - Define_Integer_Constant(SHA1_DIGEST); - Define_Integer_Constant(SHA256_DIGEST); - Define_Integer_Constant(SHA384_DIGEST); - Define_Integer_Constant(SHA512_DIGEST); - - /* CMS flags */ - Define_Integer_Constant(CMS_NOCERTS); - Define_Integer_Constant(CMS_NOATTR); - Define_Integer_Constant(CMS_NOINTERN); - Define_Integer_Constant(CMS_NOCRL); - Define_Integer_Constant(CMS_NO_SIGNER_CERT_VERIFY); - Define_Integer_Constant(CMS_NO_ATTR_VERIFY); - Define_Integer_Constant(CMS_NO_CONTENT_VERIFY); - - /* X509 validation flags */ - Define_Integer_Constant(X509_V_FLAG_CB_ISSUER_CHECK); - Define_Integer_Constant(X509_V_FLAG_USE_CHECK_TIME); - Define_Integer_Constant(X509_V_FLAG_CRL_CHECK); - Define_Integer_Constant(X509_V_FLAG_CRL_CHECK_ALL); - Define_Integer_Constant(X509_V_FLAG_IGNORE_CRITICAL); - Define_Integer_Constant(X509_V_FLAG_X509_STRICT); - Define_Integer_Constant(X509_V_FLAG_ALLOW_PROXY_CERTS); - Define_Integer_Constant(X509_V_FLAG_POLICY_CHECK); - Define_Integer_Constant(X509_V_FLAG_EXPLICIT_POLICY); - Define_Integer_Constant(X509_V_FLAG_INHIBIT_ANY); - Define_Integer_Constant(X509_V_FLAG_INHIBIT_MAP); - Define_Integer_Constant(X509_V_FLAG_NOTIFY_POLICY); - Define_Integer_Constant(X509_V_FLAG_CHECK_SS_SIGNATURE); - - /* X509 validation error codes */ - Define_Integer_Constant(X509_V_OK); - Define_Integer_Constant(X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT); - 
Define_Integer_Constant(X509_V_ERR_UNABLE_TO_GET_CRL); - Define_Integer_Constant(X509_V_ERR_UNABLE_TO_DECRYPT_CERT_SIGNATURE); - Define_Integer_Constant(X509_V_ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE); - Define_Integer_Constant(X509_V_ERR_UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY); - Define_Integer_Constant(X509_V_ERR_CERT_SIGNATURE_FAILURE); - Define_Integer_Constant(X509_V_ERR_CRL_SIGNATURE_FAILURE); - Define_Integer_Constant(X509_V_ERR_CERT_NOT_YET_VALID); - Define_Integer_Constant(X509_V_ERR_CERT_HAS_EXPIRED); - Define_Integer_Constant(X509_V_ERR_CRL_NOT_YET_VALID); - Define_Integer_Constant(X509_V_ERR_CRL_HAS_EXPIRED); - Define_Integer_Constant(X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD); - Define_Integer_Constant(X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD); - Define_Integer_Constant(X509_V_ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD); - Define_Integer_Constant(X509_V_ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD); - Define_Integer_Constant(X509_V_ERR_OUT_OF_MEM); - Define_Integer_Constant(X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT); - Define_Integer_Constant(X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN); - Define_Integer_Constant(X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY); - Define_Integer_Constant(X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE); - Define_Integer_Constant(X509_V_ERR_CERT_CHAIN_TOO_LONG); - Define_Integer_Constant(X509_V_ERR_CERT_REVOKED); - Define_Integer_Constant(X509_V_ERR_INVALID_CA); - Define_Integer_Constant(X509_V_ERR_PATH_LENGTH_EXCEEDED); - Define_Integer_Constant(X509_V_ERR_INVALID_PURPOSE); - Define_Integer_Constant(X509_V_ERR_CERT_UNTRUSTED); - Define_Integer_Constant(X509_V_ERR_CERT_REJECTED); - Define_Integer_Constant(X509_V_ERR_SUBJECT_ISSUER_MISMATCH); - Define_Integer_Constant(X509_V_ERR_AKID_SKID_MISMATCH); - Define_Integer_Constant(X509_V_ERR_AKID_ISSUER_SERIAL_MISMATCH); - Define_Integer_Constant(X509_V_ERR_KEYUSAGE_NO_CERTSIGN); - Define_Integer_Constant(X509_V_ERR_UNABLE_TO_GET_CRL_ISSUER); - Define_Integer_Constant(X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION); - 
Define_Integer_Constant(X509_V_ERR_KEYUSAGE_NO_CRL_SIGN); - Define_Integer_Constant(X509_V_ERR_UNHANDLED_CRITICAL_CRL_EXTENSION); - Define_Integer_Constant(X509_V_ERR_INVALID_NON_CA); - Define_Integer_Constant(X509_V_ERR_PROXY_PATH_LENGTH_EXCEEDED); - Define_Integer_Constant(X509_V_ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE); - Define_Integer_Constant(X509_V_ERR_PROXY_CERTIFICATES_NOT_ALLOWED); - Define_Integer_Constant(X509_V_ERR_INVALID_EXTENSION); - Define_Integer_Constant(X509_V_ERR_INVALID_POLICY_EXTENSION); - Define_Integer_Constant(X509_V_ERR_NO_EXPLICIT_POLICY); - Define_Integer_Constant(X509_V_ERR_UNNESTED_RESOURCE); - Define_Integer_Constant(X509_V_ERR_APPLICATION_VERIFICATION); - - /* AsymmetricParam EC curve codes */ - Define_Integer_Constant(EC_P256_CURVE); - -#undef Define_Integer_Constant - - /* - * Initialise library. - * - * We shouldn't need any of the SSL code or error strings anymore. - * - * If we cared deeply about avoiding references to symmetric cipher - * algorithms and digest algorithms we're not using, we could - * replace the call to OpenSSL_add_all_algorithms() with calls to - * add just the specific algorithms we use rather than all of them. - * For now, don't worry about it. 
- */ - - OpenSSL_add_all_algorithms(); - ERR_load_crypto_strings(); - - OpenSSL_ok &= create_missing_nids(); - - x509_store_ctx_ex_data_idx = X509_STORE_CTX_get_ex_new_index(0, "x590_store_ctx_object for verify callback", - NULL, NULL, NULL); - - if (PyErr_Occurred() || !OpenSSL_ok) - Py_FatalError("Can't initialize module POW"); -} - -/* - * Local Variables: - * indent-tabs-mode: nil - * End: - */ diff --git a/rpkid/irbe_cli b/rpkid/irbe_cli deleted file mode 100755 index 9deac6d6..00000000 --- a/rpkid/irbe_cli +++ /dev/null @@ -1,388 +0,0 @@ -#!/usr/bin/env python - -# $Id$ -# -# Copyright (C) 2009--2013 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__doc__ = """ -Command line IR back-end control program for rpkid and pubd. -""" - -# Command line processing of this program is too complex and -# idiosyncratic to be worth trying to reimplement using argparse. - -import sys -import getopt -import textwrap -import rpki.left_right -import rpki.http -import rpki.x509 -import rpki.config -import rpki.log -import rpki.publication -import rpki.async - -pem_out = None - -class UsageWrapper(textwrap.TextWrapper): - """ - Call interface around Python textwrap.Textwrapper class. - """ - - def __call__(self, *args): - """ - Format arguments, with TextWrapper indentation. - """ - return self.fill(textwrap.dedent(" ".join(args))) - -usage_fill = UsageWrapper(subsequent_indent = " " * 4) - -class reply_elt_mixin(object): - """ - Protocol mix-in for printout of reply PDUs. - """ - - is_cmd = False - - def client_reply_decode(self): - pass - - def client_reply_show(self): - print self.element_name - for i in self.attributes + self.elements: - if getattr(self, i) is not None: - print " %s: %s" % (i, getattr(self, i)) - -class cmd_elt_mixin(reply_elt_mixin): - """ - Protocol mix-in for command line client element PDUs. - """ - - is_cmd = True - - ## @var excludes - # XML attributes and elements that should not be allowed as command - # line arguments. - excludes = () - - @classmethod - def usage(cls): - """ - Generate usage message for this PDU. 
- """ - args = " ".join("--" + x + "=" for x in cls.attributes + cls.elements if x not in cls.excludes) - bools = " ".join("--" + x for x in cls.booleans) - if args and bools: - return args + " " + bools - else: - return args or bools - - def client_getopt(self, argv): - """ - Parse options for this class. - """ - # pylint: disable=W0621 - opts, argv = getopt.getopt(argv, "", [x + "=" for x in self.attributes + self.elements if x not in self.excludes] + list(self.booleans)) - for o, a in opts: - o = o[2:] - handler = getattr(self, "client_query_" + o, None) - if handler is not None: - handler(a) - elif o in self.booleans: - setattr(self, o, True) - else: - assert o in self.attributes - setattr(self, o, a) - return argv - - def client_query_bpki_cert(self, arg): - """ - Special handler for --bpki_cert option. - """ - self.bpki_cert = rpki.x509.X509(Auto_file = arg) - - def client_query_glue(self, arg): - """ - Special handler for --bpki_glue option. - """ - self.bpki_glue = rpki.x509.X509(Auto_file = arg) - - def client_query_bpki_cms_cert(self, arg): - """ - Special handler for --bpki_cms_cert option. - """ - self.bpki_cms_cert = rpki.x509.X509(Auto_file = arg) - - def client_query_cms_glue(self, arg): - """ - Special handler for --bpki_cms_glue option. - """ - self.bpki_cms_glue = rpki.x509.X509(Auto_file = arg) - -class cmd_msg_mixin(object): - """ - Protocol mix-in for command line client message PDUs. - """ - - @classmethod - def usage(cls): - """ - Generate usage message for this PDU. 
- """ - for k, v in cls.pdus.items(): - if v.is_cmd: - print usage_fill(k, v.usage()) - -# left-right protcol - -class left_right_msg(cmd_msg_mixin, rpki.left_right.msg): - - class self_elt(cmd_elt_mixin, rpki.left_right.self_elt): - pass - - class bsc_elt(cmd_elt_mixin, rpki.left_right.bsc_elt): - - excludes = ("pkcs10_request",) - - def client_query_signing_cert(self, arg): - """--signing_cert option.""" - self.signing_cert = rpki.x509.X509(Auto_file = arg) - - def client_query_signing_cert_crl(self, arg): - """--signing_cert_crl option.""" - self.signing_cert_crl = rpki.x509.CRL(Auto_file = arg) - - def client_reply_decode(self): - global pem_out - if pem_out is not None and self.pkcs10_request is not None: - if isinstance(pem_out, str): - pem_out = open(pem_out, "w") - pem_out.write(self.pkcs10_request.get_PEM()) - - class parent_elt(cmd_elt_mixin, rpki.left_right.parent_elt): - pass - - class child_elt(cmd_elt_mixin, rpki.left_right.child_elt): - pass - - class repository_elt(cmd_elt_mixin, rpki.left_right.repository_elt): - pass - - class list_published_objects_elt(cmd_elt_mixin, rpki.left_right.list_published_objects_elt): - excludes = ("uri",) - - class list_received_resources_elt(cmd_elt_mixin, rpki.left_right.list_received_resources_elt): - excludes = ("parent_handle", "notBefore", "notAfter", "uri", "sia_uri", "aia_uri", "asn", "ipv4", "ipv6") - - class report_error_elt(reply_elt_mixin, rpki.left_right.report_error_elt): - pass - - pdus = dict((x.element_name, x) - for x in (self_elt, bsc_elt, parent_elt, child_elt, repository_elt, - list_published_objects_elt, list_received_resources_elt, report_error_elt)) - -class left_right_sax_handler(rpki.left_right.sax_handler): - pdu = left_right_msg - -class left_right_cms_msg(rpki.left_right.cms_msg): - saxify = left_right_sax_handler.saxify - -# Publication protocol - -class publication_msg(cmd_msg_mixin, rpki.publication.msg): - - class config_elt(cmd_elt_mixin, rpki.publication.config_elt): - - def 
client_query_bpki_crl(self, arg): - """ - Special handler for --bpki_crl option. - """ - self.bpki_crl = rpki.x509.CRL(Auto_file = arg) - - class client_elt(cmd_elt_mixin, rpki.publication.client_elt): - pass - - class certificate_elt(cmd_elt_mixin, rpki.publication.certificate_elt): - pass - - class crl_elt(cmd_elt_mixin, rpki.publication.crl_elt): - pass - - class manifest_elt(cmd_elt_mixin, rpki.publication.manifest_elt): - pass - - class roa_elt(cmd_elt_mixin, rpki.publication.roa_elt): - pass - - class report_error_elt(reply_elt_mixin, rpki.publication.report_error_elt): - pass - - class ghostbuster_elt(cmd_elt_mixin, rpki.publication.ghostbuster_elt): - pass - - pdus = dict((x.element_name, x) - for x in (config_elt, client_elt, certificate_elt, crl_elt, - manifest_elt, roa_elt, report_error_elt, - ghostbuster_elt)) - -class publication_sax_handler(rpki.publication.sax_handler): - pdu = publication_msg - -class publication_cms_msg(rpki.publication.cms_msg): - saxify = publication_sax_handler.saxify - -# Usage - -top_opts = ["config=", "help", "pem_out=", "quiet", "verbose"] - -def usage(code = 1): - if __doc__ is not None: - print __doc__.strip() - print - print "Usage:" - print - print "# Top-level options:" - print usage_fill(*["--" + x for x in top_opts]) - print - print "# left-right protocol:" - left_right_msg.usage() - print - print "# publication protocol:" - publication_msg.usage() - sys.exit(code) - -# Main program - -rpki.log.init("irbe_cli") - -argv = sys.argv[1:] - -if not argv: - usage(0) - -cfg_file = None -verbose = True - -opts, argv = getopt.getopt(argv, "c:hpqv?", top_opts) -for o, a in opts: - if o in ("-?", "-h", "--help"): - usage(0) - elif o in ("-c", "--config"): - cfg_file = a - elif o in ("-p", "--pem_out"): - pem_out = a - elif o in ("-q", "--quiet"): - verbose = False - elif o in ("-v", "--verbose"): - verbose = True - -if not argv: - usage(1) - -cfg = rpki.config.parser(cfg_file, "irbe_cli") - -q_msg_left_right = [] 
-q_msg_publication = [] - -while argv: - if argv[0] in left_right_msg.pdus: - q_pdu = left_right_msg.pdus[argv[0]]() - q_msg = q_msg_left_right - elif argv[0] in publication_msg.pdus: - q_pdu = publication_msg.pdus[argv[0]]() - q_msg = q_msg_publication - else: - usage(1) - argv = q_pdu.client_getopt(argv[1:]) - q_msg.append(q_pdu) - -from django.conf import settings - -settings.configure( - DATABASES = { "default" : { - "ENGINE" : "django.db.backends.mysql", - "NAME" : cfg.get("sql-database", section = "irdbd"), - "USER" : cfg.get("sql-username", section = "irdbd"), - "PASSWORD" : cfg.get("sql-password", section = "irdbd"), - "HOST" : "", - "PORT" : "", - "OPTIONS" : { "init_command": "SET storage_engine=INNODB" }}}, - INSTALLED_APPS = ("rpki.irdb",), -) - -import rpki.irdb - -server_ca = rpki.irdb.ServerCA.objects.get() -irbe = server_ca.ee_certificates.get(purpose = "irbe") - -if q_msg_left_right: - - class left_right_proto(object): - cms_msg = left_right_cms_msg - msg = left_right_msg - - rpkid = server_ca.ee_certificates.get(purpose = "rpkid") - - rpkid_url = "http://%s:%s/left-right/" % ( - cfg.get("server-host", section = "rpkid"), - cfg.get("server-port", section = "rpkid")) - - call_rpkid = rpki.async.sync_wrapper(rpki.http.caller( - proto = left_right_proto, - client_key = irbe.private_key, - client_cert = irbe.certificate, - server_ta = server_ca.certificate, - server_cert = rpkid.certificate, - url = rpkid_url, - debug = verbose)) - - call_rpkid(*q_msg_left_right) - -if q_msg_publication: - - class publication_proto(object): - msg = publication_msg - cms_msg = publication_cms_msg - - pubd = server_ca.ee_certificates.get(purpose = "pubd") - - pubd_url = "http://%s:%s/control/" % ( - cfg.get("server-host", section = "pubd"), - cfg.get("server-port", section = "pubd")) - - call_pubd = rpki.async.sync_wrapper(rpki.http.caller( - proto = publication_proto, - client_key = irbe.private_key, - client_cert = irbe.certificate, - server_ta = server_ca.certificate, 
- server_cert = pubd.certificate, - url = pubd_url, - debug = verbose)) - - call_pubd(*q_msg_publication) diff --git a/rpkid/irdbd b/rpkid/irdbd deleted file mode 100755 index 493e3d72..00000000 --- a/rpkid/irdbd +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -if __name__ == "__main__": - import rpki.irdbd - rpki.irdbd.main() diff --git a/rpkid/left-right-schema.rnc b/rpkid/left-right-schema.rnc deleted file mode 100644 index b46adeb5..00000000 --- a/rpkid/left-right-schema.rnc +++ /dev/null @@ -1,323 +0,0 @@ -# $Id$ -# -# RelaxNG schema for RPKI left-right protocol. -# -# Copyright (C) 2012--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2011 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -default namespace = "http://www.hactrn.net/uris/rpki/left-right-spec/" - -# Top level PDU - -start = element msg { - attribute version { xsd:positiveInteger { maxInclusive="1" } }, - ( (attribute type { "query" }, query_elt*) | - (attribute type { "reply" }, reply_elt*) ) -} - -# PDUs allowed in a query -query_elt |= self_query -query_elt |= bsc_query -query_elt |= parent_query -query_elt |= child_query -query_elt |= repository_query -query_elt |= list_roa_requests_query -query_elt |= list_ghostbuster_requests_query -query_elt |= list_ee_certificate_requests_query -query_elt |= list_resources_query -query_elt |= list_published_objects_query -query_elt |= list_received_resources_query - -# PDUs allowed in a reply -reply_elt |= self_reply -reply_elt |= bsc_reply -reply_elt |= parent_reply -reply_elt |= child_reply -reply_elt |= repository_reply -reply_elt |= list_resources_reply -reply_elt |= list_roa_requests_reply -reply_elt |= list_ghostbuster_requests_reply -reply_elt |= list_ee_certificate_requests_reply -reply_elt |= list_published_objects_reply -reply_elt |= list_received_resources_reply -reply_elt |= report_error_reply - -# Tag attributes for bulk operations -tag = attribute tag { xsd:token {maxLength="1024" } }? - -# Combinations of action and type attributes used in later definitions. -# The same patterns repeat in most of the elements in this protocol. 
-ctl_create = attribute action { "create" }, tag -ctl_set = attribute action { "set" }, tag -ctl_get = attribute action { "get" }, tag -ctl_list = attribute action { "list" }, tag -ctl_destroy = attribute action { "destroy" }, tag - -# Base64 encoded DER stuff -base64 = xsd:base64Binary { maxLength="512000" } - -# Base definition for all fields that are really just SQL primary indices -#sql_id = xsd:nonNegativeInteger - -# ...except that fields containing SQL primary indicies don't belong -# in this protocol, so they're turninging into handles. -# Length restriction is a MySQL implementation issue. -# Handles are case-insensitive (because SQL is, among other reasons). -object_handle = xsd:string { maxLength="255" pattern="[\-_A-Za-z0-9]+" } - -# URIs -uri = xsd:anyURI { maxLength="4096" } - -# Name fields imported from up-down protocol -up_down_name = xsd:token { maxLength="1024" } - -# Resource lists -asn_list = xsd:string { maxLength="512000" pattern="[\-,0-9]*" } -ipv4_list = xsd:string { maxLength="512000" pattern="[\-,0-9/.]*" } -ipv6_list = xsd:string { maxLength="512000" pattern="[\-,0-9/:a-fA-F]*" } - -# element - -self_bool = (attribute rekey { "yes" }?, - attribute reissue { "yes" }?, - attribute revoke { "yes" }?, - attribute run_now { "yes" }?, - attribute publish_world_now { "yes" }?, - attribute revoke_forgotten { "yes" }?, - attribute clear_replay_protection { "yes" }?) - -self_payload = (attribute use_hsm { "yes" | "no" }?, - attribute crl_interval { xsd:positiveInteger }?, - attribute regen_margin { xsd:positiveInteger }?, - element bpki_cert { base64 }?, - element bpki_glue { base64 }?) 
- -self_handle = attribute self_handle { object_handle } - -self_query |= element self { ctl_create, self_handle, self_bool, self_payload } -self_reply |= element self { ctl_create, self_handle } -self_query |= element self { ctl_set, self_handle, self_bool, self_payload } -self_reply |= element self { ctl_set, self_handle } -self_query |= element self { ctl_get, self_handle } -self_reply |= element self { ctl_get, self_handle, self_payload } -self_query |= element self { ctl_list } -self_reply |= element self { ctl_list, self_handle, self_payload } -self_query |= element self { ctl_destroy, self_handle } -self_reply |= element self { ctl_destroy, self_handle } - -# element. Key parameters hardwired for now. - -bsc_bool = ((attribute generate_keypair { "yes" }, - attribute key_type { "rsa" }?, - attribute hash_alg { "sha256" }?, - attribute key_length { "2048" }?)?) - -bsc_handle = attribute bsc_handle { object_handle } - -bsc_payload = (element signing_cert { base64 }?, - element signing_cert_crl { base64 }?) - -bsc_readonly = element pkcs10_request { base64 }? 
- -bsc_query |= element bsc { ctl_create, self_handle, bsc_handle, bsc_bool, bsc_payload } -bsc_reply |= element bsc { ctl_create, self_handle, bsc_handle, bsc_readonly } -bsc_query |= element bsc { ctl_set, self_handle, bsc_handle, bsc_bool, bsc_payload } -bsc_reply |= element bsc { ctl_set, self_handle, bsc_handle, bsc_readonly } -bsc_query |= element bsc { ctl_get, self_handle, bsc_handle } -bsc_reply |= element bsc { ctl_get, self_handle, bsc_handle, bsc_payload, bsc_readonly } -bsc_query |= element bsc { ctl_list, self_handle } -bsc_reply |= element bsc { ctl_list, self_handle, bsc_handle, bsc_payload, bsc_readonly } -bsc_query |= element bsc { ctl_destroy, self_handle, bsc_handle } -bsc_reply |= element bsc { ctl_destroy, self_handle, bsc_handle } - -# element - -parent_handle = attribute parent_handle { object_handle } - -parent_bool = (attribute rekey { "yes" }?, - attribute reissue { "yes" }?, - attribute revoke { "yes" }?, - attribute revoke_forgotten { "yes" }?, - attribute clear_replay_protection { "yes" }?) - -parent_payload = (attribute peer_contact_uri { uri }?, - attribute sia_base { uri }?, - bsc_handle?, - repository_handle?, - attribute sender_name { up_down_name }?, - attribute recipient_name { up_down_name }?, - element bpki_cms_cert { base64 }?, - element bpki_cms_glue { base64 }?) 
- -parent_query |= element parent { ctl_create, self_handle, parent_handle, parent_bool, parent_payload } -parent_reply |= element parent { ctl_create, self_handle, parent_handle } -parent_query |= element parent { ctl_set, self_handle, parent_handle, parent_bool, parent_payload } -parent_reply |= element parent { ctl_set, self_handle, parent_handle } -parent_query |= element parent { ctl_get, self_handle, parent_handle } -parent_reply |= element parent { ctl_get, self_handle, parent_handle, parent_payload } -parent_query |= element parent { ctl_list, self_handle } -parent_reply |= element parent { ctl_list, self_handle, parent_handle, parent_payload } -parent_query |= element parent { ctl_destroy, self_handle, parent_handle } -parent_reply |= element parent { ctl_destroy, self_handle, parent_handle } - -# element - -child_handle = attribute child_handle { object_handle } - -child_bool = (attribute reissue { "yes" }?, - attribute clear_replay_protection { "yes" }?) - -child_payload = (bsc_handle?, - element bpki_cert { base64 }?, - element bpki_glue { base64 }?) 
- -child_query |= element child { ctl_create, self_handle, child_handle, child_bool, child_payload } -child_reply |= element child { ctl_create, self_handle, child_handle } -child_query |= element child { ctl_set, self_handle, child_handle, child_bool, child_payload } -child_reply |= element child { ctl_set, self_handle, child_handle } -child_query |= element child { ctl_get, self_handle, child_handle } -child_reply |= element child { ctl_get, self_handle, child_handle, child_payload } -child_query |= element child { ctl_list, self_handle } -child_reply |= element child { ctl_list, self_handle, child_handle, child_payload } -child_query |= element child { ctl_destroy, self_handle, child_handle } -child_reply |= element child { ctl_destroy, self_handle, child_handle } - -# element - -repository_handle = attribute repository_handle { object_handle } - -repository_bool = attribute clear_replay_protection { "yes" }? - -repository_payload = (attribute peer_contact_uri { uri }?, - bsc_handle?, - element bpki_cert { base64 }?, - element bpki_glue { base64 }?) 
- -repository_query |= element repository { ctl_create, self_handle, repository_handle, repository_bool, repository_payload } -repository_reply |= element repository { ctl_create, self_handle, repository_handle } -repository_query |= element repository { ctl_set, self_handle, repository_handle, repository_bool, repository_payload } -repository_reply |= element repository { ctl_set, self_handle, repository_handle } -repository_query |= element repository { ctl_get, self_handle, repository_handle } -repository_reply |= element repository { ctl_get, self_handle, repository_handle, repository_payload } -repository_query |= element repository { ctl_list, self_handle } -repository_reply |= element repository { ctl_list, self_handle, repository_handle, repository_payload } -repository_query |= element repository { ctl_destroy, self_handle, repository_handle } -repository_reply |= element repository { ctl_destroy, self_handle, repository_handle } - -# element - -list_resources_query = element list_resources { - tag, self_handle, child_handle -} - -list_resources_reply = element list_resources { - tag, self_handle, child_handle, - attribute valid_until { xsd:dateTime { pattern=".*Z" } }, - attribute asn { asn_list }?, - attribute ipv4 { ipv4_list }?, - attribute ipv6 { ipv6_list }? -} - -# element - -list_roa_requests_query = element list_roa_requests { - tag, self_handle -} - -list_roa_requests_reply = element list_roa_requests { - tag, self_handle, - attribute asn { xsd:nonNegativeInteger }, - attribute ipv4 { ipv4_list }?, - attribute ipv6 { ipv6_list }? 
-} - -# element - -list_ghostbuster_requests_query = element list_ghostbuster_requests { - tag, self_handle, parent_handle -} - -list_ghostbuster_requests_reply = element list_ghostbuster_requests { - tag, self_handle, parent_handle, - xsd:string -} - -# element - -list_ee_certificate_requests_query = element list_ee_certificate_requests { - tag, self_handle -} - -list_ee_certificate_requests_reply = element list_ee_certificate_requests { - tag, self_handle, - attribute gski { xsd:token { minLength="27" maxLength="27" } }, - attribute valid_until { xsd:dateTime { pattern=".*Z" } }, - attribute asn { asn_list }?, - attribute ipv4 { ipv4_list }?, - attribute ipv6 { ipv6_list }?, - attribute cn { xsd:string { maxLength="64" pattern="[\-0-9A-Za-z_ ]+" } }?, - attribute sn { xsd:string { maxLength="64" pattern="[0-9A-Fa-f]+" } }?, - attribute eku { xsd:string { maxLength="512000" pattern="[.,0-9]+" } }?, - element pkcs10 { base64 } -} - -# element - -list_published_objects_query = element list_published_objects { - tag, self_handle -} - -list_published_objects_reply = element list_published_objects { - tag, self_handle, - attribute uri { uri }, - attribute child_handle { object_handle }?, - base64 -} - -# element - -list_received_resources_query = element list_received_resources { - tag, self_handle -} - -list_received_resources_reply = element list_received_resources { - tag, self_handle, parent_handle, - attribute notBefore { xsd:dateTime { pattern=".*Z" } }, - attribute notAfter { xsd:dateTime { pattern=".*Z" } }, - attribute uri { uri }, - attribute sia_uri { uri }, - attribute aia_uri { uri }, - attribute asn { asn_list }?, - attribute ipv4 { ipv4_list }?, - attribute ipv6 { ipv6_list }? -} - -# element - -error = xsd:token { maxLength="1024" } - -report_error_reply = element report_error { - tag, self_handle?, - attribute error_code { error }, - xsd:string { maxLength="512000" }? 
-} - -# Local Variables: -# indent-tabs-mode: nil -# comment-start: "# " -# comment-start-skip: "#[ \t]*" -# End: diff --git a/rpkid/left-right-schema.rng b/rpkid/left-right-schema.rng deleted file mode 100644 index 6c3d2f1a..00000000 --- a/rpkid/left-right-schema.rng +++ /dev/null @@ -1,1089 +0,0 @@ - - - - - - - - - 1 - - - - - - query - - - - - - - - reply - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1024 - - - - - - - - create - - - - - - set - - - - - - get - - - - - - list - - - - - - destroy - - - - - - - 512000 - - - - - - - 255 - [\-_A-Za-z0-9]+ - - - - - - 4096 - - - - - - 1024 - - - - - - 512000 - [\-,0-9]* - - - - - 512000 - [\-,0-9/.]* - - - - - 512000 - [\-,0-9/:a-fA-F]* - - - - - - - yes - - - - - yes - - - - - yes - - - - - yes - - - - - yes - - - - - yes - - - - - yes - - - - - - - - yes - no - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - yes - - - - rsa - - - - - sha256 - - - - - 2048 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - yes - - - - - yes - - - - - yes - - - - - yes - - - - - yes - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - yes - - - - - yes - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - yes - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - .*Z - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 27 - 27 - - - - - .*Z - - - - - - - - - - - - - - - - - - - - - 64 - [\-0-9A-Za-z_ ]+ - - - - - - - 64 - [0-9A-Fa-f]+ - - - - - - - 512000 - [.,0-9]+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - .*Z - - - - - .*Z - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1024 - - - - - - - - - - - - - - 512000 - - - - - - diff --git a/rpkid/myrpki.rnc b/rpkid/myrpki.rnc deleted file mode 100644 index 156ab0d5..00000000 --- a/rpkid/myrpki.rnc +++ /dev/null @@ -1,164 +0,0 @@ -# $Id$ -# -# RelaxNG schema for MyRPKI XML messages. -# -# This message protocol is on its way out, as we're in the process of -# moving on from the user interface model that produced it, but even -# after we finish replacing it we'll still need the schema for a while -# to validate old messages when upgrading. -# -# libxml2 (including xmllint) only groks the XML syntax of RelaxNG, so -# run the compact syntax through trang to get XML syntax. -# -# Copyright (C) 2009-2011 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -default namespace = "http://www.hactrn.net/uris/rpki/myrpki/" - -version = "2" - -base64 = xsd:base64Binary { maxLength="512000" } -object_handle = xsd:string { maxLength="255" pattern="[\-_A-Za-z0-9]+" } -pubd_handle = xsd:string { maxLength="255" pattern="[\-_A-Za-z0-9/]+" } -uri = xsd:anyURI { maxLength="4096" } -asn = xsd:positiveInteger -asn_list = xsd:string { maxLength="512000" pattern="[\-,0-9]+" } -ipv4_list = xsd:string { maxLength="512000" pattern="[\-,0-9/.]+" } -ipv6_list = xsd:string { maxLength="512000" pattern="[\-,0-9/:a-fA-F]+" } -timestamp = xsd:dateTime { pattern=".*Z" } - -# Message formate used between configure_resources and -# configure_daemons. - -start |= element myrpki { - attribute version { version }, - attribute handle { object_handle }, - attribute service_uri { uri }?, - element roa_request { - attribute asn { asn }, - attribute v4 { ipv4_list }, - attribute v6 { ipv6_list } - }*, - element child { - attribute handle { object_handle }, - attribute valid_until { timestamp }, - attribute asns { asn_list }?, - attribute v4 { ipv4_list }?, - attribute v6 { ipv6_list }?, - element bpki_certificate { base64 }? - }*, - element parent { - attribute handle { object_handle }, - attribute service_uri { uri }?, - attribute myhandle { object_handle }?, - attribute sia_base { uri }?, - element bpki_cms_certificate { base64 }? - }*, - element repository { - attribute handle { object_handle }, - attribute service_uri { uri }?, - element bpki_certificate { base64 }? - }*, - element bpki_ca_certificate { base64 }?, - element bpki_crl { base64 }?, - element bpki_bsc_certificate { base64 }?, - element bpki_bsc_pkcs10 { base64 }? 
-} - -# Format of an identity.xml file. - -start |= element identity { - attribute version { version }, - attribute handle { object_handle }, - element bpki_ta { base64 } -} - -# Format of element used in referrals. The Base64 -# text is a (q. v.) element signed with CMS. - -authorization = element authorization { - attribute referrer { pubd_handle }, - base64 -} - -# Format of element used in referrals. - -contact_info = element contact_info { - attribute uri { uri }?, - xsd:string -} - -# Variant payload portion of a element. - -repository_payload = ( - (attribute type { "none" }) | - (attribute type { "offer" }) | - (attribute type { "referral" }, authorization, contact_info) -) - -# element (response from configure_child). - -start |= element parent { - attribute version { version }, - attribute valid_until { timestamp }?, - attribute service_uri { uri }?, - attribute child_handle { object_handle }, - attribute parent_handle { object_handle }, - element bpki_resource_ta { base64 }, - element bpki_child_ta { base64 }, - element repository { repository_payload }? -} - -# element, types offer and referral -# (input to configure_publication_client). - -start |= element repository { - attribute version { version }, - attribute handle { object_handle }, - attribute parent_handle { object_handle }, - repository_payload, - element bpki_client_ta { base64 } -} - -# element, confirmation type (output of -# configure_publication_client). - -start |= element repository { - attribute version { version }, - attribute type { "confirmed" }, - attribute parent_handle { object_handle }, - attribute client_handle { pubd_handle }, - attribute service_uri { uri }, - attribute sia_base { uri }, - element bpki_server_ta { base64 }, - element bpki_client_ta { base64 }, - authorization?, - contact_info? -} - -# element. This is the entirety of a separate message -# which is signed with CMS then included ase the Base64 content of an -# element in the main message. 
- -start |= element referral { - attribute version { version }, - attribute authorized_sia_base { uri }, - base64 -} - -# Local Variables: -# indent-tabs-mode: nil -# comment-start: "# " -# comment-start-skip: "#[ \t]*" -# End: diff --git a/rpkid/myrpki.rng b/rpkid/myrpki.rng deleted file mode 100644 index 07b8beb3..00000000 --- a/rpkid/myrpki.rng +++ /dev/null @@ -1,379 +0,0 @@ - - - - - 2 - - - - 512000 - - - - - 255 - [\-_A-Za-z0-9]+ - - - - - 255 - [\-_A-Za-z0-9/]+ - - - - - 4096 - - - - - - - - 512000 - [\-,0-9]+ - - - - - 512000 - [\-,0-9/.]+ - - - - - 512000 - [\-,0-9/:a-fA-F]+ - - - - - .*Z - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - none - - - offer - - - - referral - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - confirmed - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/rpkid/portal-gui/rpki.wsgi b/rpkid/portal-gui/rpki.wsgi deleted file mode 100644 index 72ba75ac..00000000 --- a/rpkid/portal-gui/rpki.wsgi +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions -# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -# This is an example wsgi application for use with mod_wsgi and apache. - -__version__ = '$Id$' - -import sys -import os -import rpki.autoconf - -os.environ['DJANGO_SETTINGS_MODULE'] = 'rpki.gui.default_settings' - -# Needed for local_settings.py -sys.path.insert(1, rpki.autoconf.sysconfdir + '/rpki') - -# Kludge to disable use of setproctitle in rpki.log. For reasons -# unknown, at least on Ubuntu 12.04 LTS, we dump core with a segment -# violation if we try to load that module in this process, even though -# it works fine in other processes on the same system. Not yet sure -# what this is about, just disable setproctitle in WSGI case for now. -os.environ['DISABLE_SETPROCTITLE'] = 'yes' - -# Kludge to set PYTHON_EGG_CACHE, mostly for FreeBSD where the ports -# system installs Python eggs in their zipped format and expects each -# user application to unpack them into its own egg cache. -if not os.environ.get('PYTHON_EGG_CACHE') and rpki.autoconf.WSGI_PYTHON_EGG_CACHE_DIR: - os.environ['PYTHON_EGG_CACHE'] = rpki.autoconf.WSGI_PYTHON_EGG_CACHE_DIR - -import django.core.handlers.wsgi -application = django.core.handlers.wsgi.WSGIHandler() - -# vim:ft=python diff --git a/rpkid/portal-gui/scripts/rpki-manage b/rpkid/portal-gui/scripts/rpki-manage deleted file mode 100755 index 0d581ce9..00000000 --- a/rpkid/portal-gui/scripts/rpki-manage +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env python - -import os -from django.core.management import execute_from_command_line - -# django-admin seems to have problems creating the superuser account when -# $LANG is unset or is set to something totally incompatible with UTF-8. 
-if os.environ.get('LANG') in (None, "", "C"): - os.environ['LANG'] = 'en_US.UTF-8' - -os.environ['DJANGO_SETTINGS_MODULE'] = 'rpki.gui.default_settings' - -execute_from_command_line() diff --git a/rpkid/portal-gui/scripts/rpkigui-apache-conf-gen b/rpkid/portal-gui/scripts/rpkigui-apache-conf-gen deleted file mode 100755 index 6201c364..00000000 --- a/rpkid/portal-gui/scripts/rpkigui-apache-conf-gen +++ /dev/null @@ -1,483 +0,0 @@ -#!/usr/bin/env python - -# $Id$ -# -# Copyright (C) 2013 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -import os -import re -import sys -import socket -import urllib2 -import argparse -import platform -import textwrap -import subprocess -import rpki.autoconf - -fqdn = socket.getfqdn() - -vhost_template = """\ -# -# By default, this configuration assumes that you use name-based -# virtual hosting. If that's not what you want, you may need -# to change this. -# - - - # - # By default, we enable an HTTPS virtual host on this machine's - # fully qualified domain name. This works for simple - # configurations, but if you're running a more complex Apache - # configuration or want to run the GUI on a different hostname, - # you may need to change this. 
- # - ServerName %(fqdn)s - - # - # Configure the WSGI application to run as a separate process from - # the Apache daemon itself. - # - %(WSGI_DAEMON_PROCESS)s - %(WSGI_PROCESS_GROUP)s - - # - # Allow access to our WSGI directory. - # - -%(allow)s - - - # - # Define the URL to the RPKI GUI - # - WSGIScriptAlias / %(datarootdir)s/rpki/wsgi/rpki.wsgi - - # - # Allow access to static content (icons, etc). - # - -%(allow)s - - - # - # Add the aliases Django expects for static content. - # - Alias /media/ %(datarootdir)s/rpki/media/ - Alias /site_media/ %(datarootdir)s/rpki/media/ - - # - # Allow access to the directory where rcynic-html writes - # its output files. - # - -%(allow)s - - - # - # Add alias pointing to rcynic-html's output files. - # - # If for some reason you need to change this, be careful to leave - # the trailing slash off the URL, otherwise /rcynic will be - # swallowed by the WSGIScriptAlias - # - Alias /rcynic %(RCYNIC_HTML_DIR)s/ - - # - # Redirect to the GUI dashboard when someone hits the bare vhost. - # - RedirectMatch ^/$ /rpki/ - - # - # Enable HTTPS - # - SSLEngine on - - # - # Specify HTTPS server certificate and key files for this virtual host. - # This should suffice for simple configurations, but if you're running - # a more complex Apache configuration you may need to change or remove - # these lines. - # - SSLCertificateFile %(sysconfdir)s/rpki/apache.cer - SSLCertificateKeyFile %(sysconfdir)s/rpki/apache.key - - # - # Take pity on users running Internet Exploder - # - BrowserMatch "MSIE [2-6]" ssl-unclean-shutdown nokeepalive downgrade-1.0 force-response-1.0 - BrowserMatch "MSIE [17-9]" ssl-unclean-shutdown - - -""" - -allow_22_template = ''' - Order deny,allow - Allow from all\ -''' - -allow_24_template = ''' - Require all granted\ -''' - -name_virtual_host_template = '''\ -# -# In most cases we want to use name-based virtual hosting. If this causes -# problems with your existing Apache configuration, try commenting out this line. 
-# -NameVirtualHost *:443 - -''' - -def Guess(args): - """ - Guess what platform this is and dispatch to platform constructor. - """ - - system = platform.system() - if system == "FreeBSD": - return FreeBSD(args) - if system == "Darwin": - return Darwin(args) - if system == "Linux": - distro = platform.linux_distribution()[0].lower() - if distro in ("debian", "ubuntu"): - return Debian(args) - if distro in ("fedora", "centos"): - return Redhat(args) - raise NotImplementedError("Can't guess what platform this is, sorry") - -class Platform(object): - """ - Abstract base class representing an operating system platform. - """ - - apache_cer = os.path.join(rpki.autoconf.sysconfdir, "rpki", "apache.cer") - apache_key = os.path.join(rpki.autoconf.sysconfdir, "rpki", "apache.key") - - apache_conf = os.path.join(rpki.autoconf.sysconfdir, "rpki", "apache.conf") - apache_conf_sample = apache_conf + ".sample" - - apache_conf_preface = "" - - def __init__(self, args): - self.args = args - self.log("RPKI Apache configuration: platform \"%s\", action \"%s\"" % ( - self.__class__.__name__, args.action)) - getattr(self, args.action)() - - def log(self, msg): - if self.args.verbose: - print msg - - def run(self, *cmd, **kwargs): - self.log("Running %s" % " ".join(cmd)) - subprocess.check_call(cmd, **kwargs) - - req_cmd = ("openssl", "req", "-new", - "-config", "/dev/stdin", - "-out", "/dev/stdout", - "-keyout", apache_key, - "-newkey", "rsa:2048") - - x509_cmd = ("openssl", "x509", "-req", "-sha256", - "-signkey", apache_key, - "-in", "/dev/stdin", - "-out", apache_cer, - "-days", "3650") - - req_conf = '''\ - [req] - default_bits = 2048 - default_md = sha256 - distinguished_name = req_dn - prompt = no - encrypt_key = no - [req_dn] - CN = %s - ''' % fqdn - - def unlink(self, fn, silent = False): - if os.path.lexists(fn): - if not silent: - self.log("Removing %s" % fn) - os.unlink(fn) - elif not silent: - self.log("Would have removed %s if it existed" % fn) - - def del_certs(self, 
silent = False): - self.unlink(self.apache_cer, silent) - self.unlink(self.apache_key, silent) - - def add_certs(self): - if os.path.exists(self.apache_cer) and os.path.exists(self.apache_key): - return - self.del_certs() - req = subprocess.Popen(self.req_cmd, - stdin = subprocess.PIPE, - stdout = subprocess.PIPE, - stderr = open("/dev/null", "w")) - x509 = subprocess.Popen(self.x509_cmd, - stdin = req.stdout, - stderr = open("/dev/null", "w")) - req.stdin.write(self.req_conf) - req.stdin.close() - if req.wait(): - raise subprocess.CalledProcessError(req.returncode, self.req_cmd) - if x509.wait(): - raise subprocess.CalledProcessError(x509.returncode, self.x509_cmd) - self.log("Created %s and %s, chmoding %s" % ( - self.apache_cer, self.apache_key, self.apache_key)) - os.chmod(self.apache_key, 0600) - - _vhost = None - - @property - def vhost(self): - if self._vhost is None: - allow = allow_22_template if self.args.apache_version <= 22 else allow_24_template - self._vhost = vhost_template % dict(rpki.autoconf.__dict__, fqdn = fqdn, allow = allow) - return self._vhost - - @property - def name_virtual_host(self): - return name_virtual_host_template if self.args.apache_version <= 22 else "" - - @property - def too_complex(self): - return textwrap.dedent('''\ - # It looks like you already have HTTPS enabled in your - # Apache configuration, which makes your configuration too - # complex for us to enable support for the RPKI GUI automatically. - # - # To enable support, take a look at %s - # and copy what you need from that file into %s, - # paying attention to the comments which mark the bits that - # you might (or might not) need to change or omit, depending - # on the details of your particular Apache configuration. 
- ''' % (self.apache_conf_sample, self.apache_conf)) - - def install(self): - with open(self.apache_conf_sample, "w") as f: - self.log("Writing %s" % f.name) - f.write(self.apache_conf_preface) - f.write(self.name_virtual_host) - f.write(self.vhost) - if not os.path.exists(self.apache_conf): - self.unlink(self.apache_conf) - with open(self.apache_conf, "w") as f: - self.log("Writing %s" % f.name) - if self.test_url("https://%s/" % fqdn): - f.write(self.too_complex) - sys.stdout.write(self.too_complex) - else: - if not self.test_tcp("localhost", 443): - f.write(self.apache_conf_preface) - f.write(self.name_virtual_host) - f.write(self.vhost) - if not os.path.exists(self.apache_conf_target): - self.unlink(self.apache_conf_target) - self.log("Symlinking %s to %s" % ( - self.apache_conf_target, self.apache_conf)) - os.symlink(self.apache_conf, self.apache_conf_target) - self.add_certs() - self.enable() - self.restart() - - def enable(self): - pass - - def disable(self): - pass - - def remove(self): - try: - same = open(self.apache_conf, "r").read() == open(self.apache_conf_sample, "r").read() - except: - same = False - self.unlink(self.apache_conf_sample) - if same: - self.unlink(self.apache_conf) - self.unlink(self.apache_conf_target) - self.disable() - self.restart() - - def purge(self): - self.remove() - self.unlink(self.apache_conf) - self.del_certs() - - @staticmethod - def test_url(url = "https://localhost/"): - try: - urllib2.urlopen(url).close() - except IOError: - return False - else: - return True - - @staticmethod - def test_tcp(host = "localhost", port = 443, family = socket.AF_UNSPEC, proto = socket.SOCK_STREAM): - try: - addrinfo = socket.getaddrinfo(host, port, family, proto) - except socket.error: - return False - for af, socktype, proto, canon, sa in addrinfo: - try: - s = socket.socket(af, socktype, proto) - s.connect(sa) - s.close() - except socket.error: - continue - else: - return True - return False - -class FreeBSD(Platform): - """ - FreeBSD. 
- """ - - # On FreeBSD we have to ask httpd what version it is before we know - # where to put files or what to call the service. In FreeBSD's makefiles, - # this value is called APACHE_VERSION, and is calculated thusly: - # - # httpd -V | sed -ne 's/^Server version: Apache\/\([0-9]\)\.\([0-9]*\).*/\1\2/p' - - _apache_name = None - - @property - def apache_name(self): - if self._apache_name is None: - self._apache_name = "apache%s" % self.args.apache_version - return self._apache_name - - @property - def apache_conf_target(self): - return "/usr/local/etc/%s/Includes/rpki.conf" % self.apache_name - - apache_conf_preface = textwrap.dedent('''\ - # These directives tell Apache to listen on the HTTPS port - # and to enable name-based virtual hosting. If you already - # have HTTPS enabled elsewhere in your configuration, you may - # need to remove these. - - Listen [::]:443 - Listen 0.0.0.0:443 - ''') - - def restart(self): - self.run("service", self.apache_name, "restart") - -class Debian(Platform): - """ - Debian and related platforms like Ubuntu. - """ - - apache_conf_target = "/etc/apache2/sites-available/rpki" - - snake_oil_cer = "/etc/ssl/certs/ssl-cert-snakeoil.pem" - snake_oil_key = "/etc/ssl/private/ssl-cert-snakeoil.key" - - def add_certs(self): - if not os.path.exists(self.snake_oil_cer) or not os.path.exists(self.snake_oil_key): - return Platform.add_certs(self) - if not os.path.exists(self.apache_cer): - self.unlink(self.apache_cer) - os.symlink(self.snake_oil_cer, self.apache_cer) - if not os.path.exists(self.apache_key): - self.unlink(self.apache_key) - os.symlink(self.snake_oil_key, self.apache_key) - - def enable(self): - self.run("a2enmod", "ssl") - self.run("a2ensite", "rpki") - # - # In light of BREACH and CRIME attacks, mod_deflate is looking - # like a bad idea, so make sure it's off. 
- self.run("a2dismod", "deflate") - - def disable(self): - self.run("a2dissite", "rpki") - - def restart(self): - self.run("service", "apache2", "restart") - -class NIY(Platform): - def __init__(self, args): - raise NotImplementedError("Platform %s not implemented yet, sorry" % self.__class__.__name__) - -class Redhat(NIY): - """ - Redhat family of Linux distributions (Fedora, CentOS). - """ - -class Darwin(NIY): - """ - Mac OS X (aka Darwin). - """ - -def main(): - """ - Generate and (de)install configuration suitable for using Apache httpd - to drive the RPKI web interface under WSGI. - """ - - parser = argparse.ArgumentParser(description = __doc__) - group1 = parser.add_mutually_exclusive_group() - group2 = parser.add_mutually_exclusive_group() - - parser.add_argument("-v", "--verbose", - help = "whistle while you work", action = "store_true") - parser.add_argument("--apache-version", - help = "Apache version (default " + rpki.autoconf.APACHE_VERSION + ")", - type = int, default = rpki.autoconf.APACHE_VERSION) - - group1.add_argument("--freebsd", - help = "configure for FreeBSD", - action = "store_const", dest = "platform", const = FreeBSD) - group1.add_argument("--debian", "--ubuntu", - help = "configure for Debian/Ubuntu", - action = "store_const", dest = "platform", const = Debian) - group1.add_argument("--redhat", "--fedora", "--centos", - help = "configure for Redhat/Fedora/CentOS", - action = "store_const", dest = "platform", const = Redhat) - group1.add_argument("--macosx", "--darwin", - help = "configure for Mac OS X (Darwin)", - action = "store_const", dest = "platform", const = Darwin) - group1.add_argument("--guess", - help = "guess which platform configuration to use", - action = "store_const", dest = "platform", const = Guess) - - group2.add_argument("-i", "--install", - help = "install configuration", - action = "store_const", dest = "action", const = "install") - group2.add_argument("-r", "--remove", "--deinstall", "--uninstall", - help = "remove 
configuration", - action = "store_const", dest = "action", const = "remove") - group2.add_argument("-P", "--purge", - help = "remove configuration with extreme prejudice", - action = "store_const", dest = "action", const = "purge") - - parser.set_defaults(platform = Guess, action = "install") - args = parser.parse_args() - - try: - args.platform(args) - except Exception, e: - sys.exit(str(e)) - -if __name__ == "__main__": - main() diff --git a/rpkid/portal-gui/scripts/rpkigui-check-expired b/rpkid/portal-gui/scripts/rpkigui-check-expired deleted file mode 100755 index eb0c7fbb..00000000 --- a/rpkid/portal-gui/scripts/rpkigui-check-expired +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2012 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' - -from optparse import OptionParser -import logging -import sys - -# configure django ORM -from rpki.gui.script_util import setup -setup() - -from rpki.gui.app.check_expired import notify_expired - -usage = '%prog [ -nV ] [ handle1 handle2... ]' - -description = """Generate a report detailing all RPKI/BPKI certificates which -are due for impending expiration. 
If no resource handles are specified, a -report about all resource handles hosted by the local rpkid instance will be -generated.""" - -parser = OptionParser(usage, description=description) -parser.add_option('-V', '--version', help='display script version', - action='store_true', dest='version', default=False) -parser.add_option('-f', '--from', metavar='ADDRESS', dest='from_email', - help='specify the return email address for notifications') -parser.add_option('-t', '--expire-time', dest='expire_days', metavar='DAYS', - help='specify the number of days in the future to check') -parser.add_option('-l', '--level', dest='log_level', default='WARNING', - help='Set logging level [Default: %default]') -(options, args) = parser.parse_args() -if options.version: - print __version__ - sys.exit(0) - -v = getattr(logging, options.log_level.upper()) -logging.basicConfig(level=v) -logging.info('logging level set to ' + logging.getLevelName(v)) - -kwargs = {} -if options.from_email: - kwargs['from_email'] = options.from_email -if options.expire_days: - kwargs['expire_days'] = int(options.expire_days) -notify_expired(**kwargs) - -sys.exit(0) diff --git a/rpkid/portal-gui/scripts/rpkigui-flatten-roas.py b/rpkid/portal-gui/scripts/rpkigui-flatten-roas.py deleted file mode 100644 index e21c368b..00000000 --- a/rpkid/portal-gui/scripts/rpkigui-flatten-roas.py +++ /dev/null @@ -1,37 +0,0 @@ -from rpki.gui.script_util import setup -setup() - -from django.db import transaction -from django.db.models import Count -from rpki.gui.app.models import ROARequest -from rpki.irdb.zookeeper import Zookeeper - -handles = set() - - -@transaction.commit_on_success -def flatten(): - for roa in ROARequest.objects.annotate(num_prefixes=Count('prefixes')).filter(num_prefixes__gt=1): - print 'splitting roa for AS%d' % roa.asn - for pfx in roa.prefixes.all(): - # create new roa objects for each prefix - newroa = ROARequest.objects.create( - issuer=roa.issuer, - asn=roa.asn) - newroa.prefixes.create( - 
version=pfx.version, - prefix=pfx.prefix, - prefixlen=pfx.prefixlen, - max_prefixlen=pfx.max_prefixlen - ) - roa.delete() - handles.add(roa.issuer.handle) - -flatten() - -if handles: - # poke rpkid to run the cron job for each handle that had a roa change - z = Zookeeper() - for h in handles: - z.reset_identity(h) - z.run_rpkid_now() diff --git a/rpkid/portal-gui/scripts/rpkigui-import-routes b/rpkid/portal-gui/scripts/rpkigui-import-routes deleted file mode 100755 index 234a865b..00000000 --- a/rpkid/portal-gui/scripts/rpkigui-import-routes +++ /dev/null @@ -1,115 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -__version__ = '$Id$' - -import optparse -import logging -import time -import random -import signal -import errno -import atexit -import fcntl -import sys -import os - -# configure django ORM -from rpki.gui.script_util import setup -setup() - -from rpki.gui.routeview.util import import_routeviews_dump - - -class BadArgument(Exception): - pass - - -def timed_out(*ignored): - logging.info('timed out') - sys.exit(1) - - -if __name__ == '__main__': - parser = optparse.OptionParser( - usage='%prog [options] [PATH]', - description="""This tool is used to import the IPv4/6 BGP table dumps -from routeviews.org into the RPKI Web Portal database. If the -input file is a bzip2 compressed file, it will be decompressed -automatically.""") - parser.add_option('-t', '--type', dest='filetype', metavar='TYPE', - help='Specify the input file type (auto, text, mrt) [Default: %default]') - parser.add_option('-l', '--level', dest='log_level', default='ERROR', - help='Set logging level [Default: %default]') - parser.add_option('-u', '--bunzip2', dest='bunzip', metavar='PROG', - help='Specify bunzip2 program to use') - parser.add_option('-b', '--bgpdump', dest='bgpdump', metavar='PROG', - help='Specify path to bgdump binary') - parser.add_option('-j', '--jitter', dest='jitter', type='int', - help='Specify upper bound of startup delay, in seconds [Default: %default]') - parser.add_option('--lockfile', dest='lockfile', - help='Set name of lock file; empty string disables locking [Default: %default]') - parser.add_option('--timeout', dest='timeout', type='int', - help='Specify timeout for download and import, in seconds [Default: %default]') - parser.set_defaults(debug=False, verbose=False, filetype='auto', jitter=0, - lockfile='/tmp/rpkigui-import-routes.lock', timeout=90*60) - options, args = parser.parse_args() - - v = getattr(logging, options.log_level.upper()) - logging.basicConfig(level=v) - atexit.register(logging.shutdown) - logging.info('logging level set to ' + 
logging.getLevelName(v)) - - if options.bgpdump: - BGPDUMP = os.path.expanduser(options.bgpdump) - - if options.jitter > 0: - try: - delay = random.SystemRandom().randint(0, options.jitter) - except NotImplementedError: - delay = random.randint(0, options.jitter) - logging.info('jitter active, delaying startup for %d seconds' % delay) - time.sleep(delay) - - if options.lockfile: - try: - lock = os.open(options.lockfile, os.O_RDONLY | os.O_CREAT | os.O_NONBLOCK, 0666) - fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB) - except (IOError, OSError), e: - if e.errno == errno.EAGAIN: - logging.info('lock held by another process') - sys.exit(0) - else: - logging.exception(e) - sys.exit(1) - - try: - if len(args) > 1: - raise BadArgument('more than one filename specified') - - if options.timeout > 0: - signal.signal(signal.SIGALRM, timed_out) - signal.setitimer(signal.ITIMER_REAL, options.timeout) - - import_routeviews_dump(*args) - - if options.timeout > 0: - signal.setitimer(signal.ITIMER_REAL, 0) - - except Exception as e: - logging.exception(e) - sys.exit(1) - diff --git a/rpkid/portal-gui/scripts/rpkigui-query-routes b/rpkid/portal-gui/scripts/rpkigui-query-routes deleted file mode 100755 index 1f698f23..00000000 --- a/rpkid/portal-gui/scripts/rpkigui-query-routes +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2013 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' - -import sys -import optparse - -from rpki.gui.script_util import setup -setup() - -from rpki.gui.routeview import models as rv -from rpki.resource_set import resource_range_ip - -parser = optparse.OptionParser( - usage='%prog [options] PREFIX', - description='query the rpki web portal database for routes covering a ' - 'prefix specified as an argument, and display the validity and covering ' - 'ROAs for each route', - version=__version__, -) -options, args = parser.parse_args() - -if len(args) == 0: - print 'error: Specify an address/prefix' - sys.exit(1) - -# allow bare IP addresses -if '/' not in args[0]: - args[0] = args[0] + '/32' - -r = resource_range_ip.parse_str(args[0]) - -qs = rv.RouteOrigin.objects.filter( - prefix_min__lte=r.min, - prefix_max__gte=r.max -) - - -def validity_marker(route, roa, roa_prefix): - "Return + if the roa would cause the route to be accepted, or - if not" - # we already know the ROA covers this route because they are returned - # from RouteOrigin.roas, so just check the ASN and max prefix length - return '-' if (roa.asid == 0 or route.asn != roa.asid or - route.prefixlen > roa_prefix.max_length) else '+' - -# xxx.xxx.xxx.xxx/xx-xx is 22 characters -for route in qs: - print route.as_resource_range(), route.asn, route.status - for pfx in route.roa_prefixes: - for roa in pfx.roas.all(): - print validity_marker(route, roa, pfx), pfx.as_roa_prefix(), roa.asid, roa.repo.uri - print diff --git a/rpkid/portal-gui/scripts/rpkigui-rcynic b/rpkid/portal-gui/scripts/rpkigui-rcynic deleted file mode 100755 index 8fb91a70..00000000 --- a/rpkid/portal-gui/scripts/rpkigui-rcynic +++ /dev/null @@ -1,54 
+0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2011 SPARTA, Inc. dba Cobham -# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' - -# probably should be exported from rpki.gui.cacheview.util -default_logfile = '/var/rcynic/data/rcynic.xml' -default_root = '/var/rcynic/data' - -import logging -import sys - -from rpki.gui.script_util import setup -setup() - -from rpki.gui.cacheview.util import import_rcynic_xml - -if __name__ == '__main__': - import optparse - - parser = optparse.OptionParser() - parser.add_option("-l", "--level", dest="log_level", default='ERROR', - help="specify the logging level [default: %default]") - parser.add_option( - "-f", "--file", dest="logfile", - help="specify the rcynic XML file to parse [default: %default]", - default=default_logfile) - parser.add_option( - "-r", "--root", - help="specify the chroot directory for the rcynic jail [default: %default]", - metavar="DIR", default=default_root) - options, args = parser.parse_args(sys.argv) - - v = getattr(logging, options.log_level.upper()) - logging.basicConfig(level=v) - logging.info('log level set to %s' % logging.getLevelName(v)) - - import_rcynic_xml(options.root, options.logfile) - - logging.shutdown() diff --git 
a/rpkid/portal-gui/scripts/rpkigui-reset-demo.py b/rpkid/portal-gui/scripts/rpkigui-reset-demo.py deleted file mode 100644 index 0a3a1537..00000000 --- a/rpkid/portal-gui/scripts/rpkigui-reset-demo.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (C) 2012, 2013, 2014 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -This script is used to reset all of the labuser* accounts on demo.rpki.net back -to a state suitable for a new workshop. It removes all ROAs and Ghostbuster -issued by the labuser accounts. - -""" - -__version__ = '$Id$' - -from rpki.gui.script_util import setup -setup() - -import sys - -from rpki.gui.app.models import Conf -from rpki.irdb.models import ROARequest, GhostbusterRequest -from rpki.gui.app.glue import list_received_resources - -for n in xrange(1, 33): - username = 'labuser%02d' % n - print 'removing objects for ' + username - for cls in (ROARequest, GhostbusterRequest): - cls.objects.filter(issuer__handle=username).delete() - conf = Conf.objects.get(handle=username) - conf.clear_alerts() - print '... updating resource certificate cache' - list_received_resources(sys.stdout, conf) - - # Remove delegated resources (see https://trac.rpki.net/ticket/544) - # Note that we do not remove the parent-child relationship, just the - # resources. 
- for child in conf.children(): - child.asns.delete() - child.address_ranges.delete() diff --git a/rpkid/portal-gui/scripts/rpkigui-sync-users.py b/rpkid/portal-gui/scripts/rpkigui-sync-users.py deleted file mode 100644 index 9c636e95..00000000 --- a/rpkid/portal-gui/scripts/rpkigui-sync-users.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (C) 2013 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -__version__ = '$Id$' - -""" -Ensure that a web login exists for labuser* resource holder -""" -from rpki.gui.script_util import setup -setup() - -from django.contrib.auth.models import User -from rpki.gui.app.models import Conf, ConfACL - -# mysql> select * from irdb_resourceholderca left outer join auth_user on irdb_resourceholderca.handle = auth_user.username where username=NULL; - -for conf in Conf.objects.filter(handle__startswith='labuser'): - if not User.objects.filter(username=conf.handle).exists(): - print 'creating matching user for ' + conf.handle - user = User.objects.create_user(conf.handle, password='fnord') - ConfACL.objects.create(conf=conf, user=user) diff --git a/rpkid/pubd b/rpkid/pubd deleted file mode 100755 index 7d8ecbfa..00000000 --- a/rpkid/pubd +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -if __name__ == "__main__": - import rpki.pubd - rpki.pubd.main() diff --git a/rpkid/pubd.sql b/rpkid/pubd.sql deleted file mode 100644 index 3a58ec00..00000000 --- a/rpkid/pubd.sql +++ /dev/null @@ -1,59 +0,0 @@ --- $Id$ - --- Copyright (C) 2009--2010 Internet Systems Consortium ("ISC") --- --- Permission to use, copy, modify, and distribute this software for any --- purpose with or without fee is hereby granted, provided that the above --- copyright notice and this permission notice appear in all copies. --- --- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH --- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY --- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, --- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM --- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE --- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR --- PERFORMANCE OF THIS SOFTWARE. - --- Copyright (C) 2008 American Registry for Internet Numbers ("ARIN") --- --- Permission to use, copy, modify, and distribute this software for any --- purpose with or without fee is hereby granted, provided that the above --- copyright notice and this permission notice appear in all copies. --- --- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH --- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY --- AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, --- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM --- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE --- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR --- PERFORMANCE OF THIS SOFTWARE. - --- SQL objects needed by pubd.py. 
- --- The config table is weird because we're really only using it --- to store one BPKI CRL, but putting this here lets us use a lot of --- existing machinery and the alternatives are whacky in other ways. - -DROP TABLE IF EXISTS client; -DROP TABLE IF EXISTS config; - -CREATE TABLE config ( - config_id SERIAL NOT NULL, - bpki_crl LONGBLOB, - PRIMARY KEY (config_id) -) ENGINE=InnoDB; - -CREATE TABLE client ( - client_id SERIAL NOT NULL, - client_handle VARCHAR(255) NOT NULL, - base_uri TEXT, - bpki_cert LONGBLOB, - bpki_glue LONGBLOB, - last_cms_timestamp DATETIME, - PRIMARY KEY (client_id), - UNIQUE (client_handle) -) ENGINE=InnoDB; - --- Local Variables: --- indent-tabs-mode: nil --- End: diff --git a/rpkid/publication-schema.rnc b/rpkid/publication-schema.rnc deleted file mode 100644 index 4353ae80..00000000 --- a/rpkid/publication-schema.rnc +++ /dev/null @@ -1,137 +0,0 @@ -# $Id$ -# -# RelaxNG schema for RPKI publication protocol. -# -# Copyright (C) 2012--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2011 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -default namespace = "http://www.hactrn.net/uris/rpki/publication-spec/" - -# Top level PDU - -start = element msg { - attribute version { xsd:positiveInteger { maxInclusive="1" } }, - ( (attribute type { "query" }, query_elt*) | - (attribute type { "reply" }, reply_elt*) ) -} - -# PDUs allowed in a query -query_elt = ( config_query | client_query | certificate_query | crl_query | - manifest_query | roa_query | ghostbuster_query ) - -# PDUs allowed in a reply -reply_elt = ( config_reply | client_reply | certificate_reply | crl_reply | - manifest_reply | roa_reply | ghostbuster_reply | report_error_reply ) - -# Tag attributes for bulk operations -tag = attribute tag { xsd:token {maxLength="1024" } } - -# Base64 encoded DER stuff -#base64 = xsd:base64Binary { maxLength="512000" } -# -# Sadly, it turns out that CRLs can in fact get longer than this for an active CA. -# Remove length limit for now, think about whether to put it back later. -base64 = xsd:base64Binary - -# Publication URLs -uri_t = xsd:anyURI { maxLength="4096" } -uri = attribute uri { uri_t } - -# Handles on remote objects (replaces passing raw SQL IDs). NB: -# Unlike the up-down protocol, handles in this protocol allow "/" as a -# hierarchy delimiter. -object_handle = xsd:string { maxLength="255" pattern="[\-_A-Za-z0-9/]+" } - -# element (use restricted to repository operator) -# config_handle attribute, create, list, and destroy commands omitted deliberately, see code for details - -config_payload = (element bpki_crl { base64 }?) - -config_query |= element config { attribute action { "set" }, tag?, config_payload } -config_reply |= element config { attribute action { "set" }, tag? } -config_query |= element config { attribute action { "get" }, tag? 
} -config_reply |= element config { attribute action { "get" }, tag?, config_payload } - -# element (use restricted to repository operator) - -client_handle = attribute client_handle { object_handle } - -client_bool = attribute clear_replay_protection { "yes" }? - -client_payload = (attribute base_uri { uri_t }?, element bpki_cert { base64 }?, element bpki_glue { base64 }?) - -client_query |= element client { attribute action { "create" }, tag?, client_handle, client_bool, client_payload } -client_reply |= element client { attribute action { "create" }, tag?, client_handle } -client_query |= element client { attribute action { "set" }, tag?, client_handle, client_bool, client_payload } -client_reply |= element client { attribute action { "set" }, tag?, client_handle } -client_query |= element client { attribute action { "get" }, tag?, client_handle } -client_reply |= element client { attribute action { "get" }, tag?, client_handle, client_payload } -client_query |= element client { attribute action { "list" }, tag? 
} -client_reply |= element client { attribute action { "list" }, tag?, client_handle, client_payload } -client_query |= element client { attribute action { "destroy" }, tag?, client_handle } -client_reply |= element client { attribute action { "destroy" }, tag?, client_handle } - -# element - -certificate_query |= element certificate { attribute action { "publish" }, tag?, uri, base64 } -certificate_reply |= element certificate { attribute action { "publish" }, tag?, uri } -certificate_query |= element certificate { attribute action { "withdraw" }, tag?, uri } -certificate_reply |= element certificate { attribute action { "withdraw" }, tag?, uri } - -# element - -crl_query |= element crl { attribute action { "publish" }, tag?, uri, base64 } -crl_reply |= element crl { attribute action { "publish" }, tag?, uri } -crl_query |= element crl { attribute action { "withdraw" }, tag?, uri } -crl_reply |= element crl { attribute action { "withdraw" }, tag?, uri } - -# element - -manifest_query |= element manifest { attribute action { "publish" }, tag?, uri, base64 } -manifest_reply |= element manifest { attribute action { "publish" }, tag?, uri } -manifest_query |= element manifest { attribute action { "withdraw" }, tag?, uri } -manifest_reply |= element manifest { attribute action { "withdraw" }, tag?, uri } - -# element - -roa_query |= element roa { attribute action { "publish" }, tag?, uri, base64 } -roa_reply |= element roa { attribute action { "publish" }, tag?, uri } -roa_query |= element roa { attribute action { "withdraw" }, tag?, uri } -roa_reply |= element roa { attribute action { "withdraw" }, tag?, uri } - -# element - -ghostbuster_query |= element ghostbuster { attribute action { "publish" }, tag?, uri, base64 } -ghostbuster_reply |= element ghostbuster { attribute action { "publish" }, tag?, uri } -ghostbuster_query |= element ghostbuster { attribute action { "withdraw" }, tag?, uri } -ghostbuster_reply |= element ghostbuster { attribute action { "withdraw" }, 
tag?, uri } - -# element - -error = xsd:token { maxLength="1024" } - -report_error_reply = element report_error { - tag?, - attribute error_code { error }, - xsd:string { maxLength="512000" }? -} - -# Local Variables: -# indent-tabs-mode: nil -# comment-start: "# " -# comment-start-skip: "#[ \t]*" -# End: diff --git a/rpkid/publication-schema.rng b/rpkid/publication-schema.rng deleted file mode 100644 index 63fb6ea5..00000000 --- a/rpkid/publication-schema.rng +++ /dev/null @@ -1,574 +0,0 @@ - - - - - - - - - 1 - - - - - - query - - - - - - - - reply - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1024 - - - - - - - - - - - 4096 - - - - - - - - - - - 255 - [\-_A-Za-z0-9/]+ - - - - - - - - - - - - - - set - - - - - - - - - - - set - - - - - - - - - - get - - - - - - - - - - get - - - - - - - - - - - - - - - - - yes - - - - - - - - - - - - - - - - - - - - - - - - create - - - - - - - - - - - - - create - - - - - - - - - - - set - - - - - - - - - - - - - set - - - - - - - - - - - get - - - - - - - - - - - get - - - - - - - - - - - - list - - - - - - - - - - list - - - - - - - - - - - - destroy - - - - - - - - - - - destroy - - - - - - - - - - - - publish - - - - - - - - - - - - publish - - - - - - - - - - - withdraw - - - - - - - - - - - withdraw - - - - - - - - - - - - publish - - - - - - - - - - - - publish - - - - - - - - - - - withdraw - - - - - - - - - - - withdraw - - - - - - - - - - - - publish - - - - - - - - - - - - publish - - - - - - - - - - - withdraw - - - - - - - - - - - withdraw - - - - - - - - - - - - publish - - - - - - - - - - - - publish - - - - - - - - - - - withdraw - - - - - - - - - - - withdraw - - - - - - - - - - - - publish - - - - - - - - - - - - publish - - - - - - - - - - - withdraw - - - - - - - - - - - withdraw - - - - - - - - - - - 1024 - - - - - - - - - - - - - 512000 - - - - - - diff --git a/rpkid/rootd b/rpkid/rootd deleted file mode 100755 index cb59f958..00000000 --- a/rpkid/rootd +++ /dev/null @@ -1,21 
+0,0 @@ -#!/usr/bin/env python - -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -if __name__ == "__main__": - import rpki.rootd - rpki.rootd.main() diff --git a/rpkid/router-certificate-schema.rnc b/rpkid/router-certificate-schema.rnc deleted file mode 100644 index 8cc325ce..00000000 --- a/rpkid/router-certificate-schema.rnc +++ /dev/null @@ -1,61 +0,0 @@ -# $Id$ -# -# RelaxNG schema for BGPSEC router certificate interchange format. -# -# At least for now, this is a trivial encapsulation of a PKCS #10 -# request, a set (usually containing exactly one member) of autonomous -# system numbers, and a router-id. Be warned that this could change -# radically by the time we have any real operational understanding of -# how these things will be used, this is just our current best guess -# to let us move forward on initial coding. -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -default namespace = "http://www.hactrn.net/uris/rpki/router-certificate/" - -version = "1" -base64 = xsd:base64Binary { maxLength="512000" } -router_id = xsd:unsignedInt -asn_list = xsd:string { maxLength="512000" pattern="[0-9][\-,0-9]*" } -timestamp = xsd:dateTime { pattern=".*Z" } - -# Core payload used in this schema. - -payload = ( - attribute router_id { router_id }, - attribute asn { asn_list }, - attribute valid_until { timestamp }?, - base64 -) - -# We allow two forms, one with a wrapper to allow multiple requests in -# a single file, one without for brevity; the version attribute goes -# in the outermost element in either case. 
- -start |= element router_certificate_request { - attribute version { version }, - payload -} - -start |= element router_certificate_requests { - attribute version { version }, - element router_certificate_request { payload }* -} - -# Local Variables: -# indent-tabs-mode: nil -# comment-start: "# " -# comment-start-skip: "#[ \t]*" -# End: diff --git a/rpkid/router-certificate-schema.rng b/rpkid/router-certificate-schema.rng deleted file mode 100644 index d8be9eda..00000000 --- a/rpkid/router-certificate-schema.rng +++ /dev/null @@ -1,98 +0,0 @@ - - - - - 1 - - - - 512000 - - - - - - - - 512000 - [0-9][\-,0-9]* - - - - - .*Z - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/rpkid/rpki-confgen b/rpkid/rpki-confgen deleted file mode 100755 index f531bee8..00000000 --- a/rpkid/rpki-confgen +++ /dev/null @@ -1,291 +0,0 @@ -#!/usr/bin/env python - -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2013 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR -# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -import os -import sys -import argparse -import base64 -import textwrap - -from lxml.etree import Element, SubElement, ElementTree - -space4 = " " * 4 -space6 = " " * 6 -space8 = " " * 8 -star78 = "*" * 78 - -wiki_wrapper = textwrap.TextWrapper() -conf_wrapper = textwrap.TextWrapper(initial_indent = "# ", subsequent_indent = "# ") -xml6_wrapper = textwrap.TextWrapper(initial_indent = space6, subsequent_indent = space6) -xml8_wrapper = textwrap.TextWrapper(initial_indent = space8, subsequent_indent = space8) - -class Option(object): - - def __init__(self, name, value, doc): - self.name = name - self.value = value - self.doc = doc - - @property - def width(self): - return len(self.name) - - def to_xml(self): - x = Element("option", name = self.name) - if self.value is not None: - x.set("value", self.value) - for d in self.doc: - SubElement(x, "doc").text = "\n" + xml8_wrapper.fill(d) + "\n" + space6 - return x - - def to_wiki(self, f): - f.write("\n== %s == #%s\n" % (self.name, self.name)) - for d in self.doc: - f.write("\n%s\n" % wiki_wrapper.fill(d)) - if self.value is None: - f.write("\n%s\n" % wiki_wrapper.fill("No default value.")) - else: - f.write("\n{{{\n#!ini\n%s = %s\n}}}\n" % (self.name, self.value)) - - def to_conf(self, f, width): - for i, d in enumerate(self.doc): - f.write("%s\n%s\n" % ("" if i == 0 else "#", conf_wrapper.fill(d))) - if self.value is None: - f.write("\n#%-*s = ???\n" % (width - 1, self.name)) - else: - f.write("\n%-*s = %s\n" % (width, self.name, self.value)) - -class Section(object): - - def __init__(self, name): - self.name = name - self.doc = [] - self.options = [] - - @property - def width(self): - return max(o.width for o in self.options) - - @classmethod - def from_xml(cls, elt): - self = cls(name = elt.get("name")) - for x in elt.iterchildren("doc"): - self.doc.append(" ".join(x.text.split())) - for x in elt.iterchildren("option"): - self.options.append(Option(name = x.get("name"), value = x.get("value"), - doc = [" 
".join(d.text.split()) - for d in x.iterchildren("doc")])) - return self - - def to_xml(self): - x = Element("section", name = self.name) - for d in self.doc: - SubElement(x, "doc").text = "\n" + xml6_wrapper.fill(d) + "\n" + space4 - x.extend(o.to_xml() for o in self.options) - return x - - def to_wiki(self, f): - f.write("\n= [%s] section = #%s\n" % (self.name, self.name)) - for d in self.doc: - f.write("\n%s\n" % wiki_wrapper.fill(d)) - for o in self.options: - o.to_wiki(f) - - def to_conf(self, f, width): - f.write("\n" + "#" * 78 + "\n\n[" + self.name + "]\n") - if self.doc: - f.write("\n##") - for i, d in enumerate(self.doc): - f.write("%s\n%s\n" % ("" if i == 0 else "#", conf_wrapper.fill(d))) - f.write("##\n") - for o in self.options: - o.to_conf(f, width) - -def wiki_header(f, ident, toc): - f.write("\n".join(( - "{{{", - "#!comment", - "", - star78, - "THIS PAGE WAS GENERATED AUTOMATICALLY, DO NOT EDIT.", - "", - "Generated from " + ident, - " by $Id$", - star78, - "", - "}}}", - ""))) - if toc is not None: - f.write("[[TracNav(%s)]]\n" % toc) - f.write("[[PageOutline]]\n") - -def conf_header(f, ident): - f.write("\n".join(( - "# Automatically generated. 
Edit as needed, but be careful of overwriting.", - "#", - "# Generated from " + ident, - "# by $Id$", - ""))) - - -# http://stackoverflow.com/questions/9027028/argparse-argument-order - -class CustomAction(argparse.Action): - def __call__(self, parser, namespace, values, option_string = None): - if not "ordered_args" in namespace: - namespace.ordered_args = [] - namespace.ordered_args.append((self.dest, values)) - -class CustomFlagAction(argparse.Action): - def __init__(self, option_strings, dest, default = None, - required = False, help = None): - super(CustomFlagAction, self).__init__( - option_strings = option_strings, dest = dest, nargs = 0, - const = None, default = default, required = required, help = help) - def __call__(self, parser, namespace, values, option_string = None): - if not "ordered_args" in namespace: - namespace.ordered_args = [] - namespace.ordered_args.append((self.dest, None)) - - -class main(object): - - def __init__(self): - self.sections = [] - self.section_map = None - self.option_map = None - self.ident = None - self.toc = None - - parser = argparse.ArgumentParser(description = __doc__) - parser.add_argument("--read-xml", metavar = "FILE", action = CustomAction, - required = True, type = argparse.FileType("r"), - help = "XML input file defining sections and options") - parser.add_argument("--write-xml", metavar = "FILE", action = CustomAction, - help = "XML file to write") - parser.add_argument("--write-wiki", metavar = "FILE", action = CustomAction, - help = "TracWiki file to write") - parser.add_argument("--write-conf", metavar = "FILE", action = CustomAction, - help = "rpki.conf configuration file to write") - parser.add_argument("--set", metavar = "VARVAL", action = CustomAction, - help = "variable setting in form \"VAR=VAL\"") - parser.add_argument("--pwgen", metavar = "VAR", action = CustomAction, - help = "set variable to generated password") - parser.add_argument("--toc", metavar = "TRACNAV", action = CustomAction, - help = "set 
TOC value to use with TracNav plugin") - parser.add_argument("--autoconf", action = CustomFlagAction, - help = "configure [autoconf] section") - args = parser.parse_args() - - for cmd, arg in args.ordered_args: - getattr(self, "do_" + cmd)(arg) - - - def do_read_xml(self, arg): - self.option_map = None - root = ElementTree(file = arg).getroot() - self.ident = root.get("ident") - self.sections.extend(Section.from_xml(x) for x in root) - self.option_map = {} - self.section_map = {} - for section in self.sections: - if section.name in self.section_map: - sys.exit("Duplicate section %s" % section.name) - self.section_map[section.name] = section - for option in section.options: - name = (section.name, option.name) - if name in self.option_map: - sys.exit("Duplicate option %s::%s" % name) - self.option_map[name] = option - - - def do_set(self, arg): - try: - name, value = arg.split("=", 1) - section, option = name.split("::") - except ValueError: - sys.exit("Couldn't parse --set specification \"%s\"" % a) - name = (section, option) - if name not in self.option_map: - sys.exit("Couldn't find option %s::%s" % name) - self.option_map[name].value = value - - - def do_pwgen(self, arg): - try: - section, option = arg.split("::") - except ValueError: - sys.exit("Couldn't parse --pwgen specification \"%s\"" % a) - name = (section, option) - if name not in self.option_map: - sys.exit("Couldn't find option %s::%s" % name) - self.option_map[name].value = base64.urlsafe_b64encode(os.urandom(66)) - - - def do_autoconf(self, ignored): - try: - import rpki.autoconf - for option in self.section_map["autoconf"].options: - try: - option.value = getattr(rpki.autoconf, option.name) - except AttributeError: - pass - except ImportError: - sys.exit("rpki.autoconf module is not available") - except KeyError: - sys.exit("Couldn't find autoconf section") - - - def do_write_xml(self, arg): - x = Element("configuration", ident = self.ident) - x.extend(s.to_xml() for s in self.sections) - 
ElementTree(x).write(arg, pretty_print = True, encoding = "us-ascii") - - - def do_write_wiki(self, arg): - if "%" in arg: - for section in self.sections: - with open(arg % section.name, "w") as f: - wiki_header(f, self.ident, self.toc) - section.to_wiki(f) - else: - with open(arg, "w") as f: - for i, section in enumerate(self.sections): - if i == 0: - wiki_header(f, self.ident, self.toc) - else: - f.write("\f\n") - section.to_wiki(f) - - - def do_write_conf(self, arg): - with open(arg, "w") as f: - conf_header(f, self.ident) - width = max(s.width for s in self.sections) - for section in self.sections: - section.to_conf(f, width) - - - def do_toc(self, arg): - self.toc = arg - - -if __name__ == "__main__": - main() diff --git a/rpkid/rpki-confgen.xml b/rpkid/rpki-confgen.xml deleted file mode 100644 index e0ed273a..00000000 --- a/rpkid/rpki-confgen.xml +++ /dev/null @@ -1,900 +0,0 @@ - - - - -
      - - - The "`[myrpki]`" section contains all the parameters that you - really need to configure. The name "`myrpki`" is historical and - may change in the future. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      - -
      - - - rpkid's default config file is the system `rpki.conf` file. - Start rpkid with "`-c filename`" to choose a different config - file. All options are in the "`[rpkid]`" section. BPKI - Certificates and keys may be in either DER or PEM format. - - - - - - - - - - - - - - - - - - - - - - - - -
      - -
      - - - irdbd's default configuration file is the system `rpki.conf` - file. Start irdbd with "`-c filename`" to choose a different - configuration file. All options are in the "`[irdbd]`" section. - - - - Since irdbd is part of the back-end system, it has direct access to - the back-end's SQL database, and thus is able to pull its own BPKI - configuration directly from the database, and thus needs a bit less - configuration than the other daemons. - - - - - - - - - - - - - - -
      - -
      - - - pubd's default configuration file is the system `rpki.conf` - file. Start pubd with "`-c filename`" to choose a different - configuration file. All options are in the "`[pubd]`" section. - BPKI certificates and keys may be either DER or PEM format. - - - - - - - - - - - - - - - - - - - - - - -
      - -
      - - - You don't need to run rootd unless you're IANA, are certifying - private address space, or are an RIR which refuses to accept IANA as - the root of the public address hierarchy. - - - - Ok, if that wasn't enough to scare you off: rootd is a mess, and - needs to be rewritten, or, better, merged into rpkid. It - doesn't use the publication protocol, and it requires far too - many configuration parameters. - - - - rootd was originally intended to be a very simple program which - simplified rpkid enormously by moving one specific task (acting - as the root CA of an RPKI certificate hierarchy) out of rpkid. - As the specifications and code (mostly the latter) have evolved, - however, this task has become more complicated, and rootd would - have to become much more complicated to keep up. - - - - Don't run rootd unless you're sure that you need to do so. - - - - Still think you need to run rootd? OK, but remember, you have - been warned.... - - - - rootd's default configuration file is the system `rpki.conf` - file. Start rootd with "`-c filename`" to choose a different - configuration file. All options are in the "`[rootd]`" section. - Certificates and keys may be in either DER or PEM format. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      - -
      - - - Glue to allow the Django application to pull user configuration - from this file rather than directly editing settings.py. - - - - - - - - - - - - -
      - -
      - - - rpki-confgen --autoconf records the current autoconf settings - here, so that other options can refer to them. The section name - "autoconf" is magic, don't change it. - - - - - - - - - - -
      - -
      diff --git a/rpkid/rpki-sql-backup b/rpkid/rpki-sql-backup deleted file mode 100755 index 0b2d079d..00000000 --- a/rpkid/rpki-sql-backup +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python - -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2010-2013 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR -# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Back up data from SQL databases, looking at config file to figure out -which databases and what credentials to use with them. 
-""" - -import subprocess -import os -import argparse -import sys -import time -import rpki.config - -os.environ["TZ"] = "UTC" -time.tzset() - -parser = argparse.ArgumentParser(description = __doc__) -parser.add_argument("-c", "--config", - help = "override default location of configuration file") -parser.add_argument("-o", "--output", - type = argparse.FileType("wb"), default = sys.stdout, - help = "destination for SQL dump (default: stdout)") -args = parser.parse_args() - -cfg = rpki.config.parser(args.config, "myrpki") - -for name in ("rpkid", "irdbd", "pubd"): - if cfg.getboolean("start_" + name, False): - subprocess.check_call( - ("mysqldump", "--add-drop-database", - "-u", cfg.get("sql-username", section = name), - "-p" + cfg.get("sql-password", section = name), - "-B", cfg.get("sql-database", section = name)), - stdout = args.output) diff --git a/rpkid/rpki-sql-setup b/rpkid/rpki-sql-setup deleted file mode 100755 index 40a78532..00000000 --- a/rpkid/rpki-sql-setup +++ /dev/null @@ -1,311 +0,0 @@ -#!/usr/bin/env python - -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009-2013 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR -# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -import os -import sys -import glob -import getpass -import argparse -import datetime -import rpki.config -import rpki.version -import rpki.autoconf -import rpki.sql_schemas - -from rpki.mysql_import import MySQLdb, _mysql_exceptions - -ER_NO_SUCH_TABLE = 1146 # See mysqld_ername.h - - -class RootDB(object): - """ - Class to wrap MySQL actions that require root-equivalent access so - we can defer such actions until we're sure they're really needed. - Overall goal here is to prompt the user for the root password once - at most, and not at all when not necessary. - """ - - def __init__(self, mysql_defaults = None): - self.initialized = False - self.mysql_defaults = mysql_defaults - - def __getattr__(self, name): - if self.initialized: - raise AttributeError - if self.mysql_defaults is None: - self.db = MySQLdb.connect(db = "mysql", - user = "root", - passwd = getpass.getpass("Please enter your MySQL root password: ")) - else: - mysql_cfg = rpki.config.parser(self.mysql_defaults, "client") - self.db = MySQLdb.connect(db = "mysql", - user = mysql_cfg.get("user"), - passwd = mysql_cfg.get("password")) - self.cur = self.db.cursor() - self.cur.execute("SHOW DATABASES") - self.databases = set(d[0] for d in self.cur.fetchall()) - self.initialized = True - return getattr(self, name) - - def close(self): - if self.initialized: - self.db.close() - - -class UserDB(object): - """ - Class to wrap MySQL access parameters for a particular database. - - NB: The SQL definitions for the upgrade_version table is embedded in - this class rather than being declared in any of the .sql files. - This is deliberate: nothing but the upgrade system should ever touch - this table, and it's simpler to keep everything in one place. - - We have to be careful about SQL commits here, because CREATE TABLE - implies an automatic commit. So presence of the magic table per se - isn't significant, only its content (or lack thereof). 
- """ - - upgrade_version_table_schema = """ - CREATE TABLE upgrade_version ( - version TEXT NOT NULL, - updated DATETIME NOT NULL - ) ENGINE=InnoDB - """ - - def __init__(self, name): - self.name = name - self.database = cfg.get("sql-database", section = name) - self.username = cfg.get("sql-username", section = name) - self.password = cfg.get("sql-password", section = name) - self.db = None - self.cur = None - - def open(self): - self.db = MySQLdb.connect(db = self.database, user = self.username, passwd = self.password) - self.db.autocommit(False) - self.cur = self.db.cursor() - - def close(self): - if self.cur is not None: - self.cur.close() - self.cur = None - if self.db is not None: - self.db.commit() - self.db.close() - self.db = None - - @property - def exists_and_accessible(self): - try: - MySQLdb.connect(db = self.database, user = self.username, passwd = self.password).close() - except: - return False - else: - return True - - @property - def version(self): - try: - self.cur.execute("SELECT version FROM upgrade_version") - v = self.cur.fetchone() - return Version(None if v is None else v[0]) - except _mysql_exceptions.ProgrammingError, e: - if e.args[0] != ER_NO_SUCH_TABLE: - raise - log("Creating upgrade_version table in %s" % self.name) - self.cur.execute(self.upgrade_version_table_schema) - return Version(None) - - @version.setter - def version(self, v): - if v > self.version: - self.cur.execute("DELETE FROM upgrade_version") - self.cur.execute("INSERT upgrade_version (version, updated) VALUES (%s, %s)", (v, datetime.datetime.now())) - self.db.commit() - log("Updated %s to %s" % (self.name, v)) - - @property - def schema(self): - lines = [] - for line in getattr(rpki.sql_schemas, self.name, "").splitlines(): - line = " ".join(line.split()) - if line and not line.startswith("--"): - lines.append(line) - return [statement.strip() for statement in " ".join(lines).rstrip(";").split(";") if statement.strip()] - - -class Version(object): - """ - A version 
number. This is a class in its own right to force the - comparision and string I/O behavior we want. - """ - - def __init__(self, v): - if v is None: - v = "0.0" - self.v = tuple(v.lower().split(".")) - - def __str__(self): - return ".".join(self.v) - - def __cmp__(self, other): - return cmp(self.v, other.v) - - -class Upgrade(object): - """ - One upgrade script. Really, just its filename and the Version - object we parse from its filename, we don't need to read the script - itself except when applying it, but we do need to sort all the - available upgrade scripts into version order. - """ - - @classmethod - def load_all(cls, name, dir): - g = os.path.join(dir, "upgrade-%s-to-*.py" % name) - for fn in glob.iglob(g): - yield cls(g, fn) - - def __init__(self, g, fn): - head, sep, tail = g.partition("*") - self.fn = fn - self.version = Version(fn[len(head):-len(tail)]) - - def __cmp__(self, other): - return cmp(self.version, other.version) - - def apply(self, db): - # db is an argument here primarily so the script we exec can get at it - log("Applying %s to %s" % (self.fn, db.name)) - with open(self.fn, "r") as f: - exec f - - -def do_drop(name): - db = UserDB(name) - if db.database in root.databases: - log("DROP DATABASE %s" % db.database) - root.cur.execute("DROP DATABASE %s" % db.database) - root.db.commit() - -def do_create(name): - db = UserDB(name) - log("CREATE DATABASE %s" % db.database) - root.cur.execute("CREATE DATABASE %s" % db.database) - log("GRANT ALL ON %s.* TO %s@localhost IDENTIFIED BY ###" % (db.database, db.username)) - root.cur.execute("GRANT ALL ON %s.* TO %s@localhost IDENTIFIED BY %%s" % (db.database, db.username), - (db.password,)) - root.db.commit() - db.open() - for statement in db.schema: - if not statement.upper().startswith("DROP TABLE"): - log(statement) - db.cur.execute(statement) - db.version = current_version - db.close() - -def do_script_drop(name): - db = UserDB(name) - print "DROP DATABASE IF EXISTS %s;" % db.database - -def 
do_drop_and_create(name): - do_drop(name) - do_create(name) - -def do_fix_grants(name): - db = UserDB(name) - if not db.exists_and_accessible: - log("GRANT ALL ON %s.* TO %s@localhost IDENTIFIED BY ###" % (db.database, db.username)) - root.cur.execute("GRANT ALL ON %s.* TO %s@localhost IDENTIFIED BY %%s" % (db.database, db.username), - (db.password,)) - root.db.commit() - -def do_create_if_missing(name): - db = UserDB(name) - if not db.exists_and_accessible: - do_create(name) - -def do_apply_upgrades(name): - upgrades = sorted(Upgrade.load_all(name, args.upgrade_scripts)) - if upgrades: - db = UserDB(name) - db.open() - log("Current version of %s is %s" % (db.name, db.version)) - for upgrade in upgrades: - if upgrade.version > db.version: - upgrade.apply(db) - db.version = upgrade.version - db.version = current_version - db.close() - -def log(text): - if args.verbose: - print "#", text - -parser = argparse.ArgumentParser(description = """\ -Automated setup of all SQL stuff used by the RPKI CA tools. Pulls -configuration from rpki.conf, prompts for MySQL password when needed. 
-""") -group = parser.add_mutually_exclusive_group() -parser.add_argument("-c", "--config", - help = "specify alternate location for rpki.conf") -parser.add_argument("-v", "--verbose", action = "store_true", - help = "whistle while you work") -parser.add_argument("--mysql-defaults", - help = "specify MySQL root access credentials via a configuration file") -parser.add_argument("--upgrade-scripts", - default = os.path.join(rpki.autoconf.datarootdir, "rpki", "upgrade-scripts"), - help = "override default location of upgrade scripts") -group.add_argument("--create", - action = "store_const", dest = "dispatch", const = do_create, - help = "create databases and load schemas") -group.add_argument("--drop", - action = "store_const", dest = "dispatch", const = do_drop, - help = "drop databases") -group.add_argument("--script-drop", - action = "store_const", dest = "dispatch", const = do_script_drop, - help = "send SQL commands to drop databases to standard output") -group.add_argument("--drop-and-create", - action = "store_const", dest = "dispatch", const = do_drop_and_create, - help = "drop databases then recreate them and load schemas") -group.add_argument("--fix-grants", - action = "store_const", dest = "dispatch", const = do_fix_grants, - help = "whack database access to match current configuration file") -group.add_argument("--create-if-missing", - action = "store_const", dest = "dispatch", const = do_create_if_missing, - help = "create databases and load schemas if they don't exist already") -group.add_argument("--apply-upgrades", - action = "store_const", dest = "dispatch", const = do_apply_upgrades, - help = "apply upgrade scripts to existing databases") -parser.set_defaults(dispatch = do_create_if_missing) -args = parser.parse_args() - -try: - cfg = rpki.config.parser(args.config, "myrpki") - root = RootDB(args.mysql_defaults) - current_version = Version(rpki.version.VERSION) - for name in ("irdbd", "rpkid", "pubd"): - if cfg.getboolean("start_" + name, False): - 
args.dispatch(name) - root.close() -except Exception, e: - #sys.exit(str(e)) - raise diff --git a/rpkid/rpki-start-servers b/rpkid/rpki-start-servers deleted file mode 100755 index edaffb2e..00000000 --- a/rpkid/rpki-start-servers +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env python - -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2013 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -Start servers, using config file to figure out which servers the user -wants started. 
-""" - -import subprocess -import os -import argparse -import sys -import time -import rpki.config -import rpki.autoconf - -os.environ["TZ"] = "UTC" -time.tzset() - -parser = argparse.ArgumentParser(description = __doc__) -parser.add_argument("-c", "--config", - help = "override default location of configuration file") -parser.add_argument("-d", "--debug", action = "store_true", - help = "enable debugging") -parser.add_argument("--logdir", default = ".", - help = "where to write write log files when debugging") -args = parser.parse_args() - -cfg = rpki.config.parser(args.config, "myrpki") - -def run(name): - # pylint: disable=E1103 - cmd = (os.path.join(rpki.autoconf.libexecdir, name), "-c", cfg.filename) - if args.debug: - proc = subprocess.Popen(cmd + ("-d",), - stdout = open(os.path.join(args.logdir, name + ".log"), "a"), - stderr = subprocess.STDOUT) - else: - proc = subprocess.Popen(cmd) - if args.debug and proc.poll() is None: - print "Started %s, pid %s" % (name, proc.pid) - elif not args.debug and proc.wait() == 0: - print "Started %s" % name - else: - print "Problem starting %s, pid %s" % (name, proc.pid) - - -if cfg.getboolean("start_irdbd", cfg.getboolean("run_rpkid", False)): - run("irdbd") - -if cfg.getboolean("start_rpkid", cfg.getboolean("run_rpkid", False)): - run("rpkid") - -if cfg.getboolean("start_pubd", cfg.getboolean("run_pubd", False)): - run("pubd") - -if cfg.getboolean("start_rootd", cfg.getboolean("run_rootd", False)): - run("rootd") diff --git a/rpkid/rpki/POW/__init__.py b/rpkid/rpki/POW/__init__.py deleted file mode 100644 index d3796245..00000000 --- a/rpkid/rpki/POW/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from _POW import * - -# Set callback to let POW construct rpki.sundial.datetime objects - -from rpki.sundial import datetime as sundial_datetime -customDatetime(sundial_datetime) -del sundial_datetime diff --git a/rpkid/rpki/__init__.py b/rpkid/rpki/__init__.py deleted file mode 100644 index 9e090f63..00000000 --- 
a/rpkid/rpki/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file exists to tell Python that this the content of this -# directory constitute a Python package. diff --git a/rpkid/rpki/adns.py b/rpkid/rpki/adns.py deleted file mode 100644 index a6a900ed..00000000 --- a/rpkid/rpki/adns.py +++ /dev/null @@ -1,368 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2003--2007, 2009, 2010 Nominum, Inc. ("NOMINUM") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND NOMINUM DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR NOMINUM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -Basic asynchronous DNS code, using asyncore and Bob Halley's excellent -dnspython package. 
-""" - -import asyncore -import socket -import time -import sys -import rpki.async -import rpki.sundial -import rpki.log - -try: - import dns.resolver, dns.rdatatype, dns.rdataclass, dns.name, dns.message - import dns.inet, dns.exception, dns.query, dns.rcode, dns.ipv4, dns.ipv6 -except ImportError: - if __name__ == "__main__": - sys.stderr.write("DNSPython not available, skipping rpki.adns unit test\n") - sys.exit(0) - else: - raise - -## @var resolver -# Resolver object, shared by everything using this module - -resolver = dns.resolver.Resolver() -if resolver.cache is None: - resolver.cache = dns.resolver.Cache() - -## @var nameservers -# Nameservers from resolver.nameservers converted to (af, address) -# pairs. The latter turns out to be a more useful form for us to use -# internally, because it simplifies the checks we need to make upon -# packet receiption. - -nameservers = [] - -for ns in resolver.nameservers: - try: - nameservers.append((socket.AF_INET, dns.ipv4.inet_aton(ns))) - continue - except Exception: - pass - try: - nameservers.append((socket.AF_INET6, dns.ipv6.inet_aton(ns))) - continue - except Exception: - pass - rpki.log.error("Couldn't parse nameserver address %r" % ns) - -class dispatcher(asyncore.dispatcher): - """ - Basic UDP socket reader for use with asyncore. - """ - - def __init__(self, cb, eb, af, bufsize = 65535): - asyncore.dispatcher.__init__(self) - self.cb = cb - self.eb = eb - self.af = af - self.bufsize = bufsize - self.create_socket(af, socket.SOCK_DGRAM) - - def handle_read(self): - """ - Receive a packet, hand it off to query class callback. - """ - wire, from_address = self.recvfrom(self.bufsize) - self.cb(self.af, from_address[0], from_address[1], wire) - - def handle_error(self): - """ - Pass errors to query class errback. - """ - self.eb(sys.exc_info()[1]) - - def handle_connect(self): - """ - Quietly ignore UDP "connection" events. 
- """ - pass - - def writable(self): - """ - We don't need to hear about UDP socket becoming writable. - """ - return False - - -class query(object): - """ - Simplified (no search paths) asynchronous adaptation of - dns.resolver.Resolver.query() (q.v.). - """ - - def __init__(self, cb, eb, qname, qtype = dns.rdatatype.A, qclass = dns.rdataclass.IN): - if isinstance(qname, (str, unicode)): - qname = dns.name.from_text(qname) - if isinstance(qtype, str): - qtype = dns.rdatatype.from_text(qtype) - if isinstance(qclass, str): - qclass = dns.rdataclass.from_text(qclass) - assert qname.is_absolute() - self.cb = cb - self.eb = eb - self.qname = qname - self.qtype = qtype - self.qclass = qclass - self.start = time.time() - rpki.async.event_defer(self.go) - - def go(self): - """ - Start running the query. Check our cache before doing network - query; if we find an answer there, just return it. Otherwise - start the network query. - """ - if resolver.cache: - answer = resolver.cache.get((self.qname, self.qtype, self.qclass)) - else: - answer = None - if answer: - self.cb(self, answer) - else: - self.timer = rpki.async.timer() - self.sockets = {} - self.request = dns.message.make_query(self.qname, self.qtype, self.qclass) - if resolver.keyname is not None: - self.request.use_tsig(resolver.keyring, resolver.keyname, resolver.keyalgorithm) - self.request.use_edns(resolver.edns, resolver.ednsflags, resolver.payload) - self.response = None - self.backoff = 0.10 - self.nameservers = nameservers[:] - self.loop1() - - def loop1(self): - """ - Outer loop. If we haven't got a response yet and still have - nameservers to check, start inner loop. Otherwise, we're done. - """ - self.timer.cancel() - if self.response is None and self.nameservers: - self.iterator = rpki.async.iterator(self.nameservers[:], self.loop2, self.done2) - else: - self.done1() - - def loop2(self, iterator, nameserver): - """ - Inner loop. 
Send query to next nameserver in our list, unless - we've hit the overall timeout for this query. - """ - self.timer.cancel() - try: - timeout = resolver._compute_timeout(self.start) - except dns.resolver.Timeout, e: - self.lose(e) - else: - af, addr = nameserver - if af not in self.sockets: - self.sockets[af] = dispatcher(self.socket_cb, self.socket_eb, af) - self.sockets[af].sendto(self.request.to_wire(), - (dns.inet.inet_ntop(af, addr), resolver.port)) - self.timer.set_handler(self.socket_timeout) - self.timer.set_errback(self.socket_eb) - self.timer.set(rpki.sundial.timedelta(seconds = timeout)) - - def socket_timeout(self): - """ - No answer from nameserver, move on to next one (inner loop). - """ - self.response = None - self.iterator() - - def socket_eb(self, e): - """ - UDP socket signaled error. If it really is some kind of socket - error, handle as if we've timed out on this nameserver; otherwise, - pass error back to caller. - """ - self.timer.cancel() - if isinstance(e, socket.error): - self.response = None - self.iterator() - else: - self.lose(e) - - def socket_cb(self, af, from_host, from_port, wire): - """ - Received a packet that might be a DNS message. If it doesn't look - like it came from one of our nameservers, just drop it and leave - the timer running. Otherwise, try parsing it: if it's an answer, - we're done, otherwise handle error appropriately and move on to - next nameserver. 
- """ - sender = (af, dns.inet.inet_pton(af, from_host)) - if from_port != resolver.port or sender not in self.nameservers: - return - self.timer.cancel() - try: - self.response = dns.message.from_wire(wire, keyring = self.request.keyring, request_mac = self.request.mac, one_rr_per_rrset = False) - except dns.exception.FormError: - self.nameservers.remove(sender) - else: - rcode = self.response.rcode() - if rcode in (dns.rcode.NOERROR, dns.rcode.NXDOMAIN): - self.done1() - return - if rcode != dns.rcode.SERVFAIL: - self.nameservers.remove(sender) - self.response = None - self.iterator() - - def done2(self): - """ - Done with inner loop. If we still haven't got an answer and - haven't (yet?) eliminated all of our nameservers, wait a little - while before starting the cycle again, unless we've hit the - timeout threshold for the whole query. - """ - if self.response is None and self.nameservers: - try: - delay = rpki.sundial.timedelta(seconds = min(resolver._compute_timeout(self.start), self.backoff)) - self.backoff *= 2 - self.timer.set_handler(self.loop1) - self.timer.set_errback(self.lose) - self.timer.set(delay) - except dns.resolver.Timeout, e: - self.lose(e) - else: - self.loop1() - - def cleanup(self): - """ - Shut down our timer and sockets. - """ - self.timer.cancel() - for s in self.sockets.itervalues(): - s.close() - - def lose(self, e): - """ - Something bad happened. Clean up, then pass error back to caller. - """ - self.cleanup() - self.eb(self, e) - - def done1(self): - """ - Done with outer loop. If we got a useful answer, cache it, then - pass it back to caller; if we got an error, pass the appropriate - exception back to caller. 
- """ - self.cleanup() - try: - if not self.nameservers: - raise dns.resolver.NoNameservers - if self.response.rcode() == dns.rcode.NXDOMAIN: - raise dns.resolver.NXDOMAIN - answer = dns.resolver.Answer(self.qname, self.qtype, self.qclass, self.response) - if resolver.cache: - resolver.cache.put((self.qname, self.qtype, self.qclass), answer) - self.cb(self, answer) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - self.lose(e) - -class getaddrinfo(object): - - typemap = { dns.rdatatype.A : socket.AF_INET, - dns.rdatatype.AAAA : socket.AF_INET6 } - - def __init__(self, cb, eb, host, address_families = typemap.values()): - self.cb = cb - self.eb = eb - self.host = host - self.result = [] - self.queries = [query(self.done, self.lose, host, qtype) - for qtype in self.typemap - if self.typemap[qtype] in address_families] - - def done(self, q, answer): - if answer is not None: - for a in answer: - self.result.append((self.typemap[a.rdtype], a.address)) - self.queries.remove(q) - if not self.queries: - self.cb(self.result) - - def lose(self, q, e): - if isinstance(e, dns.resolver.NoAnswer): - self.done(q, None) - else: - for q in self.queries: - q.cleanup() - self.eb(e) - -if __name__ == "__main__": - - rpki.log.init("test-adns", use_syslog = False) - print "Some adns tests may take a minute or two, please be patient" - - class test_getaddrinfo(object): - - def __init__(self, qname): - self.qname = qname - getaddrinfo(self.done, self.lose, qname) - - def done(self, result): - print "getaddrinfo(%s) returned: %s" % ( - self.qname, - ", ".join(str(r) for r in result)) - - def lose(self, e): - print "getaddrinfo(%s) failed: %r" % (self.qname, e) - - class test_query(object): - - def __init__(self, qname, qtype = dns.rdatatype.A, qclass = dns.rdataclass.IN): - self.qname = qname - self.qtype = qtype - self.qclass = qclass - query(self.done, self.lose, qname, qtype = qtype, qclass = qclass) - - def done(self, q, result): - print "query(%s, %s, %s) 
returned: %s" % ( - self.qname, - dns.rdatatype.to_text(self.qtype), - dns.rdataclass.to_text(self.qclass), - ", ".join(str(r) for r in result)) - - def lose(self, q, e): - print "getaddrinfo(%s, %s, %s) failed: %r" % ( - self.qname, - dns.rdatatype.to_text(self.qtype), - dns.rdataclass.to_text(self.qclass), - e) - - if True: - for t in (dns.rdatatype.A, dns.rdatatype.AAAA, dns.rdatatype.HINFO): - test_query("subvert-rpki.hactrn.net", t) - test_query("nonexistant.rpki.net") - test_query("subvert-rpki.hactrn.net", qclass = dns.rdataclass.CH) - - for h in ("subvert-rpki.hactrn.net", "nonexistant.rpki.net"): - test_getaddrinfo(h) - - rpki.async.event_loop() diff --git a/rpkid/rpki/async.py b/rpkid/rpki/async.py deleted file mode 100644 index 49f98841..00000000 --- a/rpkid/rpki/async.py +++ /dev/null @@ -1,420 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Utilities for event-driven programming. -""" - -import asyncore -import signal -import traceback -import gc -import sys -import rpki.log -import rpki.sundial - -ExitNow = asyncore.ExitNow - -class iterator(object): - """ - Iteration construct for event-driven code. 
Takes three - arguments: - - - Some kind of iterable object - - - A callback to call on each item in the iteration - - - A callback to call after the iteration terminates. - - The item callback receives two arguments: the callable iterator - object and the current value of the iteration. It should call the - iterator (or arrange for the iterator to be called) when it is time - to continue to the next item in the iteration. - - The termination callback receives no arguments. - - Special case for memory constrained cases: if keyword argument - pop_list is True, iterable must be a list, which is modified in - place, popping items off of it until it's empty. - """ - - def __init__(self, iterable, item_callback, done_callback, unwind_stack = True, pop_list = False): - assert not pop_list or isinstance(iterable, list), "iterable must be a list when using pop_list" - self.item_callback = item_callback - self.done_callback = done_callback if done_callback is not None else lambda: None - self.caller_file, self.caller_line, self.caller_function = traceback.extract_stack(limit = 2)[0][0:3] - self.unwind_stack = unwind_stack - self.pop_list = pop_list - try: - if self.pop_list: - self.iterator = iterable - else: - self.iterator = iter(iterable) - except (ExitNow, SystemExit): - raise - except Exception: - rpki.log.debug("Problem constructing iterator for %r" % (iterable,)) - raise - self.doit() - - def __repr__(self): - return rpki.log.log_repr(self, - "created at %s:%s" % (self.caller_file, - self.caller_line), - self.caller_function) - - def __call__(self): - if self.unwind_stack: - event_defer(self.doit) - else: - self.doit() - - def doit(self): - """ - Implement the iterator protocol: attempt to call the item handler - with the next iteration value, call the termination handler if the - iterator signaled StopIteration. 
- """ - - try: - if self.pop_list: - val = self.iterator.pop(0) - else: - val = self.iterator.next() - except (IndexError, StopIteration): - self.done_callback() - else: - self.item_callback(self, val) - -## @var timer_queue -# Timer queue. - -timer_queue = [] - -class timer(object): - """ - Timer construct for event-driven code. - """ - - ## @var gc_debug - # Verbose chatter about timers states and garbage collection. - gc_debug = False - - ## @var run_debug - # Verbose chatter about timers being run. - run_debug = False - - def __init__(self, handler = None, errback = None): - self.set_handler(handler) - self.set_errback(errback) - self.when = None - if self.gc_debug: - self.trace("Creating %r" % self) - - def trace(self, msg): - """ - Debug logging. - """ - if self.gc_debug: - bt = traceback.extract_stack(limit = 3) - rpki.log.debug("%s from %s:%d" % (msg, bt[0][0], bt[0][1])) - - def set(self, when): - """ - Set a timer. Argument can be a datetime, to specify an absolute - time, or a timedelta, to specify an offset time. - """ - if self.gc_debug: - self.trace("Setting %r to %r" % (self, when)) - if isinstance(when, rpki.sundial.timedelta): - self.when = rpki.sundial.now() + when - else: - self.when = when - assert isinstance(self.when, rpki.sundial.datetime), "%r: Expecting a datetime, got %r" % (self, self.when) - if self not in timer_queue: - timer_queue.append(self) - timer_queue.sort(key = lambda x: x.when) - - def __cmp__(self, other): - return cmp(id(self), id(other)) - - if gc_debug: - def __del__(self): - rpki.log.debug("Deleting %r" % self) - - def cancel(self): - """ - Cancel a timer, if it was set. - """ - if self.gc_debug: - self.trace("Canceling %r" % self) - try: - while True: - timer_queue.remove(self) - except ValueError: - pass - - def is_set(self): - """ - Test whether this timer is currently set. - """ - return self in timer_queue - - def set_handler(self, handler): - """ - Set timer's expiration handler. 
This is an alternative to - subclassing the timer class, and may be easier to use when - integrating timers into other classes (eg, the handler can be a - bound method to an object in a class representing a network - connection). - """ - self.handler = handler - - def set_errback(self, errback): - """ - Set a timer's errback. Like set_handler(), for errbacks. - """ - self.errback = errback - - @classmethod - def runq(cls): - """ - Run the timer queue: for each timer whose call time has passed, - pull the timer off the queue and call its handler() method. - - Comparisions are made against time at which this function was - called, so that even if new events keep getting scheduled, we'll - return to the I/O loop reasonably quickly. - """ - now = rpki.sundial.now() - while timer_queue and now >= timer_queue[0].when: - t = timer_queue.pop(0) - if cls.run_debug: - rpki.log.debug("Running %r" % t) - try: - if t.handler is not None: - t.handler() - else: - rpki.log.warn("Timer %r expired with no handler set" % t) - except (ExitNow, SystemExit): - raise - except Exception, e: - if t.errback is not None: - t.errback(e) - else: - rpki.log.error("Unhandled exception from timer %r: %s" % (t, e)) - rpki.log.traceback() - - def __repr__(self): - return rpki.log.log_repr(self, self.when, repr(self.handler)) - - @classmethod - def seconds_until_wakeup(cls): - """ - Calculate delay until next timer expires, or None if no timers are - set and we should wait indefinitely. Rounds up to avoid spinning - in select() or poll(). We could calculate fractional seconds in - the right units instead, but select() and poll() don't even take - the same units (argh!), and we're not doing anything that - hair-triggered, so rounding up is simplest. 
- """ - if not timer_queue: - return None - now = rpki.sundial.now() - if now >= timer_queue[0].when: - return 0 - delay = timer_queue[0].when - now - seconds = delay.convert_to_seconds() - if delay.microseconds: - seconds += 1 - return seconds - - @classmethod - def clear(cls): - """ - Cancel every timer on the queue. We could just throw away the - queue content, but this way we can notify subclasses that provide - their own cancel() method. - """ - while timer_queue: - timer_queue.pop(0).cancel() - -def _raiseExitNow(signum, frame): - """ - Signal handler for event_loop(). - """ - raise ExitNow - -def exit_event_loop(): - """ - Force exit from event_loop(). - """ - raise ExitNow - -def event_defer(handler, delay = rpki.sundial.timedelta(seconds = 0)): - """ - Use a near-term (default: zero interval) timer to schedule an event - to run after letting the I/O system have a turn. - """ - timer(handler).set(delay) - -## @var debug_event_timing -# Enable insanely verbose logging of event timing - -debug_event_timing = False - -def event_loop(catch_signals = (signal.SIGINT, signal.SIGTERM)): - """ - Replacement for asyncore.loop(), adding timer and signal support. 
- """ - old_signal_handlers = {} - while True: - save_sigs = len(old_signal_handlers) == 0 - try: - for sig in catch_signals: - old = signal.signal(sig, _raiseExitNow) - if save_sigs: - old_signal_handlers[sig] = old - while asyncore.socket_map or timer_queue: - t = timer.seconds_until_wakeup() - if debug_event_timing: - rpki.log.debug("Dismissing to asyncore.poll(), t = %s, q = %r" % (t, timer_queue)) - asyncore.poll(t, asyncore.socket_map) - timer.runq() - if timer.gc_debug: - gc.collect() - if gc.garbage: - for i in gc.garbage: - rpki.log.debug("GC-cycle %r" % i) - del gc.garbage[:] - except ExitNow: - break - except SystemExit: - raise - except ValueError, e: - if str(e) == "filedescriptor out of range in select()": - rpki.log.error("Something is badly wrong, select() thinks we gave it a bad file descriptor.") - rpki.log.error("Content of asyncore.socket_map:") - for fd in sorted(asyncore.socket_map.iterkeys()): - rpki.log.error(" fd %s obj %r" % (fd, asyncore.socket_map[fd])) - rpki.log.error("Not safe to continue due to risk of spin loop on select(). Exiting.") - sys.exit(1) - rpki.log.error("event_loop() exited with exception %r, this is not supposed to happen, restarting" % e) - except Exception, e: - rpki.log.error("event_loop() exited with exception %r, this is not supposed to happen, restarting" % e) - else: - break - finally: - for sig in old_signal_handlers: - signal.signal(sig, old_signal_handlers[sig]) - -class sync_wrapper(object): - """ - Synchronous wrapper around asynchronous functions. Running in - asynchronous mode at all times makes sense for event-driven daemons, - but is kind of tedious for simple scripts, hence this wrapper. - - The wrapped function should take at least two arguments: a callback - function and an errback function. If any arguments are passed to - the wrapper, they will be passed as additional arguments to the - wrapped function. 
- """ - - res = None - err = None - - def __init__(self, func): - self.func = func - - def cb(self, res = None): - """ - Wrapped code has requested normal termination. Store result, and - exit the event loop. - """ - self.res = res - raise ExitNow - - def eb(self, err): - """ - Wrapped code raised an exception. Store exception data, then exit - the event loop. - """ - exc_info = sys.exc_info() - self.err = exc_info if exc_info[1] is err else err - raise ExitNow - - def __call__(self, *args, **kwargs): - - def thunk(): - try: - self.func(self.cb, self.eb, *args, **kwargs) - except ExitNow: - raise - except Exception, e: - self.eb(e) - - event_defer(thunk) - event_loop() - if self.err is None: - return self.res - elif isinstance(self.err, tuple): - raise self.err[0], self.err[1], self.err[2] - else: - raise self.err - -class gc_summary(object): - """ - Periodic summary of GC state, for tracking down memory bloat. - """ - - def __init__(self, interval, threshold = 0): - if isinstance(interval, (int, long)): - interval = rpki.sundial.timedelta(seconds = interval) - self.interval = interval - self.threshold = threshold - self.timer = timer(handler = self.handler) - self.timer.set(self.interval) - - def handler(self): - """ - Collect and log GC state for this period, reset timer. 
- """ - rpki.log.debug("gc_summary: Running gc.collect()") - gc.collect() - rpki.log.debug("gc_summary: Summarizing (threshold %d)" % self.threshold) - total = {} - tuples = {} - for g in gc.get_objects(): - k = type(g).__name__ - total[k] = total.get(k, 0) + 1 - if isinstance(g, tuple): - k = ", ".join(type(x).__name__ for x in g) - tuples[k] = tuples.get(k, 0) + 1 - rpki.log.debug("gc_summary: Sorting result") - total = total.items() - total.sort(reverse = True, key = lambda x: x[1]) - tuples = tuples.items() - tuples.sort(reverse = True, key = lambda x: x[1]) - rpki.log.debug("gc_summary: Object type counts in descending order") - for name, count in total: - if count > self.threshold: - rpki.log.debug("gc_summary: %8d %s" % (count, name)) - rpki.log.debug("gc_summary: Tuple content type signature counts in descending order") - for types, count in tuples: - if count > self.threshold: - rpki.log.debug("gc_summary: %8d (%s)" % (count, types)) - rpki.log.debug("gc_summary: Scheduling next cycle") - self.timer.set(self.interval) diff --git a/rpkid/rpki/cli.py b/rpkid/rpki/cli.py deleted file mode 100644 index 1930f2b7..00000000 --- a/rpkid/rpki/cli.py +++ /dev/null @@ -1,277 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL OR -# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Utilities for writing command line tools. -""" - -import cmd -import glob -import shlex -import os.path -import argparse -import traceback - -try: - import readline - have_readline = True -except ImportError: - have_readline = False - -class BadCommandSyntax(Exception): - "Bad command line syntax." - -class ExitArgparse(Exception): - "Exit method from ArgumentParser." - - def __init__(self, message = None, status = 0): - self.message = message - self.status = status - -class Cmd(cmd.Cmd): - """ - Customized subclass of Python cmd module. - """ - - emptyline_repeats_last_command = False - - EOF_exits_command_loop = True - - identchars = cmd.IDENTCHARS + "/-." - - histfile = None - - last_command_failed = False - - def __init__(self, argv = None): - cmd.Cmd.__init__(self) - if argv: - self.onecmd(" ".join(argv)) - else: - self.cmdloop_with_history() - - def onecmd(self, line): - """ - Wrap error handling around cmd.Cmd.onecmd(). Might want to do - something kinder than showing a traceback, eventually. - """ - - self.last_command_failed = False - try: - return cmd.Cmd.onecmd(self, line) - except SystemExit: - raise - except ExitArgparse, e: - if e.message is not None: - print e.message - self.last_command_failed = e.status != 0 - return False - except BadCommandSyntax, e: - print e - except Exception: - traceback.print_exc() - self.last_command_failed = True - return False - - def do_EOF(self, arg): - if self.EOF_exits_command_loop and self.prompt: - print - return self.EOF_exits_command_loop - - def do_exit(self, arg): - """ - Exit program. 
- """ - - return True - - do_quit = do_exit - - def emptyline(self): - """ - Handle an empty line. cmd module default is to repeat the last - command, which I find to be violation of the principal of least - astonishment, so my preference is that an empty line does nothing. - """ - - if self.emptyline_repeats_last_command: - cmd.Cmd.emptyline(self) - - def filename_complete(self, text, line, begidx, endidx): - """ - Filename completion handler, with hack to restore what I consider - the normal (bash-like) behavior when one hits the completion key - and there's only one match. - """ - - result = glob.glob(text + "*") - if len(result) == 1: - path = result.pop() - if os.path.isdir(path) or (os.path.islink(path) and os.path.isdir(os.path.join(path, "."))): - result.append(path + os.path.sep) - else: - result.append(path + " ") - return result - - def completenames(self, text, *ignored): - """ - Command name completion handler, with hack to restore what I - consider the normal (bash-like) behavior when one hits the - completion key and there's only one match. - """ - - result = cmd.Cmd.completenames(self, text, *ignored) - if len(result) == 1: - result[0] += " " - return result - - def help_help(self): - """ - Type "help [topic]" for help on a command, - or just "help" for a list of commands. - """ - - self.stdout.write(self.help_help.__doc__ + "\n") - - def complete_help(self, *args): - """ - Better completion function for help command arguments. - """ - - text = args[0] - names = self.get_names() - result = [] - for prefix in ("do_", "help_"): - result.extend(s[len(prefix):] for s in names if s.startswith(prefix + text) and s != "do_EOF") - return result - - if have_readline: - - def cmdloop_with_history(self): - """ - Better command loop, with history file and tweaked readline - completion delimiters. 
- """ - - old_completer_delims = readline.get_completer_delims() - if self.histfile is not None: - try: - readline.read_history_file(self.histfile) - except IOError: - pass - try: - readline.set_completer_delims("".join(set(old_completer_delims) - set(self.identchars))) - self.cmdloop() - finally: - if self.histfile is not None and readline.get_current_history_length(): - readline.write_history_file(self.histfile) - readline.set_completer_delims(old_completer_delims) - - else: - - cmdloop_with_history = cmd.Cmd.cmdloop - - - -def yes_or_no(prompt, default = None, require_full_word = False): - """ - Ask a yes-or-no question. - """ - - prompt = prompt.rstrip() + _yes_or_no_prompts[default] - while True: - answer = raw_input(prompt).strip().lower() - if not answer and default is not None: - return default - if answer == "yes" or (not require_full_word and answer.startswith("y")): - return True - if answer == "no" or (not require_full_word and answer.startswith("n")): - return False - print 'Please answer "yes" or "no"' - -_yes_or_no_prompts = { - True : ' ("yes" or "no" ["yes"]) ', - False : ' ("yes" or "no" ["no"]) ', - None : ' ("yes" or "no") ' } - - -class NonExitingArgumentParser(argparse.ArgumentParser): - """ - ArgumentParser tweaked to throw ExitArgparse exception - rather than using sys.exit(), for use with command loop. - """ - - def exit(self, status = 0, message = None): - raise ExitArgparse(status = status, message = message) - - -def parsecmd(subparsers, *arg_clauses): - """ - Decorator to combine the argparse and cmd modules. - - subparsers is an instance of argparse.ArgumentParser (or subclass) which was - returned by calling the .add_subparsers() method on an ArgumentParser instance - intended to handle parsing for the entire program on the command line. - - arg_clauses is a series of defarg() invocations defining arguments to be parsed - by the argparse code. 
- - The decorator will use arg_clauses to construct two separate argparse parser - instances: one will be attached to the global parser as a subparser, the - other will be used to parse arguments for this command when invoked by cmd. - - The decorator will replace the original do_whatever method with a wrapped version - which uses the local argparse instance to parse the single string supplied by - the cmd module. - - The intent is that, from the command's point of view, all of this should work - pretty much the same way regardless of whether the command was invoked from - the global command line or from within the cmd command loop. Either way, - the command method should get an argparse.Namespace object. - - In theory, we could generate a completion handler from the argparse definitions, - much as the separate argcomplete package does. In practice this is a lot of - work and I'm not ready to get into that just yet. - """ - - def decorate(func): - assert func.__name__.startswith("do_") - parser = NonExitingArgumentParser(description = func.__doc__, - prog = func.__name__[3:], - add_help = False) - subparser = subparsers.add_parser(func.__name__[3:], - description = func.__doc__, - help = func.__doc__.lstrip().partition("\n")[0]) - for positional, keywords in arg_clauses: - parser.add_argument(*positional, **keywords) - subparser.add_argument(*positional, **keywords) - subparser.set_defaults(func = func) - def wrapped(self, arg): - return func(self, parser.parse_args(shlex.split(arg))) - wrapped.argparser = parser - wrapped.__doc__ = func.__doc__ - return wrapped - return decorate - -def cmdarg(*positional, **keywords): - """ - Syntactic sugar to let us use keyword arguments normally when constructing - arguments for deferred calls to argparse.ArgumentParser.add_argument(). 
- """ - - return positional, keywords diff --git a/rpkid/rpki/config.py b/rpkid/rpki/config.py deleted file mode 100644 index 9f26664e..00000000 --- a/rpkid/rpki/config.py +++ /dev/null @@ -1,301 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -Configuration file parsing utilities, layered on top of stock Python -ConfigParser module. -""" - -import ConfigParser -import os -import re - -## @var default_filename -# Default name of config file if caller doesn't specify one explictly. - -default_filename = "rpki.conf" - -## @var default_dirname -# Default name of directory to check for global config file, or None -# if no global config file. Autoconf-generated code may set this to a -# non-None value during script startup. - -try: - import rpki.autoconf - default_dirname = rpki.autoconf.sysconfdir -except ImportError: - default_dirname = None - -## @var default_envname -# Name of environment variable containing config file name. 
- -default_envname = "RPKI_CONF" - -class parser(object): - """ - Extensions to stock Python ConfigParser: - - Read config file and set default section while initializing parser object. - - Support for OpenSSL-style subscripted options and a limited form of - OpenSSL-style indirect variable references (${section::option}). - - get-methods with default values and default section name. - - If no filename is given to the constructor (filename = None), we - check for an environment variable naming the config file, then we - check for a default filename in the current directory, then finally - we check for a global config file if autoconf provided a directory - name to check. - """ - - def __init__(self, filename = None, section = None, allow_missing = False): - - self.cfg = ConfigParser.RawConfigParser() - self.default_section = section - - filenames = [] - if filename is not None: - filenames.append(filename) - else: - if default_envname in os.environ: - filenames.append(os.environ[default_envname]) - filenames.append(default_filename) - if default_dirname is not None: - filenames.append("%s/%s" % (default_dirname, default_filename)) - - f = fn = None - - for fn in filenames: - try: - f = open(fn) - break - except IOError: - f = None - - if f is not None: - self.filename = fn - self.cfg.readfp(f, fn) - elif allow_missing: - self.filename = None - else: - raise - - def has_section(self, section): - """ - Test whether a section exists. - """ - - return self.cfg.has_section(section) - - def has_option(self, option, section = None): - """ - Test whether an option exists. - """ - - if section is None: - section = self.default_section - return self.cfg.has_option(section, option) - - def multiget(self, option, section = None): - """ - Parse OpenSSL-style foo.0, foo.1, ... subscripted options. - - Returns a list of values matching the specified option name. 
- """ - - matches = [] - if section is None: - section = self.default_section - if self.cfg.has_option(section, option): - matches.append((-1, self.get(option, section = section))) - for key in self.cfg.options(section): - s = key.rsplit(".", 1) - if len(s) == 2 and s[0] == option and s[1].isdigit(): - matches.append((int(s[1]), self.get(option, section = section))) - matches.sort() - return [match[1] for match in matches] - - _regexp = re.compile("\\${(.*?)::(.*?)}") - - def _repl(self, m): - """ - Replacement function for indirect variable substitution. - This is intended for use with re.subn(). - """ - section, option = m.group(1, 2) - if section == "ENV": - return os.getenv(option, "") - else: - return self.cfg.get(section, option) - - def get(self, option, default = None, section = None): - """ - Get an option, perhaps with a default value. - """ - if section is None: - section = self.default_section - if default is not None and not self.cfg.has_option(section, option): - return default - val = self.cfg.get(section, option) - while True: - val, modified = self._regexp.subn(self._repl, val, 1) - if not modified: - return val - - def getboolean(self, option, default = None, section = None): - """ - Get a boolean option, perhaps with a default value. - """ - v = self.get(option, default, section) - if isinstance(v, str): - v = v.lower() - if v not in self.cfg._boolean_states: - raise ValueError, "Not a boolean: %s" % v - v = self.cfg._boolean_states[v] - return v - - def getint(self, option, default = None, section = None): - """ - Get an integer option, perhaps with a default value. - """ - return int(self.get(option, default, section)) - - def getlong(self, option, default = None, section = None): - """ - Get a long integer option, perhaps with a default value. - """ - return long(self.get(option, default, section)) - - def set_global_flags(self): - """ - Consolidated control for all the little global control flags - scattered through the libraries. 
This isn't a particularly good - place for this function to live, but it has to live somewhere and - making it a method of the config parser from which it gets all of - its data is less silly than the available alternatives. - """ - - import rpki.http - import rpki.x509 - import rpki.sql - import rpki.async - import rpki.log - import rpki.daemonize - - try: - rpki.http.debug_http = self.getboolean("debug_http") - except ConfigParser.NoOptionError: - pass - - try: - rpki.http.want_persistent_client = self.getboolean("want_persistent_client") - except ConfigParser.NoOptionError: - pass - - try: - rpki.http.want_persistent_server = self.getboolean("want_persistent_server") - except ConfigParser.NoOptionError: - pass - - try: - rpki.http.use_adns = self.getboolean("use_adns") - except ConfigParser.NoOptionError: - pass - - try: - rpki.http.enable_ipv6_clients = self.getboolean("enable_ipv6_clients") - except ConfigParser.NoOptionError: - pass - - try: - rpki.http.enable_ipv6_servers = self.getboolean("enable_ipv6_servers") - except ConfigParser.NoOptionError: - pass - - try: - rpki.x509.CMS_object.debug_cms_certs = self.getboolean("debug_cms_certs") - except ConfigParser.NoOptionError: - pass - - try: - rpki.sql.sql_persistent.sql_debug = self.getboolean("sql_debug") - except ConfigParser.NoOptionError: - pass - - try: - rpki.async.timer.gc_debug = self.getboolean("gc_debug") - except ConfigParser.NoOptionError: - pass - - try: - rpki.async.timer.run_debug = self.getboolean("timer_debug") - except ConfigParser.NoOptionError: - pass - - try: - rpki.x509.XML_CMS_object.dump_outbound_cms = rpki.x509.DeadDrop(self.get("dump_outbound_cms")) - except OSError, e: - rpki.log.warn("Couldn't initialize mailbox %s: %s" % (self.get("dump_outbound_cms"), e)) - except ConfigParser.NoOptionError: - pass - - try: - rpki.x509.XML_CMS_object.dump_inbound_cms = rpki.x509.DeadDrop(self.get("dump_inbound_cms")) - except OSError, e: - rpki.log.warn("Couldn't initialize mailbox %s: %s" % 
(self.get("dump_inbound_cms"), e)) - except ConfigParser.NoOptionError: - pass - - try: - rpki.x509.XML_CMS_object.check_inbound_schema = self.getboolean("check_inbound_schema") - except ConfigParser.NoOptionError: - pass - - try: - rpki.x509.XML_CMS_object.check_outbound_schema = self.getboolean("check_outbound_schema") - except ConfigParser.NoOptionError: - pass - - try: - rpki.async.gc_summary(self.getint("gc_summary"), self.getint("gc_summary_threshold", 0)) - except ConfigParser.NoOptionError: - pass - - try: - rpki.log.enable_tracebacks = self.getboolean("enable_tracebacks") - except ConfigParser.NoOptionError: - pass - - try: - rpki.daemonize.default_pid_directory = self.get("pid_directory") - except ConfigParser.NoOptionError: - pass - - try: - rpki.daemonize.pid_filename = self.get("pid_filename") - except ConfigParser.NoOptionError: - pass - - try: - rpki.x509.generate_insecure_debug_only_rsa_key = rpki.x509.insecure_debug_only_rsa_key_generator(*self.get("insecure-debug-only-rsa-key-db").split()) - except ConfigParser.NoOptionError: - pass - except: - rpki.log.warn("insecure-debug-only-rsa-key-db configured but initialization failed, check for corrupted database file") diff --git a/rpkid/rpki/csv_utils.py b/rpkid/rpki/csv_utils.py deleted file mode 100644 index 47caabdb..00000000 --- a/rpkid/rpki/csv_utils.py +++ /dev/null @@ -1,112 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -CSV utilities, moved here from myrpki.py. -""" - -import csv -import os - -class BadCSVSyntax(Exception): - """ - Bad CSV syntax. - """ - -class csv_reader(object): - """ - Reader for tab-delimited text that's (slightly) friendlier than the - stock Python csv module (which isn't intended for direct use by - humans anyway, and neither was this package originally, but that - seems to be the way that it has evolved...). - - Columns parameter specifies how many columns users of the reader - expect to see; lines with fewer columns will be padded with None - values. - - Original API design for this class courtesy of Warren Kumari, but - don't blame him if you don't like what I did with his ideas. 
- """ - - def __init__(self, filename, columns = None, min_columns = None, comment_characters = "#;"): - assert columns is None or isinstance(columns, int) - assert min_columns is None or isinstance(min_columns, int) - if columns is not None and min_columns is None: - min_columns = columns - self.filename = filename - self.columns = columns - self.min_columns = min_columns - self.comment_characters = comment_characters - self.file = open(filename, "r") - - def __iter__(self): - line_number = 0 - for line in self.file: - line_number += 1 - line = line.strip() - if not line or line[0] in self.comment_characters: - continue - fields = line.split() - if self.min_columns is not None and len(fields) < self.min_columns: - raise BadCSVSyntax, "%s:%d: Not enough columns in line %r" % (self.filename, line_number, line) - if self.columns is not None and len(fields) > self.columns: - raise BadCSVSyntax, "%s:%d: Too many columns in line %r" % (self.filename, line_number, line) - if self.columns is not None and len(fields) < self.columns: - fields += tuple(None for i in xrange(self.columns - len(fields))) - yield fields - - def __enter__(self): - return self - - def __exit__(self, _type, value, traceback): - self.file.close() - -class csv_writer(object): - """ - Writer object for tab delimited text. We just use the stock CSV - module in excel-tab mode for this. - - If "renmwo" is set (default), the file will be written to - a temporary name and renamed to the real filename after closing. - """ - - def __init__(self, filename, renmwo = True): - self.filename = filename - self.renmwo = "%s.~renmwo%d~" % (filename, os.getpid()) if renmwo else filename - self.file = open(self.renmwo, "w") - self.writer = csv.writer(self.file, dialect = csv.get_dialect("excel-tab")) - - def __enter__(self): - return self - - def __exit__(self, _type, value, traceback): - self.close() - - def close(self): - """ - Close this writer. 
- """ - if self.file is not None: - self.file.close() - self.file = None - if self.filename != self.renmwo: - os.rename(self.renmwo, self.filename) - - def __getattr__(self, attr): - """ - Fake inheritance from whatever object csv.writer deigns to give us. - """ - return getattr(self.writer, attr) diff --git a/rpkid/rpki/daemonize.py b/rpkid/rpki/daemonize.py deleted file mode 100644 index 62b4ee4e..00000000 --- a/rpkid/rpki/daemonize.py +++ /dev/null @@ -1,133 +0,0 @@ -# $Id$ -# -# Copyright (C) 2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Some code borrowed from -# http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/ -# -# (which was explicitly placed in public domain by its author), and from -# -# /usr/src/lib/libc/gen/daemon.c -# -# (the libc implementation of daemon(3) on FreeBSD), so: -# -# Portions copyright (c) 1990, 1993 -# The Regents of the University of California. All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. 
Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# 4. Neither the name of the University nor the names of its contributors -# may be used to endorse or promote products derived from this software -# without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND -# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS -# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) -# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -# SUCH DAMAGE. - -""" -Make a normal program into a "daemon", like the 4.4BSD daemon(3) call. - -This doesn't quite follow either the 4.4BSD call or the Python 3.x library, -because it was written to fit into an existing package and I didn't -want to drag in yet another external library just for this. -""" - -import sys -import os -import atexit -import signal -import rpki.log - -# Does default_pid_directory need to be autoconf-configurable? - -## @var default_pid_directory -# Default directory to which to write process ID files. - -default_pid_directory = "/var/run/rpki" - -## @var pid_filename -# Configurable filename to which to write process ID file. -# pidfile argument to daemon() overrides this. 
- -pid_filename = None - -def daemon(nochdir = False, noclose = False, pidfile = None): - """ - Make this program become a daemon, like 4.4BSD daemon(3), and - write its pid out to a file with cleanup on exit. - """ - - if pidfile is None: - if pid_filename is None: - prog = os.path.splitext(os.path.basename(sys.argv[0]))[0] - pidfile = os.path.join(default_pid_directory, "%s.pid" % prog) - else: - pidfile = pid_filename - - old_sighup_action = signal.signal(signal.SIGHUP, signal.SIG_IGN) - - try: - pid = os.fork() - except OSError, e: - sys.exit("fork() failed: %d (%s)" % (e.errno, e.strerror)) - else: - if pid > 0: - os._exit(0) - - if not nochdir: - os.chdir("/") - - os.setsid() - - if not noclose: - sys.stdout.flush() - sys.stderr.flush() - fd = os.open(os.devnull, os.O_RDWR) - os.dup2(fd, 0) - os.dup2(fd, 1) - os.dup2(fd, 2) - if fd > 2: - os.close(fd) - - signal.signal(signal.SIGHUP, old_sighup_action) - - def delete_pid_file(): - try: - os.unlink(pidfile) - except OSError: - pass - - atexit.register(delete_pid_file) - - try: - f = open(pidfile, "w") - f.write("%d\n" % os.getpid()) - f.close() - except IOError, e: - rpki.log.warn("Couldn't write PID file %s: %s" % (pidfile, e.strerror)) diff --git a/rpkid/rpki/exceptions.py b/rpkid/rpki/exceptions.py deleted file mode 100644 index d8d3774e..00000000 --- a/rpkid/rpki/exceptions.py +++ /dev/null @@ -1,367 +0,0 @@ -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2013 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -Exception definitions for RPKI modules. -""" - -class RPKI_Exception(Exception): - """ - Base class for RPKI exceptions. - """ - -class NotInDatabase(RPKI_Exception): - """ - Lookup failed for an object expected to be in the database. - """ - -class BadURISyntax(RPKI_Exception): - """ - Illegal syntax for a URI. - """ - -class BadStatusCode(RPKI_Exception): - """ - Unrecognized protocol status code. - """ - -class BadQuery(RPKI_Exception): - """ - Unexpected protocol query. - """ - -class DBConsistancyError(RPKI_Exception): - """ - Found multiple matches for a database query that shouldn't ever - return that. - """ - -class CMSVerificationFailed(RPKI_Exception): - """ - Verification of a CMS message failed. - """ - -class HTTPRequestFailed(RPKI_Exception): - """ - HTTP request failed. - """ - -class DERObjectConversionError(RPKI_Exception): - """ - Error trying to convert a DER-based object from one representation - to another. - """ - -class NotACertificateChain(RPKI_Exception): - """ - Certificates don't form a proper chain. - """ - -class BadContactURL(RPKI_Exception): - """ - Error trying to parse contact URL. - """ - -class BadClassNameSyntax(RPKI_Exception): - """ - Illegal syntax for a class_name. - """ - -class BadIssueResponse(RPKI_Exception): - """ - issue_response PDU with wrong number of classes or certificates. - """ - -class NotImplementedYet(RPKI_Exception): - """ - Internal error -- not implemented yet. 
- """ - -class BadPKCS10(RPKI_Exception): - """ - Bad PKCS #10 object. - """ - -class UpstreamError(RPKI_Exception): - """ - Received an error from upstream. - """ - -class ChildNotFound(RPKI_Exception): - """ - Could not find specified child in database. - """ - -class BSCNotFound(RPKI_Exception): - """ - Could not find specified BSC in database. - """ - -class BadSender(RPKI_Exception): - """ - Unexpected XML sender value. - """ - -class ClassNameMismatch(RPKI_Exception): - """ - class_name does not match child context. - """ - -class ClassNameUnknown(RPKI_Exception): - """ - Unknown class_name. - """ - -class SKIMismatch(RPKI_Exception): - """ - SKI value in response does not match request. - """ - -class SubprocessError(RPKI_Exception): - """ - Subprocess returned unexpected error. - """ - -class BadIRDBReply(RPKI_Exception): - """ - Unexpected reply to IRDB query. - """ - -class NotFound(RPKI_Exception): - """ - Object not found in database. - """ - -class MustBePrefix(RPKI_Exception): - """ - Resource range cannot be expressed as a prefix. - """ - -class TLSValidationError(RPKI_Exception): - """ - TLS certificate validation error. - """ - -class MultipleTLSEECert(TLSValidationError): - """ - Received more than one TLS EE certificate. - """ - -class ReceivedTLSCACert(TLSValidationError): - """ - Received CA certificate via TLS. - """ - -class WrongEContentType(RPKI_Exception): - """ - Received wrong CMS eContentType. - """ - -class EmptyPEM(RPKI_Exception): - """ - Couldn't find PEM block to convert. - """ - -class UnexpectedCMSCerts(RPKI_Exception): - """ - Received CMS certs when not expecting any. - """ - -class UnexpectedCMSCRLs(RPKI_Exception): - """ - Received CMS CRLs when not expecting any. - """ - -class MissingCMSEEcert(RPKI_Exception): - """ - Didn't receive CMS EE cert when expecting one. - """ - -class MissingCMSCRL(RPKI_Exception): - """ - Didn't receive CMS CRL when expecting one. 
- """ - -class UnparsableCMSDER(RPKI_Exception): - """ - Alleged CMS DER wasn't parsable. - """ - -class CMSCRLNotSet(RPKI_Exception): - """ - CMS CRL has not been configured. - """ - -class ServerShuttingDown(RPKI_Exception): - """ - Server is shutting down. - """ - -class NoActiveCA(RPKI_Exception): - """ - No active ca_detail for specified class. - """ - -class BadClientURL(RPKI_Exception): - """ - URL given to HTTP client does not match profile. - """ - -class ClientNotFound(RPKI_Exception): - """ - Could not find specified client in database. - """ - -class BadExtension(RPKI_Exception): - """ - Forbidden X.509 extension. - """ - -class ForbiddenURI(RPKI_Exception): - """ - Forbidden URI, does not start with correct base URI. - """ - -class HTTPClientAborted(RPKI_Exception): - """ - HTTP client connection closed while in request-sent state. - """ - -class BadPublicationReply(RPKI_Exception): - """ - Unexpected reply to publication query. - """ - -class DuplicateObject(RPKI_Exception): - """ - Attempt to create an object that already exists. - """ - -class EmptyROAPrefixList(RPKI_Exception): - """ - Can't create ROA with an empty prefix list. - """ - -class NoCoveringCertForROA(RPKI_Exception): - """ - Couldn't find a covering certificate to generate ROA. - """ - -class BSCNotReady(RPKI_Exception): - """ - BSC not yet in a usable state, signing_cert not set. - """ - -class HTTPUnexpectedState(RPKI_Exception): - """ - HTTP event occurred in an unexpected state. - """ - -class HTTPBadVersion(RPKI_Exception): - """ - HTTP couldn't parse HTTP version. - """ - -class HandleTranslationError(RPKI_Exception): - """ - Internal error translating protocol handle -> SQL id. - """ - -class NoObjectAtURI(RPKI_Exception): - """ - No object published at specified URI. - """ - -class CMSContentNotSet(RPKI_Exception): - """ - Inner content of a CMS_object has not been set. 
If object is known - to be valid, the .extract() method should be able to set the - content; otherwise, only the .verify() method (which checks - signatures) is safe. - """ - -class HTTPTimeout(RPKI_Exception): - """ - HTTP connection timed out. - """ - -class BadIPResource(RPKI_Exception): - """ - Parse failure for alleged IP resource string. - """ - -class BadROAPrefix(RPKI_Exception): - """ - Parse failure for alleged ROA prefix string. - """ - -class CommandParseFailure(RPKI_Exception): - """ - Failed to parse command line. - """ - -class CMSCertHasExpired(RPKI_Exception): - """ - CMS certificate has expired. - """ - -class TrustedCMSCertHasExpired(RPKI_Exception): - """ - Trusted CMS certificate has expired. - """ - -class MultipleCMSEECert(RPKI_Exception): - """ - Can't have more than one CMS EE certificate in validation chain. - """ - -class ResourceOverlap(RPKI_Exception): - """ - Overlapping resources in resource_set. - """ - -class CMSReplay(RPKI_Exception): - """ - Possible CMS replay attack detected. - """ - -class PastNotAfter(RPKI_Exception): - """ - Requested notAfter value is already in the past. - """ - -class NullValidityInterval(RPKI_Exception): - """ - Requested validity interval is null. - """ - -class BadX510DN(RPKI_Exception): - """ - X.510 distinguished name does not match profile. - """ - -class BadAutonomousSystemNumber(RPKI_Exception): - """ - Bad AutonomousSystem number. - """ - -class WrongEKU(RPKI_Exception): - """ - Extended Key Usage extension does not match profile. - """ diff --git a/rpkid/rpki/gui/__init__.py b/rpkid/rpki/gui/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/rpkid/rpki/gui/api/__init__.py b/rpkid/rpki/gui/api/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/rpkid/rpki/gui/api/urls.py b/rpkid/rpki/gui/api/urls.py deleted file mode 100644 index 8c9d824c..00000000 --- a/rpkid/rpki/gui/api/urls.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (C) 2012 SPARTA, Inc. 
a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' - -from django.conf.urls.defaults import * -from rpki.gui.routeview.api import route_list - -urlpatterns = patterns('', - (r'^v1/route/$', route_list), -) diff --git a/rpkid/rpki/gui/app/TODO b/rpkid/rpki/gui/app/TODO deleted file mode 100644 index b7136397..00000000 --- a/rpkid/rpki/gui/app/TODO +++ /dev/null @@ -1,60 +0,0 @@ -Use RequestContext (helper function for render_to_response) and a default -list of context processors for the generic functions - -Teach cert_delete about children, conf*, parent* to say what the ramifications -of deleting a cert are. - -Teach cert form about file upload - -Redirect /accounts/profile/ to /dashboard/ - -Teach dashboard view about looking up resources from parent. -There are 3 types of resources: -- Ones we've accepted and match -- Ones we've accepted but don't match - - two subtypes: - * the parent is now giving us a superset of what they used to. - This is relatively easily handled by keeping the subdivisions - we've made and just making the superset resource the new parent - of the existing resource (e.g., we had accepted 18.5.0.0/16 and - they're now giving us 18.0.0.0/8) - * the parent is now giving us a subset (including none) of what they - used to. 
Two sub-cases: - - The part that they took away is neither delegated nor roa'd. - - The part that they took away is either delegated or roa'd or both. -- Ones we haven't accepted yet - -The roa needs to learn to handle its prefix children. It may need to -create the covering set of prefixes for an address range. - -Un'd resources are: -what we've gotten from our parent: -models.AddressRange.objects.filter(from_parent=myconf.pk) -minus what we've given to our children or issued roas for -models.AddressRange.objects.filter(child__conf=myconf.pk) -models.AddressRange.objects.filter(roa__conf=myconf.pk) -or ->>> from django.db.models import Q ->>> models.AddressRange.objects.filter( Q(child__conf=myconf.pk) | - Q(roa__conf=myconf.pk) ) - - -and of course the ASN one is easier: -models.Asn.objects.filter(from_parent=myconf.pk) -minus what we've given to our children -models.Asn.objects.filter(child__conf=myconf.pk) - -look in -rpki/resource_set.py - - -Adding a handle / resource-holding entity / "conf": -- upload the that we've generated and are sending to the parent - -Adding a parent: -- upload the that he sent me - (keep things open to the parent uploading this directly to the web interface) - -Adding a child: -- upload the that he sent me - diff --git a/rpkid/rpki/gui/app/__init__.py b/rpkid/rpki/gui/app/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/rpkid/rpki/gui/app/admin.py b/rpkid/rpki/gui/app/admin.py deleted file mode 100644 index e69de29b..00000000 diff --git a/rpkid/rpki/gui/app/check_expired.py b/rpkid/rpki/gui/app/check_expired.py deleted file mode 100644 index fcf5ecae..00000000 --- a/rpkid/rpki/gui/app/check_expired.py +++ /dev/null @@ -1,209 +0,0 @@ -# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' -__all__ = ('notify_expired', 'NetworkError') - -import sys -import socket -from cStringIO import StringIO -import logging -import datetime - -from rpki.gui.cacheview.models import Cert -from rpki.gui.app.models import Conf, ResourceCert, Timestamp, Alert -from rpki.gui.app.glue import list_received_resources -from rpki.irdb import Zookeeper -from rpki.left_right import report_error_elt, list_published_objects_elt -from rpki.x509 import X509 - -from django.core.mail import send_mail - -logger = logging.getLogger(__name__) -expire_time = 0 # set by notify_expired() -now = 0 - - -def check_cert(handle, p, errs): - """Check the expiration date on the X.509 certificates in each element of - the list. - - The displayed object name defaults to the class name, but can be overridden - using the `object_name` argument. 
- - """ - t = p.certificate.getNotAfter() - if t <= expire_time: - e = 'expired' if t <= now else 'will expire' - errs.write("%(handle)s's %(type)s %(desc)s %(expire)s on %(date)s\n" % { - 'handle': handle, 'type': p.__class__.__name__, 'desc': str(p), - 'expire': e, 'date': t}) - - -def check_cert_list(handle, x, errs): - for p in x: - check_cert(handle, p, errs) - - -def check_expire(conf, errs): - # get certs for `handle' - cert_set = ResourceCert.objects.filter(conf=conf) - for cert in cert_set: - # look up cert in cacheview db - obj_set = Cert.objects.filter(repo__uri=cert.uri) - if not obj_set: - # since the output is cached, this can - # occur if the cache is out of date as well.. - errs.write("Unable to locate rescert in rcynic cache: handle=%s uri=%s not_after=%s\n" % (conf.handle, cert.uri, cert.not_after)) - continue - obj = obj_set[0] - msg = [] - expired = False - for n, c in enumerate(obj.cert_chain): - if c.not_after <= expire_time: - expired = True - f = '*' - else: - f = ' ' - msg.append("%s [%d] uri=%s ski=%s name=%s expires=%s" % (f, n, c.repo.uri, c.keyid, c.name, c.not_after)) - - # find ghostbuster records attached to this cert - for gbr in c.ghostbusters.all(): - info = [] - for s in ('full_name', 'organization', 'email_address', 'telephone'): - t = getattr(gbr, s, None) - if t: - info.append(t) - - msg.append(" Contact: " + ", ".join(info)) - - if expired: - errs.write("%s's rescert from parent %s will expire soon:\n" % ( - conf.handle, - # parent is None for the root cert - cert.parent.handle if cert.parent else 'self' - )) - errs.write("Certificate chain:\n") - errs.write("\n".join(msg)) - errs.write("\n") - - -def check_child_certs(conf, errs): - """Fetch the list of published objects from rpkid, and inspect the issued - resource certs (uri ending in .cer). 
- - """ - z = Zookeeper(handle=conf.handle) - req = list_published_objects_elt.make_pdu(action="list", - tag="list_published_objects", - self_handle=conf.handle) - pdus = z.call_rpkid(req) - for pdu in pdus: - if isinstance(pdu, report_error_elt): - logger.error("rpkid reported an error: %s" % pdu.error_code) - elif isinstance(pdu, list_published_objects_elt): - if pdu.uri.endswith('.cer'): - cert = X509() - cert.set(Base64=pdu.obj) - t = cert.getNotAfter() - if t <= expire_time: - e = 'expired' if t <= now else 'will expire' - errs.write("%(handle)s's rescert for Child %(child)s %(expire)s on %(date)s uri=%(uri)s subject=%(subject)s\n" % { - 'handle': conf.handle, - 'child': pdu.child_handle, - 'uri': pdu.uri, - 'subject': cert.getSubject(), - 'expire': e, - 'date': t}) - - -class NetworkError(Exception): - pass - - -def notify_expired(expire_days=14, from_email=None): - """Send email notificates about impending expirations of resource - and BPKI certificates. - - expire_days: the number of days ahead of today to warn - - from_email: set the From: address for the email - - """ - global expire_time # so i don't have to pass it around - global now - - now = datetime.datetime.utcnow() - expire_time = now + datetime.timedelta(expire_days) - - # this is not exactly right, since we have no way of knowing what the - # vhost for the web portal running on this machine is - host = socket.getfqdn() - if not from_email: - from_email = 'root@' + host - - # Ensure that the rcynic and routeviews data has been updated recently - # The QuerySet is created here so that it will be cached and reused on each - # iteration of the loop below - t = now - datetime.timedelta(hours=12) # 12 hours - stale_timestamps = Timestamp.objects.filter(ts__lte=t) - - # if not arguments are given, query all resource holders - qs = Conf.objects.all() - - # check expiration of certs for all handles managed by the web portal - for h in qs: - # Force cache update since several checks require fresh data - 
try: - list_received_resources(sys.stdout, h) - except socket.error as e: - raise NetworkError('Error while talking to rpkid: %s' % e) - - errs = StringIO() - - # Warn the resource holder admins when data may be out of date - if stale_timestamps: - errs.write('Warning! Stale data from external sources.\n') - errs.write('data source : last import\n') - for obj in stale_timestamps: - errs.write('%-15s: %s\n' % (obj.name, obj.ts)) - errs.write('\n') - - check_cert(h.handle, h, errs) - - # HostedCA is the ResourceHolderCA cross certified under ServerCA, so - # check the ServerCA expiration date as well - check_cert(h.handle, h.hosted_by, errs) - check_cert(h.handle, h.hosted_by.issuer, errs) - - check_cert_list(h.handle, h.bscs.all(), errs) - check_cert_list(h.handle, h.parents.all(), errs) - check_cert_list(h.handle, h.children.all(), errs) - check_cert_list(h.handle, h.repositories.all(), errs) - - check_expire(h, errs) - check_child_certs(h, errs) - - # if there was output, display it now - s = errs.getvalue() - if s: - logger.info(s) - - t = """This is an automated notice about the upcoming expiration of RPKI resources for the handle %s on %s. You are receiving this notification because your email address is either registered in a Ghostbuster record, or as the default email address for the account.\n\n""" % (h.handle, host) - h.send_alert( - subject='RPKI expiration notice for %s' % h.handle, - message=t + s, - from_email=from_email, - severity=Alert.WARNING - ) diff --git a/rpkid/rpki/gui/app/forms.py b/rpkid/rpki/gui/app/forms.py deleted file mode 100644 index 20ce4a07..00000000 --- a/rpkid/rpki/gui/app/forms.py +++ /dev/null @@ -1,442 +0,0 @@ -# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions -# Copyright (C) 2012 SPARTA, Inc. 
a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' - - -from django.contrib.auth.models import User -from django import forms -from rpki.resource_set import (resource_range_as, resource_range_ip) -from rpki.gui.app import models -from rpki.exceptions import BadIPResource -from rpki.POW import IPAddress - - -class AddConfForm(forms.Form): - handle = forms.CharField(required=True, - help_text='your handle for your rpki instance') - run_rpkid = forms.BooleanField(required=False, initial=True, - label='Run rpkid?', - help_text='do you want to run your own instance of rpkid?') - rpkid_server_host = forms.CharField(initial='rpkid.example.org', - label='rpkid hostname', - help_text='publicly visible hostname for your rpkid instance') - rpkid_server_port = forms.IntegerField(initial=4404, - label='rpkid port') - run_pubd = forms.BooleanField(required=False, initial=False, - label='Run pubd?', - help_text='do you want to run your own instance of pubd?') - pubd_server_host = forms.CharField(initial='pubd.example.org', - label='pubd hostname', - help_text='publicly visible hostname for your pubd instance') - pubd_server_port = forms.IntegerField(initial=4402, label='pubd port') - pubd_contact_info = forms.CharField(initial='repo-man@rpki.example.org', - label='Pubd contact', - 
help_text='email address for the operator of your pubd instance') - - -class GhostbusterRequestForm(forms.ModelForm): - """ - Generate a ModelForm with the subset of parents for the current - resource handle. - """ - # override default form field - parent = forms.ModelChoiceField(queryset=None, required=False, - help_text='Specify specific parent, or none for all parents') - - #override - issuer = forms.ModelChoiceField(queryset=None, widget=forms.HiddenInput) - - def __init__(self, *args, **kwargs): - conf = kwargs.pop('conf') - # override initial value for conf in case user tries to alter it - initial = kwargs.setdefault('initial', {}) - initial['issuer'] = conf - super(GhostbusterRequestForm, self).__init__(*args, **kwargs) - self.fields['parent'].queryset = conf.parents.all() - self.fields['issuer'].queryset = models.Conf.objects.filter(pk=conf.pk) - - class Meta: - model = models.GhostbusterRequest - exclude = ('vcard', 'given_name', 'family_name', 'additional_name', - 'honorific_prefix', 'honorific_suffix') - - def clean(self): - email = self.cleaned_data.get('email_address') - postal = self.cleaned_data.get('postal_address') - telephone = self.cleaned_data.get('telephone') - if not any([email, postal, telephone]): - raise forms.ValidationError( - 'One of telephone, email or postal address must be specified') - - return self.cleaned_data - - -class ImportForm(forms.Form): - """Form used for uploading parent/child identity xml files.""" - handle = forms.CharField(required=False, - widget=forms.TextInput(attrs={'class': 'xlarge'}), - help_text='Optional. Your name for this entity, or blank to accept name in XML') - xml = forms.FileField(label='XML file') - - -class ImportRepositoryForm(forms.Form): - handle = forms.CharField(max_length=30, required=False, - label='Parent Handle', - help_text='Optional. 
Must be specified if you use a different name for this parent') - xml = forms.FileField(label='XML file') - - -class ImportClientForm(forms.Form): - """Form used for importing publication client requests.""" - xml = forms.FileField(label='XML file') - - -class ImportCSVForm(forms.Form): - csv = forms.FileField(label='CSV file') - - -class UserCreateForm(forms.Form): - username = forms.CharField(max_length=30) - email = forms.CharField(max_length=30, - help_text='email address for new user') - password = forms.CharField(widget=forms.PasswordInput) - password2 = forms.CharField(widget=forms.PasswordInput, - label='Confirm Password') - resource_holders = forms.ModelMultipleChoiceField( - queryset=models.Conf.objects.all(), - help_text='allowed to manage these resource holders' - - ) - - def clean_username(self): - username = self.cleaned_data.get('username') - if User.objects.filter(username=username).exists(): - raise forms.ValidationError('user already exists') - return username - - def clean(self): - p1 = self.cleaned_data.get('password') - p2 = self.cleaned_data.get('password2') - if p1 != p2: - raise forms.ValidationError('passwords do not match') - return self.cleaned_data - - -class UserEditForm(forms.Form): - """Form for editing a user.""" - email = forms.CharField() - pw = forms.CharField(widget=forms.PasswordInput, label='Password', - required=False) - pw2 = forms.CharField(widget=forms.PasswordInput, label='Confirm password', - required=False) - resource_holders = forms.ModelMultipleChoiceField( - queryset=models.Conf.objects.all(), - help_text='allowed to manage these resource holders' - ) - - def clean(self): - p1 = self.cleaned_data.get('pw') - p2 = self.cleaned_data.get('pw2') - if p1 != p2: - raise forms.ValidationError('Passwords do not match') - return self.cleaned_data - - -class ROARequest(forms.Form): - """Form for entering a ROA request. 
- - Handles both IPv4 and IPv6.""" - - prefix = forms.CharField( - widget=forms.TextInput(attrs={ - 'autofocus': 'true', 'placeholder': 'Prefix', - 'class': 'span4' - }) - ) - max_prefixlen = forms.CharField( - required=False, - widget=forms.TextInput(attrs={ - 'placeholder': 'Max len', - 'class': 'span1' - }) - ) - asn = forms.IntegerField( - widget=forms.TextInput(attrs={ - 'placeholder': 'ASN', - 'class': 'span1' - }) - ) - confirmed = forms.BooleanField(widget=forms.HiddenInput, required=False) - - def __init__(self, *args, **kwargs): - """Takes an optional `conf` keyword argument specifying the user that - is creating the ROAs. It is used for validating that the prefix the - user entered is currently allocated to that user. - - """ - conf = kwargs.pop('conf', None) - kwargs['auto_id'] = False - super(ROARequest, self).__init__(*args, **kwargs) - self.conf = conf - self.inline = True - self.use_table = False - - def _as_resource_range(self): - """Convert the prefix in the form to a - rpki.resource_set.resource_range_ip object. - - If there is no mask provided, assume the closest classful mask. 
- - """ - prefix = self.cleaned_data.get('prefix') - if '/' not in prefix: - p = IPAddress(prefix) - - # determine the first nonzero bit starting from the lsb and - # subtract from the address size to find the closest classful - # mask that contains this single address - prefixlen = 0 - while (p != 0) and (p & 1) == 0: - prefixlen = prefixlen + 1 - p = p >> 1 - mask = p.bits - (8 * (prefixlen / 8)) - prefix = prefix + '/' + str(mask) - - return resource_range_ip.parse_str(prefix) - - def clean_asn(self): - value = self.cleaned_data.get('asn') - if value < 0: - raise forms.ValidationError('AS must be a positive value or 0') - return value - - def clean_prefix(self): - try: - r = self._as_resource_range() - except: - raise forms.ValidationError('invalid prefix') - - manager = models.ResourceRangeAddressV4 if r.version == 4 else models.ResourceRangeAddressV6 - if not manager.objects.filter(cert__conf=self.conf, - prefix_min__lte=r.min, - prefix_max__gte=r.max).exists(): - raise forms.ValidationError('prefix is not allocated to you') - return str(r) - - def clean_max_prefixlen(self): - v = self.cleaned_data.get('max_prefixlen') - if v: - if v[0] == '/': - v = v[1:] # allow user to specify /24 - try: - if int(v) < 0: - raise forms.ValidationError('max prefix length must be positive or 0') - except ValueError: - raise forms.ValidationError('invalid integer value') - return v - - def clean(self): - if 'prefix' in self.cleaned_data: - r = self._as_resource_range() - max_prefixlen = self.cleaned_data.get('max_prefixlen') - max_prefixlen = int(max_prefixlen) if max_prefixlen else r.prefixlen() - if max_prefixlen < r.prefixlen(): - raise forms.ValidationError( - 'max prefix length must be greater than or equal to the prefix length') - if max_prefixlen > r.min.bits: - raise forms.ValidationError, \ - 'max prefix length (%d) is out of range for IP version (%d)' % (max_prefixlen, r.min.bits) - self.cleaned_data['max_prefixlen'] = str(max_prefixlen) - return self.cleaned_data - - 
-class ROARequestConfirm(forms.Form): - asn = forms.IntegerField(widget=forms.HiddenInput) - prefix = forms.CharField(widget=forms.HiddenInput) - max_prefixlen = forms.IntegerField(widget=forms.HiddenInput) - - def clean_asn(self): - value = self.cleaned_data.get('asn') - if value < 0: - raise forms.ValidationError('AS must be a positive value or 0') - return value - - def clean_prefix(self): - try: - r = resource_range_ip.parse_str(self.cleaned_data.get('prefix')) - except BadIPResource: - raise forms.ValidationError('invalid prefix') - return str(r) - - def clean(self): - try: - r = resource_range_ip.parse_str(self.cleaned_data.get('prefix')) - if r.prefixlen() > self.cleaned_data.get('max_prefixlen'): - raise forms.ValidationError('max length is smaller than mask') - except BadIPResource: - pass - return self.cleaned_data - - -class AddASNForm(forms.Form): - """ - Returns a forms.Form subclass which verifies that the entered ASN range - does not overlap with a previous allocation to the specified child, and - that the ASN range is within the range allocated to the parent. 
- - """ - - asns = forms.CharField( - label='ASNs', - help_text='single ASN or range', - widget=forms.TextInput(attrs={'autofocus': 'true'}) - ) - - def __init__(self, *args, **kwargs): - self.child = kwargs.pop('child') - super(AddASNForm, self).__init__(*args, **kwargs) - - def clean_asns(self): - try: - r = resource_range_as.parse_str(self.cleaned_data.get('asns')) - except: - raise forms.ValidationError('invalid AS or range') - - if not models.ResourceRangeAS.objects.filter( - cert__conf=self.child.issuer, - min__lte=r.min, - max__gte=r.max).exists(): - raise forms.ValidationError('AS or range is not delegated to you') - - # determine if the entered range overlaps with any AS already - # allocated to this child - if self.child.asns.filter(end_as__gte=r.min, start_as__lte=r.max).exists(): - raise forms.ValidationError( - 'Overlap with previous allocation to this child') - - return str(r) - - -class AddNetForm(forms.Form): - """ - Returns a forms.Form subclass which validates that the entered address - range is within the resources allocated to the parent, and does not overlap - with what is already allocated to the specified child. 
- - """ - address_range = forms.CharField( - help_text='CIDR or range', - widget=forms.TextInput(attrs={'autofocus': 'true'}) - ) - - def __init__(self, *args, **kwargs): - self.child = kwargs.pop('child') - super(AddNetForm, self).__init__(*args, **kwargs) - - def clean_address_range(self): - address_range = self.cleaned_data.get('address_range') - try: - r = resource_range_ip.parse_str(address_range) - if r.version == 6: - qs = models.ResourceRangeAddressV6 - version = 'IPv6' - else: - qs = models.ResourceRangeAddressV4 - version = 'IPv4' - except BadIPResource: - raise forms.ValidationError('invalid IP address range') - - if not qs.objects.filter(cert__conf=self.child.issuer, - prefix_min__lte=r.min, - prefix_max__gte=r.max).exists(): - raise forms.ValidationError( - 'IP address range is not delegated to you') - - # determine if the entered range overlaps with any prefix - # already allocated to this child - for n in self.child.address_ranges.filter(version=version): - rng = n.as_resource_range() - if r.max >= rng.min and r.min <= rng.max: - raise forms.ValidationError( - 'Overlap with previous allocation to this child') - - return str(r) - - -def ChildForm(instance): - """ - Form for editing a Child model. - - This is roughly based on the equivalent ModelForm, but uses Form as a base - class so that selection boxes for the AS and Prefixes can be edited in a - single form. 
- - """ - - class _wrapped(forms.Form): - valid_until = forms.DateTimeField(initial=instance.valid_until) - as_ranges = forms.ModelMultipleChoiceField(queryset=models.ChildASN.objects.filter(child=instance), - required=False, - label='AS Ranges', - help_text='deselect to remove delegation') - address_ranges = forms.ModelMultipleChoiceField(queryset=models.ChildNet.objects.filter(child=instance), - required=False, - help_text='deselect to remove delegation') - - return _wrapped - - -class Empty(forms.Form): - """Stub form for views requiring confirmation.""" - pass - - -class ResourceHolderForm(forms.Form): - """form for editing ACL on Conf objects.""" - users = forms.ModelMultipleChoiceField( - queryset=User.objects.all(), - help_text='users allowed to mange this resource holder' - ) - - -class ResourceHolderCreateForm(forms.Form): - """form for creating new resource holdres.""" - handle = forms.CharField(max_length=30) - parent = forms.ModelChoiceField( - required=False, - queryset=models.Conf.objects.all(), - help_text='optionally make the new resource holder a child of this resource holder' - ) - users = forms.ModelMultipleChoiceField( - required=False, - queryset=User.objects.all(), - help_text='users allowed to mange this resource holder' - ) - - def clean_handle(self): - handle = self.cleaned_data.get('handle') - if models.Conf.objects.filter(handle=handle).exists(): - raise forms.ValidationError( - 'a resource holder with that handle already exists' - ) - return handle - - def clean(self): - handle = self.cleaned_data.get('handle') - parent = self.cleaned_data.get('parent') - if handle and parent and parent.children.filter(handle=handle).exists(): - raise forms.ValidationError('parent already has a child by that name') - return self.cleaned_data diff --git a/rpkid/rpki/gui/app/glue.py b/rpkid/rpki/gui/app/glue.py deleted file mode 100644 index a9f6441e..00000000 --- a/rpkid/rpki/gui/app/glue.py +++ /dev/null @@ -1,132 +0,0 @@ -# Copyright (C) 2010, 2011 
SPARTA, Inc. dba Cobham Analytic Solutions -# Copyright (C) 2012 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -This file contains code that interfaces between the django views implementing -the portal gui and the rpki.* modules. - -""" - -from __future__ import with_statement - -__version__ = '$Id$' - -from datetime import datetime - -from rpki.resource_set import (resource_set_as, resource_set_ipv4, - resource_set_ipv6, resource_range_ipv4, - resource_range_ipv6) -from rpki.left_right import list_received_resources_elt, report_error_elt -from rpki.irdb.zookeeper import Zookeeper -from rpki.gui.app import models -from rpki.exceptions import BadIPResource - -from django.contrib.auth.models import User -from django.db.transaction import commit_on_success - - -def ghostbuster_to_vcard(gbr): - """Convert a GhostbusterRequest object into a vCard object.""" - import vobject - - vcard = vobject.vCard() - vcard.add('N').value = vobject.vcard.Name(family=gbr.family_name, - given=gbr.given_name) - - adr_fields = ['box', 'extended', 'street', 'city', 'region', 'code', - 'country'] - adr_dict = dict((f, getattr(gbr, f, '')) for f in adr_fields) - if any(adr_dict.itervalues()): - vcard.add('ADR').value = vobject.vcard.Address(**adr_dict) - - # mapping from vCard type to 
Ghostbuster model field - # the ORG type is a sequence of organization unit names, so - # transform the org name into a tuple before stuffing into the - # vCard object - attrs = [('FN', 'full_name', None), - ('TEL', 'telephone', None), - ('ORG', 'organization', lambda x: (x,)), - ('EMAIL', 'email_address', None)] - for vtype, field, transform in attrs: - v = getattr(gbr, field) - if v: - vcard.add(vtype).value = transform(v) if transform else v - return vcard.serialize() - - -class LeftRightError(Exception): - """Class for wrapping report_error_elt errors from Zookeeper.call_rpkid(). - - It expects a single argument, which is the associated report_error_elt instance.""" - - def __str__(self): - return 'Error occurred while communicating with rpkid: handle=%s code=%s text=%s' % ( - self.args[0].self_handle, - self.args[0].error_code, - self.args[0].error_text) - - -@commit_on_success -def list_received_resources(log, conf): - """ - Query rpkid for this resource handle's received resources. - - The semantics are to clear the entire table and populate with the list of - certs received. Other models should not reference the table directly with - foreign keys. 
- - """ - - z = Zookeeper(handle=conf.handle) - pdus = z.call_rpkid(list_received_resources_elt.make_pdu(self_handle=conf.handle)) - # pdus is sometimes None (see https://trac.rpki.net/ticket/681) - if pdus is None: - print >>log, 'error: call_rpkid() returned None for handle %s when fetching received resources' % conf.handle - return - - models.ResourceCert.objects.filter(conf=conf).delete() - - for pdu in pdus: - if isinstance(pdu, report_error_elt): - # this will cause the db to be rolled back so the above delete() - # won't clobber existing resources - raise LeftRightError, pdu - elif isinstance(pdu, list_received_resources_elt): - if pdu.parent_handle != conf.handle: - parent = models.Parent.objects.get(issuer=conf, - handle=pdu.parent_handle) - else: - # root cert, self-signed - parent = None - - not_before = datetime.strptime(pdu.notBefore, "%Y-%m-%dT%H:%M:%SZ") - not_after = datetime.strptime(pdu.notAfter, "%Y-%m-%dT%H:%M:%SZ") - - cert = models.ResourceCert.objects.create( - conf=conf, parent=parent, not_before=not_before, - not_after=not_after, uri=pdu.uri) - - for asn in resource_set_as(pdu.asn): - cert.asn_ranges.create(min=asn.min, max=asn.max) - - for rng in resource_set_ipv4(pdu.ipv4): - cert.address_ranges.create(prefix_min=rng.min, - prefix_max=rng.max) - - for rng in resource_set_ipv6(pdu.ipv6): - cert.address_ranges_v6.create(prefix_min=rng.min, - prefix_max=rng.max) - else: - print >>log, "error: unexpected pdu from rpkid type=%s" % type(pdu) diff --git a/rpkid/rpki/gui/app/migrations/0001_initial.py b/rpkid/rpki/gui/app/migrations/0001_initial.py deleted file mode 100644 index 80877901..00000000 --- a/rpkid/rpki/gui/app/migrations/0001_initial.py +++ /dev/null @@ -1,192 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -from south.db import db -from south.v2 import SchemaMigration -from django.db import models - - -class Migration(SchemaMigration): - - def forwards(self, orm): - # Adding model 'ResourceCert' - 
db.create_table('app_resourcecert', ( - ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), - ('parent', self.gf('django.db.models.fields.related.ForeignKey')(related_name='certs', to=orm['irdb.Parent'])), - ('not_before', self.gf('django.db.models.fields.DateTimeField')()), - ('not_after', self.gf('django.db.models.fields.DateTimeField')()), - ('uri', self.gf('django.db.models.fields.CharField')(max_length=255)), - )) - db.send_create_signal('app', ['ResourceCert']) - - # Adding model 'ResourceRangeAddressV4' - db.create_table('app_resourcerangeaddressv4', ( - ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), - ('prefix_min', self.gf('rpki.gui.models.IPv4AddressField')(db_index=True)), - ('prefix_max', self.gf('rpki.gui.models.IPv4AddressField')(db_index=True)), - ('cert', self.gf('django.db.models.fields.related.ForeignKey')(related_name='address_ranges', to=orm['app.ResourceCert'])), - )) - db.send_create_signal('app', ['ResourceRangeAddressV4']) - - # Adding model 'ResourceRangeAddressV6' - db.create_table('app_resourcerangeaddressv6', ( - ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), - ('prefix_min', self.gf('rpki.gui.models.IPv6AddressField')(db_index=True)), - ('prefix_max', self.gf('rpki.gui.models.IPv6AddressField')(db_index=True)), - ('cert', self.gf('django.db.models.fields.related.ForeignKey')(related_name='address_ranges_v6', to=orm['app.ResourceCert'])), - )) - db.send_create_signal('app', ['ResourceRangeAddressV6']) - - # Adding model 'ResourceRangeAS' - db.create_table('app_resourcerangeas', ( - ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), - ('min', self.gf('django.db.models.fields.PositiveIntegerField')()), - ('max', self.gf('django.db.models.fields.PositiveIntegerField')()), - ('cert', self.gf('django.db.models.fields.related.ForeignKey')(related_name='asn_ranges', to=orm['app.ResourceCert'])), - )) - db.send_create_signal('app', ['ResourceRangeAS']) - - 
# Adding model 'GhostbusterRequest' - db.create_table('app_ghostbusterrequest', ( - ('ghostbusterrequest_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['irdb.GhostbusterRequest'], unique=True, primary_key=True)), - ('full_name', self.gf('django.db.models.fields.CharField')(max_length=40)), - ('family_name', self.gf('django.db.models.fields.CharField')(max_length=20)), - ('given_name', self.gf('django.db.models.fields.CharField')(max_length=20)), - ('additional_name', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)), - ('honorific_prefix', self.gf('django.db.models.fields.CharField')(max_length=10, null=True, blank=True)), - ('honorific_suffix', self.gf('django.db.models.fields.CharField')(max_length=10, null=True, blank=True)), - ('email_address', self.gf('django.db.models.fields.EmailField')(max_length=75, null=True, blank=True)), - ('organization', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), - ('telephone', self.gf('rpki.gui.app.models.TelephoneField')(max_length=40, null=True, blank=True)), - ('box', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)), - ('extended', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), - ('street', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), - ('city', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)), - ('region', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)), - ('code', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)), - ('country', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)), - )) - db.send_create_signal('app', ['GhostbusterRequest']) - - # Adding model 'Timestamp' - db.create_table('app_timestamp', ( - ('name', self.gf('django.db.models.fields.CharField')(max_length=30, 
primary_key=True)), - ('ts', self.gf('django.db.models.fields.DateTimeField')()), - )) - db.send_create_signal('app', ['Timestamp']) - - - def backwards(self, orm): - # Deleting model 'ResourceCert' - db.delete_table('app_resourcecert') - - # Deleting model 'ResourceRangeAddressV4' - db.delete_table('app_resourcerangeaddressv4') - - # Deleting model 'ResourceRangeAddressV6' - db.delete_table('app_resourcerangeaddressv6') - - # Deleting model 'ResourceRangeAS' - db.delete_table('app_resourcerangeas') - - # Deleting model 'GhostbusterRequest' - db.delete_table('app_ghostbusterrequest') - - # Deleting model 'Timestamp' - db.delete_table('app_timestamp') - - - models = { - 'app.ghostbusterrequest': { - 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, - 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), - 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), - 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), - 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), - 'given_name': 
('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) - }, - 'app.resourcecert': { - 'Meta': {'object_name': 'ResourceCert'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'not_after': ('django.db.models.fields.DateTimeField', [], {}), - 'not_before': ('django.db.models.fields.DateTimeField', [], {}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.Parent']"}), - 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - }, - 'app.resourcerangeaddressv4': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeaddressv6': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), - 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeas': { - 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), - 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) - }, - 'app.timestamp': { - 'Meta': {'object_name': 'Timestamp'}, - 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), - 'ts': ('django.db.models.fields.DateTimeField', [], {}) - }, - 'irdb.ghostbusterrequest': { - 'Meta': {'object_name': 'GhostbusterRequest'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), - 'vcard': ('django.db.models.fields.TextField', [], {}) - }, - 'irdb.parent': { - 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent_handle': ('rpki.irdb.models.HandleField', [], 
{'max_length': '120'}), - 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), - 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), - 'repository_type': ('rpki.irdb.models.EnumField', [], {}), - 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) - }, - 'irdb.resourceholderca': { - 'Meta': {'object_name': 'ResourceHolderCA'}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), - 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) - }, - 'irdb.turtle': { - 'Meta': {'object_name': 'Turtle'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - } - } - - complete_apps = ['app'] \ No newline at end of file diff --git a/rpkid/rpki/gui/app/migrations/0002_auto__add_field_resourcecert_conf.py b/rpkid/rpki/gui/app/migrations/0002_auto__add_field_resourcecert_conf.py deleted file mode 100644 index d3326f90..00000000 --- a/rpkid/rpki/gui/app/migrations/0002_auto__add_field_resourcecert_conf.py +++ /dev/null @@ -1,117 +0,0 @@ -# -*- coding: 
utf-8 -*- -import datetime -from south.db import db -from south.v2 import SchemaMigration -from django.db import models - - -class Migration(SchemaMigration): - - def forwards(self, orm): - # Adding field 'ResourceCert.conf' - db.add_column('app_resourcecert', 'conf', - self.gf('django.db.models.fields.related.ForeignKey')(related_name='certs', null=True, to=orm['irdb.ResourceHolderCA']), - keep_default=False) - - - def backwards(self, orm): - # Deleting field 'ResourceCert.conf' - db.delete_column('app_resourcecert', 'conf_id') - - - models = { - 'app.ghostbusterrequest': { - 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, - 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), - 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), - 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), - 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), - 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 
'null': 'True', 'blank': 'True'}), - 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) - }, - 'app.resourcecert': { - 'Meta': {'object_name': 'ResourceCert'}, - 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.ResourceHolderCA']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'not_after': ('django.db.models.fields.DateTimeField', [], {}), - 'not_before': ('django.db.models.fields.DateTimeField', [], {}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.Parent']"}), - 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - }, - 'app.resourcerangeaddressv4': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeaddressv6': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), - 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeas': { - 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), - 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) - }, - 'app.timestamp': { - 'Meta': {'object_name': 'Timestamp'}, - 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), - 'ts': ('django.db.models.fields.DateTimeField', [], {}) - }, - 'irdb.ghostbusterrequest': { - 'Meta': {'object_name': 'GhostbusterRequest'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), - 'vcard': ('django.db.models.fields.TextField', [], {}) - }, - 'irdb.parent': { - 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent_handle': ('rpki.irdb.models.HandleField', [], 
{'max_length': '120'}), - 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), - 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), - 'repository_type': ('rpki.irdb.models.EnumField', [], {}), - 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) - }, - 'irdb.resourceholderca': { - 'Meta': {'object_name': 'ResourceHolderCA'}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), - 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) - }, - 'irdb.turtle': { - 'Meta': {'object_name': 'Turtle'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - } - } - - complete_apps = ['app'] \ No newline at end of file diff --git a/rpkid/rpki/gui/app/migrations/0003_set_conf_from_parent.py b/rpkid/rpki/gui/app/migrations/0003_set_conf_from_parent.py deleted file mode 100644 index a90a11cc..00000000 --- a/rpkid/rpki/gui/app/migrations/0003_set_conf_from_parent.py +++ /dev/null @@ -1,116 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -from south.db 
import db -from south.v2 import DataMigration -from django.db import models - -class Migration(DataMigration): - - def forwards(self, orm): - "Write your forwards methods here." - # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..." - for cert in orm.ResourceCert.objects.all(): - cert.conf = cert.parent.issuer - cert.save() - - def backwards(self, orm): - "Write your backwards methods here." - pass - - models = { - 'app.ghostbusterrequest': { - 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, - 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), - 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), - 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), - 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), - 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'honorific_suffix': ('django.db.models.fields.CharField', [], 
{'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) - }, - 'app.resourcecert': { - 'Meta': {'object_name': 'ResourceCert'}, - 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.ResourceHolderCA']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'not_after': ('django.db.models.fields.DateTimeField', [], {}), - 'not_before': ('django.db.models.fields.DateTimeField', [], {}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.Parent']"}), - 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - }, - 'app.resourcerangeaddressv4': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeaddressv6': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': 
('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeas': { - 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), - 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) - }, - 'app.timestamp': { - 'Meta': {'object_name': 'Timestamp'}, - 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), - 'ts': ('django.db.models.fields.DateTimeField', [], {}) - }, - 'irdb.ghostbusterrequest': { - 'Meta': {'object_name': 'GhostbusterRequest'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), - 'vcard': ('django.db.models.fields.TextField', [], {}) - }, - 'irdb.parent': { - 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], 
{'default': 'None', 'null': 'True', 'blank': 'True'}), - 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), - 'repository_type': ('rpki.irdb.models.EnumField', [], {}), - 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) - }, - 'irdb.resourceholderca': { - 'Meta': {'object_name': 'ResourceHolderCA'}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), - 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) - }, - 'irdb.turtle': { - 'Meta': {'object_name': 'Turtle'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - } - } - - complete_apps = ['app'] - symmetrical = True diff --git a/rpkid/rpki/gui/app/migrations/0004_auto__chg_field_resourcecert_conf.py b/rpkid/rpki/gui/app/migrations/0004_auto__chg_field_resourcecert_conf.py deleted file mode 100644 index a236ad4a..00000000 --- a/rpkid/rpki/gui/app/migrations/0004_auto__chg_field_resourcecert_conf.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -from south.db import db -from south.v2 import SchemaMigration -from django.db 
import models - - -class Migration(SchemaMigration): - - def forwards(self, orm): - - # Changing field 'ResourceCert.conf' - db.alter_column('app_resourcecert', 'conf_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['irdb.ResourceHolderCA'])) - - def backwards(self, orm): - - # Changing field 'ResourceCert.conf' - db.alter_column('app_resourcecert', 'conf_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['irdb.ResourceHolderCA'])) - - models = { - 'app.ghostbusterrequest': { - 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, - 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), - 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), - 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), - 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), - 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'honorific_suffix': 
('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) - }, - 'app.resourcecert': { - 'Meta': {'object_name': 'ResourceCert'}, - 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'not_after': ('django.db.models.fields.DateTimeField', [], {}), - 'not_before': ('django.db.models.fields.DateTimeField', [], {}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.Parent']"}), - 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - }, - 'app.resourcerangeaddressv4': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeaddressv6': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': 
('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeas': { - 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), - 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) - }, - 'app.timestamp': { - 'Meta': {'object_name': 'Timestamp'}, - 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), - 'ts': ('django.db.models.fields.DateTimeField', [], {}) - }, - 'irdb.ghostbusterrequest': { - 'Meta': {'object_name': 'GhostbusterRequest'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), - 'vcard': ('django.db.models.fields.TextField', [], {}) - }, - 'irdb.parent': { - 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], 
{'default': 'None', 'null': 'True', 'blank': 'True'}), - 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), - 'repository_type': ('rpki.irdb.models.EnumField', [], {}), - 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) - }, - 'irdb.resourceholderca': { - 'Meta': {'object_name': 'ResourceHolderCA'}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), - 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) - }, - 'irdb.turtle': { - 'Meta': {'object_name': 'Turtle'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - } - } - - complete_apps = ['app'] diff --git a/rpkid/rpki/gui/app/migrations/0005_auto__chg_field_resourcecert_parent.py b/rpkid/rpki/gui/app/migrations/0005_auto__chg_field_resourcecert_parent.py deleted file mode 100644 index 11e9c814..00000000 --- a/rpkid/rpki/gui/app/migrations/0005_auto__chg_field_resourcecert_parent.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -from south.db import db -from south.v2 import SchemaMigration -from django.db import models - - 
-class Migration(SchemaMigration): - - def forwards(self, orm): - - # Changing field 'ResourceCert.parent' - db.alter_column('app_resourcecert', 'parent_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['irdb.Parent'])) - - def backwards(self, orm): - - # Changing field 'ResourceCert.parent' - db.alter_column('app_resourcecert', 'parent_id', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['irdb.Parent'])) - - models = { - 'app.ghostbusterrequest': { - 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, - 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), - 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), - 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), - 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), - 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'honorific_suffix': 
('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) - }, - 'app.resourcecert': { - 'Meta': {'object_name': 'ResourceCert'}, - 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'not_after': ('django.db.models.fields.DateTimeField', [], {}), - 'not_before': ('django.db.models.fields.DateTimeField', [], {}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.Parent']"}), - 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - }, - 'app.resourcerangeaddressv4': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeaddressv6': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 
'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeas': { - 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), - 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) - }, - 'app.timestamp': { - 'Meta': {'object_name': 'Timestamp'}, - 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), - 'ts': ('django.db.models.fields.DateTimeField', [], {}) - }, - 'irdb.ghostbusterrequest': { - 'Meta': {'object_name': 'GhostbusterRequest'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), - 'vcard': ('django.db.models.fields.TextField', [], {}) - }, - 'irdb.parent': { - 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'referral_authorization': 
('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), - 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), - 'repository_type': ('rpki.irdb.models.EnumField', [], {}), - 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) - }, - 'irdb.resourceholderca': { - 'Meta': {'object_name': 'ResourceHolderCA'}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), - 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) - }, - 'irdb.turtle': { - 'Meta': {'object_name': 'Turtle'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - } - } - - complete_apps = ['app'] \ No newline at end of file diff --git a/rpkid/rpki/gui/app/migrations/0006_add_conf_acl.py b/rpkid/rpki/gui/app/migrations/0006_add_conf_acl.py deleted file mode 100644 index 88fe8171..00000000 --- a/rpkid/rpki/gui/app/migrations/0006_add_conf_acl.py +++ /dev/null @@ -1,168 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -from south.db import db -from south.v2 import SchemaMigration -from django.db import models 
- - -class Migration(SchemaMigration): - - def forwards(self, orm): - # Adding model 'ConfACL' - db.create_table('app_confacl', ( - ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), - ('conf', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['irdb.ResourceHolderCA'])), - ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])), - )) - db.send_create_signal('app', ['ConfACL']) - - # Adding unique constraint on 'ConfACL', fields ['user', 'conf'] - db.create_unique('app_confacl', ['user_id', 'conf_id']) - - - def backwards(self, orm): - # Removing unique constraint on 'ConfACL', fields ['user', 'conf'] - db.delete_unique('app_confacl', ['user_id', 'conf_id']) - - # Deleting model 'ConfACL' - db.delete_table('app_confacl') - - - models = { - 'app.confacl': { - 'Meta': {'unique_together': "(('user', 'conf'),)", 'object_name': 'ConfACL'}, - 'conf': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['irdb.ResourceHolderCA']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) - }, - 'app.ghostbusterrequest': { - 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, - 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), - 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 
'blank': 'True'}), - 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), - 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), - 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) - }, - 'app.resourcecert': { - 'Meta': {'object_name': 'ResourceCert'}, - 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'not_after': ('django.db.models.fields.DateTimeField', [], {}), - 'not_before': ('django.db.models.fields.DateTimeField', [], {}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.Parent']"}), - 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - }, - 'app.resourcerangeaddressv4': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, - 'cert': 
('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeaddressv6': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeas': { - 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), - 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) - }, - 'app.timestamp': { - 'Meta': {'object_name': 'Timestamp'}, - 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), - 'ts': ('django.db.models.fields.DateTimeField', [], {}) - }, - 'auth.group': { - 'Meta': {'object_name': 'Group'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), - 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) - }, - 'auth.permission': { - 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 
'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, - 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) - }, - 'auth.user': { - 'Meta': {'object_name': 'User'}, - 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), - 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), - 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), - 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), - 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), - 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), - 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), - 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), - 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) - }, - 'contenttypes.contenttype': { - 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, - 'app_label': 
('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) - }, - 'irdb.ghostbusterrequest': { - 'Meta': {'object_name': 'GhostbusterRequest'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), - 'vcard': ('django.db.models.fields.TextField', [], {}) - }, - 'irdb.parent': { - 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), - 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), - 'repository_type': ('rpki.irdb.models.EnumField', [], {}), - 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) - }, - 'irdb.resourceholderca': { - 'Meta': {'object_name': 
'ResourceHolderCA'}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), - 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) - }, - 'irdb.turtle': { - 'Meta': {'object_name': 'Turtle'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - } - } - - complete_apps = ['app'] \ No newline at end of file diff --git a/rpkid/rpki/gui/app/migrations/0007_default_acls.py b/rpkid/rpki/gui/app/migrations/0007_default_acls.py deleted file mode 100644 index 40656d0f..00000000 --- a/rpkid/rpki/gui/app/migrations/0007_default_acls.py +++ /dev/null @@ -1,165 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -from south.db import db -from south.v2 import DataMigration -from django.db import models -from django.core.exceptions import ObjectDoesNotExist - -class Migration(DataMigration): - - def forwards(self, orm): - "Write your forwards methods here." - # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..." - for conf in orm['irdb.ResourceHolderCA'].objects.all(): - try: - user = orm['auth.User'].objects.get(username=conf.handle) - orm['app.ConfACL'].objects.create( - conf=conf, - user=user - ) - except ObjectDoesNotExist: - pass - - def backwards(self, orm): - "Write your backwards methods here." 
- orm['app.ConfACL'].objects.all().delete() - - models = { - 'app.confacl': { - 'Meta': {'unique_together': "(('user', 'conf'),)", 'object_name': 'ConfACL'}, - 'conf': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['irdb.ResourceHolderCA']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) - }, - 'app.ghostbusterrequest': { - 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, - 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), - 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), - 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), - 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), - 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 
'null': 'True', 'blank': 'True'}), - 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) - }, - 'app.resourcecert': { - 'Meta': {'object_name': 'ResourceCert'}, - 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'not_after': ('django.db.models.fields.DateTimeField', [], {}), - 'not_before': ('django.db.models.fields.DateTimeField', [], {}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.Parent']"}), - 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - }, - 'app.resourcerangeaddressv4': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeaddressv6': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], 
{'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeas': { - 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), - 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) - }, - 'app.timestamp': { - 'Meta': {'object_name': 'Timestamp'}, - 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), - 'ts': ('django.db.models.fields.DateTimeField', [], {}) - }, - 'auth.group': { - 'Meta': {'object_name': 'Group'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), - 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) - }, - 'auth.permission': { - 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, - 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) - }, - 'auth.user': { - 'Meta': {'object_name': 'User'}, - 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), - 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), - 'first_name': ('django.db.models.fields.CharField', 
[], {'max_length': '30', 'blank': 'True'}), - 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), - 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), - 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), - 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), - 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), - 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) - }, - 'contenttypes.contenttype': { - 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, - 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) - }, - 'irdb.ghostbusterrequest': { - 'Meta': {'object_name': 'GhostbusterRequest'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), - 
'vcard': ('django.db.models.fields.TextField', [], {}) - }, - 'irdb.parent': { - 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), - 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), - 'repository_type': ('rpki.irdb.models.EnumField', [], {}), - 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) - }, - 'irdb.resourceholderca': { - 'Meta': {'object_name': 'ResourceHolderCA'}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), - 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'next_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 
'True'}) - }, - 'irdb.turtle': { - 'Meta': {'object_name': 'Turtle'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - } - } - - complete_apps = ['app'] - symmetrical = True diff --git a/rpkid/rpki/gui/app/migrations/0008_add_alerts.py b/rpkid/rpki/gui/app/migrations/0008_add_alerts.py deleted file mode 100644 index 77af68d2..00000000 --- a/rpkid/rpki/gui/app/migrations/0008_add_alerts.py +++ /dev/null @@ -1,176 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -from south.db import db -from south.v2 import SchemaMigration -from django.db import models - - -class Migration(SchemaMigration): - - def forwards(self, orm): - # Adding model 'Alert' - db.create_table('app_alert', ( - ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), - ('conf', self.gf('django.db.models.fields.related.ForeignKey')(related_name='alerts', to=orm['irdb.ResourceHolderCA'])), - ('severity', self.gf('django.db.models.fields.SmallIntegerField')(default=0)), - ('when', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), - ('seen', self.gf('django.db.models.fields.BooleanField')(default=False)), - ('subject', self.gf('django.db.models.fields.CharField')(max_length=66)), - ('text', self.gf('django.db.models.fields.TextField')()), - )) - db.send_create_signal('app', ['Alert']) - - - def backwards(self, orm): - # Deleting model 'Alert' - db.delete_table('app_alert') - - - models = { - 'app.alert': { - 'Meta': {'object_name': 'Alert'}, - 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'alerts'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'seen': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'severity': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), - 'subject': 
('django.db.models.fields.CharField', [], {'max_length': '66'}), - 'text': ('django.db.models.fields.TextField', [], {}), - 'when': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}) - }, - 'app.confacl': { - 'Meta': {'unique_together': "(('user', 'conf'),)", 'object_name': 'ConfACL'}, - 'conf': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['irdb.ResourceHolderCA']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) - }, - 'app.ghostbusterrequest': { - 'Meta': {'ordering': "('family_name', 'given_name')", 'object_name': 'GhostbusterRequest', '_ormbases': ['irdb.GhostbusterRequest']}, - 'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}), - 'box': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'city': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'code': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'country': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), - 'extended': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'family_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'full_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), - 'ghostbusterrequest_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.GhostbusterRequest']", 'unique': 'True', 'primary_key': 'True'}), - 'given_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'honorific_prefix': 
('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'region': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'telephone': ('rpki.gui.app.models.TelephoneField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}) - }, - 'app.resourcecert': { - 'Meta': {'object_name': 'ResourceCert'}, - 'conf': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'not_after': ('django.db.models.fields.DateTimeField', [], {}), - 'not_before': ('django.db.models.fields.DateTimeField', [], {}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'certs'", 'null': 'True', 'to': "orm['irdb.Parent']"}), - 'uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - }, - 'app.resourcerangeaddressv4': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV4'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'address_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv4AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeaddressv6': { - 'Meta': {'ordering': "('prefix_min',)", 'object_name': 'ResourceRangeAddressV6'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': 
"'address_ranges_v6'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'prefix_max': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}), - 'prefix_min': ('rpki.gui.models.IPv6AddressField', [], {'db_index': 'True'}) - }, - 'app.resourcerangeas': { - 'Meta': {'ordering': "('min', 'max')", 'object_name': 'ResourceRangeAS'}, - 'cert': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asn_ranges'", 'to': "orm['app.ResourceCert']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'max': ('django.db.models.fields.PositiveIntegerField', [], {}), - 'min': ('django.db.models.fields.PositiveIntegerField', [], {}) - }, - 'app.timestamp': { - 'Meta': {'object_name': 'Timestamp'}, - 'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}), - 'ts': ('django.db.models.fields.DateTimeField', [], {}) - }, - 'auth.group': { - 'Meta': {'object_name': 'Group'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), - 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) - }, - 'auth.permission': { - 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, - 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) - }, - 'auth.user': { - 'Meta': {'object_name': 'User'}, - 'date_joined': ('django.db.models.fields.DateTimeField', 
[], {'default': 'datetime.datetime.now'}), - 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), - 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), - 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), - 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), - 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), - 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), - 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), - 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) - }, - 'contenttypes.contenttype': { - 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, - 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) - }, - 'irdb.ghostbusterrequest': { - 'Meta': {'object_name': 'GhostbusterRequest'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'to': 
"orm['irdb.ResourceHolderCA']"}), - 'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ghostbuster_requests'", 'null': 'True', 'to': "orm['irdb.Parent']"}), - 'vcard': ('django.db.models.fields.TextField', [], {}) - }, - 'irdb.parent': { - 'Meta': {'unique_together': "(('issuer', 'handle'),)", 'object_name': 'Parent', '_ormbases': ['irdb.Turtle']}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'child_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'issuer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parents'", 'to': "orm['irdb.ResourceHolderCA']"}), - 'parent_handle': ('rpki.irdb.models.HandleField', [], {'max_length': '120'}), - 'referral_authorization': ('rpki.irdb.models.SignedReferralField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), - 'referrer': ('rpki.irdb.models.HandleField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}), - 'repository_type': ('rpki.irdb.models.EnumField', [], {}), - 'ta': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'turtle_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['irdb.Turtle']", 'unique': 'True', 'primary_key': 'True'}) - }, - 'irdb.resourceholderca': { - 'Meta': {'object_name': 'ResourceHolderCA'}, - 'certificate': ('rpki.irdb.models.CertificateField', [], {'default': 'None', 'blank': 'True'}), - 'handle': ('rpki.irdb.models.HandleField', [], {'unique': 'True', 'max_length': '120'}), - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'last_crl_update': ('rpki.irdb.models.SundialField', [], {}), - 'latest_crl': ('rpki.irdb.models.CRLField', [], {'default': 'None', 'blank': 'True'}), - 'next_crl_number': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'next_crl_update': 
('rpki.irdb.models.SundialField', [], {}), - 'next_serial': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}), - 'private_key': ('rpki.irdb.models.RSAKeyField', [], {'default': 'None', 'blank': 'True'}) - }, - 'irdb.turtle': { - 'Meta': {'object_name': 'Turtle'}, - 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'service_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}) - } - } - - complete_apps = ['app'] \ No newline at end of file diff --git a/rpkid/rpki/gui/app/migrations/__init__.py b/rpkid/rpki/gui/app/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/rpkid/rpki/gui/app/models.py b/rpkid/rpki/gui/app/models.py deleted file mode 100644 index 7d643fdc..00000000 --- a/rpkid/rpki/gui/app/models.py +++ /dev/null @@ -1,420 +0,0 @@ -# Copyright (C) 2010 SPARTA, Inc. dba Cobham Analytic Solutions -# Copyright (C) 2012 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -__version__ = '$Id$' - -from django.db import models -from django.contrib.auth.models import User -from django.core.mail import send_mail - -import rpki.resource_set -import rpki.exceptions -import rpki.irdb.models -import rpki.gui.models -import rpki.gui.routeview.models -from south.modelsinspector import add_introspection_rules - - -class TelephoneField(models.CharField): - def __init__(self, **kwargs): - if 'max_length' not in kwargs: - kwargs['max_length'] = 40 - models.CharField.__init__(self, **kwargs) - -add_introspection_rules([], ['^rpki\.gui\.app\.models\.TelephoneField']) - - -class Parent(rpki.irdb.models.Parent): - """proxy model for irdb Parent""" - - def __unicode__(self): - return u"%s's parent %s" % (self.issuer.handle, self.handle) - - @models.permalink - def get_absolute_url(self): - return ('rpki.gui.app.views.parent_detail', [str(self.pk)]) - - class Meta: - proxy = True - - -class Child(rpki.irdb.models.Child): - """proxy model for irdb Child""" - - def __unicode__(self): - return u"%s's child %s" % (self.issuer.handle, self.handle) - - @models.permalink - def get_absolute_url(self): - return ('rpki.gui.app.views.child_detail', [str(self.pk)]) - - class Meta: - proxy = True - verbose_name_plural = 'children' - - -class ChildASN(rpki.irdb.models.ChildASN): - """Proxy model for irdb ChildASN.""" - - class Meta: - proxy = True - - def __unicode__(self): - return u'AS%s' % self.as_resource_range() - - -class ChildNet(rpki.irdb.models.ChildNet): - """Proxy model for irdb ChildNet.""" - - class Meta: - proxy = True - - def __unicode__(self): - return u'%s' % self.as_resource_range() - - -class Alert(models.Model): - """Stores alert messages intended to be consumed by the user.""" - - INFO = 0 - WARNING = 1 - ERROR = 2 - - SEVERITY_CHOICES = ( - (INFO, 'info'), - (WARNING, 'warning'), - (ERROR, 'error'), - ) - - conf = models.ForeignKey('Conf', related_name='alerts') - severity = models.SmallIntegerField(choices=SEVERITY_CHOICES, default=INFO) - 
when = models.DateTimeField(auto_now_add=True) - seen = models.BooleanField(default=False) - subject = models.CharField(max_length=66) - text = models.TextField() - - @models.permalink - def get_absolute_url(self): - return ('alert-detail', [str(self.pk)]) - - -class Conf(rpki.irdb.models.ResourceHolderCA): - """This is the center of the universe, also known as a place to - have a handle on a resource-holding entity. It's the - in the rpkid schema. - - """ - @property - def parents(self): - """Simulates irdb.models.Parent.objects, but returns app.models.Parent - proxy objects. - - """ - return Parent.objects.filter(issuer=self) - - @property - def children(self): - """Simulates irdb.models.Child.objects, but returns app.models.Child - proxy objects. - - """ - return Child.objects.filter(issuer=self) - - @property - def ghostbusters(self): - return GhostbusterRequest.objects.filter(issuer=self) - - @property - def repositories(self): - return Repository.objects.filter(issuer=self) - - @property - def roas(self): - return ROARequest.objects.filter(issuer=self) - - @property - def routes(self): - """Return all IPv4 routes covered by RPKI certs issued to this resource - holder. - - """ - # build a Q filter to select all RouteOrigin objects covered by - # prefixes in the resource holder's certificates - q = models.Q() - for p in ResourceRangeAddressV4.objects.filter(cert__conf=self): - q |= models.Q(prefix_min__gte=p.prefix_min, - prefix_max__lte=p.prefix_max) - return RouteOrigin.objects.filter(q) - - @property - def routes_v6(self): - """Return all IPv6 routes covered by RPKI certs issued to this resource - holder. 
- - """ - # build a Q filter to select all RouteOrigin objects covered by - # prefixes in the resource holder's certificates - q = models.Q() - for p in ResourceRangeAddressV6.objects.filter(cert__conf=self): - q |= models.Q(prefix_min__gte=p.prefix_min, - prefix_max__lte=p.prefix_max) - return RouteOriginV6.objects.filter(q) - - def send_alert(self, subject, message, from_email, severity=Alert.INFO): - """Store an alert for this resource holder.""" - self.alerts.create(subject=subject, text=message, severity=severity) - - send_mail( - subject=subject, - message=message, - from_email=from_email, - recipient_list=self.email_list - ) - - @property - def email_list(self): - """Return a list of the contact emails for this resource holder. - - Contact emails are extract from any ghostbuster requests, and any - linked user accounts. - - """ - notify_emails = [gbr.email_address for gbr in self.ghostbusters if gbr.email_address] - notify_emails.extend( - [acl.user.email for acl in ConfACL.objects.filter(conf=self) if acl.user.email] - ) - return notify_emails - - def clear_alerts(self): - self.alerts.all().delete() - - - class Meta: - proxy = True - - -class ResourceCert(models.Model): - """Represents a resource certificate. - - This model is used to cache the output of . - - """ - - # Handle to which this cert was issued - conf = models.ForeignKey(Conf, related_name='certs') - - # The parent that issued the cert. This field is marked null=True because - # the root has no parent - parent = models.ForeignKey(Parent, related_name='certs', null=True) - - # certificate validity period - not_before = models.DateTimeField() - not_after = models.DateTimeField() - - # Locator for this object. 
Used to look up the validation status, expiry - # of ancestor certs in cacheview - uri = models.CharField(max_length=255) - - def __unicode__(self): - if self.parent: - return u"%s's cert from %s" % (self.conf.handle, - self.parent.handle) - else: - return u"%s's root cert" % self.conf.handle - - def get_cert_chain(self): - """Return a list containing the complete certificate chain for this - certificate.""" - cert = self - x = [cert] - while cert.issuer: - cert = cert.issuer - x.append(cert) - x.reverse() - return x - cert_chain = property(get_cert_chain) - - -class ResourceRangeAddressV4(rpki.gui.models.PrefixV4): - cert = models.ForeignKey(ResourceCert, related_name='address_ranges') - - -class ResourceRangeAddressV6(rpki.gui.models.PrefixV6): - cert = models.ForeignKey(ResourceCert, related_name='address_ranges_v6') - - -class ResourceRangeAS(rpki.gui.models.ASN): - cert = models.ForeignKey(ResourceCert, related_name='asn_ranges') - - -class ROARequest(rpki.irdb.models.ROARequest): - class Meta: - proxy = True - - def __unicode__(self): - return u"%s's ROA request for AS%d" % (self.issuer.handle, self.asn) - - @models.permalink - def get_absolute_url(self): - return ('rpki.gui.app.views.roa_detail', [str(self.pk)]) - - @property - def routes(self): - "Return all IPv4 routes covered by this roa prefix." - # this assumes one prefix per ROA - rng = self.prefixes.filter(version=4)[0].as_resource_range() - return rpki.gui.routeview.models.RouteOrigin.objects.filter( - prefix_min__gte=rng.min, - prefix_max__lte=rng.max - ) - - @property - def routes_v6(self): - "Return all IPv6 routes covered by this roa prefix." 
- # this assumes one prefix per ROA - rng = self.prefixes.filter(version=6)[0].as_resource_range() - return rpki.gui.routeview.models.RouteOriginV6.objects.filter( - prefix_min__gte=rng.min, - prefix_max__lte=rng.max - ) - - -class ROARequestPrefix(rpki.irdb.models.ROARequestPrefix): - class Meta: - proxy = True - - def __unicode__(self): - return u'ROA Request Prefix %s' % str(self.as_roa_prefix()) - - -class GhostbusterRequest(rpki.irdb.models.GhostbusterRequest): - """ - Stores the information require to fill out a vCard entry to - populate a ghostbusters record. - - This model is inherited from the irdb GhostBusterRequest model so - that the broken out fields can be included for ease of editing. - """ - - full_name = models.CharField(max_length=40) - - # components of the vCard N type - family_name = models.CharField(max_length=20) - given_name = models.CharField(max_length=20) - additional_name = models.CharField(max_length=20, blank=True, null=True) - honorific_prefix = models.CharField(max_length=10, blank=True, null=True) - honorific_suffix = models.CharField(max_length=10, blank=True, null=True) - - email_address = models.EmailField(blank=True, null=True) - organization = models.CharField(blank=True, null=True, max_length=255) - telephone = TelephoneField(blank=True, null=True) - - # elements of the ADR type - box = models.CharField(verbose_name='P.O. 
Box', blank=True, null=True, - max_length=40) - extended = models.CharField(blank=True, null=True, max_length=255) - street = models.CharField(blank=True, null=True, max_length=255) - city = models.CharField(blank=True, null=True, max_length=40) - region = models.CharField(blank=True, null=True, max_length=40, - help_text='state or province') - code = models.CharField(verbose_name='Postal Code', blank=True, null=True, - max_length=40) - country = models.CharField(blank=True, null=True, max_length=40) - - def __unicode__(self): - return u"%s's GBR: %s" % (self.issuer.handle, self.full_name) - - @models.permalink - def get_absolute_url(self): - return ('gbr-detail', [str(self.pk)]) - - class Meta: - ordering = ('family_name', 'given_name') - - -class Timestamp(models.Model): - """Model to hold metadata about the collection of external data. - - This model is a hash table mapping a timestamp name to the - timestamp value. All timestamps values are in UTC. - - The utility function rpki.gui.app.timestmap.update(name) should be used to - set timestamps rather than updating this model directly.""" - - name = models.CharField(max_length=30, primary_key=True) - ts = models.DateTimeField(null=False) - - def __unicode__(self): - return '%s: %s' % (self.name, self.ts) - - -class Repository(rpki.irdb.models.Repository): - class Meta: - proxy = True - verbose_name = 'Repository' - verbose_name_plural = 'Repositories' - - @models.permalink - def get_absolute_url(self): - return ('rpki.gui.app.views.repository_detail', [str(self.pk)]) - - def __unicode__(self): - return "%s's repository %s" % (self.issuer.handle, self.handle) - - -class Client(rpki.irdb.models.Client): - "Proxy model for pubd clients." 
- - class Meta: - proxy = True - verbose_name = 'Client' - - @models.permalink - def get_absolute_url(self): - return ('rpki.gui.app.views.client_detail', [str(self.pk)]) - - def __unicode__(self): - return self.handle - - -class RouteOrigin(rpki.gui.routeview.models.RouteOrigin): - class Meta: - proxy = True - - @models.permalink - def get_absolute_url(self): - return ('rpki.gui.app.views.route_detail', [str(self.pk)]) - - -class RouteOriginV6(rpki.gui.routeview.models.RouteOriginV6): - class Meta: - proxy = True - - @models.permalink - def get_absolute_url(self): - return ('rpki.gui.app.views.route_detail', [str(self.pk)]) - - -class ConfACL(models.Model): - """Stores access control for which users are allowed to manage a given - resource handle. - - """ - - conf = models.ForeignKey(Conf) - user = models.ForeignKey(User) - - class Meta: - unique_together = (('user', 'conf')) diff --git a/rpkid/rpki/gui/app/range_list.py b/rpkid/rpki/gui/app/range_list.py deleted file mode 100755 index 21fd1f29..00000000 --- a/rpkid/rpki/gui/app/range_list.py +++ /dev/null @@ -1,252 +0,0 @@ -# Copyright (C) 2012 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -__version__ = '$Id$' - -import bisect -import unittest - - -class RangeList(list): - """A sorted list of ranges, which automatically merges adjacent ranges. - - Items in the list are expected to have ".min" and ".max" attributes.""" - - def __init__(self, ini=None): - list.__init__(self) - if ini: - self.extend(ini) - - def append(self, v): - keys = [x.min for x in self] - - # lower bound - i = bisect.bisect_left(keys, v.min) - - # upper bound - j = bisect.bisect_right(keys, v.max, lo=i) - - # if the max value for the previous item is greater than v.min, include - # the previous item in the range to replace and use its min value. - # also include the previous item if the max value is 1 less than the - # min value for the inserted item - if i > 0 and self[i - 1].max >= v.min - 1: - i = i - 1 - vmin = self[i].min - else: - vmin = v.min - - # if the max value for the previous item is greater than the max value - # for the new item, use the previous item's max - if j > 0 and self[j - 1].max > v.max: - vmax = self[j - 1].max - else: - vmax = v.max - - # if the max value for the new item is 1 less than the min value for - # the next item, combine into a single item - if j < len(self) and vmax + 1 == self[j].min: - vmax = self[j].max - j = j + 1 - - # replace the range with a new object covering the entire range - self[i:j] = [v.__class__(vmin, vmax)] - - def extend(self, args): - for x in args: - self.append(x) - - def difference(self, other): - """Return a RangeList object which contains ranges in this object which - are not in "other".""" - it = iter(other) - - try: - cur = it.next() - except StopIteration: - return self - - r = RangeList() - - for x in self: - xmin = x.min - - def V(v): - """convert the integer value to the appropriate type for this - range""" - return x.__class__.datum_type(v) - - try: - while xmin <= x.max: - if xmin < cur.min: - r.append(x.__class__(V(xmin), - V(min(x.max, cur.min - 1)))) - xmin = cur.max + 1 - elif xmin == cur.min: - xmin = 
cur.max + 1 - else: # xmin > cur.min - if xmin <= cur.max: - xmin = cur.max + 1 - else: # xmin > cur.max - cur = it.next() - - except StopIteration: - r.append(x.__class__(V(xmin), x.max)) - - return r - - -class TestRangeList(unittest.TestCase): - class MinMax(object): - datum_type = int - - def __init__(self, range_min, range_max): - self.min = range_min - self.max = range_max - - def __str__(self): - return '(%d, %d)' % (self.min, self.max) - - def __repr__(self): - return '' % (self.min, self.max) - - def __eq__(self, other): - return self.min == other.min and self.max == other.max - - def setUp(self): - self.v1 = TestRangeList.MinMax(1, 2) - self.v2 = TestRangeList.MinMax(4, 5) - self.v3 = TestRangeList.MinMax(7, 8) - self.v4 = TestRangeList.MinMax(3, 4) - self.v5 = TestRangeList.MinMax(2, 3) - self.v6 = TestRangeList.MinMax(1, 10) - - def test_empty_append(self): - s = RangeList() - s.append(self.v1) - self.assertTrue(len(s) == 1) - self.assertEqual(s[0], self.v1) - - def test_no_overlap(self): - s = RangeList() - s.append(self.v1) - s.append(self.v2) - self.assertTrue(len(s) == 2) - self.assertEqual(s[0], self.v1) - self.assertEqual(s[1], self.v2) - - def test_no_overlap_prepend(self): - s = RangeList() - s.append(self.v2) - s.append(self.v1) - self.assertTrue(len(s) == 2) - self.assertEqual(s[0], self.v1) - self.assertEqual(s[1], self.v2) - - def test_insert_middle(self): - s = RangeList() - s.append(self.v1) - s.append(self.v3) - s.append(self.v2) - self.assertTrue(len(s) == 3) - self.assertEqual(s[0], self.v1) - self.assertEqual(s[1], self.v2) - self.assertEqual(s[2], self.v3) - - def test_append_overlap(self): - s = RangeList() - s.append(self.v1) - s.append(self.v5) - self.assertTrue(len(s) == 1) - self.assertEqual(s[0], TestRangeList.MinMax(1, 3)) - - def test_combine_range(self): - s = RangeList() - s.append(self.v1) - s.append(self.v4) - self.assertTrue(len(s) == 1) - self.assertEqual(s[0], TestRangeList.MinMax(1, 4)) - - def 
test_append_subset(self): - s = RangeList() - s.append(self.v6) - s.append(self.v3) - self.assertTrue(len(s) == 1) - self.assertEqual(s[0], self.v6) - - def test_append_equal(self): - s = RangeList() - s.append(self.v6) - s.append(self.v6) - self.assertTrue(len(s) == 1) - self.assertEqual(s[0], self.v6) - - def test_prepend_combine(self): - s = RangeList() - s.append(self.v4) - s.append(self.v1) - self.assertTrue(len(s) == 1) - self.assertEqual(s[0], TestRangeList.MinMax(1, 4)) - - def test_append_aggregate(self): - s = RangeList() - s.append(self.v1) - s.append(self.v2) - s.append(self.v3) - s.append(self.v6) - self.assertTrue(len(s) == 1) - self.assertEqual(s[0], self.v6) - - def test_diff_empty(self): - s = RangeList() - s.append(self.v1) - self.assertEqual(s, s.difference([])) - - def test_diff_self(self): - s = RangeList() - s.append(self.v1) - self.assertEqual(s.difference(s), []) - - def test_diff_middle(self): - s1 = RangeList([self.v6]) - s2 = RangeList([self.v3]) - self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(1, 6), TestRangeList.MinMax(9, 10)])) - - def test_diff_overlap(self): - s1 = RangeList([self.v2]) - s2 = RangeList([self.v4]) - self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(5, 5)])) - - def test_diff_overlap2(self): - s1 = RangeList([self.v2]) - s2 = RangeList([self.v4]) - self.assertEqual(s2.difference(s1), RangeList([TestRangeList.MinMax(3, 3)])) - - def test_diff_multi(self): - s1 = RangeList([TestRangeList.MinMax(1, 2), TestRangeList.MinMax(4, 5)]) - s2 = RangeList([TestRangeList.MinMax(4, 4)]) - self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(1, 2), TestRangeList.MinMax(5, 5)])) - - def test_diff_multi_overlap(self): - s1 = RangeList([TestRangeList.MinMax(1, 2), TestRangeList.MinMax(3, 4)]) - s2 = RangeList([TestRangeList.MinMax(2, 3)]) - self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(1,1), TestRangeList.MinMax(4,4)])) - - def test_diff_multi_overlap2(self): 
- s1 = RangeList([TestRangeList.MinMax(1,2), TestRangeList.MinMax(3,4), TestRangeList.MinMax(6,7)]) - s2 = RangeList([TestRangeList.MinMax(2, 3), TestRangeList.MinMax(6, 6)]) - self.assertEqual(s1.difference(s2), RangeList([TestRangeList.MinMax(1,1), TestRangeList.MinMax(4,4), TestRangeList.MinMax(7,7)])) - -if __name__ == '__main__': - unittest.main() diff --git a/rpkid/rpki/gui/app/static/css/bootstrap.min.css b/rpkid/rpki/gui/app/static/css/bootstrap.min.css deleted file mode 100644 index c10c7f41..00000000 --- a/rpkid/rpki/gui/app/static/css/bootstrap.min.css +++ /dev/null @@ -1,9 +0,0 @@ -/*! - * Bootstrap v2.3.1 - * - * Copyright 2012 Twitter, Inc - * Licensed under the Apache License v2.0 - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Designed and built with all the love in the world @twitter by @mdo and @fat. - */.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;line-height:0;content:""}.clearfix:after{clear:both}.hide-text{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.input-block-level{display:block;width:100%;min-height:30px;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}audio:not([controls]){display:none}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}a:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}a:hover,a:active{outline:0}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}img{width:auto\9;height:auto;max-width:100%;vertical-align:middle;border:0;-ms-interpolation-mode:bicubic}#map_canvas img,.google-maps 
img{max-width:none}button,input,select,textarea{margin:0;font-size:100%;vertical-align:middle}button,input{*overflow:visible;line-height:normal}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}button,html input[type="button"],input[type="reset"],input[type="submit"]{cursor:pointer;-webkit-appearance:button}label,select,button,input[type="button"],input[type="reset"],input[type="submit"],input[type="radio"],input[type="checkbox"]{cursor:pointer}input[type="search"]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type="search"]::-webkit-search-decoration,input[type="search"]::-webkit-search-cancel-button{-webkit-appearance:none}textarea{overflow:auto;vertical-align:top}@media print{*{color:#000!important;text-shadow:none!important;background:transparent!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:" (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) ")"}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}}body{margin:0;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:14px;line-height:20px;color:#333;background-color:#fff}a{color:#08c;text-decoration:none}a:hover,a:focus{color:#005580;text-decoration:underline}.img-rounded{-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.img-polaroid{padding:4px;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);-webkit-box-shadow:0 1px 3px rgba(0,0,0,0.1);-moz-box-shadow:0 1px 3px rgba(0,0,0,0.1);box-shadow:0 1px 3px 
rgba(0,0,0,0.1)}.img-circle{-webkit-border-radius:500px;-moz-border-radius:500px;border-radius:500px}.row{margin-left:-20px;*zoom:1}.row:before,.row:after{display:table;line-height:0;content:""}.row:after{clear:both}[class*="span"]{float:left;min-height:1px;margin-left:20px}.container,.navbar-static-top .container,.navbar-fixed-top .container,.navbar-fixed-bottom .container{width:940px}.span12{width:940px}.span11{width:860px}.span10{width:780px}.span9{width:700px}.span8{width:620px}.span7{width:540px}.span6{width:460px}.span5{width:380px}.span4{width:300px}.span3{width:220px}.span2{width:140px}.span1{width:60px}.offset12{margin-left:980px}.offset11{margin-left:900px}.offset10{margin-left:820px}.offset9{margin-left:740px}.offset8{margin-left:660px}.offset7{margin-left:580px}.offset6{margin-left:500px}.offset5{margin-left:420px}.offset4{margin-left:340px}.offset3{margin-left:260px}.offset2{margin-left:180px}.offset1{margin-left:100px}.row-fluid{width:100%;*zoom:1}.row-fluid:before,.row-fluid:after{display:table;line-height:0;content:""}.row-fluid:after{clear:both}.row-fluid [class*="span"]{display:block;float:left;width:100%;min-height:30px;margin-left:2.127659574468085%;*margin-left:2.074468085106383%;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.row-fluid [class*="span"]:first-child{margin-left:0}.row-fluid .controls-row [class*="span"]+[class*="span"]{margin-left:2.127659574468085%}.row-fluid .span12{width:100%;*width:99.94680851063829%}.row-fluid .span11{width:91.48936170212765%;*width:91.43617021276594%}.row-fluid .span10{width:82.97872340425532%;*width:82.92553191489361%}.row-fluid .span9{width:74.46808510638297%;*width:74.41489361702126%}.row-fluid .span8{width:65.95744680851064%;*width:65.90425531914893%}.row-fluid .span7{width:57.44680851063829%;*width:57.39361702127659%}.row-fluid .span6{width:48.93617021276595%;*width:48.88297872340425%}.row-fluid .span5{width:40.42553191489362%;*width:40.37234042553192%}.row-fluid 
.span4{width:31.914893617021278%;*width:31.861702127659576%}.row-fluid .span3{width:23.404255319148934%;*width:23.351063829787233%}.row-fluid .span2{width:14.893617021276595%;*width:14.840425531914894%}.row-fluid .span1{width:6.382978723404255%;*width:6.329787234042553%}.row-fluid .offset12{margin-left:104.25531914893617%;*margin-left:104.14893617021275%}.row-fluid .offset12:first-child{margin-left:102.12765957446808%;*margin-left:102.02127659574467%}.row-fluid .offset11{margin-left:95.74468085106382%;*margin-left:95.6382978723404%}.row-fluid .offset11:first-child{margin-left:93.61702127659574%;*margin-left:93.51063829787232%}.row-fluid .offset10{margin-left:87.23404255319149%;*margin-left:87.12765957446807%}.row-fluid .offset10:first-child{margin-left:85.1063829787234%;*margin-left:84.99999999999999%}.row-fluid .offset9{margin-left:78.72340425531914%;*margin-left:78.61702127659572%}.row-fluid .offset9:first-child{margin-left:76.59574468085106%;*margin-left:76.48936170212764%}.row-fluid .offset8{margin-left:70.2127659574468%;*margin-left:70.10638297872339%}.row-fluid .offset8:first-child{margin-left:68.08510638297872%;*margin-left:67.9787234042553%}.row-fluid .offset7{margin-left:61.70212765957446%;*margin-left:61.59574468085106%}.row-fluid .offset7:first-child{margin-left:59.574468085106375%;*margin-left:59.46808510638297%}.row-fluid .offset6{margin-left:53.191489361702125%;*margin-left:53.085106382978715%}.row-fluid .offset6:first-child{margin-left:51.063829787234035%;*margin-left:50.95744680851063%}.row-fluid .offset5{margin-left:44.68085106382979%;*margin-left:44.57446808510638%}.row-fluid .offset5:first-child{margin-left:42.5531914893617%;*margin-left:42.4468085106383%}.row-fluid .offset4{margin-left:36.170212765957444%;*margin-left:36.06382978723405%}.row-fluid .offset4:first-child{margin-left:34.04255319148936%;*margin-left:33.93617021276596%}.row-fluid .offset3{margin-left:27.659574468085104%;*margin-left:27.5531914893617%}.row-fluid 
.offset3:first-child{margin-left:25.53191489361702%;*margin-left:25.425531914893618%}.row-fluid .offset2{margin-left:19.148936170212764%;*margin-left:19.04255319148936%}.row-fluid .offset2:first-child{margin-left:17.02127659574468%;*margin-left:16.914893617021278%}.row-fluid .offset1{margin-left:10.638297872340425%;*margin-left:10.53191489361702%}.row-fluid .offset1:first-child{margin-left:8.51063829787234%;*margin-left:8.404255319148938%}[class*="span"].hide,.row-fluid [class*="span"].hide{display:none}[class*="span"].pull-right,.row-fluid [class*="span"].pull-right{float:right}.container{margin-right:auto;margin-left:auto;*zoom:1}.container:before,.container:after{display:table;line-height:0;content:""}.container:after{clear:both}.container-fluid{padding-right:20px;padding-left:20px;*zoom:1}.container-fluid:before,.container-fluid:after{display:table;line-height:0;content:""}.container-fluid:after{clear:both}p{margin:0 0 10px}.lead{margin-bottom:20px;font-size:21px;font-weight:200;line-height:30px}small{font-size:85%}strong{font-weight:bold}em{font-style:italic}cite{font-style:normal}.muted{color:#999}a.muted:hover,a.muted:focus{color:#808080}.text-warning{color:#c09853}a.text-warning:hover,a.text-warning:focus{color:#a47e3c}.text-error{color:#b94a48}a.text-error:hover,a.text-error:focus{color:#953b39}.text-info{color:#3a87ad}a.text-info:hover,a.text-info:focus{color:#2d6987}.text-success{color:#468847}a.text-success:hover,a.text-success:focus{color:#356635}.text-left{text-align:left}.text-right{text-align:right}.text-center{text-align:center}h1,h2,h3,h4,h5,h6{margin:10px 0;font-family:inherit;font-weight:bold;line-height:20px;color:inherit;text-rendering:optimizelegibility}h1 small,h2 small,h3 small,h4 small,h5 small,h6 small{font-weight:normal;line-height:1;color:#999}h1,h2,h3{line-height:40px}h1{font-size:38.5px}h2{font-size:31.5px}h3{font-size:24.5px}h4{font-size:17.5px}h5{font-size:14px}h6{font-size:11.9px}h1 small{font-size:24.5px}h2 
small{font-size:17.5px}h3 small{font-size:14px}h4 small{font-size:14px}.page-header{padding-bottom:9px;margin:20px 0 30px;border-bottom:1px solid #eee}ul,ol{padding:0;margin:0 0 10px 25px}ul ul,ul ol,ol ol,ol ul{margin-bottom:0}li{line-height:20px}ul.unstyled,ol.unstyled{margin-left:0;list-style:none}ul.inline,ol.inline{margin-left:0;list-style:none}ul.inline>li,ol.inline>li{display:inline-block;*display:inline;padding-right:5px;padding-left:5px;*zoom:1}dl{margin-bottom:20px}dt,dd{line-height:20px}dt{font-weight:bold}dd{margin-left:10px}.dl-horizontal{*zoom:1}.dl-horizontal:before,.dl-horizontal:after{display:table;line-height:0;content:""}.dl-horizontal:after{clear:both}.dl-horizontal dt{float:left;width:160px;overflow:hidden;clear:left;text-align:right;text-overflow:ellipsis;white-space:nowrap}.dl-horizontal dd{margin-left:180px}hr{margin:20px 0;border:0;border-top:1px solid #eee;border-bottom:1px solid #fff}abbr[title],abbr[data-original-title]{cursor:help;border-bottom:1px dotted #999}abbr.initialism{font-size:90%;text-transform:uppercase}blockquote{padding:0 0 0 15px;margin:0 0 20px;border-left:5px solid #eee}blockquote p{margin-bottom:0;font-size:17.5px;font-weight:300;line-height:1.25}blockquote small{display:block;line-height:20px;color:#999}blockquote small:before{content:'\2014 \00A0'}blockquote.pull-right{float:right;padding-right:15px;padding-left:0;border-right:5px solid #eee;border-left:0}blockquote.pull-right p,blockquote.pull-right small{text-align:right}blockquote.pull-right small:before{content:''}blockquote.pull-right small:after{content:'\00A0 \2014'}q:before,q:after,blockquote:before,blockquote:after{content:""}address{display:block;margin-bottom:20px;font-style:normal;line-height:20px}code,pre{padding:0 3px 2px;font-family:Monaco,Menlo,Consolas,"Courier New",monospace;font-size:12px;color:#333;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}code{padding:2px 
4px;color:#d14;white-space:nowrap;background-color:#f7f7f9;border:1px solid #e1e1e8}pre{display:block;padding:9.5px;margin:0 0 10px;font-size:13px;line-height:20px;word-break:break-all;word-wrap:break-word;white-space:pre;white-space:pre-wrap;background-color:#f5f5f5;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.15);-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}pre.prettyprint{margin-bottom:20px}pre code{padding:0;color:inherit;white-space:pre;white-space:pre-wrap;background-color:transparent;border:0}.pre-scrollable{max-height:340px;overflow-y:scroll}form{margin:0 0 20px}fieldset{padding:0;margin:0;border:0}legend{display:block;width:100%;padding:0;margin-bottom:20px;font-size:21px;line-height:40px;color:#333;border:0;border-bottom:1px solid #e5e5e5}legend small{font-size:15px;color:#999}label,input,button,select,textarea{font-size:14px;font-weight:normal;line-height:20px}input,button,select,textarea{font-family:"Helvetica Neue",Helvetica,Arial,sans-serif}label{display:block;margin-bottom:5px}select,textarea,input[type="text"],input[type="password"],input[type="datetime"],input[type="datetime-local"],input[type="date"],input[type="month"],input[type="time"],input[type="week"],input[type="number"],input[type="email"],input[type="url"],input[type="search"],input[type="tel"],input[type="color"],.uneditable-input{display:inline-block;height:20px;padding:4px 6px;margin-bottom:10px;font-size:14px;line-height:20px;color:#555;vertical-align:middle;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}input,textarea,.uneditable-input{width:206px}textarea{height:auto}textarea,input[type="text"],input[type="password"],input[type="datetime"],input[type="datetime-local"],input[type="date"],input[type="month"],input[type="time"],input[type="week"],input[type="number"],input[type="email"],input[type="url"],input[type="search"],input[type="tel"],input[type="color"],.uneditable-input{background-color:#fff;border:1px solid 
#ccc;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-webkit-transition:border linear .2s,box-shadow linear .2s;-moz-transition:border linear .2s,box-shadow linear .2s;-o-transition:border linear .2s,box-shadow linear .2s;transition:border linear .2s,box-shadow linear .2s}textarea:focus,input[type="text"]:focus,input[type="password"]:focus,input[type="datetime"]:focus,input[type="datetime-local"]:focus,input[type="date"]:focus,input[type="month"]:focus,input[type="time"]:focus,input[type="week"]:focus,input[type="number"]:focus,input[type="email"]:focus,input[type="url"]:focus,input[type="search"]:focus,input[type="tel"]:focus,input[type="color"]:focus,.uneditable-input:focus{border-color:rgba(82,168,236,0.8);outline:0;outline:thin dotted \9;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 8px rgba(82,168,236,0.6)}input[type="radio"],input[type="checkbox"]{margin:4px 0 0;margin-top:1px \9;*margin-top:0;line-height:normal}input[type="file"],input[type="image"],input[type="submit"],input[type="reset"],input[type="button"],input[type="radio"],input[type="checkbox"]{width:auto}select,input[type="file"]{height:30px;*margin-top:4px;line-height:30px}select{width:220px;background-color:#fff;border:1px solid #ccc}select[multiple],select[size]{height:auto}select:focus,input[type="file"]:focus,input[type="radio"]:focus,input[type="checkbox"]:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.uneditable-input,.uneditable-textarea{color:#999;cursor:not-allowed;background-color:#fcfcfc;border-color:#ccc;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.025);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.025);box-shadow:inset 0 1px 2px 
rgba(0,0,0,0.025)}.uneditable-input{overflow:hidden;white-space:nowrap}.uneditable-textarea{width:auto;height:auto}input:-moz-placeholder,textarea:-moz-placeholder{color:#999}input:-ms-input-placeholder,textarea:-ms-input-placeholder{color:#999}input::-webkit-input-placeholder,textarea::-webkit-input-placeholder{color:#999}.radio,.checkbox{min-height:20px;padding-left:20px}.radio input[type="radio"],.checkbox input[type="checkbox"]{float:left;margin-left:-20px}.controls>.radio:first-child,.controls>.checkbox:first-child{padding-top:5px}.radio.inline,.checkbox.inline{display:inline-block;padding-top:5px;margin-bottom:0;vertical-align:middle}.radio.inline+.radio.inline,.checkbox.inline+.checkbox.inline{margin-left:10px}.input-mini{width:60px}.input-small{width:90px}.input-medium{width:150px}.input-large{width:210px}.input-xlarge{width:270px}.input-xxlarge{width:530px}input[class*="span"],select[class*="span"],textarea[class*="span"],.uneditable-input[class*="span"],.row-fluid input[class*="span"],.row-fluid select[class*="span"],.row-fluid textarea[class*="span"],.row-fluid .uneditable-input[class*="span"]{float:none;margin-left:0}.input-append input[class*="span"],.input-append .uneditable-input[class*="span"],.input-prepend input[class*="span"],.input-prepend .uneditable-input[class*="span"],.row-fluid input[class*="span"],.row-fluid select[class*="span"],.row-fluid textarea[class*="span"],.row-fluid .uneditable-input[class*="span"],.row-fluid .input-prepend [class*="span"],.row-fluid .input-append [class*="span"]{display:inline-block}input,textarea,.uneditable-input{margin-left:0}.controls-row 
[class*="span"]+[class*="span"]{margin-left:20px}input.span12,textarea.span12,.uneditable-input.span12{width:926px}input.span11,textarea.span11,.uneditable-input.span11{width:846px}input.span10,textarea.span10,.uneditable-input.span10{width:766px}input.span9,textarea.span9,.uneditable-input.span9{width:686px}input.span8,textarea.span8,.uneditable-input.span8{width:606px}input.span7,textarea.span7,.uneditable-input.span7{width:526px}input.span6,textarea.span6,.uneditable-input.span6{width:446px}input.span5,textarea.span5,.uneditable-input.span5{width:366px}input.span4,textarea.span4,.uneditable-input.span4{width:286px}input.span3,textarea.span3,.uneditable-input.span3{width:206px}input.span2,textarea.span2,.uneditable-input.span2{width:126px}input.span1,textarea.span1,.uneditable-input.span1{width:46px}.controls-row{*zoom:1}.controls-row:before,.controls-row:after{display:table;line-height:0;content:""}.controls-row:after{clear:both}.controls-row [class*="span"],.row-fluid .controls-row [class*="span"]{float:left}.controls-row .checkbox[class*="span"],.controls-row .radio[class*="span"]{padding-top:5px}input[disabled],select[disabled],textarea[disabled],input[readonly],select[readonly],textarea[readonly]{cursor:not-allowed;background-color:#eee}input[type="radio"][disabled],input[type="checkbox"][disabled],input[type="radio"][readonly],input[type="checkbox"][readonly]{background-color:transparent}.control-group.warning .control-label,.control-group.warning .help-block,.control-group.warning .help-inline{color:#c09853}.control-group.warning .checkbox,.control-group.warning .radio,.control-group.warning input,.control-group.warning select,.control-group.warning textarea{color:#c09853}.control-group.warning input,.control-group.warning select,.control-group.warning textarea{border-color:#c09853;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.warning 
input:focus,.control-group.warning select:focus,.control-group.warning textarea:focus{border-color:#a47e3c;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #dbc59e}.control-group.warning .input-prepend .add-on,.control-group.warning .input-append .add-on{color:#c09853;background-color:#fcf8e3;border-color:#c09853}.control-group.error .control-label,.control-group.error .help-block,.control-group.error .help-inline{color:#b94a48}.control-group.error .checkbox,.control-group.error .radio,.control-group.error input,.control-group.error select,.control-group.error textarea{color:#b94a48}.control-group.error input,.control-group.error select,.control-group.error textarea{border-color:#b94a48;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.error input:focus,.control-group.error select:focus,.control-group.error textarea:focus{border-color:#953b39;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #d59392}.control-group.error .input-prepend .add-on,.control-group.error .input-append .add-on{color:#b94a48;background-color:#f2dede;border-color:#b94a48}.control-group.success .control-label,.control-group.success .help-block,.control-group.success .help-inline{color:#468847}.control-group.success .checkbox,.control-group.success .radio,.control-group.success input,.control-group.success select,.control-group.success textarea{color:#468847}.control-group.success input,.control-group.success select,.control-group.success textarea{border-color:#468847;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px 
rgba(0,0,0,0.075)}.control-group.success input:focus,.control-group.success select:focus,.control-group.success textarea:focus{border-color:#356635;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7aba7b}.control-group.success .input-prepend .add-on,.control-group.success .input-append .add-on{color:#468847;background-color:#dff0d8;border-color:#468847}.control-group.info .control-label,.control-group.info .help-block,.control-group.info .help-inline{color:#3a87ad}.control-group.info .checkbox,.control-group.info .radio,.control-group.info input,.control-group.info select,.control-group.info textarea{color:#3a87ad}.control-group.info input,.control-group.info select,.control-group.info textarea{border-color:#3a87ad;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075)}.control-group.info input:focus,.control-group.info select:focus,.control-group.info textarea:focus{border-color:#2d6987;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7ab5d3;-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7ab5d3;box-shadow:inset 0 1px 1px rgba(0,0,0,0.075),0 0 6px #7ab5d3}.control-group.info .input-prepend .add-on,.control-group.info .input-append .add-on{color:#3a87ad;background-color:#d9edf7;border-color:#3a87ad}input:focus:invalid,textarea:focus:invalid,select:focus:invalid{color:#b94a48;border-color:#ee5f5b}input:focus:invalid:focus,textarea:focus:invalid:focus,select:focus:invalid:focus{border-color:#e9322d;-webkit-box-shadow:0 0 6px #f8b9b7;-moz-box-shadow:0 0 6px #f8b9b7;box-shadow:0 0 6px #f8b9b7}.form-actions{padding:19px 20px 20px;margin-top:20px;margin-bottom:20px;background-color:#f5f5f5;border-top:1px solid 
#e5e5e5;*zoom:1}.form-actions:before,.form-actions:after{display:table;line-height:0;content:""}.form-actions:after{clear:both}.help-block,.help-inline{color:#595959}.help-block{display:block;margin-bottom:10px}.help-inline{display:inline-block;*display:inline;padding-left:5px;vertical-align:middle;*zoom:1}.input-append,.input-prepend{display:inline-block;margin-bottom:10px;font-size:0;white-space:nowrap;vertical-align:middle}.input-append input,.input-prepend input,.input-append select,.input-prepend select,.input-append .uneditable-input,.input-prepend .uneditable-input,.input-append .dropdown-menu,.input-prepend .dropdown-menu,.input-append .popover,.input-prepend .popover{font-size:14px}.input-append input,.input-prepend input,.input-append select,.input-prepend select,.input-append .uneditable-input,.input-prepend .uneditable-input{position:relative;margin-bottom:0;*margin-left:0;vertical-align:top;-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-append input:focus,.input-prepend input:focus,.input-append select:focus,.input-prepend select:focus,.input-append .uneditable-input:focus,.input-prepend .uneditable-input:focus{z-index:2}.input-append .add-on,.input-prepend .add-on{display:inline-block;width:auto;height:20px;min-width:16px;padding:4px 5px;font-size:14px;font-weight:normal;line-height:20px;text-align:center;text-shadow:0 1px 0 #fff;background-color:#eee;border:1px solid #ccc}.input-append .add-on,.input-prepend .add-on,.input-append .btn,.input-prepend .btn,.input-append .btn-group>.dropdown-toggle,.input-prepend .btn-group>.dropdown-toggle{vertical-align:top;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.input-append .active,.input-prepend .active{background-color:#a9dba9;border-color:#46a546}.input-prepend .add-on,.input-prepend .btn{margin-right:-1px}.input-prepend .add-on:first-child,.input-prepend .btn:first-child{-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 
4px;border-radius:4px 0 0 4px}.input-append input,.input-append select,.input-append .uneditable-input{-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 4px;border-radius:4px 0 0 4px}.input-append input+.btn-group .btn:last-child,.input-append select+.btn-group .btn:last-child,.input-append .uneditable-input+.btn-group .btn:last-child{-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-append .add-on,.input-append .btn,.input-append .btn-group{margin-left:-1px}.input-append .add-on:last-child,.input-append .btn:last-child,.input-append .btn-group:last-child>.dropdown-toggle{-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-prepend.input-append input,.input-prepend.input-append select,.input-prepend.input-append .uneditable-input{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.input-prepend.input-append input+.btn-group .btn,.input-prepend.input-append select+.btn-group .btn,.input-prepend.input-append .uneditable-input+.btn-group .btn{-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-prepend.input-append .add-on:first-child,.input-prepend.input-append .btn:first-child{margin-right:-1px;-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 4px;border-radius:4px 0 0 4px}.input-prepend.input-append .add-on:last-child,.input-prepend.input-append .btn:last-child{margin-left:-1px;-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.input-prepend.input-append .btn-group:first-child{margin-left:0}input.search-query{padding-right:14px;padding-right:4px \9;padding-left:14px;padding-left:4px \9;margin-bottom:0;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.form-search .input-append .search-query,.form-search .input-prepend .search-query{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.form-search .input-append 
.search-query{-webkit-border-radius:14px 0 0 14px;-moz-border-radius:14px 0 0 14px;border-radius:14px 0 0 14px}.form-search .input-append .btn{-webkit-border-radius:0 14px 14px 0;-moz-border-radius:0 14px 14px 0;border-radius:0 14px 14px 0}.form-search .input-prepend .search-query{-webkit-border-radius:0 14px 14px 0;-moz-border-radius:0 14px 14px 0;border-radius:0 14px 14px 0}.form-search .input-prepend .btn{-webkit-border-radius:14px 0 0 14px;-moz-border-radius:14px 0 0 14px;border-radius:14px 0 0 14px}.form-search input,.form-inline input,.form-horizontal input,.form-search textarea,.form-inline textarea,.form-horizontal textarea,.form-search select,.form-inline select,.form-horizontal select,.form-search .help-inline,.form-inline .help-inline,.form-horizontal .help-inline,.form-search .uneditable-input,.form-inline .uneditable-input,.form-horizontal .uneditable-input,.form-search .input-prepend,.form-inline .input-prepend,.form-horizontal .input-prepend,.form-search .input-append,.form-inline .input-append,.form-horizontal .input-append{display:inline-block;*display:inline;margin-bottom:0;vertical-align:middle;*zoom:1}.form-search .hide,.form-inline .hide,.form-horizontal .hide{display:none}.form-search label,.form-inline label,.form-search .btn-group,.form-inline .btn-group{display:inline-block}.form-search .input-append,.form-inline .input-append,.form-search .input-prepend,.form-inline .input-prepend{margin-bottom:0}.form-search .radio,.form-search .checkbox,.form-inline .radio,.form-inline .checkbox{padding-left:0;margin-bottom:0;vertical-align:middle}.form-search .radio input[type="radio"],.form-search .checkbox input[type="checkbox"],.form-inline .radio input[type="radio"],.form-inline .checkbox input[type="checkbox"]{float:left;margin-right:3px;margin-left:0}.control-group{margin-bottom:10px}legend+.control-group{margin-top:20px;-webkit-margin-top-collapse:separate}.form-horizontal .control-group{margin-bottom:20px;*zoom:1}.form-horizontal 
.control-group:before,.form-horizontal .control-group:after{display:table;line-height:0;content:""}.form-horizontal .control-group:after{clear:both}.form-horizontal .control-label{float:left;width:160px;padding-top:5px;text-align:right}.form-horizontal .controls{*display:inline-block;*padding-left:20px;margin-left:180px;*margin-left:0}.form-horizontal .controls:first-child{*padding-left:180px}.form-horizontal .help-block{margin-bottom:0}.form-horizontal input+.help-block,.form-horizontal select+.help-block,.form-horizontal textarea+.help-block,.form-horizontal .uneditable-input+.help-block,.form-horizontal .input-prepend+.help-block,.form-horizontal .input-append+.help-block{margin-top:10px}.form-horizontal .form-actions{padding-left:180px}table{max-width:100%;background-color:transparent;border-collapse:collapse;border-spacing:0}.table{width:100%;margin-bottom:20px}.table th,.table td{padding:8px;line-height:20px;text-align:left;vertical-align:top;border-top:1px solid #ddd}.table th{font-weight:bold}.table thead th{vertical-align:bottom}.table caption+thead tr:first-child th,.table caption+thead tr:first-child td,.table colgroup+thead tr:first-child th,.table colgroup+thead tr:first-child td,.table thead:first-child tr:first-child th,.table thead:first-child tr:first-child td{border-top:0}.table tbody+tbody{border-top:2px solid #ddd}.table .table{background-color:#fff}.table-condensed th,.table-condensed td{padding:4px 5px}.table-bordered{border:1px solid #ddd;border-collapse:separate;*border-collapse:collapse;border-left:0;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.table-bordered th,.table-bordered td{border-left:1px solid #ddd}.table-bordered caption+thead tr:first-child th,.table-bordered caption+tbody tr:first-child th,.table-bordered caption+tbody tr:first-child td,.table-bordered colgroup+thead tr:first-child th,.table-bordered colgroup+tbody tr:first-child th,.table-bordered colgroup+tbody tr:first-child td,.table-bordered 
thead:first-child tr:first-child th,.table-bordered tbody:first-child tr:first-child th,.table-bordered tbody:first-child tr:first-child td{border-top:0}.table-bordered thead:first-child tr:first-child>th:first-child,.table-bordered tbody:first-child tr:first-child>td:first-child,.table-bordered tbody:first-child tr:first-child>th:first-child{-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topleft:4px}.table-bordered thead:first-child tr:first-child>th:last-child,.table-bordered tbody:first-child tr:first-child>td:last-child,.table-bordered tbody:first-child tr:first-child>th:last-child{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-moz-border-radius-topright:4px}.table-bordered thead:last-child tr:last-child>th:first-child,.table-bordered tbody:last-child tr:last-child>td:first-child,.table-bordered tbody:last-child tr:last-child>th:first-child,.table-bordered tfoot:last-child tr:last-child>td:first-child,.table-bordered tfoot:last-child tr:last-child>th:first-child{-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomleft:4px}.table-bordered thead:last-child tr:last-child>th:last-child,.table-bordered tbody:last-child tr:last-child>td:last-child,.table-bordered tbody:last-child tr:last-child>th:last-child,.table-bordered tfoot:last-child tr:last-child>td:last-child,.table-bordered tfoot:last-child tr:last-child>th:last-child{-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-moz-border-radius-bottomright:4px}.table-bordered tfoot+tbody:last-child tr:last-child td:first-child{-webkit-border-bottom-left-radius:0;border-bottom-left-radius:0;-moz-border-radius-bottomleft:0}.table-bordered tfoot+tbody:last-child tr:last-child td:last-child{-webkit-border-bottom-right-radius:0;border-bottom-right-radius:0;-moz-border-radius-bottomright:0}.table-bordered caption+thead tr:first-child th:first-child,.table-bordered caption+tbody tr:first-child 
td:first-child,.table-bordered colgroup+thead tr:first-child th:first-child,.table-bordered colgroup+tbody tr:first-child td:first-child{-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topleft:4px}.table-bordered caption+thead tr:first-child th:last-child,.table-bordered caption+tbody tr:first-child td:last-child,.table-bordered colgroup+thead tr:first-child th:last-child,.table-bordered colgroup+tbody tr:first-child td:last-child{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-moz-border-radius-topright:4px}.table-striped tbody>tr:nth-child(odd)>td,.table-striped tbody>tr:nth-child(odd)>th{background-color:#f9f9f9}.table-hover tbody tr:hover>td,.table-hover tbody tr:hover>th{background-color:#f5f5f5}table td[class*="span"],table th[class*="span"],.row-fluid table td[class*="span"],.row-fluid table th[class*="span"]{display:table-cell;float:none;margin-left:0}.table td.span1,.table th.span1{float:none;width:44px;margin-left:0}.table td.span2,.table th.span2{float:none;width:124px;margin-left:0}.table td.span3,.table th.span3{float:none;width:204px;margin-left:0}.table td.span4,.table th.span4{float:none;width:284px;margin-left:0}.table td.span5,.table th.span5{float:none;width:364px;margin-left:0}.table td.span6,.table th.span6{float:none;width:444px;margin-left:0}.table td.span7,.table th.span7{float:none;width:524px;margin-left:0}.table td.span8,.table th.span8{float:none;width:604px;margin-left:0}.table td.span9,.table th.span9{float:none;width:684px;margin-left:0}.table td.span10,.table th.span10{float:none;width:764px;margin-left:0}.table td.span11,.table th.span11{float:none;width:844px;margin-left:0}.table td.span12,.table th.span12{float:none;width:924px;margin-left:0}.table tbody tr.success>td{background-color:#dff0d8}.table tbody tr.error>td{background-color:#f2dede}.table tbody tr.warning>td{background-color:#fcf8e3}.table tbody tr.info>td{background-color:#d9edf7}.table-hover tbody 
tr.success:hover>td{background-color:#d0e9c6}.table-hover tbody tr.error:hover>td{background-color:#ebcccc}.table-hover tbody tr.warning:hover>td{background-color:#faf2cc}.table-hover tbody tr.info:hover>td{background-color:#c4e3f3}[class^="icon-"],[class*=" icon-"]{display:inline-block;width:14px;height:14px;margin-top:1px;*margin-right:.3em;line-height:14px;vertical-align:text-top;background-image:url("../img/glyphicons-halflings.png");background-position:14px 14px;background-repeat:no-repeat}.icon-white,.nav-pills>.active>a>[class^="icon-"],.nav-pills>.active>a>[class*=" icon-"],.nav-list>.active>a>[class^="icon-"],.nav-list>.active>a>[class*=" icon-"],.navbar-inverse .nav>.active>a>[class^="icon-"],.navbar-inverse .nav>.active>a>[class*=" icon-"],.dropdown-menu>li>a:hover>[class^="icon-"],.dropdown-menu>li>a:focus>[class^="icon-"],.dropdown-menu>li>a:hover>[class*=" icon-"],.dropdown-menu>li>a:focus>[class*=" icon-"],.dropdown-menu>.active>a>[class^="icon-"],.dropdown-menu>.active>a>[class*=" icon-"],.dropdown-submenu:hover>a>[class^="icon-"],.dropdown-submenu:focus>a>[class^="icon-"],.dropdown-submenu:hover>a>[class*=" icon-"],.dropdown-submenu:focus>a>[class*=" icon-"]{background-image:url("../img/glyphicons-halflings-white.png")}.icon-glass{background-position:0 0}.icon-music{background-position:-24px 0}.icon-search{background-position:-48px 0}.icon-envelope{background-position:-72px 0}.icon-heart{background-position:-96px 0}.icon-star{background-position:-120px 0}.icon-star-empty{background-position:-144px 0}.icon-user{background-position:-168px 0}.icon-film{background-position:-192px 0}.icon-th-large{background-position:-216px 0}.icon-th{background-position:-240px 0}.icon-th-list{background-position:-264px 0}.icon-ok{background-position:-288px 0}.icon-remove{background-position:-312px 0}.icon-zoom-in{background-position:-336px 0}.icon-zoom-out{background-position:-360px 0}.icon-off{background-position:-384px 0}.icon-signal{background-position:-408px 
0}.icon-cog{background-position:-432px 0}.icon-trash{background-position:-456px 0}.icon-home{background-position:0 -24px}.icon-file{background-position:-24px -24px}.icon-time{background-position:-48px -24px}.icon-road{background-position:-72px -24px}.icon-download-alt{background-position:-96px -24px}.icon-download{background-position:-120px -24px}.icon-upload{background-position:-144px -24px}.icon-inbox{background-position:-168px -24px}.icon-play-circle{background-position:-192px -24px}.icon-repeat{background-position:-216px -24px}.icon-refresh{background-position:-240px -24px}.icon-list-alt{background-position:-264px -24px}.icon-lock{background-position:-287px -24px}.icon-flag{background-position:-312px -24px}.icon-headphones{background-position:-336px -24px}.icon-volume-off{background-position:-360px -24px}.icon-volume-down{background-position:-384px -24px}.icon-volume-up{background-position:-408px -24px}.icon-qrcode{background-position:-432px -24px}.icon-barcode{background-position:-456px -24px}.icon-tag{background-position:0 -48px}.icon-tags{background-position:-25px -48px}.icon-book{background-position:-48px -48px}.icon-bookmark{background-position:-72px -48px}.icon-print{background-position:-96px -48px}.icon-camera{background-position:-120px -48px}.icon-font{background-position:-144px -48px}.icon-bold{background-position:-167px -48px}.icon-italic{background-position:-192px -48px}.icon-text-height{background-position:-216px -48px}.icon-text-width{background-position:-240px -48px}.icon-align-left{background-position:-264px -48px}.icon-align-center{background-position:-288px -48px}.icon-align-right{background-position:-312px -48px}.icon-align-justify{background-position:-336px -48px}.icon-list{background-position:-360px -48px}.icon-indent-left{background-position:-384px -48px}.icon-indent-right{background-position:-408px -48px}.icon-facetime-video{background-position:-432px -48px}.icon-picture{background-position:-456px -48px}.icon-pencil{background-position:0 
-72px}.icon-map-marker{background-position:-24px -72px}.icon-adjust{background-position:-48px -72px}.icon-tint{background-position:-72px -72px}.icon-edit{background-position:-96px -72px}.icon-share{background-position:-120px -72px}.icon-check{background-position:-144px -72px}.icon-move{background-position:-168px -72px}.icon-step-backward{background-position:-192px -72px}.icon-fast-backward{background-position:-216px -72px}.icon-backward{background-position:-240px -72px}.icon-play{background-position:-264px -72px}.icon-pause{background-position:-288px -72px}.icon-stop{background-position:-312px -72px}.icon-forward{background-position:-336px -72px}.icon-fast-forward{background-position:-360px -72px}.icon-step-forward{background-position:-384px -72px}.icon-eject{background-position:-408px -72px}.icon-chevron-left{background-position:-432px -72px}.icon-chevron-right{background-position:-456px -72px}.icon-plus-sign{background-position:0 -96px}.icon-minus-sign{background-position:-24px -96px}.icon-remove-sign{background-position:-48px -96px}.icon-ok-sign{background-position:-72px -96px}.icon-question-sign{background-position:-96px -96px}.icon-info-sign{background-position:-120px -96px}.icon-screenshot{background-position:-144px -96px}.icon-remove-circle{background-position:-168px -96px}.icon-ok-circle{background-position:-192px -96px}.icon-ban-circle{background-position:-216px -96px}.icon-arrow-left{background-position:-240px -96px}.icon-arrow-right{background-position:-264px -96px}.icon-arrow-up{background-position:-289px -96px}.icon-arrow-down{background-position:-312px -96px}.icon-share-alt{background-position:-336px -96px}.icon-resize-full{background-position:-360px -96px}.icon-resize-small{background-position:-384px -96px}.icon-plus{background-position:-408px -96px}.icon-minus{background-position:-433px -96px}.icon-asterisk{background-position:-456px -96px}.icon-exclamation-sign{background-position:0 -120px}.icon-gift{background-position:-24px 
-120px}.icon-leaf{background-position:-48px -120px}.icon-fire{background-position:-72px -120px}.icon-eye-open{background-position:-96px -120px}.icon-eye-close{background-position:-120px -120px}.icon-warning-sign{background-position:-144px -120px}.icon-plane{background-position:-168px -120px}.icon-calendar{background-position:-192px -120px}.icon-random{width:16px;background-position:-216px -120px}.icon-comment{background-position:-240px -120px}.icon-magnet{background-position:-264px -120px}.icon-chevron-up{background-position:-288px -120px}.icon-chevron-down{background-position:-313px -119px}.icon-retweet{background-position:-336px -120px}.icon-shopping-cart{background-position:-360px -120px}.icon-folder-close{width:16px;background-position:-384px -120px}.icon-folder-open{width:16px;background-position:-408px -120px}.icon-resize-vertical{background-position:-432px -119px}.icon-resize-horizontal{background-position:-456px -118px}.icon-hdd{background-position:0 -144px}.icon-bullhorn{background-position:-24px -144px}.icon-bell{background-position:-48px -144px}.icon-certificate{background-position:-72px -144px}.icon-thumbs-up{background-position:-96px -144px}.icon-thumbs-down{background-position:-120px -144px}.icon-hand-right{background-position:-144px -144px}.icon-hand-left{background-position:-168px -144px}.icon-hand-up{background-position:-192px -144px}.icon-hand-down{background-position:-216px -144px}.icon-circle-arrow-right{background-position:-240px -144px}.icon-circle-arrow-left{background-position:-264px -144px}.icon-circle-arrow-up{background-position:-288px -144px}.icon-circle-arrow-down{background-position:-312px -144px}.icon-globe{background-position:-336px -144px}.icon-wrench{background-position:-360px -144px}.icon-tasks{background-position:-384px -144px}.icon-filter{background-position:-408px -144px}.icon-briefcase{background-position:-432px -144px}.icon-fullscreen{background-position:-456px 
-144px}.dropup,.dropdown{position:relative}.dropdown-toggle{*margin-bottom:-3px}.dropdown-toggle:active,.open .dropdown-toggle{outline:0}.caret{display:inline-block;width:0;height:0;vertical-align:top;border-top:4px solid #000;border-right:4px solid transparent;border-left:4px solid transparent;content:""}.dropdown .caret{margin-top:8px;margin-left:2px}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:160px;padding:5px 0;margin:2px 0 0;list-style:none;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);*border-right-width:2px;*border-bottom-width:2px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);-moz-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2);-webkit-background-clip:padding-box;-moz-background-clip:padding;background-clip:padding-box}.dropdown-menu.pull-right{right:0;left:auto}.dropdown-menu .divider{*width:100%;height:1px;margin:9px 1px;*margin:-5px 0 5px;overflow:hidden;background-color:#e5e5e5;border-bottom:1px solid #fff}.dropdown-menu>li>a{display:block;padding:3px 20px;clear:both;font-weight:normal;line-height:20px;color:#333;white-space:nowrap}.dropdown-menu>li>a:hover,.dropdown-menu>li>a:focus,.dropdown-submenu:hover>a,.dropdown-submenu:focus>a{color:#fff;text-decoration:none;background-color:#0081c2;background-image:-moz-linear-gradient(top,#08c,#0077b3);background-image:-webkit-gradient(linear,0 0,0 100%,from(#08c),to(#0077b3));background-image:-webkit-linear-gradient(top,#08c,#0077b3);background-image:-o-linear-gradient(top,#08c,#0077b3);background-image:linear-gradient(to 
bottom,#08c,#0077b3);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0077b3',GradientType=0)}.dropdown-menu>.active>a,.dropdown-menu>.active>a:hover,.dropdown-menu>.active>a:focus{color:#fff;text-decoration:none;background-color:#0081c2;background-image:-moz-linear-gradient(top,#08c,#0077b3);background-image:-webkit-gradient(linear,0 0,0 100%,from(#08c),to(#0077b3));background-image:-webkit-linear-gradient(top,#08c,#0077b3);background-image:-o-linear-gradient(top,#08c,#0077b3);background-image:linear-gradient(to bottom,#08c,#0077b3);background-repeat:repeat-x;outline:0;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0077b3',GradientType=0)}.dropdown-menu>.disabled>a,.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{color:#999}.dropdown-menu>.disabled>a:hover,.dropdown-menu>.disabled>a:focus{text-decoration:none;cursor:default;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.open{*z-index:1000}.open>.dropdown-menu{display:block}.pull-right>.dropdown-menu{right:0;left:auto}.dropup .caret,.navbar-fixed-bottom .dropdown .caret{border-top:0;border-bottom:4px solid #000;content:""}.dropup .dropdown-menu,.navbar-fixed-bottom .dropdown .dropdown-menu{top:auto;bottom:100%;margin-bottom:1px}.dropdown-submenu{position:relative}.dropdown-submenu>.dropdown-menu{top:0;left:100%;margin-top:-6px;margin-left:-1px;-webkit-border-radius:0 6px 6px 6px;-moz-border-radius:0 6px 6px 6px;border-radius:0 6px 6px 6px}.dropdown-submenu:hover>.dropdown-menu{display:block}.dropup .dropdown-submenu>.dropdown-menu{top:auto;bottom:0;margin-top:0;margin-bottom:-2px;-webkit-border-radius:5px 5px 5px 0;-moz-border-radius:5px 5px 5px 0;border-radius:5px 5px 5px 
0}.dropdown-submenu>a:after{display:block;float:right;width:0;height:0;margin-top:5px;margin-right:-10px;border-color:transparent;border-left-color:#ccc;border-style:solid;border-width:5px 0 5px 5px;content:" "}.dropdown-submenu:hover>a:after{border-left-color:#fff}.dropdown-submenu.pull-left{float:none}.dropdown-submenu.pull-left>.dropdown-menu{left:-100%;margin-left:10px;-webkit-border-radius:6px 0 6px 6px;-moz-border-radius:6px 0 6px 6px;border-radius:6px 0 6px 6px}.dropdown .dropdown-menu .nav-header{padding-right:20px;padding-left:20px}.typeahead{z-index:1051;margin-top:2px;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.well{min-height:20px;padding:19px;margin-bottom:20px;background-color:#f5f5f5;border:1px solid #e3e3e3;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 1px 1px rgba(0,0,0,0.05);box-shadow:inset 0 1px 1px rgba(0,0,0,0.05)}.well blockquote{border-color:#ddd;border-color:rgba(0,0,0,0.15)}.well-large{padding:24px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.well-small{padding:9px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.fade{opacity:0;-webkit-transition:opacity .15s linear;-moz-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}.fade.in{opacity:1}.collapse{position:relative;height:0;overflow:hidden;-webkit-transition:height .35s ease;-moz-transition:height .35s ease;-o-transition:height .35s ease;transition:height .35s ease}.collapse.in{height:auto}.close{float:right;font-size:20px;font-weight:bold;line-height:20px;color:#000;text-shadow:0 1px 0 #fff;opacity:.2;filter:alpha(opacity=20)}.close:hover,.close:focus{color:#000;text-decoration:none;cursor:pointer;opacity:.4;filter:alpha(opacity=40)}button.close{padding:0;cursor:pointer;background:transparent;border:0;-webkit-appearance:none}.btn{display:inline-block;*display:inline;padding:4px 
12px;margin-bottom:0;*margin-left:.3em;font-size:14px;line-height:20px;color:#333;text-align:center;text-shadow:0 1px 1px rgba(255,255,255,0.75);vertical-align:middle;cursor:pointer;background-color:#f5f5f5;*background-color:#e6e6e6;background-image:-moz-linear-gradient(top,#fff,#e6e6e6);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#e6e6e6));background-image:-webkit-linear-gradient(top,#fff,#e6e6e6);background-image:-o-linear-gradient(top,#fff,#e6e6e6);background-image:linear-gradient(to bottom,#fff,#e6e6e6);background-repeat:repeat-x;border:1px solid #ccc;*border:0;border-color:#e6e6e6 #e6e6e6 #bfbfbf;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);border-bottom-color:#b3b3b3;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff',endColorstr='#ffe6e6e6',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);*zoom:1;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05)}.btn:hover,.btn:focus,.btn:active,.btn.active,.btn.disabled,.btn[disabled]{color:#333;background-color:#e6e6e6;*background-color:#d9d9d9}.btn:active,.btn.active{background-color:#ccc \9}.btn:first-child{*margin-left:0}.btn:hover,.btn:focus{color:#333;text-decoration:none;background-position:0 -15px;-webkit-transition:background-position .1s linear;-moz-transition:background-position .1s linear;-o-transition:background-position .1s linear;transition:background-position .1s linear}.btn:focus{outline:thin dotted #333;outline:5px auto -webkit-focus-ring-color;outline-offset:-2px}.btn.active,.btn:active{background-image:none;outline:0;-webkit-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px 
rgba(0,0,0,0.05);box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05)}.btn.disabled,.btn[disabled]{cursor:default;background-image:none;opacity:.65;filter:alpha(opacity=65);-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none}.btn-large{padding:11px 19px;font-size:17.5px;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.btn-large [class^="icon-"],.btn-large [class*=" icon-"]{margin-top:4px}.btn-small{padding:2px 10px;font-size:11.9px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.btn-small [class^="icon-"],.btn-small [class*=" icon-"]{margin-top:0}.btn-mini [class^="icon-"],.btn-mini [class*=" icon-"]{margin-top:-1px}.btn-mini{padding:0 6px;font-size:10.5px;-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.btn-block{display:block;width:100%;padding-right:0;padding-left:0;-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}.btn-block+.btn-block{margin-top:5px}input[type="submit"].btn-block,input[type="reset"].btn-block,input[type="button"].btn-block{width:100%}.btn-primary.active,.btn-warning.active,.btn-danger.active,.btn-success.active,.btn-info.active,.btn-inverse.active{color:rgba(255,255,255,0.75)}.btn-primary{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#006dcc;*background-color:#04c;background-image:-moz-linear-gradient(top,#08c,#04c);background-image:-webkit-gradient(linear,0 0,0 100%,from(#08c),to(#04c));background-image:-webkit-linear-gradient(top,#08c,#04c);background-image:-o-linear-gradient(top,#08c,#04c);background-image:linear-gradient(to bottom,#08c,#04c);background-repeat:repeat-x;border-color:#04c #04c #002a80;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) 
rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff0088cc',endColorstr='#ff0044cc',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-primary:hover,.btn-primary:focus,.btn-primary:active,.btn-primary.active,.btn-primary.disabled,.btn-primary[disabled]{color:#fff;background-color:#04c;*background-color:#003bb3}.btn-primary:active,.btn-primary.active{background-color:#039 \9}.btn-warning{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#faa732;*background-color:#f89406;background-image:-moz-linear-gradient(top,#fbb450,#f89406);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fbb450),to(#f89406));background-image:-webkit-linear-gradient(top,#fbb450,#f89406);background-image:-o-linear-gradient(top,#fbb450,#f89406);background-image:linear-gradient(to bottom,#fbb450,#f89406);background-repeat:repeat-x;border-color:#f89406 #f89406 #ad6704;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffbb450',endColorstr='#fff89406',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-warning:hover,.btn-warning:focus,.btn-warning:active,.btn-warning.active,.btn-warning.disabled,.btn-warning[disabled]{color:#fff;background-color:#f89406;*background-color:#df8505}.btn-warning:active,.btn-warning.active{background-color:#c67605 \9}.btn-danger{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#da4f49;*background-color:#bd362f;background-image:-moz-linear-gradient(top,#ee5f5b,#bd362f);background-image:-webkit-gradient(linear,0 0,0 100%,from(#ee5f5b),to(#bd362f));background-image:-webkit-linear-gradient(top,#ee5f5b,#bd362f);background-image:-o-linear-gradient(top,#ee5f5b,#bd362f);background-image:linear-gradient(to bottom,#ee5f5b,#bd362f);background-repeat:repeat-x;border-color:#bd362f #bd362f #802420;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) 
rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffee5f5b',endColorstr='#ffbd362f',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-danger:hover,.btn-danger:focus,.btn-danger:active,.btn-danger.active,.btn-danger.disabled,.btn-danger[disabled]{color:#fff;background-color:#bd362f;*background-color:#a9302a}.btn-danger:active,.btn-danger.active{background-color:#942a25 \9}.btn-success{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#5bb75b;*background-color:#51a351;background-image:-moz-linear-gradient(top,#62c462,#51a351);background-image:-webkit-gradient(linear,0 0,0 100%,from(#62c462),to(#51a351));background-image:-webkit-linear-gradient(top,#62c462,#51a351);background-image:-o-linear-gradient(top,#62c462,#51a351);background-image:linear-gradient(to bottom,#62c462,#51a351);background-repeat:repeat-x;border-color:#51a351 #51a351 #387038;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff62c462',endColorstr='#ff51a351',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-success:hover,.btn-success:focus,.btn-success:active,.btn-success.active,.btn-success.disabled,.btn-success[disabled]{color:#fff;background-color:#51a351;*background-color:#499249}.btn-success:active,.btn-success.active{background-color:#408140 \9}.btn-info{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#49afcd;*background-color:#2f96b4;background-image:-moz-linear-gradient(top,#5bc0de,#2f96b4);background-image:-webkit-gradient(linear,0 0,0 100%,from(#5bc0de),to(#2f96b4));background-image:-webkit-linear-gradient(top,#5bc0de,#2f96b4);background-image:-o-linear-gradient(top,#5bc0de,#2f96b4);background-image:linear-gradient(to bottom,#5bc0de,#2f96b4);background-repeat:repeat-x;border-color:#2f96b4 #2f96b4 #1f6377;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) 
rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de',endColorstr='#ff2f96b4',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-info:hover,.btn-info:focus,.btn-info:active,.btn-info.active,.btn-info.disabled,.btn-info[disabled]{color:#fff;background-color:#2f96b4;*background-color:#2a85a0}.btn-info:active,.btn-info.active{background-color:#24748c \9}.btn-inverse{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#363636;*background-color:#222;background-image:-moz-linear-gradient(top,#444,#222);background-image:-webkit-gradient(linear,0 0,0 100%,from(#444),to(#222));background-image:-webkit-linear-gradient(top,#444,#222);background-image:-o-linear-gradient(top,#444,#222);background-image:linear-gradient(to bottom,#444,#222);background-repeat:repeat-x;border-color:#222 #222 #000;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff444444',endColorstr='#ff222222',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.btn-inverse:hover,.btn-inverse:focus,.btn-inverse:active,.btn-inverse.active,.btn-inverse.disabled,.btn-inverse[disabled]{color:#fff;background-color:#222;*background-color:#151515}.btn-inverse:active,.btn-inverse.active{background-color:#080808 
\9}button.btn,input[type="submit"].btn{*padding-top:3px;*padding-bottom:3px}button.btn::-moz-focus-inner,input[type="submit"].btn::-moz-focus-inner{padding:0;border:0}button.btn.btn-large,input[type="submit"].btn.btn-large{*padding-top:7px;*padding-bottom:7px}button.btn.btn-small,input[type="submit"].btn.btn-small{*padding-top:3px;*padding-bottom:3px}button.btn.btn-mini,input[type="submit"].btn.btn-mini{*padding-top:1px;*padding-bottom:1px}.btn-link,.btn-link:active,.btn-link[disabled]{background-color:transparent;background-image:none;-webkit-box-shadow:none;-moz-box-shadow:none;box-shadow:none}.btn-link{color:#08c;cursor:pointer;border-color:transparent;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-link:hover,.btn-link:focus{color:#005580;text-decoration:underline;background-color:transparent}.btn-link[disabled]:hover,.btn-link[disabled]:focus{color:#333;text-decoration:none}.btn-group{position:relative;display:inline-block;*display:inline;*margin-left:.3em;font-size:0;white-space:nowrap;vertical-align:middle;*zoom:1}.btn-group:first-child{*margin-left:0}.btn-group+.btn-group{margin-left:5px}.btn-toolbar{margin-top:10px;margin-bottom:10px;font-size:0}.btn-toolbar>.btn+.btn,.btn-toolbar>.btn-group+.btn,.btn-toolbar>.btn+.btn-group{margin-left:5px}.btn-group>.btn{position:relative;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-group>.btn+.btn{margin-left:-1px}.btn-group>.btn,.btn-group>.dropdown-menu,.btn-group>.popover{font-size:14px}.btn-group>.btn-mini{font-size:10.5px}.btn-group>.btn-small{font-size:11.9px}.btn-group>.btn-large{font-size:17.5px}.btn-group>.btn:first-child{margin-left:0;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-bottomleft:4px;-moz-border-radius-topleft:4px}.btn-group>.btn:last-child,.btn-group>.dropdown-toggle{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-webkit-border-bottom-right-ra
dius:4px;border-bottom-right-radius:4px;-moz-border-radius-topright:4px;-moz-border-radius-bottomright:4px}.btn-group>.btn.large:first-child{margin-left:0;-webkit-border-bottom-left-radius:6px;border-bottom-left-radius:6px;-webkit-border-top-left-radius:6px;border-top-left-radius:6px;-moz-border-radius-bottomleft:6px;-moz-border-radius-topleft:6px}.btn-group>.btn.large:last-child,.btn-group>.large.dropdown-toggle{-webkit-border-top-right-radius:6px;border-top-right-radius:6px;-webkit-border-bottom-right-radius:6px;border-bottom-right-radius:6px;-moz-border-radius-topright:6px;-moz-border-radius-bottomright:6px}.btn-group>.btn:hover,.btn-group>.btn:focus,.btn-group>.btn:active,.btn-group>.btn.active{z-index:2}.btn-group .dropdown-toggle:active,.btn-group.open .dropdown-toggle{outline:0}.btn-group>.btn+.dropdown-toggle{*padding-top:5px;padding-right:8px;*padding-bottom:5px;padding-left:8px;-webkit-box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 1px 0 0 rgba(255,255,255,0.125),inset 0 1px 0 rgba(255,255,255,0.2),0 1px 2px rgba(0,0,0,0.05)}.btn-group>.btn-mini+.dropdown-toggle{*padding-top:2px;padding-right:5px;*padding-bottom:2px;padding-left:5px}.btn-group>.btn-small+.dropdown-toggle{*padding-top:5px;*padding-bottom:4px}.btn-group>.btn-large+.dropdown-toggle{*padding-top:7px;padding-right:12px;*padding-bottom:7px;padding-left:12px}.btn-group.open .dropdown-toggle{background-image:none;-webkit-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05);box-shadow:inset 0 2px 4px rgba(0,0,0,0.15),0 1px 2px rgba(0,0,0,0.05)}.btn-group.open .btn.dropdown-toggle{background-color:#e6e6e6}.btn-group.open .btn-primary.dropdown-toggle{background-color:#04c}.btn-group.open 
.btn-warning.dropdown-toggle{background-color:#f89406}.btn-group.open .btn-danger.dropdown-toggle{background-color:#bd362f}.btn-group.open .btn-success.dropdown-toggle{background-color:#51a351}.btn-group.open .btn-info.dropdown-toggle{background-color:#2f96b4}.btn-group.open .btn-inverse.dropdown-toggle{background-color:#222}.btn .caret{margin-top:8px;margin-left:0}.btn-large .caret{margin-top:6px}.btn-large .caret{border-top-width:5px;border-right-width:5px;border-left-width:5px}.btn-mini .caret,.btn-small .caret{margin-top:8px}.dropup .btn-large .caret{border-bottom-width:5px}.btn-primary .caret,.btn-warning .caret,.btn-danger .caret,.btn-info .caret,.btn-success .caret,.btn-inverse .caret{border-top-color:#fff;border-bottom-color:#fff}.btn-group-vertical{display:inline-block;*display:inline;*zoom:1}.btn-group-vertical>.btn{display:block;float:none;max-width:100%;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.btn-group-vertical>.btn+.btn{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:first-child{-webkit-border-radius:4px 4px 0 0;-moz-border-radius:4px 4px 0 0;border-radius:4px 4px 0 0}.btn-group-vertical>.btn:last-child{-webkit-border-radius:0 0 4px 4px;-moz-border-radius:0 0 4px 4px;border-radius:0 0 4px 4px}.btn-group-vertical>.btn-large:first-child{-webkit-border-radius:6px 6px 0 0;-moz-border-radius:6px 6px 0 0;border-radius:6px 6px 0 0}.btn-group-vertical>.btn-large:last-child{-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px}.alert{padding:8px 35px 8px 14px;margin-bottom:20px;text-shadow:0 1px 0 rgba(255,255,255,0.5);background-color:#fcf8e3;border:1px solid #fbeed5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.alert,.alert h4{color:#c09853}.alert h4{margin:0}.alert .close{position:relative;top:-2px;right:-21px;line-height:20px}.alert-success{color:#468847;background-color:#dff0d8;border-color:#d6e9c6}.alert-success 
h4{color:#468847}.alert-danger,.alert-error{color:#b94a48;background-color:#f2dede;border-color:#eed3d7}.alert-danger h4,.alert-error h4{color:#b94a48}.alert-info{color:#3a87ad;background-color:#d9edf7;border-color:#bce8f1}.alert-info h4{color:#3a87ad}.alert-block{padding-top:14px;padding-bottom:14px}.alert-block>p,.alert-block>ul{margin-bottom:0}.alert-block p+p{margin-top:5px}.nav{margin-bottom:20px;margin-left:0;list-style:none}.nav>li>a{display:block}.nav>li>a:hover,.nav>li>a:focus{text-decoration:none;background-color:#eee}.nav>li>a>img{max-width:none}.nav>.pull-right{float:right}.nav-header{display:block;padding:3px 15px;font-size:11px;font-weight:bold;line-height:20px;color:#999;text-shadow:0 1px 0 rgba(255,255,255,0.5);text-transform:uppercase}.nav li+.nav-header{margin-top:9px}.nav-list{padding-right:15px;padding-left:15px;margin-bottom:0}.nav-list>li>a,.nav-list .nav-header{margin-right:-15px;margin-left:-15px;text-shadow:0 1px 0 rgba(255,255,255,0.5)}.nav-list>li>a{padding:3px 15px}.nav-list>.active>a,.nav-list>.active>a:hover,.nav-list>.active>a:focus{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.2);background-color:#08c}.nav-list [class^="icon-"],.nav-list [class*=" icon-"]{margin-right:2px}.nav-list .divider{*width:100%;height:1px;margin:9px 1px;*margin:-5px 0 5px;overflow:hidden;background-color:#e5e5e5;border-bottom:1px solid #fff}.nav-tabs,.nav-pills{*zoom:1}.nav-tabs:before,.nav-pills:before,.nav-tabs:after,.nav-pills:after{display:table;line-height:0;content:""}.nav-tabs:after,.nav-pills:after{clear:both}.nav-tabs>li,.nav-pills>li{float:left}.nav-tabs>li>a,.nav-pills>li>a{padding-right:12px;padding-left:12px;margin-right:2px;line-height:14px}.nav-tabs{border-bottom:1px solid #ddd}.nav-tabs>li{margin-bottom:-1px}.nav-tabs>li>a{padding-top:8px;padding-bottom:8px;line-height:20px;border:1px solid transparent;-webkit-border-radius:4px 4px 0 0;-moz-border-radius:4px 4px 0 0;border-radius:4px 4px 0 
0}.nav-tabs>li>a:hover,.nav-tabs>li>a:focus{border-color:#eee #eee #ddd}.nav-tabs>.active>a,.nav-tabs>.active>a:hover,.nav-tabs>.active>a:focus{color:#555;cursor:default;background-color:#fff;border:1px solid #ddd;border-bottom-color:transparent}.nav-pills>li>a{padding-top:8px;padding-bottom:8px;margin-top:2px;margin-bottom:2px;-webkit-border-radius:5px;-moz-border-radius:5px;border-radius:5px}.nav-pills>.active>a,.nav-pills>.active>a:hover,.nav-pills>.active>a:focus{color:#fff;background-color:#08c}.nav-stacked>li{float:none}.nav-stacked>li>a{margin-right:0}.nav-tabs.nav-stacked{border-bottom:0}.nav-tabs.nav-stacked>li>a{border:1px solid #ddd;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.nav-tabs.nav-stacked>li:first-child>a{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-topright:4px;-moz-border-radius-topleft:4px}.nav-tabs.nav-stacked>li:last-child>a{-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-moz-border-radius-bottomright:4px;-moz-border-radius-bottomleft:4px}.nav-tabs.nav-stacked>li>a:hover,.nav-tabs.nav-stacked>li>a:focus{z-index:2;border-color:#ddd}.nav-pills.nav-stacked>li>a{margin-bottom:3px}.nav-pills.nav-stacked>li:last-child>a{margin-bottom:1px}.nav-tabs .dropdown-menu{-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px}.nav-pills .dropdown-menu{-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.nav .dropdown-toggle .caret{margin-top:6px;border-top-color:#08c;border-bottom-color:#08c}.nav .dropdown-toggle:hover .caret,.nav .dropdown-toggle:focus .caret{border-top-color:#005580;border-bottom-color:#005580}.nav-tabs .dropdown-toggle .caret{margin-top:8px}.nav .active .dropdown-toggle .caret{border-top-color:#fff;border-bottom-color:#fff}.nav-tabs .active .dropdown-toggle 
.caret{border-top-color:#555;border-bottom-color:#555}.nav>.dropdown.active>a:hover,.nav>.dropdown.active>a:focus{cursor:pointer}.nav-tabs .open .dropdown-toggle,.nav-pills .open .dropdown-toggle,.nav>li.dropdown.open.active>a:hover,.nav>li.dropdown.open.active>a:focus{color:#fff;background-color:#999;border-color:#999}.nav li.dropdown.open .caret,.nav li.dropdown.open.active .caret,.nav li.dropdown.open a:hover .caret,.nav li.dropdown.open a:focus .caret{border-top-color:#fff;border-bottom-color:#fff;opacity:1;filter:alpha(opacity=100)}.tabs-stacked .open>a:hover,.tabs-stacked .open>a:focus{border-color:#999}.tabbable{*zoom:1}.tabbable:before,.tabbable:after{display:table;line-height:0;content:""}.tabbable:after{clear:both}.tab-content{overflow:auto}.tabs-below>.nav-tabs,.tabs-right>.nav-tabs,.tabs-left>.nav-tabs{border-bottom:0}.tab-content>.tab-pane,.pill-content>.pill-pane{display:none}.tab-content>.active,.pill-content>.active{display:block}.tabs-below>.nav-tabs{border-top:1px solid #ddd}.tabs-below>.nav-tabs>li{margin-top:-1px;margin-bottom:0}.tabs-below>.nav-tabs>li>a{-webkit-border-radius:0 0 4px 4px;-moz-border-radius:0 0 4px 4px;border-radius:0 0 4px 4px}.tabs-below>.nav-tabs>li>a:hover,.tabs-below>.nav-tabs>li>a:focus{border-top-color:#ddd;border-bottom-color:transparent}.tabs-below>.nav-tabs>.active>a,.tabs-below>.nav-tabs>.active>a:hover,.tabs-below>.nav-tabs>.active>a:focus{border-color:transparent #ddd #ddd #ddd}.tabs-left>.nav-tabs>li,.tabs-right>.nav-tabs>li{float:none}.tabs-left>.nav-tabs>li>a,.tabs-right>.nav-tabs>li>a{min-width:74px;margin-right:0;margin-bottom:3px}.tabs-left>.nav-tabs{float:left;margin-right:19px;border-right:1px solid #ddd}.tabs-left>.nav-tabs>li>a{margin-right:-1px;-webkit-border-radius:4px 0 0 4px;-moz-border-radius:4px 0 0 4px;border-radius:4px 0 0 4px}.tabs-left>.nav-tabs>li>a:hover,.tabs-left>.nav-tabs>li>a:focus{border-color:#eee #ddd #eee #eee}.tabs-left>.nav-tabs .active>a,.tabs-left>.nav-tabs 
.active>a:hover,.tabs-left>.nav-tabs .active>a:focus{border-color:#ddd transparent #ddd #ddd;*border-right-color:#fff}.tabs-right>.nav-tabs{float:right;margin-left:19px;border-left:1px solid #ddd}.tabs-right>.nav-tabs>li>a{margin-left:-1px;-webkit-border-radius:0 4px 4px 0;-moz-border-radius:0 4px 4px 0;border-radius:0 4px 4px 0}.tabs-right>.nav-tabs>li>a:hover,.tabs-right>.nav-tabs>li>a:focus{border-color:#eee #eee #eee #ddd}.tabs-right>.nav-tabs .active>a,.tabs-right>.nav-tabs .active>a:hover,.tabs-right>.nav-tabs .active>a:focus{border-color:#ddd #ddd #ddd transparent;*border-left-color:#fff}.nav>.disabled>a{color:#999}.nav>.disabled>a:hover,.nav>.disabled>a:focus{text-decoration:none;cursor:default;background-color:transparent}.navbar{*position:relative;*z-index:2;margin-bottom:20px;overflow:visible}.navbar-inner{min-height:40px;padding-right:20px;padding-left:20px;background-color:#fafafa;background-image:-moz-linear-gradient(top,#fff,#f2f2f2);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#f2f2f2));background-image:-webkit-linear-gradient(top,#fff,#f2f2f2);background-image:-o-linear-gradient(top,#fff,#f2f2f2);background-image:linear-gradient(to bottom,#fff,#f2f2f2);background-repeat:repeat-x;border:1px solid #d4d4d4;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffffffff',endColorstr='#fff2f2f2',GradientType=0);*zoom:1;-webkit-box-shadow:0 1px 4px rgba(0,0,0,0.065);-moz-box-shadow:0 1px 4px rgba(0,0,0,0.065);box-shadow:0 1px 4px rgba(0,0,0,0.065)}.navbar-inner:before,.navbar-inner:after{display:table;line-height:0;content:""}.navbar-inner:after{clear:both}.navbar .container{width:auto}.nav-collapse.collapse{height:auto;overflow:visible}.navbar .brand{display:block;float:left;padding:10px 20px 10px;margin-left:-20px;font-size:20px;font-weight:200;color:#777;text-shadow:0 1px 0 #fff}.navbar .brand:hover,.navbar 
.brand:focus{text-decoration:none}.navbar-text{margin-bottom:0;line-height:40px;color:#777}.navbar-link{color:#777}.navbar-link:hover,.navbar-link:focus{color:#333}.navbar .divider-vertical{height:40px;margin:0 9px;border-right:1px solid #fff;border-left:1px solid #f2f2f2}.navbar .btn,.navbar .btn-group{margin-top:5px}.navbar .btn-group .btn,.navbar .input-prepend .btn,.navbar .input-append .btn,.navbar .input-prepend .btn-group,.navbar .input-append .btn-group{margin-top:0}.navbar-form{margin-bottom:0;*zoom:1}.navbar-form:before,.navbar-form:after{display:table;line-height:0;content:""}.navbar-form:after{clear:both}.navbar-form input,.navbar-form select,.navbar-form .radio,.navbar-form .checkbox{margin-top:5px}.navbar-form input,.navbar-form select,.navbar-form .btn{display:inline-block;margin-bottom:0}.navbar-form input[type="image"],.navbar-form input[type="checkbox"],.navbar-form input[type="radio"]{margin-top:3px}.navbar-form .input-append,.navbar-form .input-prepend{margin-top:5px;white-space:nowrap}.navbar-form .input-append input,.navbar-form .input-prepend input{margin-top:0}.navbar-search{position:relative;float:left;margin-top:5px;margin-bottom:0}.navbar-search .search-query{padding:4px 14px;margin-bottom:0;font-family:"Helvetica Neue",Helvetica,Arial,sans-serif;font-size:13px;font-weight:normal;line-height:1;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.navbar-static-top{position:static;margin-bottom:0}.navbar-static-top .navbar-inner{-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.navbar-fixed-top,.navbar-fixed-bottom{position:fixed;right:0;left:0;z-index:1030;margin-bottom:0}.navbar-fixed-top .navbar-inner,.navbar-static-top .navbar-inner{border-width:0 0 1px}.navbar-fixed-bottom .navbar-inner{border-width:1px 0 0}.navbar-fixed-top .navbar-inner,.navbar-fixed-bottom .navbar-inner{padding-right:0;padding-left:0;-webkit-border-radius:0;-moz-border-radius:0;border-radius:0}.navbar-static-top 
.container,.navbar-fixed-top .container,.navbar-fixed-bottom .container{width:940px}.navbar-fixed-top{top:0}.navbar-fixed-top .navbar-inner,.navbar-static-top .navbar-inner{-webkit-box-shadow:0 1px 10px rgba(0,0,0,0.1);-moz-box-shadow:0 1px 10px rgba(0,0,0,0.1);box-shadow:0 1px 10px rgba(0,0,0,0.1)}.navbar-fixed-bottom{bottom:0}.navbar-fixed-bottom .navbar-inner{-webkit-box-shadow:0 -1px 10px rgba(0,0,0,0.1);-moz-box-shadow:0 -1px 10px rgba(0,0,0,0.1);box-shadow:0 -1px 10px rgba(0,0,0,0.1)}.navbar .nav{position:relative;left:0;display:block;float:left;margin:0 10px 0 0}.navbar .nav.pull-right{float:right;margin-right:0}.navbar .nav>li{float:left}.navbar .nav>li>a{float:none;padding:10px 15px 10px;color:#777;text-decoration:none;text-shadow:0 1px 0 #fff}.navbar .nav .dropdown-toggle .caret{margin-top:8px}.navbar .nav>li>a:focus,.navbar .nav>li>a:hover{color:#333;text-decoration:none;background-color:transparent}.navbar .nav>.active>a,.navbar .nav>.active>a:hover,.navbar .nav>.active>a:focus{color:#555;text-decoration:none;background-color:#e5e5e5;-webkit-box-shadow:inset 0 3px 8px rgba(0,0,0,0.125);-moz-box-shadow:inset 0 3px 8px rgba(0,0,0,0.125);box-shadow:inset 0 3px 8px rgba(0,0,0,0.125)}.navbar .btn-navbar{display:none;float:right;padding:7px 10px;margin-right:5px;margin-left:5px;color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#ededed;*background-color:#e5e5e5;background-image:-moz-linear-gradient(top,#f2f2f2,#e5e5e5);background-image:-webkit-gradient(linear,0 0,0 100%,from(#f2f2f2),to(#e5e5e5));background-image:-webkit-linear-gradient(top,#f2f2f2,#e5e5e5);background-image:-o-linear-gradient(top,#f2f2f2,#e5e5e5);background-image:linear-gradient(to bottom,#f2f2f2,#e5e5e5);background-repeat:repeat-x;border-color:#e5e5e5 #e5e5e5 #bfbfbf;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) 
rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff2f2f2',endColorstr='#ffe5e5e5',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false);-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075);-moz-box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075);box-shadow:inset 0 1px 0 rgba(255,255,255,0.1),0 1px 0 rgba(255,255,255,0.075)}.navbar .btn-navbar:hover,.navbar .btn-navbar:focus,.navbar .btn-navbar:active,.navbar .btn-navbar.active,.navbar .btn-navbar.disabled,.navbar .btn-navbar[disabled]{color:#fff;background-color:#e5e5e5;*background-color:#d9d9d9}.navbar .btn-navbar:active,.navbar .btn-navbar.active{background-color:#ccc \9}.navbar .btn-navbar .icon-bar{display:block;width:18px;height:2px;background-color:#f5f5f5;-webkit-border-radius:1px;-moz-border-radius:1px;border-radius:1px;-webkit-box-shadow:0 1px 0 rgba(0,0,0,0.25);-moz-box-shadow:0 1px 0 rgba(0,0,0,0.25);box-shadow:0 1px 0 rgba(0,0,0,0.25)}.btn-navbar .icon-bar+.icon-bar{margin-top:3px}.navbar .nav>li>.dropdown-menu:before{position:absolute;top:-7px;left:9px;display:inline-block;border-right:7px solid transparent;border-bottom:7px solid #ccc;border-left:7px solid transparent;border-bottom-color:rgba(0,0,0,0.2);content:''}.navbar .nav>li>.dropdown-menu:after{position:absolute;top:-6px;left:10px;display:inline-block;border-right:6px solid transparent;border-bottom:6px solid #fff;border-left:6px solid transparent;content:''}.navbar-fixed-bottom .nav>li>.dropdown-menu:before{top:auto;bottom:-7px;border-top:7px solid #ccc;border-bottom:0;border-top-color:rgba(0,0,0,0.2)}.navbar-fixed-bottom .nav>li>.dropdown-menu:after{top:auto;bottom:-6px;border-top:6px solid #fff;border-bottom:0}.navbar .nav li.dropdown>a:hover .caret,.navbar .nav li.dropdown>a:focus .caret{border-top-color:#333;border-bottom-color:#333}.navbar .nav li.dropdown.open>.dropdown-toggle,.navbar .nav 
li.dropdown.active>.dropdown-toggle,.navbar .nav li.dropdown.open.active>.dropdown-toggle{color:#555;background-color:#e5e5e5}.navbar .nav li.dropdown>.dropdown-toggle .caret{border-top-color:#777;border-bottom-color:#777}.navbar .nav li.dropdown.open>.dropdown-toggle .caret,.navbar .nav li.dropdown.active>.dropdown-toggle .caret,.navbar .nav li.dropdown.open.active>.dropdown-toggle .caret{border-top-color:#555;border-bottom-color:#555}.navbar .pull-right>li>.dropdown-menu,.navbar .nav>li>.dropdown-menu.pull-right{right:0;left:auto}.navbar .pull-right>li>.dropdown-menu:before,.navbar .nav>li>.dropdown-menu.pull-right:before{right:12px;left:auto}.navbar .pull-right>li>.dropdown-menu:after,.navbar .nav>li>.dropdown-menu.pull-right:after{right:13px;left:auto}.navbar .pull-right>li>.dropdown-menu .dropdown-menu,.navbar .nav>li>.dropdown-menu.pull-right .dropdown-menu{right:100%;left:auto;margin-right:-1px;margin-left:0;-webkit-border-radius:6px 0 6px 6px;-moz-border-radius:6px 0 6px 6px;border-radius:6px 0 6px 6px}.navbar-inverse .navbar-inner{background-color:#1b1b1b;background-image:-moz-linear-gradient(top,#222,#111);background-image:-webkit-gradient(linear,0 0,0 100%,from(#222),to(#111));background-image:-webkit-linear-gradient(top,#222,#111);background-image:-o-linear-gradient(top,#222,#111);background-image:linear-gradient(to bottom,#222,#111);background-repeat:repeat-x;border-color:#252525;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff222222',endColorstr='#ff111111',GradientType=0)}.navbar-inverse .brand,.navbar-inverse .nav>li>a{color:#999;text-shadow:0 -1px 0 rgba(0,0,0,0.25)}.navbar-inverse .brand:hover,.navbar-inverse .nav>li>a:hover,.navbar-inverse .brand:focus,.navbar-inverse .nav>li>a:focus{color:#fff}.navbar-inverse .brand{color:#999}.navbar-inverse .navbar-text{color:#999}.navbar-inverse .nav>li>a:focus,.navbar-inverse .nav>li>a:hover{color:#fff;background-color:transparent}.navbar-inverse .nav .active>a,.navbar-inverse .nav 
.active>a:hover,.navbar-inverse .nav .active>a:focus{color:#fff;background-color:#111}.navbar-inverse .navbar-link{color:#999}.navbar-inverse .navbar-link:hover,.navbar-inverse .navbar-link:focus{color:#fff}.navbar-inverse .divider-vertical{border-right-color:#222;border-left-color:#111}.navbar-inverse .nav li.dropdown.open>.dropdown-toggle,.navbar-inverse .nav li.dropdown.active>.dropdown-toggle,.navbar-inverse .nav li.dropdown.open.active>.dropdown-toggle{color:#fff;background-color:#111}.navbar-inverse .nav li.dropdown>a:hover .caret,.navbar-inverse .nav li.dropdown>a:focus .caret{border-top-color:#fff;border-bottom-color:#fff}.navbar-inverse .nav li.dropdown>.dropdown-toggle .caret{border-top-color:#999;border-bottom-color:#999}.navbar-inverse .nav li.dropdown.open>.dropdown-toggle .caret,.navbar-inverse .nav li.dropdown.active>.dropdown-toggle .caret,.navbar-inverse .nav li.dropdown.open.active>.dropdown-toggle .caret{border-top-color:#fff;border-bottom-color:#fff}.navbar-inverse .navbar-search .search-query{color:#fff;background-color:#515151;border-color:#111;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1),0 1px 0 rgba(255,255,255,0.15);-webkit-transition:none;-moz-transition:none;-o-transition:none;transition:none}.navbar-inverse .navbar-search .search-query:-moz-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query:-ms-input-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query::-webkit-input-placeholder{color:#ccc}.navbar-inverse .navbar-search .search-query:focus,.navbar-inverse .navbar-search .search-query.focused{padding:5px 15px;color:#333;text-shadow:0 1px 0 #fff;background-color:#fff;border:0;outline:0;-webkit-box-shadow:0 0 3px rgba(0,0,0,0.15);-moz-box-shadow:0 0 3px rgba(0,0,0,0.15);box-shadow:0 0 3px rgba(0,0,0,0.15)}.navbar-inverse 
.btn-navbar{color:#fff;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#0e0e0e;*background-color:#040404;background-image:-moz-linear-gradient(top,#151515,#040404);background-image:-webkit-gradient(linear,0 0,0 100%,from(#151515),to(#040404));background-image:-webkit-linear-gradient(top,#151515,#040404);background-image:-o-linear-gradient(top,#151515,#040404);background-image:linear-gradient(to bottom,#151515,#040404);background-repeat:repeat-x;border-color:#040404 #040404 #000;border-color:rgba(0,0,0,0.1) rgba(0,0,0,0.1) rgba(0,0,0,0.25);filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff151515',endColorstr='#ff040404',GradientType=0);filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)}.navbar-inverse .btn-navbar:hover,.navbar-inverse .btn-navbar:focus,.navbar-inverse .btn-navbar:active,.navbar-inverse .btn-navbar.active,.navbar-inverse .btn-navbar.disabled,.navbar-inverse .btn-navbar[disabled]{color:#fff;background-color:#040404;*background-color:#000}.navbar-inverse .btn-navbar:active,.navbar-inverse .btn-navbar.active{background-color:#000 \9}.breadcrumb{padding:8px 15px;margin:0 0 20px;list-style:none;background-color:#f5f5f5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.breadcrumb>li{display:inline-block;*display:inline;text-shadow:0 1px 0 #fff;*zoom:1}.breadcrumb>li>.divider{padding:0 5px;color:#ccc}.breadcrumb>.active{color:#999}.pagination{margin:20px 0}.pagination ul{display:inline-block;*display:inline;margin-bottom:0;margin-left:0;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;*zoom:1;-webkit-box-shadow:0 1px 2px rgba(0,0,0,0.05);-moz-box-shadow:0 1px 2px rgba(0,0,0,0.05);box-shadow:0 1px 2px rgba(0,0,0,0.05)}.pagination ul>li{display:inline}.pagination ul>li>a,.pagination ul>li>span{float:left;padding:4px 12px;line-height:20px;text-decoration:none;background-color:#fff;border:1px solid #ddd;border-left-width:0}.pagination ul>li>a:hover,.pagination ul>li>a:focus,.pagination 
ul>.active>a,.pagination ul>.active>span{background-color:#f5f5f5}.pagination ul>.active>a,.pagination ul>.active>span{color:#999;cursor:default}.pagination ul>.disabled>span,.pagination ul>.disabled>a,.pagination ul>.disabled>a:hover,.pagination ul>.disabled>a:focus{color:#999;cursor:default;background-color:transparent}.pagination ul>li:first-child>a,.pagination ul>li:first-child>span{border-left-width:1px;-webkit-border-bottom-left-radius:4px;border-bottom-left-radius:4px;-webkit-border-top-left-radius:4px;border-top-left-radius:4px;-moz-border-radius-bottomleft:4px;-moz-border-radius-topleft:4px}.pagination ul>li:last-child>a,.pagination ul>li:last-child>span{-webkit-border-top-right-radius:4px;border-top-right-radius:4px;-webkit-border-bottom-right-radius:4px;border-bottom-right-radius:4px;-moz-border-radius-topright:4px;-moz-border-radius-bottomright:4px}.pagination-centered{text-align:center}.pagination-right{text-align:right}.pagination-large ul>li>a,.pagination-large ul>li>span{padding:11px 19px;font-size:17.5px}.pagination-large ul>li:first-child>a,.pagination-large ul>li:first-child>span{-webkit-border-bottom-left-radius:6px;border-bottom-left-radius:6px;-webkit-border-top-left-radius:6px;border-top-left-radius:6px;-moz-border-radius-bottomleft:6px;-moz-border-radius-topleft:6px}.pagination-large ul>li:last-child>a,.pagination-large ul>li:last-child>span{-webkit-border-top-right-radius:6px;border-top-right-radius:6px;-webkit-border-bottom-right-radius:6px;border-bottom-right-radius:6px;-moz-border-radius-topright:6px;-moz-border-radius-bottomright:6px}.pagination-mini ul>li:first-child>a,.pagination-small ul>li:first-child>a,.pagination-mini ul>li:first-child>span,.pagination-small ul>li:first-child>span{-webkit-border-bottom-left-radius:3px;border-bottom-left-radius:3px;-webkit-border-top-left-radius:3px;border-top-left-radius:3px;-moz-border-radius-bottomleft:3px;-moz-border-radius-topleft:3px}.pagination-mini ul>li:last-child>a,.pagination-small 
ul>li:last-child>a,.pagination-mini ul>li:last-child>span,.pagination-small ul>li:last-child>span{-webkit-border-top-right-radius:3px;border-top-right-radius:3px;-webkit-border-bottom-right-radius:3px;border-bottom-right-radius:3px;-moz-border-radius-topright:3px;-moz-border-radius-bottomright:3px}.pagination-small ul>li>a,.pagination-small ul>li>span{padding:2px 10px;font-size:11.9px}.pagination-mini ul>li>a,.pagination-mini ul>li>span{padding:0 6px;font-size:10.5px}.pager{margin:20px 0;text-align:center;list-style:none;*zoom:1}.pager:before,.pager:after{display:table;line-height:0;content:""}.pager:after{clear:both}.pager li{display:inline}.pager li>a,.pager li>span{display:inline-block;padding:5px 14px;background-color:#fff;border:1px solid #ddd;-webkit-border-radius:15px;-moz-border-radius:15px;border-radius:15px}.pager li>a:hover,.pager li>a:focus{text-decoration:none;background-color:#f5f5f5}.pager .next>a,.pager .next>span{float:right}.pager .previous>a,.pager .previous>span{float:left}.pager .disabled>a,.pager .disabled>a:hover,.pager .disabled>a:focus,.pager .disabled>span{color:#999;cursor:default;background-color:#fff}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{opacity:0}.modal-backdrop,.modal-backdrop.fade.in{opacity:.8;filter:alpha(opacity=80)}.modal{position:fixed;top:10%;left:50%;z-index:1050;width:560px;margin-left:-280px;background-color:#fff;border:1px solid #999;border:1px solid rgba(0,0,0,0.3);*border:1px solid #999;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;outline:0;-webkit-box-shadow:0 3px 7px rgba(0,0,0,0.3);-moz-box-shadow:0 3px 7px rgba(0,0,0,0.3);box-shadow:0 3px 7px rgba(0,0,0,0.3);-webkit-background-clip:padding-box;-moz-background-clip:padding-box;background-clip:padding-box}.modal.fade{top:-25%;-webkit-transition:opacity .3s linear,top .3s ease-out;-moz-transition:opacity .3s linear,top .3s ease-out;-o-transition:opacity .3s linear,top .3s 
ease-out;transition:opacity .3s linear,top .3s ease-out}.modal.fade.in{top:10%}.modal-header{padding:9px 15px;border-bottom:1px solid #eee}.modal-header .close{margin-top:2px}.modal-header h3{margin:0;line-height:30px}.modal-body{position:relative;max-height:400px;padding:15px;overflow-y:auto}.modal-form{margin-bottom:0}.modal-footer{padding:14px 15px 15px;margin-bottom:0;text-align:right;background-color:#f5f5f5;border-top:1px solid #ddd;-webkit-border-radius:0 0 6px 6px;-moz-border-radius:0 0 6px 6px;border-radius:0 0 6px 6px;*zoom:1;-webkit-box-shadow:inset 0 1px 0 #fff;-moz-box-shadow:inset 0 1px 0 #fff;box-shadow:inset 0 1px 0 #fff}.modal-footer:before,.modal-footer:after{display:table;line-height:0;content:""}.modal-footer:after{clear:both}.modal-footer .btn+.btn{margin-bottom:0;margin-left:5px}.modal-footer .btn-group .btn+.btn{margin-left:-1px}.modal-footer .btn-block+.btn-block{margin-left:0}.tooltip{position:absolute;z-index:1030;display:block;font-size:11px;line-height:1.4;opacity:0;filter:alpha(opacity=0);visibility:visible}.tooltip.in{opacity:.8;filter:alpha(opacity=80)}.tooltip.top{padding:5px 0;margin-top:-3px}.tooltip.right{padding:0 5px;margin-left:3px}.tooltip.bottom{padding:5px 0;margin-top:3px}.tooltip.left{padding:0 5px;margin-left:-3px}.tooltip-inner{max-width:200px;padding:8px;color:#fff;text-align:center;text-decoration:none;background-color:#000;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.tooltip-arrow{position:absolute;width:0;height:0;border-color:transparent;border-style:solid}.tooltip.top .tooltip-arrow{bottom:0;left:50%;margin-left:-5px;border-top-color:#000;border-width:5px 5px 0}.tooltip.right .tooltip-arrow{top:50%;left:0;margin-top:-5px;border-right-color:#000;border-width:5px 5px 5px 0}.tooltip.left .tooltip-arrow{top:50%;right:0;margin-top:-5px;border-left-color:#000;border-width:5px 0 5px 5px}.tooltip.bottom .tooltip-arrow{top:0;left:50%;margin-left:-5px;border-bottom-color:#000;border-width:0 5px 
5px}.popover{position:absolute;top:0;left:0;z-index:1010;display:none;max-width:276px;padding:1px;text-align:left;white-space:normal;background-color:#fff;border:1px solid #ccc;border:1px solid rgba(0,0,0,0.2);-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px;-webkit-box-shadow:0 5px 10px rgba(0,0,0,0.2);-moz-box-shadow:0 5px 10px rgba(0,0,0,0.2);box-shadow:0 5px 10px rgba(0,0,0,0.2);-webkit-background-clip:padding-box;-moz-background-clip:padding;background-clip:padding-box}.popover.top{margin-top:-10px}.popover.right{margin-left:10px}.popover.bottom{margin-top:10px}.popover.left{margin-left:-10px}.popover-title{padding:8px 14px;margin:0;font-size:14px;font-weight:normal;line-height:18px;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;-webkit-border-radius:5px 5px 0 0;-moz-border-radius:5px 5px 0 0;border-radius:5px 5px 0 0}.popover-title:empty{display:none}.popover-content{padding:9px 14px}.popover .arrow,.popover .arrow:after{position:absolute;display:block;width:0;height:0;border-color:transparent;border-style:solid}.popover .arrow{border-width:11px}.popover .arrow:after{border-width:10px;content:""}.popover.top .arrow{bottom:-11px;left:50%;margin-left:-11px;border-top-color:#999;border-top-color:rgba(0,0,0,0.25);border-bottom-width:0}.popover.top .arrow:after{bottom:1px;margin-left:-10px;border-top-color:#fff;border-bottom-width:0}.popover.right .arrow{top:50%;left:-11px;margin-top:-11px;border-right-color:#999;border-right-color:rgba(0,0,0,0.25);border-left-width:0}.popover.right .arrow:after{bottom:-10px;left:1px;border-right-color:#fff;border-left-width:0}.popover.bottom .arrow{top:-11px;left:50%;margin-left:-11px;border-bottom-color:#999;border-bottom-color:rgba(0,0,0,0.25);border-top-width:0}.popover.bottom .arrow:after{top:1px;margin-left:-10px;border-bottom-color:#fff;border-top-width:0}.popover.left 
.arrow{top:50%;right:-11px;margin-top:-11px;border-left-color:#999;border-left-color:rgba(0,0,0,0.25);border-right-width:0}.popover.left .arrow:after{right:1px;bottom:-10px;border-left-color:#fff;border-right-width:0}.thumbnails{margin-left:-20px;list-style:none;*zoom:1}.thumbnails:before,.thumbnails:after{display:table;line-height:0;content:""}.thumbnails:after{clear:both}.row-fluid .thumbnails{margin-left:0}.thumbnails>li{float:left;margin-bottom:20px;margin-left:20px}.thumbnail{display:block;padding:4px;line-height:20px;border:1px solid #ddd;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;-webkit-box-shadow:0 1px 3px rgba(0,0,0,0.055);-moz-box-shadow:0 1px 3px rgba(0,0,0,0.055);box-shadow:0 1px 3px rgba(0,0,0,0.055);-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;-o-transition:all .2s ease-in-out;transition:all .2s ease-in-out}a.thumbnail:hover,a.thumbnail:focus{border-color:#08c;-webkit-box-shadow:0 1px 4px rgba(0,105,214,0.25);-moz-box-shadow:0 1px 4px rgba(0,105,214,0.25);box-shadow:0 1px 4px rgba(0,105,214,0.25)}.thumbnail>img{display:block;max-width:100%;margin-right:auto;margin-left:auto}.thumbnail .caption{padding:9px;color:#555}.media,.media-body{overflow:hidden;*overflow:visible;zoom:1}.media,.media .media{margin-top:15px}.media:first-child{margin-top:0}.media-object{display:block}.media-heading{margin:0 0 5px}.media>.pull-left{margin-right:10px}.media>.pull-right{margin-left:10px}.media-list{margin-left:0;list-style:none}.label,.badge{display:inline-block;padding:2px 4px;font-size:11.844px;font-weight:bold;line-height:14px;color:#fff;text-shadow:0 -1px 0 
rgba(0,0,0,0.25);white-space:nowrap;vertical-align:baseline;background-color:#999}.label{-webkit-border-radius:3px;-moz-border-radius:3px;border-radius:3px}.badge{padding-right:9px;padding-left:9px;-webkit-border-radius:9px;-moz-border-radius:9px;border-radius:9px}.label:empty,.badge:empty{display:none}a.label:hover,a.label:focus,a.badge:hover,a.badge:focus{color:#fff;text-decoration:none;cursor:pointer}.label-important,.badge-important{background-color:#b94a48}.label-important[href],.badge-important[href]{background-color:#953b39}.label-warning,.badge-warning{background-color:#f89406}.label-warning[href],.badge-warning[href]{background-color:#c67605}.label-success,.badge-success{background-color:#468847}.label-success[href],.badge-success[href]{background-color:#356635}.label-info,.badge-info{background-color:#3a87ad}.label-info[href],.badge-info[href]{background-color:#2d6987}.label-inverse,.badge-inverse{background-color:#333}.label-inverse[href],.badge-inverse[href]{background-color:#1a1a1a}.btn .label,.btn .badge{position:relative;top:-1px}.btn-mini .label,.btn-mini .badge{top:0}@-webkit-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-moz-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-ms-keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}@-o-keyframes progress-bar-stripes{from{background-position:0 0}to{background-position:40px 0}}@keyframes progress-bar-stripes{from{background-position:40px 0}to{background-position:0 0}}.progress{height:20px;margin-bottom:20px;overflow:hidden;background-color:#f7f7f7;background-image:-moz-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:-webkit-gradient(linear,0 0,0 100%,from(#f5f5f5),to(#f9f9f9));background-image:-webkit-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:-o-linear-gradient(top,#f5f5f5,#f9f9f9);background-image:linear-gradient(to 
bottom,#f5f5f5,#f9f9f9);background-repeat:repeat-x;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fff5f5f5',endColorstr='#fff9f9f9',GradientType=0);-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1);-moz-box-shadow:inset 0 1px 2px rgba(0,0,0,0.1);box-shadow:inset 0 1px 2px rgba(0,0,0,0.1)}.progress .bar{float:left;width:0;height:100%;font-size:12px;color:#fff;text-align:center;text-shadow:0 -1px 0 rgba(0,0,0,0.25);background-color:#0e90d2;background-image:-moz-linear-gradient(top,#149bdf,#0480be);background-image:-webkit-gradient(linear,0 0,0 100%,from(#149bdf),to(#0480be));background-image:-webkit-linear-gradient(top,#149bdf,#0480be);background-image:-o-linear-gradient(top,#149bdf,#0480be);background-image:linear-gradient(to bottom,#149bdf,#0480be);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff149bdf',endColorstr='#ff0480be',GradientType=0);-webkit-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-moz-box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 0 -1px 0 rgba(0,0,0,0.15);-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;-webkit-transition:width .6s ease;-moz-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}.progress .bar+.bar{-webkit-box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15);-moz-box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15);box-shadow:inset 1px 0 0 rgba(0,0,0,0.15),inset 0 -1px 0 rgba(0,0,0,0.15)}.progress-striped .bar{background-color:#149bdf;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 
25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);-webkit-background-size:40px 40px;-moz-background-size:40px 40px;-o-background-size:40px 40px;background-size:40px 40px}.progress.active .bar{-webkit-animation:progress-bar-stripes 2s linear infinite;-moz-animation:progress-bar-stripes 2s linear infinite;-ms-animation:progress-bar-stripes 2s linear infinite;-o-animation:progress-bar-stripes 2s linear infinite;animation:progress-bar-stripes 2s linear infinite}.progress-danger .bar,.progress .bar-danger{background-color:#dd514c;background-image:-moz-linear-gradient(top,#ee5f5b,#c43c35);background-image:-webkit-gradient(linear,0 0,0 100%,from(#ee5f5b),to(#c43c35));background-image:-webkit-linear-gradient(top,#ee5f5b,#c43c35);background-image:-o-linear-gradient(top,#ee5f5b,#c43c35);background-image:linear-gradient(to bottom,#ee5f5b,#c43c35);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ffee5f5b',endColorstr='#ffc43c35',GradientType=0)}.progress-danger.progress-striped .bar,.progress-striped .bar-danger{background-color:#ee5f5b;background-image:-webkit-gradient(linear,0 100%,100% 
0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-success .bar,.progress .bar-success{background-color:#5eb95e;background-image:-moz-linear-gradient(top,#62c462,#57a957);background-image:-webkit-gradient(linear,0 0,0 100%,from(#62c462),to(#57a957));background-image:-webkit-linear-gradient(top,#62c462,#57a957);background-image:-o-linear-gradient(top,#62c462,#57a957);background-image:linear-gradient(to bottom,#62c462,#57a957);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff62c462',endColorstr='#ff57a957',GradientType=0)}.progress-success.progress-striped .bar,.progress-striped .bar-success{background-color:#62c462;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 
50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-info .bar,.progress .bar-info{background-color:#4bb1cf;background-image:-moz-linear-gradient(top,#5bc0de,#339bb9);background-image:-webkit-gradient(linear,0 0,0 100%,from(#5bc0de),to(#339bb9));background-image:-webkit-linear-gradient(top,#5bc0de,#339bb9);background-image:-o-linear-gradient(top,#5bc0de,#339bb9);background-image:linear-gradient(to bottom,#5bc0de,#339bb9);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#ff5bc0de',endColorstr='#ff339bb9',GradientType=0)}.progress-info.progress-striped .bar,.progress-striped .bar-info{background-color:#5bc0de;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 
50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.progress-warning .bar,.progress .bar-warning{background-color:#faa732;background-image:-moz-linear-gradient(top,#fbb450,#f89406);background-image:-webkit-gradient(linear,0 0,0 100%,from(#fbb450),to(#f89406));background-image:-webkit-linear-gradient(top,#fbb450,#f89406);background-image:-o-linear-gradient(top,#fbb450,#f89406);background-image:linear-gradient(to bottom,#fbb450,#f89406);background-repeat:repeat-x;filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#fffbb450',endColorstr='#fff89406',GradientType=0)}.progress-warning.progress-striped .bar,.progress-striped .bar-warning{background-color:#fbb450;background-image:-webkit-gradient(linear,0 100%,100% 0,color-stop(0.25,rgba(255,255,255,0.15)),color-stop(0.25,transparent),color-stop(0.5,transparent),color-stop(0.5,rgba(255,255,255,0.15)),color-stop(0.75,rgba(255,255,255,0.15)),color-stop(0.75,transparent),to(transparent));background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-moz-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,0.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,0.15) 50%,rgba(255,255,255,0.15) 75%,transparent 75%,transparent)}.accordion{margin-bottom:20px}.accordion-group{margin-bottom:2px;border:1px solid 
#e5e5e5;-webkit-border-radius:4px;-moz-border-radius:4px;border-radius:4px}.accordion-heading{border-bottom:0}.accordion-heading .accordion-toggle{display:block;padding:8px 15px}.accordion-toggle{cursor:pointer}.accordion-inner{padding:9px 15px;border-top:1px solid #e5e5e5}.carousel{position:relative;margin-bottom:20px;line-height:1}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner>.item{position:relative;display:none;-webkit-transition:.6s ease-in-out left;-moz-transition:.6s ease-in-out left;-o-transition:.6s ease-in-out left;transition:.6s ease-in-out left}.carousel-inner>.item>img,.carousel-inner>.item>a>img{display:block;line-height:1}.carousel-inner>.active,.carousel-inner>.next,.carousel-inner>.prev{display:block}.carousel-inner>.active{left:0}.carousel-inner>.next,.carousel-inner>.prev{position:absolute;top:0;width:100%}.carousel-inner>.next{left:100%}.carousel-inner>.prev{left:-100%}.carousel-inner>.next.left,.carousel-inner>.prev.right{left:0}.carousel-inner>.active.left{left:-100%}.carousel-inner>.active.right{left:100%}.carousel-control{position:absolute;top:40%;left:15px;width:40px;height:40px;margin-top:-20px;font-size:60px;font-weight:100;line-height:30px;color:#fff;text-align:center;background:#222;border:3px solid #fff;-webkit-border-radius:23px;-moz-border-radius:23px;border-radius:23px;opacity:.5;filter:alpha(opacity=50)}.carousel-control.right{right:15px;left:auto}.carousel-control:hover,.carousel-control:focus{color:#fff;text-decoration:none;opacity:.9;filter:alpha(opacity=90)}.carousel-indicators{position:absolute;top:15px;right:15px;z-index:5;margin:0;list-style:none}.carousel-indicators li{display:block;float:left;width:10px;height:10px;margin-left:5px;text-indent:-999px;background-color:#ccc;background-color:rgba(255,255,255,0.25);border-radius:5px}.carousel-indicators 
.active{background-color:#fff}.carousel-caption{position:absolute;right:0;bottom:0;left:0;padding:15px;background:#333;background:rgba(0,0,0,0.75)}.carousel-caption h4,.carousel-caption p{line-height:20px;color:#fff}.carousel-caption h4{margin:0 0 5px}.carousel-caption p{margin-bottom:0}.hero-unit{padding:60px;margin-bottom:30px;font-size:18px;font-weight:200;line-height:30px;color:inherit;background-color:#eee;-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.hero-unit h1{margin-bottom:0;font-size:60px;line-height:1;letter-spacing:-1px;color:inherit}.hero-unit li{line-height:30px}.pull-right{float:right}.pull-left{float:left}.hide{display:none}.show{display:block}.invisible{visibility:hidden}.affix{position:fixed} diff --git a/rpkid/rpki/gui/app/static/img/glyphicons-halflings-white.png b/rpkid/rpki/gui/app/static/img/glyphicons-halflings-white.png deleted file mode 100644 index 3bf6484a..00000000 Binary files a/rpkid/rpki/gui/app/static/img/glyphicons-halflings-white.png and /dev/null differ diff --git a/rpkid/rpki/gui/app/static/img/glyphicons-halflings.png b/rpkid/rpki/gui/app/static/img/glyphicons-halflings.png deleted file mode 100644 index a9969993..00000000 Binary files a/rpkid/rpki/gui/app/static/img/glyphicons-halflings.png and /dev/null differ diff --git a/rpkid/rpki/gui/app/static/img/sui-riu.ico b/rpkid/rpki/gui/app/static/img/sui-riu.ico deleted file mode 100644 index 61223e27..00000000 Binary files a/rpkid/rpki/gui/app/static/img/sui-riu.ico and /dev/null differ diff --git a/rpkid/rpki/gui/app/static/js/bootstrap.min.js b/rpkid/rpki/gui/app/static/js/bootstrap.min.js deleted file mode 100644 index 95c5ac5e..00000000 --- a/rpkid/rpki/gui/app/static/js/bootstrap.min.js +++ /dev/null @@ -1,6 +0,0 @@ -/*! -* Bootstrap.js by @fat & @mdo -* Copyright 2012 Twitter, Inc. 
-* http://www.apache.org/licenses/LICENSE-2.0.txt -*/ -!function(e){"use strict";e(function(){e.support.transition=function(){var e=function(){var e=document.createElement("bootstrap"),t={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"},n;for(n in t)if(e.style[n]!==undefined)return t[n]}();return e&&{end:e}}()})}(window.jQuery),!function(e){"use strict";var t='[data-dismiss="alert"]',n=function(n){e(n).on("click",t,this.close)};n.prototype.close=function(t){function s(){i.trigger("closed").remove()}var n=e(this),r=n.attr("data-target"),i;r||(r=n.attr("href"),r=r&&r.replace(/.*(?=#[^\s]*$)/,"")),i=e(r),t&&t.preventDefault(),i.length||(i=n.hasClass("alert")?n:n.parent()),i.trigger(t=e.Event("close"));if(t.isDefaultPrevented())return;i.removeClass("in"),e.support.transition&&i.hasClass("fade")?i.on(e.support.transition.end,s):s()};var r=e.fn.alert;e.fn.alert=function(t){return this.each(function(){var r=e(this),i=r.data("alert");i||r.data("alert",i=new n(this)),typeof t=="string"&&i[t].call(r)})},e.fn.alert.Constructor=n,e.fn.alert.noConflict=function(){return e.fn.alert=r,this},e(document).on("click.alert.data-api",t,n.prototype.close)}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.button.defaults,n)};t.prototype.setState=function(e){var t="disabled",n=this.$element,r=n.data(),i=n.is("input")?"val":"html";e+="Text",r.resetText||n.data("resetText",n[i]()),n[i](r[e]||this.options[e]),setTimeout(function(){e=="loadingText"?n.addClass(t).attr(t,t):n.removeClass(t).removeAttr(t)},0)},t.prototype.toggle=function(){var e=this.$element.closest('[data-toggle="buttons-radio"]');e&&e.find(".active").removeClass("active"),this.$element.toggleClass("active")};var n=e.fn.button;e.fn.button=function(n){return this.each(function(){var r=e(this),i=r.data("button"),s=typeof n=="object"&&n;i||r.data("button",i=new 
t(this,s)),n=="toggle"?i.toggle():n&&i.setState(n)})},e.fn.button.defaults={loadingText:"loading..."},e.fn.button.Constructor=t,e.fn.button.noConflict=function(){return e.fn.button=n,this},e(document).on("click.button.data-api","[data-toggle^=button]",function(t){var n=e(t.target);n.hasClass("btn")||(n=n.closest(".btn")),n.button("toggle")})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.$indicators=this.$element.find(".carousel-indicators"),this.options=n,this.options.pause=="hover"&&this.$element.on("mouseenter",e.proxy(this.pause,this)).on("mouseleave",e.proxy(this.cycle,this))};t.prototype={cycle:function(t){return t||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(e.proxy(this.next,this),this.options.interval)),this},getActiveIndex:function(){return this.$active=this.$element.find(".item.active"),this.$items=this.$active.parent().children(),this.$items.index(this.$active)},to:function(t){var n=this.getActiveIndex(),r=this;if(t>this.$items.length-1||t<0)return;return this.sliding?this.$element.one("slid",function(){r.to(t)}):n==t?this.pause().cycle():this.slide(t>n?"next":"prev",e(this.$items[t]))},pause:function(t){return t||(this.paused=!0),this.$element.find(".next, .prev").length&&e.support.transition.end&&(this.$element.trigger(e.support.transition.end),this.cycle(!0)),clearInterval(this.interval),this.interval=null,this},next:function(){if(this.sliding)return;return this.slide("next")},prev:function(){if(this.sliding)return;return this.slide("prev")},slide:function(t,n){var 
r=this.$element.find(".item.active"),i=n||r[t](),s=this.interval,o=t=="next"?"left":"right",u=t=="next"?"first":"last",a=this,f;this.sliding=!0,s&&this.pause(),i=i.length?i:this.$element.find(".item")[u](),f=e.Event("slide",{relatedTarget:i[0],direction:o});if(i.hasClass("active"))return;this.$indicators.length&&(this.$indicators.find(".active").removeClass("active"),this.$element.one("slid",function(){var t=e(a.$indicators.children()[a.getActiveIndex()]);t&&t.addClass("active")}));if(e.support.transition&&this.$element.hasClass("slide")){this.$element.trigger(f);if(f.isDefaultPrevented())return;i.addClass(t),i[0].offsetWidth,r.addClass(o),i.addClass(o),this.$element.one(e.support.transition.end,function(){i.removeClass([t,o].join(" ")).addClass("active"),r.removeClass(["active",o].join(" ")),a.sliding=!1,setTimeout(function(){a.$element.trigger("slid")},0)})}else{this.$element.trigger(f);if(f.isDefaultPrevented())return;r.removeClass("active"),i.addClass("active"),this.sliding=!1,this.$element.trigger("slid")}return s&&this.cycle(),this}};var n=e.fn.carousel;e.fn.carousel=function(n){return this.each(function(){var r=e(this),i=r.data("carousel"),s=e.extend({},e.fn.carousel.defaults,typeof n=="object"&&n),o=typeof n=="string"?n:s.slide;i||r.data("carousel",i=new t(this,s)),typeof n=="number"?i.to(n):o?i[o]():s.interval&&i.pause().cycle()})},e.fn.carousel.defaults={interval:5e3,pause:"hover"},e.fn.carousel.Constructor=t,e.fn.carousel.noConflict=function(){return e.fn.carousel=n,this},e(document).on("click.carousel.data-api","[data-slide], [data-slide-to]",function(t){var n=e(this),r,i=e(n.attr("data-target")||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,"")),s=e.extend({},i.data(),n.data()),o;i.carousel(s),(o=n.attr("data-slide-to"))&&i.data("carousel").pause().to(o).cycle(),t.preventDefault()})}(window.jQuery),!function(e){"use strict";var 
t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.collapse.defaults,n),this.options.parent&&(this.$parent=e(this.options.parent)),this.options.toggle&&this.toggle()};t.prototype={constructor:t,dimension:function(){var e=this.$element.hasClass("width");return e?"width":"height"},show:function(){var t,n,r,i;if(this.transitioning||this.$element.hasClass("in"))return;t=this.dimension(),n=e.camelCase(["scroll",t].join("-")),r=this.$parent&&this.$parent.find("> .accordion-group > .in");if(r&&r.length){i=r.data("collapse");if(i&&i.transitioning)return;r.collapse("hide"),i||r.data("collapse",null)}this.$element[t](0),this.transition("addClass",e.Event("show"),"shown"),e.support.transition&&this.$element[t](this.$element[0][n])},hide:function(){var t;if(this.transitioning||!this.$element.hasClass("in"))return;t=this.dimension(),this.reset(this.$element[t]()),this.transition("removeClass",e.Event("hide"),"hidden"),this.$element[t](0)},reset:function(e){var t=this.dimension();return this.$element.removeClass("collapse")[t](e||"auto")[0].offsetWidth,this.$element[e!==null?"addClass":"removeClass"]("collapse"),this},transition:function(t,n,r){var i=this,s=function(){n.type=="show"&&i.reset(),i.transitioning=0,i.$element.trigger(r)};this.$element.trigger(n);if(n.isDefaultPrevented())return;this.transitioning=1,this.$element[t]("in"),e.support.transition&&this.$element.hasClass("collapse")?this.$element.one(e.support.transition.end,s):s()},toggle:function(){this[this.$element.hasClass("in")?"hide":"show"]()}};var n=e.fn.collapse;e.fn.collapse=function(n){return this.each(function(){var r=e(this),i=r.data("collapse"),s=e.extend({},e.fn.collapse.defaults,r.data(),typeof n=="object"&&n);i||r.data("collapse",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.collapse.defaults={toggle:!0},e.fn.collapse.Constructor=t,e.fn.collapse.noConflict=function(){return e.fn.collapse=n,this},e(document).on("click.collapse.data-api","[data-toggle=collapse]",function(t){var 
n=e(this),r,i=n.attr("data-target")||t.preventDefault()||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,""),s=e(i).data("collapse")?"toggle":n.data();n[e(i).hasClass("in")?"addClass":"removeClass"]("collapsed"),e(i).collapse(s)})}(window.jQuery),!function(e){"use strict";function r(){e(t).each(function(){i(e(this)).removeClass("open")})}function i(t){var n=t.attr("data-target"),r;n||(n=t.attr("href"),n=n&&/#/.test(n)&&n.replace(/.*(?=#[^\s]*$)/,"")),r=n&&e(n);if(!r||!r.length)r=t.parent();return r}var t="[data-toggle=dropdown]",n=function(t){var n=e(t).on("click.dropdown.data-api",this.toggle);e("html").on("click.dropdown.data-api",function(){n.parent().removeClass("open")})};n.prototype={constructor:n,toggle:function(t){var n=e(this),s,o;if(n.is(".disabled, :disabled"))return;return s=i(n),o=s.hasClass("open"),r(),o||s.toggleClass("open"),n.focus(),!1},keydown:function(n){var r,s,o,u,a,f;if(!/(38|40|27)/.test(n.keyCode))return;r=e(this),n.preventDefault(),n.stopPropagation();if(r.is(".disabled, :disabled"))return;u=i(r),a=u.hasClass("open");if(!a||a&&n.keyCode==27)return n.which==27&&u.find(t).focus(),r.click();s=e("[role=menu] li:not(.divider):visible a",u);if(!s.length)return;f=s.index(s.filter(":focus")),n.keyCode==38&&f>0&&f--,n.keyCode==40&&f').appendTo(document.body),this.$backdrop.click(this.options.backdrop=="static"?e.proxy(this.$element[0].focus,this.$element[0]):e.proxy(this.hide,this)),i&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in");if(!t)return;i?this.$backdrop.one(e.support.transition.end,t):t()}else!this.isShown&&this.$backdrop?(this.$backdrop.removeClass("in"),e.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one(e.support.transition.end,t):t()):t&&t()}};var n=e.fn.modal;e.fn.modal=function(n){return this.each(function(){var r=e(this),i=r.data("modal"),s=e.extend({},e.fn.modal.defaults,r.data(),typeof n=="object"&&n);i||r.data("modal",i=new t(this,s)),typeof 
n=="string"?i[n]():s.show&&i.show()})},e.fn.modal.defaults={backdrop:!0,keyboard:!0,show:!0},e.fn.modal.Constructor=t,e.fn.modal.noConflict=function(){return e.fn.modal=n,this},e(document).on("click.modal.data-api",'[data-toggle="modal"]',function(t){var n=e(this),r=n.attr("href"),i=e(n.attr("data-target")||r&&r.replace(/.*(?=#[^\s]+$)/,"")),s=i.data("modal")?"toggle":e.extend({remote:!/#/.test(r)&&r},i.data(),n.data());t.preventDefault(),i.modal(s).one("hide",function(){n.focus()})})}(window.jQuery),!function(e){"use strict";var t=function(e,t){this.init("tooltip",e,t)};t.prototype={constructor:t,init:function(t,n,r){var i,s,o,u,a;this.type=t,this.$element=e(n),this.options=this.getOptions(r),this.enabled=!0,o=this.options.trigger.split(" ");for(a=o.length;a--;)u=o[a],u=="click"?this.$element.on("click."+this.type,this.options.selector,e.proxy(this.toggle,this)):u!="manual"&&(i=u=="hover"?"mouseenter":"focus",s=u=="hover"?"mouseleave":"blur",this.$element.on(i+"."+this.type,this.options.selector,e.proxy(this.enter,this)),this.$element.on(s+"."+this.type,this.options.selector,e.proxy(this.leave,this)));this.options.selector?this._options=e.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},getOptions:function(t){return t=e.extend({},e.fn[this.type].defaults,this.$element.data(),t),t.delay&&typeof t.delay=="number"&&(t.delay={show:t.delay,hide:t.delay}),t},enter:function(t){var n=e.fn[this.type].defaults,r={},i;this._options&&e.each(this._options,function(e,t){n[e]!=t&&(r[e]=t)},this),i=e(t.currentTarget)[this.type](r).data(this.type);if(!i.options.delay||!i.options.delay.show)return i.show();clearTimeout(this.timeout),i.hoverState="in",this.timeout=setTimeout(function(){i.hoverState=="in"&&i.show()},i.options.delay.show)},leave:function(t){var n=e(t.currentTarget)[this.type](this._options).data(this.type);this.timeout&&clearTimeout(this.timeout);if(!n.options.delay||!n.options.delay.hide)return 
n.hide();n.hoverState="out",this.timeout=setTimeout(function(){n.hoverState=="out"&&n.hide()},n.options.delay.hide)},show:function(){var t,n,r,i,s,o,u=e.Event("show");if(this.hasContent()&&this.enabled){this.$element.trigger(u);if(u.isDefaultPrevented())return;t=this.tip(),this.setContent(),this.options.animation&&t.addClass("fade"),s=typeof this.options.placement=="function"?this.options.placement.call(this,t[0],this.$element[0]):this.options.placement,t.detach().css({top:0,left:0,display:"block"}),this.options.container?t.appendTo(this.options.container):t.insertAfter(this.$element),n=this.getPosition(),r=t[0].offsetWidth,i=t[0].offsetHeight;switch(s){case"bottom":o={top:n.top+n.height,left:n.left+n.width/2-r/2};break;case"top":o={top:n.top-i,left:n.left+n.width/2-r/2};break;case"left":o={top:n.top+n.height/2-i/2,left:n.left-r};break;case"right":o={top:n.top+n.height/2-i/2,left:n.left+n.width}}this.applyPlacement(o,s),this.$element.trigger("shown")}},applyPlacement:function(e,t){var n=this.tip(),r=n[0].offsetWidth,i=n[0].offsetHeight,s,o,u,a;n.offset(e).addClass(t).addClass("in"),s=n[0].offsetWidth,o=n[0].offsetHeight,t=="top"&&o!=i&&(e.top=e.top+i-o,a=!0),t=="bottom"||t=="top"?(u=0,e.left<0&&(u=e.left*-2,e.left=0,n.offset(e),s=n[0].offsetWidth,o=n[0].offsetHeight),this.replaceArrow(u-r+s,s,"left")):this.replaceArrow(o-i,o,"top"),a&&n.offset(e)},replaceArrow:function(e,t,n){this.arrow().css(n,e?50*(1-e/t)+"%":"")},setContent:function(){var e=this.tip(),t=this.getTitle();e.find(".tooltip-inner")[this.options.html?"html":"text"](t),e.removeClass("fade in top bottom left right")},hide:function(){function i(){var t=setTimeout(function(){n.off(e.support.transition.end).detach()},500);n.one(e.support.transition.end,function(){clearTimeout(t),n.detach()})}var t=this,n=this.tip(),r=e.Event("hide");this.$element.trigger(r);if(r.isDefaultPrevented())return;return 
n.removeClass("in"),e.support.transition&&this.$tip.hasClass("fade")?i():n.detach(),this.$element.trigger("hidden"),this},fixTitle:function(){var e=this.$element;(e.attr("title")||typeof e.attr("data-original-title")!="string")&&e.attr("data-original-title",e.attr("title")||"").attr("title","")},hasContent:function(){return this.getTitle()},getPosition:function(){var t=this.$element[0];return e.extend({},typeof t.getBoundingClientRect=="function"?t.getBoundingClientRect():{width:t.offsetWidth,height:t.offsetHeight},this.$element.offset())},getTitle:function(){var e,t=this.$element,n=this.options;return e=t.attr("data-original-title")||(typeof n.title=="function"?n.title.call(t[0]):n.title),e},tip:function(){return this.$tip=this.$tip||e(this.options.template)},arrow:function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},validate:function(){this.$element[0].parentNode||(this.hide(),this.$element=null,this.options=null)},enable:function(){this.enabled=!0},disable:function(){this.enabled=!1},toggleEnabled:function(){this.enabled=!this.enabled},toggle:function(t){var n=t?e(t.currentTarget)[this.type](this._options).data(this.type):this;n.tip().hasClass("in")?n.hide():n.show()},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}};var n=e.fn.tooltip;e.fn.tooltip=function(n){return this.each(function(){var r=e(this),i=r.data("tooltip"),s=typeof n=="object"&&n;i||r.data("tooltip",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.tooltip.Constructor=t,e.fn.tooltip.defaults={animation:!0,placement:"top",selector:!1,template:'
      ',trigger:"hover focus",title:"",delay:0,html:!1,container:!1},e.fn.tooltip.noConflict=function(){return e.fn.tooltip=n,this}}(window.jQuery),!function(e){"use strict";var t=function(e,t){this.init("popover",e,t)};t.prototype=e.extend({},e.fn.tooltip.Constructor.prototype,{constructor:t,setContent:function(){var e=this.tip(),t=this.getTitle(),n=this.getContent();e.find(".popover-title")[this.options.html?"html":"text"](t),e.find(".popover-content")[this.options.html?"html":"text"](n),e.removeClass("fade top bottom left right in")},hasContent:function(){return this.getTitle()||this.getContent()},getContent:function(){var e,t=this.$element,n=this.options;return e=(typeof n.content=="function"?n.content.call(t[0]):n.content)||t.attr("data-content"),e},tip:function(){return this.$tip||(this.$tip=e(this.options.template)),this.$tip},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}});var n=e.fn.popover;e.fn.popover=function(n){return this.each(function(){var r=e(this),i=r.data("popover"),s=typeof n=="object"&&n;i||r.data("popover",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.popover.Constructor=t,e.fn.popover.defaults=e.extend({},e.fn.tooltip.defaults,{placement:"right",trigger:"click",content:"",template:'

      '}),e.fn.popover.noConflict=function(){return e.fn.popover=n,this}}(window.jQuery),!function(e){"use strict";function t(t,n){var r=e.proxy(this.process,this),i=e(t).is("body")?e(window):e(t),s;this.options=e.extend({},e.fn.scrollspy.defaults,n),this.$scrollElement=i.on("scroll.scroll-spy.data-api",r),this.selector=(this.options.target||(s=e(t).attr("href"))&&s.replace(/.*(?=#[^\s]+$)/,"")||"")+" .nav li > a",this.$body=e("body"),this.refresh(),this.process()}t.prototype={constructor:t,refresh:function(){var t=this,n;this.offsets=e([]),this.targets=e([]),n=this.$body.find(this.selector).map(function(){var n=e(this),r=n.data("target")||n.attr("href"),i=/^#\w/.test(r)&&e(r);return i&&i.length&&[[i.position().top+(!e.isWindow(t.$scrollElement.get(0))&&t.$scrollElement.scrollTop()),r]]||null}).sort(function(e,t){return e[0]-t[0]}).each(function(){t.offsets.push(this[0]),t.targets.push(this[1])})},process:function(){var e=this.$scrollElement.scrollTop()+this.options.offset,t=this.$scrollElement[0].scrollHeight||this.$body[0].scrollHeight,n=t-this.$scrollElement.height(),r=this.offsets,i=this.targets,s=this.activeTarget,o;if(e>=n)return s!=(o=i.last()[0])&&this.activate(o);for(o=r.length;o--;)s!=i[o]&&e>=r[o]&&(!r[o+1]||e<=r[o+1])&&this.activate(i[o])},activate:function(t){var n,r;this.activeTarget=t,e(this.selector).parent(".active").removeClass("active"),r=this.selector+'[data-target="'+t+'"],'+this.selector+'[href="'+t+'"]',n=e(r).parent("li").addClass("active"),n.parent(".dropdown-menu").length&&(n=n.closest("li.dropdown").addClass("active")),n.trigger("activate")}};var n=e.fn.scrollspy;e.fn.scrollspy=function(n){return this.each(function(){var r=e(this),i=r.data("scrollspy"),s=typeof n=="object"&&n;i||r.data("scrollspy",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.scrollspy.Constructor=t,e.fn.scrollspy.defaults={offset:10},e.fn.scrollspy.noConflict=function(){return 
e.fn.scrollspy=n,this},e(window).on("load",function(){e('[data-spy="scroll"]').each(function(){var t=e(this);t.scrollspy(t.data())})})}(window.jQuery),!function(e){"use strict";var t=function(t){this.element=e(t)};t.prototype={constructor:t,show:function(){var t=this.element,n=t.closest("ul:not(.dropdown-menu)"),r=t.attr("data-target"),i,s,o;r||(r=t.attr("href"),r=r&&r.replace(/.*(?=#[^\s]*$)/,""));if(t.parent("li").hasClass("active"))return;i=n.find(".active:last a")[0],o=e.Event("show",{relatedTarget:i}),t.trigger(o);if(o.isDefaultPrevented())return;s=e(r),this.activate(t.parent("li"),n),this.activate(s,s.parent(),function(){t.trigger({type:"shown",relatedTarget:i})})},activate:function(t,n,r){function o(){i.removeClass("active").find("> .dropdown-menu > .active").removeClass("active"),t.addClass("active"),s?(t[0].offsetWidth,t.addClass("in")):t.removeClass("fade"),t.parent(".dropdown-menu")&&t.closest("li.dropdown").addClass("active"),r&&r()}var i=n.find("> .active"),s=r&&e.support.transition&&i.hasClass("fade");s?i.one(e.support.transition.end,o):o(),i.removeClass("in")}};var n=e.fn.tab;e.fn.tab=function(n){return this.each(function(){var r=e(this),i=r.data("tab");i||r.data("tab",i=new t(this)),typeof n=="string"&&i[n]()})},e.fn.tab.Constructor=t,e.fn.tab.noConflict=function(){return e.fn.tab=n,this},e(document).on("click.tab.data-api",'[data-toggle="tab"], [data-toggle="pill"]',function(t){t.preventDefault(),e(this).tab("show")})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.typeahead.defaults,n),this.matcher=this.options.matcher||this.matcher,this.sorter=this.options.sorter||this.sorter,this.highlighter=this.options.highlighter||this.highlighter,this.updater=this.options.updater||this.updater,this.source=this.options.source,this.$menu=e(this.options.menu),this.shown=!1,this.listen()};t.prototype={constructor:t,select:function(){var e=this.$menu.find(".active").attr("data-value");return 
this.$element.val(this.updater(e)).change(),this.hide()},updater:function(e){return e},show:function(){var t=e.extend({},this.$element.position(),{height:this.$element[0].offsetHeight});return this.$menu.insertAfter(this.$element).css({top:t.top+t.height,left:t.left}).show(),this.shown=!0,this},hide:function(){return this.$menu.hide(),this.shown=!1,this},lookup:function(t){var n;return this.query=this.$element.val(),!this.query||this.query.length"+t+""})},render:function(t){var n=this;return t=e(t).map(function(t,r){return t=e(n.options.item).attr("data-value",r),t.find("a").html(n.highlighter(r)),t[0]}),t.first().addClass("active"),this.$menu.html(t),this},next:function(t){var n=this.$menu.find(".active").removeClass("active"),r=n.next();r.length||(r=e(this.$menu.find("li")[0])),r.addClass("active")},prev:function(e){var t=this.$menu.find(".active").removeClass("active"),n=t.prev();n.length||(n=this.$menu.find("li").last()),n.addClass("active")},listen:function(){this.$element.on("focus",e.proxy(this.focus,this)).on("blur",e.proxy(this.blur,this)).on("keypress",e.proxy(this.keypress,this)).on("keyup",e.proxy(this.keyup,this)),this.eventSupported("keydown")&&this.$element.on("keydown",e.proxy(this.keydown,this)),this.$menu.on("click",e.proxy(this.click,this)).on("mouseenter","li",e.proxy(this.mouseenter,this)).on("mouseleave","li",e.proxy(this.mouseleave,this))},eventSupported:function(e){var t=e in this.$element;return t||(this.$element.setAttribute(e,"return;"),t=typeof this.$element[e]=="function"),t},move:function(e){if(!this.shown)return;switch(e.keyCode){case 9:case 13:case 27:e.preventDefault();break;case 38:e.preventDefault(),this.prev();break;case 40:e.preventDefault(),this.next()}e.stopPropagation()},keydown:function(t){this.suppressKeyPressRepeat=~e.inArray(t.keyCode,[40,38,9,13,27]),this.move(t)},keypress:function(e){if(this.suppressKeyPressRepeat)return;this.move(e)},keyup:function(e){switch(e.keyCode){case 40:case 38:case 16:case 17:case 18:break;case 
9:case 13:if(!this.shown)return;this.select();break;case 27:if(!this.shown)return;this.hide();break;default:this.lookup()}e.stopPropagation(),e.preventDefault()},focus:function(e){this.focused=!0},blur:function(e){this.focused=!1,!this.mousedover&&this.shown&&this.hide()},click:function(e){e.stopPropagation(),e.preventDefault(),this.select(),this.$element.focus()},mouseenter:function(t){this.mousedover=!0,this.$menu.find(".active").removeClass("active"),e(t.currentTarget).addClass("active")},mouseleave:function(e){this.mousedover=!1,!this.focused&&this.shown&&this.hide()}};var n=e.fn.typeahead;e.fn.typeahead=function(n){return this.each(function(){var r=e(this),i=r.data("typeahead"),s=typeof n=="object"&&n;i||r.data("typeahead",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.typeahead.defaults={source:[],items:8,menu:'',item:'
    • ',minLength:1},e.fn.typeahead.Constructor=t,e.fn.typeahead.noConflict=function(){return e.fn.typeahead=n,this},e(document).on("focus.typeahead.data-api",'[data-provide="typeahead"]',function(t){var n=e(this);if(n.data("typeahead"))return;n.typeahead(n.data())})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.options=e.extend({},e.fn.affix.defaults,n),this.$window=e(window).on("scroll.affix.data-api",e.proxy(this.checkPosition,this)).on("click.affix.data-api",e.proxy(function(){setTimeout(e.proxy(this.checkPosition,this),1)},this)),this.$element=e(t),this.checkPosition()};t.prototype.checkPosition=function(){if(!this.$element.is(":visible"))return;var t=e(document).height(),n=this.$window.scrollTop(),r=this.$element.offset(),i=this.options.offset,s=i.bottom,o=i.top,u="affix affix-top affix-bottom",a;typeof i!="object"&&(s=o=i),typeof o=="function"&&(o=i.top()),typeof s=="function"&&(s=i.bottom()),a=this.unpin!=null&&n+this.unpin<=r.top?!1:s!=null&&r.top+this.$element.height()>=t-s?"bottom":o!=null&&n<=o?"top":!1;if(this.affixed===a)return;this.affixed=a,this.unpin=a=="bottom"?r.top-n:null,this.$element.removeClass(u).addClass("affix"+(a?"-"+a:""))};var n=e.fn.affix;e.fn.affix=function(n){return this.each(function(){var r=e(this),i=r.data("affix"),s=typeof n=="object"&&n;i||r.data("affix",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.affix.Constructor=t,e.fn.affix.defaults={offset:0},e.fn.affix.noConflict=function(){return e.fn.affix=n,this},e(window).on("load",function(){e('[data-spy="affix"]').each(function(){var t=e(this),n=t.data();n.offset=n.offset||{},n.offsetBottom&&(n.offset.bottom=n.offsetBottom),n.offsetTop&&(n.offset.top=n.offsetTop),t.affix(n)})})}(window.jQuery); \ No newline at end of file diff --git a/rpkid/rpki/gui/app/static/js/jquery-1.8.3.min.js b/rpkid/rpki/gui/app/static/js/jquery-1.8.3.min.js deleted file mode 100644 index 83589daa..00000000 --- a/rpkid/rpki/gui/app/static/js/jquery-1.8.3.min.js +++ /dev/null @@ 
-1,2 +0,0 @@ -/*! jQuery v1.8.3 jquery.com | jquery.org/license */ -(function(e,t){function _(e){var t=M[e]={};return v.each(e.split(y),function(e,n){t[n]=!0}),t}function H(e,n,r){if(r===t&&e.nodeType===1){var i="data-"+n.replace(P,"-$1").toLowerCase();r=e.getAttribute(i);if(typeof r=="string"){try{r=r==="true"?!0:r==="false"?!1:r==="null"?null:+r+""===r?+r:D.test(r)?v.parseJSON(r):r}catch(s){}v.data(e,n,r)}else r=t}return r}function B(e){var t;for(t in e){if(t==="data"&&v.isEmptyObject(e[t]))continue;if(t!=="toJSON")return!1}return!0}function et(){return!1}function tt(){return!0}function ut(e){return!e||!e.parentNode||e.parentNode.nodeType===11}function at(e,t){do e=e[t];while(e&&e.nodeType!==1);return e}function ft(e,t,n){t=t||0;if(v.isFunction(t))return v.grep(e,function(e,r){var i=!!t.call(e,r,e);return i===n});if(t.nodeType)return v.grep(e,function(e,r){return e===t===n});if(typeof t=="string"){var r=v.grep(e,function(e){return e.nodeType===1});if(it.test(t))return v.filter(t,r,!n);t=v.filter(t,r)}return v.grep(e,function(e,r){return v.inArray(e,t)>=0===n})}function lt(e){var t=ct.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return n}function Lt(e,t){return e.getElementsByTagName(t)[0]||e.appendChild(e.ownerDocument.createElement(t))}function At(e,t){if(t.nodeType!==1||!v.hasData(e))return;var n,r,i,s=v._data(e),o=v._data(t,s),u=s.events;if(u){delete o.handle,o.events={};for(n in u)for(r=0,i=u[n].length;r").appendTo(i.body),n=t.css("display");t.remove();if(n==="none"||n===""){Pt=i.body.appendChild(Pt||v.extend(i.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!Ht||!Pt.createElement)Ht=(Pt.contentWindow||Pt.contentDocument).document,Ht.write(""),Ht.close();t=Ht.body.appendChild(Ht.createElement(e)),n=Dt(t,"display"),i.body.removeChild(Pt)}return Wt[e]=n,n}function fn(e,t,n,r){var i;if(v.isArray(t))v.each(t,function(t,i){n||sn.test(e)?r(e,i):fn(e+"["+(typeof i=="object"?t:"")+"]",i,n,r)});else 
if(!n&&v.type(t)==="object")for(i in t)fn(e+"["+i+"]",t[i],n,r);else r(e,t)}function Cn(e){return function(t,n){typeof t!="string"&&(n=t,t="*");var r,i,s,o=t.toLowerCase().split(y),u=0,a=o.length;if(v.isFunction(n))for(;u)[^>]*$|#([\w\-]*)$)/,E=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,S=/^[\],:{}\s]*$/,x=/(?:^|:|,)(?:\s*\[)+/g,T=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,N=/"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g,C=/^-ms-/,k=/-([\da-z])/gi,L=function(e,t){return(t+"").toUpperCase()},A=function(){i.addEventListener?(i.removeEventListener("DOMContentLoaded",A,!1),v.ready()):i.readyState==="complete"&&(i.detachEvent("onreadystatechange",A),v.ready())},O={};v.fn=v.prototype={constructor:v,init:function(e,n,r){var s,o,u,a;if(!e)return this;if(e.nodeType)return this.context=this[0]=e,this.length=1,this;if(typeof e=="string"){e.charAt(0)==="<"&&e.charAt(e.length-1)===">"&&e.length>=3?s=[null,e,null]:s=w.exec(e);if(s&&(s[1]||!n)){if(s[1])return n=n instanceof v?n[0]:n,a=n&&n.nodeType?n.ownerDocument||n:i,e=v.parseHTML(s[1],a,!0),E.test(s[1])&&v.isPlainObject(n)&&this.attr.call(e,n,!0),v.merge(this,e);o=i.getElementById(s[2]);if(o&&o.parentNode){if(o.id!==s[2])return r.find(e);this.length=1,this[0]=o}return this.context=i,this.selector=e,this}return!n||n.jquery?(n||r).find(e):this.constructor(n).find(e)}return v.isFunction(e)?r.ready(e):(e.selector!==t&&(this.selector=e.selector,this.context=e.context),v.makeArray(e,this))},selector:"",jquery:"1.8.3",length:0,size:function(){return this.length},toArray:function(){return l.call(this)},get:function(e){return e==null?this.toArray():e<0?this[this.length+e]:this[e]},pushStack:function(e,t,n){var r=v.merge(this.constructor(),e);return r.prevObject=this,r.context=this.context,t==="find"?r.selector=this.selector+(this.selector?" 
":"")+n:t&&(r.selector=this.selector+"."+t+"("+n+")"),r},each:function(e,t){return v.each(this,e,t)},ready:function(e){return v.ready.promise().done(e),this},eq:function(e){return e=+e,e===-1?this.slice(e):this.slice(e,e+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(l.apply(this,arguments),"slice",l.call(arguments).join(","))},map:function(e){return this.pushStack(v.map(this,function(t,n){return e.call(t,n,t)}))},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:[].sort,splice:[].splice},v.fn.init.prototype=v.fn,v.extend=v.fn.extend=function(){var e,n,r,i,s,o,u=arguments[0]||{},a=1,f=arguments.length,l=!1;typeof u=="boolean"&&(l=u,u=arguments[1]||{},a=2),typeof u!="object"&&!v.isFunction(u)&&(u={}),f===a&&(u=this,--a);for(;a0)return;r.resolveWith(i,[v]),v.fn.trigger&&v(i).trigger("ready").off("ready")},isFunction:function(e){return v.type(e)==="function"},isArray:Array.isArray||function(e){return v.type(e)==="array"},isWindow:function(e){return e!=null&&e==e.window},isNumeric:function(e){return!isNaN(parseFloat(e))&&isFinite(e)},type:function(e){return e==null?String(e):O[h.call(e)]||"object"},isPlainObject:function(e){if(!e||v.type(e)!=="object"||e.nodeType||v.isWindow(e))return!1;try{if(e.constructor&&!p.call(e,"constructor")&&!p.call(e.constructor.prototype,"isPrototypeOf"))return!1}catch(n){return!1}var r;for(r in e);return r===t||p.call(e,r)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},error:function(e){throw new Error(e)},parseHTML:function(e,t,n){var r;return!e||typeof e!="string"?null:(typeof t=="boolean"&&(n=t,t=0),t=t||i,(r=E.exec(e))?[t.createElement(r[1])]:(r=v.buildFragment([e],t,n?null:[]),v.merge([],(r.cacheable?v.clone(r.fragment):r.fragment).childNodes)))},parseJSON:function(t){if(!t||typeof t!="string")return null;t=v.trim(t);if(e.JSON&&e.JSON.parse)return 
e.JSON.parse(t);if(S.test(t.replace(T,"@").replace(N,"]").replace(x,"")))return(new Function("return "+t))();v.error("Invalid JSON: "+t)},parseXML:function(n){var r,i;if(!n||typeof n!="string")return null;try{e.DOMParser?(i=new DOMParser,r=i.parseFromString(n,"text/xml")):(r=new ActiveXObject("Microsoft.XMLDOM"),r.async="false",r.loadXML(n))}catch(s){r=t}return(!r||!r.documentElement||r.getElementsByTagName("parsererror").length)&&v.error("Invalid XML: "+n),r},noop:function(){},globalEval:function(t){t&&g.test(t)&&(e.execScript||function(t){e.eval.call(e,t)})(t)},camelCase:function(e){return e.replace(C,"ms-").replace(k,L)},nodeName:function(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()},each:function(e,n,r){var i,s=0,o=e.length,u=o===t||v.isFunction(e);if(r){if(u){for(i in e)if(n.apply(e[i],r)===!1)break}else for(;s0&&e[0]&&e[a-1]||a===0||v.isArray(e));if(f)for(;u-1)a.splice(n,1),i&&(n<=o&&o--,n<=u&&u--)}),this},has:function(e){return v.inArray(e,a)>-1},empty:function(){return a=[],this},disable:function(){return a=f=n=t,this},disabled:function(){return!a},lock:function(){return f=t,n||c.disable(),this},locked:function(){return!f},fireWith:function(e,t){return t=t||[],t=[e,t.slice?t.slice():t],a&&(!r||f)&&(i?f.push(t):l(t)),this},fire:function(){return c.fireWith(this,arguments),this},fired:function(){return!!r}};return c},v.extend({Deferred:function(e){var t=[["resolve","done",v.Callbacks("once memory"),"resolved"],["reject","fail",v.Callbacks("once memory"),"rejected"],["notify","progress",v.Callbacks("memory")]],n="pending",r={state:function(){return n},always:function(){return i.done(arguments).fail(arguments),this},then:function(){var e=arguments;return v.Deferred(function(n){v.each(t,function(t,r){var s=r[0],o=e[t];i[r[1]](v.isFunction(o)?function(){var 
e=o.apply(this,arguments);e&&v.isFunction(e.promise)?e.promise().done(n.resolve).fail(n.reject).progress(n.notify):n[s+"With"](this===i?n:this,[e])}:n[s])}),e=null}).promise()},promise:function(e){return e!=null?v.extend(e,r):r}},i={};return r.pipe=r.then,v.each(t,function(e,s){var o=s[2],u=s[3];r[s[1]]=o.add,u&&o.add(function(){n=u},t[e^1][2].disable,t[2][2].lock),i[s[0]]=o.fire,i[s[0]+"With"]=o.fireWith}),r.promise(i),e&&e.call(i,i),i},when:function(e){var t=0,n=l.call(arguments),r=n.length,i=r!==1||e&&v.isFunction(e.promise)?r:0,s=i===1?e:v.Deferred(),o=function(e,t,n){return function(r){t[e]=this,n[e]=arguments.length>1?l.call(arguments):r,n===u?s.notifyWith(t,n):--i||s.resolveWith(t,n)}},u,a,f;if(r>1){u=new Array(r),a=new Array(r),f=new Array(r);for(;t
      a",n=p.getElementsByTagName("*"),r=p.getElementsByTagName("a")[0];if(!n||!r||!n.length)return{};s=i.createElement("select"),o=s.appendChild(i.createElement("option")),u=p.getElementsByTagName("input")[0],r.style.cssText="top:1px;float:left;opacity:.5",t={leadingWhitespace:p.firstChild.nodeType===3,tbody:!p.getElementsByTagName("tbody").length,htmlSerialize:!!p.getElementsByTagName("link").length,style:/top/.test(r.getAttribute("style")),hrefNormalized:r.getAttribute("href")==="/a",opacity:/^0.5/.test(r.style.opacity),cssFloat:!!r.style.cssFloat,checkOn:u.value==="on",optSelected:o.selected,getSetAttribute:p.className!=="t",enctype:!!i.createElement("form").enctype,html5Clone:i.createElement("nav").cloneNode(!0).outerHTML!=="<:nav>",boxModel:i.compatMode==="CSS1Compat",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,boxSizingReliable:!0,pixelPosition:!1},u.checked=!0,t.noCloneChecked=u.cloneNode(!0).checked,s.disabled=!0,t.optDisabled=!o.disabled;try{delete p.test}catch(d){t.deleteExpando=!1}!p.addEventListener&&p.attachEvent&&p.fireEvent&&(p.attachEvent("onclick",h=function(){t.noCloneEvent=!1}),p.cloneNode(!0).fireEvent("onclick"),p.detachEvent("onclick",h)),u=i.createElement("input"),u.value="t",u.setAttribute("type","radio"),t.radioValue=u.value==="t",u.setAttribute("checked","checked"),u.setAttribute("name","t"),p.appendChild(u),a=i.createDocumentFragment(),a.appendChild(p.lastChild),t.checkClone=a.cloneNode(!0).cloneNode(!0).lastChild.checked,t.appendChecked=u.checked,a.removeChild(u),a.appendChild(p);if(p.attachEvent)for(l in{submit:!0,change:!0,focusin:!0})f="on"+l,c=f in p,c||(p.setAttribute(f,"return;"),c=typeof p[f]=="function"),t[l+"Bubbles"]=c;return v(function(){var 
n,r,s,o,u="padding:0;margin:0;border:0;display:block;overflow:hidden;",a=i.getElementsByTagName("body")[0];if(!a)return;n=i.createElement("div"),n.style.cssText="visibility:hidden;border:0;width:0;height:0;position:static;top:0;margin-top:1px",a.insertBefore(n,a.firstChild),r=i.createElement("div"),n.appendChild(r),r.innerHTML="
      t
      ",s=r.getElementsByTagName("td"),s[0].style.cssText="padding:0;margin:0;border:0;display:none",c=s[0].offsetHeight===0,s[0].style.display="",s[1].style.display="none",t.reliableHiddenOffsets=c&&s[0].offsetHeight===0,r.innerHTML="",r.style.cssText="box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;",t.boxSizing=r.offsetWidth===4,t.doesNotIncludeMarginInBodyOffset=a.offsetTop!==1,e.getComputedStyle&&(t.pixelPosition=(e.getComputedStyle(r,null)||{}).top!=="1%",t.boxSizingReliable=(e.getComputedStyle(r,null)||{width:"4px"}).width==="4px",o=i.createElement("div"),o.style.cssText=r.style.cssText=u,o.style.marginRight=o.style.width="0",r.style.width="1px",r.appendChild(o),t.reliableMarginRight=!parseFloat((e.getComputedStyle(o,null)||{}).marginRight)),typeof r.style.zoom!="undefined"&&(r.innerHTML="",r.style.cssText=u+"width:1px;padding:1px;display:inline;zoom:1",t.inlineBlockNeedsLayout=r.offsetWidth===3,r.style.display="block",r.style.overflow="visible",r.innerHTML="
      ",r.firstChild.style.width="5px",t.shrinkWrapBlocks=r.offsetWidth!==3,n.style.zoom=1),a.removeChild(n),n=r=s=o=null}),a.removeChild(p),n=r=s=o=u=a=p=null,t}();var D=/(?:\{[\s\S]*\}|\[[\s\S]*\])$/,P=/([A-Z])/g;v.extend({cache:{},deletedIds:[],uuid:0,expando:"jQuery"+(v.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(e){return e=e.nodeType?v.cache[e[v.expando]]:e[v.expando],!!e&&!B(e)},data:function(e,n,r,i){if(!v.acceptData(e))return;var s,o,u=v.expando,a=typeof n=="string",f=e.nodeType,l=f?v.cache:e,c=f?e[u]:e[u]&&u;if((!c||!l[c]||!i&&!l[c].data)&&a&&r===t)return;c||(f?e[u]=c=v.deletedIds.pop()||v.guid++:c=u),l[c]||(l[c]={},f||(l[c].toJSON=v.noop));if(typeof n=="object"||typeof n=="function")i?l[c]=v.extend(l[c],n):l[c].data=v.extend(l[c].data,n);return s=l[c],i||(s.data||(s.data={}),s=s.data),r!==t&&(s[v.camelCase(n)]=r),a?(o=s[n],o==null&&(o=s[v.camelCase(n)])):o=s,o},removeData:function(e,t,n){if(!v.acceptData(e))return;var r,i,s,o=e.nodeType,u=o?v.cache:e,a=o?e[v.expando]:v.expando;if(!u[a])return;if(t){r=n?u[a]:u[a].data;if(r){v.isArray(t)||(t in r?t=[t]:(t=v.camelCase(t),t in r?t=[t]:t=t.split(" ")));for(i=0,s=t.length;i1,null,!1))},removeData:function(e){return this.each(function(){v.removeData(this,e)})}}),v.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=v._data(e,t),n&&(!r||v.isArray(n)?r=v._data(e,t,v.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=v.queue(e,t),r=n.length,i=n.shift(),s=v._queueHooks(e,t),o=function(){v.dequeue(e,t)};i==="inprogress"&&(i=n.shift(),r--),i&&(t==="fx"&&n.unshift("inprogress"),delete s.stop,i.call(e,o,s)),!r&&s&&s.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return v._data(e,n)||v._data(e,n,{empty:v.Callbacks("once memory").add(function(){v.removeData(e,t+"queue",!0),v.removeData(e,n,!0)})})}}),v.fn.extend({queue:function(e,n){var r=2;return typeof 
e!="string"&&(n=e,e="fx",r--),arguments.length1)},removeAttr:function(e){return this.each(function(){v.removeAttr(this,e)})},prop:function(e,t){return v.access(this,v.prop,e,t,arguments.length>1)},removeProp:function(e){return e=v.propFix[e]||e,this.each(function(){try{this[e]=t,delete this[e]}catch(n){}})},addClass:function(e){var t,n,r,i,s,o,u;if(v.isFunction(e))return this.each(function(t){v(this).addClass(e.call(this,t,this.className))});if(e&&typeof e=="string"){t=e.split(y);for(n=0,r=this.length;n=0)r=r.replace(" "+n[s]+" "," ");i.className=e?v.trim(r):""}}}return this},toggleClass:function(e,t){var n=typeof e,r=typeof t=="boolean";return v.isFunction(e)?this.each(function(n){v(this).toggleClass(e.call(this,n,this.className,t),t)}):this.each(function(){if(n==="string"){var i,s=0,o=v(this),u=t,a=e.split(y);while(i=a[s++])u=r?u:!o.hasClass(i),o[u?"addClass":"removeClass"](i)}else if(n==="undefined"||n==="boolean")this.className&&v._data(this,"__className__",this.className),this.className=this.className||e===!1?"":v._data(this,"__className__")||""})},hasClass:function(e){var t=" "+e+" ",n=0,r=this.length;for(;n=0)return!0;return!1},val:function(e){var n,r,i,s=this[0];if(!arguments.length){if(s)return n=v.valHooks[s.type]||v.valHooks[s.nodeName.toLowerCase()],n&&"get"in n&&(r=n.get(s,"value"))!==t?r:(r=s.value,typeof r=="string"?r.replace(R,""):r==null?"":r);return}return i=v.isFunction(e),this.each(function(r){var s,o=v(this);if(this.nodeType!==1)return;i?s=e.call(this,r,o.val()):s=e,s==null?s="":typeof s=="number"?s+="":v.isArray(s)&&(s=v.map(s,function(e){return e==null?"":e+""})),n=v.valHooks[this.type]||v.valHooks[this.nodeName.toLowerCase()];if(!n||!("set"in n)||n.set(this,s,"value")===t)this.value=s})}}),v.extend({valHooks:{option:{get:function(e){var t=e.attributes.value;return!t||t.specified?e.value:e.text}},select:{get:function(e){var 
t,n,r=e.options,i=e.selectedIndex,s=e.type==="select-one"||i<0,o=s?null:[],u=s?i+1:r.length,a=i<0?u:s?i:0;for(;a=0}),n.length||(e.selectedIndex=-1),n}}},attrFn:{},attr:function(e,n,r,i){var s,o,u,a=e.nodeType;if(!e||a===3||a===8||a===2)return;if(i&&v.isFunction(v.fn[n]))return v(e)[n](r);if(typeof e.getAttribute=="undefined")return v.prop(e,n,r);u=a!==1||!v.isXMLDoc(e),u&&(n=n.toLowerCase(),o=v.attrHooks[n]||(X.test(n)?F:j));if(r!==t){if(r===null){v.removeAttr(e,n);return}return o&&"set"in o&&u&&(s=o.set(e,r,n))!==t?s:(e.setAttribute(n,r+""),r)}return o&&"get"in o&&u&&(s=o.get(e,n))!==null?s:(s=e.getAttribute(n),s===null?t:s)},removeAttr:function(e,t){var n,r,i,s,o=0;if(t&&e.nodeType===1){r=t.split(y);for(;o=0}})});var $=/^(?:textarea|input|select)$/i,J=/^([^\.]*|)(?:\.(.+)|)$/,K=/(?:^|\s)hover(\.\S+|)\b/,Q=/^key/,G=/^(?:mouse|contextmenu)|click/,Y=/^(?:focusinfocus|focusoutblur)$/,Z=function(e){return v.event.special.hover?e:e.replace(K,"mouseenter$1 mouseleave$1")};v.event={add:function(e,n,r,i,s){var o,u,a,f,l,c,h,p,d,m,g;if(e.nodeType===3||e.nodeType===8||!n||!r||!(o=v._data(e)))return;r.handler&&(d=r,r=d.handler,s=d.selector),r.guid||(r.guid=v.guid++),a=o.events,a||(o.events=a={}),u=o.handle,u||(o.handle=u=function(e){return typeof v=="undefined"||!!e&&v.event.triggered===e.type?t:v.event.dispatch.apply(u.elem,arguments)},u.elem=e),n=v.trim(Z(n)).split(" ");for(f=0;f=0&&(y=y.slice(0,-1),a=!0),y.indexOf(".")>=0&&(b=y.split("."),y=b.shift(),b.sort());if((!s||v.event.customEvent[y])&&!v.event.global[y])return;n=typeof n=="object"?n[v.expando]?n:new v.Event(y,n):new v.Event(y),n.type=y,n.isTrigger=!0,n.exclusive=a,n.namespace=b.join("."),n.namespace_re=n.namespace?new RegExp("(^|\\.)"+b.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,h=y.indexOf(":")<0?"on"+y:"";if(!s){u=v.cache;for(f in 
u)u[f].events&&u[f].events[y]&&v.event.trigger(n,r,u[f].handle.elem,!0);return}n.result=t,n.target||(n.target=s),r=r!=null?v.makeArray(r):[],r.unshift(n),p=v.event.special[y]||{};if(p.trigger&&p.trigger.apply(s,r)===!1)return;m=[[s,p.bindType||y]];if(!o&&!p.noBubble&&!v.isWindow(s)){g=p.delegateType||y,l=Y.test(g+y)?s:s.parentNode;for(c=s;l;l=l.parentNode)m.push([l,g]),c=l;c===(s.ownerDocument||i)&&m.push([c.defaultView||c.parentWindow||e,g])}for(f=0;f=0:v.find(h,this,null,[s]).length),u[h]&&f.push(c);f.length&&w.push({elem:s,matches:f})}d.length>m&&w.push({elem:this,matches:d.slice(m)});for(r=0;r0?this.on(t,null,e,n):this.trigger(t)},Q.test(t)&&(v.event.fixHooks[t]=v.event.keyHooks),G.test(t)&&(v.event.fixHooks[t]=v.event.mouseHooks)}),function(e,t){function nt(e,t,n,r){n=n||[],t=t||g;var i,s,a,f,l=t.nodeType;if(!e||typeof e!="string")return n;if(l!==1&&l!==9)return[];a=o(t);if(!a&&!r)if(i=R.exec(e))if(f=i[1]){if(l===9){s=t.getElementById(f);if(!s||!s.parentNode)return n;if(s.id===f)return n.push(s),n}else if(t.ownerDocument&&(s=t.ownerDocument.getElementById(f))&&u(t,s)&&s.id===f)return n.push(s),n}else{if(i[2])return S.apply(n,x.call(t.getElementsByTagName(e),0)),n;if((f=i[3])&&Z&&t.getElementsByClassName)return S.apply(n,x.call(t.getElementsByClassName(f),0)),n}return vt(e.replace(j,"$1"),t,n,r,a)}function rt(e){return function(t){var n=t.nodeName.toLowerCase();return n==="input"&&t.type===e}}function it(e){return function(t){var n=t.nodeName.toLowerCase();return(n==="input"||n==="button")&&t.type===e}}function st(e){return N(function(t){return t=+t,N(function(n,r){var i,s=e([],n.length,t),o=s.length;while(o--)n[i=s[o]]&&(n[i]=!(r[i]=n[i]))})})}function ot(e,t,n){if(e===t)return n;var r=e.nextSibling;while(r){if(r===t)return-1;r=r.nextSibling}return 1}function ut(e,t){var n,r,s,o,u,a,f,l=L[d][e+" "];if(l)return t?0:l.slice(0);u=e,a=[],f=i.preFilter;while(u){if(!n||(r=F.exec(u)))r&&(u=u.slice(r[0].length)||u),a.push(s=[]);n=!1;if(r=I.exec(u))s.push(n=new 
m(r.shift())),u=u.slice(n.length),n.type=r[0].replace(j," ");for(o in i.filter)(r=J[o].exec(u))&&(!f[o]||(r=f[o](r)))&&(s.push(n=new m(r.shift())),u=u.slice(n.length),n.type=o,n.matches=r);if(!n)break}return t?u.length:u?nt.error(e):L(e,a).slice(0)}function at(e,t,r){var i=t.dir,s=r&&t.dir==="parentNode",o=w++;return t.first?function(t,n,r){while(t=t[i])if(s||t.nodeType===1)return e(t,n,r)}:function(t,r,u){if(!u){var a,f=b+" "+o+" ",l=f+n;while(t=t[i])if(s||t.nodeType===1){if((a=t[d])===l)return t.sizset;if(typeof a=="string"&&a.indexOf(f)===0){if(t.sizset)return t}else{t[d]=l;if(e(t,r,u))return t.sizset=!0,t;t.sizset=!1}}}else while(t=t[i])if(s||t.nodeType===1)if(e(t,r,u))return t}}function ft(e){return e.length>1?function(t,n,r){var i=e.length;while(i--)if(!e[i](t,n,r))return!1;return!0}:e[0]}function lt(e,t,n,r,i){var s,o=[],u=0,a=e.length,f=t!=null;for(;u-1&&(s[f]=!(o[f]=c))}}else g=lt(g===o?g.splice(d,g.length):g),i?i(null,o,g,a):S.apply(o,g)})}function ht(e){var t,n,r,s=e.length,o=i.relative[e[0].type],u=o||i.relative[" "],a=o?1:0,f=at(function(e){return e===t},u,!0),l=at(function(e){return T.call(t,e)>-1},u,!0),h=[function(e,n,r){return!o&&(r||n!==c)||((t=n).nodeType?f(e,n,r):l(e,n,r))}];for(;a1&&ft(h),a>1&&e.slice(0,a-1).join("").replace(j,"$1"),n,a0,s=e.length>0,o=function(u,a,f,l,h){var p,d,v,m=[],y=0,w="0",x=u&&[],T=h!=null,N=c,C=u||s&&i.find.TAG("*",h&&a.parentNode||a),k=b+=N==null?1:Math.E;T&&(c=a!==g&&a,n=o.el);for(;(p=C[w])!=null;w++){if(s&&p){for(d=0;v=e[d];d++)if(v(p,a,f)){l.push(p);break}T&&(b=k,n=++o.el)}r&&((p=!v&&p)&&y--,u&&x.push(p))}y+=w;if(r&&w!==y){for(d=0;v=t[d];d++)v(x,m,a,f);if(u){if(y>0)while(w--)!x[w]&&!m[w]&&(m[w]=E.call(l));m=lt(m)}S.apply(l,m),T&&!u&&m.length>0&&y+t.length>1&&nt.uniqueSort(l)}return T&&(b=k,c=N),x};return o.el=0,r?N(o):o}function dt(e,t,n){var r=0,i=t.length;for(;r2&&(f=u[0]).type==="ID"&&t.nodeType===9&&!s&&i.relative[u[1].type]){t=i.find.ID(f.matches[0].replace($,""),t,s)[0];if(!t)return 
n;e=e.slice(u.shift().length)}for(o=J.POS.test(e)?-1:u.length-1;o>=0;o--){f=u[o];if(i.relative[l=f.type])break;if(c=i.find[l])if(r=c(f.matches[0].replace($,""),z.test(u[0].type)&&t.parentNode||t,s)){u.splice(o,1),e=r.length&&u.join("");if(!e)return S.apply(n,x.call(r,0)),n;break}}}return a(e,h)(r,t,s,n,z.test(e)),n}function mt(){}var n,r,i,s,o,u,a,f,l,c,h=!0,p="undefined",d=("sizcache"+Math.random()).replace(".",""),m=String,g=e.document,y=g.documentElement,b=0,w=0,E=[].pop,S=[].push,x=[].slice,T=[].indexOf||function(e){var t=0,n=this.length;for(;ti.cacheLength&&delete e[t.shift()],e[n+" "]=r},e)},k=C(),L=C(),A=C(),O="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[-\\w]|[^\\x00-\\xa0])+",_=M.replace("w","w#"),D="([*^$|!~]?=)",P="\\["+O+"*("+M+")"+O+"*(?:"+D+O+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+_+")|)|)"+O+"*\\]",H=":("+M+")(?:\\((?:(['\"])((?:\\\\.|[^\\\\])*?)\\2|([^()[\\]]*|(?:(?:"+P+")|[^:]|\\\\.)*|.*))\\)|)",B=":(even|odd|eq|gt|lt|nth|first|last)(?:\\("+O+"*((?:-\\d)?\\d*)"+O+"*\\)|)(?=[^-]|$)",j=new RegExp("^"+O+"+|((?:^|[^\\\\])(?:\\\\.)*)"+O+"+$","g"),F=new RegExp("^"+O+"*,"+O+"*"),I=new RegExp("^"+O+"*([\\x20\\t\\r\\n\\f>+~])"+O+"*"),q=new RegExp(H),R=/^(?:#([\w\-]+)|(\w+)|\.([\w\-]+))$/,U=/^:not/,z=/[\x20\t\r\n\f]*[+~]/,W=/:not\($/,X=/h\d/i,V=/input|select|textarea|button/i,$=/\\(?!\\)/g,J={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),NAME:new RegExp("^\\[name=['\"]?("+M+")['\"]?\\]"),TAG:new RegExp("^("+M.replace("w","w*")+")"),ATTR:new RegExp("^"+P),PSEUDO:new RegExp("^"+H),POS:new RegExp(B,"i"),CHILD:new RegExp("^:(only|nth|first|last)-child(?:\\("+O+"*(even|odd|(([+-]|)(\\d*)n|)"+O+"*(?:([+-]|)"+O+"*(\\d+)|))"+O+"*\\)|)","i"),needsContext:new RegExp("^"+O+"*[>+~]|"+B,"i")},K=function(e){var t=g.createElement("div");try{return e(t)}catch(n){return!1}finally{t=null}},Q=K(function(e){return e.appendChild(g.createComment("")),!e.getElementsByTagName("*").length}),G=K(function(e){return e.innerHTML="",e.firstChild&&typeof 
e.firstChild.getAttribute!==p&&e.firstChild.getAttribute("href")==="#"}),Y=K(function(e){e.innerHTML="";var t=typeof e.lastChild.getAttribute("multiple");return t!=="boolean"&&t!=="string"}),Z=K(function(e){return e.innerHTML="",!e.getElementsByClassName||!e.getElementsByClassName("e").length?!1:(e.lastChild.className="e",e.getElementsByClassName("e").length===2)}),et=K(function(e){e.id=d+0,e.innerHTML="
      ",y.insertBefore(e,y.firstChild);var t=g.getElementsByName&&g.getElementsByName(d).length===2+g.getElementsByName(d+0).length;return r=!g.getElementById(d),y.removeChild(e),t});try{x.call(y.childNodes,0)[0].nodeType}catch(tt){x=function(e){var t,n=[];for(;t=this[e];e++)n.push(t);return n}}nt.matches=function(e,t){return nt(e,null,null,t)},nt.matchesSelector=function(e,t){return nt(t,null,null,[e]).length>0},s=nt.getText=function(e){var t,n="",r=0,i=e.nodeType;if(i){if(i===1||i===9||i===11){if(typeof e.textContent=="string")return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=s(e)}else if(i===3||i===4)return e.nodeValue}else for(;t=e[r];r++)n+=s(t);return n},o=nt.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return t?t.nodeName!=="HTML":!1},u=nt.contains=y.contains?function(e,t){var n=e.nodeType===9?e.documentElement:e,r=t&&t.parentNode;return e===r||!!(r&&r.nodeType===1&&n.contains&&n.contains(r))}:y.compareDocumentPosition?function(e,t){return t&&!!(e.compareDocumentPosition(t)&16)}:function(e,t){while(t=t.parentNode)if(t===e)return!0;return!1},nt.attr=function(e,t){var n,r=o(e);return r||(t=t.toLowerCase()),(n=i.attrHandle[t])?n(e):r||Y?e.getAttribute(t):(n=e.getAttributeNode(t),n?typeof e[t]=="boolean"?e[t]?t:null:n.specified?n.value:null:null)},i=nt.selectors={cacheLength:50,createPseudo:N,match:J,attrHandle:G?{}:{href:function(e){return e.getAttribute("href",2)},type:function(e){return e.getAttribute("type")}},find:{ID:r?function(e,t,n){if(typeof t.getElementById!==p&&!n){var r=t.getElementById(e);return r&&r.parentNode?[r]:[]}}:function(e,n,r){if(typeof n.getElementById!==p&&!r){var i=n.getElementById(e);return i?i.id===e||typeof i.getAttributeNode!==p&&i.getAttributeNode("id").value===e?[i]:t:[]}},TAG:Q?function(e,t){if(typeof t.getElementsByTagName!==p)return t.getElementsByTagName(e)}:function(e,t){var n=t.getElementsByTagName(e);if(e==="*"){var r,i=[],s=0;for(;r=n[s];s++)r.nodeType===1&&i.push(r);return i}return 
n},NAME:et&&function(e,t){if(typeof t.getElementsByName!==p)return t.getElementsByName(name)},CLASS:Z&&function(e,t,n){if(typeof t.getElementsByClassName!==p&&!n)return t.getElementsByClassName(e)}},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace($,""),e[3]=(e[4]||e[5]||"").replace($,""),e[2]==="~="&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),e[1]==="nth"?(e[2]||nt.error(e[0]),e[3]=+(e[3]?e[4]+(e[5]||1):2*(e[2]==="even"||e[2]==="odd")),e[4]=+(e[6]+e[7]||e[2]==="odd")):e[2]&&nt.error(e[0]),e},PSEUDO:function(e){var t,n;if(J.CHILD.test(e[0]))return null;if(e[3])e[2]=e[3];else if(t=e[4])q.test(t)&&(n=ut(t,!0))&&(n=t.indexOf(")",t.length-n)-t.length)&&(t=t.slice(0,n),e[0]=e[0].slice(0,n)),e[2]=t;return e.slice(0,3)}},filter:{ID:r?function(e){return e=e.replace($,""),function(t){return t.getAttribute("id")===e}}:function(e){return e=e.replace($,""),function(t){var n=typeof t.getAttributeNode!==p&&t.getAttributeNode("id");return n&&n.value===e}},TAG:function(e){return e==="*"?function(){return!0}:(e=e.replace($,"").toLowerCase(),function(t){return t.nodeName&&t.nodeName.toLowerCase()===e})},CLASS:function(e){var t=k[d][e+" "];return t||(t=new RegExp("(^|"+O+")"+e+"("+O+"|$)"))&&k(e,function(e){return t.test(e.className||typeof e.getAttribute!==p&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r,i){var s=nt.attr(r,e);return s==null?t==="!=":t?(s+="",t==="="?s===n:t==="!="?s!==n:t==="^="?n&&s.indexOf(n)===0:t==="*="?n&&s.indexOf(n)>-1:t==="$="?n&&s.substr(s.length-n.length)===n:t==="~="?(" "+s+" ").indexOf(n)>-1:t==="|="?s===n||s.substr(0,n.length+1)===n+"-":!1):!0}},CHILD:function(e,t,n,r){return e==="nth"?function(e){var t,i,s=e.parentNode;if(n===1&&r===0)return!0;if(s){i=0;for(t=s.firstChild;t;t=t.nextSibling)if(t.nodeType===1){i++;if(e===t)break}}return 
i-=r,i===n||i%n===0&&i/n>=0}:function(t){var n=t;switch(e){case"only":case"first":while(n=n.previousSibling)if(n.nodeType===1)return!1;if(e==="first")return!0;n=t;case"last":while(n=n.nextSibling)if(n.nodeType===1)return!1;return!0}}},PSEUDO:function(e,t){var n,r=i.pseudos[e]||i.setFilters[e.toLowerCase()]||nt.error("unsupported pseudo: "+e);return r[d]?r(t):r.length>1?(n=[e,e,"",t],i.setFilters.hasOwnProperty(e.toLowerCase())?N(function(e,n){var i,s=r(e,t),o=s.length;while(o--)i=T.call(e,s[o]),e[i]=!(n[i]=s[o])}):function(e){return r(e,0,n)}):r}},pseudos:{not:N(function(e){var t=[],n=[],r=a(e.replace(j,"$1"));return r[d]?N(function(e,t,n,i){var s,o=r(e,null,i,[]),u=e.length;while(u--)if(s=o[u])e[u]=!(t[u]=s)}):function(e,i,s){return t[0]=e,r(t,null,s,n),!n.pop()}}),has:N(function(e){return function(t){return nt(e,t).length>0}}),contains:N(function(e){return function(t){return(t.textContent||t.innerText||s(t)).indexOf(e)>-1}}),enabled:function(e){return e.disabled===!1},disabled:function(e){return e.disabled===!0},checked:function(e){var t=e.nodeName.toLowerCase();return t==="input"&&!!e.checked||t==="option"&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,e.selected===!0},parent:function(e){return!i.pseudos.empty(e)},empty:function(e){var t;e=e.firstChild;while(e){if(e.nodeName>"@"||(t=e.nodeType)===3||t===4)return!1;e=e.nextSibling}return!0},header:function(e){return X.test(e.nodeName)},text:function(e){var t,n;return e.nodeName.toLowerCase()==="input"&&(t=e.type)==="text"&&((n=e.getAttribute("type"))==null||n.toLowerCase()===t)},radio:rt("radio"),checkbox:rt("checkbox"),file:rt("file"),password:rt("password"),image:rt("image"),submit:it("submit"),reset:it("reset"),button:function(e){var t=e.nodeName.toLowerCase();return t==="input"&&e.type==="button"||t==="button"},input:function(e){return V.test(e.nodeName)},focus:function(e){var t=e.ownerDocument;return 
e===t.activeElement&&(!t.hasFocus||t.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},active:function(e){return e===e.ownerDocument.activeElement},first:st(function(){return[0]}),last:st(function(e,t){return[t-1]}),eq:st(function(e,t,n){return[n<0?n+t:n]}),even:st(function(e,t){for(var n=0;n=0;)e.push(r);return e}),gt:st(function(e,t,n){for(var r=n<0?n+t:n;++r",e.querySelectorAll("[selected]").length||i.push("\\["+O+"*(?:checked|disabled|ismap|multiple|readonly|selected|value)"),e.querySelectorAll(":checked").length||i.push(":checked")}),K(function(e){e.innerHTML="

      ",e.querySelectorAll("[test^='']").length&&i.push("[*^$]="+O+"*(?:\"\"|'')"),e.innerHTML="",e.querySelectorAll(":enabled").length||i.push(":enabled",":disabled")}),i=new RegExp(i.join("|")),vt=function(e,r,s,o,u){if(!o&&!u&&!i.test(e)){var a,f,l=!0,c=d,h=r,p=r.nodeType===9&&e;if(r.nodeType===1&&r.nodeName.toLowerCase()!=="object"){a=ut(e),(l=r.getAttribute("id"))?c=l.replace(n,"\\$&"):r.setAttribute("id",c),c="[id='"+c+"'] ",f=a.length;while(f--)a[f]=c+a[f].join("");h=z.test(e)&&r.parentNode||r,p=a.join(",")}if(p)try{return S.apply(s,x.call(h.querySelectorAll(p),0)),s}catch(v){}finally{l||r.removeAttribute("id")}}return t(e,r,s,o,u)},u&&(K(function(t){e=u.call(t,"div");try{u.call(t,"[test!='']:sizzle"),s.push("!=",H)}catch(n){}}),s=new RegExp(s.join("|")),nt.matchesSelector=function(t,n){n=n.replace(r,"='$1']");if(!o(t)&&!s.test(n)&&!i.test(n))try{var a=u.call(t,n);if(a||e||t.document&&t.document.nodeType!==11)return a}catch(f){}return nt(n,null,null,[t]).length>0})}(),i.pseudos.nth=i.pseudos.eq,i.filters=mt.prototype=i.pseudos,i.setFilters=new mt,nt.attr=v.attr,v.find=nt,v.expr=nt.selectors,v.expr[":"]=v.expr.pseudos,v.unique=nt.uniqueSort,v.text=nt.getText,v.isXMLDoc=nt.isXML,v.contains=nt.contains}(e);var nt=/Until$/,rt=/^(?:parents|prev(?:Until|All))/,it=/^.[^:#\[\.,]*$/,st=v.expr.match.needsContext,ot={children:!0,contents:!0,next:!0,prev:!0};v.fn.extend({find:function(e){var t,n,r,i,s,o,u=this;if(typeof e!="string")return v(e).filter(function(){for(t=0,n=u.length;t0)for(i=r;i=0:v.filter(e,this).length>0:this.filter(e).length>0)},closest:function(e,t){var n,r=0,i=this.length,s=[],o=st.test(e)||typeof e!="string"?v(e,t||this.context):0;for(;r-1:v.find.matchesSelector(n,e)){s.push(n);break}n=n.parentNode}}return s=s.length>1?v.unique(s):s,this.pushStack(s,"closest",e)},index:function(e){return e?typeof e=="string"?v.inArray(this[0],v(e)):v.inArray(e.jquery?e[0]:e,this):this[0]&&this[0].parentNode?this.prevAll().length:-1},add:function(e,t){var n=typeof 
e=="string"?v(e,t):v.makeArray(e&&e.nodeType?[e]:e),r=v.merge(this.get(),n);return this.pushStack(ut(n[0])||ut(r[0])?r:v.unique(r))},addBack:function(e){return this.add(e==null?this.prevObject:this.prevObject.filter(e))}}),v.fn.andSelf=v.fn.addBack,v.each({parent:function(e){var t=e.parentNode;return t&&t.nodeType!==11?t:null},parents:function(e){return v.dir(e,"parentNode")},parentsUntil:function(e,t,n){return v.dir(e,"parentNode",n)},next:function(e){return at(e,"nextSibling")},prev:function(e){return at(e,"previousSibling")},nextAll:function(e){return v.dir(e,"nextSibling")},prevAll:function(e){return v.dir(e,"previousSibling")},nextUntil:function(e,t,n){return v.dir(e,"nextSibling",n)},prevUntil:function(e,t,n){return v.dir(e,"previousSibling",n)},siblings:function(e){return v.sibling((e.parentNode||{}).firstChild,e)},children:function(e){return v.sibling(e.firstChild)},contents:function(e){return v.nodeName(e,"iframe")?e.contentDocument||e.contentWindow.document:v.merge([],e.childNodes)}},function(e,t){v.fn[e]=function(n,r){var i=v.map(this,t,n);return nt.test(e)||(r=n),r&&typeof r=="string"&&(i=v.filter(r,i)),i=this.length>1&&!ot[e]?v.unique(i):i,this.length>1&&rt.test(e)&&(i=i.reverse()),this.pushStack(i,e,l.call(arguments).join(","))}}),v.extend({filter:function(e,t,n){return n&&(e=":not("+e+")"),t.length===1?v.find.matchesSelector(t[0],e)?[t[0]]:[]:v.find.matches(e,t)},dir:function(e,n,r){var i=[],s=e[n];while(s&&s.nodeType!==9&&(r===t||s.nodeType!==1||!v(s).is(r)))s.nodeType===1&&i.push(s),s=s[n];return i},sibling:function(e,t){var n=[];for(;e;e=e.nextSibling)e.nodeType===1&&e!==t&&n.push(e);return n}});var ct="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",ht=/ 
jQuery\d+="(?:null|\d+)"/g,pt=/^\s+/,dt=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,vt=/<([\w:]+)/,mt=/]","i"),Et=/^(?:checkbox|radio)$/,St=/checked\s*(?:[^=]|=\s*.checked.)/i,xt=/\/(java|ecma)script/i,Tt=/^\s*\s*$/g,Nt={option:[1,""],legend:[1,"
      ","
      "],thead:[1,"","
      "],tr:[2,"","
      "],td:[3,"","
      "],col:[2,"","
      "],area:[1,"",""],_default:[0,"",""]},Ct=lt(i),kt=Ct.appendChild(i.createElement("div"));Nt.optgroup=Nt.option,Nt.tbody=Nt.tfoot=Nt.colgroup=Nt.caption=Nt.thead,Nt.th=Nt.td,v.support.htmlSerialize||(Nt._default=[1,"X
      ","
      "]),v.fn.extend({text:function(e){return v.access(this,function(e){return e===t?v.text(this):this.empty().append((this[0]&&this[0].ownerDocument||i).createTextNode(e))},null,e,arguments.length)},wrapAll:function(e){if(v.isFunction(e))return this.each(function(t){v(this).wrapAll(e.call(this,t))});if(this[0]){var t=v(e,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstChild&&e.firstChild.nodeType===1)e=e.firstChild;return e}).append(this)}return this},wrapInner:function(e){return v.isFunction(e)?this.each(function(t){v(this).wrapInner(e.call(this,t))}):this.each(function(){var t=v(this),n=t.contents();n.length?n.wrapAll(e):t.append(e)})},wrap:function(e){var t=v.isFunction(e);return this.each(function(n){v(this).wrapAll(t?e.call(this,n):e)})},unwrap:function(){return this.parent().each(function(){v.nodeName(this,"body")||v(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(e){(this.nodeType===1||this.nodeType===11)&&this.appendChild(e)})},prepend:function(){return this.domManip(arguments,!0,function(e){(this.nodeType===1||this.nodeType===11)&&this.insertBefore(e,this.firstChild)})},before:function(){if(!ut(this[0]))return this.domManip(arguments,!1,function(e){this.parentNode.insertBefore(e,this)});if(arguments.length){var e=v.clean(arguments);return this.pushStack(v.merge(e,this),"before",this.selector)}},after:function(){if(!ut(this[0]))return this.domManip(arguments,!1,function(e){this.parentNode.insertBefore(e,this.nextSibling)});if(arguments.length){var e=v.clean(arguments);return this.pushStack(v.merge(this,e),"after",this.selector)}},remove:function(e,t){var n,r=0;for(;(n=this[r])!=null;r++)if(!e||v.filter(e,[n]).length)!t&&n.nodeType===1&&(v.cleanData(n.getElementsByTagName("*")),v.cleanData([n])),n.parentNode&&n.parentNode.removeChild(n);return this},empty:function(){var 
e,t=0;for(;(e=this[t])!=null;t++){e.nodeType===1&&v.cleanData(e.getElementsByTagName("*"));while(e.firstChild)e.removeChild(e.firstChild)}return this},clone:function(e,t){return e=e==null?!1:e,t=t==null?e:t,this.map(function(){return v.clone(this,e,t)})},html:function(e){return v.access(this,function(e){var n=this[0]||{},r=0,i=this.length;if(e===t)return n.nodeType===1?n.innerHTML.replace(ht,""):t;if(typeof e=="string"&&!yt.test(e)&&(v.support.htmlSerialize||!wt.test(e))&&(v.support.leadingWhitespace||!pt.test(e))&&!Nt[(vt.exec(e)||["",""])[1].toLowerCase()]){e=e.replace(dt,"<$1>");try{for(;r1&&typeof f=="string"&&St.test(f))return this.each(function(){v(this).domManip(e,n,r)});if(v.isFunction(f))return this.each(function(i){var s=v(this);e[0]=f.call(this,i,n?s.html():t),s.domManip(e,n,r)});if(this[0]){i=v.buildFragment(e,this,l),o=i.fragment,s=o.firstChild,o.childNodes.length===1&&(o=s);if(s){n=n&&v.nodeName(s,"tr");for(u=i.cacheable||c-1;a0?this.clone(!0):this).get(),v(o[i])[t](r),s=s.concat(r);return this.pushStack(s,e,o.selector)}}),v.extend({clone:function(e,t,n){var r,i,s,o;v.support.html5Clone||v.isXMLDoc(e)||!wt.test("<"+e.nodeName+">")?o=e.cloneNode(!0):(kt.innerHTML=e.outerHTML,kt.removeChild(o=kt.firstChild));if((!v.support.noCloneEvent||!v.support.noCloneChecked)&&(e.nodeType===1||e.nodeType===11)&&!v.isXMLDoc(e)){Ot(e,o),r=Mt(e),i=Mt(o);for(s=0;r[s];++s)i[s]&&Ot(r[s],i[s])}if(t){At(e,o);if(n){r=Mt(e),i=Mt(o);for(s=0;r[s];++s)At(r[s],i[s])}}return r=i=null,o},clean:function(e,t,n,r){var s,o,u,a,f,l,c,h,p,d,m,g,y=t===i&&Ct,b=[];if(!t||typeof t.createDocumentFragment=="undefined")t=i;for(s=0;(u=e[s])!=null;s++){typeof u=="number"&&(u+="");if(!u)continue;if(typeof 
u=="string")if(!gt.test(u))u=t.createTextNode(u);else{y=y||lt(t),c=t.createElement("div"),y.appendChild(c),u=u.replace(dt,"<$1>"),a=(vt.exec(u)||["",""])[1].toLowerCase(),f=Nt[a]||Nt._default,l=f[0],c.innerHTML=f[1]+u+f[2];while(l--)c=c.lastChild;if(!v.support.tbody){h=mt.test(u),p=a==="table"&&!h?c.firstChild&&c.firstChild.childNodes:f[1]===""&&!h?c.childNodes:[];for(o=p.length-1;o>=0;--o)v.nodeName(p[o],"tbody")&&!p[o].childNodes.length&&p[o].parentNode.removeChild(p[o])}!v.support.leadingWhitespace&&pt.test(u)&&c.insertBefore(t.createTextNode(pt.exec(u)[0]),c.firstChild),u=c.childNodes,c.parentNode.removeChild(c)}u.nodeType?b.push(u):v.merge(b,u)}c&&(u=c=y=null);if(!v.support.appendChecked)for(s=0;(u=b[s])!=null;s++)v.nodeName(u,"input")?_t(u):typeof u.getElementsByTagName!="undefined"&&v.grep(u.getElementsByTagName("input"),_t);if(n){m=function(e){if(!e.type||xt.test(e.type))return r?r.push(e.parentNode?e.parentNode.removeChild(e):e):n.appendChild(e)};for(s=0;(u=b[s])!=null;s++)if(!v.nodeName(u,"script")||!m(u))n.appendChild(u),typeof u.getElementsByTagName!="undefined"&&(g=v.grep(v.merge([],u.getElementsByTagName("script")),m),b.splice.apply(b,[s+1,0].concat(g)),s+=g.length)}return b},cleanData:function(e,t){var n,r,i,s,o=0,u=v.expando,a=v.cache,f=v.support.deleteExpando,l=v.event.special;for(;(i=e[o])!=null;o++)if(t||v.acceptData(i)){r=i[u],n=r&&a[r];if(n){if(n.events)for(s in n.events)l[s]?v.event.remove(i,s):v.removeEvent(i,s,n.handle);a[r]&&(delete a[r],f?delete i[u]:i.removeAttribute?i.removeAttribute(u):i[u]=null,v.deletedIds.push(r))}}}}),function(){var e,t;v.uaMatch=function(e){e=e.toLowerCase();var t=/(chrome)[ \/]([\w.]+)/.exec(e)||/(webkit)[ \/]([\w.]+)/.exec(e)||/(opera)(?:.*version|)[ \/]([\w.]+)/.exec(e)||/(msie) ([\w.]+)/.exec(e)||e.indexOf("compatible")<0&&/(mozilla)(?:.*? 
rv:([\w.]+)|)/.exec(e)||[];return{browser:t[1]||"",version:t[2]||"0"}},e=v.uaMatch(o.userAgent),t={},e.browser&&(t[e.browser]=!0,t.version=e.version),t.chrome?t.webkit=!0:t.webkit&&(t.safari=!0),v.browser=t,v.sub=function(){function e(t,n){return new e.fn.init(t,n)}v.extend(!0,e,this),e.superclass=this,e.fn=e.prototype=this(),e.fn.constructor=e,e.sub=this.sub,e.fn.init=function(r,i){return i&&i instanceof v&&!(i instanceof e)&&(i=e(i)),v.fn.init.call(this,r,i,t)},e.fn.init.prototype=e.fn;var t=e(i);return e}}();var Dt,Pt,Ht,Bt=/alpha\([^)]*\)/i,jt=/opacity=([^)]*)/,Ft=/^(top|right|bottom|left)$/,It=/^(none|table(?!-c[ea]).+)/,qt=/^margin/,Rt=new RegExp("^("+m+")(.*)$","i"),Ut=new RegExp("^("+m+")(?!px)[a-z%]+$","i"),zt=new RegExp("^([-+])=("+m+")","i"),Wt={BODY:"block"},Xt={position:"absolute",visibility:"hidden",display:"block"},Vt={letterSpacing:0,fontWeight:400},$t=["Top","Right","Bottom","Left"],Jt=["Webkit","O","Moz","ms"],Kt=v.fn.toggle;v.fn.extend({css:function(e,n){return v.access(this,function(e,n,r){return r!==t?v.style(e,n,r):v.css(e,n)},e,n,arguments.length>1)},show:function(){return Yt(this,!0)},hide:function(){return Yt(this)},toggle:function(e,t){var n=typeof e=="boolean";return v.isFunction(e)&&v.isFunction(t)?Kt.apply(this,arguments):this.each(function(){(n?e:Gt(this))?v(this).show():v(this).hide()})}}),v.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=Dt(e,"opacity");return n===""?"1":n}}}},cssNumber:{fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":v.support.cssFloat?"cssFloat":"styleFloat"},style:function(e,n,r,i){if(!e||e.nodeType===3||e.nodeType===8||!e.style)return;var s,o,u,a=v.camelCase(n),f=e.style;n=v.cssProps[a]||(v.cssProps[a]=Qt(f,a)),u=v.cssHooks[n]||v.cssHooks[a];if(r===t)return u&&"get"in u&&(s=u.get(e,!1,i))!==t?s:f[n];o=typeof 
r,o==="string"&&(s=zt.exec(r))&&(r=(s[1]+1)*s[2]+parseFloat(v.css(e,n)),o="number");if(r==null||o==="number"&&isNaN(r))return;o==="number"&&!v.cssNumber[a]&&(r+="px");if(!u||!("set"in u)||(r=u.set(e,r,i))!==t)try{f[n]=r}catch(l){}},css:function(e,n,r,i){var s,o,u,a=v.camelCase(n);return n=v.cssProps[a]||(v.cssProps[a]=Qt(e.style,a)),u=v.cssHooks[n]||v.cssHooks[a],u&&"get"in u&&(s=u.get(e,!0,i)),s===t&&(s=Dt(e,n)),s==="normal"&&n in Vt&&(s=Vt[n]),r||i!==t?(o=parseFloat(s),r||v.isNumeric(o)?o||0:s):s},swap:function(e,t,n){var r,i,s={};for(i in t)s[i]=e.style[i],e.style[i]=t[i];r=n.call(e);for(i in t)e.style[i]=s[i];return r}}),e.getComputedStyle?Dt=function(t,n){var r,i,s,o,u=e.getComputedStyle(t,null),a=t.style;return u&&(r=u.getPropertyValue(n)||u[n],r===""&&!v.contains(t.ownerDocument,t)&&(r=v.style(t,n)),Ut.test(r)&&qt.test(n)&&(i=a.width,s=a.minWidth,o=a.maxWidth,a.minWidth=a.maxWidth=a.width=r,r=u.width,a.width=i,a.minWidth=s,a.maxWidth=o)),r}:i.documentElement.currentStyle&&(Dt=function(e,t){var n,r,i=e.currentStyle&&e.currentStyle[t],s=e.style;return i==null&&s&&s[t]&&(i=s[t]),Ut.test(i)&&!Ft.test(t)&&(n=s.left,r=e.runtimeStyle&&e.runtimeStyle.left,r&&(e.runtimeStyle.left=e.currentStyle.left),s.left=t==="fontSize"?"1em":i,i=s.pixelLeft+"px",s.left=n,r&&(e.runtimeStyle.left=r)),i===""?"auto":i}),v.each(["height","width"],function(e,t){v.cssHooks[t]={get:function(e,n,r){if(n)return e.offsetWidth===0&&It.test(Dt(e,"display"))?v.swap(e,Xt,function(){return tn(e,t,r)}):tn(e,t,r)},set:function(e,n,r){return Zt(e,n,r?en(e,t,r,v.support.boxSizing&&v.css(e,"boxSizing")==="border-box"):0)}}}),v.support.opacity||(v.cssHooks.opacity={get:function(e,t){return jt.test((t&&e.currentStyle?e.currentStyle.filter:e.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":t?"1":""},set:function(e,t){var 
n=e.style,r=e.currentStyle,i=v.isNumeric(t)?"alpha(opacity="+t*100+")":"",s=r&&r.filter||n.filter||"";n.zoom=1;if(t>=1&&v.trim(s.replace(Bt,""))===""&&n.removeAttribute){n.removeAttribute("filter");if(r&&!r.filter)return}n.filter=Bt.test(s)?s.replace(Bt,i):s+" "+i}}),v(function(){v.support.reliableMarginRight||(v.cssHooks.marginRight={get:function(e,t){return v.swap(e,{display:"inline-block"},function(){if(t)return Dt(e,"marginRight")})}}),!v.support.pixelPosition&&v.fn.position&&v.each(["top","left"],function(e,t){v.cssHooks[t]={get:function(e,n){if(n){var r=Dt(e,t);return Ut.test(r)?v(e).position()[t]+"px":r}}}})}),v.expr&&v.expr.filters&&(v.expr.filters.hidden=function(e){return e.offsetWidth===0&&e.offsetHeight===0||!v.support.reliableHiddenOffsets&&(e.style&&e.style.display||Dt(e,"display"))==="none"},v.expr.filters.visible=function(e){return!v.expr.filters.hidden(e)}),v.each({margin:"",padding:"",border:"Width"},function(e,t){v.cssHooks[e+t]={expand:function(n){var r,i=typeof n=="string"?n.split(" "):[n],s={};for(r=0;r<4;r++)s[e+$t[r]+t]=i[r]||i[r-2]||i[0];return s}},qt.test(e)||(v.cssHooks[e+t].set=Zt)});var rn=/%20/g,sn=/\[\]$/,on=/\r?\n/g,un=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,an=/^(?:select|textarea)/i;v.fn.extend({serialize:function(){return v.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?v.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||an.test(this.nodeName)||un.test(this.type))}).map(function(e,t){var n=v(this).val();return n==null?null:v.isArray(n)?v.map(n,function(e,n){return{name:t.name,value:e.replace(on,"\r\n")}}):{name:t.name,value:n.replace(on,"\r\n")}}).get()}}),v.param=function(e,n){var 
r,i=[],s=function(e,t){t=v.isFunction(t)?t():t==null?"":t,i[i.length]=encodeURIComponent(e)+"="+encodeURIComponent(t)};n===t&&(n=v.ajaxSettings&&v.ajaxSettings.traditional);if(v.isArray(e)||e.jquery&&!v.isPlainObject(e))v.each(e,function(){s(this.name,this.value)});else for(r in e)fn(r,e[r],n,s);return i.join("&").replace(rn,"+")};var ln,cn,hn=/#.*$/,pn=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,dn=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,vn=/^(?:GET|HEAD)$/,mn=/^\/\//,gn=/\?/,yn=/)<[^<]*)*<\/script>/gi,bn=/([?&])_=[^&]*/,wn=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/,En=v.fn.load,Sn={},xn={},Tn=["*/"]+["*"];try{cn=s.href}catch(Nn){cn=i.createElement("a"),cn.href="",cn=cn.href}ln=wn.exec(cn.toLowerCase())||[],v.fn.load=function(e,n,r){if(typeof e!="string"&&En)return En.apply(this,arguments);if(!this.length)return this;var i,s,o,u=this,a=e.indexOf(" ");return a>=0&&(i=e.slice(a,e.length),e=e.slice(0,a)),v.isFunction(n)?(r=n,n=t):n&&typeof n=="object"&&(s="POST"),v.ajax({url:e,type:s,dataType:"html",data:n,complete:function(e,t){r&&u.each(r,o||[e.responseText,t,e])}}).done(function(e){o=arguments,u.html(i?v("
      ").append(e.replace(yn,"")).find(i):e)}),this},v.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(e,t){v.fn[t]=function(e){return this.on(t,e)}}),v.each(["get","post"],function(e,n){v[n]=function(e,r,i,s){return v.isFunction(r)&&(s=s||i,i=r,r=t),v.ajax({type:n,url:e,data:r,success:i,dataType:s})}}),v.extend({getScript:function(e,n){return v.get(e,t,n,"script")},getJSON:function(e,t,n){return v.get(e,t,n,"json")},ajaxSetup:function(e,t){return t?Ln(e,v.ajaxSettings):(t=e,e=v.ajaxSettings),Ln(e,t),e},ajaxSettings:{url:cn,isLocal:dn.test(ln[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded; charset=UTF-8",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":Tn},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":e.String,"text html":!0,"text json":v.parseJSON,"text xml":v.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:Cn(Sn),ajaxTransport:Cn(xn),ajax:function(e,n){function T(e,n,s,a){var l,y,b,w,S,T=n;if(E===2)return;E=2,u&&clearTimeout(u),o=t,i=a||"",x.readyState=e>0?4:0,s&&(w=An(c,x,s));if(e>=200&&e<300||e===304)c.ifModified&&(S=x.getResponseHeader("Last-Modified"),S&&(v.lastModified[r]=S),S=x.getResponseHeader("Etag"),S&&(v.etag[r]=S)),e===304?(T="notmodified",l=!0):(l=On(c,w),T=l.state,y=l.data,b=l.error,l=!b);else{b=T;if(!T||e)T="error",e<0&&(e=0)}x.status=e,x.statusText=(n||T)+"",l?d.resolveWith(h,[y,T,x]):d.rejectWith(h,[x,T,b]),x.statusCode(g),g=t,f&&p.trigger("ajax"+(l?"Success":"Error"),[x,c,l?y:b]),m.fireWith(h,[x,T]),f&&(p.trigger("ajaxComplete",[x,c]),--v.active||v.event.trigger("ajaxStop"))}typeof e=="object"&&(n=e,e=t),n=n||{};var r,i,s,o,u,a,f,l,c=v.ajaxSetup({},n),h=c.context||c,p=h!==c&&(h.nodeType||h instanceof v)?v(h):v.event,d=v.Deferred(),m=v.Callbacks("once 
memory"),g=c.statusCode||{},b={},w={},E=0,S="canceled",x={readyState:0,setRequestHeader:function(e,t){if(!E){var n=e.toLowerCase();e=w[n]=w[n]||e,b[e]=t}return this},getAllResponseHeaders:function(){return E===2?i:null},getResponseHeader:function(e){var n;if(E===2){if(!s){s={};while(n=pn.exec(i))s[n[1].toLowerCase()]=n[2]}n=s[e.toLowerCase()]}return n===t?null:n},overrideMimeType:function(e){return E||(c.mimeType=e),this},abort:function(e){return e=e||S,o&&o.abort(e),T(0,e),this}};d.promise(x),x.success=x.done,x.error=x.fail,x.complete=m.add,x.statusCode=function(e){if(e){var t;if(E<2)for(t in e)g[t]=[g[t],e[t]];else t=e[x.status],x.always(t)}return this},c.url=((e||c.url)+"").replace(hn,"").replace(mn,ln[1]+"//"),c.dataTypes=v.trim(c.dataType||"*").toLowerCase().split(y),c.crossDomain==null&&(a=wn.exec(c.url.toLowerCase()),c.crossDomain=!(!a||a[1]===ln[1]&&a[2]===ln[2]&&(a[3]||(a[1]==="http:"?80:443))==(ln[3]||(ln[1]==="http:"?80:443)))),c.data&&c.processData&&typeof c.data!="string"&&(c.data=v.param(c.data,c.traditional)),kn(Sn,c,n,x);if(E===2)return x;f=c.global,c.type=c.type.toUpperCase(),c.hasContent=!vn.test(c.type),f&&v.active++===0&&v.event.trigger("ajaxStart");if(!c.hasContent){c.data&&(c.url+=(gn.test(c.url)?"&":"?")+c.data,delete c.data),r=c.url;if(c.cache===!1){var N=v.now(),C=c.url.replace(bn,"$1_="+N);c.url=C+(C===c.url?(gn.test(c.url)?"&":"?")+"_="+N:"")}}(c.data&&c.hasContent&&c.contentType!==!1||n.contentType)&&x.setRequestHeader("Content-Type",c.contentType),c.ifModified&&(r=r||c.url,v.lastModified[r]&&x.setRequestHeader("If-Modified-Since",v.lastModified[r]),v.etag[r]&&x.setRequestHeader("If-None-Match",v.etag[r])),x.setRequestHeader("Accept",c.dataTypes[0]&&c.accepts[c.dataTypes[0]]?c.accepts[c.dataTypes[0]]+(c.dataTypes[0]!=="*"?", "+Tn+"; q=0.01":""):c.accepts["*"]);for(l in c.headers)x.setRequestHeader(l,c.headers[l]);if(!c.beforeSend||c.beforeSend.call(h,x,c)!==!1&&E!==2){S="abort";for(l 
in{success:1,error:1,complete:1})x[l](c[l]);o=kn(xn,c,n,x);if(!o)T(-1,"No Transport");else{x.readyState=1,f&&p.trigger("ajaxSend",[x,c]),c.async&&c.timeout>0&&(u=setTimeout(function(){x.abort("timeout")},c.timeout));try{E=1,o.send(b,T)}catch(k){if(!(E<2))throw k;T(-1,k)}}return x}return x.abort()},active:0,lastModified:{},etag:{}});var Mn=[],_n=/\?/,Dn=/(=)\?(?=&|$)|\?\?/,Pn=v.now();v.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Mn.pop()||v.expando+"_"+Pn++;return this[e]=!0,e}}),v.ajaxPrefilter("json jsonp",function(n,r,i){var s,o,u,a=n.data,f=n.url,l=n.jsonp!==!1,c=l&&Dn.test(f),h=l&&!c&&typeof a=="string"&&!(n.contentType||"").indexOf("application/x-www-form-urlencoded")&&Dn.test(a);if(n.dataTypes[0]==="jsonp"||c||h)return s=n.jsonpCallback=v.isFunction(n.jsonpCallback)?n.jsonpCallback():n.jsonpCallback,o=e[s],c?n.url=f.replace(Dn,"$1"+s):h?n.data=a.replace(Dn,"$1"+s):l&&(n.url+=(_n.test(f)?"&":"?")+n.jsonp+"="+s),n.converters["script json"]=function(){return u||v.error(s+" was not called"),u[0]},n.dataTypes[0]="json",e[s]=function(){u=arguments},i.always(function(){e[s]=o,n[s]&&(n.jsonpCallback=r.jsonpCallback,Mn.push(s)),u&&v.isFunction(o)&&o(u[0]),u=o=t}),"script"}),v.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(e){return v.globalEval(e),e}}}),v.ajaxPrefilter("script",function(e){e.cache===t&&(e.cache=!1),e.crossDomain&&(e.type="GET",e.global=!1)}),v.ajaxTransport("script",function(e){if(e.crossDomain){var 
n,r=i.head||i.getElementsByTagName("head")[0]||i.documentElement;return{send:function(s,o){n=i.createElement("script"),n.async="async",e.scriptCharset&&(n.charset=e.scriptCharset),n.src=e.url,n.onload=n.onreadystatechange=function(e,i){if(i||!n.readyState||/loaded|complete/.test(n.readyState))n.onload=n.onreadystatechange=null,r&&n.parentNode&&r.removeChild(n),n=t,i||o(200,"success")},r.insertBefore(n,r.firstChild)},abort:function(){n&&n.onload(0,1)}}}});var Hn,Bn=e.ActiveXObject?function(){for(var e in Hn)Hn[e](0,1)}:!1,jn=0;v.ajaxSettings.xhr=e.ActiveXObject?function(){return!this.isLocal&&Fn()||In()}:Fn,function(e){v.extend(v.support,{ajax:!!e,cors:!!e&&"withCredentials"in e})}(v.ajaxSettings.xhr()),v.support.ajax&&v.ajaxTransport(function(n){if(!n.crossDomain||v.support.cors){var r;return{send:function(i,s){var o,u,a=n.xhr();n.username?a.open(n.type,n.url,n.async,n.username,n.password):a.open(n.type,n.url,n.async);if(n.xhrFields)for(u in n.xhrFields)a[u]=n.xhrFields[u];n.mimeType&&a.overrideMimeType&&a.overrideMimeType(n.mimeType),!n.crossDomain&&!i["X-Requested-With"]&&(i["X-Requested-With"]="XMLHttpRequest");try{for(u in i)a.setRequestHeader(u,i[u])}catch(f){}a.send(n.hasContent&&n.data||null),r=function(e,i){var u,f,l,c,h;try{if(r&&(i||a.readyState===4)){r=t,o&&(a.onreadystatechange=v.noop,Bn&&delete Hn[o]);if(i)a.readyState!==4&&a.abort();else{u=a.status,l=a.getAllResponseHeaders(),c={},h=a.responseXML,h&&h.documentElement&&(c.xml=h);try{c.text=a.responseText}catch(p){}try{f=a.statusText}catch(p){f=""}!u&&n.isLocal&&!n.crossDomain?u=c.text?200:404:u===1223&&(u=204)}}}catch(d){i||s(-1,d)}c&&s(u,f,c,l)},n.async?a.readyState===4?setTimeout(r,0):(o=++jn,Bn&&(Hn||(Hn={},v(e).unload(Bn)),Hn[o]=r),a.onreadystatechange=r):r()},abort:function(){r&&r(0,1)}}}});var qn,Rn,Un=/^(?:toggle|show|hide)$/,zn=new RegExp("^(?:([-+])=|)("+m+")([a-z%]*)$","i"),Wn=/queueHooks$/,Xn=[Gn],Vn={"*":[function(e,t){var 
n,r,i=this.createTween(e,t),s=zn.exec(t),o=i.cur(),u=+o||0,a=1,f=20;if(s){n=+s[2],r=s[3]||(v.cssNumber[e]?"":"px");if(r!=="px"&&u){u=v.css(i.elem,e,!0)||n||1;do a=a||".5",u/=a,v.style(i.elem,e,u+r);while(a!==(a=i.cur()/o)&&a!==1&&--f)}i.unit=r,i.start=u,i.end=s[1]?u+(s[1]+1)*n:n}return i}]};v.Animation=v.extend(Kn,{tweener:function(e,t){v.isFunction(e)?(t=e,e=["*"]):e=e.split(" ");var n,r=0,i=e.length;for(;r-1,f={},l={},c,h;a?(l=i.position(),c=l.top,h=l.left):(c=parseFloat(o)||0,h=parseFloat(u)||0),v.isFunction(t)&&(t=t.call(e,n,s)),t.top!=null&&(f.top=t.top-s.top+c),t.left!=null&&(f.left=t.left-s.left+h),"using"in t?t.using.call(e,f):i.css(f)}},v.fn.extend({position:function(){if(!this[0])return;var e=this[0],t=this.offsetParent(),n=this.offset(),r=er.test(t[0].nodeName)?{top:0,left:0}:t.offset();return n.top-=parseFloat(v.css(e,"marginTop"))||0,n.left-=parseFloat(v.css(e,"marginLeft"))||0,r.top+=parseFloat(v.css(t[0],"borderTopWidth"))||0,r.left+=parseFloat(v.css(t[0],"borderLeftWidth"))||0,{top:n.top-r.top,left:n.left-r.left}},offsetParent:function(){return this.map(function(){var e=this.offsetParent||i.body;while(e&&!er.test(e.nodeName)&&v.css(e,"position")==="static")e=e.offsetParent;return e||i.body})}}),v.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(e,n){var r=/Y/.test(n);v.fn[e]=function(i){return v.access(this,function(e,i,s){var o=tr(e);if(s===t)return o?n in o?o[n]:o.document.documentElement[i]:e[i];o?o.scrollTo(r?v(o).scrollLeft():s,r?s:v(o).scrollTop()):e[i]=s},e,i,arguments.length,null)}}),v.each({Height:"height",Width:"width"},function(e,n){v.each({padding:"inner"+e,content:n,"":"outer"+e},function(r,i){v.fn[i]=function(i,s){var o=arguments.length&&(r||typeof i!="boolean"),u=r||(i===!0||s===!0?"margin":"border");return v.access(this,function(n,r,i){var s;return 
v.isWindow(n)?n.document.documentElement["client"+e]:n.nodeType===9?(s=n.documentElement,Math.max(n.body["scroll"+e],s["scroll"+e],n.body["offset"+e],s["offset"+e],s["client"+e])):i===t?v.css(n,r,i,u):v.style(n,r,i,u)},n,o?i:t,o,null)}})}),e.jQuery=e.$=v,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return v})})(window); \ No newline at end of file diff --git a/rpkid/rpki/gui/app/templates/404.html b/rpkid/rpki/gui/app/templates/404.html deleted file mode 100644 index 76ef3aee..00000000 --- a/rpkid/rpki/gui/app/templates/404.html +++ /dev/null @@ -1,11 +0,0 @@ -{% extends "base.html" %} - -{% block content %} - - -
      - Whoops! I could not find the page you requested. -
      -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/500.html b/rpkid/rpki/gui/app/templates/500.html deleted file mode 100644 index 216fe8ae..00000000 --- a/rpkid/rpki/gui/app/templates/500.html +++ /dev/null @@ -1,11 +0,0 @@ -{% extends "base.html" %} - -{% block content %} - - -
      - Whoops! The administrator has been notified of this error. -
      -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/alert_confirm_clear.html b/rpkid/rpki/gui/app/templates/app/alert_confirm_clear.html deleted file mode 100644 index 5d7fcf04..00000000 --- a/rpkid/rpki/gui/app/templates/app/alert_confirm_clear.html +++ /dev/null @@ -1,21 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{% block content %} - - -
      -
      - Please confirm that you would like to delete all alerts. -
      -
      - {% csrf_token %} -
      - - Cancel -
      - -
      -{% endblock %} diff --git a/rpkid/rpki/gui/app/templates/app/alert_confirm_delete.html b/rpkid/rpki/gui/app/templates/app/alert_confirm_delete.html deleted file mode 100644 index 78c84917..00000000 --- a/rpkid/rpki/gui/app/templates/app/alert_confirm_delete.html +++ /dev/null @@ -1,17 +0,0 @@ -{% extends "app/alert_detail.html" %} -{% load url from future %} - -{% block action %} -
      -
      - Please confirm that you would like to delete this alert. -
      -
      - {% csrf_token %} -
      - - Cancel -
      - -
      -{% endblock action %} diff --git a/rpkid/rpki/gui/app/templates/app/alert_detail.html b/rpkid/rpki/gui/app/templates/app/alert_detail.html deleted file mode 100644 index b3a73b7e..00000000 --- a/rpkid/rpki/gui/app/templates/app/alert_detail.html +++ /dev/null @@ -1,31 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} -{% load app_extras %} - -{% block content %} - - -
      -
      - - - - - - -
      Date: {{ object.when }}
      Severity:{{ object.get_severity_display }}
      - -

      -{{ object.text }} - -

      - -{% block action %} - -{% endblock action %} - -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/alert_list.html b/rpkid/rpki/gui/app/templates/app/alert_list.html deleted file mode 100644 index dd0530e4..00000000 --- a/rpkid/rpki/gui/app/templates/app/alert_list.html +++ /dev/null @@ -1,31 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{% block content %} - - - - - - - - - - - - {% for obj in object_list %} - - - - - - {% endfor %} - -
      #DateSubject
      {# #}{{ obj.when }}{{ obj.subject }}
      - - -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/app_base.html b/rpkid/rpki/gui/app/templates/app/app_base.html deleted file mode 100644 index 4fb5f731..00000000 --- a/rpkid/rpki/gui/app/templates/app/app_base.html +++ /dev/null @@ -1,31 +0,0 @@ -{% extends "base.html" %} -{# this can be removed when django 1.4 is EOL, because it is the default behavior in 1.5 #} -{% load url from future %} -{% load app_extras %} - -{# This template defines the common structure for the rpki.gui.app application. #} - -{% block sidebar %} - -

      {{ request.session.handle }}

      - -{# common navigation #} - - - -{% endblock sidebar %} diff --git a/rpkid/rpki/gui/app/templates/app/app_confirm_delete.html b/rpkid/rpki/gui/app/templates/app/app_confirm_delete.html deleted file mode 100644 index 7c35a733..00000000 --- a/rpkid/rpki/gui/app/templates/app/app_confirm_delete.html +++ /dev/null @@ -1,21 +0,0 @@ -{% extends "app/app_base.html" %} - -{% block content %} -
      -

      {{ form_title }}

      -
      - -
      -

      Warning!

      - Please confirm that you would like to delete this object. -
      - -
      - {% csrf_token %} - {{ form }} -
      - - Cancel -
      -
      -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/app_form.html b/rpkid/rpki/gui/app/templates/app/app_form.html deleted file mode 100644 index b6ab60a2..00000000 --- a/rpkid/rpki/gui/app/templates/app/app_form.html +++ /dev/null @@ -1,19 +0,0 @@ -{% extends "app/app_base.html" %} - -{% block content %} - - -{# allow this template to be subclassed to fill in extra information, such as warnings #} -{% block form_info %}{% endblock form_info %} - -
      - {% csrf_token %} - {% include "app/bootstrap_form.html" %} -
      - - Cancel -
      -
      -{% endblock %} diff --git a/rpkid/rpki/gui/app/templates/app/bootstrap_form.html b/rpkid/rpki/gui/app/templates/app/bootstrap_form.html deleted file mode 100644 index c6fd5424..00000000 --- a/rpkid/rpki/gui/app/templates/app/bootstrap_form.html +++ /dev/null @@ -1,26 +0,0 @@ -{% if form.non_field_errors %} -
      - {{ form.non_field_errors }} -
      -{% endif %} - -{% for field in form %} - -{% if field.is_hidden %} -{{ field }} -{% else %} -
      - -
      - {{ field }} - {% if field.help_text %} - {{ field.help_text }} - {% endif %} - {% if field.errors %} - {{ field.errors }} - {% endif %} -
      -
      -{% endif %} - -{% endfor %} diff --git a/rpkid/rpki/gui/app/templates/app/child_detail.html b/rpkid/rpki/gui/app/templates/app/child_detail.html deleted file mode 100644 index 8178e179..00000000 --- a/rpkid/rpki/gui/app/templates/app/child_detail.html +++ /dev/null @@ -1,48 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{% block content %} - - -
      -

      Valid until {{ object.valid_until }} -

      - -
      -
      - Addresses - {% if object.address_ranges.all %} -
        - {% for a in object.address_ranges.all %} -
      • {{ a.as_resource_range }}
      • - {% endfor %} -
      - {% else %} -

      none

      - {% endif %} -
      -
      - ASNs - {% if object.asns.all %} -
        - {% for a in object.asns.all %} -
      • {{ a.as_resource_range }}
      • - {% endfor %} -
      - {% else %} -

      none

      - {% endif %} -
      -
      - -{% block action %} - Edit - AS - Prefix - Export - Delete -{% endblock %} - -{% endblock %} diff --git a/rpkid/rpki/gui/app/templates/app/client_detail.html b/rpkid/rpki/gui/app/templates/app/client_detail.html deleted file mode 100644 index 3117e859..00000000 --- a/rpkid/rpki/gui/app/templates/app/client_detail.html +++ /dev/null @@ -1,25 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{% block content %} - - - - - - - - - - - -
      Name{{ object.handle }}
      SIA{{ object.sia_base }}
      - -{% block action %} - Export - Delete -{% endblock action %} - -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/client_list.html b/rpkid/rpki/gui/app/templates/app/client_list.html deleted file mode 100644 index 12987c53..00000000 --- a/rpkid/rpki/gui/app/templates/app/client_list.html +++ /dev/null @@ -1,22 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{% block content %} - - - - - {% for client in object_list %} - - - - - {% endfor %} - -
      HandleAction
      {{ client.handle }} - -
      - Import -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/conf_empty.html b/rpkid/rpki/gui/app/templates/app/conf_empty.html deleted file mode 100644 index efe06f14..00000000 --- a/rpkid/rpki/gui/app/templates/app/conf_empty.html +++ /dev/null @@ -1,17 +0,0 @@ -{% extends "base.html" %} -{% load url from future %} - -{% block content %} - -{% if request.user.is_superuser %} -
      -There are currently no resource holders on this system. -
      - Create -{% else %} -
      -Your account does not have permission to manage any resource handles on this server. Please contact your portal-gui adminstrator. -
      -{% endif %} - -{% endblock %} diff --git a/rpkid/rpki/gui/app/templates/app/conf_list.html b/rpkid/rpki/gui/app/templates/app/conf_list.html deleted file mode 100644 index dce6d59e..00000000 --- a/rpkid/rpki/gui/app/templates/app/conf_list.html +++ /dev/null @@ -1,17 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{% block content %} - - -

      Please select a handle.

      - - - -{% endblock %} diff --git a/rpkid/rpki/gui/app/templates/app/dashboard.html b/rpkid/rpki/gui/app/templates/app/dashboard.html deleted file mode 100644 index 65dbb90f..00000000 --- a/rpkid/rpki/gui/app/templates/app/dashboard.html +++ /dev/null @@ -1,230 +0,0 @@ -{% extends "app/app_base.html" %} - -{# this can be removed when django 1.4 is EOL, because it is the default behavior in 1.5 #} -{% load url from future %} - -{% block sidebar_extra %} -
    • -
    • - {# #}export identity
    • -{% endblock sidebar_extra %} - -{% block content %} -
      -
      - - - - - - - - - - {% for object in asns %} - - - - - - {% endfor %} - - {% for object in prefixes %} - - - - - - {% endfor %} - - {% if prefixes_v6 %} - {% for object in prefixes_v6 %} - - - - - - {% endfor %} - {% endif %} -
      ResourceValid UntilParent
      {{ object }}{{ object.cert.not_after }} - {% if object.cert.parent %} - {{ object.cert.parent.handle }} - {% endif %} -
      {{ object.as_resource_range }}{{ object.cert.not_after }} - {% if object.cert.parent %} - {{ object.cert.parent.handle }} - {% endif %} -
      {{ object.as_resource_range }}{{ object.cert.not_after }} - {% if object.cert.parent %} - {{ object.cert.parent.handle }} - {% endif %} -
      - refresh -
      -
      -

      Unallocated Resources

      -

      The following resources have not been allocated to a child, nor appear in a ROA. - - {% if unused_asns %} -

      ASNs

      -
        - {% for asn in unused_asns %} -
      • AS{{ asn }} - {% endfor %} -
      - {% endif %} - - {% if unused_prefixes %} -

      IPv4

      - - - {% for addr in unused_prefixes %} - - - - - {% endfor %} -
      PrefixAction
      {{ addr }} - ROA -
      - {% endif %} - - {% if unused_prefixes_v6 %} -

      IPv6

      - - - {% for addr in unused_prefixes_v6 %} - - - - - {% endfor %} -
      Prefix
      {{ addr }} - ROA -
      - {% endif %} - -
      -
      - -
      -
      - - - - {% for roa in conf.roas %} - - - - - - - - {% endfor %} -
      PrefixMax LengthAS
      {{ roa.prefixes.all.0.as_roa_prefix }}{{ roa.prefixes.all.0.max_prefixlen }}{{ roa.asn }} - - - -
      - Create - Import - Export -
      - -
      - - - - {% for gbr in conf.ghostbusters %} - - - - - - - - {% endfor %} -
      Full NameOrganizationEmailTelephone
      {{ gbr.full_name }}{{ gbr.organization }}{{ gbr.email_address }}{{ gbr.telephone }} - - - -
      - Create -
      -
      - -
      -
      - - - - {% for child in conf.children %} - - - - - {% endfor %} -
      Handle
      {{ child.handle }} - -
      - -
      - -
      -
      -
      - - - - {% for parent in conf.parents %} - - - - - {% endfor %} -
      Handle
      {{ parent.handle }} - -
      - Import -
      -
      - -
      -
      - - - - {% for repo in conf.repositories %} - - - - - {% endfor %} -
      Handle
      {{ repo.handle }} - -
      - Import -
      -
      -{% endblock %} diff --git a/rpkid/rpki/gui/app/templates/app/ghostbuster_confirm_delete.html b/rpkid/rpki/gui/app/templates/app/ghostbuster_confirm_delete.html deleted file mode 100644 index 76b1d25a..00000000 --- a/rpkid/rpki/gui/app/templates/app/ghostbuster_confirm_delete.html +++ /dev/null @@ -1,20 +0,0 @@ -{% extends "app/ghostbuster_detail.html" %} - -{% block extra %} - -
      -

      - Please confirm that you really want to delete by clicking Delete. - -

      -
      - {% csrf_token %} - - Cancel -
      -
      -
      - -{% endblock %} - - diff --git a/rpkid/rpki/gui/app/templates/app/ghostbusterrequest_detail.html b/rpkid/rpki/gui/app/templates/app/ghostbusterrequest_detail.html deleted file mode 100644 index 296f0f16..00000000 --- a/rpkid/rpki/gui/app/templates/app/ghostbusterrequest_detail.html +++ /dev/null @@ -1,64 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{% block content %} - - - - - - {% if object.honorific_prefix %} - - {% endif %} - - {% if object.organization %} - - {% endif %} - - {% if object.telephone %} - - {% endif %} - - {% if object.email_address %} - - {% endif %} - - {% if object.box %} - - {% endif %} - - {% if object.extended %} - - {% endif %} - - {% if object.street %} - - {% endif %} - - {% if object.city %} - - {% endif %} - - {% if object.region %} - - {% endif %} - - {% if object.code %} - - {% endif %} - - {% if object.country %} - - {% endif %} - -
      Full Name{{ object.full_name }}
      Honorific Prefix{{ object.honorific_prefix }}
      Organization{{ object.organization }}
      Telephone{{ object.telephone }}
      Email{{ object.email_address }}
      P.O. Box{{ object.box }}
      Extended Address{{ object.extended }}
      Street Address{{ object.street }}
      City{{ object.city }}
      Region{{ object.region }}
      Postal Code{{ object.code }}
      Country{{ object.country }}
      - -{% block action %} -{# the roarequest_confirm_delete template will override this section #} - Edit - Delete -{% endblock action %} - -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/import_resource_form.html b/rpkid/rpki/gui/app/templates/app/import_resource_form.html deleted file mode 100644 index e446d344..00000000 --- a/rpkid/rpki/gui/app/templates/app/import_resource_form.html +++ /dev/null @@ -1,9 +0,0 @@ -{% extends "app/app_form.html" %} - -{% block form_info %} -
      - Warning! All existing resources of this type currently in the - database will be deleted and replaced with the contents of the CSV - file you are uploading. -
      -{% endblock form_info %} diff --git a/rpkid/rpki/gui/app/templates/app/object_confirm_delete.html b/rpkid/rpki/gui/app/templates/app/object_confirm_delete.html deleted file mode 100644 index c4af9b26..00000000 --- a/rpkid/rpki/gui/app/templates/app/object_confirm_delete.html +++ /dev/null @@ -1,21 +0,0 @@ -{% extends parent_template %} -{% comment %} -Since Django templates do not support multiple inheritance, we simluate it by -dynamically extending from the *_detail.html template for a concrete object -type. The *DeleteView classes should set a "parent_template" variable which is -string specifying the concrete template to inherit from. -{% endcomment %} -{% load url from future %} - -{% block action %} -
      -

      Warning!

      - Please confirm that you would like to delete this object -
      - -
      - {% csrf_token %} - - Cancel -
      -{% endblock %} diff --git a/rpkid/rpki/gui/app/templates/app/parent_detail.html b/rpkid/rpki/gui/app/templates/app/parent_detail.html deleted file mode 100644 index 4dd1842f..00000000 --- a/rpkid/rpki/gui/app/templates/app/parent_detail.html +++ /dev/null @@ -1,67 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{% block content %} - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      service_uri{{ object.service_uri }}
      parent_handle{{ object.parent_handle }}
      child_handle{{ object.child_handle }}
      repository_type{{ object.repository_type }}
      referrer{{ object.referrer }}
      ta validity period{{ object.ta.getNotBefore }} - {{ object.ta.getNotAfter }}
      - -
      -
      -

      Delegated Addresses

      -
        - {% for c in object.certs.all %} - {% for a in c.address_ranges.all %} -
      • {{ a }}
      • - {% endfor %} - {% for a in c.address_ranges_v6.all %} -
      • {{ a }}
      • - {% endfor %} - {% endfor %} -
      -
      -
      -

      Delegated ASNs

      -
        - {% for c in object.certs.all %} - {% for a in c.asn_ranges.all %} -
      • {{ a }}
      • - {% endfor %} - {% endfor %} -
      -
      -
      - -{% block action %} - Export - Delete -{% endblock action %} - -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/pubclient_list.html b/rpkid/rpki/gui/app/templates/app/pubclient_list.html deleted file mode 100644 index 1872e005..00000000 --- a/rpkid/rpki/gui/app/templates/app/pubclient_list.html +++ /dev/null @@ -1,10 +0,0 @@ -{% extends "app/object_list.html" %} -{% load url from future %} - -{% block actions %} -
      - Import -
      -{% endblock actions %} - - diff --git a/rpkid/rpki/gui/app/templates/app/repository_detail.html b/rpkid/rpki/gui/app/templates/app/repository_detail.html deleted file mode 100644 index 92a43e54..00000000 --- a/rpkid/rpki/gui/app/templates/app/repository_detail.html +++ /dev/null @@ -1,19 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{% block content %} - - - - - - - -
      SIA{{ object.sia_base }}
      - -{% block action %} - Delete -{% endblock action %} -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/resource_holder_list.html b/rpkid/rpki/gui/app/templates/app/resource_holder_list.html deleted file mode 100644 index 6525e74d..00000000 --- a/rpkid/rpki/gui/app/templates/app/resource_holder_list.html +++ /dev/null @@ -1,37 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{% block content %} - - -

      -This page lists all of the resource holders that are currently managed by this server. -Note that this is distinct from the -list of web interface users. -

      - - - - - - - - - - {% for conf in object_list %} - - - - - {% endfor %} - -
      HandleAction
      {{ conf.handle }} - - -
      - - Create -{% endblock content %} -{# vim: set ft=htmldjango: #} diff --git a/rpkid/rpki/gui/app/templates/app/roa_detail.html b/rpkid/rpki/gui/app/templates/app/roa_detail.html deleted file mode 100644 index ec76579d..00000000 --- a/rpkid/rpki/gui/app/templates/app/roa_detail.html +++ /dev/null @@ -1,40 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} -{% load app_extras %} - -{% block content %} - - -
      -
      - - - - - - - -
      PrefixMax LengthAS
      {{ object.prefixes.all.0.as_roa_prefix }}{{ object.prefixes.all.0.max_prefixlen }}{{ object.asn }}
      -
      - -
      -

      Covered Routes

      -

      This table lists currently announced routes which are covered by prefixes included in this ROA. - - - {% for r in object.routes %} - - - - - - - {% endfor %} -
      PrefixASValidity
      {{ r.as_resource_range }}{{ r.asn }}{% validity_label r.status %}
      -

      -
      - - Delete -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/roarequest_confirm_delete.html b/rpkid/rpki/gui/app/templates/app/roarequest_confirm_delete.html deleted file mode 100644 index 7dc3ec2b..00000000 --- a/rpkid/rpki/gui/app/templates/app/roarequest_confirm_delete.html +++ /dev/null @@ -1,59 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} -{% load app_extras %} - -{% block content %} - - -
      -
      -
      -

      Please confirm that you would like to delete the - following ROA Request. The table to the right indicates how validation - status for matching routes may change. -

      - - - - - - - - - - - - - - -
      Prefix{{ object.prefixes.all.0.as_roa_prefix }}
      Max Length{{ object.prefixes.all.0.max_prefixlen }}
      AS{{ object.asn }}
      - -
      - {% csrf_token %} - - Cancel -
      -
      - -
      -

      Matching Routes

      - - - - - - - - {% for r in routes %} - - - - - - {% endfor %} -
      PrefixOrigin ASValidation Status
      {{ r.get_prefix_display }}{{ r.asn }}{% validity_label r.newstatus %}
      -
      -
      -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/roarequest_confirm_form.html b/rpkid/rpki/gui/app/templates/app/roarequest_confirm_form.html deleted file mode 100644 index 446bb6a4..00000000 --- a/rpkid/rpki/gui/app/templates/app/roarequest_confirm_form.html +++ /dev/null @@ -1,60 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{% block content %} -
      -

      Confirm ROA Request

      -
      - -
      -
      -
      -

      Please confirm that you would like to create the following ROA. - The accompanying table indicates how the validation status may change as a result. -

      - - - - - - - - - - - - -
      ASPrefixMax Length
      {{ asn }}{{ prefix }}{{ max_prefixlen }}
      - -
      - {% csrf_token %} - {% include "app/bootstrap_form.html" %} - -
      - - Cancel -
      -
      -
      - -
      -

      Matched Routes

      - - - - - - - - {% for r in routes %} - - - - - - {% endfor %} -
      PrefixOrigin ASValidation Status
      {{ r.get_prefix_display }}{{ r.asn }}{{ r.status }}
      -
      - -
      -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/roarequest_confirm_multi_form.html b/rpkid/rpki/gui/app/templates/app/roarequest_confirm_multi_form.html deleted file mode 100644 index 4a06a4aa..00000000 --- a/rpkid/rpki/gui/app/templates/app/roarequest_confirm_multi_form.html +++ /dev/null @@ -1,66 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} -{% load app_extras %} - -{% block content %} -
      -

      Confirm ROA Requests

      -
      - -
      -
      -
      -

      Please confirm that you would like to create the following ROA(s). - The accompanying table indicates how the validation status may change as a result. -

      - - - - - - - - {% for roa in roas %} - - - - - - {% endfor %} -
      PrefixMax LengthAS
      {{ roa.prefix }}{{ roa.max_prefixlen }}{{ roa.asn }}
      - -
      - {% csrf_token %} - {{ formset.management_form }} - {% for form in formset %} - {% include "app/bootstrap_form.html" %} - {% endfor %} - -
      - - Cancel -
      -
      -
      - -
      -

      Matched Routes

      - - - - - - - - {% for r in routes %} - - - - - - {% endfor %} -
      PrefixOrigin ASValidation Status
      {{ r.get_prefix_display }}{{ r.asn }}{% validity_label r.newstatus %}
      -
      - -
      -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/roarequest_form.html b/rpkid/rpki/gui/app/templates/app/roarequest_form.html deleted file mode 100644 index 3a29131d..00000000 --- a/rpkid/rpki/gui/app/templates/app/roarequest_form.html +++ /dev/null @@ -1,50 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{# This form is used for creating a new ROA request #} - -{% block content %} -
      -

      Create ROA

      -
      - - - - -
      -
      -
      - {% csrf_token %} - {% include "app/bootstrap_form.html" %} -
      - - Cancel -
      -
      -
      - -
      - Routes matching your prefix: - - - -
      PrefixAS
      -
      -
      -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/roarequest_multi_form.html b/rpkid/rpki/gui/app/templates/app/roarequest_multi_form.html deleted file mode 100644 index 06d07943..00000000 --- a/rpkid/rpki/gui/app/templates/app/roarequest_multi_form.html +++ /dev/null @@ -1,28 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{% block content %} -
      -

      Create ROAs

      -
      - -
      - {% csrf_token %} - {{ formset.management_form }} - {% for form in formset %} -
      - {{ form.prefix }} - {{ form.max_prefixlen }} - {{ form.asn }} - - {% if form.errors %}{{ form.errors }}{% endif %} - {% if form.non_field_errors %}{{ form.non_field_errors }}{% endif %} -
      - {% endfor %} - -
      - - Cancel -
      -
      -{% endblock %} diff --git a/rpkid/rpki/gui/app/templates/app/route_detail.html b/rpkid/rpki/gui/app/templates/app/route_detail.html deleted file mode 100644 index 84add4a8..00000000 --- a/rpkid/rpki/gui/app/templates/app/route_detail.html +++ /dev/null @@ -1,58 +0,0 @@ -{% extends "app/app_base.html" %} -{% load app_extras %} -{% load bootstrap_pager %} - -{# template for displaying the list of ROAs covering a specific route #} - -{% block content %} - - -
      -
      - - - - - - - - - - - -
      PrefixASValidity
      {{ object.as_resource_range }}{{ object.asn }}{% validity_label object.status %}
      -
      -
      - -
      -
      -

      The table below lists all ROAs which cover the route described above. - - - - - - - - - - - - - {% for pfx in roa_prefixes %} - - - - - - - - {% endfor %} - -
      PrefixMax LengthASNExpiresURI
      {{ pfx.as_resource_range }}{{ pfx.max_length }}{{ pfx.roas.all.0.asid }}{{ pfx.roas.all.0.not_after }}{{ pfx.roas.all.0.repo.uri }}
      - {% bootstrap_pager request roa_prefixes %} -

      -
      -{% endblock %} diff --git a/rpkid/rpki/gui/app/templates/app/routes_view.html b/rpkid/rpki/gui/app/templates/app/routes_view.html deleted file mode 100644 index 885f3fa9..00000000 --- a/rpkid/rpki/gui/app/templates/app/routes_view.html +++ /dev/null @@ -1,55 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} -{% load bootstrap_pager %} -{% load app_extras %} - -{% block sidebar_extra %} - -
    • IPv4: {{ timestamp.bgp_v4_import.isoformat }}
    • -
    • IPv6: {{ timestamp.bgp_v6_import.isoformat }}
    • - -
    • {{ timestamp.rcynic_import.isoformat }}
    • -{% endblock sidebar_extra %} - -{% block content %} - - - -

      -This view shows currently advertised routes for the prefixes listed in resource certs received from RPKI parents. - -

      - {% csrf_token %} - - - - - - - - - - - {% for r in routes %} - - - - - - - {% endfor %} - -
      PrefixOrigin ASValidation Status
      {{ r.get_prefix_display }}{{ r.asn }} - {% validity_label r.status %} - -
      -
      - -
      -
      - -{% bootstrap_pager request routes %} - -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/app/user_list.html b/rpkid/rpki/gui/app/templates/app/user_list.html deleted file mode 100644 index 1b419ded..00000000 --- a/rpkid/rpki/gui/app/templates/app/user_list.html +++ /dev/null @@ -1,37 +0,0 @@ -{% extends "app/app_base.html" %} -{% load url from future %} - -{% block content %} - - -

      -This page lists all user accounts in the web interface. Note that this is distinct from the -list of resource holders. -

      - - - - - - - - - - - {% for user in object_list %} - - - - - - {% endfor %} - -
      UsernameEmail
      {{ user.username }}{{ user.email }} - - -
      - - Create -{% endblock content %} diff --git a/rpkid/rpki/gui/app/templates/base.html b/rpkid/rpki/gui/app/templates/base.html deleted file mode 100644 index 08d0c112..00000000 --- a/rpkid/rpki/gui/app/templates/base.html +++ /dev/null @@ -1,63 +0,0 @@ -{% load url from future %} -{% load app_extras %} - - - - - - {% block title %}RPKI {% if request.session.handle %}: {{ request.session.handle }}{% endif %}{% endblock %} - {% block head %}{% endblock %} - - - - - - - - -
      - -
      - -
      - -
      -
      - {% block sidebar %}{% endblock %} -
      - -
      - {% if messages %} - {% for message in messages %} - {# this will break if there is more than one tag, but don't expect to use that feature #} -
      - {{ message }} -
      - {% endfor %} - {% endif %} - {% block content %}{% endblock %} -
      -
      - - - diff --git a/rpkid/rpki/gui/app/templates/registration/login.html b/rpkid/rpki/gui/app/templates/registration/login.html deleted file mode 100644 index 0d6fb6fd..00000000 --- a/rpkid/rpki/gui/app/templates/registration/login.html +++ /dev/null @@ -1,25 +0,0 @@ -{% extends "base.html" %} -{% load url from future %} - -{% block content %} - - -{% if form.errors %} -
      -

      Your username and password didn't match. Please try again.

      -
      -{% endif %} - -
      - {% csrf_token %} - {% include "app/bootstrap_form.html" %} - - -
      - -
      -
      - -{% endblock %} diff --git a/rpkid/rpki/gui/app/templatetags/__init__.py b/rpkid/rpki/gui/app/templatetags/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/rpkid/rpki/gui/app/templatetags/app_extras.py b/rpkid/rpki/gui/app/templatetags/app_extras.py deleted file mode 100644 index 2bde9bc2..00000000 --- a/rpkid/rpki/gui/app/templatetags/app_extras.py +++ /dev/null @@ -1,58 +0,0 @@ -from django import template - -register = template.Library() - - -@register.simple_tag -def verbose_name(obj): - "Return the model class' verbose name." - return obj._meta.verbose_name.capitalize() - - -@register.simple_tag -def verbose_name_plural(qs): - "Return the verbose name for the model class." - return qs.model._meta.verbose_name_plural.capitalize() - -css = { - 'valid': 'label-success', - 'invalid': 'label-important' -} - - -@register.simple_tag -def validity_label(validity): - return '%s' % (css.get(validity, ''), validity) - - -@register.simple_tag -def severity_class(severity): - css = { - 0: 'label-info', - 1: 'label-warning', - 2: 'label-important', - } - return css.get(severity) - - -@register.simple_tag -def alert_count(conf): - qs = conf.alerts.filter(seen=False) - unread = len(qs) - if unread: - severity = max([x.severity for x in qs]) - css = { - 0: 'badge-info', - 1: 'badge-warning', - 2: 'badge-important' - } - css_class = css.get(severity) - else: - css_class = 'badge-default' - return u'%d' % (css_class, unread) - - -@register.simple_tag -def rpki_version(): - import rpki.version - return rpki.version.VERSION diff --git a/rpkid/rpki/gui/app/templatetags/bootstrap_pager.py b/rpkid/rpki/gui/app/templatetags/bootstrap_pager.py deleted file mode 100644 index bae8445a..00000000 --- a/rpkid/rpki/gui/app/templatetags/bootstrap_pager.py +++ /dev/null @@ -1,55 +0,0 @@ -from django import template - -register = template.Library() - - -class BootstrapPagerNode(template.Node): - def __init__(self, request, pager_object): - self.request = 
template.Variable(request) - self.pager_object = template.Variable(pager_object) - - def render(self, context): - request = self.request.resolve(context) - pager_object = self.pager_object.resolve(context) - if pager_object.paginator.num_pages == 1: - return '' - r = ['') - return '\n'.join(r) - - -@register.tag -def bootstrap_pager(parser, token): - try: - tag_name, request, pager_object = token.split_contents() - except ValueError: - raise template.TemplateSyntaxError("%r tag requires two arguments" % token.contents.split()[0]) - return BootstrapPagerNode(request, pager_object) diff --git a/rpkid/rpki/gui/app/timestamp.py b/rpkid/rpki/gui/app/timestamp.py deleted file mode 100644 index 959f2025..00000000 --- a/rpkid/rpki/gui/app/timestamp.py +++ /dev/null @@ -1,25 +0,0 @@ -# $Id$ -# Copyright (C) 2012 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# - -import models -from datetime import datetime - -def update(name): - "Set the timestamp value for the given name to the current time." 
- q = models.Timestamp.objects.filter(name=name) - obj = q[0] if q else models.Timestamp(name=name) - obj.ts = datetime.utcnow() - obj.save() diff --git a/rpkid/rpki/gui/app/urls.py b/rpkid/rpki/gui/app/urls.py deleted file mode 100644 index 92e90b0e..00000000 --- a/rpkid/rpki/gui/app/urls.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions -# Copyright (C) 2012 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -__version__ = '$Id$' - -from django.conf.urls import patterns, url -from rpki.gui.app import views - -urlpatterns = patterns( - '', - (r'^$', views.dashboard), - url(r'^alert/$', views.AlertListView.as_view(), name='alert-list'), - url(r'^alert/clear_all$', views.alert_clear_all, name='alert-clear-all'), - url(r'^alert/(?P\d+)/$', views.AlertDetailView.as_view(), - name='alert-detail'), - url(r'^alert/(?P\d+)/delete$', views.AlertDeleteView.as_view(), - name='alert-delete'), - (r'^conf/export$', views.conf_export), - (r'^conf/list$', views.conf_list), - (r'^conf/select$', views.conf_select), - url(r'^conf/export_asns$', views.export_asns, name='export-asns'), - url(r'^conf/export_prefixes$', views.export_prefixes, name='export-prefixes'), - url(r'^conf/import_asns$', views.import_asns, name='import-asns'), - url(r'^conf/import_prefixes$', views.import_prefixes, name='import-prefixes'), - (r'^parent/import$', views.parent_import), - (r'^parent/(?P\d+)/$', views.parent_detail), - (r'^parent/(?P\d+)/delete$', views.parent_delete), - (r'^parent/(?P\d+)/export$', views.parent_export), - (r'^child/import$', views.child_import), - (r'^child/(?P\d+)/$', views.child_detail), - (r'^child/(?P\d+)/add_address$', views.child_add_prefix), - (r'^child/(?P\d+)/add_asn$', views.child_add_asn), - (r'^child/(?P\d+)/delete$', views.child_delete), - (r'^child/(?P\d+)/edit$', views.child_edit), - (r'^child/(?P\d+)/export$', views.child_response), - url(r'^gbr/create$', views.ghostbuster_create, name='gbr-create'), - url(r'^gbr/(?P\d+)/$', views.GhostbusterDetailView.as_view(), name='gbr-detail'), - url(r'^gbr/(?P\d+)/edit$', views.ghostbuster_edit, name='gbr-edit'), - url(r'^gbr/(?P\d+)/delete$', views.ghostbuster_delete, name='gbr-delete'), - (r'^refresh$', views.refresh), - (r'^client/import$', views.client_import), - (r'^client/$', views.client_list), - (r'^client/(?P\d+)/$', views.client_detail), - (r'^client/(?P\d+)/delete$', views.client_delete), - 
url(r'^client/(?P\d+)/export$', views.client_export, name='client-export'), - (r'^repo/import$', views.repository_import), - (r'^repo/(?P\d+)/$', views.repository_detail), - (r'^repo/(?P\d+)/delete$', views.repository_delete), - (r'^resource_holder/$', views.resource_holder_list), - (r'^resource_holder/create$', views.resource_holder_create), - (r'^resource_holder/(?P\d+)/delete$', views.resource_holder_delete), - (r'^resource_holder/(?P\d+)/edit$', views.resource_holder_edit), - (r'^roa/(?P\d+)/$', views.roa_detail), - (r'^roa/create$', views.roa_create), - (r'^roa/create_multi$', views.roa_create_multi), - (r'^roa/confirm$', views.roa_create_confirm), - (r'^roa/confirm_multi$', views.roa_create_multi_confirm), - url(r'^roa/export$', views.roa_export, name='roa-export'), - url(r'^roa/import$', views.roa_import, name='roa-import'), - (r'^roa/(?P\d+)/delete$', views.roa_delete), - url(r'^roa/(?P\d+)/clone$', views.roa_clone, name="roa-clone"), - (r'^route/$', views.route_view), - (r'^route/(?P\d+)/$', views.route_detail), - url(r'^route/suggest$', views.route_suggest, name="suggest-roas"), - (r'^user/$', views.user_list), - (r'^user/create$', views.user_create), - (r'^user/(?P\d+)/delete$', views.user_delete), - (r'^user/(?P\d+)/edit$', views.user_edit), -) diff --git a/rpkid/rpki/gui/app/views.py b/rpkid/rpki/gui/app/views.py deleted file mode 100644 index db4cf0c1..00000000 --- a/rpkid/rpki/gui/app/views.py +++ /dev/null @@ -1,1314 +0,0 @@ -# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions -# Copyright (C) 2012 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -This module contains the view functions implementing the web portal -interface. - -""" - -__version__ = '$Id$' - -import os -import os.path -from tempfile import NamedTemporaryFile -import cStringIO -import csv -import logging - -from django.utils.decorators import method_decorator -from django.contrib.auth.decorators import login_required -from django.shortcuts import get_object_or_404, render, redirect -from django.utils.http import urlquote -from django import http -from django.core.urlresolvers import reverse, reverse_lazy -from django.contrib.auth.models import User -from django.views.generic import DetailView, ListView, DeleteView -from django.core.paginator import Paginator, InvalidPage -from django.forms.formsets import formset_factory, BaseFormSet -import django.db.models -from django.contrib import messages - -from rpki.irdb import Zookeeper, ChildASN, ChildNet, ROARequestPrefix -from rpki.gui.app import models, forms, glue, range_list -from rpki.resource_set import (resource_range_as, resource_range_ip, - roa_prefix_ipv4) -from rpki import sundial -import rpki.exceptions - -from rpki.gui.cacheview.models import ROA -from rpki.gui.routeview.models import RouteOrigin -from rpki.gui.decorators import tls_required - -logger = logging.getLogger(__name__) - - -def superuser_required(f): - """Decorator which returns HttpResponseForbidden if the user does - not have superuser permissions. 
- - """ - @login_required - def _wrapped(request, *args, **kwargs): - if not request.user.is_superuser: - return http.HttpResponseForbidden() - return f(request, *args, **kwargs) - return _wrapped - - -def handle_required(f): - """Decorator for view functions which require the user to be logged in and - a resource handle selected for the session. - - """ - @login_required - @tls_required - def wrapped_fn(request, *args, **kwargs): - if 'handle' not in request.session: - if request.user.is_superuser: - conf = models.Conf.objects.all() - else: - conf = models.Conf.objects.filter(confacl__user=request.user) - - if conf.count() == 1: - request.session['handle'] = conf[0] - elif conf.count() == 0: - return render(request, 'app/conf_empty.html', {}) - else: - url = '%s?next=%s' % (reverse(conf_list), - urlquote(request.get_full_path())) - return http.HttpResponseRedirect(url) - - return f(request, *args, **kwargs) - return wrapped_fn - - -@handle_required -def generic_import(request, queryset, configure, form_class=None, - post_import_redirect=None): - """ - Generic view function for importing XML files used in the setup - process. - - queryset - queryset containing all objects of the type being imported - - configure - method on Zookeeper to invoke with the imported XML file - - form_class - specifies the form to use for import. If None, uses the generic - forms.ImportForm. - - post_import_redirect - if None (default), the user will be redirected to the detail page for - the imported object. Otherwise, the user will be redirected to the - specified URL. 
- - """ - conf = request.session['handle'] - if form_class is None: - form_class = forms.ImportForm - if request.method == 'POST': - form = form_class(request.POST, request.FILES) - if form.is_valid(): - tmpf = NamedTemporaryFile(prefix='import', suffix='.xml', - delete=False) - tmpf.write(form.cleaned_data['xml'].read()) - tmpf.close() - z = Zookeeper(handle=conf.handle) - handle = form.cleaned_data.get('handle') - # CharField uses an empty string for the empty value, rather than - # None. Convert to none in this case, since configure_child/parent - # expects it. - if handle == '': - handle = None - # configure_repository returns None, so can't use tuple expansion - # here. Unpack the tuple below if post_import_redirect is None. - r = configure(z, tmpf.name, handle) - # force rpkid run now - z.synchronize_ca(poke=True) - os.remove(tmpf.name) - if post_import_redirect: - url = post_import_redirect - else: - _, handle = r - url = queryset.get(issuer=conf, - handle=handle).get_absolute_url() - return http.HttpResponseRedirect(url) - else: - form = form_class() - - return render(request, 'app/app_form.html', { - 'form': form, - 'form_title': 'Import ' + queryset.model._meta.verbose_name.capitalize(), - }) - - -@handle_required -def dashboard(request): - conf = request.session['handle'] - - used_asns = range_list.RangeList() - - # asns used in my roas - qs = models.ROARequest.objects.filter(issuer=conf) - roa_asns = set((obj.asn for obj in qs)) - used_asns.extend((resource_range_as(asn, asn) for asn in roa_asns)) - - # asns given to my children - child_asns = ChildASN.objects.filter(child__in=conf.children.all()) - used_asns.extend((resource_range_as(obj.start_as, obj.end_as) for obj in child_asns)) - - # my received asns - asns = models.ResourceRangeAS.objects.filter(cert__conf=conf) - my_asns = range_list.RangeList([resource_range_as(obj.min, obj.max) for obj in asns]) - - unused_asns = my_asns.difference(used_asns) - - used_prefixes = range_list.RangeList() - 
used_prefixes_v6 = range_list.RangeList() - - # prefixes used in my roas - for obj in models.ROARequestPrefix.objects.filter(roa_request__issuer=conf, - version='IPv4'): - used_prefixes.append(obj.as_resource_range()) - - for obj in models.ROARequestPrefix.objects.filter(roa_request__issuer=conf, - version='IPv6'): - used_prefixes_v6.append(obj.as_resource_range()) - - # prefixes given to my children - for obj in ChildNet.objects.filter(child__in=conf.children.all(), - version='IPv4'): - used_prefixes.append(obj.as_resource_range()) - - for obj in ChildNet.objects.filter(child__in=conf.children.all(), - version='IPv6'): - used_prefixes_v6.append(obj.as_resource_range()) - - # my received prefixes - prefixes = models.ResourceRangeAddressV4.objects.filter(cert__conf=conf).all() - prefixes_v6 = models.ResourceRangeAddressV6.objects.filter(cert__conf=conf).all() - my_prefixes = range_list.RangeList([obj.as_resource_range() for obj in prefixes]) - my_prefixes_v6 = range_list.RangeList([obj.as_resource_range() for obj in prefixes_v6]) - - unused_prefixes = my_prefixes.difference(used_prefixes) - # monkey-patch each object with a boolean value indicating whether or not - # it is a prefix. We have to do this here because in the template there is - # no way to catch the MustBePrefix exception. 
- for x in unused_prefixes: - try: - x.prefixlen() - x.is_prefix = True - except rpki.exceptions.MustBePrefix: - x.is_prefix = False - - unused_prefixes_v6 = my_prefixes_v6.difference(used_prefixes_v6) - for x in unused_prefixes_v6: - try: - x.prefixlen() - x.is_prefix = True - except rpki.exceptions.MustBePrefix: - x.is_prefix = False - - clients = models.Client.objects.all() if request.user.is_superuser else None - - return render(request, 'app/dashboard.html', { - 'conf': conf, - 'unused_asns': unused_asns, - 'unused_prefixes': unused_prefixes, - 'unused_prefixes_v6': unused_prefixes_v6, - 'asns': asns, - 'prefixes': prefixes, - 'prefixes_v6': prefixes_v6, - 'clients': clients, - }) - - -@login_required -def conf_list(request, **kwargs): - """Allow the user to select a handle.""" - log = request.META['wsgi.errors'] - next_url = request.GET.get('next', reverse(dashboard)) - if request.user.is_superuser: - qs = models.Conf.objects.all() - else: - qs = models.Conf.objects.filter(confacl__user=request.user) - return render(request, 'app/conf_list.html', { - 'conf_list': qs, - 'next_url': next_url - }) - - -@login_required -def conf_select(request): - """Change the handle for the current session.""" - if not 'handle' in request.GET: - return redirect(conf_list) - handle = request.GET['handle'] - next_url = request.GET.get('next', reverse(dashboard)) - if request.user.is_superuser: - request.session['handle'] = get_object_or_404(models.Conf, handle=handle) - else: - request.session['handle'] = get_object_or_404( - models.Conf, confacl__user=request.user, handle=handle - ) - return http.HttpResponseRedirect(next_url) - - -def serve_xml(content, basename, ext='xml'): - """ - Generate a HttpResponse object with the content type set to XML. - - `content` is a string. - - `basename` is the prefix to specify for the XML filename. 
- - `csv` is the type (default: xml) - - """ - resp = http.HttpResponse(content, mimetype='application/%s' % ext) - resp['Content-Disposition'] = 'attachment; filename=%s.%s' % (basename, ext) - return resp - - -@handle_required -def conf_export(request): - """Return the identity.xml for the current handle.""" - conf = request.session['handle'] - z = Zookeeper(handle=conf.handle) - xml = z.generate_identity() - return serve_xml(str(xml), '%s.identity' % conf.handle) - - -@handle_required -def export_asns(request): - """Export CSV file containing ASN allocations to children.""" - conf = request.session['handle'] - s = cStringIO.StringIO() - csv_writer = csv.writer(s, delimiter=' ') - for childasn in ChildASN.objects.filter(child__issuer=conf): - csv_writer.writerow([childasn.child.handle, str(childasn.as_resource_range())]) - return serve_xml(s.getvalue(), '%s.asns' % conf.handle, ext='csv') - - -@handle_required -def import_asns(request): - conf = request.session['handle'] - if request.method == 'POST': - form = forms.ImportCSVForm(request.POST, request.FILES) - if form.is_valid(): - f = NamedTemporaryFile(prefix='asns', suffix='.csv', delete=False) - f.write(request.FILES['csv'].read()) - f.close() - z = Zookeeper(handle=conf.handle) - z.load_asns(f.name) - z.run_rpkid_now() - os.unlink(f.name) - messages.success(request, 'Successfully imported AS delgations from CSV file.') - return redirect(dashboard) - else: - form = forms.ImportCSVForm() - return render(request, 'app/import_resource_form.html', { - 'form_title': 'Import CSV containing ASN delegations', - 'form': form, - 'cancel_url': reverse(dashboard) - }) - - -@handle_required -def export_prefixes(request): - """Export CSV file containing ASN allocations to children.""" - conf = request.session['handle'] - s = cStringIO.StringIO() - csv_writer = csv.writer(s, delimiter=' ') - for childnet in ChildNet.objects.filter(child__issuer=conf): - csv_writer.writerow([childnet.child.handle, 
str(childnet.as_resource_range())]) - return serve_xml(s.getvalue(), '%s.prefixes' % conf.handle, ext='csv') - - -@handle_required -def import_prefixes(request): - conf = request.session['handle'] - if request.method == 'POST': - form = forms.ImportCSVForm(request.POST, request.FILES) - if form.is_valid(): - f = NamedTemporaryFile(prefix='prefixes', suffix='.csv', delete=False) - f.write(request.FILES['csv'].read()) - f.close() - z = Zookeeper(handle=conf.handle) - z.load_prefixes(f.name) - z.run_rpkid_now() - os.unlink(f.name) - messages.success(request, 'Successfully imported prefix delegations from CSV file.') - return redirect(dashboard) - else: - form = forms.ImportCSVForm() - return render(request, 'app/import_resource_form.html', { - 'form_title': 'Import CSV containing Prefix delegations', - 'form': form, - 'cancel_url': reverse(dashboard) - }) - - -@handle_required -def parent_import(request): - conf = request.session['handle'] - return generic_import(request, conf.parents, Zookeeper.configure_parent) - - -@handle_required -def parent_detail(request, pk): - return render(request, 'app/parent_detail.html', { - 'object': get_object_or_404(request.session['handle'].parents, pk=pk)}) - - -@handle_required -def parent_delete(request, pk): - conf = request.session['handle'] - obj = get_object_or_404(conf.parents, pk=pk) # confirm permission - log = request.META['wsgi.errors'] - if request.method == 'POST': - form = forms.Empty(request.POST, request.FILES) - if form.is_valid(): - z = Zookeeper(handle=conf.handle, logstream=log) - z.delete_parent(obj.handle) - z.synchronize_ca() - return http.HttpResponseRedirect(reverse(dashboard)) - else: - form = forms.Empty() - return render(request, 'app/object_confirm_delete.html', { - 'object': obj, - 'form': form, - 'parent_template': 'app/parent_detail.html' - }) - - -@handle_required -def parent_export(request, pk): - """Export XML repository request for a given parent.""" - conf = request.session['handle'] - parent = 
get_object_or_404(conf.parents, pk=pk) - z = Zookeeper(handle=conf.handle) - xml = z.generate_repository_request(parent) - return serve_xml(str(xml), '%s.repository' % parent.handle) - - -@handle_required -def child_import(request): - conf = request.session['handle'] - return generic_import(request, conf.children, Zookeeper.configure_child) - - -@handle_required -def child_add_prefix(request, pk): - logstream = request.META['wsgi.errors'] - conf = request.session['handle'] - child = get_object_or_404(conf.children, pk=pk) - if request.method == 'POST': - form = forms.AddNetForm(request.POST, child=child) - if form.is_valid(): - address_range = form.cleaned_data.get('address_range') - r = resource_range_ip.parse_str(address_range) - version = 'IPv%d' % r.version - child.address_ranges.create(start_ip=str(r.min), end_ip=str(r.max), - version=version) - Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() - return http.HttpResponseRedirect(child.get_absolute_url()) - else: - form = forms.AddNetForm(child=child) - return render(request, 'app/app_form.html', - {'object': child, 'form': form, 'form_title': 'Add Prefix'}) - - -@handle_required -def child_add_asn(request, pk): - logstream = request.META['wsgi.errors'] - conf = request.session['handle'] - child = get_object_or_404(conf.children, pk=pk) - if request.method == 'POST': - form = forms.AddASNForm(request.POST, child=child) - if form.is_valid(): - asns = form.cleaned_data.get('asns') - r = resource_range_as.parse_str(asns) - child.asns.create(start_as=r.min, end_as=r.max) - Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() - return http.HttpResponseRedirect(child.get_absolute_url()) - else: - form = forms.AddASNForm(child=child) - return render(request, 'app/app_form.html', - {'object': child, 'form': form, 'form_title': 'Add ASN'}) - - -@handle_required -def child_detail(request, pk): - child = get_object_or_404(request.session['handle'].children, pk=pk) - return render(request, 
'app/child_detail.html', {'object': child}) - - -@handle_required -def child_edit(request, pk): - """Edit the end validity date for a resource handle's child.""" - log = request.META['wsgi.errors'] - conf = request.session['handle'] - child = get_object_or_404(conf.children.all(), pk=pk) - form_class = forms.ChildForm(child) - if request.method == 'POST': - form = form_class(request.POST, request.FILES) - if form.is_valid(): - child.valid_until = sundial.datetime.from_datetime(form.cleaned_data.get('valid_until')) - child.save() - # remove AS & prefixes that are not selected in the form - models.ChildASN.objects.filter(child=child).exclude(pk__in=form.cleaned_data.get('as_ranges')).delete() - models.ChildNet.objects.filter(child=child).exclude(pk__in=form.cleaned_data.get('address_ranges')).delete() - Zookeeper(handle=conf.handle, logstream=log).run_rpkid_now() - return http.HttpResponseRedirect(child.get_absolute_url()) - else: - form = form_class(initial={ - 'as_ranges': child.asns.all(), - 'address_ranges': child.address_ranges.all()}) - - return render(request, 'app/app_form.html', { - 'object': child, - 'form': form, - 'form_title': 'Edit Child: ' + child.handle, - }) - - -@handle_required -def child_response(request, pk): - """ - Export the XML file containing the output of the configure_child - to send back to the client. 
- - """ - conf = request.session['handle'] - child = get_object_or_404(models.Child, issuer=conf, pk=pk) - z = Zookeeper(handle=conf.handle) - xml = z.generate_parental_response(child) - resp = serve_xml(str(xml), child.handle) - return resp - - -@handle_required -def child_delete(request, pk): - logstream = request.META['wsgi.errors'] - conf = request.session['handle'] - child = get_object_or_404(conf.children, pk=pk) - if request.method == 'POST': - form = forms.Empty(request.POST) - if form.is_valid(): - z = Zookeeper(handle=conf.handle, logstream=logstream) - z.delete_child(child.handle) - z.synchronize_ca() - return http.HttpResponseRedirect(reverse(dashboard)) - else: - form = forms.Empty() - return render(request, 'app/object_confirm_delete.html', { - 'object': child, - 'form': form, - 'parent_template': 'app/child_detail.html' - }) - - -@handle_required -def roa_detail(request, pk): - conf = request.session['handle'] - obj = get_object_or_404(conf.roas, pk=pk) - return render(request, 'app/roa_detail.html', {'object': obj}) - - -def get_covered_routes(rng, max_prefixlen, asn): - """Returns a list of routeview.models.RouteOrigin objects which would - change validation status if a ROA were created with the parameters to this - function. - - A "newstatus" attribute is monkey-patched on the RouteOrigin objects which - can be used in the template. "status" remains the current validation - status of the object. - - """ - - # find all routes that match or are completed covered by the proposed new roa - qs = RouteOrigin.objects.filter( - prefix_min__gte=rng.min, - prefix_max__lte=rng.max - ) - routes = [] - for route in qs: - status = route.status - # tweak the validation status due to the presence of the - # new ROA. 
Don't need to check the prefix bounds here - # because all the matches routes will be covered by this - # new ROA - if status == 'unknown': - # if the route was previously unknown (no covering - # ROAs), then: - # if the AS matches, it is valid, otherwise invalid - if (route.asn != 0 and route.asn == asn and route.prefixlen <= max_prefixlen): - route.newstatus = 'valid' - else: - route.newstatus = 'invalid' - routes.append(route) - elif status == 'invalid': - # if the route was previously invalid, but this new ROA - # matches the ASN, it is now valid - if route.asn != 0 and route.asn == asn and route.prefixlen <= max_prefixlen: - route.newstatus = 'valid' - routes.append(route) - - return routes - - -@handle_required -def roa_create(request): - """Present the user with a form to create a ROA. - - Doesn't use the generic create_object() form because we need to - create both the ROARequest and ROARequestPrefix objects. - - """ - - conf = request.session['handle'] - if request.method == 'POST': - form = forms.ROARequest(request.POST, request.FILES, conf=conf) - if form.is_valid(): - asn = form.cleaned_data.get('asn') - rng = form._as_resource_range() # FIXME calling "private" method - max_prefixlen = int(form.cleaned_data.get('max_prefixlen')) - - routes = get_covered_routes(rng, max_prefixlen, asn) - - prefix = str(rng) - form = forms.ROARequestConfirm(initial={'asn': asn, - 'prefix': prefix, - 'max_prefixlen': max_prefixlen}) - return render(request, 'app/roarequest_confirm_form.html', - {'form': form, - 'asn': asn, - 'prefix': prefix, - 'max_prefixlen': max_prefixlen, - 'routes': routes}) - else: - # pull initial values from query parameters - d = {} - for s in ('asn', 'prefix'): - if s in request.GET: - d[s] = request.GET[s] - form = forms.ROARequest(initial=d) - - return render(request, 'app/roarequest_form.html', {'form': form}) - - -class ROARequestFormSet(BaseFormSet): - """There is no way to pass arbitrary keyword arguments to the form - constructor, so we 
have to override BaseFormSet to allow it. - - """ - def __init__(self, *args, **kwargs): - self.conf = kwargs.pop('conf') - super(ROARequestFormSet, self).__init__(*args, **kwargs) - - def _construct_forms(self): - self.forms = [] - for i in xrange(self.total_form_count()): - self.forms.append(self._construct_form(i, conf=self.conf)) - - -def split_with_default(s): - xs = s.split(',') - if len(xs) == 1: - return xs[0], None - return xs - - -@handle_required -def roa_create_multi(request): - """version of roa_create that uses a formset to allow entry of multiple - roas on a single page. - - ROAs can be specified in the GET query string, as such: - - ?roa=prefix,asn - - Mulitple ROAs may be specified: - - ?roa=prefix,asn+roa=prefix2,asn2 - - If an IP range is specified, it will be automatically split into multiple - prefixes: - - ?roa=1.1.1.1-2.2.2.2,42 - - The ASN may optionally be omitted. - - """ - - conf = request.session['handle'] - if request.method == 'GET': - init = [] - for x in request.GET.getlist('roa'): - rng, asn = split_with_default(x) - rng = resource_range_ip.parse_str(rng) - if rng.can_be_prefix: - init.append({'asn': asn, 'prefix': str(rng)}) - else: - v = [] - rng.chop_into_prefixes(v) - init.extend([{'asn': asn, 'prefix': str(p)} for p in v]) - formset = formset_factory(forms.ROARequest, formset=ROARequestFormSet, - can_delete=True)(initial=init, conf=conf) - elif request.method == 'POST': - formset = formset_factory(forms.ROARequest, formset=ROARequestFormSet, - extra=0, can_delete=True)(request.POST, request.FILES, conf=conf) - if formset.is_valid(): - routes = [] - v = [] - # as of Django 1.4.5 we still can't use formset.cleaned_data - # because deleted forms are not excluded, which causes an - # AttributeError to be raised. 
- for form in formset: - if hasattr(form, 'cleaned_data') and form.cleaned_data: # exclude empty forms - asn = form.cleaned_data.get('asn') - rng = resource_range_ip.parse_str(form.cleaned_data.get('prefix')) - max_prefixlen = int(form.cleaned_data.get('max_prefixlen')) - # FIXME: This won't do the right thing in the event that a - # route is covered by multiple ROAs created in the form. - # You will see duplicate entries, each with a potentially - # different validation status. - routes.extend(get_covered_routes(rng, max_prefixlen, asn)) - v.append({'prefix': str(rng), 'max_prefixlen': max_prefixlen, - 'asn': asn}) - # if there were no rows, skip the confirmation step - if v: - formset = formset_factory(forms.ROARequestConfirm, extra=0)(initial=v) - return render(request, 'app/roarequest_confirm_multi_form.html', - {'routes': routes, 'formset': formset, 'roas': v}) - return render(request, 'app/roarequest_multi_form.html', - {'formset': formset}) - - -@handle_required -def roa_create_confirm(request): - """This function is called when the user confirms the creation of a ROA - request. It is responsible for updating the IRDB. - - """ - conf = request.session['handle'] - log = request.META['wsgi.errors'] - if request.method == 'POST': - form = forms.ROARequestConfirm(request.POST, request.FILES) - if form.is_valid(): - asn = form.cleaned_data.get('asn') - prefix = form.cleaned_data.get('prefix') - rng = resource_range_ip.parse_str(prefix) - max_prefixlen = form.cleaned_data.get('max_prefixlen') - # Always create ROA requests with a single prefix. - # https://trac.rpki.net/ticket/32 - roa = models.ROARequest.objects.create(issuer=conf, asn=asn) - v = 'IPv%d' % rng.version - roa.prefixes.create(version=v, prefix=str(rng.min), - prefixlen=rng.prefixlen(), - max_prefixlen=max_prefixlen) - Zookeeper(handle=conf.handle, logstream=log).run_rpkid_now() - return http.HttpResponseRedirect(reverse(dashboard)) - # What should happen when the submission form isn't valid? 
For now - # just fall through and redirect back to the ROA creation form - return http.HttpResponseRedirect(reverse(roa_create)) - - -@handle_required -def roa_create_multi_confirm(request): - """This function is called when the user confirms the creation of a ROA - request. It is responsible for updating the IRDB. - - """ - conf = request.session['handle'] - log = request.META['wsgi.errors'] - if request.method == 'POST': - formset = formset_factory(forms.ROARequestConfirm, extra=0)(request.POST, request.FILES) - if formset.is_valid(): - for cleaned_data in formset.cleaned_data: - asn = cleaned_data.get('asn') - prefix = cleaned_data.get('prefix') - rng = resource_range_ip.parse_str(prefix) - max_prefixlen = cleaned_data.get('max_prefixlen') - # Always create ROA requests with a single prefix. - # https://trac.rpki.net/ticket/32 - roa = models.ROARequest.objects.create(issuer=conf, asn=asn) - v = 'IPv%d' % rng.version - roa.prefixes.create(version=v, prefix=str(rng.min), - prefixlen=rng.prefixlen(), - max_prefixlen=max_prefixlen) - Zookeeper(handle=conf.handle, logstream=log).run_rpkid_now() - return redirect(dashboard) - # What should happen when the submission form isn't valid? For now - # just fall through and redirect back to the ROA creation form - return http.HttpResponseRedirect(reverse(roa_create_multi)) - - -@handle_required -def roa_delete(request, pk): - """Handles deletion of a single ROARequest object. - - Uses a form for double confirmation, displaying how the route - validation status may change as a result. 
- - """ - - conf = request.session['handle'] - roa = get_object_or_404(conf.roas, pk=pk) - if request.method == 'POST': - roa.delete() - Zookeeper(handle=conf.handle).run_rpkid_now() - return redirect(reverse(dashboard)) - - ### Process GET ### - - # note: assumes we only generate one prefix per ROA - roa_prefix = roa.prefixes.all()[0] - rng = roa_prefix.as_resource_range() - - routes = [] - for route in roa.routes: - # select all roas which cover this route - # excluding the current roa - # note: we can't identify the exact ROA here, because we only know what - # was requested to rpkid - roas = route.roas.exclude( - asid=roa.asn, - prefixes__prefix_min=rng.min, - prefixes__prefix_max=rng.max, - prefixes__max_length=roa_prefix.max_prefixlen - ) - - # subselect exact match - if route.asn != 0 and roas.filter(asid=route.asn, - prefixes__max_length__gte=route.prefixlen).exists(): - route.newstatus = 'valid' - elif roas.exists(): - route.newstatus = 'invalid' - else: - route.newstatus = 'unknown' - # we may want to ignore routes for which there is no status change, - # but the user may want to see that nothing has changed explicitly - routes.append(route) - - return render(request, 'app/roarequest_confirm_delete.html', - {'object': roa, 'routes': routes}) - - -@handle_required -def roa_clone(request, pk): - conf = request.session['handle'] - roa = get_object_or_404(conf.roas, pk=pk) - return redirect( - reverse(roa_create_multi) + "?roa=" + str(roa.prefixes.all()[0].as_roa_prefix()) - ) - - -@handle_required -def roa_import(request): - """Import CSV containing ROA declarations.""" - if request.method == 'POST': - form = forms.ImportCSVForm(request.POST, request.FILES) - if form.is_valid(): - import tempfile - tmp = tempfile.NamedTemporaryFile(suffix='.csv', prefix='roas', delete=False) - tmp.write(request.FILES['csv'].read()) - tmp.close() - z = Zookeeper(handle=request.session['handle']) - z.load_roa_requests(tmp.name) - z.run_rpkid_now() - os.unlink(tmp.name) - 
messages.success(request, 'Successfully imported ROAs.') - return redirect(dashboard) - else: - form = forms.ImportCSVForm() - return render(request, 'app/import_resource_form.html', { - 'form_title': 'Import ROAs from CSV', - 'form': form, - 'cancel_url': reverse(dashboard) - }) - - -@handle_required -def roa_export(request): - """Export CSV containing ROA declarations.""" - # FIXME: remove when Zookeeper can do this - f = cStringIO.StringIO() - csv_writer = csv.writer(f, delimiter=' ') - conf = request.session['handle'] - # each roa prefix gets a unique group so rpkid will issue separate roas - for group, roapfx in enumerate(ROARequestPrefix.objects.filter(roa_request__issuer=conf)): - csv_writer.writerow([str(roapfx.as_roa_prefix()), roapfx.roa_request.asn, '%s-%d' % (conf.handle, group)]) - resp = http.HttpResponse(f.getvalue(), mimetype='application/csv') - resp['Content-Disposition'] = 'attachment; filename=roas.csv' - return resp - - -class GhostbusterDetailView(DetailView): - def get_queryset(self): - return self.request.session['handle'].ghostbusters - - -@handle_required -def ghostbuster_delete(request, pk): - conf = request.session['handle'] - logstream = request.META['wsgi.errors'] - obj = get_object_or_404(conf.ghostbusters, pk=pk) - if request.method == 'POST': - form = forms.Empty(request.POST, request.FILES) - if form.is_valid(): - obj.delete() - Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() - return http.HttpResponseRedirect(reverse(dashboard)) - else: - form = forms.Empty(request.POST, request.FILES) - return render(request, 'app/object_confirm_delete.html', { - 'object': obj, - 'form': form, - 'parent_template': 'app/ghostbusterrequest_detail.html' - }) - - -@handle_required -def ghostbuster_create(request): - conf = request.session['handle'] - logstream = request.META['wsgi.errors'] - if request.method == 'POST': - form = forms.GhostbusterRequestForm(request.POST, request.FILES, - conf=conf) - if form.is_valid(): - obj = 
form.save(commit=False) - obj.vcard = glue.ghostbuster_to_vcard(obj) - obj.save() - Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() - return http.HttpResponseRedirect(reverse(dashboard)) - else: - form = forms.GhostbusterRequestForm(conf=conf) - return render(request, 'app/app_form.html', - {'form': form, 'form_title': 'New Ghostbuster Request'}) - - -@handle_required -def ghostbuster_edit(request, pk): - conf = request.session['handle'] - obj = get_object_or_404(conf.ghostbusters, pk=pk) - logstream = request.META['wsgi.errors'] - if request.method == 'POST': - form = forms.GhostbusterRequestForm(request.POST, request.FILES, - conf=conf, instance=obj) - if form.is_valid(): - obj = form.save(commit=False) - obj.vcard = glue.ghostbuster_to_vcard(obj) - obj.save() - Zookeeper(handle=conf.handle, logstream=logstream).run_rpkid_now() - return http.HttpResponseRedirect(reverse(dashboard)) - else: - form = forms.GhostbusterRequestForm(conf=conf, instance=obj) - return render(request, 'app/app_form.html', - {'form': form, 'form_title': 'Edit Ghostbuster Request'}) - - -@handle_required -def refresh(request): - """ - Query rpkid, update the db, and redirect back to the dashboard. - - """ - glue.list_received_resources(request.META['wsgi.errors'], - request.session['handle']) - return http.HttpResponseRedirect(reverse(dashboard)) - - -@handle_required -def route_view(request): - """ - Display a list of global routing table entries which match resources - listed in received certificates. 
- - """ - conf = request.session['handle'] - count = request.GET.get('count', 25) - page = request.GET.get('page', 1) - - paginator = Paginator(conf.routes, count) - try: - routes = paginator.page(page) - except InvalidPage: - # page was empty, or page number was invalid - routes = [] - ts = dict((attr['name'], attr['ts']) for attr in models.Timestamp.objects.values()) - return render(request, 'app/routes_view.html', - {'routes': routes, 'timestamp': ts}) - - -def route_detail(request, pk): - """Show a list of ROAs that match a given IPv4 route.""" - route = get_object_or_404(models.RouteOrigin, pk=pk) - # when running rootd, viewing the 0.0.0.0/0 route will cause a fetch of all - # roas, so we paginate here, even though in the general case the number of - # objects will be small enough to fit a single page - count = request.GET.get('count', 25) - page = request.GET.get('page', 1) - paginator = Paginator(route.roa_prefixes.all(), count) - return render(request, 'app/route_detail.html', { - 'object': route, - 'roa_prefixes': paginator.page(page), - }) - - -def route_suggest(request): - """Handles POSTs from the route view and redirects to the ROA creation - page based on selected route objects. The form should contain elements of - the form "pk-NUM" where NUM is the RouteOrigin object id. - - """ - if request.method == 'POST': - routes = [] - for pk in request.POST.iterkeys(): - logger.debug(pk) - if pk.startswith("pk-"): - n = int(pk[3:]) - routes.append(n) - qs = RouteOrigin.objects.filter(pk__in=routes) - s = [] - for r in qs: - s.append('roa=%s/%d,%d' % (str(r.prefix_min), r.prefixlen, r.asn)) - p = '&'.join(s) - return redirect(reverse(roa_create_multi) + '?' 
+ p) - - -@handle_required -def repository_detail(request, pk): - conf = request.session['handle'] - return render(request, - 'app/repository_detail.html', - {'object': get_object_or_404(conf.repositories, pk=pk)}) - - -@handle_required -def repository_delete(request, pk): - log = request.META['wsgi.errors'] - conf = request.session['handle'] - # Ensure the repository being deleted belongs to the current user. - obj = get_object_or_404(models.Repository, issuer=conf, pk=pk) - if request.method == 'POST': - form = forms.Empty(request.POST, request.FILES) - if form.is_valid(): - z = Zookeeper(handle=conf.handle, logstream=log) - z.delete_repository(obj.handle) - z.synchronize_ca() - return http.HttpResponseRedirect(reverse(dashboard)) - else: - form = forms.Empty() - return render(request, 'app/object_confirm_delete.html', { - 'object': obj, - 'form': form, - 'parent_template': - 'app/repository_detail.html', - }) - - -@handle_required -def repository_import(request): - """Import XML response file from repository operator.""" - return generic_import(request, - models.Repository.objects, - Zookeeper.configure_repository, - form_class=forms.ImportRepositoryForm, - post_import_redirect=reverse(dashboard)) - - -@superuser_required -def client_list(request): - """display a list of all repository client (irdb.models.Client)""" - - return render(request, 'app/client_list.html', { - 'object_list': models.Client.objects.all() - }) - - -@superuser_required -def client_detail(request, pk): - return render(request, 'app/client_detail.html', - {'object': get_object_or_404(models.Client, pk=pk)}) - - -@superuser_required -def client_delete(request, pk): - log = request.META['wsgi.errors'] - obj = get_object_or_404(models.Client, pk=pk) - if request.method == 'POST': - form = forms.Empty(request.POST, request.FILES) - if form.is_valid(): - z = Zookeeper(logstream=log) - z.delete_publication_client(obj.handle) - z.synchronize_pubd() - return 
http.HttpResponseRedirect(reverse(dashboard)) - else: - form = forms.Empty() - return render(request, 'app/object_confirm_delete.html', { - 'object': obj, - 'form': form, - 'parent_template': 'app/client_detail.html' - }) - - -@superuser_required -def client_import(request): - return generic_import(request, models.Client.objects, - Zookeeper.configure_publication_client, - form_class=forms.ImportClientForm, - post_import_redirect=reverse(dashboard)) - - -@superuser_required -def client_export(request, pk): - """Return the XML file resulting from a configure_publication_client - request. - - """ - client = get_object_or_404(models.Client, pk=pk) - z = Zookeeper() - xml = z.generate_repository_response(client) - return serve_xml(str(xml), '%s.repo' % z.handle) - - -### Routines for managing resource handles serviced by this server - -@superuser_required -def resource_holder_list(request): - """Display a list of all the RPKI handles managed by this server.""" - return render(request, 'app/resource_holder_list.html', { - 'object_list': models.Conf.objects.all() - }) - - -@superuser_required -def resource_holder_edit(request, pk): - """Display a list of all the RPKI handles managed by this server.""" - conf = get_object_or_404(models.Conf, pk=pk) - if request.method == 'POST': - form = forms.ResourceHolderForm(request.POST, request.FILES) - if form.is_valid(): - models.ConfACL.objects.filter(conf=conf).delete() - for user in form.cleaned_data.get('users'): - models.ConfACL.objects.create(user=user, conf=conf) - return redirect(resource_holder_list) - else: - users = [acl.user for acl in models.ConfACL.objects.filter(conf=conf).all()] - form = forms.ResourceHolderForm(initial={ - 'users': users - }) - return render(request, 'app/app_form.html', { - 'form_title': "Edit Resource Holder: " + conf.handle, - 'form': form, - 'cancel_url': reverse(resource_holder_list) - }) - - -@superuser_required -def resource_holder_delete(request, pk): - conf = 
get_object_or_404(models.Conf, pk=pk) - log = request.META['wsgi.errors'] - if request.method == 'POST': - form = forms.Empty(request.POST) - if form.is_valid(): - z = Zookeeper(handle=conf.handle, logstream=log) - z.delete_self() - z.synchronize_deleted_ca() - return redirect(resource_holder_list) - else: - form = forms.Empty() - return render(request, 'app/app_confirm_delete.html', { - 'form_title': 'Delete Resource Holder: ' + conf.handle, - 'form': form, - 'cancel_url': reverse(resource_holder_list) - }) - - -@superuser_required -def resource_holder_create(request): - log = request.META['wsgi.errors'] - if request.method == 'POST': - form = forms.ResourceHolderCreateForm(request.POST, request.FILES) - if form.is_valid(): - handle = form.cleaned_data.get('handle') - parent = form.cleaned_data.get('parent') - - zk_child = Zookeeper(handle=handle, logstream=log) - identity_xml = zk_child.initialize_resource_bpki() - if parent: - # FIXME etree_wrapper should allow us to deal with file objects - t = NamedTemporaryFile(delete=False) - t.close() - - identity_xml.save(t.name) - zk_parent = Zookeeper(handle=parent.handle, logstream=log) - parent_response, _ = zk_parent.configure_child(t.name) - parent_response.save(t.name) - zk_parent.synchronize_ca() - repo_req, _ = zk_child.configure_parent(t.name) - repo_req.save(t.name) - repo_resp, _ = zk_parent.configure_publication_client(t.name) - repo_resp.save(t.name) - zk_parent.synchronize_pubd() - zk_child.configure_repository(t.name) - os.remove(t.name) - zk_child.synchronize_ca() - return redirect(resource_holder_list) - else: - form = forms.ResourceHolderCreateForm() - return render(request, 'app/app_form.html', { - 'form': form, - 'form_title': 'Create Resource Holder', - 'cancel_url': reverse(resource_holder_list) - }) - - -### views for managing user logins to the web interface - -@superuser_required -def user_create(request): - if request.method == 'POST': - form = forms.UserCreateForm(request.POST, request.FILES) - 
if form.is_valid(): - username = form.cleaned_data.get('username') - pw = form.cleaned_data.get('password') - email = form.cleaned_data.get('email') - user = User.objects.create_user(username, email, pw) - for conf in form.cleaned_data.get('resource_holders'): - models.ConfACL.objects.create(user=user, conf=conf) - return redirect(user_list) - else: - form = forms.UserCreateForm() - - return render(request, 'app/app_form.html', { - 'form': form, - 'form_title': 'Create User', - 'cancel_url': reverse(user_list), - }) - - -@superuser_required -def user_list(request): - """Display a list of all the RPKI handles managed by this server.""" - return render(request, 'app/user_list.html', { - 'object_list': User.objects.all() - }) - - -@superuser_required -def user_delete(request, pk): - user = get_object_or_404(User, pk=pk) - if request.method == 'POST': - form = forms.Empty(request.POST, request.FILES) - if form.is_valid(): - user.delete() - return redirect(user_list) - else: - form = forms.Empty() - return render(request, 'app/app_confirm_delete.html', { - 'form_title': 'Delete User: ' + user.username, - 'form': form, - 'cancel_url': reverse(user_list) - }) - - -@superuser_required -def user_edit(request, pk): - user = get_object_or_404(User, pk=pk) - if request.method == 'POST': - form = forms.UserEditForm(request.POST) - if form.is_valid(): - pw = form.cleaned_data.get('pw') - if pw: - user.set_password(pw) - user.email = form.cleaned_data.get('email') - user.save() - models.ConfACL.objects.filter(user=user).delete() - handles = form.cleaned_data.get('resource_holders') - for conf in handles: - models.ConfACL.objects.create(user=user, conf=conf) - return redirect(user_list) - else: - form = forms.UserEditForm(initial={ - 'email': user.email, - 'resource_holders': models.Conf.objects.filter(confacl__user=user).all() - }) - return render(request, 'app/app_form.html', { - 'form': form, - 'form_title': 'Edit User: ' + user.username, - 'cancel_url': reverse(user_list) - }) 
- - -class AlertListView(ListView): - # this nonsense is required to decorate CBVs - @method_decorator(handle_required) - def dispatch(self, request, *args, **kwargs): - return super(AlertListView, self).dispatch(request, *args, **kwargs) - - def get_queryset(self, **kwargs): - conf = self.request.session['handle'] - return conf.alerts.all() - - -class AlertDetailView(DetailView): - # this nonsense is required to decorate CBVs - @method_decorator(handle_required) - def dispatch(self, request, *args, **kwargs): - return super(AlertDetailView, self).dispatch(request, *args, **kwargs) - - def get_queryset(self, **kwargs): - conf = self.request.session['handle'] - return conf.alerts.all() - - def get_object(self, **kwargs): - obj = super(AlertDetailView, self).get_object(**kwargs) - # mark alert as read by the user - obj.seen = True - obj.save() - return obj - - -class AlertDeleteView(DeleteView): - success_url = reverse_lazy('alert-list') - - # this nonsense is required to decorate CBVs - @method_decorator(handle_required) - def dispatch(self, request, *args, **kwargs): - return super(AlertDeleteView, self).dispatch(request, *args, **kwargs) - - def get_queryset(self, **kwargs): - conf = self.request.session['handle'] - return conf.alerts.all() - - -@handle_required -def alert_clear_all(request): - """Clear all alerts associated with the current resource holder.""" - if request.method == 'POST': - form = forms.Empty(request.POST, request.FILES) - if form.is_valid(): - # delete alerts - request.session['handle'].clear_alerts() - return redirect('alert-list') - else: - form = forms.Empty() - return render(request, 'app/alert_confirm_clear.html', {'form': form}) diff --git a/rpkid/rpki/gui/cacheview/__init__.py b/rpkid/rpki/gui/cacheview/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/rpkid/rpki/gui/cacheview/forms.py b/rpkid/rpki/gui/cacheview/forms.py deleted file mode 100644 index 28b8ff24..00000000 --- a/rpkid/rpki/gui/cacheview/forms.py 
+++ /dev/null @@ -1,51 +0,0 @@ -# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions -# Copyright (C) 2013 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' - -from django import forms - -from rpki.gui.cacheview.misc import parse_ipaddr -from rpki.exceptions import BadIPResource -from rpki.resource_set import resource_range_as - - -class SearchForm(forms.Form): - asn = forms.CharField(required=False, help_text='AS or range', label='AS') - addr = forms.CharField(required=False, max_length=40, help_text='range/CIDR', label='IP Address') - - def clean(self): - asn = self.cleaned_data.get('asn') - addr = self.cleaned_data.get('addr') - if (asn and addr) or ((not asn) and (not addr)): - raise forms.ValidationError, 'Please specify either an AS or IP range, not both' - - if asn: - try: - resource_range_as.parse_str(asn) - except ValueError: - raise forms.ValidationError, 'invalid AS range' - - if addr: - #try: - parse_ipaddr(addr) - #except BadIPResource: - # raise forms.ValidationError, 'invalid IP address range/prefix' - - return self.cleaned_data - - -class SearchForm2(forms.Form): - resource = forms.CharField(required=True) diff --git a/rpkid/rpki/gui/cacheview/misc.py b/rpkid/rpki/gui/cacheview/misc.py deleted file mode 100644 index 
9a69645c..00000000 --- a/rpkid/rpki/gui/cacheview/misc.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -from rpki.resource_set import resource_range_ipv4, resource_range_ipv6 -from rpki.exceptions import BadIPResource - -def parse_ipaddr(s): - # resource_set functions only accept str - if isinstance(s, unicode): - s = s.encode() - s = s.strip() - r = resource_range_ipv4.parse_str(s) - try: - r = resource_range_ipv4.parse_str(s) - return 4, r - except BadIPResource: - r = resource_range_ipv6.parse_str(s) - return 6, r - -# vim:sw=4 ts=8 expandtab diff --git a/rpkid/rpki/gui/cacheview/models.py b/rpkid/rpki/gui/cacheview/models.py deleted file mode 100644 index c3ee8421..00000000 --- a/rpkid/rpki/gui/cacheview/models.py +++ /dev/null @@ -1,237 +0,0 @@ -# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions -# Copyright (C) 2012 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' - -from datetime import datetime -import time - -from django.db import models -from django.core.urlresolvers import reverse - -import rpki.resource_set -import rpki.gui.models - - -class TelephoneField(models.CharField): - def __init__(self, *args, **kwargs): - kwargs['max_length'] = 255 - models.CharField.__init__(self, *args, **kwargs) - - -class AddressRange(rpki.gui.models.PrefixV4): - @models.permalink - def get_absolute_url(self): - return ('rpki.gui.cacheview.views.addressrange_detail', [str(self.pk)]) - - -class AddressRangeV6(rpki.gui.models.PrefixV6): - @models.permalink - def get_absolute_url(self): - return ('rpki.gui.cacheview.views.addressrange_detail_v6', - [str(self.pk)]) - - -class ASRange(rpki.gui.models.ASN): - @models.permalink - def get_absolute_url(self): - return ('rpki.gui.cacheview.views.asrange_detail', [str(self.pk)]) - -kinds = list(enumerate(('good', 'warn', 'bad'))) -kinds_dict = dict((v, k) for k, v in kinds) - - -class ValidationLabel(models.Model): - """ - Represents a specific error condition defined in the rcynic XML - output file. - """ - label = models.CharField(max_length=79, db_index=True, unique=True) - status = models.CharField(max_length=255) - kind = models.PositiveSmallIntegerField(choices=kinds) - - def __unicode__(self): - return self.label - - -class RepositoryObject(models.Model): - """ - Represents a globally unique RPKI repository object, specified by its URI. 
- """ - uri = models.URLField(unique=True, db_index=True) - -generations = list(enumerate(('current', 'backup'))) -generations_dict = dict((val, key) for (key, val) in generations) - - -class ValidationStatus(models.Model): - timestamp = models.DateTimeField() - generation = models.PositiveSmallIntegerField(choices=generations, null=True) - status = models.ForeignKey(ValidationLabel) - repo = models.ForeignKey(RepositoryObject, related_name='statuses') - - -class SignedObject(models.Model): - """ - Abstract class to hold common metadata for all signed objects. - The signing certificate is ommitted here in order to give a proper - value for the 'related_name' attribute. - """ - repo = models.ForeignKey(RepositoryObject, related_name='cert', unique=True) - - # on-disk file modification time - mtime = models.PositiveIntegerField(default=0) - - # SubjectName - name = models.CharField(max_length=255) - - # value from the SKI extension - keyid = models.CharField(max_length=60, db_index=True) - - # validity period from EE cert which signed object - not_before = models.DateTimeField() - not_after = models.DateTimeField() - - def mtime_as_datetime(self): - """ - convert the local timestamp to UTC and convert to a datetime object - """ - return datetime.utcfromtimestamp(self.mtime + time.timezone) - - def status_id(self): - """ - Returns a HTML class selector for the current object based on its validation status. - The selector is chosen based on the current generation only. If there is any bad status, - return bad, else if there are any warn status, return warn, else return good. - """ - for x in reversed(kinds): - if self.repo.statuses.filter(generation=generations_dict['current'], status__kind=x[0]): - return x[1] - return None # should not happen - - def __unicode__(self): - return u'%s' % self.name - - -class Cert(SignedObject): - """ - Object representing a resource certificate. 
- """ - addresses = models.ManyToManyField(AddressRange, related_name='certs') - addresses_v6 = models.ManyToManyField(AddressRangeV6, related_name='certs') - asns = models.ManyToManyField(ASRange, related_name='certs') - issuer = models.ForeignKey('self', related_name='children', null=True) - sia = models.CharField(max_length=255) - - def get_absolute_url(self): - return reverse('cert-detail', args=[str(self.pk)]) - - def get_cert_chain(self): - """Return a list containing the complete certificate chain for this - certificate.""" - cert = self - x = [cert] - while cert != cert.issuer: - cert = cert.issuer - x.append(cert) - x.reverse() - return x - cert_chain = property(get_cert_chain) - - -class ROAPrefix(models.Model): - "Abstract base class for ROA mixin." - - max_length = models.PositiveSmallIntegerField() - - class Meta: - abstract = True - - def as_roa_prefix(self): - "Return value as a rpki.resource_set.roa_prefix_ip object." - rng = self.as_resource_range() - return self.roa_cls(rng.min, rng.prefixlen(), self.max_length) - - def __unicode__(self): - p = self.as_resource_range() - if p.prefixlen() == self.max_length: - return str(p) - return '%s-%s' % (str(p), self.max_length) - - -# ROAPrefix is declared first, so subclass picks up __unicode__ from it. -class ROAPrefixV4(ROAPrefix, rpki.gui.models.PrefixV4): - "One v4 prefix in a ROA." - - roa_cls = rpki.resource_set.roa_prefix_ipv4 - - @property - def routes(self): - """return all routes covered by this roa prefix""" - return RouteOrigin.objects.filter(prefix_min__gte=self.prefix_min, - prefix_max__lte=self.prefix_max) - - class Meta: - ordering = ('prefix_min',) - - -# ROAPrefix is declared first, so subclass picks up __unicode__ from it. -class ROAPrefixV6(ROAPrefix, rpki.gui.models.PrefixV6): - "One v6 prefix in a ROA." 
- - roa_cls = rpki.resource_set.roa_prefix_ipv6 - - class Meta: - ordering = ('prefix_min',) - - -class ROA(SignedObject): - asid = models.PositiveIntegerField() - prefixes = models.ManyToManyField(ROAPrefixV4, related_name='roas') - prefixes_v6 = models.ManyToManyField(ROAPrefixV6, related_name='roas') - issuer = models.ForeignKey('Cert', related_name='roas') - - def get_absolute_url(self): - return reverse('roa-detail', args=[str(self.pk)]) - - class Meta: - ordering = ('asid',) - - def __unicode__(self): - return u'ROA for AS%d' % self.asid - - -class Ghostbuster(SignedObject): - full_name = models.CharField(max_length=40) - email_address = models.EmailField(blank=True, null=True) - organization = models.CharField(blank=True, null=True, max_length=255) - telephone = TelephoneField(blank=True, null=True) - issuer = models.ForeignKey('Cert', related_name='ghostbusters') - - def get_absolute_url(self): - # note that ghostbuster-detail is different from gbr-detail! sigh - return reverse('ghostbuster-detail', args=[str(self.pk)]) - - def __unicode__(self): - if self.full_name: - return self.full_name - if self.organization: - return self.organization - if self.email_address: - return self.email_address - return self.telephone - - -from rpki.gui.routeview.models import RouteOrigin diff --git a/rpkid/rpki/gui/cacheview/templates/cacheview/addressrange_detail.html b/rpkid/rpki/gui/cacheview/templates/cacheview/addressrange_detail.html deleted file mode 100644 index 76edc1ba..00000000 --- a/rpkid/rpki/gui/cacheview/templates/cacheview/addressrange_detail.html +++ /dev/null @@ -1,18 +0,0 @@ -{% extends "cacheview/cacheview_base.html" %} - -{% block content %} -

      {% block title %}IP Range Detail{% endblock %}

      - -

      -IP Range: {{ object }} -

      - -

      Covered by the following resource certs:

      - -
        -{% for cert in object.certs.all %} -
      • {{ cert }}
      • -{% endfor %} -
      - -{% endblock %} diff --git a/rpkid/rpki/gui/cacheview/templates/cacheview/cacheview_base.html b/rpkid/rpki/gui/cacheview/templates/cacheview/cacheview_base.html deleted file mode 100644 index ec71d740..00000000 --- a/rpkid/rpki/gui/cacheview/templates/cacheview/cacheview_base.html +++ /dev/null @@ -1,10 +0,0 @@ -{% extends "base.html" %} -{% load url from future %} - -{% block sidebar %} -
      - {% csrf_token %} - - -
      -{% endblock %} diff --git a/rpkid/rpki/gui/cacheview/templates/cacheview/cert_detail.html b/rpkid/rpki/gui/cacheview/templates/cacheview/cert_detail.html deleted file mode 100644 index 256e7780..00000000 --- a/rpkid/rpki/gui/cacheview/templates/cacheview/cert_detail.html +++ /dev/null @@ -1,105 +0,0 @@ -{% extends "cacheview/signedobject_detail.html" %} - -{% block title %} -Resource Certificate Detail -{% endblock %} - -{% block detail %} - -

      RFC3779 Resources

      - - - - - - - - - - - -
      AS RangesIP Ranges
      -
        - {% for asn in object.asns.all %} -
      • {{ asn }}
      • - {% endfor %} -
      -
      -
        - {% for rng in object.addresses.all %} -
      • {{ rng }}
      • - {% endfor %} -
      -
      - -
      -

      Issued Objects

      -
        - -{% if object.ghostbusters.all %} -
      • -

        Ghostbusters

        - - - - - - - -{% for g in object.ghostbusters.all %} - - - - - -{% endfor %} - -
        NameExpires
        {{ g }}{{ g.not_after }}
        -{% endif %} - -{% if object.roas.all %} -
      • -

        ROAs

        - - - - - - {% for roa in object.roas.all %} - {% for pfx in roa.prefixes.all %} - - - - - - - {% endfor %} - {% endfor %} - -
        #PrefixASExpires
        #{{ pfx }}{{ roa.asid }}{{ roa.not_after }}
        -{% endif %} - -{% if object.children.all %} -
      • -

        Children

        - - - - - - - {% for child in object.children.all %} - - - - - {% endfor %} - -
        NameExpires
        {{ child.name }}{{ child.not_after }}
        -{% endif %} - -
      - -
      - -{% endblock %} diff --git a/rpkid/rpki/gui/cacheview/templates/cacheview/ghostbuster_detail.html b/rpkid/rpki/gui/cacheview/templates/cacheview/ghostbuster_detail.html deleted file mode 100644 index 4215f757..00000000 --- a/rpkid/rpki/gui/cacheview/templates/cacheview/ghostbuster_detail.html +++ /dev/null @@ -1,13 +0,0 @@ -{% extends "cacheview/signedobject_detail.html" %} - -{% block title %}Ghostbuster Detail{% endblock %} - -{% block detail %} -

      - - - - - -
      Full Name{{ object.full_name }}
      Organization{{ object.organization }}
      Email{{ object.email_address }}
      Telephone{{ object.telephone }}
      -{% endblock %} diff --git a/rpkid/rpki/gui/cacheview/templates/cacheview/global_summary.html b/rpkid/rpki/gui/cacheview/templates/cacheview/global_summary.html deleted file mode 100644 index 0dbd0ffc..00000000 --- a/rpkid/rpki/gui/cacheview/templates/cacheview/global_summary.html +++ /dev/null @@ -1,26 +0,0 @@ -{% extends "cacheview/cacheview_base.html" %} - -{% block content %} -

      - - - - - - - - - - - {% for r in roots %} - - - - - - {% endfor %} - -
      NameExpiresURI
      {{ r.name }}{{ r.not_after }}{{ r.repo.uri }}
      -{% endblock content %} diff --git a/rpkid/rpki/gui/cacheview/templates/cacheview/query_result.html b/rpkid/rpki/gui/cacheview/templates/cacheview/query_result.html deleted file mode 100644 index 0694c531..00000000 --- a/rpkid/rpki/gui/cacheview/templates/cacheview/query_result.html +++ /dev/null @@ -1,21 +0,0 @@ -{% extends "cacheview/cacheview_base.html" %} - -{% block content %} - -

      {% block title %}Query Results{% endblock %}

      - - - - {% for object in object_list %} - - - - - - - {% endfor %} -
      PrefixASValidUntil
      {{ object.0 }}{{ object.1.asid }}{{ object.1.ok }}{{ object.1.not_after }}
      - -

      new query

      - -{% endblock %} diff --git a/rpkid/rpki/gui/cacheview/templates/cacheview/roa_detail.html b/rpkid/rpki/gui/cacheview/templates/cacheview/roa_detail.html deleted file mode 100644 index 39cc547b..00000000 --- a/rpkid/rpki/gui/cacheview/templates/cacheview/roa_detail.html +++ /dev/null @@ -1,18 +0,0 @@ -{% extends "cacheview/signedobject_detail.html" %} - -{% block title %}ROA Detail{% endblock %} - -{% block detail %} -

      - - -
      AS{{ object.asid }}
      - -

      Prefixes

      - -
        -{% for pfx in object.prefixes.all %} -
      • {{ pfx }} -{% endfor %} -
      -{% endblock %} diff --git a/rpkid/rpki/gui/cacheview/templates/cacheview/search_form.html b/rpkid/rpki/gui/cacheview/templates/cacheview/search_form.html deleted file mode 100644 index 1141615d..00000000 --- a/rpkid/rpki/gui/cacheview/templates/cacheview/search_form.html +++ /dev/null @@ -1,17 +0,0 @@ -{% extends "cacheview/cacheview_base.html" %} - -{% block title %} -{{ search_type }} Search -{% endblock %} - -{% block content %} - -

      {{search_type}} Search

      - -
      - {% csrf_token %} - {{ form.as_p }} - -
      - -{% endblock %} diff --git a/rpkid/rpki/gui/cacheview/templates/cacheview/search_result.html b/rpkid/rpki/gui/cacheview/templates/cacheview/search_result.html deleted file mode 100644 index 7cbf852e..00000000 --- a/rpkid/rpki/gui/cacheview/templates/cacheview/search_result.html +++ /dev/null @@ -1,42 +0,0 @@ -{% extends "cacheview/cacheview_base.html" %} - -{% block content %} - - - -

      Matching Resource Certificates

      -{% if certs %} -
        -{% for cert in certs %} -
      • {{ cert }} -{% endfor %} -
      -{% else %} -

      none

      -{% endif %} - -

      Matching ROAs

      -{% if roas %} - - - - - - - -{% for roa in roas %} - - - - - -{% endfor %} - -
      #PrefixAS
      #{{ roa.prefixes.all.0 }}{{ roa.asid }}
      -{% else %} -

      none

      -{% endif %} - -{% endblock %} diff --git a/rpkid/rpki/gui/cacheview/templates/cacheview/signedobject_detail.html b/rpkid/rpki/gui/cacheview/templates/cacheview/signedobject_detail.html deleted file mode 100644 index 22ae3d27..00000000 --- a/rpkid/rpki/gui/cacheview/templates/cacheview/signedobject_detail.html +++ /dev/null @@ -1,58 +0,0 @@ -{% extends "cacheview/cacheview_base.html" %} - -{% block content %} - - -

      Cert Info

      - - - - {% if object.sia %} - - {% endif %} - - -
      Subject Name{{ object.name }}
      SKI{{ object.keyid }}
      SIA{{ object.sia }}
      Not Before{{ object.not_before }}
      Not After{{ object.not_after }}
      - -

      Metadata

      - - - - -
      URI{{ object.repo.uri }}
      Last Modified{{ object.mtime_as_datetime|date:"DATETIME_FORMAT" }}
      - -

      Validation Status

      - - - - - - {% for status in object.repo.statuses.all %} - - {% endfor %} - -
      TimestampGenerationStatus
      {{ status.timestamp }}{{ status.get_generation_display }}{{ status.status.status }}
      - -

      X.509 Certificate Chain

      - - - - - - - -{% for cert in chain %} - - - - -{% endfor %} - - -
      DepthName
      {{ cert.0 }}{{ cert.1.name }}
      - -{% block detail %}{% endblock %} - -{% endblock %} diff --git a/rpkid/rpki/gui/cacheview/tests.py b/rpkid/rpki/gui/cacheview/tests.py deleted file mode 100644 index 2247054b..00000000 --- a/rpkid/rpki/gui/cacheview/tests.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -This file demonstrates two different styles of tests (one doctest and one -unittest). These will both pass when you run "manage.py test". - -Replace these with more appropriate tests for your application. -""" - -from django.test import TestCase - -class SimpleTest(TestCase): - def test_basic_addition(self): - """ - Tests that 1 + 1 always equals 2. - """ - self.failUnlessEqual(1 + 1, 2) - -__test__ = {"doctest": """ -Another way to test that 1 + 1 is equal to 2. - ->>> 1 + 1 == 2 -True -"""} - diff --git a/rpkid/rpki/gui/cacheview/urls.py b/rpkid/rpki/gui/cacheview/urls.py deleted file mode 100644 index cc03a587..00000000 --- a/rpkid/rpki/gui/cacheview/urls.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions -# Copyright (C) 2013 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -__version__ = '$Id$' - -from django.conf.urls import patterns, url -from rpki.gui.cacheview.views import (CertDetailView, RoaDetailView, - GhostbusterDetailView) - -urlpatterns = patterns('', - url(r'^search$', 'rpki.gui.cacheview.views.search_view', - name='res-search'), - url(r'^cert/(?P[^/]+)$', CertDetailView.as_view(), name='cert-detail'), - url(r'^gbr/(?P[^/]+)$', GhostbusterDetailView.as_view(), - name='ghostbuster-detail'), - url(r'^roa/(?P[^/]+)$', RoaDetailView.as_view(), name='roa-detail'), - (r'^$', 'rpki.gui.cacheview.views.global_summary'), -) - -# vim:sw=4 ts=8 expandtab diff --git a/rpkid/rpki/gui/cacheview/util.py b/rpkid/rpki/gui/cacheview/util.py deleted file mode 100644 index 0d3d7ae3..00000000 --- a/rpkid/rpki/gui/cacheview/util.py +++ /dev/null @@ -1,432 +0,0 @@ -# Copyright (C) 2011 SPARTA, Inc. dba Cobham -# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -__version__ = '$Id$' -__all__ = ('import_rcynic_xml') - -default_logfile = '/var/rcynic/data/rcynic.xml' -default_root = '/var/rcynic/data' -object_accepted = None # set by import_rcynic_xml() - -import time -import vobject -import logging -import os -import stat -from socket import getfqdn -from cStringIO import StringIO - -from django.db import transaction -import django.db.models - -import rpki -import rpki.gui.app.timestamp -from rpki.gui.app.models import Conf, Alert -from rpki.gui.cacheview import models -from rpki.rcynic import rcynic_xml_iterator, label_iterator -from rpki.sundial import datetime -from rpki.irdb.zookeeper import Zookeeper - -logger = logging.getLogger(__name__) - - -def rcynic_cert(cert, obj): - obj.sia = cert.sia_directory_uri - - # object must be saved for the related manager methods below to work - obj.save() - - # for the root cert, we can't set inst.issuer = inst until - # after inst.save() has been called. - if obj.issuer is None: - obj.issuer = obj - obj.save() - - # resources can change when a cert is updated - obj.asns.clear() - obj.addresses.clear() - - if cert.resources.asn.inherit: - # FIXME: what happens when the parent's resources change and the child - # cert is not reissued? 
- obj.asns.add(*obj.issuer.asns.all()) - else: - for asr in cert.resources.asn: - logger.debug('processing %s' % asr) - - attrs = {'min': asr.min, 'max': asr.max} - q = models.ASRange.objects.filter(**attrs) - if not q: - obj.asns.create(**attrs) - else: - obj.asns.add(q[0]) - - # obj.issuer is None the first time we process the root cert in the - # hierarchy, so we need to guard against dereference - for cls, addr_obj, addrset, parentset in ( - models.AddressRange, obj.addresses, cert.resources.v4, - obj.issuer.addresses.all() if obj.issuer else [] - ), ( - models.AddressRangeV6, obj.addresses_v6, cert.resources.v6, - obj.issuer.addresses_v6.all() if obj.issuer else [] - ): - if addrset.inherit: - addr_obj.add(*parentset) - else: - for rng in addrset: - logger.debug('processing %s' % rng) - - attrs = {'prefix_min': rng.min, 'prefix_max': rng.max} - q = cls.objects.filter(**attrs) - if not q: - addr_obj.create(**attrs) - else: - addr_obj.add(q[0]) - - -def rcynic_roa(roa, obj): - obj.asid = roa.asID - # object must be saved for the related manager methods below to work - obj.save() - obj.prefixes.clear() - obj.prefixes_v6.clear() - for pfxset in roa.prefix_sets: - if pfxset.__class__.__name__ == 'roa_prefix_set_ipv6': - roa_cls = models.ROAPrefixV6 - prefix_obj = obj.prefixes_v6 - else: - roa_cls = models.ROAPrefixV4 - prefix_obj = obj.prefixes - - for pfx in pfxset: - attrs = {'prefix_min': pfx.min(), - 'prefix_max': pfx.max(), - 'max_length': pfx.max_prefixlen} - q = roa_cls.objects.filter(**attrs) - if not q: - prefix_obj.create(**attrs) - else: - prefix_obj.add(q[0]) - - -def rcynic_gbr(gbr, obj): - vcard = vobject.readOne(gbr.vcard) - obj.full_name = vcard.fn.value if hasattr(vcard, 'fn') else None - obj.email_address = vcard.email.value if hasattr(vcard, 'email') else None - obj.telephone = vcard.tel.value if hasattr(vcard, 'tel') else None - obj.organization = vcard.org.value[0] if hasattr(vcard, 'org') else None - obj.save() - -LABEL_CACHE = {} - -# dict 
keeping mapping of uri to (handle, old status, new status) for objects -# published by the local rpkid -uris = {} - -dispatch = { - 'rcynic_certificate': rcynic_cert, - 'rcynic_roa': rcynic_roa, - 'rcynic_ghostbuster': rcynic_gbr -} - -model_class = { - 'rcynic_certificate': models.Cert, - 'rcynic_roa': models.ROA, - 'rcynic_ghostbuster': models.Ghostbuster -} - - -def save_status(repo, vs): - timestamp = datetime.fromXMLtime(vs.timestamp).to_sql() - status = LABEL_CACHE[vs.status] - g = models.generations_dict[vs.generation] if vs.generation else None - repo.statuses.create(generation=g, timestamp=timestamp, status=status) - - # if this object is in our interest set, update with the current validation - # status - if repo.uri in uris: - x, y, z, q = uris[repo.uri] - valid = z or (status is object_accepted) # don't clobber previous True value - uris[repo.uri] = x, y, valid, repo - - if status is not object_accepted: - return - - cls = model_class[vs.file_class.__name__] - # find the instance of the signedobject subclass that is associated with - # this repo instance (may be empty when not accepted) - inst_qs = cls.objects.filter(repo=repo) - - logger.debug('processing %s' % vs.filename) - - if not inst_qs: - inst = cls(repo=repo) - logger.debug('object not found in db, creating new object cls=%s id=%s' % ( - cls, - id(inst) - )) - else: - inst = inst_qs[0] - - try: - # determine if the object is changed/new - mtime = os.stat(vs.filename)[stat.ST_MTIME] - except OSError as e: - logger.error('unable to stat %s: %s %s' % ( - vs.filename, type(e), e)) - # treat as if missing from rcynic.xml - # use inst_qs rather than deleting inst so that we don't raise an - # exception for newly created objects (inst_qs will be empty) - inst_qs.delete() - return - - if mtime != inst.mtime: - inst.mtime = mtime - try: - obj = vs.obj # causes object to be lazily loaded - except Exception, e: - logger.warning('Caught %s while processing %s: %s' % ( - type(e), vs.filename, e)) - return - 
- inst.not_before = obj.notBefore.to_sql() - inst.not_after = obj.notAfter.to_sql() - inst.name = obj.subject - inst.keyid = obj.ski - - # look up signing cert - if obj.issuer == obj.subject: - # self-signed cert (TA) - assert(isinstance(inst, models.Cert)) - inst.issuer = None - else: - # if an object has moved in the repository, the entry for - # the old location will still be in the database, but - # without any object_accepted in its validtion status - qs = models.Cert.objects.filter( - keyid=obj.aki, - name=obj.issuer, - repo__statuses__status=object_accepted - ) - ncerts = len(qs) - if ncerts == 0: - logger.warning('unable to find signing cert with ski=%s (%s)' % (obj.aki, obj.issuer)) - return - else: - if ncerts > 1: - # multiple matching certs, all of which are valid - logger.warning('Found multiple certs matching ski=%s sn=%s' % (obj.aki, obj.issuer)) - for c in qs: - logger.warning(c.repo.uri) - # just use the first match - inst.issuer = qs[0] - - try: - # do object-specific tasks - dispatch[vs.file_class.__name__](obj, inst) - except: - logger.error('caught exception while processing rcynic_object:\n' - 'vs=' + repr(vs) + '\nobj=' + repr(obj)) - # .show() writes to stdout - obj.show() - raise - - logger.debug('object saved id=%s' % id(inst)) - else: - logger.debug('object is unchanged') - - -@transaction.commit_on_success -def process_cache(root, xml_file): - - last_uri = None - repo = None - - logger.info('clearing validation statuses') - models.ValidationStatus.objects.all().delete() - - logger.info('updating validation status') - for vs in rcynic_xml_iterator(root, xml_file): - if vs.uri != last_uri: - repo, created = models.RepositoryObject.objects.get_or_create(uri=vs.uri) - last_uri = vs.uri - save_status(repo, vs) - - # garbage collection - # remove all objects which have no ValidationStatus references, which - # means they did not appear in the last XML output - logger.info('performing garbage collection') - - # Delete all objects that have zero 
validation status elements. - models.RepositoryObject.objects.annotate(num_statuses=django.db.models.Count('statuses')).filter(num_statuses=0).delete() - - # Delete all SignedObject instances that were not accepted. There may - # exist rows for objects that were previously accepted. - # See https://trac.rpki.net/ticket/588#comment:30 - # - # We have to do this here rather than in save_status() because the - # elements are not guaranteed to be consecutive for a - # given URI. see https://trac.rpki.net/ticket/625#comment:5 - models.SignedObject.objects.exclude(repo__statuses__status=object_accepted).delete() - - # ROAPrefixV* objects are M2M so they are not automatically deleted when - # their ROA object disappears - models.ROAPrefixV4.objects.annotate(num_roas=django.db.models.Count('roas')).filter(num_roas=0).delete() - models.ROAPrefixV6.objects.annotate(num_roas=django.db.models.Count('roas')).filter(num_roas=0).delete() - logger.info('done with garbage collection') - - -@transaction.commit_on_success -def process_labels(xml_file): - logger.info('updating labels...') - - for label, kind, desc in label_iterator(xml_file): - logger.debug('label=%s kind=%s desc=%s' % (label, kind, desc)) - if kind: - q = models.ValidationLabel.objects.filter(label=label) - if not q: - obj = models.ValidationLabel(label=label) - else: - obj = q[0] - - obj.kind = models.kinds_dict[kind] - obj.status = desc - obj.save() - - LABEL_CACHE[label] = obj - - -def fetch_published_objects(): - """Query rpkid for all objects published by local users, and look up the - current validation status of each object. The validation status is used - later to send alerts for objects which have transitioned to invalid. 
- - """ - logger.info('querying for published objects') - - handles = [conf.handle for conf in Conf.objects.all()] - req = [rpki.left_right.list_published_objects_elt.make_pdu(action='list', self_handle=h, tag=h) for h in handles] - z = Zookeeper() - pdus = z.call_rpkid(*req) - for pdu in pdus: - if isinstance(pdu, rpki.left_right.list_published_objects_elt): - # Look up the object in the rcynic cache - qs = models.RepositoryObject.objects.filter(uri=pdu.uri) - if qs: - # get the current validity state - valid = qs[0].statuses.filter(status=object_accepted).exists() - uris[pdu.uri] = (pdu.self_handle, valid, False, None) - logger.debug('adding ' + pdu.uri) - else: - # this object is not in the cache. it was either published - # recently, or disappared previously. if it disappeared - # previously, it has already been alerted. in either case, we - # omit the uri from the list since we are interested only in - # objects which were valid and are no longer valid - pass - elif isinstance(pdu, rpki.left_right.report_error_elt): - logging.error('rpkid reported an error: %s' % pdu.error_code) - - -class Handle(object): - def __init__(self): - self.invalid = [] - self.missing = [] - - def add_invalid(self, v): - self.invalid.append(v) - - def add_missing(self, v): - self.missing.append(v) - - -def notify_invalid(): - """Send email alerts to the addresses registered in ghostbuster records for - any invalid objects that were published by users of this system. 
- - """ - - logger.info('sending notifications for invalid objects') - - # group invalid objects by user - notify = {} - for uri, v in uris.iteritems(): - handle, old_status, new_status, obj = v - - if obj is None: - # object went missing - n = notify.get(handle, Handle()) - n.add_missing(uri) - # only select valid->invalid - elif old_status and not new_status: - n = notify.get(handle, Handle()) - n.add_invalid(obj) - - for handle, v in notify.iteritems(): - conf = Conf.objects.get(handle) - - msg = StringIO() - msg.write('This is an alert about problems with objects published by ' - 'the resource handle %s.\n\n' % handle) - - if v.invalid: - msg.write('The following objects were previously valid, but are ' - 'now invalid:\n') - - for o in v.invalid: - msg.write('\n') - msg.write(o.repo.uri) - msg.write('\n') - for s in o.statuses.all(): - msg.write('\t') - msg.write(s.status.label) - msg.write(': ') - msg.write(s.status.status) - msg.write('\n') - - if v.missing: - msg.write('The following objects were previously valid but are no ' - 'longer in the cache:\n') - - for o in v.missing: - msg.write(o) - msg.write('\n') - - msg.write("""-- -You are receiving this email because your address is published in a Ghostbuster -record, or is the default email address for this resource holder account on -%s.""" % getfqdn()) - - from_email = 'root@' + getfqdn() - subj = 'invalid RPKI object alert for resource handle %s' % conf.handle - conf.send_alert(subj, msg.getvalue(), from_email, severity=Alert.ERROR) - - -def import_rcynic_xml(root=default_root, logfile=default_logfile): - """Load the contents of rcynic.xml into the rpki.gui.cacheview database.""" - - global object_accepted - - start = time.time() - process_labels(logfile) - object_accepted = LABEL_CACHE['object_accepted'] - fetch_published_objects() - process_cache(root, logfile) - notify_invalid() - - rpki.gui.app.timestamp.update('rcynic_import') - - stop = time.time() - logger.info('elapsed time %d seconds.' 
% (stop - start)) diff --git a/rpkid/rpki/gui/cacheview/views.py b/rpkid/rpki/gui/cacheview/views.py deleted file mode 100644 index 94870eb2..00000000 --- a/rpkid/rpki/gui/cacheview/views.py +++ /dev/null @@ -1,172 +0,0 @@ -# Copyright (C) 2011 SPARTA, Inc. dba Cobham Analytic Solutions -# Copyright (C) 2013 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' - -from django.views.generic import DetailView -from django.shortcuts import render -from django.db.models import F - -from rpki.gui.cacheview import models, forms, misc -from rpki.resource_set import resource_range_as, resource_range_ip -from rpki.POW import IPAddress -from rpki.exceptions import BadIPResource - - -def cert_chain(obj): - """ - returns an iterator covering all certs from the root cert down to the EE. 
- """ - chain = [obj] - while obj != obj.issuer: - obj = obj.issuer - chain.append(obj) - return zip(range(len(chain)), reversed(chain)) - - -class SignedObjectDetailView(DetailView): - def get_context_data(self, **kwargs): - context = super(SignedObjectDetailView, - self).get_context_data(**kwargs) - context['chain'] = cert_chain(self.object) - return context - - -class RoaDetailView(SignedObjectDetailView): - model = models.ROA - - -class CertDetailView(SignedObjectDetailView): - model = models.Cert - - -class GhostbusterDetailView(SignedObjectDetailView): - model = models.Ghostbuster - - -def search_view(request): - certs = None - roas = None - - if request.method == 'POST': - form = forms.SearchForm2(request.POST, request.FILES) - if form.is_valid(): - resource = form.cleaned_data.get('resource') - # try to determine the type of input given - try: - r = resource_range_as.parse_str(resource) - certs = models.Cert.objects.filter(asns__min__gte=r.min, - asns__max__lte=r.max) - roas = models.ROA.objects.filter(asid__gte=r.min, - asid__lte=r.max) - except: - try: - r = resource_range_ip.parse_str(resource) - if r.version == 4: - certs = models.Cert.objects.filter( - addresses__prefix_min__lte=r.min, - addresses__prefix_max__gte=r.max) - roas = models.ROA.objects.filter( - prefixes__prefix_min__lte=r.min, - prefixes__prefix_max__gte=r.max) - else: - certs = models.Cert.objects.filter( - addresses_v6__prefix_min__lte=r.min, - addresses_v6__prefix_max__gte=r.max) - roas = models.ROA.objects.filter( - prefixes_v6__prefix_min__lte=r.min, - prefixes_v6__prefix_max__gte=r.max) - except BadIPResource: - pass - - return render(request, 'cacheview/search_result.html', - {'resource': resource, 'certs': certs, 'roas': roas}) - - -def cmp_prefix(x, y): - r = cmp(x[0].family, y[0].family) - if r == 0: - r = cmp(x[2], y[2]) # integer address - if r == 0: - r = cmp(x[0].bits, y[0].bits) - if r == 0: - r = cmp(x[0].max_length, y[0].max_length) - if r == 0: - r = cmp(x[1].asid, 
y[1].asid) - return r - - -#def cmp_prefix(x,y): -# for attr in ('family', 'prefix', 'bits', 'max_length'): -# r = cmp(getattr(x[0], attr), getattr(y[0], attr)) -# if r: -# return r -# return cmp(x[1].asid, y[1].asid) - - -def query_view(request): - """ - Allow the user to search for an AS or prefix, and show all published ROA - information. - """ - - if request.method == 'POST': - form = forms.SearchForm(request.POST, request.FILES) - if form.is_valid(): - certs = None - roas = None - - addr = form.cleaned_data.get('addr') - asn = form.cleaned_data.get('asn') - - if addr: - family, r = misc.parse_ipaddr(addr) - prefixes = models.ROAPrefix.objects.filter(family=family, prefix=str(r.min)) - - prefix_list = [] - for pfx in prefixes: - for roa in pfx.roas.all(): - prefix_list.append((pfx, roa)) - elif asn: - r = resource_range_as.parse_str(asn) - roas = models.ROA.objects.filter(asid__gte=r.min, asid__lte=r.max) - - # display the results sorted by prefix - prefix_list = [] - for roa in roas: - for pfx in roa.prefixes.all(): - addr = IPAddress(pfx.prefix.encode()) - prefix_list.append((pfx, roa, addr)) - prefix_list.sort(cmp=cmp_prefix) - - return render('cacheview/query_result.html', - {'object_list': prefix_list}, request) - else: - form = forms.SearchForm() - - return render('cacheview/search_form.html', { - 'form': form, 'search_type': 'ROA '}, request) - - -def global_summary(request): - """Display a table summarizing the state of the global RPKI.""" - - roots = models.Cert.objects.filter(issuer=F('pk')) # self-signed - - return render(request, 'cacheview/global_summary.html', { - 'roots': roots - }) - -# vim:sw=4 ts=8 expandtab diff --git a/rpkid/rpki/gui/decorators.py b/rpkid/rpki/gui/decorators.py deleted file mode 100644 index 69d20c46..00000000 --- a/rpkid/rpki/gui/decorators.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (C) 2013 SPARTA, Inc. 
a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' - -from django import http - - -def tls_required(f): - """Decorator which returns a 500 error if the connection is not secured - with TLS (https). - - """ - def _tls_required(request, *args, **kwargs): - if not request.is_secure(): - return http.HttpResponseServerError( - 'This resource may only be accessed securely via https', - content_type='text/plain') - return f(request, *args, **kwargs) - return _tls_required diff --git a/rpkid/rpki/gui/default_settings.py b/rpkid/rpki/gui/default_settings.py deleted file mode 100644 index 3859247c..00000000 --- a/rpkid/rpki/gui/default_settings.py +++ /dev/null @@ -1,171 +0,0 @@ -""" -This module contains static configuration settings for the web portal. -""" - -__version__ = '$Id$' - -import os -import random -import string -import socket - -import rpki.config -import rpki.autoconf - -# Where to put static files. -STATIC_ROOT = rpki.autoconf.datarootdir + '/rpki/media' - -# Must end with a slash! -STATIC_URL = '/media/' - -# Where to email server errors. 
-ADMINS = (('Administrator', 'root@localhost'),) - -LOGGING = { - 'version': 1, - 'formatters': { - 'verbose': { - # see http://docs.python.org/2.7/library/logging.html#logging.LogRecord - 'format': '%(levelname)s %(asctime)s %(name)s %(message)s' - }, - }, - 'handlers': { - 'stderr': { - 'class': 'logging.StreamHandler', - 'level': 'DEBUG', - 'formatter': 'verbose', - }, - 'mail_admins': { - 'level': 'ERROR', - 'class': 'django.utils.log.AdminEmailHandler', - }, - }, - 'loggers': { - 'django': { - 'level': 'ERROR', - 'handlers': ['stderr', 'mail_admins'], - }, - 'rpki.gui': { - 'level': 'WARNING', - 'handlers': ['stderr'], - }, - }, -} - -# Load the SQL authentication bits from the system rpki.conf. -rpki_config = rpki.config.parser(section='web_portal') - -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.mysql', - 'NAME': rpki_config.get('sql-database'), - 'USER': rpki_config.get('sql-username'), - 'PASSWORD': rpki_config.get('sql-password'), - - # Ensure the default storage engine is InnoDB since we need - # foreign key support. The Django documentation suggests - # removing this after the syncdb is performed as an optimization, - # but there isn't an easy way to do this automatically. - - 'OPTIONS': { - 'init_command': 'SET storage_engine=INNODB', - } - } -} - - -def select_tz(): - "Find a supported timezone that looks like UTC" - for tz in ('UTC', 'GMT', 'Etc/UTC', 'Etc/GMT'): - if os.path.exists('/usr/share/zoneinfo/' + tz): - return tz - # Can't determine the proper timezone, fall back to UTC and let Django - # report the error to the user. - return 'UTC' - -# Local time zone for this installation. Choices can be found here: -# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name -# although not all choices may be available on all operating systems. -# If running in a Windows environment this must be set to the same as your -# system time zone. 
-TIME_ZONE = select_tz() - -def get_secret_key(): - """Retrieve the secret-key value from rpki.conf or generate a random value - if it is not present.""" - d = string.letters + string.digits - val = ''.join([random.choice(d) for _ in range(50)]) - return rpki_config.get('secret-key', val) - -# Make this unique, and don't share it with anybody. -SECRET_KEY = get_secret_key() - -# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts -# for details on why you might need this. -def get_allowed_hosts(): - allowed_hosts = set(rpki_config.multiget("allowed-hosts")) - allowed_hosts.add(socket.getfqdn()) - try: - import netifaces - for interface in netifaces.interfaces(): - addresses = netifaces.ifaddresses(interface) - for af in (netifaces.AF_INET, netifaces.AF_INET6): - if af in addresses: - for address in addresses[af]: - if "addr" in address: - allowed_hosts.add(address["addr"]) - except ImportError: - pass - return list(allowed_hosts) - -ALLOWED_HOSTS = get_allowed_hosts() - -# List of callables that know how to import templates from various sources. 
-TEMPLATE_LOADERS = ( - 'django.template.loaders.filesystem.Loader', - 'django.template.loaders.app_directories.Loader', - 'django.template.loaders.eggs.Loader' -) - -MIDDLEWARE_CLASSES = ( - 'django.middleware.common.CommonMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware' -) - -ROOT_URLCONF = 'rpki.gui.urls' - -INSTALLED_APPS = ( - 'django.contrib.auth', - #'django.contrib.admin', - #'django.contrib.admindocs', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.staticfiles', - 'rpki.irdb', - 'rpki.gui.app', - 'rpki.gui.cacheview', - 'rpki.gui.routeview', - 'south', -) - -TEMPLATE_CONTEXT_PROCESSORS = ( - "django.contrib.auth.context_processors.auth", - "django.core.context_processors.debug", - "django.core.context_processors.i18n", - "django.core.context_processors.media", - "django.contrib.messages.context_processors.messages", - "django.core.context_processors.request", - "django.core.context_processors.static" -) - -# Allow local site to override any setting above -- but if there's -# anything that local sites routinely need to modify, please consider -# putting that configuration into rpki.conf and just adding code here -# to read that configuration. -try: - from local_settings import * -except: - pass diff --git a/rpkid/rpki/gui/models.py b/rpkid/rpki/gui/models.py deleted file mode 100644 index 7a684f32..00000000 --- a/rpkid/rpki/gui/models.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright (C) 2012 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Common classes for reuse in apps. -""" - -__version__ = '$Id$' - -from django.db import models - -import rpki.resource_set -import rpki.POW -from south.modelsinspector import add_introspection_rules - - -class IPv6AddressField(models.Field): - "Field large enough to hold a 128-bit unsigned integer." - - __metaclass__ = models.SubfieldBase - - def db_type(self, connection): - return 'binary(16)' - - def to_python(self, value): - if isinstance(value, rpki.POW.IPAddress): - return value - return rpki.POW.IPAddress.fromBytes(value) - - def get_db_prep_value(self, value, connection, prepared): - """ - Note that we add a custom conversion to encode long values as hex - strings in SQL statements. See settings.get_conv() for details. - - """ - return value.toBytes() - - -class IPv4AddressField(models.Field): - "Wrapper around rpki.POW.IPAddress." - - __metaclass__ = models.SubfieldBase - - def db_type(self, connection): - return 'int UNSIGNED' - - def to_python(self, value): - if isinstance(value, rpki.POW.IPAddress): - return value - return rpki.POW.IPAddress(value, version=4) - - def get_db_prep_value(self, value, connection, prepared): - return long(value) - -add_introspection_rules( - [ - ([IPv4AddressField, IPv6AddressField], [], {}) - ], - ['^rpki\.gui\.models\.IPv4AddressField', - '^rpki\.gui\.models\.IPv6AddressField'] -) - - -class Prefix(models.Model): - """Common implementation for models with an IP address range. 
- - Expects that `range_cls` is set to the appropriate subclass of - rpki.resource_set.resource_range_ip.""" - - def as_resource_range(self): - """ - Returns the prefix as a rpki.resource_set.resource_range_ip object. - """ - return self.range_cls(self.prefix_min, self.prefix_max) - - @property - def prefixlen(self): - "Returns the prefix length for the prefix in this object." - return self.as_resource_range().prefixlen() - - def get_prefix_display(self): - "Return a string representatation of this IP prefix." - return str(self.as_resource_range()) - - def __unicode__(self): - """This method may be overridden by subclasses. The default - implementation calls get_prefix_display(). """ - return self.get_prefix_display() - - class Meta: - abstract = True - - # default sort order reflects what "sh ip bgp" outputs - ordering = ('prefix_min',) - - -class PrefixV4(Prefix): - "IPv4 Prefix." - - range_cls = rpki.resource_set.resource_range_ipv4 - - prefix_min = IPv4AddressField(db_index=True, null=False) - prefix_max = IPv4AddressField(db_index=True, null=False) - - class Meta(Prefix.Meta): - abstract = True - - -class PrefixV6(Prefix): - "IPv6 Prefix." - - range_cls = rpki.resource_set.resource_range_ipv6 - - prefix_min = IPv6AddressField(db_index=True, null=False) - prefix_max = IPv6AddressField(db_index=True, null=False) - - class Meta(Prefix.Meta): - abstract = True - - -class ASN(models.Model): - """Represents a range of ASNs. 
- - This model is abstract, and is intended to be reused by applications.""" - - min = models.PositiveIntegerField(null=False) - max = models.PositiveIntegerField(null=False) - - class Meta: - abstract = True - ordering = ('min', 'max') - - def as_resource_range(self): - return rpki.resource_set.resource_range_as(self.min, self.max) - - def __unicode__(self): - return u'AS%s' % self.as_resource_range() - -# vim:sw=4 ts=8 expandtab diff --git a/rpkid/rpki/gui/routeview/__init__.py b/rpkid/rpki/gui/routeview/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/rpkid/rpki/gui/routeview/api.py b/rpkid/rpki/gui/routeview/api.py deleted file mode 100644 index cf699c9a..00000000 --- a/rpkid/rpki/gui/routeview/api.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (C) 2012 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' - -import json -from django import http -from rpki.gui.routeview.models import RouteOrigin, RouteOriginV6 -from rpki import resource_set -import rpki.exceptions - -def route_list(request): - """Implements the REST query against the route models to allow the client - to search for routes. 
- - The only search currently supported is returning all the routes covered by - the prefix given in the 'prefix__in=' query string parameter. - - By default, only returns up to 10 matching routes, but the client may - request a different limit with the 'count=' query string parameter. - - """ - hard_limit = 100 - - if request.method == 'GET' and 'prefix__in' in request.GET: - # find all routers covered by this prefix - match_prefix = request.GET.get('prefix__in') - # max number of items to return - limit = request.GET.get('count', 10) - if limit < 1 or limit > hard_limit: - return http.HttpResponseBadRequest('invalid value for count parameter') - - try: - if ':' in match_prefix: - # v6 - pfx = resource_set.resource_range_ipv6.parse_str(match_prefix) - manager = RouteOriginV6 - else: - # v4 - pfx = resource_set.resource_range_ipv4.parse_str(match_prefix) - manager = RouteOrigin - except (AssertionError, rpki.exceptions.BadIPResource), e: - return http.HttpResponseBadRequest(e) - - try: - qs = manager.objects.filter(prefix_min__gte=pfx.min, - prefix_max__lte=pfx.max)[:limit] - # FIXME - a REST API should really return the url of the resource, - # but since we are combining two separate tables, the .pk is not a - # unique identifier. - matches = [{'prefix': str(x.as_resource_range()), 'asn': x.asn} for x in qs] - except IndexError: - # no matches - matches = [] - - return http.HttpResponse(json.dumps(matches), content_type='text/javascript') - - return http.HttpResponseBadRequest() diff --git a/rpkid/rpki/gui/routeview/models.py b/rpkid/rpki/gui/routeview/models.py deleted file mode 100644 index 052860c4..00000000 --- a/rpkid/rpki/gui/routeview/models.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions -# Copyright (C) 2012 SPARTA, Inc. 
a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' - -from django.db.models import PositiveIntegerField, permalink -import rpki.gui.models - - -class RouteOrigin(rpki.gui.models.PrefixV4): - "Represents an IPv4 BGP routing table entry." - - asn = PositiveIntegerField(help_text='origin AS', null=False) - - def __unicode__(self): - return u"AS%d's route origin for %s" % (self.asn, - self.get_prefix_display()) - - @property - def roas(self): - "Return a queryset of ROAs which cover this route." - return rpki.gui.cacheview.models.ROA.objects.filter( - prefixes__prefix_min__lte=self.prefix_min, - prefixes__prefix_max__gte=self.prefix_max - ) - - @property - def roa_prefixes(self): - "Return a queryset of ROA prefixes which cover this route." - return rpki.gui.cacheview.models.ROAPrefixV4.objects.filter( - prefix_min__lte=self.prefix_min, - prefix_max__gte=self.prefix_max - ) - - @property - def status(self): - "Returns the validation status of this route origin object." 
- roas = self.roas - # subselect exact match - if self.asn != 0 and roas.filter(asid=self.asn, prefixes__max_length__gte=self.prefixlen).exists(): - return 'valid' - elif roas.exists(): - return 'invalid' - return 'unknown' - - @permalink - def get_absolute_url(self): - return ('rpki.gui.app.views.route_detail', [str(self.pk)]) - - class Meta: - # sort by increasing mask length (/16 before /24) - ordering = ('prefix_min', '-prefix_max') - - -class RouteOriginV6(rpki.gui.models.PrefixV6): - "Represents an IPv6 BGP routing table entry." - - asn = PositiveIntegerField(help_text='origin AS', null=False) - - def __unicode__(self): - return u"AS%d's route origin for %s" % (self.asn, - self.get_prefix_display()) - - class Meta: - ordering = ('prefix_min', '-prefix_max') - - -# this goes at the end of the file to avoid problems with circular imports -import rpki.gui.cacheview.models diff --git a/rpkid/rpki/gui/routeview/util.py b/rpkid/rpki/gui/routeview/util.py deleted file mode 100644 index 7884224c..00000000 --- a/rpkid/rpki/gui/routeview/util.py +++ /dev/null @@ -1,236 +0,0 @@ -# Copyright (C) 2012, 2013 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -__version__ = '$Id$' -__all__ = ('import_routeviews_dump') - -import itertools -import _mysql_exceptions -import os.path -import subprocess -import time -import logging -import urlparse -from urllib import urlretrieve, unquote - -from django.db import transaction, connection - -from rpki.resource_set import resource_range_ipv4, resource_range_ipv6 -from rpki.exceptions import BadIPResource -import rpki.gui.app.timestamp - -# globals -logger = logging.getLogger(__name__) - -# Eventually this can be retrived from rpki.conf -DEFAULT_URL = 'http://archive.routeviews.org/oix-route-views/oix-full-snapshot-latest.dat.bz2' - -def parse_text(f): - last_prefix = None - cursor = connection.cursor() - range_class = resource_range_ipv4 - table = 'routeview_routeorigin' - sql = "INSERT INTO %s_new SET asn=%%s, prefix_min=%%s, prefix_max=%%s" % table - - try: - logger.info('Dropping existing staging table...') - cursor.execute('DROP TABLE IF EXISTS %s_new' % table) - except _mysql_exceptions.Warning: - pass - - logger.info('Creating staging table...') - cursor.execute('CREATE TABLE %(table)s_new LIKE %(table)s' % {'table': table}) - - logger.info('Disabling autocommit...') - cursor.execute('SET autocommit=0') - - logger.info('Adding rows to table...') - for row in itertools.islice(f, 5, None): - cols = row.split() - - # index -1 is i/e/? for igp/egp - origin_as = cols[-2] - # FIXME: skip AS_SETs - if origin_as[0] == '{': - continue - - prefix = cols[1] - - # validate the prefix since the "sh ip bgp" output is sometimes - # corrupt by no space between the prefix and the next hop IP - # address. - net, bits = prefix.split('/') - if len(bits) > 2: - s = ['mask for %s looks fishy...' % prefix] - prefix = '%s/%s' % (net, bits[0:2]) - s.append('assuming it should be %s' % prefix) - logger.warning(' '.join(s)) - - # the output may contain multiple paths to the same origin. - # if this is the same prefix as the last entry, we don't need - # to validate it again. 
- # - # prefixes are sorted, but the origin_as is not, so we keep a set to - # avoid duplicates, and insert into the db once we've seen all the - # origin_as values for a given prefix - if prefix != last_prefix: - # output routes for previous prefix - if last_prefix is not None: - try: - rng = range_class.parse_str(last_prefix) - rmin = long(rng.min) - rmax = long(rng.max) - cursor.executemany(sql, [(asn, rmin, rmax) for asn in asns]) - except BadIPResource: - logger.warning('skipping bad prefix: ' + last_prefix) - - asns = set() - last_prefix = prefix - - try: - asns.add(int(origin_as)) - except ValueError as err: - logger.warning('\n'.join( - ['unable to parse origin AS: ' + origin_as], - ['ValueError: ' + str(err)] - ['route entry was: ' + row], - )) - - logger.info('Committing...') - cursor.execute('COMMIT') - - try: - logger.info('Dropping old table...') - cursor.execute('DROP TABLE IF EXISTS %s_old' % table) - except _mysql_exceptions.Warning: - pass - - logger.info('Swapping staging table with live table...') - cursor.execute('RENAME TABLE %(table)s TO %(table)s_old, %(table)s_new TO %(table)s' % {'table': table}) - - transaction.commit_unless_managed() - - logger.info('Updating timestamp metadata...') - rpki.gui.app.timestamp.update('bgp_v4_import') - - -def parse_mrt(f): - # filter input through bgpdump - pipe = subprocess.Popen(['bgpdump', '-m', '-v', '-'], stdin=f, - stdout=subprocess.PIPE) - - last_prefix = None - last_as = None - for e in pipe.stdout.readlines(): - a = e.split('|') - prefix = a[5] - try: - origin_as = int(a[6].split()[-1]) - except ValueError: - # skip AS_SETs - continue - - if prefix != last_prefix: - last_prefix = prefix - elif last_as == origin_as: - continue - last_as = origin_as - - asns = PREFIXES.get(prefix) - if not asns: - asns = set() - PREFIXES[prefix] = asns - asns.add(origin_as) - - pipe.wait() - if pipe.returncode: - raise ProgException('bgpdump exited with code %d' % pipe.returncode) - - -class ProgException(Exception): 
- pass - - -class UnknownInputType(ProgException): - pass - - -class PipeFailed(ProgException): - pass - - -def import_routeviews_dump(filename=DEFAULT_URL, filetype='auto'): - """Load the oix-full-snapshot-latest.bz2 from routeview.org into the - rpki.gui.routeview database. - - Arguments: - - filename [optional]: the full path to the downloaded file to parse - - filetype [optional]: 'text' or 'mrt' - - """ - start_time = time.time() - - if filename.startswith('http://'): - #get filename from the basename of the URL - u = urlparse.urlparse(filename) - bname = os.path.basename(unquote(u.path)) - tmpname = os.path.join('/tmp', bname) - - logger.info("Downloading %s to %s" % (filename, tmpname)) - if os.path.exists(tmpname): - os.remove(tmpname) - # filename is replaced with a local filename containing cached copy of - # URL - filename, headers = urlretrieve(filename, tmpname) - - if filetype == 'auto': - # try to determine input type from filename, based on the default - # filenames from archive.routeviews.org - bname = os.path.basename(filename) - if bname.startswith('oix-full-snapshot-latest'): - filetype = 'text' - elif bname.startswith('rib.'): - filetype = 'mrt' - else: - raise UnknownInputType('unable to automatically determine input file type') - logging.info('Detected import format as "%s"' % filetype) - - pipe = None - if filename.endswith('.bz2'): - bunzip = 'bunzip2' - logging.info('Decompressing input file on the fly...') - pipe = subprocess.Popen([bunzip, '--stdout', filename], - stdout=subprocess.PIPE) - input_file = pipe.stdout - else: - input_file = open(filename) - - try: - dispatch = {'text': parse_text, 'mrt': parse_mrt} - dispatch[filetype](input_file) - except KeyError: - raise UnknownInputType('"%s" is an unknown input file type' % filetype) - - if pipe: - logging.debug('Waiting for child to exit...') - pipe.wait() - if pipe.returncode: - raise PipeFailed('Child exited code %d' % pipe.returncode) - pipe = None - else: - input_file.close() - - 
logger.info('Elapsed time %d secs' % (time.time() - start_time)) diff --git a/rpkid/rpki/gui/script_util.py b/rpkid/rpki/gui/script_util.py deleted file mode 100644 index c3a864fd..00000000 --- a/rpkid/rpki/gui/script_util.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright (C) 2013 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -This module contains utility functions for use in standalone scripts. -""" - -from django.conf import settings - -from rpki import config -from rpki import autoconf - -__version__ = '$Id$' - - -def setup(): - """ - Configure Django enough to use the ORM. - """ - cfg = config.parser(section='web_portal') - # INSTALLED_APPS doesn't seem necessary so long as you are only accessing - # existing tables. - settings.configure( - DATABASES={ - 'default': { - 'ENGINE': 'django.db.backends.mysql', - 'NAME': cfg.get('sql-database'), - 'USER': cfg.get('sql-username'), - 'PASSWORD': cfg.get('sql-password'), - } - }, - ) diff --git a/rpkid/rpki/gui/urls.py b/rpkid/rpki/gui/urls.py deleted file mode 100644 index 955092f5..00000000 --- a/rpkid/rpki/gui/urls.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (C) 2010, 2011 SPARTA, Inc. dba Cobham Analytic Solutions -# Copyright (C) 2012, 2013 SPARTA, Inc. 
a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' - -from django.conf.urls import patterns, include - -urlpatterns = patterns( - '', - - # Uncomment the admin/doc line below and add 'django.contrib.admindocs' - # to INSTALLED_APPS to enable admin documentation: - #(r'^admin/doc/', include('django.contrib.admindocs.urls')), - - # Uncomment the next line to enable the admin: - #(r'^admin/', include(admin.site.urls)), - - (r'^api/', include('rpki.gui.api.urls')), - (r'^cacheview/', include('rpki.gui.cacheview.urls')), - (r'^rpki/', include('rpki.gui.app.urls')), - - (r'^accounts/login/$', 'rpki.gui.views.login'), - (r'^accounts/logout/$', 'rpki.gui.views.logout', {'next_page': '/rpki/'}), -) diff --git a/rpkid/rpki/gui/views.py b/rpkid/rpki/gui/views.py deleted file mode 100644 index 404d6c7e..00000000 --- a/rpkid/rpki/gui/views.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (C) 2013 SPARTA, Inc. a Parsons Company -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND SPARTA DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL SPARTA BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -__version__ = '$Id$' - -import django.contrib.auth.views -from rpki.gui.decorators import tls_required - - -@tls_required -def login(request, *args, **kwargs): - "Wrapper around django.contrib.auth.views.login to force use of TLS." - return django.contrib.auth.views.login(request, *args, **kwargs) - - -@tls_required -def logout(request, *args, **kwargs): - "Wrapper around django.contrib.auth.views.logout to force use of TLS." - return django.contrib.auth.views.logout(request, *args, **kwargs) diff --git a/rpkid/rpki/http.py b/rpkid/rpki/http.py deleted file mode 100644 index 3c541f26..00000000 --- a/rpkid/rpki/http.py +++ /dev/null @@ -1,1070 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -HTTP utilities, both client and server. -""" - -import time -import socket -import asyncore -import asynchat -import urlparse -import sys -import random -import rpki.async -import rpki.sundial -import rpki.x509 -import rpki.exceptions -import rpki.log -import rpki.POW - -## @var rpki_content_type -# HTTP content type used for all RPKI messages. -rpki_content_type = "application/x-rpki" - -## @var debug_http -# Verbose chatter about HTTP streams. -debug_http = False - -## @var want_persistent_client -# Whether we want persistent HTTP client streams, when server also supports them. -want_persistent_client = False - -## @var want_persistent_server -# Whether we want persistent HTTP server streams, when client also supports them. -want_persistent_server = False - -## @var default_client_timeout -# Default HTTP client connection timeout. -default_client_timeout = rpki.sundial.timedelta(minutes = 5) - -## @var default_server_timeout -# Default HTTP server connection timeouts. Given our druthers, we'd -# prefer that the client close the connection, as this avoids the -# problem of client starting to reuse connection just as server closes -# it, so this should be longer than the client timeout. -default_server_timeout = rpki.sundial.timedelta(minutes = 10) - -## @var default_http_version -# Preferred HTTP version. -default_http_version = (1, 0) - -## @var default_tcp_port -# Default port for clients and servers that don't specify one. -default_tcp_port = 80 - -## @var enable_ipv6_servers -# Whether to enable IPv6 listeners. Enabled by default, as it should -# be harmless. Has no effect if kernel doesn't support IPv6. 
-enable_ipv6_servers = True - -## @var enable_ipv6_clients -# Whether to consider IPv6 addresses when making connections. -# Disabled by default, as IPv6 connectivity is still a bad joke in -# far too much of the world. -enable_ipv6_clients = False - -## @var have_ipv6 -# Whether the current machine claims to support IPv6. Note that just -# because the kernel supports it doesn't mean that the machine has -# usable IPv6 connectivity. I don't know of a simple portable way to -# probe for connectivity at runtime (the old test of "can you ping -# SRI-NIC.ARPA?" seems a bit dated...). Don't set this, it's set -# automatically by probing using the socket() system call at runtime. -try: - # pylint: disable=W0702,W0104 - socket.socket(socket.AF_INET6).close() - socket.IPPROTO_IPV6 - socket.IPV6_V6ONLY -except: - have_ipv6 = False -else: - have_ipv6 = True - -## @var use_adns - -# Whether to use rpki.adns code. This is still experimental, so it's -# not (yet) enabled by default. -use_adns = False -try: - import rpki.adns -except ImportError: - pass - -def supported_address_families(enable_ipv6): - """ - IP address families on which servers should listen, and to consider - when selecting addresses for client connections. - """ - if enable_ipv6 and have_ipv6: - return (socket.AF_INET, socket.AF_INET6) - else: - return (socket.AF_INET,) - -def localhost_addrinfo(): - """ - Return pseudo-getaddrinfo results for localhost. - """ - result = [(socket.AF_INET, "127.0.0.1")] - if enable_ipv6_clients and have_ipv6: - result.append((socket.AF_INET6, "::1")) - return result - -class http_message(object): - """ - Virtual class representing of one HTTP message. 
- """ - - software_name = "ISC RPKI library" - - def __init__(self, version = None, body = None, headers = None): - self.version = version - self.body = body - self.headers = headers - self.normalize_headers() - - def normalize_headers(self, headers = None): - """ - Clean up (some of) the horrible messes that HTTP allows in its - headers. - """ - if headers is None: - headers = () if self.headers is None else self.headers.items() - translate_underscore = True - else: - translate_underscore = False - result = {} - for k, v in headers: - if translate_underscore: - k = k.replace("_", "-") - k = "-".join(s.capitalize() for s in k.split("-")) - v = v.strip() - if k in result: - result[k] += ", " + v - else: - result[k] = v - self.headers = result - - @classmethod - def parse_from_wire(cls, headers): - """ - Parse and normalize an incoming HTTP message. - """ - self = cls() - headers = headers.split("\r\n") - self.parse_first_line(*headers.pop(0).split(None, 2)) - for i in xrange(len(headers) - 2, -1, -1): - if headers[i + 1][0].isspace(): - headers[i] += headers[i + 1] - del headers[i + 1] - self.normalize_headers([h.split(":", 1) for h in headers]) - return self - - def format(self): - """ - Format an outgoing HTTP message. - """ - s = self.format_first_line() - if self.body is not None: - assert isinstance(self.body, str) - self.headers["Content-Length"] = len(self.body) - for kv in self.headers.iteritems(): - s += "%s: %s\r\n" % kv - s += "\r\n" - if self.body is not None: - s += self.body - return s - - def __str__(self): - return self.format() - - def parse_version(self, version): - """ - Parse HTTP version, raise an exception if we can't. - """ - if version[:5] != "HTTP/": - raise rpki.exceptions.HTTPBadVersion, "Couldn't parse version %s" % version - self.version = tuple(int(i) for i in version[5:].split(".")) - - @property - def persistent(self): - """ - Figure out whether this HTTP message encourages a persistent connection. 
- """ - c = self.headers.get("Connection") - if self.version == (1, 1): - return c is None or "close" not in c.lower() - elif self.version == (1, 0): - return c is not None and "keep-alive" in c.lower() - else: - return False - -class http_request(http_message): - """ - HTTP request message. - """ - - def __init__(self, cmd = None, path = None, version = default_http_version, body = None, callback = None, errback = None, **headers): - assert cmd == "POST" or body is None - http_message.__init__(self, version = version, body = body, headers = headers) - self.cmd = cmd - self.path = path - self.callback = callback - self.errback = errback - self.retried = False - - def parse_first_line(self, cmd, path, version): - """ - Parse first line of HTTP request message. - """ - self.parse_version(version) - self.cmd = cmd - self.path = path - - def format_first_line(self): - """ - Format first line of HTTP request message, and set up the - User-Agent header. - """ - self.headers.setdefault("User-Agent", self.software_name) - return "%s %s HTTP/%d.%d\r\n" % (self.cmd, self.path, self.version[0], self.version[1]) - - def __repr__(self): - return rpki.log.log_repr(self, self.cmd, self.path) - -class http_response(http_message): - """ - HTTP response message. - """ - - def __init__(self, code = None, reason = None, version = default_http_version, body = None, **headers): - http_message.__init__(self, version = version, body = body, headers = headers) - self.code = code - self.reason = reason - - def parse_first_line(self, version, code, reason): - """ - Parse first line of HTTP response message. - """ - self.parse_version(version) - self.code = int(code) - self.reason = reason - - def format_first_line(self): - """ - Format first line of HTTP response message, and set up Date and - Server headers. 
- """ - self.headers.setdefault("Date", time.strftime("%a, %d %b %Y %T GMT")) - self.headers.setdefault("Server", self.software_name) - return "HTTP/%d.%d %s %s\r\n" % (self.version[0], self.version[1], self.code, self.reason) - - def __repr__(self): - return rpki.log.log_repr(self, self.code, self.reason) - -def log_method(self, msg, logger = rpki.log.debug): - """ - Logging method used in several different classes. - """ - assert isinstance(logger, rpki.log.logger) - if debug_http or logger is not rpki.log.debug: - logger("%r: %s" % (self, msg)) - -def addr_to_string(addr): - """ - Convert socket addr tuple to printable string. Assumes 2-element - tuple is IPv4, 4-element tuple is IPv6, throws TypeError for - anything else. - """ - - if len(addr) == 2: - return "%s:%d" % (addr[0], addr[1]) - if len(addr) == 4: - return "%s.%d" % (addr[0], addr[1]) - raise TypeError - -class http_stream(asynchat.async_chat): - """ - Virtual class representing an HTTP message stream. - """ - - log = log_method - - def __repr__(self): - status = ["connected"] if self.connected else [] - try: - status.append(addr_to_string(self.addr)) - except TypeError: - pass - return rpki.log.log_repr(self, *status) - - def __init__(self, sock = None): - asynchat.async_chat.__init__(self, sock) - self.buffer = [] - self.timer = rpki.async.timer(self.handle_timeout) - self.restart() - - def restart(self): - """ - (Re)start HTTP message parser, reset timer. - """ - assert not self.buffer - self.chunk_handler = None - self.set_terminator("\r\n\r\n") - self.update_timeout() - - def update_timeout(self): - """ - Put this stream's timer in known good state: set it to the - stream's timeout value if we're doing timeouts, otherwise clear - it. - """ - if self.timeout is not None: - self.log("Setting timeout %s" % self.timeout) - self.timer.set(self.timeout) - else: - self.log("Clearing timeout") - self.timer.cancel() - - def collect_incoming_data(self, data): - """ - Buffer incoming data from asynchat. 
- """ - self.buffer.append(data) - self.update_timeout() - - def get_buffer(self): - """ - Consume data buffered from asynchat. - """ - val = "".join(self.buffer) - self.buffer = [] - return val - - def found_terminator(self): - """ - Asynchat reported that it found whatever terminator we set, so - figure out what to do next. This can be messy, because we can be - in any of several different states: - - @li We might be handling chunked HTTP, in which case we have to - initialize the chunk decoder; - - @li We might have found the end of the message body, in which case - we can (finally) process it; or - - @li We might have just gotten to the end of the message headers, - in which case we have to parse them to figure out which of three - separate mechanisms (chunked, content-length, TCP close) is going - to tell us how to find the end of the message body. - """ - self.update_timeout() - if self.chunk_handler: - self.chunk_handler() - elif not isinstance(self.get_terminator(), str): - self.handle_body() - else: - self.msg = self.parse_type.parse_from_wire(self.get_buffer()) - if self.msg.version == (1, 1) and "chunked" in self.msg.headers.get("Transfer-Encoding", "").lower(): - self.msg.body = [] - self.chunk_handler = self.chunk_header - self.set_terminator("\r\n") - elif "Content-Length" in self.msg.headers: - self.set_terminator(int(self.msg.headers["Content-Length"])) - else: - self.handle_no_content_length() - - def chunk_header(self): - """ - Asynchat just handed us what should be the header of one chunk of - a chunked encoding stream. If this chunk has a body, set the - stream up to read it; otherwise, this is the last chunk, so start - the process of exiting the chunk decoder. 
- """ - n = int(self.get_buffer().partition(";")[0], 16) - self.log("Chunk length %s" % n) - if n: - self.chunk_handler = self.chunk_body - self.set_terminator(n) - else: - self.msg.body = "".join(self.msg.body) - self.chunk_handler = self.chunk_discard_trailer - - def chunk_body(self): - """ - Asynchat just handed us what should be the body of a chunk of the - body of a chunked message (sic). Save it, and prepare to move on - to the next chunk. - """ - self.log("Chunk body") - self.msg.body += self.buffer - self.buffer = [] - self.chunk_handler = self.chunk_discard_crlf - self.set_terminator("\r\n") - - def chunk_discard_crlf(self): - """ - Consume the CRLF that terminates a chunk, reinitialize chunk - decoder to be ready for the next chunk. - """ - self.log("Chunk CRLF") - s = self.get_buffer() - assert s == "", "%r: Expected chunk CRLF, got '%s'" % (self, s) - self.chunk_handler = self.chunk_header - - def chunk_discard_trailer(self): - """ - Consume chunk trailer, which should be empty, then (finally!) exit - the chunk decoder and hand complete message off to the application. - """ - self.log("Chunk trailer") - s = self.get_buffer() - assert s == "", "%r: Expected end of chunk trailers, got '%s'" % (self, s) - self.chunk_handler = None - self.handle_message() - - def handle_body(self): - """ - Hand normal (not chunked) message off to the application. - """ - self.msg.body = self.get_buffer() - self.handle_message() - - def handle_error(self): - """ - Asynchat (or asyncore, or somebody) raised an exception. See - whether it's one we should just pass along, otherwise log a stack - trace and close the stream. - """ - self.timer.cancel() - etype = sys.exc_info()[0] - if etype in (SystemExit, rpki.async.ExitNow): - raise - rpki.log.traceback() - if etype is not rpki.exceptions.HTTPClientAborted: - self.log("Closing due to error", rpki.log.warn) - self.close() - - def handle_timeout(self): - """ - Inactivity timer expired, close connection with prejudice. 
- """ - self.log("Timeout, closing") - self.close() - - def handle_close(self): - """ - Wrapper around asynchat connection close handler, so that we can - log the event, cancel timer, and so forth. - """ - self.log("Close event in HTTP stream handler") - self.timer.cancel() - asynchat.async_chat.handle_close(self) - -class http_server(http_stream): - """ - HTTP server stream. - """ - - ## @var parse_type - # Stream parser should look for incoming HTTP request messages. - parse_type = http_request - - ## @var timeout - # Use the default server timeout value set in the module header. - timeout = default_server_timeout - - def __init__(self, sock, handlers): - self.handlers = handlers - http_stream.__init__(self, sock = sock) - self.expect_close = not want_persistent_server - self.log("Starting") - - def handle_no_content_length(self): - """ - Handle an incoming message that used neither chunking nor a - Content-Length header (that is: this message will be the last one - in this server stream). No special action required. - """ - self.handle_message() - - def find_handler(self, path): - """ - Helper method to search self.handlers. - """ - for s, h in self.handlers: - if path.startswith(s): - return h - return None - - def handle_message(self): - """ - HTTP layer managed to deliver a complete HTTP request to - us, figure out what to do with it. Check the command and - Content-Type, look for a handler, and if everything looks right, - pass the message body, path, and a reply callback to the handler. 
- """ - self.log("Received request %r" % self.msg) - if not self.msg.persistent: - self.expect_close = True - handler = self.find_handler(self.msg.path) - error = None - if self.msg.cmd != "POST": - error = 501, "No handler for method %s" % self.msg.cmd - elif self.msg.headers["Content-Type"] != rpki_content_type: - error = 415, "No handler for Content-Type %s" % self.headers["Content-Type"] - elif handler is None: - error = 404, "No handler for URL %s" % self.msg.path - if error is None: - try: - handler(self.msg.body, self.msg.path, self.send_reply) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - rpki.log.traceback() - self.send_error(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) - else: - self.send_error(code = error[0], reason = error[1]) - - def send_error(self, code, reason): - """ - Send an error response to this request. - """ - self.send_message(code = code, reason = reason) - - def send_reply(self, code, body = None, reason = "OK"): - """ - Send a reply to this request. - """ - self.send_message(code = code, body = body, reason = reason) - - def send_message(self, code, reason = "OK", body = None): - """ - Queue up reply message. If both parties agree that connection is - persistant, and if no error occurred, restart this stream to - listen for next message; otherwise, queue up a close event for - this stream so it will shut down once the reply has been sent. - """ - self.log("Sending response %s %s" % (code, reason)) - if code >= 400: - self.expect_close = True - msg = http_response(code = code, reason = reason, body = body, - Content_Type = rpki_content_type, - Connection = "Close" if self.expect_close else "Keep-Alive") - self.push(msg.format()) - if self.expect_close: - self.log("Closing") - self.timer.cancel() - self.close_when_done() - else: - self.log("Listening for next message") - self.restart() - -class http_listener(asyncore.dispatcher): - """ - Listener for incoming HTTP connections. 
- """ - - log = log_method - - def __repr__(self): - try: - status = (addr_to_string(self.addr),) - except TypeError: - status = () - return rpki.log.log_repr(self, *status) - - def __init__(self, handlers, addrinfo): - asyncore.dispatcher.__init__(self) - self.handlers = handlers - try: - af, socktype, proto, canonname, sockaddr = addrinfo # pylint: disable=W0612 - self.create_socket(af, socktype) - self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - try: - self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - except AttributeError: - pass - if have_ipv6 and af == socket.AF_INET6: - self.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) - self.bind(sockaddr) - self.listen(5) - except Exception, e: - self.log("Couldn't set up HTTP listener: %s" % e, rpki.log.warn) - rpki.log.traceback() - self.close() - for h in handlers: - self.log("Handling %s" % h[0]) - - def handle_accept(self): - """ - Asyncore says we have an incoming connection, spawn an http_server - stream for it and pass along all of our handler data. - """ - try: - s, c = self.accept() - self.log("Accepting connection from %s" % addr_to_string(c)) - http_server(sock = s, handlers = self.handlers) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - self.log("Unable to accept connection: %s" % e) - self.handle_error() - - def handle_error(self): - """ - Asyncore signaled an error, pass it along or log it. - """ - if sys.exc_info()[0] in (SystemExit, rpki.async.ExitNow): - raise - self.log("Error in HTTP listener", rpki.log.warn) - rpki.log.traceback() - -class http_client(http_stream): - """ - HTTP client stream. - """ - - ## @var parse_type - # Stream parser should look for incoming HTTP response messages. - parse_type = http_response - - ## @var timeout - # Use the default client timeout value set in the module header. - timeout = default_client_timeout - - ## @var state - # Application layer connection state. 
- state = None - - def __init__(self, queue, hostport): - self.log("Creating new connection to %s" % addr_to_string(hostport)) - http_stream.__init__(self) - self.queue = queue - self.host = hostport[0] - self.port = hostport[1] - self.set_state("opening") - self.expect_close = not want_persistent_client - - def start(self): - """ - Create socket and request a connection. - """ - if not use_adns: - self.log("Not using ADNS") - self.gotaddrinfo([(socket.AF_INET, self.host)]) - elif self.host == "localhost": - self.log("Bypassing DNS for localhost") - self.gotaddrinfo(localhost_addrinfo()) - else: - families = supported_address_families(enable_ipv6_clients) - self.log("Starting ADNS lookup for %s in families %r" % (self.host, families)) - rpki.adns.getaddrinfo(self.gotaddrinfo, self.dns_error, self.host, families) - - def dns_error(self, e): - """ - Handle DNS lookup errors. For now, just whack the connection. - Undoubtedly we should do something better with diagnostics here. - """ - self.handle_error() - - def gotaddrinfo(self, addrinfo): - """ - Got address data from DNS, create socket and request connection. - """ - try: - self.af, self.address = random.choice(addrinfo) - self.log("Connecting to AF %s host %s port %s addr %s" % (self.af, self.host, self.port, self.address)) - self.create_socket(self.af, socket.SOCK_STREAM) - self.connect((self.address, self.port)) - if self.addr is None: - self.addr = (self.host, self.port) - self.update_timeout() - except (rpki.async.ExitNow, SystemExit): - raise - except Exception: - self.handle_error() - - def handle_connect(self): - """ - Asyncore says socket has connected. - """ - self.log("Socket connected") - self.set_state("idle") - assert self.queue.client is self - self.queue.send_request() - - def set_state(self, state): - """ - Set HTTP client connection state. 
- """ - self.log("State transition %s => %s" % (self.state, state)) - self.state = state - - def handle_no_content_length(self): - """ - Handle response message that used neither chunking nor a - Content-Length header (that is: this message will be the last one - in this server stream). In this case we want to read until we - reach the end of the data stream. - """ - self.set_terminator(None) - - def send_request(self, msg): - """ - Queue up request message and kickstart connection. - """ - self.log("Sending request %r" % msg) - assert self.state == "idle", "%r: state should be idle, is %s" % (self, self.state) - self.set_state("request-sent") - msg.headers["Connection"] = "Close" if self.expect_close else "Keep-Alive" - self.push(msg.format()) - self.restart() - - def handle_message(self): - """ - Handle incoming HTTP response message. Make sure we're in a state - where we expect to see such a message (and allow the mysterious - empty messages that Apache sends during connection close, no idea - what that is supposed to be about). If everybody agrees that the - connection should stay open, put it into an idle state; otherwise, - arrange for the stream to shut down. 
- """ - - self.log("Message received, state %s" % self.state) - - if not self.msg.persistent: - self.expect_close = True - - if self.state != "request-sent": - if self.state == "closing": - assert not self.msg.body - self.log("Ignoring empty response received while closing") - return - raise rpki.exceptions.HTTPUnexpectedState, "%r received message while in unexpected state %s" % (self, self.state) - - if self.expect_close: - self.log("Closing") - self.set_state("closing") - self.close_when_done() - else: - self.log("Idling") - self.set_state("idle") - self.update_timeout() - - if self.msg.code != 200: - errmsg = "HTTP request failed" - if self.msg.code is not None: - errmsg += " with status %s" % self.msg.code - if self.msg.reason: - errmsg += ", reason %s" % self.msg.reason - if self.msg.body: - errmsg += ", response %s" % self.msg.body - raise rpki.exceptions.HTTPRequestFailed(errmsg) - self.queue.return_result(self, self.msg, detach = self.expect_close) - - def handle_close(self): - """ - Asyncore signaled connection close. If we were waiting for that - to find the end of a response message, process the resulting - message now; if we were waiting for the response to a request we - sent, signal the error. - """ - http_stream.handle_close(self) - self.log("State %s" % self.state) - if self.get_terminator() is None: - self.handle_body() - elif self.state == "request-sent": - raise rpki.exceptions.HTTPClientAborted, "HTTP request aborted by close event" - else: - self.queue.detach(self) - - def handle_timeout(self): - """ - Connection idle timer has expired. Shut down connection in any - case, noisily if we weren't idle. 
- """ - bad = self.state not in ("idle", "closing") - if bad: - self.log("Timeout while in state %s" % self.state, rpki.log.warn) - http_stream.handle_timeout(self) - if bad: - try: - raise rpki.exceptions.HTTPTimeout - except: # pylint: disable=W0702 - self.handle_error() - else: - self.queue.detach(self) - - def handle_error(self): - """ - Asyncore says something threw an exception. Log it, then shut - down the connection and pass back the exception. - """ - eclass, edata = sys.exc_info()[0:2] - self.log("Error on HTTP client connection %s:%s %s %s" % (self.host, self.port, eclass, edata), rpki.log.warn) - http_stream.handle_error(self) - self.queue.return_result(self, edata, detach = True) - -class http_queue(object): - """ - Queue of pending HTTP requests for a single destination. This class - is very tightly coupled to http_client; http_client handles the HTTP - stream itself, this class provides a slightly higher-level API. - """ - - log = log_method - - def __repr__(self): - return rpki.log.log_repr(self, addr_to_string(self.hostport)) - - def __init__(self, hostport): - self.hostport = hostport - self.client = None - self.log("Created") - self.queue = [] - - def request(self, *requests): - """ - Append http_request object(s) to this queue. - """ - self.log("Adding requests %r" % requests) - self.queue.extend(requests) - - def restart(self): - """ - Send next request for this queue, if we can. This may involve - starting a new http_client stream, reusing an existing idle - stream, or just ignoring this request if there's an active client - stream already; in the last case, handling of the response (or - exception, or timeout) for the query currently in progress will - call this method when it's time to kick out the next query. 
- """ - try: - if self.client is None: - self.client = http_client(self, self.hostport) - self.log("Attached client %r" % self.client) - self.client.start() - elif self.client.state == "idle": - self.log("Sending request to existing client %r" % self.client) - self.send_request() - else: - self.log("Client %r exists in state %r" % (self.client, self.client.state)) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - self.return_result(self.client, e, detach = True) - - def send_request(self): - """ - Kick out the next query in this queue, if any. - """ - if self.queue: - self.client.send_request(self.queue[0]) - - def detach(self, client_): - """ - Detatch a client from this queue. Silently ignores attempting to - detach a client that is not attached to this queue, to simplify - handling of what otherwise would be a nasty set of race - conditions. - """ - if client_ is self.client: - self.log("Detaching client %r" % client_) - self.client = None - - def return_result(self, client, result, detach = False): # pylint: disable=W0621 - """ - Client stream has returned a result, which we need to pass along - to the original caller. Result may be either an HTTP response - message or an exception. In either case, once we're done - processing this result, kick off next message in the queue, if any. - """ - - if client is not self.client: - self.log("Wrong client trying to return result. THIS SHOULD NOT HAPPEN. Dropping result %r" % result, rpki.log.warn) - return - - if detach: - self.detach(client) - - try: - req = self.queue.pop(0) - self.log("Dequeuing request %r" % req) - except IndexError: - self.log("No caller. THIS SHOULD NOT HAPPEN. 
Dropping result %r" % result, rpki.log.warn) - return - - assert isinstance(result, http_response) or isinstance(result, Exception) - - if isinstance(result, http_response): - try: - self.log("Returning result %r to caller" % result) - req.callback(result.body) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - result = e - - if isinstance(result, Exception): - try: - self.log("Returning exception %r to caller: %s" % (result, result), rpki.log.warn) - req.errback(result) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception: - # - # If we get here, we may have lost the event chain. Not - # obvious what we can do about it at this point, but force a - # traceback so that it will be somewhat obvious that something - # really bad happened. - # - self.log("Exception in exception callback", rpki.log.warn) - rpki.log.traceback(True) - - self.log("Queue: %r" % self.queue) - - if self.queue: - self.restart() - -## @var client_queues -# Map of (host, port) tuples to http_queue objects. -client_queues = {} - -def client(msg, url, callback, errback): - """ - Open client HTTP connection, send a message, set up callbacks to - handle response. 
- """ - - u = urlparse.urlparse(url) - - if (u.scheme not in ("", "http") or - u.username is not None or - u.password is not None or - u.params != "" or - u.query != "" or - u.fragment != ""): - raise rpki.exceptions.BadClientURL, "Unusable URL %s" % url - - if debug_http: - rpki.log.debug("Contacting %s" % url) - - request = http_request( - cmd = "POST", - path = u.path, - body = msg, - callback = callback, - errback = errback, - Host = u.hostname, - Content_Type = rpki_content_type) - - hostport = (u.hostname or "localhost", u.port or default_tcp_port) - - if debug_http: - rpki.log.debug("Created request %r for %s" % (request, addr_to_string(hostport))) - if hostport not in client_queues: - client_queues[hostport] = http_queue(hostport) - client_queues[hostport].request(request) - - # Defer connection attempt until after we've had time to process any - # pending I/O events, in case connections have closed. - - if debug_http: - rpki.log.debug("Scheduling connection startup for %r" % request) - rpki.async.event_defer(client_queues[hostport].restart) - -def server(handlers, port, host = ""): - """ - Run an HTTP server and wait (forever) for connections. - """ - - if not isinstance(handlers, (tuple, list)): - handlers = (("/", handlers),) - - # Yes, this is sick. So is getaddrinfo() returning duplicate - # records, which RedHat has the gall to claim is a feature. - ai = [] - for af in supported_address_families(enable_ipv6_servers): - try: - if host: - h = host - elif have_ipv6 and af == socket.AF_INET6: - h = "::" - else: - h = "0.0.0.0" - for a in socket.getaddrinfo(h, port, af, socket.SOCK_STREAM): - if a not in ai: - ai.append(a) - except socket.gaierror: - pass - - for a in ai: - http_listener(addrinfo = a, handlers = handlers) - - rpki.async.event_loop() - -class caller(object): - """ - Handle client-side mechanics for protocols based on HTTP, CMS, and - rpki.xml_utils. Calling sequence is intended to nest within - rpki.async.sync_wrapper. 
- """ - - debug = False - - def __init__(self, proto, client_key, client_cert, server_ta, server_cert, url, debug = None): - self.proto = proto - self.client_key = client_key - self.client_cert = client_cert - self.server_ta = server_ta - self.server_cert = server_cert - self.url = url - self.cms_timestamp = None - if debug is not None: - self.debug = debug - - def __call__(self, cb, eb, *pdus): - - def done(r_der): - """ - Handle CMS-wrapped XML response message. - """ - try: - r_cms = self.proto.cms_msg(DER = r_der) - r_msg = r_cms.unwrap((self.server_ta, self.server_cert)) - self.cms_timestamp = r_cms.check_replay(self.cms_timestamp, self.url) - if self.debug: - print "" - print r_cms.pretty_print_content() - cb(r_msg) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - eb(e) - - q_msg = self.proto.msg.query(*pdus) - q_cms = self.proto.cms_msg() - q_der = q_cms.wrap(q_msg, self.client_key, self.client_cert) - if self.debug: - print "" - print q_cms.pretty_print_content() - - client(url = self.url, msg = q_der, callback = done, errback = eb) diff --git a/rpkid/rpki/ipaddrs.py b/rpkid/rpki/ipaddrs.py deleted file mode 100644 index c1855302..00000000 --- a/rpkid/rpki/ipaddrs.py +++ /dev/null @@ -1,137 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Classes to represent IP addresses. These are mostly obsolete at this -point, having been replaced by the rpki.POW.IPAddress class, but there -may still be some code using these, so keep them for now for backwards -compatability. - -Given some of the other operations we need to perform on them, it's -most convenient to represent IP addresses as Python "long" values. -The classes in this module just wrap suitable read/write syntax around -the underlying "long" type. - -These classes also supply a "bits" attribute for use by other code -built on these classes; for the most part, IPv6 addresses really are -just IPv4 addresses with more bits, so we supply the number of bits -once, here, thus avoiding a lot of duplicate code elsewhere. -""" - -import socket, struct - -class v4addr(long): - """ - IPv4 address. 
- - Derived from long, but supports IPv4 print syntax. - """ - - bits = 32 - ipversion = 4 - - def __new__(cls, x): - """ - Construct a v4addr object. - """ - if isinstance(x, unicode): - x = x.encode("ascii") - if isinstance(x, str): - return cls.from_bytes(socket.inet_pton(socket.AF_INET, ".".join(str(int(i)) for i in x.split(".")))) - else: - return long.__new__(cls, x) - - def to_bytes(self): - """ - Convert a v4addr object to a raw byte string. - """ - return struct.pack("!I", long(self)) - - @classmethod - def from_bytes(cls, x): - """ - Convert from a raw byte string to a v4addr object. - """ - return cls(struct.unpack("!I", x)[0]) - - def __str__(self): - """ - Convert a v4addr object to string format. - """ - return socket.inet_ntop(socket.AF_INET, self.to_bytes()) - -class v6addr(long): - """ - IPv6 address. - - Derived from long, but supports IPv6 print syntax. - """ - - bits = 128 - ipversion = 6 - - def __new__(cls, x): - """ - Construct a v6addr object. - """ - if isinstance(x, unicode): - x = x.encode("ascii") - if isinstance(x, str): - return cls.from_bytes(socket.inet_pton(socket.AF_INET6, x)) - else: - return long.__new__(cls, x) - - def to_bytes(self): - """ - Convert a v6addr object to a raw byte string. - """ - return struct.pack("!QQ", long(self) >> 64, long(self) & 0xFFFFFFFFFFFFFFFF) - - @classmethod - def from_bytes(cls, x): - """ - Convert from a raw byte string to a v6addr object. - """ - x = struct.unpack("!QQ", x) - return cls((x[0] << 64) | x[1]) - - def __str__(self): - """ - Convert a v6addr object to string format. - """ - return socket.inet_ntop(socket.AF_INET6, self.to_bytes()) - -def parse(s): - """ - Parse a string as either an IPv4 or IPv6 address, and return object of appropriate class. 
- """ - if isinstance(s, unicode): - s = s.encode("ascii") - return v6addr(s) if ":" in s else v4addr(s) diff --git a/rpkid/rpki/irdb/__init__.py b/rpkid/rpki/irdb/__init__.py deleted file mode 100644 index cc83387e..00000000 --- a/rpkid/rpki/irdb/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -# $Id$ -# -# Copyright (C) 2011-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Django really wants its models packaged up in a "models" module within a -Python package, so humor it. -""" - -# pylint: disable=W0401 - -from rpki.irdb.models import * -from rpki.irdb.zookeeper import Zookeeper -from rpki.irdb.router import DBContextRouter, database diff --git a/rpkid/rpki/irdb/models.py b/rpkid/rpki/irdb/models.py deleted file mode 100644 index 1ad9b4e3..00000000 --- a/rpkid/rpki/irdb/models.py +++ /dev/null @@ -1,646 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2011--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR -# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Internet Registry (IR) Database, Django-style. - -This is the back-end code's interface to the database. It's intended -to be usable by command line programs and other scripts, not just -Django GUI code, so be careful. -""" - -# pylint: disable=W0232 - -import django.db.models -import rpki.x509 -import rpki.sundial -import rpki.resource_set -import socket -import rpki.POW -from south.modelsinspector import add_introspection_rules - -## @var ip_version_choices -# Choice argument for fields implementing IP version numbers. - -ip_version_choices = ((4, "IPv4"), (6, "IPv6")) - -## @var ca_certificate_lifetime -# Lifetime for a BPKI CA certificate. - -ca_certificate_lifetime = rpki.sundial.timedelta(days = 3652) - -## @var crl_interval - -# Expected interval between BPKI CRL updates. This should be a little -# longer than the real regeneration cycle, so that the old CRL will -# not go stale while we're generating the new one. Eg, if we -# regenerate daily, an interval of 24 hours is too short, but 25 hours -# would be OK, as would 24 hours and 30 minutes. - -crl_interval = rpki.sundial.timedelta(hours = 25) - -## @var ee_certificate_lifetime -# Lifetime for a BPKI EE certificate. - -ee_certificate_lifetime = rpki.sundial.timedelta(days = 60) - -### - -# Field types - -class HandleField(django.db.models.CharField): - """ - A handle field type. 
- """ - - description = 'A "handle" in one of the RPKI protocols' - - def __init__(self, *args, **kwargs): - kwargs["max_length"] = 120 - django.db.models.CharField.__init__(self, *args, **kwargs) - -class EnumField(django.db.models.PositiveSmallIntegerField): - """ - An enumeration type that uses strings in Python and small integers - in SQL. - """ - - description = "An enumeration type" - - __metaclass__ = django.db.models.SubfieldBase - - def __init__(self, *args, **kwargs): - if isinstance(kwargs.get("choices"), (tuple, list)) and isinstance(kwargs["choices"][0], str): - kwargs["choices"] = tuple(enumerate(kwargs["choices"], 1)) - django.db.models.PositiveSmallIntegerField.__init__(self, *args, **kwargs) - self.enum_i2s = dict(self.flatchoices) - self.enum_s2i = dict((v, k) for k, v in self.flatchoices) - - def to_python(self, value): - return self.enum_i2s.get(value, value) - - def get_prep_value(self, value): - return self.enum_s2i.get(value, value) - -class SundialField(django.db.models.DateTimeField): - """ - A field type for our customized datetime objects. - """ - __metaclass__ = django.db.models.SubfieldBase - - description = "A datetime type using our customized datetime objects" - - def to_python(self, value): - if isinstance(value, rpki.sundial.pydatetime.datetime): - return rpki.sundial.datetime.from_datetime( - django.db.models.DateTimeField.to_python(self, value)) - else: - return value - - def get_prep_value(self, value): - if isinstance(value, rpki.sundial.datetime): - return value.to_datetime() - else: - return value - - -class DERField(django.db.models.Field): - """ - Field types for DER objects. 
- """ - - __metaclass__ = django.db.models.SubfieldBase - - def __init__(self, *args, **kwargs): - kwargs["serialize"] = False - kwargs["blank"] = True - kwargs["default"] = None - django.db.models.Field.__init__(self, *args, **kwargs) - - def db_type(self, connection): - if connection.settings_dict['ENGINE'] == "django.db.backends.posgresql": - return "bytea" - else: - return "BLOB" - - def to_python(self, value): - assert value is None or isinstance(value, (self.rpki_type, str)) - if isinstance(value, str): - return self.rpki_type(DER = value) - else: - return value - - def get_prep_value(self, value): - assert value is None or isinstance(value, (self.rpki_type, str)) - if isinstance(value, self.rpki_type): - return value.get_DER() - else: - return value - -class CertificateField(DERField): - description = "X.509 certificate" - rpki_type = rpki.x509.X509 - -class RSAKeyField(DERField): - description = "RSA keypair" - rpki_type = rpki.x509.RSA - -class CRLField(DERField): - description = "Certificate Revocation List" - rpki_type = rpki.x509.CRL - -class PKCS10Field(DERField): - description = "PKCS #10 certificate request" - rpki_type = rpki.x509.PKCS10 - -class SignedReferralField(DERField): - description = "CMS signed object containing XML" - rpki_type = rpki.x509.SignedReferral - - -# Custom managers - -class CertificateManager(django.db.models.Manager): - - def get_or_certify(self, **kwargs): - """ - Sort of like .get_or_create(), but for models containing - certificates which need to be generated based on other fields. - - Takes keyword arguments like .get(), checks for existing object. - If none, creates a new one; if found an existing object but some - of the non-key fields don't match, updates the existing object. - Runs certification method for new or updated objects. Returns a - tuple consisting of the object and a boolean indicating whether - anything has changed. 
- """ - - changed = False - - try: - obj = self.get(**self._get_or_certify_keys(kwargs)) - - except self.model.DoesNotExist: - obj = self.model(**kwargs) - changed = True - - else: - for k in kwargs: - if getattr(obj, k) != kwargs[k]: - setattr(obj, k, kwargs[k]) - changed = True - - if changed: - obj.avow() - obj.save() - - return obj, changed - - def _get_or_certify_keys(self, kwargs): - assert len(self.model._meta.unique_together) == 1 - return dict((k, kwargs[k]) for k in self.model._meta.unique_together[0]) - -class ResourceHolderCAManager(CertificateManager): - def _get_or_certify_keys(self, kwargs): - return { "handle" : kwargs["handle"] } - -class ServerCAManager(CertificateManager): - def _get_or_certify_keys(self, kwargs): - return { "pk" : 1 } - -class ResourceHolderEEManager(CertificateManager): - def _get_or_certify_keys(self, kwargs): - return { "issuer" : kwargs["issuer"] } - -### - -class CA(django.db.models.Model): - certificate = CertificateField() - private_key = RSAKeyField() - latest_crl = CRLField() - - # Might want to bring these into line with what rpkid does. Current - # variables here were chosen to map easily to what OpenSSL command - # line tool was keeping on disk. 
- - next_serial = django.db.models.BigIntegerField(default = 1) - next_crl_number = django.db.models.BigIntegerField(default = 1) - last_crl_update = SundialField() - next_crl_update = SundialField() - - class Meta: - abstract = True - - def avow(self): - if self.private_key is None: - self.private_key = rpki.x509.RSA.generate(quiet = True) - now = rpki.sundial.now() - notAfter = now + ca_certificate_lifetime - self.certificate = rpki.x509.X509.bpki_self_certify( - keypair = self.private_key, - subject_name = self.subject_name, - serial = self.next_serial, - now = now, - notAfter = notAfter) - self.next_serial += 1 - self.generate_crl() - return self.certificate - - def certify(self, subject_name, subject_key, validity_interval, is_ca, pathLenConstraint = None): - now = rpki.sundial.now() - notAfter = now + validity_interval - result = self.certificate.bpki_certify( - keypair = self.private_key, - subject_name = subject_name, - subject_key = subject_key, - serial = self.next_serial, - now = now, - notAfter = notAfter, - is_ca = is_ca, - pathLenConstraint = pathLenConstraint) - self.next_serial += 1 - return result - - def revoke(self, cert): - Revocation.objects.create( - issuer = self, - revoked = rpki.sundial.now(), - serial = cert.certificate.getSerial(), - expires = cert.certificate.getNotAfter() + crl_interval) - cert.delete() - self.generate_crl() - - def generate_crl(self): - now = rpki.sundial.now() - self.revocations.filter(expires__lt = now).delete() - revoked = [(r.serial, r.revoked) for r in self.revocations.all()] - self.latest_crl = rpki.x509.CRL.generate( - keypair = self.private_key, - issuer = self.certificate, - serial = self.next_crl_number, - thisUpdate = now, - nextUpdate = now + crl_interval, - revokedCertificates = revoked) - self.last_crl_update = now - self.next_crl_update = now + crl_interval - self.next_crl_number += 1 - -class ServerCA(CA): - objects = ServerCAManager() - - def __unicode__(self): - return "" - - @property - def 
subject_name(self): - if self.certificate is not None: - return self.certificate.getSubject() - else: - return rpki.x509.X501DN.from_cn("%s BPKI server CA" % socket.gethostname()) - -class ResourceHolderCA(CA): - handle = HandleField(unique = True) - objects = ResourceHolderCAManager() - - def __unicode__(self): - return self.handle - - @property - def subject_name(self): - if self.certificate is not None: - return self.certificate.getSubject() - else: - return rpki.x509.X501DN.from_cn("%s BPKI resource CA" % self.handle) - -class Certificate(django.db.models.Model): - - certificate = CertificateField() - objects = CertificateManager() - - class Meta: - abstract = True - unique_together = ("issuer", "handle") - - def revoke(self): - self.issuer.revoke(self) - -class CrossCertification(Certificate): - handle = HandleField() - ta = CertificateField() - - class Meta: - abstract = True - - def avow(self): - self.certificate = self.issuer.certify( - subject_name = self.ta.getSubject(), - subject_key = self.ta.getPublicKey(), - validity_interval = ee_certificate_lifetime, - is_ca = True, - pathLenConstraint = 0) - - def __unicode__(self): - return self.handle - -class HostedCA(Certificate): - issuer = django.db.models.ForeignKey(ServerCA) - hosted = django.db.models.OneToOneField(ResourceHolderCA, related_name = "hosted_by") - - def avow(self): - self.certificate = self.issuer.certify( - subject_name = self.hosted.certificate.getSubject(), - subject_key = self.hosted.certificate.getPublicKey(), - validity_interval = ee_certificate_lifetime, - is_ca = True, - pathLenConstraint = 1) - - class Meta: - unique_together = ("issuer", "hosted") - - def __unicode__(self): - return self.hosted.handle - -class Revocation(django.db.models.Model): - serial = django.db.models.BigIntegerField() - revoked = SundialField() - expires = SundialField() - - class Meta: - abstract = True - unique_together = ("issuer", "serial") - -class ServerRevocation(Revocation): - issuer = 
django.db.models.ForeignKey(ServerCA, related_name = "revocations") - -class ResourceHolderRevocation(Revocation): - issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "revocations") - -class EECertificate(Certificate): - private_key = RSAKeyField() - - class Meta: - abstract = True - - def avow(self): - if self.private_key is None: - self.private_key = rpki.x509.RSA.generate(quiet = True) - self.certificate = self.issuer.certify( - subject_name = self.subject_name, - subject_key = self.private_key.get_public(), - validity_interval = ee_certificate_lifetime, - is_ca = False) - -class ServerEE(EECertificate): - issuer = django.db.models.ForeignKey(ServerCA, related_name = "ee_certificates") - purpose = EnumField(choices = ("rpkid", "pubd", "irdbd", "irbe")) - - class Meta: - unique_together = ("issuer", "purpose") - - @property - def subject_name(self): - return rpki.x509.X501DN.from_cn("%s BPKI %s EE" % (socket.gethostname(), - self.get_purpose_display())) - -class Referral(EECertificate): - issuer = django.db.models.OneToOneField(ResourceHolderCA, related_name = "referral_certificate") - objects = ResourceHolderEEManager() - - @property - def subject_name(self): - return rpki.x509.X501DN.from_cn("%s BPKI Referral EE" % self.issuer.handle) - -class Turtle(django.db.models.Model): - service_uri = django.db.models.CharField(max_length = 255) - -class Rootd(EECertificate, Turtle): - issuer = django.db.models.OneToOneField(ResourceHolderCA, related_name = "rootd") - objects = ResourceHolderEEManager() - - @property - def subject_name(self): - return rpki.x509.X501DN.from_cn("%s BPKI rootd EE" % self.issuer.handle) - -class BSC(Certificate): - issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "bscs") - handle = HandleField() - pkcs10 = PKCS10Field() - - def avow(self): - self.certificate = self.issuer.certify( - subject_name = self.pkcs10.getSubject(), - subject_key = self.pkcs10.getPublicKey(), - validity_interval = 
ee_certificate_lifetime, - is_ca = False) - - def __unicode__(self): - return self.handle - -class ResourceSet(django.db.models.Model): - valid_until = SundialField() - - class Meta: - abstract = True - - @property - def resource_bag(self): - raw_asn, raw_net = self._select_resource_bag() - asns = rpki.resource_set.resource_set_as.from_django( - (a.start_as, a.end_as) for a in raw_asn) - ipv4 = rpki.resource_set.resource_set_ipv4.from_django( - (a.start_ip, a.end_ip) for a in raw_net if a.version == "IPv4") - ipv6 = rpki.resource_set.resource_set_ipv6.from_django( - (a.start_ip, a.end_ip) for a in raw_net if a.version == "IPv6") - return rpki.resource_set.resource_bag( - valid_until = self.valid_until, asn = asns, v4 = ipv4, v6 = ipv6) - - # Writing of .setter method deferred until something needs it. - -class ResourceSetASN(django.db.models.Model): - start_as = django.db.models.BigIntegerField() - end_as = django.db.models.BigIntegerField() - - class Meta: - abstract = True - - def as_resource_range(self): - return rpki.resource_set.resource_range_as(self.start_as, self.end_as) - -class ResourceSetNet(django.db.models.Model): - start_ip = django.db.models.CharField(max_length = 40) - end_ip = django.db.models.CharField(max_length = 40) - version = EnumField(choices = ip_version_choices) - - class Meta: - abstract = True - - def as_resource_range(self): - return rpki.resource_set.resource_range_ip.from_strings(self.start_ip, self.end_ip) - -class Child(CrossCertification, ResourceSet): - issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "children") - name = django.db.models.TextField(null = True, blank = True) - - def _select_resource_bag(self): - child_asn = rpki.irdb.ChildASN.objects.raw(""" - SELECT * - FROM irdb_childasn - WHERE child_id = %s - """, [self.id]) - child_net = list(rpki.irdb.ChildNet.objects.raw(""" - SELECT * - FROM irdb_childnet - WHERE child_id = %s - """, [self.id])) - return child_asn, child_net - - class Meta: - 
unique_together = ("issuer", "handle") - -class ChildASN(ResourceSetASN): - child = django.db.models.ForeignKey(Child, related_name = "asns") - - class Meta: - unique_together = ("child", "start_as", "end_as") - -class ChildNet(ResourceSetNet): - child = django.db.models.ForeignKey(Child, related_name = "address_ranges") - - class Meta: - unique_together = ("child", "start_ip", "end_ip", "version") - -class Parent(CrossCertification, Turtle): - issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "parents") - parent_handle = HandleField() - child_handle = HandleField() - repository_type = EnumField(choices = ("none", "offer", "referral")) - referrer = HandleField(null = True, blank = True) - referral_authorization = SignedReferralField(null = True, blank = True) - - # This shouldn't be necessary - class Meta: - unique_together = ("issuer", "handle") - -class ROARequest(django.db.models.Model): - issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "roa_requests") - asn = django.db.models.BigIntegerField() - - @property - def roa_prefix_bag(self): - prefixes = list(rpki.irdb.ROARequestPrefix.objects.raw(""" - SELECT * - FROM irdb_roarequestprefix - WHERE roa_request_id = %s - """, [self.id])) - v4 = rpki.resource_set.roa_prefix_set_ipv4.from_django( - (p.prefix, p.prefixlen, p.max_prefixlen) for p in prefixes if p.version == "IPv4") - v6 = rpki.resource_set.roa_prefix_set_ipv6.from_django( - (p.prefix, p.prefixlen, p.max_prefixlen) for p in prefixes if p.version == "IPv6") - return rpki.resource_set.roa_prefix_bag(v4 = v4, v6 = v6) - - # Writing of .setter method deferred until something needs it. 
- -class ROARequestPrefix(django.db.models.Model): - roa_request = django.db.models.ForeignKey(ROARequest, related_name = "prefixes") - version = EnumField(choices = ip_version_choices) - prefix = django.db.models.CharField(max_length = 40) - prefixlen = django.db.models.PositiveSmallIntegerField() - max_prefixlen = django.db.models.PositiveSmallIntegerField() - - def as_roa_prefix(self): - if self.version == 'IPv4': - return rpki.resource_set.roa_prefix_ipv4(rpki.POW.IPAddress(self.prefix), self.prefixlen, self.max_prefixlen) - else: - return rpki.resource_set.roa_prefix_ipv6(rpki.POW.IPAddress(self.prefix), self.prefixlen, self.max_prefixlen) - - def as_resource_range(self): - return self.as_roa_prefix().to_resource_range() - - class Meta: - unique_together = ("roa_request", "version", "prefix", "prefixlen", "max_prefixlen") - -class GhostbusterRequest(django.db.models.Model): - issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "ghostbuster_requests") - parent = django.db.models.ForeignKey(Parent, related_name = "ghostbuster_requests", null = True) - vcard = django.db.models.TextField() - -class EECertificateRequest(ResourceSet): - issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "ee_certificate_requests") - pkcs10 = PKCS10Field() - gski = django.db.models.CharField(max_length = 27) - cn = django.db.models.CharField(max_length = 64) - sn = django.db.models.CharField(max_length = 64) - eku = django.db.models.TextField(null = True) - - def _select_resource_bag(self): - ee_asn = rpki.irdb.EECertificateRequestASN.objects.raw(""" - SELECT * - FROM irdb_eecertificaterequestasn - WHERE ee_certificate_request_id = %s - """, [self.id]) - ee_net = rpki.irdb.EECertificateRequestNet.objects.raw(""" - SELECT * - FROM irdb_eecertificaterequestnet - WHERE ee_certificate_request_id = %s - """, [self.id]) - return ee_asn, ee_net - - class Meta: - unique_together = ("issuer", "gski") - -class EECertificateRequestASN(ResourceSetASN): - 
ee_certificate_request = django.db.models.ForeignKey(EECertificateRequest, related_name = "asns") - - class Meta: - unique_together = ("ee_certificate_request", "start_as", "end_as") - -class EECertificateRequestNet(ResourceSetNet): - ee_certificate_request = django.db.models.ForeignKey(EECertificateRequest, related_name = "address_ranges") - - class Meta: - unique_together = ("ee_certificate_request", "start_ip", "end_ip", "version") - -class Repository(CrossCertification): - issuer = django.db.models.ForeignKey(ResourceHolderCA, related_name = "repositories") - client_handle = HandleField() - service_uri = django.db.models.CharField(max_length = 255) - sia_base = django.db.models.TextField() - turtle = django.db.models.OneToOneField(Turtle, related_name = "repository") - - # This shouldn't be necessary - class Meta: - unique_together = ("issuer", "handle") - -class Client(CrossCertification): - issuer = django.db.models.ForeignKey(ServerCA, related_name = "clients") - sia_base = django.db.models.TextField() - parent_handle = HandleField() - - # This shouldn't be necessary - class Meta: - unique_together = ("issuer", "handle") - -# for Django South -- these are just simple subclasses -add_introspection_rules([], - ('^rpki\.irdb\.models\.CertificateField', - '^rpki\.irdb\.models\.CRLField', - '^rpki\.irdb\.models\.EnumField', - '^rpki\.irdb\.models\.HandleField', - '^rpki\.irdb\.models\.RSAKeyField', - '^rpki\.irdb\.models\.SignedReferralField', - '^rpki\.irdb\.models\.SundialField')) diff --git a/rpkid/rpki/irdb/router.py b/rpkid/rpki/irdb/router.py deleted file mode 100644 index 1f27d0c9..00000000 --- a/rpkid/rpki/irdb/router.py +++ /dev/null @@ -1,95 +0,0 @@ -# $Id$ -# -# Copyright (C) 2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Django-style "Database router". - -For most programs, you don't need this. Django's normal mode of -behavior is to use a single SQL database for the IRDB, which is -normally what we want. For certain test scenarios, however, it's -useful to be able to use the same Django ORM models and managers with -multiple databases without having to complicate the interface by -passing database names everywhere. Using a database router -accomplishes this. -""" - -class DBContextRouter(object): - """ - A Django database router for use with multiple IRDBs. - - This router is designed to work in conjunction with the - rpki.irdb.database context handler (q.v.). - """ - - _app = "irdb" - - _database = None - - def db_for_read(self, model, **hints): - if model._meta.app_label == self._app: - return self._database - else: - return None - - def db_for_write(self, model, **hints): - if model._meta.app_label == self._app: - return self._database - else: - return None - - def allow_relation(self, obj1, obj2, **hints): - if self._database is None: - return None - elif obj1._meta.app_label == self._app and obj2._meta.app_label == self._app: - return True - else: - return None - - def allow_syncdb(self, db, model): - if db == self._database and model._meta.app_label == self._app: - return True - else: - return None - -class database(object): - """ - Context manager for use with DBContextRouter. 
Use thusly: - - with rpki.irdb.database("blarg"): - do_stuff() - - This binds IRDB operations to database blarg for the duration of - the call to do_stuff(), then restores the prior state. - """ - - def __init__(self, name, on_entry = None, on_exit = None): - if not isinstance(name, str): - raise ValueError("database name must be a string, not %r" % name) - self.name = name - self.on_entry = on_entry - self.on_exit = on_exit - - def __enter__(self): - if self.on_entry is not None: - self.on_entry() - self.former = DBContextRouter._database - DBContextRouter._database = self.name - - def __exit__(self, _type, value, traceback): - assert DBContextRouter._database is self.name - DBContextRouter._database = self.former - if self.on_exit is not None: - self.on_exit() diff --git a/rpkid/rpki/irdb/zookeeper.py b/rpkid/rpki/irdb/zookeeper.py deleted file mode 100644 index f99dc9f0..00000000 --- a/rpkid/rpki/irdb/zookeeper.py +++ /dev/null @@ -1,1682 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR -# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Management code for the IRDB. 
-""" - -# pylint: disable=W0612 - -import os -import copy -import types -import rpki.config -import rpki.cli -import rpki.sundial -import rpki.log -import rpki.oids -import rpki.http -import rpki.resource_set -import rpki.relaxng -import rpki.exceptions -import rpki.left_right -import rpki.x509 -import rpki.async -import rpki.irdb -import django.db.transaction - -from lxml.etree import (Element, SubElement, ElementTree, - tostring as ElementToString) - -from rpki.csv_utils import csv_reader - - - -# XML namespace and protocol version for OOB setup protocol. The name -# is historical and may change before we propose this as the basis for -# a standard. - -myrpki_namespace = "http://www.hactrn.net/uris/rpki/myrpki/" -myrpki_version = "2" -myrpki_namespaceQName = "{" + myrpki_namespace + "}" - -# XML namespace and protocol version for router certificate requests. -# We probably ought to be pulling this sort of thing from the schema, -# with an assertion to make sure that we understand the current -# protocol version number, but just copy what we did for myrpki until -# I'm ready to rewrite the rpki.relaxng code. - -routercert_namespace = "http://www.hactrn.net/uris/rpki/router-certificate/" -routercert_version = "1" -routercert_namespaceQName = "{" + routercert_namespace + "}" - -myrpki_section = "myrpki" -irdbd_section = "irdbd" -rpkid_section = "rpkid" -pubd_section = "pubd" -rootd_section = "rootd" - -# A whole lot of exceptions - -class HandleNotSet(Exception): "Handle not set." -class MissingHandle(Exception): "Missing handle." -class CouldntTalkToDaemon(Exception): "Couldn't talk to daemon." -class BadXMLMessage(Exception): "Bad XML message." -class PastExpiration(Exception): "Expiration date has already passed." -class CantRunRootd(Exception): "Can't run rootd." - - - -def B64Element(e, tag, obj, **kwargs): - """ - Create an XML element containing Base64 encoded data taken from a - DER object. 
- """ - - if e is None: - se = Element(tag, **kwargs) - else: - se = SubElement(e, tag, **kwargs) - if e is not None and e.text is None: - e.text = "\n" - se.text = "\n" + obj.get_Base64() - se.tail = "\n" - return se - -class PEM_writer(object): - """ - Write PEM files to disk, keeping track of which ones we've already - written and setting the file mode appropriately. - - Comparing the old file with what we're about to write serves no real - purpose except to calm users who find repeated messages about - writing the same file confusing. - """ - - def __init__(self, logstream = None): - self.wrote = set() - self.logstream = logstream - - def __call__(self, filename, obj, compare = True): - filename = os.path.realpath(filename) - if filename in self.wrote: - return - tempname = filename - pem = obj.get_PEM() - if not filename.startswith("/dev/"): - try: - if compare and pem == open(filename, "r").read(): - return - except: # pylint: disable=W0702 - pass - tempname += ".%s.tmp" % os.getpid() - mode = 0400 if filename.endswith(".key") else 0444 - if self.logstream is not None: - self.logstream.write("Writing %s\n" % filename) - f = os.fdopen(os.open(tempname, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, mode), "w") - f.write(pem) - f.close() - if tempname != filename: - os.rename(tempname, filename) - self.wrote.add(filename) - - - - -def etree_read(filename): - """ - Read an etree from a file, verifying then stripping XML namespace - cruft. - """ - - e = ElementTree(file = filename).getroot() - rpki.relaxng.myrpki.assertValid(e) - for i in e.getiterator(): - if i.tag.startswith(myrpki_namespaceQName): - i.tag = i.tag[len(myrpki_namespaceQName):] - else: - raise BadXMLMessage, "XML tag %r is not in namespace %r" % (i.tag, myrpki_namespace) - return e - - -class etree_wrapper(object): - """ - Wrapper for ETree objects so we can return them as function results - without requiring the caller to understand much about them. 
- - """ - - def __init__(self, e, msg = None, debug = False): - self.msg = msg - e = copy.deepcopy(e) - e.set("version", myrpki_version) - for i in e.getiterator(): - if i.tag[0] != "{": - i.tag = myrpki_namespaceQName + i.tag - assert i.tag.startswith(myrpki_namespaceQName) - if debug: - print ElementToString(e) - rpki.relaxng.myrpki.assertValid(e) - self.etree = e - - def __str__(self): - return ElementToString(self.etree) - - def save(self, filename, logstream = None): - filename = os.path.realpath(filename) - tempname = filename - if not filename.startswith("/dev/"): - tempname += ".%s.tmp" % os.getpid() - ElementTree(self.etree).write(tempname) - if tempname != filename: - os.rename(tempname, filename) - if logstream is not None: - logstream.write("Wrote %s\n" % filename) - if self.msg is not None: - logstream.write(self.msg + "\n") - - @property - def file(self): - from cStringIO import StringIO - return StringIO(ElementToString(self.etree)) - - - -class Zookeeper(object): - - ## @var show_xml - # Whether to show XML for debugging - - show_xml = False - - def __init__(self, cfg = None, handle = None, logstream = None): - - if cfg is None: - cfg = rpki.config.parser() - - if handle is None: - handle = cfg.get("handle", section = myrpki_section) - - self.cfg = cfg - - self.logstream = logstream - - self.run_rpkid = cfg.getboolean("run_rpkid", section = myrpki_section) - self.run_pubd = cfg.getboolean("run_pubd", section = myrpki_section) - self.run_rootd = cfg.getboolean("run_rootd", section = myrpki_section) - - if self.run_rootd and (not self.run_pubd or not self.run_rpkid): - raise CantRunRootd, "Can't run rootd unless also running rpkid and pubd" - - self.default_repository = cfg.get("default_repository", "", section = myrpki_section) - self.pubd_contact_info = cfg.get("pubd_contact_info", "", section = myrpki_section) - - self.rsync_module = cfg.get("publication_rsync_module", section = myrpki_section) - self.rsync_server = 
cfg.get("publication_rsync_server", section = myrpki_section) - - self.reset_identity(handle) - - - def reset_identity(self, handle): - """ - Select handle of current resource holding entity. - """ - - if handle is None: - raise MissingHandle - self.handle = handle - - - def set_logstream(self, logstream): - """ - Set log stream for this Zookeeper. The log stream is a file-like - object, or None to suppress all logging. - """ - - self.logstream = logstream - - - def log(self, msg): - """ - Send some text to this Zookeeper's log stream, if one is set. - """ - - if self.logstream is not None: - self.logstream.write(msg) - self.logstream.write("\n") - - - @property - def resource_ca(self): - """ - Get ResourceHolderCA object associated with current handle. - """ - - if self.handle is None: - raise HandleNotSet - return rpki.irdb.ResourceHolderCA.objects.get(handle = self.handle) - - - @property - def server_ca(self): - """ - Get ServerCA object. - """ - - return rpki.irdb.ServerCA.objects.get() - - - @django.db.transaction.commit_on_success - def initialize_server_bpki(self): - """ - Initialize server BPKI portion of an RPKI installation. Reads the - configuration file and generates the initial BPKI server - certificates needed to start daemons. - """ - - if self.run_rpkid or self.run_pubd: - server_ca, created = rpki.irdb.ServerCA.objects.get_or_certify() - rpki.irdb.ServerEE.objects.get_or_certify(issuer = server_ca, purpose = "irbe") - - if self.run_rpkid: - rpki.irdb.ServerEE.objects.get_or_certify(issuer = server_ca, purpose = "rpkid") - rpki.irdb.ServerEE.objects.get_or_certify(issuer = server_ca, purpose = "irdbd") - - if self.run_pubd: - rpki.irdb.ServerEE.objects.get_or_certify(issuer = server_ca, purpose = "pubd") - - - @django.db.transaction.commit_on_success - def initialize_resource_bpki(self): - """ - Initialize the resource-holding BPKI for an RPKI installation. - Returns XML describing the resource holder. 
- - This method is present primarily for backwards compatibility with - the old combined initialize() method which initialized both the - server BPKI and the default resource-holding BPKI in a single - method call. In the long run we want to replace this with - something that takes a handle as argument and creates the - resource-holding BPKI idenity if needed. - """ - - resource_ca, created = rpki.irdb.ResourceHolderCA.objects.get_or_certify(handle = self.handle) - return self.generate_identity() - - - def initialize(self): - """ - Backwards compatibility wrapper: calls initialize_server_bpki() - and initialize_resource_bpki(), returns latter's result. - """ - - self.initialize_server_bpki() - return self.initialize_resource_bpki() - - - def generate_identity(self): - """ - Generate identity XML. Broken out of .initialize() because it's - easier for the GUI this way. - """ - - e = Element("identity", handle = self.handle) - B64Element(e, "bpki_ta", self.resource_ca.certificate) - return etree_wrapper(e, msg = 'This is the "identity" file you will need to send to your parent') - - - @django.db.transaction.commit_on_success - def delete_self(self): - """ - Delete the ResourceHolderCA object corresponding to the current handle. - This corresponds to deleting an rpkid object. - - This code assumes the normal Django cascade-on-delete behavior, - that is, we assume that deleting the ResourceHolderCA object - deletes all the subordinate objects that refer to it via foreign - key relationships. 
- """ - - resource_ca = self.resource_ca - if resource_ca is not None: - resource_ca.delete() - else: - self.log("No such ResourceHolderCA \"%s\"" % self.handle) - - - @django.db.transaction.commit_on_success - def configure_rootd(self): - - assert self.run_rpkid and self.run_pubd and self.run_rootd - - rpki.irdb.Rootd.objects.get_or_certify( - issuer = self.resource_ca, - service_uri = "http://localhost:%s/" % self.cfg.get("rootd_server_port", section = myrpki_section)) - - return self.generate_rootd_repository_offer() - - - def generate_rootd_repository_offer(self): - """ - Generate repository offer for rootd. Split out of - configure_rootd() because that's easier for the GUI. - """ - - # The following assumes we'll set up the respository manually. - # Not sure this is a reasonable assumption, particularly if we - # ever fix rootd to use the publication protocol. - - try: - self.resource_ca.repositories.get(handle = self.handle) - return None - - except rpki.irdb.Repository.DoesNotExist: - e = Element("repository", type = "offer", handle = self.handle, parent_handle = self.handle) - B64Element(e, "bpki_client_ta", self.resource_ca.certificate) - return etree_wrapper(e, msg = 'This is the "repository offer" file for you to use if you want to publish in your own repository') - - - def write_bpki_files(self): - """ - Write out BPKI certificate, key, and CRL files for daemons that - need them. 
- """ - - writer = PEM_writer(self.logstream) - - if self.run_rpkid: - rpkid = self.server_ca.ee_certificates.get(purpose = "rpkid") - writer(self.cfg.get("bpki-ta", section = rpkid_section), self.server_ca.certificate) - writer(self.cfg.get("rpkid-key", section = rpkid_section), rpkid.private_key) - writer(self.cfg.get("rpkid-cert", section = rpkid_section), rpkid.certificate) - writer(self.cfg.get("irdb-cert", section = rpkid_section), - self.server_ca.ee_certificates.get(purpose = "irdbd").certificate) - writer(self.cfg.get("irbe-cert", section = rpkid_section), - self.server_ca.ee_certificates.get(purpose = "irbe").certificate) - - if self.run_pubd: - pubd = self.server_ca.ee_certificates.get(purpose = "pubd") - writer(self.cfg.get("bpki-ta", section = pubd_section), self.server_ca.certificate) - writer(self.cfg.get("pubd-key", section = pubd_section), pubd.private_key) - writer(self.cfg.get("pubd-cert", section = pubd_section), pubd.certificate) - writer(self.cfg.get("irbe-cert", section = pubd_section), - self.server_ca.ee_certificates.get(purpose = "irbe").certificate) - - if self.run_rootd: - try: - rootd = rpki.irdb.ResourceHolderCA.objects.get(handle = self.handle).rootd - writer(self.cfg.get("bpki-ta", section = rootd_section), self.server_ca.certificate) - writer(self.cfg.get("rootd-bpki-crl", section = rootd_section), self.server_ca.latest_crl) - writer(self.cfg.get("rootd-bpki-key", section = rootd_section), rootd.private_key) - writer(self.cfg.get("rootd-bpki-cert", section = rootd_section), rootd.certificate) - writer(self.cfg.get("child-bpki-cert", section = rootd_section), rootd.issuer.certificate) - except rpki.irdb.ResourceHolderCA.DoesNotExist: - self.log("rootd enabled but resource holding entity not yet configured, skipping rootd setup") - except rpki.irdb.Rootd.DoesNotExist: - self.log("rootd enabled but not yet configured, skipping rootd setup") - - - @django.db.transaction.commit_on_success - def update_bpki(self): - """ - Update BPKI 
certificates. Assumes an existing RPKI installation. - - Basic plan here is to reissue all BPKI certificates we can, right - now. In the long run we might want to be more clever about only - touching ones that need maintenance, but this will do for a start. - - We also reissue CRLs for all CAs. - - Most likely this should be run under cron. - """ - - for model in (rpki.irdb.ServerCA, - rpki.irdb.ResourceHolderCA, - rpki.irdb.ServerEE, - rpki.irdb.Referral, - rpki.irdb.Rootd, - rpki.irdb.HostedCA, - rpki.irdb.BSC, - rpki.irdb.Child, - rpki.irdb.Parent, - rpki.irdb.Client, - rpki.irdb.Repository): - for obj in model.objects.all(): - self.log("Regenerating BPKI certificate %s" % obj.certificate.getSubject()) - obj.avow() - obj.save() - - self.log("Regenerating Server BPKI CRL") - self.server_ca.generate_crl() - self.server_ca.save() - - for ca in rpki.irdb.ResourceHolderCA.objects.all(): - self.log("Regenerating BPKI CRL for Resource Holder %s" % ca.handle) - ca.generate_crl() - ca.save() - - - @django.db.transaction.commit_on_success - def synchronize_bpki(self): - """ - Synchronize BPKI updates. This is separate from .update_bpki() - because this requires rpkid to be running and none of the other - BPKI update stuff does; there may be circumstances under which it - makes sense to do the rest of the BPKI update and allow this to - fail with a warning. 
- """ - - if self.run_rpkid: - updates = [] - - updates.extend( - rpki.left_right.self_elt.make_pdu( - action = "set", - tag = "%s__self" % ca.handle, - self_handle = ca.handle, - bpki_cert = ca.certificate) - for ca in rpki.irdb.ResourceHolderCA.objects.all()) - - updates.extend( - rpki.left_right.bsc_elt.make_pdu( - action = "set", - tag = "%s__bsc__%s" % (bsc.issuer.handle, bsc.handle), - self_handle = bsc.issuer.handle, - bsc_handle = bsc.handle, - signing_cert = bsc.certificate, - signing_cert_crl = bsc.issuer.latest_crl) - for bsc in rpki.irdb.BSC.objects.all()) - - updates.extend( - rpki.left_right.repository_elt.make_pdu( - action = "set", - tag = "%s__repository__%s" % (repository.issuer.handle, repository.handle), - self_handle = repository.issuer.handle, - repository_handle = repository.handle, - bpki_cert = repository.certificate) - for repository in rpki.irdb.Repository.objects.all()) - - updates.extend( - rpki.left_right.parent_elt.make_pdu( - action = "set", - tag = "%s__parent__%s" % (parent.issuer.handle, parent.handle), - self_handle = parent.issuer.handle, - parent_handle = parent.handle, - bpki_cms_cert = parent.certificate) - for parent in rpki.irdb.Parent.objects.all()) - - updates.extend( - rpki.left_right.parent_elt.make_pdu( - action = "set", - tag = "%s__rootd" % rootd.issuer.handle, - self_handle = rootd.issuer.handle, - parent_handle = rootd.issuer.handle, - bpki_cms_cert = rootd.certificate) - for rootd in rpki.irdb.Rootd.objects.all()) - - updates.extend( - rpki.left_right.child_elt.make_pdu( - action = "set", - tag = "%s__child__%s" % (child.issuer.handle, child.handle), - self_handle = child.issuer.handle, - child_handle = child.handle, - bpki_cert = child.certificate) - for child in rpki.irdb.Child.objects.all()) - - if updates: - self.check_error_report(self.call_rpkid(updates)) - - if self.run_pubd: - updates = [] - - updates.append( - rpki.publication.config_elt.make_pdu( - action = "set", - bpki_crl = self.server_ca.latest_crl)) 
- - updates.extend( - rpki.publication.client_elt.make_pdu( - action = "set", - client_handle = client.handle, - bpki_cert = client.certificate) - for client in self.server_ca.clients.all()) - - if updates: - self.check_error_report(self.call_pubd(updates)) - - - @django.db.transaction.commit_on_success - def configure_child(self, filename, child_handle = None, valid_until = None): - """ - Configure a new child of this RPKI entity, given the child's XML - identity file as an input. Extracts the child's data from the - XML, cross-certifies the child's resource-holding BPKI - certificate, and generates an XML file describing the relationship - between the child and this parent, including this parent's BPKI - data and up-down protocol service URI. - """ - - c = etree_read(filename) - - if child_handle is None: - child_handle = c.get("handle") - - if valid_until is None: - valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 365) - else: - valid_until = rpki.sundial.datetime.fromXMLtime(valid_until) - if valid_until < rpki.sundial.now(): - raise PastExpiration, "Specified new expiration time %s has passed" % valid_until - - self.log("Child calls itself %r, we call it %r" % (c.get("handle"), child_handle)) - - child, created = rpki.irdb.Child.objects.get_or_certify( - issuer = self.resource_ca, - handle = child_handle, - ta = rpki.x509.X509(Base64 = c.findtext("bpki_ta")), - valid_until = valid_until) - - return self.generate_parental_response(child), child_handle - - - @django.db.transaction.commit_on_success - def generate_parental_response(self, child): - """ - Generate parental response XML. Broken out of .configure_child() - for GUI. 
- """ - - service_uri = "http://%s:%s/up-down/%s/%s" % ( - self.cfg.get("rpkid_server_host", section = myrpki_section), - self.cfg.get("rpkid_server_port", section = myrpki_section), - self.handle, child.handle) - - e = Element("parent", parent_handle = self.handle, child_handle = child.handle, - service_uri = service_uri, valid_until = str(child.valid_until)) - B64Element(e, "bpki_resource_ta", self.resource_ca.certificate) - B64Element(e, "bpki_child_ta", child.ta) - - try: - if self.default_repository: - repo = self.resource_ca.repositories.get(handle = self.default_repository) - else: - repo = self.resource_ca.repositories.get() - except rpki.irdb.Repository.DoesNotExist: - repo = None - - if repo is None: - self.log("Couldn't find any usable repositories, not giving referral") - - elif repo.handle == self.handle: - SubElement(e, "repository", type = "offer") - - else: - proposed_sia_base = repo.sia_base + child.handle + "/" - referral_cert, created = rpki.irdb.Referral.objects.get_or_certify(issuer = self.resource_ca) - auth = rpki.x509.SignedReferral() - auth.set_content(B64Element(None, myrpki_namespaceQName + "referral", child.ta, - version = myrpki_version, - authorized_sia_base = proposed_sia_base)) - auth.schema_check() - auth.sign(referral_cert.private_key, referral_cert.certificate, self.resource_ca.latest_crl) - - r = SubElement(e, "repository", type = "referral") - B64Element(r, "authorization", auth, referrer = repo.client_handle) - SubElement(r, "contact_info") - - return etree_wrapper(e, msg = "Send this file back to the child you just configured") - - - @django.db.transaction.commit_on_success - def delete_child(self, child_handle): - """ - Delete a child of this RPKI entity. 
- """ - - self.resource_ca.children.get(handle = child_handle).delete() - - - @django.db.transaction.commit_on_success - def configure_parent(self, filename, parent_handle = None): - """ - Configure a new parent of this RPKI entity, given the output of - the parent's configure_child command as input. Reads the parent's - response XML, extracts the parent's BPKI and service URI - information, cross-certifies the parent's BPKI data into this - entity's BPKI, and checks for offers or referrals of publication - service. If a publication offer or referral is present, we - generate a request-for-service message to that repository, in case - the user wants to avail herself of the referral or offer. - """ - - p = etree_read(filename) - - if parent_handle is None: - parent_handle = p.get("parent_handle") - - r = p.find("repository") - - repository_type = "none" - referrer = None - referral_authorization = None - - if r is not None: - repository_type = r.get("type") - - if repository_type == "referral": - a = r.find("authorization") - referrer = a.get("referrer") - referral_authorization = rpki.x509.SignedReferral(Base64 = a.text) - - self.log("Parent calls itself %r, we call it %r" % (p.get("parent_handle"), parent_handle)) - self.log("Parent calls us %r" % p.get("child_handle")) - - parent, created = rpki.irdb.Parent.objects.get_or_certify( - issuer = self.resource_ca, - handle = parent_handle, - child_handle = p.get("child_handle"), - parent_handle = p.get("parent_handle"), - service_uri = p.get("service_uri"), - ta = rpki.x509.X509(Base64 = p.findtext("bpki_resource_ta")), - repository_type = repository_type, - referrer = referrer, - referral_authorization = referral_authorization) - - return self.generate_repository_request(parent), parent_handle - - - def generate_repository_request(self, parent): - """ - Generate repository request for a given parent. 
- """ - - e = Element("repository", handle = self.handle, - parent_handle = parent.handle, type = parent.repository_type) - if parent.repository_type == "referral": - B64Element(e, "authorization", parent.referral_authorization, referrer = parent.referrer) - SubElement(e, "contact_info") - B64Element(e, "bpki_client_ta", self.resource_ca.certificate) - return etree_wrapper(e, msg = "This is the file to send to the repository operator") - - - @django.db.transaction.commit_on_success - def delete_parent(self, parent_handle): - """ - Delete a parent of this RPKI entity. - """ - - self.resource_ca.parents.get(handle = parent_handle).delete() - - - @django.db.transaction.commit_on_success - def delete_rootd(self): - """ - Delete rootd associated with this RPKI entity. - """ - - self.resource_ca.rootd.delete() - - - @django.db.transaction.commit_on_success - def configure_publication_client(self, filename, sia_base = None, flat = False): - """ - Configure publication server to know about a new client, given the - client's request-for-service message as input. Reads the client's - request for service, cross-certifies the client's BPKI data, and - generates a response message containing the repository's BPKI data - and service URI. 
- """ - - client = etree_read(filename) - - client_ta = rpki.x509.X509(Base64 = client.findtext("bpki_client_ta")) - - if sia_base is None and flat: - self.log("Flat publication structure forced, homing client at top-level") - sia_base = "rsync://%s/%s/%s/" % (self.rsync_server, self.rsync_module, client.get("handle")) - - if sia_base is None and client.get("type") == "referral": - self.log("This looks like a referral, checking") - try: - auth = client.find("authorization") - referrer = self.server_ca.clients.get(handle = auth.get("referrer")) - referral_cms = rpki.x509.SignedReferral(Base64 = auth.text) - referral_xml = referral_cms.unwrap(ta = (referrer.certificate, self.server_ca.certificate)) - if rpki.x509.X509(Base64 = referral_xml.text) != client_ta: - raise BadXMLMessage, "Referral trust anchor does not match" - sia_base = referral_xml.get("authorized_sia_base") - except rpki.irdb.Client.DoesNotExist: - self.log("We have no record of the client (%s) alleged to have made this referral" % auth.get("referrer")) - - if sia_base is None and client.get("type") == "offer": - self.log("This looks like an offer, checking") - try: - parent = rpki.irdb.ResourceHolderCA.objects.get(children__ta__exact = client_ta) - if "/" in parent.repositories.get(ta = self.server_ca.certificate).client_handle: - self.log("Client's parent is not top-level, this is not a valid offer") - else: - self.log("Found client and its parent, nesting") - sia_base = "rsync://%s/%s/%s/%s/" % (self.rsync_server, self.rsync_module, - parent.handle, client.get("handle")) - except rpki.irdb.Repository.DoesNotExist: - self.log("Found client's parent, but repository isn't set, this shouldn't happen!") - except rpki.irdb.ResourceHolderCA.DoesNotExist: - try: - rpki.irdb.Rootd.objects.get(issuer__certificate__exact = client_ta) - except rpki.irdb.Rootd.DoesNotExist: - self.log("We don't host this client's parent, so we didn't make this offer") - else: - self.log("This client's parent is rootd") - - if 
sia_base is None: - self.log("Don't know where to nest this client, defaulting to top-level") - sia_base = "rsync://%s/%s/%s/" % (self.rsync_server, self.rsync_module, client.get("handle")) - - if not sia_base.startswith("rsync://"): - raise BadXMLMessage, "Malformed sia_base parameter %r, should start with 'rsync://'" % sia_base - - client_handle = "/".join(sia_base.rstrip("/").split("/")[4:]) - - parent_handle = client.get("parent_handle") - - self.log("Client calls itself %r, we call it %r" % (client.get("handle"), client_handle)) - self.log("Client says its parent handle is %r" % parent_handle) - - client, created = rpki.irdb.Client.objects.get_or_certify( - issuer = self.server_ca, - handle = client_handle, - parent_handle = parent_handle, - ta = client_ta, - sia_base = sia_base) - - return self.generate_repository_response(client), client_handle - - - def generate_repository_response(self, client): - """ - Generate repository response XML to a given client. - """ - - service_uri = "http://%s:%s/client/%s" % ( - self.cfg.get("pubd_server_host", section = myrpki_section), - self.cfg.get("pubd_server_port", section = myrpki_section), - client.handle) - - e = Element("repository", type = "confirmed", - client_handle = client.handle, - parent_handle = client.parent_handle, - sia_base = client.sia_base, - service_uri = service_uri) - - B64Element(e, "bpki_server_ta", self.server_ca.certificate) - B64Element(e, "bpki_client_ta", client.ta) - SubElement(e, "contact_info").text = self.pubd_contact_info - return etree_wrapper(e, msg = "Send this file back to the publication client you just configured") - - - @django.db.transaction.commit_on_success - def delete_publication_client(self, client_handle): - """ - Delete a publication client of this RPKI entity. 
- """ - - self.server_ca.clients.get(handle = client_handle).delete() - - - @django.db.transaction.commit_on_success - def configure_repository(self, filename, parent_handle = None): - """ - Configure a publication repository for this RPKI entity, given the - repository's response to our request-for-service message as input. - Reads the repository's response, extracts and cross-certifies the - BPKI data and service URI, and links the repository data with the - corresponding parent data in our local database. - """ - - r = etree_read(filename) - - if parent_handle is None: - parent_handle = r.get("parent_handle") - - self.log("Repository calls us %r" % (r.get("client_handle"))) - self.log("Repository response associated with parent_handle %r" % parent_handle) - - try: - if parent_handle == self.handle: - turtle = self.resource_ca.rootd - else: - turtle = self.resource_ca.parents.get(handle = parent_handle) - - except (rpki.irdb.Parent.DoesNotExist, rpki.irdb.Rootd.DoesNotExist): - self.log("Could not find parent %r in our database" % parent_handle) - - else: - rpki.irdb.Repository.objects.get_or_certify( - issuer = self.resource_ca, - handle = parent_handle, - client_handle = r.get("client_handle"), - service_uri = r.get("service_uri"), - sia_base = r.get("sia_base"), - ta = rpki.x509.X509(Base64 = r.findtext("bpki_server_ta")), - turtle = turtle) - - - @django.db.transaction.commit_on_success - def delete_repository(self, repository_handle): - """ - Delete a repository of this RPKI entity. - """ - - self.resource_ca.repositories.get(handle = repository_handle).delete() - - - @django.db.transaction.commit_on_success - def renew_children(self, child_handle, valid_until = None): - """ - Update validity period for one child entity or, if child_handle is - None, for all child entities. 
- """ - - if child_handle is None: - children = self.resource_ca.children.all() - else: - children = self.resource_ca.children.filter(handle = child_handle) - - if valid_until is None: - valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 365) - else: - valid_until = rpki.sundial.datetime.fromXMLtime(valid_until) - if valid_until < rpki.sundial.now(): - raise PastExpiration, "Specified new expiration time %s has passed" % valid_until - - self.log("New validity date %s" % valid_until) - - for child in children: - child.valid_until = valid_until - child.save() - - - @django.db.transaction.commit_on_success - def load_prefixes(self, filename, ignore_missing_children = False): - """ - Whack IRDB to match prefixes.csv. - """ - - grouped4 = {} - grouped6 = {} - - for handle, prefix in csv_reader(filename, columns = 2): - grouped = grouped6 if ":" in prefix else grouped4 - if handle not in grouped: - grouped[handle] = [] - grouped[handle].append(prefix) - - primary_keys = [] - - for version, grouped, rset in ((4, grouped4, rpki.resource_set.resource_set_ipv4), - (6, grouped6, rpki.resource_set.resource_set_ipv6)): - for handle, prefixes in grouped.iteritems(): - try: - child = self.resource_ca.children.get(handle = handle) - except rpki.irdb.Child.DoesNotExist: - if not ignore_missing_children: - raise - else: - for prefix in rset(",".join(prefixes)): - obj, created = rpki.irdb.ChildNet.objects.get_or_create( - child = child, - start_ip = str(prefix.min), - end_ip = str(prefix.max), - version = version) - primary_keys.append(obj.pk) - - q = rpki.irdb.ChildNet.objects - q = q.filter(child__issuer__exact = self.resource_ca) - q = q.exclude(pk__in = primary_keys) - q.delete() - - - @django.db.transaction.commit_on_success - def load_asns(self, filename, ignore_missing_children = False): - """ - Whack IRDB to match asns.csv. 
- """ - - grouped = {} - - for handle, asn in csv_reader(filename, columns = 2): - if handle not in grouped: - grouped[handle] = [] - grouped[handle].append(asn) - - primary_keys = [] - - for handle, asns in grouped.iteritems(): - try: - child = self.resource_ca.children.get(handle = handle) - except rpki.irdb.Child.DoesNotExist: - if not ignore_missing_children: - raise - else: - for asn in rpki.resource_set.resource_set_as(",".join(asns)): - obj, created = rpki.irdb.ChildASN.objects.get_or_create( - child = child, - start_as = str(asn.min), - end_as = str(asn.max)) - primary_keys.append(obj.pk) - - q = rpki.irdb.ChildASN.objects - q = q.filter(child__issuer__exact = self.resource_ca) - q = q.exclude(pk__in = primary_keys) - q.delete() - - - @django.db.transaction.commit_on_success - def load_roa_requests(self, filename): - """ - Whack IRDB to match roa.csv. - """ - - grouped = {} - - # format: p/n-m asn group - for pnm, asn, group in csv_reader(filename, columns = 3): - key = (asn, group) - if key not in grouped: - grouped[key] = [] - grouped[key].append(pnm) - - # Deleting and recreating all the ROA requests is inefficient, - # but rpkid's current representation of ROA requests is wrong - # (see #32), so it's not worth a lot of effort here as we're - # just going to have to rewrite this soon anyway. - - self.resource_ca.roa_requests.all().delete() - - for key, pnms in grouped.iteritems(): - asn, group = key - - roa_request = self.resource_ca.roa_requests.create(asn = asn) - - for pnm in pnms: - if ":" in pnm: - p = rpki.resource_set.roa_prefix_ipv6.parse_str(pnm) - v = 6 - else: - p = rpki.resource_set.roa_prefix_ipv4.parse_str(pnm) - v = 4 - roa_request.prefixes.create( - version = v, - prefix = str(p.prefix), - prefixlen = int(p.prefixlen), - max_prefixlen = int(p.max_prefixlen)) - - - @django.db.transaction.commit_on_success - def load_ghostbuster_requests(self, filename, parent = None): - """ - Whack IRDB to match ghostbusters.vcard. 
- - This accepts one or more vCards from a file. - """ - - self.resource_ca.ghostbuster_requests.filter(parent = parent).delete() - - vcard = [] - - for line in open(filename, "r"): - if not vcard and not line.upper().startswith("BEGIN:VCARD"): - continue - vcard.append(line) - if line.upper().startswith("END:VCARD"): - self.resource_ca.ghostbuster_requests.create(vcard = "".join(vcard), parent = parent) - vcard = [] - - - def call_rpkid(self, *pdus): - """ - Issue a call to rpkid, return result. - - Implementation is a little silly, constructs a wrapper object, - invokes it once, then throws it away. Hard to do better without - rewriting a bit of the HTTP code, as we want to be sure we're - using the current BPKI certificate and key objects. - """ - - url = "http://%s:%s/left-right" % ( - self.cfg.get("rpkid_server_host", section = myrpki_section), - self.cfg.get("rpkid_server_port", section = myrpki_section)) - - rpkid = self.server_ca.ee_certificates.get(purpose = "rpkid") - irbe = self.server_ca.ee_certificates.get(purpose = "irbe") - - if len(pdus) == 1 and isinstance(pdus[0], types.GeneratorType): - pdus = tuple(pdus[0]) - elif len(pdus) == 1 and isinstance(pdus[0], (tuple, list)): - pdus = pdus[0] - - call_rpkid = rpki.async.sync_wrapper(rpki.http.caller( - proto = rpki.left_right, - client_key = irbe.private_key, - client_cert = irbe.certificate, - server_ta = self.server_ca.certificate, - server_cert = rpkid.certificate, - url = url, - debug = self.show_xml)) - - return call_rpkid(*pdus) - - - def run_rpkid_now(self): - """ - Poke rpkid to immediately run the cron job for the current handle. - - This method is used by the GUI when a user has changed something in the - IRDB (ghostbuster, roa) which does not require a full synchronize() call, - to force the object to be immediately issued. 
- """ - - self.call_rpkid(rpki.left_right.self_elt.make_pdu( - action = "set", self_handle = self.handle, run_now = "yes")) - - - def publish_world_now(self): - """ - Poke rpkid to (re)publish everything for the current handle. - """ - - self.call_rpkid(rpki.left_right.self_elt.make_pdu( - action = "set", self_handle = self.handle, publish_world_now = "yes")) - - - def reissue(self): - """ - Poke rpkid to reissue everything for the current handle. - """ - - self.call_rpkid(rpki.left_right.self_elt.make_pdu( - action = "set", self_handle = self.handle, reissue = "yes")) - - def rekey(self): - """ - Poke rpkid to rekey all RPKI certificates received for the current - handle. - """ - - self.call_rpkid(rpki.left_right.self_elt.make_pdu( - action = "set", self_handle = self.handle, rekey = "yes")) - - - def revoke(self): - """ - Poke rpkid to revoke old RPKI keys for the current handle. - """ - - self.call_rpkid(rpki.left_right.self_elt.make_pdu( - action = "set", self_handle = self.handle, revoke = "yes")) - - - def revoke_forgotten(self): - """ - Poke rpkid to revoke old forgotten RPKI keys for the current handle. - """ - - self.call_rpkid(rpki.left_right.self_elt.make_pdu( - action = "set", self_handle = self.handle, revoke_forgotten = "yes")) - - - def clear_all_sql_cms_replay_protection(self): - """ - Tell rpkid and pubd to clear replay protection for all SQL-based - entities. 
This is a fairly blunt instrument, but as we don't - expect this to be necessary except in the case of gross - misconfiguration, it should suffice - """ - - self.call_rpkid(rpki.left_right.self_elt.make_pdu(action = "set", self_handle = ca.handle, - clear_replay_protection = "yes") - for ca in rpki.irdb.ResourceHolderCA.objects.all()) - if self.run_pubd: - self.call_pubd(rpki.publication.client_elt.make_pdu(action = "set", - client_handle = client.handle, - clear_replay_protection = "yes") - for client in self.server_ca.clients.all()) - - - def call_pubd(self, *pdus): - """ - Issue a call to pubd, return result. - - Implementation is a little silly, constructs a wrapper object, - invokes it once, then throws it away. Hard to do better without - rewriting a bit of the HTTP code, as we want to be sure we're - using the current BPKI certificate and key objects. - """ - - url = "http://%s:%s/control" % ( - self.cfg.get("pubd_server_host", section = myrpki_section), - self.cfg.get("pubd_server_port", section = myrpki_section)) - - pubd = self.server_ca.ee_certificates.get(purpose = "pubd") - irbe = self.server_ca.ee_certificates.get(purpose = "irbe") - - if len(pdus) == 1 and isinstance(pdus[0], types.GeneratorType): - pdus = tuple(pdus[0]) - elif len(pdus) == 1 and isinstance(pdus[0], (tuple, list)): - pdus = pdus[0] - - call_pubd = rpki.async.sync_wrapper(rpki.http.caller( - proto = rpki.publication, - client_key = irbe.private_key, - client_cert = irbe.certificate, - server_ta = self.server_ca.certificate, - server_cert = pubd.certificate, - url = url, - debug = self.show_xml)) - - return call_pubd(*pdus) - - - def check_error_report(self, pdus): - """ - Check a response from rpkid or pubd for error_report PDUs, log and - throw exceptions as needed. 
- """ - - if any(isinstance(pdu, (rpki.left_right.report_error_elt, rpki.publication.report_error_elt)) for pdu in pdus): - for pdu in pdus: - if isinstance(pdu, rpki.left_right.report_error_elt): - self.log("rpkid reported failure: %s" % pdu.error_code) - elif isinstance(pdu, rpki.publication.report_error_elt): - self.log("pubd reported failure: %s" % pdu.error_code) - else: - continue - if pdu.error_text: - self.log(pdu.error_text) - raise CouldntTalkToDaemon - - - @django.db.transaction.commit_on_success - def synchronize(self, *handles_to_poke): - """ - Configure RPKI daemons with the data built up by the other - commands in this program. Commands which modify the IRDB and want - to whack everything into sync should call this when they're done, - but be warned that this can be slow with a lot of CAs. - - Any arguments given are handles of CAs which should be poked with a - operation. - """ - - for ca in rpki.irdb.ResourceHolderCA.objects.all(): - self.synchronize_rpkid_one_ca_core(ca, ca.handle in handles_to_poke) - self.synchronize_pubd_core() - self.synchronize_rpkid_deleted_core() - - - @django.db.transaction.commit_on_success - def synchronize_ca(self, ca = None, poke = False): - """ - Synchronize one CA. Most commands which modify a CA should call - this. CA to synchronize defaults to the current resource CA. - """ - - if ca is None: - ca = self.resource_ca - self.synchronize_rpkid_one_ca_core(ca, poke) - - - @django.db.transaction.commit_on_success - def synchronize_deleted_ca(self): - """ - Delete CAs which are present in rpkid's database but not in the - IRDB. - """ - - self.synchronize_rpkid_deleted_core() - - - @django.db.transaction.commit_on_success - def synchronize_pubd(self): - """ - Synchronize pubd. Most commands which modify pubd should call this. - """ - - self.synchronize_pubd_core() - - - def synchronize_rpkid_one_ca_core(self, ca, poke = False): - """ - Synchronize one CA. This is the core synchronization code. 
Don't - call this directly, instead call one of the methods that calls - this inside a Django commit wrapper. - - This method configures rpkid with data built up by the other - commands in this program. Most commands which modify IRDB values - related to rpkid should call this when they're done. - - If poke is True, we append a left-right run_now operation for this - CA to the end of whatever other commands this method generates. - """ - - # We can use a single BSC for everything -- except BSC key - # rollovers. Drive off that bridge when we get to it. - - bsc_handle = "bsc" - - # A default RPKI CRL cycle time of six hours seems sane. One - # might make a case for a day instead, but we've been running with - # six hours for a while now and haven't seen a lot of whining. - - self_crl_interval = self.cfg.getint("self_crl_interval", 6 * 60 * 60, section = myrpki_section) - - # regen_margin now just controls how long before RPKI certificate - # expiration we should regenerate; it used to control the interval - # before RPKI CRL staleness at which to regenerate the CRL, but - # using the same timer value for both of these is hopeless. - # - # A default regeneration margin of two weeks gives enough time for - # humans to react. We add a two hour fudge factor in the hope - # that this will regenerate certificates just *before* the - # companion cron job warns of impending doom. - - self_regen_margin = self.cfg.getint("self_regen_margin", 14 * 24 * 60 * 60 + 2 * 60, section = myrpki_section) - - # See what rpkid already has on file for this entity. 
- - rpkid_reply = self.call_rpkid( - rpki.left_right.self_elt.make_pdu( action = "get", tag = "self", self_handle = ca.handle), - rpki.left_right.bsc_elt.make_pdu( action = "list", tag = "bsc", self_handle = ca.handle), - rpki.left_right.repository_elt.make_pdu(action = "list", tag = "repository", self_handle = ca.handle), - rpki.left_right.parent_elt.make_pdu( action = "list", tag = "parent", self_handle = ca.handle), - rpki.left_right.child_elt.make_pdu( action = "list", tag = "child", self_handle = ca.handle)) - - self_pdu = rpkid_reply[0] - bsc_pdus = dict((x.bsc_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.bsc_elt)) - repository_pdus = dict((x.repository_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.repository_elt)) - parent_pdus = dict((x.parent_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.parent_elt)) - child_pdus = dict((x.child_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.child_elt)) - - rpkid_query = [] - - self_cert, created = rpki.irdb.HostedCA.objects.get_or_certify( - issuer = self.server_ca, - hosted = ca) - - # There should be exactly one object per hosted entity, by definition - - if (isinstance(self_pdu, rpki.left_right.report_error_elt) or - self_pdu.crl_interval != self_crl_interval or - self_pdu.regen_margin != self_regen_margin or - self_pdu.bpki_cert != self_cert.certificate): - rpkid_query.append(rpki.left_right.self_elt.make_pdu( - action = "create" if isinstance(self_pdu, rpki.left_right.report_error_elt) else "set", - tag = "self", - self_handle = ca.handle, - bpki_cert = ca.certificate, - crl_interval = self_crl_interval, - regen_margin = self_regen_margin)) - - # In general we only need one per . BSC objects - # are a little unusual in that the keypair and PKCS #10 - # subelement is generated by rpkid, so complete setup requires - # two round trips. 
- - bsc_pdu = bsc_pdus.pop(bsc_handle, None) - - if bsc_pdu is None: - rpkid_query.append(rpki.left_right.bsc_elt.make_pdu( - action = "create", - tag = "bsc", - self_handle = ca.handle, - bsc_handle = bsc_handle, - generate_keypair = "yes")) - - elif bsc_pdu.pkcs10_request is None: - rpkid_query.append(rpki.left_right.bsc_elt.make_pdu( - action = "set", - tag = "bsc", - self_handle = ca.handle, - bsc_handle = bsc_handle, - generate_keypair = "yes")) - - rpkid_query.extend(rpki.left_right.bsc_elt.make_pdu( - action = "destroy", self_handle = ca.handle, bsc_handle = b) for b in bsc_pdus) - - # If we've already got actions queued up, run them now, so we - # can finish setting up the BSC before anything tries to use it. - - if rpkid_query: - rpkid_query.append(rpki.left_right.bsc_elt.make_pdu(action = "list", tag = "bsc", self_handle = ca.handle)) - rpkid_reply = self.call_rpkid(rpkid_query) - bsc_pdus = dict((x.bsc_handle, x) - for x in rpkid_reply - if isinstance(x, rpki.left_right.bsc_elt) and x.action == "list") - bsc_pdu = bsc_pdus.pop(bsc_handle, None) - self.check_error_report(rpkid_reply) - - rpkid_query = [] - - assert bsc_pdu.pkcs10_request is not None - - bsc, created = rpki.irdb.BSC.objects.get_or_certify( - issuer = ca, - handle = bsc_handle, - pkcs10 = bsc_pdu.pkcs10_request) - - if bsc_pdu.signing_cert != bsc.certificate or bsc_pdu.signing_cert_crl != ca.latest_crl: - rpkid_query.append(rpki.left_right.bsc_elt.make_pdu( - action = "set", - tag = "bsc", - self_handle = ca.handle, - bsc_handle = bsc_handle, - signing_cert = bsc.certificate, - signing_cert_crl = ca.latest_crl)) - - # At present we need one per , not because - # rpkid requires that, but because pubd does. 
pubd probably should - # be fixed to support a single client allowed to update multiple - # trees, but for the moment the easiest way forward is just to - # enforce a 1:1 mapping between and objects - - for repository in ca.repositories.all(): - - repository_pdu = repository_pdus.pop(repository.handle, None) - - if (repository_pdu is None or - repository_pdu.bsc_handle != bsc_handle or - repository_pdu.peer_contact_uri != repository.service_uri or - repository_pdu.bpki_cert != repository.certificate): - rpkid_query.append(rpki.left_right.repository_elt.make_pdu( - action = "create" if repository_pdu is None else "set", - tag = repository.handle, - self_handle = ca.handle, - repository_handle = repository.handle, - bsc_handle = bsc_handle, - peer_contact_uri = repository.service_uri, - bpki_cert = repository.certificate)) - - rpkid_query.extend(rpki.left_right.repository_elt.make_pdu( - action = "destroy", self_handle = ca.handle, repository_handle = r) for r in repository_pdus) - - # setup code currently assumes 1:1 mapping between - # and , and further assumes that the handles - # for an associated pair are the identical (that is: - # parent.repository_handle == parent.parent_handle). - # - # If no such repository exists, our choices are to ignore the - # parent entry or throw an error. For now, we ignore the parent. 
- - for parent in ca.parents.all(): - - try: - - parent_pdu = parent_pdus.pop(parent.handle, None) - - if (parent_pdu is None or - parent_pdu.bsc_handle != bsc_handle or - parent_pdu.repository_handle != parent.handle or - parent_pdu.peer_contact_uri != parent.service_uri or - parent_pdu.sia_base != parent.repository.sia_base or - parent_pdu.sender_name != parent.child_handle or - parent_pdu.recipient_name != parent.parent_handle or - parent_pdu.bpki_cms_cert != parent.certificate): - rpkid_query.append(rpki.left_right.parent_elt.make_pdu( - action = "create" if parent_pdu is None else "set", - tag = parent.handle, - self_handle = ca.handle, - parent_handle = parent.handle, - bsc_handle = bsc_handle, - repository_handle = parent.handle, - peer_contact_uri = parent.service_uri, - sia_base = parent.repository.sia_base, - sender_name = parent.child_handle, - recipient_name = parent.parent_handle, - bpki_cms_cert = parent.certificate)) - - except rpki.irdb.Repository.DoesNotExist: - pass - - try: - - parent_pdu = parent_pdus.pop(ca.handle, None) - - if (parent_pdu is None or - parent_pdu.bsc_handle != bsc_handle or - parent_pdu.repository_handle != ca.handle or - parent_pdu.peer_contact_uri != ca.rootd.service_uri or - parent_pdu.sia_base != ca.rootd.repository.sia_base or - parent_pdu.sender_name != ca.handle or - parent_pdu.recipient_name != ca.handle or - parent_pdu.bpki_cms_cert != ca.rootd.certificate): - rpkid_query.append(rpki.left_right.parent_elt.make_pdu( - action = "create" if parent_pdu is None else "set", - tag = ca.handle, - self_handle = ca.handle, - parent_handle = ca.handle, - bsc_handle = bsc_handle, - repository_handle = ca.handle, - peer_contact_uri = ca.rootd.service_uri, - sia_base = ca.rootd.repository.sia_base, - sender_name = ca.handle, - recipient_name = ca.handle, - bpki_cms_cert = ca.rootd.certificate)) - - except rpki.irdb.Rootd.DoesNotExist: - pass - - rpkid_query.extend(rpki.left_right.parent_elt.make_pdu( - action = "destroy", 
self_handle = ca.handle, parent_handle = p) for p in parent_pdus) - - # Children are simpler than parents, because they call us, so no URL - # to construct and figuring out what certificate to use is their - # problem, not ours. - - for child in ca.children.all(): - - child_pdu = child_pdus.pop(child.handle, None) - - if (child_pdu is None or - child_pdu.bsc_handle != bsc_handle or - child_pdu.bpki_cert != child.certificate): - rpkid_query.append(rpki.left_right.child_elt.make_pdu( - action = "create" if child_pdu is None else "set", - tag = child.handle, - self_handle = ca.handle, - child_handle = child.handle, - bsc_handle = bsc_handle, - bpki_cert = child.certificate)) - - rpkid_query.extend(rpki.left_right.child_elt.make_pdu( - action = "destroy", self_handle = ca.handle, child_handle = c) for c in child_pdus) - - # If caller wants us to poke rpkid, add that to the very end of the message - - if poke: - rpkid_query.append(rpki.left_right.self_elt.make_pdu( - action = "set", self_handle = ca.handle, run_now = "yes")) - - # If we changed anything, ship updates off to rpkid - - if rpkid_query: - rpkid_reply = self.call_rpkid(rpkid_query) - bsc_pdus = dict((x.bsc_handle, x) for x in rpkid_reply if isinstance(x, rpki.left_right.bsc_elt)) - if bsc_handle in bsc_pdus and bsc_pdus[bsc_handle].pkcs10_request: - bsc_req = bsc_pdus[bsc_handle].pkcs10_request - self.check_error_report(rpkid_reply) - - - def synchronize_pubd_core(self): - """ - Configure pubd with data built up by the other commands in this - program. This is the core synchronization code. Don't call this - directly, instead call a methods that calls this inside a Django - commit wrapper. - - This method configures pubd with data built up by the other - commands in this program. Commands which modify IRDB fields - related to pubd should call this when they're done. 
- """ - - # If we're not running pubd, the rest of this is a waste of time - - if not self.run_pubd: - return - - # Make sure that pubd's BPKI CRL is up to date. - - self.call_pubd(rpki.publication.config_elt.make_pdu( - action = "set", - bpki_crl = self.server_ca.latest_crl)) - - # See what pubd already has on file - - pubd_reply = self.call_pubd(rpki.publication.client_elt.make_pdu(action = "list")) - client_pdus = dict((x.client_handle, x) for x in pubd_reply if isinstance(x, rpki.publication.client_elt)) - pubd_query = [] - - # Check all clients - - for client in self.server_ca.clients.all(): - - client_pdu = client_pdus.pop(client.handle, None) - - if (client_pdu is None or - client_pdu.base_uri != client.sia_base or - client_pdu.bpki_cert != client.certificate): - pubd_query.append(rpki.publication.client_elt.make_pdu( - action = "create" if client_pdu is None else "set", - client_handle = client.handle, - bpki_cert = client.certificate, - base_uri = client.sia_base)) - - # Delete any unknown clients - - pubd_query.extend(rpki.publication.client_elt.make_pdu( - action = "destroy", client_handle = p) for p in client_pdus) - - # If we changed anything, ship updates off to pubd - - if pubd_query: - pubd_reply = self.call_pubd(pubd_query) - self.check_error_report(pubd_reply) - - - def synchronize_rpkid_deleted_core(self): - """ - Remove any objects present in rpkid's database but not - present in the IRDB. This is the core synchronization code. - Don't call this directly, instead call a methods that calls this - inside a Django commit wrapper. 
- """ - - rpkid_reply = self.call_rpkid(rpki.left_right.self_elt.make_pdu(action = "list")) - self.check_error_report(rpkid_reply) - - self_handles = set(s.self_handle for s in rpkid_reply) - ca_handles = set(ca.handle for ca in rpki.irdb.ResourceHolderCA.objects.all()) - assert ca_handles <= self_handles - - rpkid_query = [rpki.left_right.self_elt.make_pdu(action = "destroy", self_handle = handle) - for handle in (self_handles - ca_handles)] - - if rpkid_query: - rpkid_reply = self.call_rpkid(rpkid_query) - self.check_error_report(rpkid_reply) - - - @django.db.transaction.commit_on_success - def add_ee_certificate_request(self, pkcs10, resources): - """ - Check a PKCS #10 request to see if it complies with the - specification for a RPKI EE certificate; if it does, add an - EECertificateRequest for it to the IRDB. - - Not yet sure what we want for update and delete semantics here, so - for the moment this is straight addition. See methods like - .load_asns() and .load_prefixes() for other strategies. - """ - - pkcs10.check_valid_request_ee() - ee_request = self.resource_ca.ee_certificate_requests.create( - pkcs10 = pkcs10, - gski = pkcs10.gSKI(), - valid_until = resources.valid_until) - for range in resources.asn: - ee_request.asns.create(start_as = str(range.min), end_as = str(range.max)) - for range in resources.v4: - ee_request.address_ranges.create(start_ip = str(range.min), end_ip = str(range.max), version = 4) - for range in resources.v6: - ee_request.address_ranges.create(start_ip = str(range.min), end_ip = str(range.max), version = 6) - - - @django.db.transaction.commit_on_success - def add_router_certificate_request(self, router_certificate_request_xml, valid_until = None): - """ - Read XML file containing one or more router certificate requests, - attempt to add request(s) to IRDB. 
- - Check each PKCS #10 request to see if it complies with the - specification for a router certificate; if it does, create an EE - certificate request for it along with the ASN resources and - router-ID supplied in the XML. - """ - - xml = ElementTree(file = router_certificate_request_xml).getroot() - rpki.relaxng.router_certificate.assertValid(xml) - - for req in xml.getiterator(routercert_namespaceQName + "router_certificate_request"): - - pkcs10 = rpki.x509.PKCS10(Base64 = req.text) - router_id = long(req.get("router_id")) - asns = rpki.resource_set.resource_set_as(req.get("asn")) - if not valid_until: - valid_until = req.get("valid_until") - - if valid_until and isinstance(valid_until, (str, unicode)): - valid_until = rpki.sundial.datetime.fromXMLtime(valid_until) - - if not valid_until: - valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 365) - elif valid_until < rpki.sundial.now(): - raise PastExpiration, "Specified expiration date %s has already passed" % valid_until - - pkcs10.check_valid_request_router() - - cn = "ROUTER-%08x" % asns[0].min - sn = "%08x" % router_id - - ee_request = self.resource_ca.ee_certificate_requests.create( - pkcs10 = pkcs10, - gski = pkcs10.gSKI(), - valid_until = valid_until, - cn = cn, - sn = sn, - eku = rpki.oids.id_kp_bgpsec_router) - - for range in asns: - ee_request.asns.create(start_as = str(range.min), end_as = str(range.max)) - - - @django.db.transaction.commit_on_success - def delete_router_certificate_request(self, gski): - """ - Delete a router certificate request from this RPKI entity. 
- """ - - self.resource_ca.ee_certificate_requests.get(gski = gski).delete() diff --git a/rpkid/rpki/irdbd.py b/rpkid/rpki/irdbd.py deleted file mode 100644 index 41739dc4..00000000 --- a/rpkid/rpki/irdbd.py +++ /dev/null @@ -1,266 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -IR database daemon. 
-""" - -import sys -import os -import time -import argparse -import urlparse -import rpki.http -import rpki.config -import rpki.resource_set -import rpki.relaxng -import rpki.exceptions -import rpki.left_right -import rpki.log -import rpki.x509 -import rpki.daemonize - -class main(object): - - def handle_list_resources(self, q_pdu, r_msg): - child = rpki.irdb.Child.objects.get( - issuer__handle__exact = q_pdu.self_handle, - handle = q_pdu.child_handle) - resources = child.resource_bag - r_pdu = rpki.left_right.list_resources_elt() - r_pdu.tag = q_pdu.tag - r_pdu.self_handle = q_pdu.self_handle - r_pdu.child_handle = q_pdu.child_handle - r_pdu.valid_until = child.valid_until.strftime("%Y-%m-%dT%H:%M:%SZ") - r_pdu.asn = resources.asn - r_pdu.ipv4 = resources.v4 - r_pdu.ipv6 = resources.v6 - r_msg.append(r_pdu) - - def handle_list_roa_requests(self, q_pdu, r_msg): - for request in rpki.irdb.ROARequest.objects.raw(""" - SELECT irdb_roarequest.* - FROM irdb_roarequest, irdb_resourceholderca - WHERE irdb_roarequest.issuer_id = irdb_resourceholderca.id - AND irdb_resourceholderca.handle = %s - """, [q_pdu.self_handle]): - prefix_bag = request.roa_prefix_bag - r_pdu = rpki.left_right.list_roa_requests_elt() - r_pdu.tag = q_pdu.tag - r_pdu.self_handle = q_pdu.self_handle - r_pdu.asn = request.asn - r_pdu.ipv4 = prefix_bag.v4 - r_pdu.ipv6 = prefix_bag.v6 - r_msg.append(r_pdu) - - def handle_list_ghostbuster_requests(self, q_pdu, r_msg): - ghostbusters = rpki.irdb.GhostbusterRequest.objects.filter( - issuer__handle__exact = q_pdu.self_handle, - parent__handle__exact = q_pdu.parent_handle) - if ghostbusters.count() == 0: - ghostbusters = rpki.irdb.GhostbusterRequest.objects.filter( - issuer__handle__exact = q_pdu.self_handle, - parent = None) - for ghostbuster in ghostbusters: - r_pdu = rpki.left_right.list_ghostbuster_requests_elt() - r_pdu.tag = q_pdu.tag - r_pdu.self_handle = q_pdu.self_handle - r_pdu.parent_handle = q_pdu.parent_handle - r_pdu.vcard = ghostbuster.vcard - 
r_msg.append(r_pdu) - - def handle_list_ee_certificate_requests(self, q_pdu, r_msg): - for ee_req in rpki.irdb.EECertificateRequest.objects.filter(issuer__handle__exact = q_pdu.self_handle): - resources = ee_req.resource_bag - r_pdu = rpki.left_right.list_ee_certificate_requests_elt() - r_pdu.tag = q_pdu.tag - r_pdu.self_handle = q_pdu.self_handle - r_pdu.gski = ee_req.gski - r_pdu.valid_until = ee_req.valid_until.strftime("%Y-%m-%dT%H:%M:%SZ") - r_pdu.asn = resources.asn - r_pdu.ipv4 = resources.v4 - r_pdu.ipv6 = resources.v6 - r_pdu.cn = ee_req.cn - r_pdu.sn = ee_req.sn - r_pdu.eku = ee_req.eku - r_pdu.pkcs10 = ee_req.pkcs10 - r_msg.append(r_pdu) - - def handler(self, query, path, cb): - try: - q_pdu = None - r_msg = rpki.left_right.msg.reply() - from django.db import connection - connection.cursor() # Reconnect to mysqld if necessary - self.start_new_transaction() - serverCA = rpki.irdb.ServerCA.objects.get() - rpkid = serverCA.ee_certificates.get(purpose = "rpkid") - try: - q_cms = rpki.left_right.cms_msg(DER = query) - q_msg = q_cms.unwrap((serverCA.certificate, rpkid.certificate)) - self.cms_timestamp = q_cms.check_replay(self.cms_timestamp, path) - if not isinstance(q_msg, rpki.left_right.msg) or not q_msg.is_query(): - raise rpki.exceptions.BadQuery("Unexpected %r PDU" % q_msg) - for q_pdu in q_msg: - self.dispatch(q_pdu, r_msg) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - rpki.log.traceback() - if q_pdu is None: - r_msg.append(rpki.left_right.report_error_elt.from_exception(e)) - else: - r_msg.append(rpki.left_right.report_error_elt.from_exception(e, q_pdu.self_handle, q_pdu.tag)) - irdbd = serverCA.ee_certificates.get(purpose = "irdbd") - cb(200, body = rpki.left_right.cms_msg().wrap(r_msg, irdbd.private_key, irdbd.certificate)) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - rpki.log.traceback() - cb(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) - - def dispatch(self, 
q_pdu, r_msg): - try: - handler = self.dispatch_vector[type(q_pdu)] - except KeyError: - raise rpki.exceptions.BadQuery("Unexpected %r PDU" % q_pdu) - else: - handler(q_pdu, r_msg) - - def __init__(self, **kwargs): - - global rpki # pylint: disable=W0602 - - os.environ["TZ"] = "UTC" - time.tzset() - - parser = argparse.ArgumentParser(description = __doc__) - parser.add_argument("-c", "--config", - help = "override default location of configuration file") - parser.add_argument("-d", "--debug", action = "store_true", - help = "enable debugging mode") - parser.add_argument("-f", "--foreground", action = "store_true", - help = "do not daemonize") - parser.add_argument("--pidfile", - help = "override default location of pid file") - parser.add_argument("--profile", - help = "enable profiling, saving data to PROFILE") - args = parser.parse_args() - - rpki.log.init("irdbd", use_syslog = not args.debug) - - self.cfg = rpki.config.parser(args.config, "irdbd") - self.cfg.set_global_flags() - - if not args.foreground and not args.debug: - rpki.daemonize.daemon(pidfile = args.pidfile) - - if args.profile: - import cProfile - prof = cProfile.Profile() - try: - prof.runcall(self.main) - finally: - prof.dump_stats(args.profile) - rpki.log.info("Dumped profile data to %s" % args.profile) - else: - self.main() - - def main(self): - - global rpki # pylint: disable=W0602 - from django.conf import settings - - startup_msg = self.cfg.get("startup-message", "") - if startup_msg: - rpki.log.info(startup_msg) - - # Do -not- turn on DEBUG here except for short-lived tests, - # otherwise irdbd will eventually run out of memory and crash. - # - # If you must enable debugging, use django.db.reset_queries() to - # clear the query list manually, but it's probably better just to - # run with debugging disabled, since that's the expectation for - # production code. 
- # - # https://docs.djangoproject.com/en/dev/faq/models/#why-is-django-leaking-memory - - settings.configure( - DATABASES = { - "default" : { - "ENGINE" : "django.db.backends.mysql", - "NAME" : self.cfg.get("sql-database"), - "USER" : self.cfg.get("sql-username"), - "PASSWORD" : self.cfg.get("sql-password"), - "HOST" : "", - "PORT" : "" }}, - INSTALLED_APPS = ("rpki.irdb",),) - - import rpki.irdb # pylint: disable=W0621 - - # Entirely too much fun with read-only access to transactional databases. - # - # http://stackoverflow.com/questions/3346124/how-do-i-force-django-to-ignore-any-caches-and-reload-data - # http://devblog.resolversystems.com/?p=439 - # http://groups.google.com/group/django-users/browse_thread/thread/e25cec400598c06d - # http://stackoverflow.com/questions/1028671/python-mysqldb-update-query-fails - # http://dev.mysql.com/doc/refman/5.0/en/set-transaction.html - # - # It turns out that MySQL is doing us a favor with this weird - # transactional behavior on read, because without it there's a - # race condition if multiple updates are committed to the IRDB - # while we're in the middle of processing a query. Note that - # proper transaction management by the committers doesn't protect - # us, this is a transactional problem on read. So we need to use - # explicit transaction management. Since irdbd is a read-only - # consumer of IRDB data, this means we need to commit an empty - # transaction at the beginning of processing each query, to reset - # the transaction isolation snapshot. 
- - import django.db.transaction - self.start_new_transaction = django.db.transaction.commit_manually(django.db.transaction.commit) - - self.dispatch_vector = { - rpki.left_right.list_resources_elt : self.handle_list_resources, - rpki.left_right.list_roa_requests_elt : self.handle_list_roa_requests, - rpki.left_right.list_ghostbuster_requests_elt : self.handle_list_ghostbuster_requests, - rpki.left_right.list_ee_certificate_requests_elt : self.handle_list_ee_certificate_requests} - - try: - self.http_server_host = self.cfg.get("server-host", "") - self.http_server_port = self.cfg.getint("server-port") - except: - # - # Backwards compatibility, remove this eventually. - # - u = urlparse.urlparse(self.cfg.get("http-url")) - if (u.scheme not in ("", "http") or - u.username is not None or - u.password is not None or - u.params or u.query or u.fragment): - raise - self.http_server_host = u.hostname - self.http_server_port = int(u.port) - - self.cms_timestamp = None - - rpki.http.server( - host = self.http_server_host, - port = self.http_server_port, - handlers = self.handler) diff --git a/rpkid/rpki/left_right.py b/rpkid/rpki/left_right.py deleted file mode 100644 index 2d46cdfa..00000000 --- a/rpkid/rpki/left_right.py +++ /dev/null @@ -1,1300 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -RPKI "left-right" protocol. -""" - -import rpki.resource_set -import rpki.x509 -import rpki.sql -import rpki.exceptions -import rpki.xml_utils -import rpki.http -import rpki.up_down -import rpki.relaxng -import rpki.sundial -import rpki.log -import rpki.publication -import rpki.async -import rpki.rpkid_tasks - -## @var enforce_strict_up_down_xml_sender -# Enforce strict checking of XML "sender" field in up-down protocol - -enforce_strict_up_down_xml_sender = False - -class left_right_namespace(object): - """ - XML namespace parameters for left-right protocol. - """ - - xmlns = "http://www.hactrn.net/uris/rpki/left-right-spec/" - nsmap = { None : xmlns } - -class data_elt(rpki.xml_utils.data_elt, rpki.sql.sql_persistent, left_right_namespace): - """ - Virtual class for top-level left-right protocol data elements. - """ - - handles = () - - self_id = None - self_handle = None - - @property - @rpki.sql.cache_reference - def self(self): - """ - Fetch self object to which this object links. - """ - return self_elt.sql_fetch(self.gctx, self.self_id) - - @property - @rpki.sql.cache_reference - def bsc(self): - """ - Return BSC object to which this object links. - """ - return bsc_elt.sql_fetch(self.gctx, self.bsc_id) - - def make_reply_clone_hook(self, r_pdu): - """ - Set handles when cloning, including _id -> _handle translation. 
- """ - if r_pdu.self_handle is None: - r_pdu.self_handle = self.self_handle - for tag, elt in self.handles: - id_name = tag + "_id" - handle_name = tag + "_handle" - if getattr(r_pdu, handle_name, None) is None: - try: - setattr(r_pdu, handle_name, getattr(elt.sql_fetch(self.gctx, getattr(r_pdu, id_name)), handle_name)) - except AttributeError: - continue - - @classmethod - def serve_fetch_handle(cls, gctx, self_id, handle): - """ - Find an object based on its handle. - """ - return cls.sql_fetch_where1(gctx, cls.element_name + "_handle = %s AND self_id = %s", (handle, self_id)) - - def serve_fetch_one_maybe(self): - """ - Find the object on which a get, set, or destroy method should - operate, or which would conflict with a create method. - """ - where = "%s.%s_handle = %%s AND %s.self_id = self.self_id AND self.self_handle = %%s" % ((self.element_name,) * 3) - args = (getattr(self, self.element_name + "_handle"), self.self_handle) - return self.sql_fetch_where1(self.gctx, where, args, "self") - - def serve_fetch_all(self): - """ - Find the objects on which a list method should operate. - """ - where = "%s.self_id = self.self_id and self.self_handle = %%s" % self.element_name - return self.sql_fetch_where(self.gctx, where, (self.self_handle,), "self") - - def serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb): - """ - Hook to do _handle => _id translation before saving. - - self is always the object to be saved to SQL. For create - operations, self and q_pdu are be the same object; for set - operations, self is the pre-existing object from SQL and q_pdu is - the set request received from the the IRBE. 
- """ - for tag, elt in self.handles: - id_name = tag + "_id" - if getattr(self, id_name, None) is None: - x = elt.serve_fetch_handle(self.gctx, self.self_id, getattr(q_pdu, tag + "_handle")) - if x is None: - raise rpki.exceptions.HandleTranslationError, "Could not translate %r %s_handle" % (self, tag) - setattr(self, id_name, getattr(x, id_name)) - cb() - -class self_elt(data_elt): - """ - element. - """ - - element_name = "self" - attributes = ("action", "tag", "self_handle", "crl_interval", "regen_margin") - elements = ("bpki_cert", "bpki_glue") - booleans = ("rekey", "reissue", "revoke", "run_now", "publish_world_now", "revoke_forgotten", - "clear_replay_protection") - - sql_template = rpki.sql.template( - "self", - "self_id", - "self_handle", - "use_hsm", - "crl_interval", - "regen_margin", - ("bpki_cert", rpki.x509.X509), - ("bpki_glue", rpki.x509.X509)) - - handles = () - - use_hsm = False - crl_interval = None - regen_margin = None - bpki_cert = None - bpki_glue = None - cron_tasks = None - - def __repr__(self): - return rpki.log.log_repr(self) - - @property - def bscs(self): - """ - Fetch all BSC objects that link to this self object. - """ - return bsc_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) - - @property - def repositories(self): - """ - Fetch all repository objects that link to this self object. - """ - return repository_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) - - @property - def parents(self): - """ - Fetch all parent objects that link to this self object. - """ - return parent_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) - - @property - def children(self): - """ - Fetch all child objects that link to this self object. - """ - return child_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) - - @property - def roas(self): - """ - Fetch all ROA objects that link to this self object. 
- """ - return rpki.rpkid.roa_obj.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) - - @property - def ghostbusters(self): - """ - Fetch all Ghostbuster record objects that link to this self object. - """ - return rpki.rpkid.ghostbuster_obj.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) - - @property - def ee_certificates(self): - """ - Fetch all EE certificate objects that link to this self object. - """ - return rpki.rpkid.ee_cert_obj.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) - - - def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): - """ - Extra server actions for self_elt. - """ - rpki.log.trace() - actions = [] - if q_pdu.rekey: - actions.append(self.serve_rekey) - if q_pdu.revoke: - actions.append(self.serve_revoke) - if q_pdu.reissue: - actions.append(self.serve_reissue) - if q_pdu.revoke_forgotten: - actions.append(self.serve_revoke_forgotten) - if q_pdu.publish_world_now: - actions.append(self.serve_publish_world_now) - if q_pdu.run_now: - actions.append(self.serve_run_now) - if q_pdu.clear_replay_protection: - actions.append(self.serve_clear_replay_protection) - def loop(iterator, action): - action(iterator, eb) - rpki.async.iterator(actions, loop, cb) - - def serve_rekey(self, cb, eb): - """ - Handle a left-right rekey action for this self. - """ - rpki.log.trace() - def loop(iterator, parent): - parent.serve_rekey(iterator, eb) - rpki.async.iterator(self.parents, loop, cb) - - def serve_revoke(self, cb, eb): - """ - Handle a left-right revoke action for this self. - """ - rpki.log.trace() - def loop(iterator, parent): - parent.serve_revoke(iterator, eb) - rpki.async.iterator(self.parents, loop, cb) - - def serve_reissue(self, cb, eb): - """ - Handle a left-right reissue action for this self. 
- """ - rpki.log.trace() - def loop(iterator, parent): - parent.serve_reissue(iterator, eb) - rpki.async.iterator(self.parents, loop, cb) - - def serve_revoke_forgotten(self, cb, eb): - """ - Handle a left-right revoke_forgotten action for this self. - """ - rpki.log.trace() - def loop(iterator, parent): - parent.serve_revoke_forgotten(iterator, eb) - rpki.async.iterator(self.parents, loop, cb) - - def serve_clear_replay_protection(self, cb, eb): - """ - Handle a left-right clear_replay_protection action for this self. - """ - rpki.log.trace() - def loop(iterator, obj): - obj.serve_clear_replay_protection(iterator, eb) - rpki.async.iterator(self.parents + self.children + self.repositories, loop, cb) - - def serve_destroy_hook(self, cb, eb): - """ - Extra cleanup actions when destroying a self_elt. - """ - rpki.log.trace() - def loop(iterator, parent): - parent.delete(iterator) - rpki.async.iterator(self.parents, loop, cb) - - - def serve_publish_world_now(self, cb, eb): - """ - Handle a left-right publish_world_now action for this self. - - The publication stuff needs refactoring, right now publication is - interleaved with local operations in a way that forces far too - many bounces through the task system for any complex update. The - whole thing ought to be rewritten to queue up outgoing publication - PDUs and only send them when we're all done or when we need to - force publication at a particular point in a multi-phase operation. - - Once that reorganization has been done, this method should be - rewritten to reuse the low-level publish() methods that each - object will have...but we're not there yet. So, for now, we just - do this via brute force. Think of it as a trial version to see - whether we've identified everything that needs to be republished - for this operation. 
- """ - - def loop(iterator, parent): - q_msg = rpki.publication.msg.query() - for ca in parent.cas: - ca_detail = ca.active_ca_detail - if ca_detail is not None: - q_msg.append(rpki.publication.crl_elt.make_publish( - ca_detail.crl_uri, ca_detail.latest_crl)) - q_msg.append(rpki.publication.manifest_elt.make_publish( - ca_detail.manifest_uri, ca_detail.latest_manifest)) - q_msg.extend(rpki.publication.certificate_elt.make_publish( - c.uri, c.cert) for c in ca_detail.child_certs) - q_msg.extend(rpki.publication.roa_elt.make_publish( - r.uri, r.roa) for r in ca_detail.roas if r.roa is not None) - q_msg.extend(rpki.publication.ghostbuster_elt.make_publish( - g.uri, g.ghostbuster) for g in ca_detail.ghostbusters) - parent.repository.call_pubd(iterator, eb, q_msg) - - rpki.async.iterator(self.parents, loop, cb) - - def serve_run_now(self, cb, eb): - """ - Handle a left-right run_now action for this self. - """ - rpki.log.debug("Forced immediate run of periodic actions for self %s[%d]" % ( - self.self_handle, self.self_id)) - completion = rpki.rpkid_tasks.CompletionHandler(cb) - self.schedule_cron_tasks(completion) - assert completion.count > 0 - self.gctx.task_run() - - def serve_fetch_one_maybe(self): - """ - Find the self object upon which a get, set, or destroy action - should operate, or which would conflict with a create method. - """ - return self.serve_fetch_handle(self.gctx, None, self.self_handle) - - @classmethod - def serve_fetch_handle(cls, gctx, self_id, self_handle): - """ - Find a self object based on its self_handle. - """ - return cls.sql_fetch_where1(gctx, "self_handle = %s", self_handle) - - def serve_fetch_all(self): - """ - Find the self objects upon which a list action should operate. - This is different from the list action for all other objects, - where list only works within a given self_id context. - """ - return self.sql_fetch_all(self.gctx) - - def schedule_cron_tasks(self, completion): - """ - Schedule periodic tasks. 
- """ - - if self.cron_tasks is None: - self.cron_tasks = tuple(task(self) for task in rpki.rpkid_tasks.task_classes) - - for task in self.cron_tasks: - self.gctx.task_add(task) - completion.register(task) - - def find_covering_ca_details(self, resources): - """ - Return all active ca_detail_objs for this which cover a - particular set of resources. - - If we expected there to be a large number of ca_detail_objs, we - could add index tables and write fancy SQL query to do this, but - for the expected common case where there are only one or two - active ca_detail_objs per , it's probably not worth it. In - any case, this is an optimization we can leave for later. - """ - - results = set() - for parent in self.parents: - for ca in parent.cas: - ca_detail = ca.active_ca_detail - if ca_detail is not None and ca_detail.covers(resources): - results.add(ca_detail) - return results - - -class bsc_elt(data_elt): - """ - (Business Signing Context) element. - """ - - element_name = "bsc" - attributes = ("action", "tag", "self_handle", "bsc_handle", "key_type", "hash_alg", "key_length") - elements = ("signing_cert", "signing_cert_crl", "pkcs10_request") - booleans = ("generate_keypair",) - - sql_template = rpki.sql.template( - "bsc", - "bsc_id", - "bsc_handle", - "self_id", - "hash_alg", - ("private_key_id", rpki.x509.RSA), - ("pkcs10_request", rpki.x509.PKCS10), - ("signing_cert", rpki.x509.X509), - ("signing_cert_crl", rpki.x509.CRL)) - - handles = (("self", self_elt),) - - private_key_id = None - pkcs10_request = None - signing_cert = None - signing_cert_crl = None - - def __repr__(self): - return rpki.log.log_repr(self, self.bsc_handle) - - @property - def repositories(self): - """ - Fetch all repository objects that link to this BSC object. - """ - return repository_elt.sql_fetch_where(self.gctx, "bsc_id = %s", (self.bsc_id,)) - - @property - def parents(self): - """ - Fetch all parent objects that link to this BSC object. 
- """ - return parent_elt.sql_fetch_where(self.gctx, "bsc_id = %s", (self.bsc_id,)) - - @property - def children(self): - """ - Fetch all child objects that link to this BSC object. - """ - return child_elt.sql_fetch_where(self.gctx, "bsc_id = %s", (self.bsc_id,)) - - def serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb): - """ - Extra server actions for bsc_elt -- handle key generation. For - now this only allows RSA with SHA-256. - """ - if q_pdu.generate_keypair: - assert q_pdu.key_type in (None, "rsa") and q_pdu.hash_alg in (None, "sha256") - self.private_key_id = rpki.x509.RSA.generate(keylength = q_pdu.key_length or 2048) - self.pkcs10_request = rpki.x509.PKCS10.create(keypair = self.private_key_id) - r_pdu.pkcs10_request = self.pkcs10_request - data_elt.serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb) - -class repository_elt(data_elt): - """ - element. - """ - - element_name = "repository" - attributes = ("action", "tag", "self_handle", "repository_handle", "bsc_handle", "peer_contact_uri") - elements = ("bpki_cert", "bpki_glue") - booleans = ("clear_replay_protection",) - - sql_template = rpki.sql.template( - "repository", - "repository_id", - "repository_handle", - "self_id", - "bsc_id", - "peer_contact_uri", - ("bpki_cert", rpki.x509.X509), - ("bpki_glue", rpki.x509.X509), - ("last_cms_timestamp", rpki.sundial.datetime)) - - handles = (("self", self_elt), - ("bsc", bsc_elt)) - - bpki_cert = None - bpki_glue = None - last_cms_timestamp = None - - def __repr__(self): - return rpki.log.log_repr(self, self.repository_handle) - - @property - def parents(self): - """ - Fetch all parent objects that link to this repository object. - """ - return parent_elt.sql_fetch_where(self.gctx, "repository_id = %s", (self.repository_id,)) - - def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): - """ - Extra server actions for repository_elt. 
- """ - actions = [] - if q_pdu.clear_replay_protection: - actions.append(self.serve_clear_replay_protection) - def loop(iterator, action): - action(iterator, eb) - rpki.async.iterator(actions, loop, cb) - - def serve_clear_replay_protection(self, cb, eb): - """ - Handle a left-right clear_replay_protection action for this repository. - """ - self.last_cms_timestamp = None - self.sql_mark_dirty() - cb() - - @staticmethod - def default_pubd_handler(pdu): - """ - Default handler for publication response PDUs. - """ - pdu.raise_if_error() - - def call_pubd(self, callback, errback, q_msg, handlers = None): - """ - Send a message to publication daemon and return the response. - - As a convenience, attempting to send an empty message returns - immediate success without sending anything. - - Handlers is a dict of handler functions to process the response - PDUs. If the tag value in the response PDU appears in the dict, - the associated handler is called to process the PDU. If no tag - matches, default_pubd_handler() is called. A handler value of - False suppresses calling of the default handler. 
- """ - - try: - rpki.log.trace() - - self.gctx.sql.sweep() - - if not q_msg: - return callback() - - if handlers is None: - handlers = {} - - for q_pdu in q_msg: - rpki.log.info("Sending %s %s to pubd" % (q_pdu.action, q_pdu.uri)) - - bsc = self.bsc - q_der = rpki.publication.cms_msg().wrap(q_msg, bsc.private_key_id, bsc.signing_cert, bsc.signing_cert_crl) - bpki_ta_path = (self.gctx.bpki_ta, self.self.bpki_cert, self.self.bpki_glue, self.bpki_cert, self.bpki_glue) - - def done(r_der): - try: - rpki.log.debug("Received response from pubd") - r_cms = rpki.publication.cms_msg(DER = r_der) - r_msg = r_cms.unwrap(bpki_ta_path) - r_cms.check_replay_sql(self, self.peer_contact_uri) - for r_pdu in r_msg: - handler = handlers.get(r_pdu.tag, self.default_pubd_handler) - if handler: - rpki.log.debug("Calling pubd handler %r" % handler) - handler(r_pdu) - if len(q_msg) != len(r_msg): - raise rpki.exceptions.BadPublicationReply, "Wrong number of response PDUs from pubd: sent %r, got %r" % (q_msg, r_msg) - callback() - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - errback(e) - - rpki.log.debug("Sending request to pubd") - rpki.http.client( - url = self.peer_contact_uri, - msg = q_der, - callback = done, - errback = errback) - - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - errback(e) - -class parent_elt(data_elt): - """ - element. 
- """ - - element_name = "parent" - attributes = ("action", "tag", "self_handle", "parent_handle", "bsc_handle", "repository_handle", - "peer_contact_uri", "sia_base", "sender_name", "recipient_name") - elements = ("bpki_cms_cert", "bpki_cms_glue") - booleans = ("rekey", "reissue", "revoke", "revoke_forgotten", "clear_replay_protection") - - sql_template = rpki.sql.template( - "parent", - "parent_id", - "parent_handle", - "self_id", - "bsc_id", - "repository_id", - "peer_contact_uri", - "sia_base", - "sender_name", - "recipient_name", - ("bpki_cms_cert", rpki.x509.X509), - ("bpki_cms_glue", rpki.x509.X509), - ("last_cms_timestamp", rpki.sundial.datetime)) - - handles = (("self", self_elt), - ("bsc", bsc_elt), - ("repository", repository_elt)) - - bpki_cms_cert = None - bpki_cms_glue = None - last_cms_timestamp = None - - def __repr__(self): - return rpki.log.log_repr(self, self.parent_handle) - - @property - @rpki.sql.cache_reference - def repository(self): - """ - Fetch repository object to which this parent object links. - """ - return repository_elt.sql_fetch(self.gctx, self.repository_id) - - @property - def cas(self): - """ - Fetch all CA objects that link to this parent object. - """ - return rpki.rpkid.ca_obj.sql_fetch_where(self.gctx, "parent_id = %s", (self.parent_id,)) - - def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): - """ - Extra server actions for parent_elt. - """ - actions = [] - if q_pdu.rekey: - actions.append(self.serve_rekey) - if q_pdu.revoke: - actions.append(self.serve_revoke) - if q_pdu.reissue: - actions.append(self.serve_reissue) - if q_pdu.revoke_forgotten: - actions.append(self.serve_revoke_forgotten) - if q_pdu.clear_replay_protection: - actions.append(self.serve_clear_replay_protection) - def loop(iterator, action): - action(iterator, eb) - rpki.async.iterator(actions, loop, cb) - - def serve_rekey(self, cb, eb): - """ - Handle a left-right rekey action for this parent. 
- """ - def loop(iterator, ca): - ca.rekey(iterator, eb) - rpki.async.iterator(self.cas, loop, cb) - - def serve_revoke(self, cb, eb): - """ - Handle a left-right revoke action for this parent. - """ - def loop(iterator, ca): - ca.revoke(cb = iterator, eb = eb) - rpki.async.iterator(self.cas, loop, cb) - - def serve_reissue(self, cb, eb): - """ - Handle a left-right reissue action for this parent. - """ - def loop(iterator, ca): - ca.reissue(cb = iterator, eb = eb) - rpki.async.iterator(self.cas, loop, cb) - - def serve_clear_replay_protection(self, cb, eb): - """ - Handle a left-right clear_replay_protection action for this parent. - """ - self.last_cms_timestamp = None - self.sql_mark_dirty() - cb() - - - def get_skis(self, cb, eb): - """ - Fetch SKIs that this parent thinks we have. In theory this should - agree with our own database, but in practice stuff can happen, so - sometimes we need to know what our parent thinks. - - Result is a dictionary with the resource class name as key and a - set of SKIs as value. - """ - - def done(r_msg): - cb(dict((rc.class_name, set(c.cert.gSKI() for c in rc.certs)) - for rc in r_msg.payload.classes)) - - rpki.up_down.list_pdu.query(self, done, eb) - - - def revoke_skis(self, rc_name, skis_to_revoke, cb, eb): - """ - Revoke a set of SKIs within a particular resource class. - """ - - def loop(iterator, ski): - rpki.log.debug("Asking parent %r to revoke class %r, SKI %s" % (self, rc_name, ski)) - q_pdu = rpki.up_down.revoke_pdu() - q_pdu.class_name = rc_name - q_pdu.ski = ski - self.query_up_down(q_pdu, lambda r_pdu: iterator(), eb) - - rpki.async.iterator(skis_to_revoke, loop, cb) - - - def serve_revoke_forgotten(self, cb, eb): - """ - Handle a left-right revoke_forgotten action for this parent. - - This is a bit fiddly: we have to compare the result of an up-down - list query with what we have locally and identify the SKIs of any - certificates that have gone missing. 
This should never happen in - ordinary operation, but can arise if we have somehow lost a - private key, in which case there is nothing more we can do with - the issued cert, so we have to clear it. As this really is not - supposed to happen, we don't clear it automatically, instead we - require an explicit trigger. - """ - - def got_skis(skis_from_parent): - - def loop(iterator, item): - rc_name, skis_to_revoke = item - if rc_name in ca_map: - for ca_detail in ca_map[rc_name].issue_response_candidate_ca_details: - skis_to_revoke.discard(ca_detail.latest_ca_cert.gSKI()) - self.revoke_skis(rc_name, skis_to_revoke, iterator, eb) - - ca_map = dict((ca.parent_resource_class, ca) for ca in self.cas) - rpki.async.iterator(skis_from_parent.items(), loop, cb) - - self.get_skis(got_skis, eb) - - - def delete(self, cb, delete_parent = True): - """ - Delete all the CA stuff under this parent, and perhaps the parent - itself. - """ - - def loop(iterator, ca): - self.gctx.checkpoint() - ca.delete(self, iterator) - - def revoke(): - self.gctx.checkpoint() - self.serve_revoke_forgotten(done, fail) - - def fail(e): - rpki.log.warn("Trouble getting parent to revoke certificates, blundering onwards: %s" % e) - done() - - def done(): - self.gctx.checkpoint() - self.gctx.sql.sweep() - if delete_parent: - self.sql_delete() - cb() - - rpki.async.iterator(self.cas, loop, revoke) - - - def serve_destroy_hook(self, cb, eb): - """ - Extra server actions when destroying a parent_elt. - """ - - self.delete(cb, delete_parent = False) - - - def query_up_down(self, q_pdu, cb, eb): - """ - Client code for sending one up-down query PDU to this parent. 
- """ - - rpki.log.trace() - - bsc = self.bsc - if bsc is None: - raise rpki.exceptions.BSCNotFound, "Could not find BSC %s" % self.bsc_id - - if bsc.signing_cert is None: - raise rpki.exceptions.BSCNotReady, "BSC %r[%s] is not yet usable" % (bsc.bsc_handle, bsc.bsc_id) - - q_msg = rpki.up_down.message_pdu.make_query( - payload = q_pdu, - sender = self.sender_name, - recipient = self.recipient_name) - - q_der = rpki.up_down.cms_msg().wrap(q_msg, bsc.private_key_id, - bsc.signing_cert, - bsc.signing_cert_crl) - - def unwrap(r_der): - try: - r_cms = rpki.up_down.cms_msg(DER = r_der) - r_msg = r_cms.unwrap((self.gctx.bpki_ta, - self.self.bpki_cert, - self.self.bpki_glue, - self.bpki_cms_cert, - self.bpki_cms_glue)) - r_cms.check_replay_sql(self, self.peer_contact_uri) - r_msg.payload.check_response() - except (SystemExit, rpki.async.ExitNow): - raise - except Exception, e: - eb(e) - else: - cb(r_msg) - - rpki.http.client( - msg = q_der, - url = self.peer_contact_uri, - callback = unwrap, - errback = eb) - -class child_elt(data_elt): - """ - element. - """ - - element_name = "child" - attributes = ("action", "tag", "self_handle", "child_handle", "bsc_handle") - elements = ("bpki_cert", "bpki_glue") - booleans = ("reissue", "clear_replay_protection") - - sql_template = rpki.sql.template( - "child", - "child_id", - "child_handle", - "self_id", - "bsc_id", - ("bpki_cert", rpki.x509.X509), - ("bpki_glue", rpki.x509.X509), - ("last_cms_timestamp", rpki.sundial.datetime)) - - handles = (("self", self_elt), - ("bsc", bsc_elt)) - - bpki_cert = None - bpki_glue = None - last_cms_timestamp = None - - def __repr__(self): - return rpki.log.log_repr(self, self.child_handle) - - def fetch_child_certs(self, ca_detail = None, ski = None, unique = False): - """ - Fetch all child_cert objects that link to this child object. 
- """ - return rpki.rpkid.child_cert_obj.fetch(self.gctx, self, ca_detail, ski, unique) - - @property - def child_certs(self): - """ - Fetch all child_cert objects that link to this child object. - """ - return self.fetch_child_certs() - - @property - def parents(self): - """ - Fetch all parent objects that link to self object to which this child object links. - """ - return parent_elt.sql_fetch_where(self.gctx, "self_id = %s", (self.self_id,)) - - def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): - """ - Extra server actions for child_elt. - """ - actions = [] - if q_pdu.reissue: - actions.append(self.serve_reissue) - if q_pdu.clear_replay_protection: - actions.append(self.serve_clear_replay_protection) - def loop(iterator, action): - action(iterator, eb) - rpki.async.iterator(actions, loop, cb) - - def serve_reissue(self, cb, eb): - """ - Handle a left-right reissue action for this child. - """ - publisher = rpki.rpkid.publication_queue() - for child_cert in self.child_certs: - child_cert.reissue(child_cert.ca_detail, publisher, force = True) - publisher.call_pubd(cb, eb) - - def serve_clear_replay_protection(self, cb, eb): - """ - Handle a left-right clear_replay_protection action for this child. - """ - self.last_cms_timestamp = None - self.sql_mark_dirty() - cb() - - def ca_from_class_name(self, class_name): - """ - Fetch the CA corresponding to an up-down class_name. - """ - if not class_name.isdigit(): - raise rpki.exceptions.BadClassNameSyntax, "Bad class name %s" % class_name - ca = rpki.rpkid.ca_obj.sql_fetch(self.gctx, long(class_name)) - if ca is None: - raise rpki.exceptions.ClassNameUnknown, "Unknown class name %s" % class_name - parent = ca.parent - if self.self_id != parent.self_id: - raise rpki.exceptions.ClassNameMismatch( - "Class name mismatch: child.self_id = %d, parent.self_id = %d" % ( - self.self_id, parent.self_id)) - return ca - - def serve_destroy_hook(self, cb, eb): - """ - Extra server actions when destroying a child_elt. 
- """ - publisher = rpki.rpkid.publication_queue() - for child_cert in self.child_certs: - child_cert.revoke(publisher = publisher, - generate_crl_and_manifest = True) - publisher.call_pubd(cb, eb) - - def serve_up_down(self, query, callback): - """ - Outer layer of server handling for one up-down PDU from this child. - """ - - rpki.log.trace() - - bsc = self.bsc - if bsc is None: - raise rpki.exceptions.BSCNotFound, "Could not find BSC %s" % self.bsc_id - q_cms = rpki.up_down.cms_msg(DER = query) - q_msg = q_cms.unwrap((self.gctx.bpki_ta, - self.self.bpki_cert, - self.self.bpki_glue, - self.bpki_cert, - self.bpki_glue)) - q_cms.check_replay_sql(self, "child", self.child_handle) - q_msg.payload.gctx = self.gctx - if enforce_strict_up_down_xml_sender and q_msg.sender != self.child_handle: - raise rpki.exceptions.BadSender, "Unexpected XML sender %s" % q_msg.sender - self.gctx.sql.sweep() - - def done(r_msg): - # - # Exceptions from this point on are problematic, as we have no - # sane way of reporting errors in the error reporting mechanism. - # May require refactoring, ignore the issue for now. - # - reply = rpki.up_down.cms_msg().wrap(r_msg, bsc.private_key_id, - bsc.signing_cert, bsc.signing_cert_crl) - callback(reply) - - try: - q_msg.serve_top_level(self, done) - except (rpki.async.ExitNow, SystemExit): - raise - except rpki.exceptions.NoActiveCA, data: - done(q_msg.serve_error(data)) - except Exception, e: - rpki.log.traceback() - done(q_msg.serve_error(e)) - -class list_resources_elt(rpki.xml_utils.base_elt, left_right_namespace): - """ - element. - """ - - element_name = "list_resources" - attributes = ("self_handle", "tag", "child_handle", "valid_until", "asn", "ipv4", "ipv6") - valid_until = None - - def __repr__(self): - return rpki.log.log_repr(self, self.self_handle, self.child_handle, self.asn, self.ipv4, self.ipv6) - - def startElement(self, stack, name, attrs): - """ - Handle element. 
This requires special handling - due to the data types of some of the attributes. - """ - assert name == "list_resources", "Unexpected name %s, stack %s" % (name, stack) - self.read_attrs(attrs) - if isinstance(self.valid_until, str): - self.valid_until = rpki.sundial.datetime.fromXMLtime(self.valid_until) - if self.asn is not None: - self.asn = rpki.resource_set.resource_set_as(self.asn) - if self.ipv4 is not None: - self.ipv4 = rpki.resource_set.resource_set_ipv4(self.ipv4) - if self.ipv6 is not None: - self.ipv6 = rpki.resource_set.resource_set_ipv6(self.ipv6) - - def toXML(self): - """ - Generate element. This requires special - handling due to the data types of some of the attributes. - """ - elt = self.make_elt() - if isinstance(self.valid_until, int): - elt.set("valid_until", self.valid_until.toXMLtime()) - return elt - -class list_roa_requests_elt(rpki.xml_utils.base_elt, left_right_namespace): - """ - element. - """ - - element_name = "list_roa_requests" - attributes = ("self_handle", "tag", "asn", "ipv4", "ipv6") - - def startElement(self, stack, name, attrs): - """ - Handle element. This requires special handling - due to the data types of some of the attributes. - """ - assert name == "list_roa_requests", "Unexpected name %s, stack %s" % (name, stack) - self.read_attrs(attrs) - if self.ipv4 is not None: - self.ipv4 = rpki.resource_set.roa_prefix_set_ipv4(self.ipv4) - if self.ipv6 is not None: - self.ipv6 = rpki.resource_set.roa_prefix_set_ipv6(self.ipv6) - - def __repr__(self): - return rpki.log.log_repr(self, self.self_handle, self.asn, self.ipv4, self.ipv6) - -class list_ghostbuster_requests_elt(rpki.xml_utils.text_elt, left_right_namespace): - """ - element. 
- """ - - element_name = "list_ghostbuster_requests" - attributes = ("self_handle", "tag", "parent_handle") - text_attribute = "vcard" - - vcard = None - - def __repr__(self): - return rpki.log.log_repr(self, self.self_handle, self.parent_handle) - -class list_ee_certificate_requests_elt(rpki.xml_utils.base_elt, left_right_namespace): - """ - element. - """ - - element_name = "list_ee_certificate_requests" - attributes = ("self_handle", "tag", "gski", "valid_until", "asn", "ipv4", "ipv6", "cn", "sn", "eku") - elements = ("pkcs10",) - - pkcs10 = None - valid_until = None - eku = None - - def __repr__(self): - return rpki.log.log_repr(self, self.self_handle, self.gski, self.cn, self.sn, self.asn, self.ipv4, self.ipv6) - - def startElement(self, stack, name, attrs): - """ - Handle element. This requires special - handling due to the data types of some of the attributes. - """ - if name not in self.elements: - assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) - self.read_attrs(attrs) - if isinstance(self.valid_until, str): - self.valid_until = rpki.sundial.datetime.fromXMLtime(self.valid_until) - if self.asn is not None: - self.asn = rpki.resource_set.resource_set_as(self.asn) - if self.ipv4 is not None: - self.ipv4 = rpki.resource_set.resource_set_ipv4(self.ipv4) - if self.ipv6 is not None: - self.ipv6 = rpki.resource_set.resource_set_ipv6(self.ipv6) - if self.eku is not None: - self.eku = self.eku.split(",") - - def endElement(self, stack, name, text): - """ - Handle sub-element. - """ - assert len(self.elements) == 1 - if name == self.elements[0]: - self.pkcs10 = rpki.x509.PKCS10(Base64 = text) - else: - assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) - stack.pop() - - def toXML(self): - """ - Generate element. This requires special - handling due to the data types of some of the attributes. 
- """ - if isinstance(self.eku, (tuple, list)): - self.eku = ",".join(self.eku) - elt = self.make_elt() - for i in self.elements: - self.make_b64elt(elt, i, getattr(self, i, None)) - if isinstance(self.valid_until, int): - elt.set("valid_until", self.valid_until.toXMLtime()) - return elt - -class list_published_objects_elt(rpki.xml_utils.text_elt, left_right_namespace): - """ - element. - """ - - element_name = "list_published_objects" - attributes = ("self_handle", "tag", "uri", "child_handle") - text_attribute = "obj" - - obj = None - child_handle = None - - def __repr__(self): - return rpki.log.log_repr(self, self.self_handle, self.child_handle, self.uri) - - def serve_dispatch(self, r_msg, cb, eb): - """ - Handle a query. The method name is a - misnomer here, there's no action attribute and no dispatch, we - just dump every published object for the specified and return. - """ - for parent in self_elt.serve_fetch_handle(self.gctx, None, self.self_handle).parents: - for ca in parent.cas: - ca_detail = ca.active_ca_detail - if ca_detail is not None: - r_msg.append(self.make_reply(ca_detail.crl_uri, ca_detail.latest_crl)) - r_msg.append(self.make_reply(ca_detail.manifest_uri, ca_detail.latest_manifest)) - r_msg.extend(self.make_reply(c.uri, c.cert, c.child.child_handle) - for c in ca_detail.child_certs) - r_msg.extend(self.make_reply(r.uri, r.roa) - for r in ca_detail.roas if r.roa is not None) - r_msg.extend(self.make_reply(g.uri, g.ghostbuster) - for g in ca_detail.ghostbusters) - r_msg.extend(self.make_reply(c.uri, c.cert) - for c in ca_detail.ee_certificates) - cb() - - def make_reply(self, uri, obj, child_handle = None): - """ - Generate one reply PDU. - """ - r_pdu = self.make_pdu(tag = self.tag, self_handle = self.self_handle, - uri = uri, child_handle = child_handle) - r_pdu.obj = obj.get_Base64() - return r_pdu - -class list_received_resources_elt(rpki.xml_utils.base_elt, left_right_namespace): - """ - element. 
- """ - - element_name = "list_received_resources" - attributes = ("self_handle", "tag", "parent_handle", - "notBefore", "notAfter", "uri", "sia_uri", "aia_uri", "asn", "ipv4", "ipv6") - - def __repr__(self): - return rpki.log.log_repr(self, self.self_handle, self.parent_handle, self.uri, self.notAfter) - - def serve_dispatch(self, r_msg, cb, eb): - """ - Handle a query. The method name is a - misnomer here, there's no action attribute and no dispatch, we - just dump a bunch of data about every certificate issued to us by - one of our parents, then return. - """ - for parent in self_elt.serve_fetch_handle(self.gctx, None, self.self_handle).parents: - for ca in parent.cas: - ca_detail = ca.active_ca_detail - if ca_detail is not None and ca_detail.latest_ca_cert is not None: - r_msg.append(self.make_reply(parent.parent_handle, ca_detail.ca_cert_uri, ca_detail.latest_ca_cert)) - cb() - - def make_reply(self, parent_handle, uri, cert): - """ - Generate one reply PDU. - """ - resources = cert.get_3779resources() - return self.make_pdu( - tag = self.tag, - self_handle = self.self_handle, - parent_handle = parent_handle, - notBefore = str(cert.getNotBefore()), - notAfter = str(cert.getNotAfter()), - uri = uri, - sia_uri = cert.get_sia_directory_uri(), - aia_uri = cert.get_aia_uri(), - asn = resources.asn, - ipv4 = resources.v4, - ipv6 = resources.v6) - -class report_error_elt(rpki.xml_utils.text_elt, left_right_namespace): - """ - element. - """ - - element_name = "report_error" - attributes = ("tag", "self_handle", "error_code") - text_attribute = "error_text" - - error_text = None - - def __repr__(self): - return rpki.log.log_repr(self, self.self_handle, self.error_code) - - @classmethod - def from_exception(cls, e, self_handle = None, tag = None): - """ - Generate a element from an exception. 
- """ - self = cls() - self.self_handle = self_handle - self.tag = tag - self.error_code = e.__class__.__name__ - self.error_text = str(e) - return self - -class msg(rpki.xml_utils.msg, left_right_namespace): - """ - Left-right PDU. - """ - - ## @var version - # Protocol version - version = 1 - - ## @var pdus - # Dispatch table of PDUs for this protocol. - pdus = dict((x.element_name, x) - for x in (self_elt, child_elt, parent_elt, bsc_elt, - repository_elt, list_resources_elt, - list_roa_requests_elt, list_ghostbuster_requests_elt, - list_ee_certificate_requests_elt, - list_published_objects_elt, - list_received_resources_elt, report_error_elt)) - - def serve_top_level(self, gctx, cb): - """ - Serve one msg PDU. - """ - - r_msg = self.__class__.reply() - - def loop(iterator, q_pdu): - - def fail(e): - if not isinstance(e, rpki.exceptions.NotFound): - rpki.log.traceback() - r_msg.append(report_error_elt.from_exception( - e, self_handle = q_pdu.self_handle, tag = q_pdu.tag)) - cb(r_msg) - - try: - q_pdu.gctx = gctx - q_pdu.serve_dispatch(r_msg, iterator, fail) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - fail(e) - - def done(): - cb(r_msg) - - rpki.async.iterator(self, loop, done) - -class sax_handler(rpki.xml_utils.sax_handler): - """ - SAX handler for Left-Right protocol. - """ - - pdu = msg - name = "msg" - version = "1" - -class cms_msg(rpki.x509.XML_CMS_object): - """ - Class to hold a CMS-signed left-right PDU. 
- """ - - encoding = "us-ascii" - schema = rpki.relaxng.left_right - saxify = sax_handler.saxify diff --git a/rpkid/rpki/log.py b/rpkid/rpki/log.py deleted file mode 100644 index c605331a..00000000 --- a/rpkid/rpki/log.py +++ /dev/null @@ -1,199 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -Logging facilities for RPKI libraries. -""" - -import syslog -import sys -import os -import time -import traceback as tb - -try: - have_setproctitle = False - if os.getenv("DISABLE_SETPROCTITLE") is None: - import setproctitle - have_setproctitle = True -except ImportError: - pass - -## @var enable_trace -# Whether call tracing is enabled. - -enable_trace = False - -## @var show_python_ids -# Whether __repr__() methods should show Python id numbers - -show_python_ids = False - -## @var enable_tracebacks -# Whether tracebacks are enabled globally. Individual classes and -# modules may choose to override this. 
- -enable_tracebacks = False - -## @var use_setproctitle -# Whether to use setproctitle (if available) to change name shown for -# this process in ps listings (etc). - -use_setproctitle = True - -## @var proctitle_extra - -# Extra text to include in proctitle display. By default this is the -# tail of the current directory name, as this is often useful, but you -# can set it to something else if you like. If None or the empty -# string, the extra information field will be omitted from the proctitle. - -proctitle_extra = os.path.basename(os.getcwd()) - -def init(ident = "rpki", flags = syslog.LOG_PID, facility = syslog.LOG_DAEMON, use_syslog = None, log_file = sys.stderr, tag_log_lines = True): - """ - Initialize logging system. - """ - - # If caller didn't say whether to use syslog, use log file if user supplied one, otherwise use syslog - - if use_syslog is None: - use_syslog = log_file is sys.stderr - - logger.use_syslog = use_syslog - logger.tag_log_lines = tag_log_lines - - if use_syslog: - syslog.openlog(ident, flags, facility) - - else: - logger.tag = ident - logger.pid = os.getpid() - logger.log_file = log_file - - if ident and have_setproctitle and use_setproctitle: - if proctitle_extra: - setproctitle.setproctitle("%s (%s)" % (ident, proctitle_extra)) - else: - setproctitle.setproctitle(ident) - -class logger(object): - """ - Closure for logging. 
- """ - - use_syslog = True - tag = "" - pid = 0 - log_file = sys.stderr - - def __init__(self, priority): - self.priority = priority - - def __call__(self, message): - if self.use_syslog: - syslog.syslog(self.priority, message) - elif self.tag_log_lines: - self.log_file.write("%s %s[%d]: %s\n" % (time.strftime("%F %T"), self.tag, self.pid, message)) - self.log_file.flush() - else: - self.log_file.write(message + "\n") - self.log_file.flush() - -error = logger(syslog.LOG_ERR) -warn = logger(syslog.LOG_WARNING) -note = logger(syslog.LOG_NOTICE) -info = logger(syslog.LOG_INFO) -debug = logger(syslog.LOG_DEBUG) - - -def set_trace(enable): - """ - Enable or disable call tracing. - """ - - global enable_trace - enable_trace = enable - -def trace(): - """ - Execution trace -- where are we now, and whence came we here? - """ - - if enable_trace: - bt = tb.extract_stack(limit = 3) - return debug("[%s() at %s:%d from %s:%d]" % (bt[1][2], bt[1][0], bt[1][1], bt[0][0], bt[0][1])) - -def traceback(do_it = None): - """ - Consolidated backtrace facility with a bit of extra info. Argument - specifies whether or not to log the traceback (some modules and - classes have their own controls for this, this lets us provide a - unified interface). If no argument is specified, we use the global - default value rpki.log.enable_tracebacks. - - Assertion failures generate backtraces unconditionally, on the - theory that (a) assertion failures are programming errors by - definition, and (b) it's often hard to figure out what's triggering - a particular assertion failure without the backtrace. - """ - - if do_it is None: - do_it = enable_tracebacks - - e = sys.exc_info()[1] - assert e is not None, "rpki.log.traceback() called without valid trace on stack! This should not happen." 
- - if do_it or isinstance(e, AssertionError): - bt = tb.extract_stack(limit = 3) - error("Exception caught in %s() at %s:%d called from %s:%d" % (bt[1][2], bt[1][0], bt[1][1], bt[0][0], bt[0][1])) - bt = tb.format_exc() - assert bt is not None, "Apparently I'm still not using the right test for null backtrace" - for line in bt.splitlines(): - warn(line) - -def log_repr(obj, *tokens): - """ - Constructor for __repr__() strings, handles suppression of Python - IDs as needed, includes self_handle when available. - """ - - # pylint: disable=W0702 - - words = ["%s.%s" % (obj.__class__.__module__, obj.__class__.__name__)] - try: - words.append("{%s}" % obj.self.self_handle) - except: - pass - - for token in tokens: - if token is not None: - try: - s = str(token) - except: - s = "???" - debug("Failed to generate repr() string for object of type %r" % type(token)) - traceback() - if s: - words.append(s) - - if show_python_ids: - words.append(" at %#x" % id(obj)) - - return "<" + " ".join(words) + ">" diff --git a/rpkid/rpki/myrpki.py b/rpkid/rpki/myrpki.py deleted file mode 100644 index c5c7990f..00000000 --- a/rpkid/rpki/myrpki.py +++ /dev/null @@ -1,23 +0,0 @@ -# $Id$ -# -# Copyright (C) 2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -""" -This is a tombstone for a program that no longer exists. -""" - -if __name__ != "__main__": # sic -- don't break regression tests - import sys - sys.exit('"myrpki" is obsolete. Please use "rpkic" instead.') diff --git a/rpkid/rpki/mysql_import.py b/rpkid/rpki/mysql_import.py deleted file mode 100644 index 88d30357..00000000 --- a/rpkid/rpki/mysql_import.py +++ /dev/null @@ -1,65 +0,0 @@ -# $Id$ -# -# Copyright (C) 2011-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -""" -Import wrapper for MySQLdb. - -MySQLdb is an independent package, not part of Python, and has some -minor version skew issues with respect to Python itself, which we want -to suppress so that they don't annoy the user. None of this is -particularly hard, but the maze of whacky incantations required to do -this in multiple version of Python on multiple platforms is somewhat -tedious, and turns out to cause other problems when combined with the -way we construct executable Python scripts containing a standard -header indicating the location of our config file. - -So it turns out to be easier just to put all of the import voodoo -here, and have other modules that need MySQLdb import the MySQL module -object from this module. Looks kind of strange, but seems to work. -""" - -# pylint: disable=W0611 - -from __future__ import with_statement - -import warnings - -if hasattr(warnings, "catch_warnings"): - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - import MySQLdb -else: - import MySQLdb - -import _mysql_exceptions - -warnings.simplefilter("error", _mysql_exceptions.Warning) - -import MySQLdb.converters diff --git a/rpkid/rpki/oids.py b/rpkid/rpki/oids.py deleted file mode 100644 index a97df6a7..00000000 --- a/rpkid/rpki/oids.py +++ /dev/null @@ -1,101 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -OID database. - -This used to be fairly complicated, with multiple representations and -a collection of conversion functions, but now it is very simple: - -- We represent OIDs as Python strings, holding the dotted-decimal - form of an OID. Nothing but decimal digits and "." is legal. - This is compatible with the format that rpki.POW uses. - -- We define symbols in this module whose values are OIDs. - -That's pretty much it. There's a bit of code at the end which checks -the syntax of the defined strings and provides a pretty-print function -for the rare occasion when we need to print an OID, but other than -that this is just a collection of symbolic names for text strings. 
-""" - -ecdsa_with_SHA256 = "1.2.840.10045.4.3.2" -sha256WithRSAEncryption = "1.2.840.113549.1.1.11" -sha384WithRSAEncryption = "1.2.840.113549.1.1.12" -sha512WithRSAEncryption = "1.2.840.113549.1.1.13" -id_data = "1.2.840.113549.1.7.1" -id_smime = "1.2.840.113549.1.9.16" -id_ct = "1.2.840.113549.1.9.16.1" -id_ct_routeOriginAttestation = "1.2.840.113549.1.9.16.1.24" -id_ct_rpkiManifest = "1.2.840.113549.1.9.16.1.26" -id_ct_xml = "1.2.840.113549.1.9.16.1.28" -id_ct_rpkiGhostbusters = "1.2.840.113549.1.9.16.1.35" -authorityInfoAccess = "1.3.6.1.5.5.7.1.1" -sbgp_ipAddrBlock = "1.3.6.1.5.5.7.1.7" -sbgp_autonomousSysNum = "1.3.6.1.5.5.7.1.8" -subjectInfoAccess = "1.3.6.1.5.5.7.1.11" -id_kp_bgpsec_router = "1.3.6.1.5.5.7.3.30" -id_cp_ipAddr_asNumber = "1.3.6.1.5.5.7.14.2" -id_ad_caIssuers = "1.3.6.1.5.5.7.48.2" -id_ad_caRepository = "1.3.6.1.5.5.7.48.5" -id_ad_signedObjectRepository = "1.3.6.1.5.5.7.48.9" -id_ad_rpkiManifest = "1.3.6.1.5.5.7.48.10" -id_ad_signedObject = "1.3.6.1.5.5.7.48.11" -commonName = "2.5.4.3" -serialNumber = "2.5.4.5" -countryName = "2.5.4.6" -localityName = "2.5.4.7" -stateOrProvinceName = "2.5.4.8" -streetAddress = "2.5.4.9" -organizationName = "2.5.4.10" -organizationalUnitName = "2.5.4.11" -subjectKeyIdentifier = "2.5.29.14" -keyUsage = "2.5.29.15" -basicConstraints = "2.5.29.19" -cRLNumber = "2.5.29.20" -cRLDistributionPoints = "2.5.29.31" -certificatePolicies = "2.5.29.32" -authorityKeyIdentifier = "2.5.29.35" -extendedKeyUsage = "2.5.29.37" -id_sha256 = "2.16.840.1.101.3.4.2.1" - -# Make sure all symbols exported so far look like OIDs, and build a -# dictionary to use when pretty-printing. 
- -_oid2name = {} - -for _sym in dir(): - if not _sym.startswith("_"): - _val = globals()[_sym] - if not isinstance(_val, str) or not all(_v.isdigit() for _v in _val.split(".")): - raise ValueError("Bad OID definition: %s = %r" % (_sym, _val)) - _oid2name[_val] = _sym.replace("_", "-") - -del _sym -del _val - -def oid2name(oid): - """ - Translate an OID into a string suitable for printing. - """ - - if not isinstance(oid, (str, unicode)) or not all(o.isdigit() for o in oid.split(".")): - raise ValueError("Parameter does not look like an OID string: " + repr(oid)) - - return _oid2name.get(oid, oid) diff --git a/rpkid/rpki/old_irdbd.py b/rpkid/rpki/old_irdbd.py deleted file mode 100644 index 41060344..00000000 --- a/rpkid/rpki/old_irdbd.py +++ /dev/null @@ -1,325 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -IR database daemon. - -This is the old (pre-Django) version of irdbd, still used by smoketest -and perhaps still useful as a minimal example. This does NOT work with -the GUI, rpkic, or any of the other more recent tools. 
-""" - -import sys -import os -import time -import argparse -import urlparse -import rpki.http -import rpki.config -import rpki.resource_set -import rpki.relaxng -import rpki.exceptions -import rpki.left_right -import rpki.log -import rpki.x509 - -from rpki.mysql_import import MySQLdb - -class main(object): - - - def handle_list_resources(self, q_pdu, r_msg): - - r_pdu = rpki.left_right.list_resources_elt() - r_pdu.tag = q_pdu.tag - r_pdu.self_handle = q_pdu.self_handle - r_pdu.child_handle = q_pdu.child_handle - - self.cur.execute( - """ - SELECT registrant_id, valid_until - FROM registrant - WHERE registry_handle = %s AND registrant_handle = %s - """, - (q_pdu.self_handle, q_pdu.child_handle)) - - if self.cur.rowcount != 1: - raise rpki.exceptions.NotInDatabase( - "This query should have produced a single exact match, something's messed up" - " (rowcount = %d, self_handle = %s, child_handle = %s)" - % (self.cur.rowcount, q_pdu.self_handle, q_pdu.child_handle)) - - registrant_id, valid_until = self.cur.fetchone() - - r_pdu.valid_until = valid_until.strftime("%Y-%m-%dT%H:%M:%SZ") - - r_pdu.asn = rpki.resource_set.resource_set_as.from_sql( - self.cur, - """ - SELECT start_as, end_as - FROM registrant_asn - WHERE registrant_id = %s - """, - (registrant_id,)) - - r_pdu.ipv4 = rpki.resource_set.resource_set_ipv4.from_sql( - self.cur, - """ - SELECT start_ip, end_ip - FROM registrant_net - WHERE registrant_id = %s AND version = 4 - """, - (registrant_id,)) - - r_pdu.ipv6 = rpki.resource_set.resource_set_ipv6.from_sql( - self.cur, - """ - SELECT start_ip, end_ip - FROM registrant_net - WHERE registrant_id = %s AND version = 6 - """, - (registrant_id,)) - - r_msg.append(r_pdu) - - - def handle_list_roa_requests(self, q_pdu, r_msg): - - self.cur.execute( - "SELECT roa_request_id, asn FROM roa_request WHERE self_handle = %s", - (q_pdu.self_handle,)) - - for roa_request_id, asn in self.cur.fetchall(): - - r_pdu = rpki.left_right.list_roa_requests_elt() - r_pdu.tag = 
q_pdu.tag - r_pdu.self_handle = q_pdu.self_handle - r_pdu.asn = asn - - r_pdu.ipv4 = rpki.resource_set.roa_prefix_set_ipv4.from_sql( - self.cur, - """ - SELECT prefix, prefixlen, max_prefixlen - FROM roa_request_prefix - WHERE roa_request_id = %s AND version = 4 - """, - (roa_request_id,)) - - r_pdu.ipv6 = rpki.resource_set.roa_prefix_set_ipv6.from_sql( - self.cur, - """ - SELECT prefix, prefixlen, max_prefixlen - FROM roa_request_prefix - WHERE roa_request_id = %s AND version = 6 - """, - (roa_request_id,)) - - r_msg.append(r_pdu) - - - def handle_list_ghostbuster_requests(self, q_pdu, r_msg): - - self.cur.execute( - """ - SELECT vcard - FROM ghostbuster_request - WHERE self_handle = %s AND parent_handle = %s - """, - (q_pdu.self_handle, q_pdu.parent_handle)) - - vcards = [result[0] for result in self.cur.fetchall()] - - if not vcards: - - self.cur.execute( - """ - SELECT vcard - FROM ghostbuster_request - WHERE self_handle = %s AND parent_handle IS NULL - """, - (q_pdu.self_handle,)) - - vcards = [result[0] for result in self.cur.fetchall()] - - for vcard in vcards: - r_pdu = rpki.left_right.list_ghostbuster_requests_elt() - r_pdu.tag = q_pdu.tag - r_pdu.self_handle = q_pdu.self_handle - r_pdu.parent_handle = q_pdu.parent_handle - r_pdu.vcard = vcard - r_msg.append(r_pdu) - - - def handle_list_ee_certificate_requests(self, q_pdu, r_msg): - - self.cur.execute( - """ - SELECT ee_certificate_id, pkcs10, gski, cn, sn, eku, valid_until - FROM ee_certificate - WHERE self_handle = %s - """, - (q_pdu.self_handle,)) - - for ee_certificate_id, pkcs10, gski, cn, sn, eku, valid_until in self.cur.fetchall(): - - r_pdu = rpki.left_right.list_ee_certificate_requests_elt() - r_pdu.tag = q_pdu.tag - r_pdu.self_handle = q_pdu.self_handle - r_pdu.valid_until = valid_until.strftime("%Y-%m-%dT%H:%M:%SZ") - r_pdu.pkcs10 = rpki.x509.PKCS10(DER = pkcs10) - r_pdu.gski = gski - r_pdu.cn = cn - r_pdu.sn = sn - r_pdu.eku = eku - - r_pdu.asn = rpki.resource_set.resource_set_as.from_sql( - 
self.cur, - """ - SELECT start_as, end_as - FROM ee_certificate_asn - WHERE ee_certificate_id = %s - """, - (ee_certificate_id,)) - - r_pdu.ipv4 = rpki.resource_set.resource_set_ipv4.from_sql( - self.cur, - """ - SELECT start_ip, end_ip - FROM ee_certificate_net - WHERE ee_certificate_id = %s AND version = 4 - """, - (ee_certificate_id,)) - - r_pdu.ipv6 = rpki.resource_set.resource_set_ipv6.from_sql( - self.cur, - """ - SELECT start_ip, end_ip - FROM ee_certificate_net - WHERE ee_certificate_id = %s AND version = 6 - """, - (ee_certificate_id,)) - - r_msg.append(r_pdu) - - - handle_dispatch = { - rpki.left_right.list_resources_elt : handle_list_resources, - rpki.left_right.list_roa_requests_elt : handle_list_roa_requests, - rpki.left_right.list_ghostbuster_requests_elt : handle_list_ghostbuster_requests, - rpki.left_right.list_ee_certificate_requests_elt : handle_list_ee_certificate_requests } - - def handler(self, query, path, cb): - try: - - self.db.ping(True) - - r_msg = rpki.left_right.msg.reply() - - try: - - q_msg = rpki.left_right.cms_msg(DER = query).unwrap((self.bpki_ta, self.rpkid_cert)) - - if not isinstance(q_msg, rpki.left_right.msg) or not q_msg.is_query(): - raise rpki.exceptions.BadQuery, "Unexpected %r PDU" % q_msg - - for q_pdu in q_msg: - - try: - - try: - h = self.handle_dispatch[type(q_pdu)] - except KeyError: - raise rpki.exceptions.BadQuery, "Unexpected %r PDU" % q_pdu - else: - h(self, q_pdu, r_msg) - - except (rpki.async.ExitNow, SystemExit): - raise - - except Exception, e: - rpki.log.traceback() - r_msg.append(rpki.left_right.report_error_elt.from_exception(e, q_pdu.self_handle, q_pdu.tag)) - - except (rpki.async.ExitNow, SystemExit): - raise - - except Exception, e: - rpki.log.traceback() - r_msg.append(rpki.left_right.report_error_elt.from_exception(e)) - - cb(200, body = rpki.left_right.cms_msg().wrap(r_msg, self.irdbd_key, self.irdbd_cert)) - - except (rpki.async.ExitNow, SystemExit): - raise - - except Exception, e: - 
rpki.log.traceback() - - # We only get here in cases where we couldn't or wouldn't generate - # , so just return HTTP failure. - - cb(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) - - - def __init__(self): - - os.environ["TZ"] = "UTC" - time.tzset() - - parser = argparse.ArgumentParser(description = __doc__) - parser.add_argument("-c", "--config", - help = "override default location of configuration file") - parser.add_argument("-d", "--debug", action = "store_true", - help = "enable debugging mode") - args = parser.parse_args() - - rpki.log.init("irdbd", use_syslog = not args.debug) - - self.cfg = rpki.config.parser(args.config, "irdbd") - - startup_msg = self.cfg.get("startup-message", "") - if startup_msg: - rpki.log.info(startup_msg) - - self.cfg.set_global_flags() - - self.db = MySQLdb.connect(user = self.cfg.get("sql-username"), - db = self.cfg.get("sql-database"), - passwd = self.cfg.get("sql-password")) - - self.cur = self.db.cursor() - self.db.autocommit(True) - - self.bpki_ta = rpki.x509.X509(Auto_update = self.cfg.get("bpki-ta")) - self.rpkid_cert = rpki.x509.X509(Auto_update = self.cfg.get("rpkid-cert")) - self.irdbd_cert = rpki.x509.X509(Auto_update = self.cfg.get("irdbd-cert")) - self.irdbd_key = rpki.x509.RSA( Auto_update = self.cfg.get("irdbd-key")) - - u = urlparse.urlparse(self.cfg.get("http-url")) - - assert u.scheme in ("", "http") and \ - u.username is None and \ - u.password is None and \ - u.params == "" and \ - u.query == "" and \ - u.fragment == "" - - rpki.http.server(host = u.hostname or "localhost", - port = u.port or 443, - handlers = ((u.path, self.handler),)) diff --git a/rpkid/rpki/pubd.py b/rpkid/rpki/pubd.py deleted file mode 100644 index 31f22ed4..00000000 --- a/rpkid/rpki/pubd.py +++ /dev/null @@ -1,174 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry 
for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -RPKI publication engine. -""" - -import os -import time -import argparse -import sys -import re -import rpki.resource_set -import rpki.up_down -import rpki.x509 -import rpki.sql -import rpki.http -import rpki.config -import rpki.exceptions -import rpki.relaxng -import rpki.log -import rpki.publication -import rpki.daemonize - -class main(object): - """ - Main program for pubd. 
- """ - - def __init__(self): - - os.environ["TZ"] = "UTC" - time.tzset() - - self.irbe_cms_timestamp = None - - parser = argparse.ArgumentParser(description = __doc__) - parser.add_argument("-c", "--config", - help = "override default location of configuration file") - parser.add_argument("-d", "--debug", action = "store_true", - help = "enable debugging mode") - parser.add_argument("-f", "--foreground", action = "store_true", - help = "do not daemonize") - parser.add_argument("--pidfile", - help = "override default location of pid file") - parser.add_argument("--profile", - help = "enable profiling, saving data to PROFILE") - args = parser.parse_args() - - self.profile = args.profile - - rpki.log.init("pubd", use_syslog = not args.debug) - - self.cfg = rpki.config.parser(args.config, "pubd") - self.cfg.set_global_flags() - - if not args.foreground and not args.debug: - rpki.daemonize.daemon(pidfile = args.pidfile) - - if self.profile: - import cProfile - prof = cProfile.Profile() - try: - prof.runcall(self.main) - finally: - prof.dump_stats(self.profile) - rpki.log.info("Dumped profile data to %s" % self.profile) - else: - self.main() - - def main(self): - - if self.profile: - rpki.log.info("Running in profile mode with output to %s" % self.profile) - - self.sql = rpki.sql.session(self.cfg) - - self.bpki_ta = rpki.x509.X509(Auto_update = self.cfg.get("bpki-ta")) - self.irbe_cert = rpki.x509.X509(Auto_update = self.cfg.get("irbe-cert")) - self.pubd_cert = rpki.x509.X509(Auto_update = self.cfg.get("pubd-cert")) - self.pubd_key = rpki.x509.RSA( Auto_update = self.cfg.get("pubd-key")) - - self.http_server_host = self.cfg.get("server-host", "") - self.http_server_port = self.cfg.getint("server-port") - - self.publication_base = self.cfg.get("publication-base", "publication/") - - self.publication_multimodule = self.cfg.getboolean("publication-multimodule", False) - - rpki.http.server( - host = self.http_server_host, - port = self.http_server_port, - handlers = 
(("/control", self.control_handler), - ("/client/", self.client_handler))) - - def handler_common(self, query, client, cb, certs, crl = None): - """ - Common PDU handler code. - """ - - def done(r_msg): - reply = rpki.publication.cms_msg().wrap(r_msg, self.pubd_key, self.pubd_cert, crl) - self.sql.sweep() - cb(reply) - - q_cms = rpki.publication.cms_msg(DER = query) - q_msg = q_cms.unwrap(certs) - if client is None: - self.irbe_cms_timestamp = q_cms.check_replay(self.irbe_cms_timestamp, "control") - else: - q_cms.check_replay_sql(client, client.client_handle) - q_msg.serve_top_level(self, client, done) - - def control_handler(self, query, path, cb): - """ - Process one PDU from the IRBE. - """ - - def done(body): - cb(200, body = body) - - rpki.log.trace() - try: - self.handler_common(query, None, done, (self.bpki_ta, self.irbe_cert)) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - rpki.log.traceback() - cb(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) - - client_url_regexp = re.compile("/client/([-A-Z0-9_/]+)$", re.I) - - def client_handler(self, query, path, cb): - """ - Process one PDU from a client. 
- """ - - def done(body): - cb(200, body = body) - - rpki.log.trace() - try: - match = self.client_url_regexp.search(path) - if match is None: - raise rpki.exceptions.BadContactURL, "Bad path: %s" % path - client_handle = match.group(1) - client = rpki.publication.client_elt.sql_fetch_where1(self, "client_handle = %s", (client_handle,)) - if client is None: - raise rpki.exceptions.ClientNotFound, "Could not find client %s" % client_handle - config = rpki.publication.config_elt.fetch(self) - if config is None or config.bpki_crl is None: - raise rpki.exceptions.CMSCRLNotSet - self.handler_common(query, client, done, (self.bpki_ta, client.bpki_cert, client.bpki_glue), config.bpki_crl) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - rpki.log.traceback() - cb(500, reason = "Could not process PDU: %s" % e) diff --git a/rpkid/rpki/publication.py b/rpkid/rpki/publication.py deleted file mode 100644 index 2462ae39..00000000 --- a/rpkid/rpki/publication.py +++ /dev/null @@ -1,466 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
-# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -RPKI "publication" protocol. -""" - -import os -import errno -import rpki.resource_set -import rpki.x509 -import rpki.sql -import rpki.exceptions -import rpki.xml_utils -import rpki.http -import rpki.up_down -import rpki.relaxng -import rpki.sundial -import rpki.log - -class publication_namespace(object): - """ - XML namespace parameters for publication protocol. - """ - - xmlns = "http://www.hactrn.net/uris/rpki/publication-spec/" - nsmap = { None : xmlns } - -class control_elt(rpki.xml_utils.data_elt, rpki.sql.sql_persistent, publication_namespace): - """ - Virtual class for control channel objects. - """ - - def serve_dispatch(self, r_msg, cb, eb): - """ - Action dispatch handler. This needs special handling because we - need to make sure that this PDU arrived via the control channel. - """ - if self.client is not None: - raise rpki.exceptions.BadQuery, "Control query received on client channel" - rpki.xml_utils.data_elt.serve_dispatch(self, r_msg, cb, eb) - -class config_elt(control_elt): - """ - element. 
This is a little weird because there should - never be more than one row in the SQL config table, but we have to - put the BPKI CRL somewhere and SQL is the least bad place available. - - So we reuse a lot of the SQL machinery, but we nail config_id at 1, - we don't expose it in the XML protocol, and we only support the get - and set actions. - """ - - attributes = ("action", "tag") - element_name = "config" - elements = ("bpki_crl",) - - sql_template = rpki.sql.template( - "config", - "config_id", - ("bpki_crl", rpki.x509.CRL)) - - wired_in_config_id = 1 - - def startElement(self, stack, name, attrs): - """ - StartElement() handler for config object. This requires special - handling because of the weird way we treat config_id. - """ - control_elt.startElement(self, stack, name, attrs) - self.config_id = self.wired_in_config_id - - @classmethod - def fetch(cls, gctx): - """ - Fetch the config object from SQL. This requires special handling - because of the weird way we treat config_id. - """ - return cls.sql_fetch(gctx, cls.wired_in_config_id) - - def serve_set(self, r_msg, cb, eb): - """ - Handle a set action. This requires special handling because - config doesn't support the create method. - """ - if self.sql_fetch(self.gctx, self.config_id) is None: - control_elt.serve_create(self, r_msg, cb, eb) - else: - control_elt.serve_set(self, r_msg, cb, eb) - - def serve_fetch_one_maybe(self): - """ - Find the config object on which a get or set method should - operate. - """ - return self.sql_fetch(self.gctx, self.config_id) - -class client_elt(control_elt): - """ - element. 
- """ - - element_name = "client" - attributes = ("action", "tag", "client_handle", "base_uri") - elements = ("bpki_cert", "bpki_glue") - booleans = ("clear_replay_protection",) - - sql_template = rpki.sql.template( - "client", - "client_id", - "client_handle", - "base_uri", - ("bpki_cert", rpki.x509.X509), - ("bpki_glue", rpki.x509.X509), - ("last_cms_timestamp", rpki.sundial.datetime)) - - base_uri = None - bpki_cert = None - bpki_glue = None - last_cms_timestamp = None - - def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): - """ - Extra server actions for client_elt. - """ - actions = [] - if q_pdu.clear_replay_protection: - actions.append(self.serve_clear_replay_protection) - def loop(iterator, action): - action(iterator, eb) - rpki.async.iterator(actions, loop, cb) - - def serve_clear_replay_protection(self, cb, eb): - """ - Handle a clear_replay_protection action for this client. - """ - self.last_cms_timestamp = None - self.sql_mark_dirty() - cb() - - def serve_fetch_one_maybe(self): - """ - Find the client object on which a get, set, or destroy method - should operate, or which would conflict with a create method. - """ - return self.sql_fetch_where1(self.gctx, "client_handle = %s", self.client_handle) - - def serve_fetch_all(self): - """ - Find client objects on which a list method should operate. - """ - return self.sql_fetch_all(self.gctx) - - def check_allowed_uri(self, uri): - """ - Make sure that a target URI is within this client's allowed URI space. - """ - if not uri.startswith(self.base_uri): - raise rpki.exceptions.ForbiddenURI - -class publication_object_elt(rpki.xml_utils.base_elt, publication_namespace): - """ - Virtual class for publishable objects. These have very similar - syntax, differences lie in underlying datatype and methods. 
XML - methods are a little different from the pattern used for objects - that support the create/set/get/list/destroy actions, but - publishable objects don't go in SQL either so these classes would be - different in any case. - """ - - attributes = ("action", "tag", "client_handle", "uri") - payload_type = None - payload = None - - def endElement(self, stack, name, text): - """ - Handle a publishable element element. - """ - assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) - if text: - self.payload = self.payload_type(Base64 = text) # pylint: disable=E1102 - stack.pop() - - def toXML(self): - """ - Generate XML element for publishable object. - """ - elt = self.make_elt() - if self.payload: - elt.text = self.payload.get_Base64() - return elt - - def serve_dispatch(self, r_msg, cb, eb): - """ - Action dispatch handler. - """ - # pylint: disable=E0203 - try: - if self.client is None: - raise rpki.exceptions.BadQuery, "Client query received on control channel" - dispatch = { "publish" : self.serve_publish, - "withdraw" : self.serve_withdraw } - if self.action not in dispatch: - raise rpki.exceptions.BadQuery, "Unexpected query: action %s" % self.action - self.client.check_allowed_uri(self.uri) - dispatch[self.action]() - r_pdu = self.__class__() - r_pdu.action = self.action - r_pdu.tag = self.tag - r_pdu.uri = self.uri - r_msg.append(r_pdu) - cb() - except rpki.exceptions.NoObjectAtURI, e: - # This can happen when we're cleaning up from a prior mess, so - # we generate a PDU then carry on. - r_msg.append(report_error_elt.from_exception(e, self.tag)) - cb() - - def serve_publish(self): - """ - Publish an object. 
- """ - rpki.log.info("Publishing %s" % self.payload.tracking_data(self.uri)) - filename = self.uri_to_filename() - filename_tmp = filename + ".tmp" - dirname = os.path.dirname(filename) - if not os.path.isdir(dirname): - os.makedirs(dirname) - f = open(filename_tmp, "wb") - f.write(self.payload.get_DER()) - f.close() - os.rename(filename_tmp, filename) - - def serve_withdraw(self): - """ - Withdraw an object, then recursively delete empty directories. - """ - rpki.log.info("Withdrawing %s" % self.uri) - filename = self.uri_to_filename() - try: - os.remove(filename) - except OSError, e: - if e.errno == errno.ENOENT: - raise rpki.exceptions.NoObjectAtURI, "No object published at %s" % self.uri - else: - raise - min_path_len = len(self.gctx.publication_base.rstrip("/")) - dirname = os.path.dirname(filename) - while len(dirname) > min_path_len: - try: - os.rmdir(dirname) - except OSError: - break - else: - dirname = os.path.dirname(dirname) - - def uri_to_filename(self): - """ - Convert a URI to a local filename. - """ - if not self.uri.startswith("rsync://"): - raise rpki.exceptions.BadURISyntax, self.uri - path = self.uri.split("/")[3:] - if not self.gctx.publication_multimodule: - del path[0] - path.insert(0, self.gctx.publication_base.rstrip("/")) - filename = "/".join(path) - if "/../" in filename or filename.endswith("/.."): - raise rpki.exceptions.BadURISyntax, filename - return filename - - @classmethod - def make_publish(cls, uri, obj, tag = None): - """ - Construct a publication PDU. - """ - assert cls.payload_type is not None and type(obj) is cls.payload_type - return cls.make_pdu(action = "publish", uri = uri, payload = obj, tag = tag) - - @classmethod - def make_withdraw(cls, uri, obj, tag = None): - """ - Construct a withdrawal PDU. - """ - assert cls.payload_type is not None and type(obj) is cls.payload_type - return cls.make_pdu(action = "withdraw", uri = uri, tag = tag) - - def raise_if_error(self): - """ - No-op, since this is not a PDU. 
- """ - pass - -class certificate_elt(publication_object_elt): - """ - element. - """ - - element_name = "certificate" - payload_type = rpki.x509.X509 - -class crl_elt(publication_object_elt): - """ - element. - """ - - element_name = "crl" - payload_type = rpki.x509.CRL - -class manifest_elt(publication_object_elt): - """ - element. - """ - - element_name = "manifest" - payload_type = rpki.x509.SignedManifest - -class roa_elt(publication_object_elt): - """ - element. - """ - - element_name = "roa" - payload_type = rpki.x509.ROA - -class ghostbuster_elt(publication_object_elt): - """ - element. - """ - - element_name = "ghostbuster" - payload_type = rpki.x509.Ghostbuster - -publication_object_elt.obj2elt = dict( - (e.payload_type, e) for e in - (certificate_elt, crl_elt, manifest_elt, roa_elt, ghostbuster_elt)) - -class report_error_elt(rpki.xml_utils.text_elt, publication_namespace): - """ - element. - """ - - element_name = "report_error" - attributes = ("tag", "error_code") - text_attribute = "error_text" - - error_text = None - - @classmethod - def from_exception(cls, e, tag = None): - """ - Generate a element from an exception. - """ - self = cls() - self.tag = tag - self.error_code = e.__class__.__name__ - self.error_text = str(e) - return self - - def __str__(self): - s = "" - if getattr(self, "tag", None) is not None: - s += "[%s] " % self.tag - s += self.error_code - if getattr(self, "error_text", None) is not None: - s += ": " + self.error_text - return s - - def raise_if_error(self): - """ - Raise exception associated with this PDU. - """ - t = rpki.exceptions.__dict__.get(self.error_code) - if isinstance(t, type) and issubclass(t, rpki.exceptions.RPKI_Exception): - raise t, getattr(self, "text", None) - else: - raise rpki.exceptions.BadPublicationReply, "Unexpected response from pubd: %s" % self - -class msg(rpki.xml_utils.msg, publication_namespace): - """ - Publication PDU. 
- """ - - ## @var version - # Protocol version - version = 1 - - ## @var pdus - # Dispatch table of PDUs for this protocol. - pdus = dict((x.element_name, x) for x in - (config_elt, client_elt, certificate_elt, crl_elt, manifest_elt, roa_elt, ghostbuster_elt, report_error_elt)) - - def serve_top_level(self, gctx, client, cb): - """ - Serve one msg PDU. - """ - if not self.is_query(): - raise rpki.exceptions.BadQuery, "Message type is not query" - r_msg = self.__class__.reply() - - def loop(iterator, q_pdu): - - def fail(e): - if not isinstance(e, rpki.exceptions.NotFound): - rpki.log.traceback() - r_msg.append(report_error_elt.from_exception(e, q_pdu.tag)) - cb(r_msg) - - try: - q_pdu.gctx = gctx - q_pdu.client = client - q_pdu.serve_dispatch(r_msg, iterator, fail) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - fail(e) - - def done(): - cb(r_msg) - - rpki.async.iterator(self, loop, done) - -class sax_handler(rpki.xml_utils.sax_handler): - """ - SAX handler for publication protocol. - """ - - pdu = msg - name = "msg" - version = "1" - -class cms_msg(rpki.x509.XML_CMS_object): - """ - Class to hold a CMS-signed publication PDU. - """ - - encoding = "us-ascii" - schema = rpki.relaxng.publication - saxify = sax_handler.saxify diff --git a/rpkid/rpki/rcynic.py b/rpkid/rpki/rcynic.py deleted file mode 100644 index 73394fb8..00000000 --- a/rpkid/rpki/rcynic.py +++ /dev/null @@ -1,275 +0,0 @@ -# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Iterator class to parse the output of an rcynic run. -""" - -__version__ = '$Id$' - -import os -import rpki.x509 -import rpki.exceptions -import rpki.resource_set -from xml.etree.ElementTree import ElementTree - -class UnknownObject(rpki.exceptions.RPKI_Exception): - """ - Unrecognized object in rcynic result cache. - """ - -class NotRsyncURI(rpki.exceptions.RPKI_Exception): - """ - URI is not an rsync URI. - """ - -class rcynic_object(object): - """ - An object read from rcynic cache. - """ - - def __init__(self, filename, **kwargs): - self.filename = filename - for k, v in kwargs.iteritems(): - setattr(self, k, v) - self.obj = self.obj_class(DER_file = filename) - - def __repr__(self): - return "<%s %s %s at 0x%x>" % (self.__class__.__name__, self.uri, self.resources, id(self)) - - def show_attrs(self, *attrs): - """ - Print a bunch of object attributes, quietly ignoring any that - might be missing. - """ - for a in attrs: - try: - print "%s: %s" % (a.capitalize(), getattr(self, a)) - except AttributeError: - pass - - def show(self): - """ - Print common object attributes. - """ - self.show_attrs("filename", "uri", "status", "timestamp") - -class rcynic_certificate(rcynic_object): - """ - A certificate from rcynic cache. 
- """ - - obj_class = rpki.x509.X509 - - def __init__(self, filename, **kwargs): - rcynic_object.__init__(self, filename, **kwargs) - self.notBefore = self.obj.getNotBefore() - self.notAfter = self.obj.getNotAfter() - self.aia_uri = self.obj.get_aia_uri() - self.sia_directory_uri = self.obj.get_sia_directory_uri() - self.manifest_uri = self.obj.get_sia_manifest_uri() - self.resources = self.obj.get_3779resources() - self.is_ca = self.obj.is_CA() - self.serial = self.obj.getSerial() - self.issuer = self.obj.getIssuer() - self.subject = self.obj.getSubject() - self.ski = self.obj.hSKI() - self.aki = self.obj.hAKI() - - def show(self): - """ - Print certificate attributes. - """ - rcynic_object.show(self) - self.show_attrs("notBefore", "notAfter", "aia_uri", "sia_directory_uri", "resources") - -class rcynic_roa(rcynic_object): - """ - A ROA from rcynic cache. - """ - - obj_class = rpki.x509.ROA - - def __init__(self, filename, **kwargs): - rcynic_object.__init__(self, filename, **kwargs) - self.obj.extract() - self.asID = self.obj.get_POW().getASID() - self.prefix_sets = [] - v4, v6 = self.obj.get_POW().getPrefixes() - if v4: - self.prefix_sets.append(rpki.resource_set.roa_prefix_set_ipv4([ - rpki.resource_set.roa_prefix_ipv4(p[0], p[1], p[2]) for p in v4])) - if v6: - self.prefix_sets.append(rpki.resource_set.roa_prefix_set_ipv6([ - rpki.resource_set.roa_prefix_ipv6(p[0], p[1], p[2]) for p in v6])) - self.ee = rpki.x509.X509(POW = self.obj.get_POW().certs()[0]) - self.notBefore = self.ee.getNotBefore() - self.notAfter = self.ee.getNotAfter() - self.aia_uri = self.ee.get_aia_uri() - self.resources = self.ee.get_3779resources() - self.issuer = self.ee.getIssuer() - self.serial = self.ee.getSerial() - self.subject = self.ee.getSubject() - self.aki = self.ee.hAKI() - self.ski = self.ee.hSKI() - - def show(self): - """ - Print ROA attributes. 
- """ - rcynic_object.show(self) - self.show_attrs("notBefore", "notAfter", "aia_uri", "resources", "asID") - if self.prefix_sets: - print "Prefixes:", ",".join(str(i) for i in self.prefix_sets) - -class rcynic_ghostbuster(rcynic_object): - """ - Ghostbuster record from the rcynic cache. - """ - - obj_class = rpki.x509.Ghostbuster - - def __init__(self, *args, **kwargs): - rcynic_object.__init__(self, *args, **kwargs) - self.obj.extract() - self.vcard = self.obj.get_content() - self.ee = rpki.x509.X509(POW = self.obj.get_POW().certs()[0]) - self.notBefore = self.ee.getNotBefore() - self.notAfter = self.ee.getNotAfter() - self.aia_uri = self.ee.get_aia_uri() - self.issuer = self.ee.getIssuer() - self.serial = self.ee.getSerial() - self.subject = self.ee.getSubject() - self.aki = self.ee.hAKI() - self.ski = self.ee.hSKI() - - def show(self): - rcynic_object.show(self) - self.show_attrs("notBefore", "notAfter", "vcard") - -file_name_classes = { - ".cer" : rcynic_certificate, - ".gbr" : rcynic_ghostbuster, - ".roa" : rcynic_roa } - -class rcynic_file_iterator(object): - """ - Iterate over files in an rcynic output tree, yielding a Python - representation of each object found. 
- """ - - def __init__(self, rcynic_root, - authenticated_subdir = "authenticated"): - self.rcynic_dir = os.path.join(rcynic_root, authenticated_subdir) - - def __iter__(self): - for root, dirs, files in os.walk(self.rcynic_dir): # pylint: disable=W0612 - for filename in files: - filename = os.path.join(root, filename) - ext = os.path.splitext(filename)[1] - if ext in file_name_classes: - yield file_name_classes[ext](filename) - -class validation_status_element(object): - def __init__(self, *args, **kwargs): - self.attrs = [] - for k, v in kwargs.iteritems(): - setattr(self, k, v) - # attribute names are saved so that the __repr__ method can - # display the subset of attributes the user specified - self.attrs.append(k) - self._obj = None - - def get_obj(self): - if not self._obj: - self._obj = self.file_class(filename=self.filename, uri=self.uri) - return self._obj - - def __repr__(self): - v = [self.__class__.__name__, 'id=%s' % str(id(self))] - v.extend(['%s=%s' % (x, getattr(self, x)) for x in self.attrs]) - return '<%s>' % (' '.join(v),) - - obj = property(get_obj) - -class rcynic_xml_iterator(object): - """ - Iterate over validation_status entries in the XML output from an - rcynic run. Yields a tuple for each entry: - - timestamp, generation, status, object - - where URI, status, and timestamp are the corresponding values from - the XML element, OK is a boolean indicating whether validation was - considered succesful, and object is a Python representation of the - object in question. If OK is True, object will be from rcynic's - authenticated output tree; otherwise, object will be from rcynic's - unauthenticated output tree. - - Note that it is possible for the same URI to appear in more than one - validation_status element; in such cases, the succesful case (OK - True) should be the last entry (as rcynic will stop trying once it - gets a good copy), but there may be multiple failures, which might - or might not have different status codes. 
- """ - - def __init__(self, rcynic_root, xml_file, - authenticated_old_subdir = "authenticated.old", - unauthenticated_subdir = "unauthenticated"): - self.rcynic_root = rcynic_root - self.xml_file = xml_file - self.authenticated_subdir = os.path.join(rcynic_root, 'authenticated') - self.authenticated_old_subdir = os.path.join(rcynic_root, authenticated_old_subdir) - self.unauthenticated_subdir = os.path.join(rcynic_root, unauthenticated_subdir) - - base_uri = "rsync://" - - def uri_to_filename(self, uri): - if uri.startswith(self.base_uri): - return uri[len(self.base_uri):] - else: - raise NotRsyncURI, "Not an rsync URI %r" % uri - - def __iter__(self): - for validation_status in ElementTree(file=self.xml_file).getroot().getiterator("validation_status"): - timestamp = validation_status.get("timestamp") - status = validation_status.get("status") - uri = validation_status.text.strip() - generation = validation_status.get("generation") - - # determine the path to this object - if status == 'object_accepted': - d = self.authenticated_subdir - elif generation == 'backup': - d = self.authenticated_old_subdir - else: - d = self.unauthenticated_subdir - - filename = os.path.join(d, self.uri_to_filename(uri)) - - ext = os.path.splitext(filename)[1] - if ext in file_name_classes: - yield validation_status_element(timestamp = timestamp, generation = generation, - uri=uri, status = status, filename = filename, - file_class = file_name_classes[ext]) - -def label_iterator(xml_file): - """ - Returns an iterator which contains all defined labels from an rcynic XML - output file. Each item is a tuple of the form - (label, kind, description). - """ - - for label in ElementTree(file=xml_file).find("labels"): - yield label.tag, label.get("kind"), label.text.strip() diff --git a/rpkid/rpki/relaxng.py b/rpkid/rpki/relaxng.py deleted file mode 100644 index 0d8c0d64..00000000 --- a/rpkid/rpki/relaxng.py +++ /dev/null @@ -1,2441 +0,0 @@ -# Automatically generated, do not edit. 
- -import lxml.etree - -## @var left_right -## Parsed RelaxNG left_right schema -left_right = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' - - - - - - - - 1 - - - - - - query - - - - - - - - reply - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1024 - - - - - - - - create - - - - - - set - - - - - - get - - - - - - list - - - - - - destroy - - - - - - - 512000 - - - - - - - 255 - [\-_A-Za-z0-9]+ - - - - - - 4096 - - - - - - 1024 - - - - - - 512000 - [\-,0-9]* - - - - - 512000 - [\-,0-9/.]* - - - - - 512000 - [\-,0-9/:a-fA-F]* - - - - - - - yes - - - - - yes - - - - - yes - - - - - yes - - - - - yes - - - - - yes - - - - - yes - - - - - - - - yes - no - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - yes - - - - rsa - - - - - sha256 - - - - - 2048 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - yes - - - - - yes - - - - - yes - - - - - yes - - - - - yes - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - yes - - - - - yes - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - yes - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - .*Z - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 27 - 27 - - - - - .*Z - - - - - - - - - - - - - - - - - - - - - 64 - [\-0-9A-Za-z_ ]+ - - - - - - - 64 - [0-9A-Fa-f]+ - - - - - - - 512000 - [.,0-9]+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - .*Z - - - - - .*Z - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1024 - - - - - - - - - - - - - - 512000 - - - - - - -''')) - -## @var up_down -## Parsed RelaxNG up_down schema -up_down = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' - - - - - 512000 - [\-,0-9]* - - - - - 512000 - [\-,/.0-9]* - - - - - 512000 - [\-,/:0-9a-fA-F]* - - - - - 1 - 1024 - - - - - 27 - 1024 - - - - - 1 - 1024 - - - - - 10 - 4096 - - - - - 4 - 512000 - - - - - - - 1 - - - - - - - - - - - - - - list - - - - - - list_response - - - - - - issue - - - - - - issue_response - - - - - - revoke - - - - - - revoke_response - - - - - - error_response - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1024 - rsync://.+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 9999 - - - - - - - - - 1024 - - - - - - -''')) - -## @var publication -## Parsed RelaxNG publication schema -publication = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' - - - - - - - - 1 - - - - - - query - - - - - - - - reply - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1024 - - - - - - - - - - - 4096 - - - - - - - - - - - 255 - [\-_A-Za-z0-9/]+ - - - - - - - - - - - - - - set - - - - - - - - - - - set - - - - - - - - - - get - - - - - - - - - - get - - - - - - - - - - - - - - - - - yes - - - - - - - - - - - - - - - - - - - - - - - - create - - - - - - - - - - - - - create - - - - - - - - - - - set - - - - - - - - - - - - - set - - - - - - - - - - - 
get - - - - - - - - - - - get - - - - - - - - - - - - list - - - - - - - - - - list - - - - - - - - - - - - destroy - - - - - - - - - - - destroy - - - - - - - - - - - - publish - - - - - - - - - - - - publish - - - - - - - - - - - withdraw - - - - - - - - - - - withdraw - - - - - - - - - - - - publish - - - - - - - - - - - - publish - - - - - - - - - - - withdraw - - - - - - - - - - - withdraw - - - - - - - - - - - - publish - - - - - - - - - - - - publish - - - - - - - - - - - withdraw - - - - - - - - - - - withdraw - - - - - - - - - - - - publish - - - - - - - - - - - - publish - - - - - - - - - - - withdraw - - - - - - - - - - - withdraw - - - - - - - - - - - - publish - - - - - - - - - - - - publish - - - - - - - - - - - withdraw - - - - - - - - - - - withdraw - - - - - - - - - - - 1024 - - - - - - - - - - - - - 512000 - - - - - - -''')) - -## @var myrpki -## Parsed RelaxNG myrpki schema -myrpki = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' - - - - 2 - - - - 512000 - - - - - 255 - [\-_A-Za-z0-9]+ - - - - - 255 - [\-_A-Za-z0-9/]+ - - - - - 4096 - - - - - - - - 512000 - [\-,0-9]+ - - - - - 512000 - [\-,0-9/.]+ - - - - - 512000 - [\-,0-9/:a-fA-F]+ - - - - - .*Z - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - none - - - offer - - - - referral - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - confirmed - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -''')) - -## @var router_certificate -## Parsed RelaxNG router_certificate schema -router_certificate = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' - - - - 1 - - - - 512000 - - - - - - - - 512000 - [0-9][\-,0-9]* - - - - - 
.*Z - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -''')) - diff --git a/rpkid/rpki/resource_set.py b/rpkid/rpki/resource_set.py deleted file mode 100644 index 2ec19cab..00000000 --- a/rpkid/rpki/resource_set.py +++ /dev/null @@ -1,1148 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -Classes dealing with sets of resources. - -The basic mechanics of a resource set are the same for any of the -resources we handle (ASNs, IPv4 addresses, or IPv6 addresses), so we -can provide the same operations on any of them, even though the -underlying details vary. - -We also provide some basic set operations (union, intersection, etc). -""" - -import re -import math -import rpki.exceptions -import rpki.POW - -## @var inherit_token -# Token used to indicate inheritance in read and print syntax. 
- -inherit_token = "" - -re_asn_range = re.compile("^([0-9]+)-([0-9]+)$") -re_address_range = re.compile("^([0-9:.a-fA-F]+)-([0-9:.a-fA-F]+)$") -re_prefix_with_maxlen = re.compile("^([0-9:.a-fA-F]+)/([0-9]+)-([0-9]+)$") -re_prefix = re.compile("^([0-9:.a-fA-F]+)/([0-9]+)$") - -class resource_range(object): - """ - Generic resource range type. Assumes underlying type is some kind - of integer. - - This is a virtual class. You probably don't want to use this type - directly. - """ - - def __init__(self, range_min, range_max): - assert range_min.__class__ is range_max.__class__, \ - "Type mismatch, %r doesn't match %r" % (range_min.__class__, range_max.__class__) - assert range_min <= range_max, "Mis-ordered range: %s before %s" % (range_min, range_max) - self.min = range_min - self.max = range_max - - def __cmp__(self, other): - assert self.__class__ is other.__class__, \ - "Type mismatch, comparing %r with %r" % (self.__class__, other.__class__) - return cmp(self.min, other.min) or cmp(self.max, other.max) - -class resource_range_as(resource_range): - """ - Range of Autonomous System Numbers. - - Denotes a single ASN by a range whose min and max values are - identical. - """ - - ## @var datum_type - # Type of underlying data (min and max). - - datum_type = long - - def __init__(self, range_min, range_max): - resource_range.__init__(self, - long(range_min) if isinstance(range_min, int) else range_min, - long(range_max) if isinstance(range_max, int) else range_max) - - def __str__(self): - """ - Convert a resource_range_as to string format. - """ - if self.min == self.max: - return str(self.min) - else: - return str(self.min) + "-" + str(self.max) - - @classmethod - def parse_str(cls, x): - """ - Parse ASN resource range from text (eg, XML attributes). 
- """ - r = re_asn_range.match(x) - if r: - return cls(long(r.group(1)), long(r.group(2))) - else: - return cls(long(x), long(x)) - - @classmethod - def from_strings(cls, a, b = None): - """ - Construct ASN range from strings. - """ - if b is None: - b = a - return cls(long(a), long(b)) - -class resource_range_ip(resource_range): - """ - Range of (generic) IP addresses. - - Prefixes are converted to ranges on input, and ranges that can be - represented as prefixes are written as prefixes on output. - - This is a virtual class. You probably don't want to use it - directly. - """ - - ## @var datum_type - # Type of underlying data (min and max). - - datum_type = rpki.POW.IPAddress - - def prefixlen(self): - """ - Determine whether a resource_range_ip can be expressed as a - prefix. Returns prefix length if it can, otherwise raises - MustBePrefix exception. - """ - mask = self.min ^ self.max - if self.min & mask != 0: - raise rpki.exceptions.MustBePrefix - prefixlen = self.min.bits - while mask & 1: - prefixlen -= 1 - mask >>= 1 - if mask: - raise rpki.exceptions.MustBePrefix - return prefixlen - - @property - def can_be_prefix(self): - """ - Boolean property indicating whether this range can be expressed as - a prefix. - - This just calls .prefixlen() to do the work, so that we can keep - the logic in one place. This property is useful primarily in - context where catching an exception isn't practical. - """ - try: - self.prefixlen() - return True - except rpki.exceptions.MustBePrefix: - return False - - def __str__(self): - """ - Convert a resource_range_ip to string format. - """ - try: - return str(self.min) + "/" + str(self.prefixlen()) - except rpki.exceptions.MustBePrefix: - return str(self.min) + "-" + str(self.max) - - @classmethod - def parse_str(cls, x): - """ - Parse IP address range or prefix from text (eg, XML attributes). 
- """ - r = re_address_range.match(x) - if r: - return cls.from_strings(r.group(1), r.group(2)) - r = re_prefix.match(x) - if r: - a = rpki.POW.IPAddress(r.group(1)) - if cls is resource_range_ip and a.version == 4: - cls = resource_range_ipv4 - if cls is resource_range_ip and a.version == 6: - cls = resource_range_ipv6 - return cls.make_prefix(a, int(r.group(2))) - raise rpki.exceptions.BadIPResource, 'Bad IP resource "%s"' % (x) - - @classmethod - def make_prefix(cls, prefix, prefixlen): - """ - Construct a resource range corresponding to a prefix. - """ - assert isinstance(prefix, rpki.POW.IPAddress) and isinstance(prefixlen, (int, long)) - assert prefixlen >= 0 and prefixlen <= prefix.bits, "Nonsensical prefix length: %s" % prefixlen - mask = (1 << (prefix.bits - prefixlen)) - 1 - assert (prefix & mask) == 0, "Resource not in canonical form: %s/%s" % (prefix, prefixlen) - return cls(prefix, rpki.POW.IPAddress(prefix | mask)) - - def chop_into_prefixes(self, result): - """ - Chop up a resource_range_ip into ranges that can be represented as - prefixes. - """ - try: - self.prefixlen() - result.append(self) - except rpki.exceptions.MustBePrefix: - range_min = self.min - range_max = self.max - while range_max >= range_min: - bits = int(math.log(long(range_max - range_min + 1), 2)) - while True: - mask = ~(~0 << bits) - assert range_min + mask <= range_max - if range_min & mask == 0: - break - assert bits > 0 - bits -= 1 - result.append(self.make_prefix(range_min, range_min.bits - bits)) - range_min = range_min + mask + 1 - - @classmethod - def from_strings(cls, a, b = None): - """ - Construct IP address range from strings. 
- """ - if b is None: - b = a - a = rpki.POW.IPAddress(a) - b = rpki.POW.IPAddress(b) - if a.version != b.version: - raise TypeError - if cls is resource_range_ip: - if a.version == 4: - return resource_range_ipv4(a, b) - if a.version == 6: - return resource_range_ipv6(a, b) - elif a.version == cls.version: - return cls(a, b) - else: - raise TypeError - -class resource_range_ipv4(resource_range_ip): - """ - Range of IPv4 addresses. - """ - - version = 4 - -class resource_range_ipv6(resource_range_ip): - """ - Range of IPv6 addresses. - """ - - version = 6 - -def _rsplit(rset, that): - """ - Utility function to split a resource range into two resource ranges. - """ - - this = rset.pop(0) - - assert type(this) is type(that), "type(this) [%r] is not type(that) [%r]" % (type(this), type(that)) - - assert type(this.min) is type(that.min), "type(this.min) [%r] is not type(that.min) [%r]" % (type(this.min), type(that.min)) - assert type(this.min) is type(this.max), "type(this.min) [%r] is not type(this.max) [%r]" % (type(this.min), type(this.max)) - assert type(that.min) is type(that.max), "type(that.min) [%r] is not type(that.max) [%r]" % (type(that.min), type(that.max)) - - if this.min < that.min: - rset.insert(0, type(this)(this.min, type(that.min)(that.min - 1))) - rset.insert(1, type(this)(that.min, this.max)) - - else: - assert this.max > that.max - rset.insert(0, type(this)(this.min, that.max)) - rset.insert(1, type(this)(type(that.max)(that.max + 1), this.max)) - -class resource_set(list): - """ - Generic resource set, a list subclass containing resource ranges. - - This is a virtual class. You probably don't want to use it - directly. - """ - - ## @var inherit - # Boolean indicating whether this resource_set uses RFC 3779 inheritance. - - inherit = False - - ## @var canonical - # Whether this resource_set is currently in canonical form. - - canonical = False - - def __init__(self, ini = None, allow_overlap = False): - """ - Initialize a resource_set. 
- """ - list.__init__(self) - if isinstance(ini, (int, long)): - ini = str(ini) - if ini is inherit_token: - self.inherit = True - elif isinstance(ini, str) and len(ini): - self.extend(self.parse_str(s) for s in ini.split(",")) - elif isinstance(ini, list): - self.extend(ini) - elif ini is not None and ini != "": - raise ValueError("Unexpected initializer: %s" % str(ini)) - self.canonize(allow_overlap) - - def canonize(self, allow_overlap = False): - """ - Whack this resource_set into canonical form. - """ - assert not self.inherit or len(self) == 0 - if not self.canonical: - self.sort() - i = 0 - while i + 1 < len(self): - if allow_overlap and self[i].max + 1 >= self[i+1].min: - self[i] = type(self[i])(self[i].min, max(self[i].max, self[i+1].max)) - del self[i+1] - elif self[i].max + 1 == self[i+1].min: - self[i] = type(self[i])(self[i].min, self[i+1].max) - del self[i+1] - else: - i += 1 - for i in xrange(0, len(self) - 1): - if self[i].max >= self[i+1].min: - raise rpki.exceptions.ResourceOverlap("Resource overlap: %s %s" % (self[i], self[i+1])) - self.canonical = True - - def append(self, item): - """ - Wrapper around list.append() (q.v.) to reset canonical flag. - """ - list.append(self, item) - self.canonical = False - - def extend(self, item): - """ - Wrapper around list.extend() (q.v.) to reset canonical flag. - """ - list.extend(self, item) - self.canonical = False - - def __str__(self): - """ - Convert a resource_set to string format. - """ - if self.inherit: - return inherit_token - else: - return ",".join(str(x) for x in self) - - def _comm(self, other): - """ - Like comm(1), sort of. - - Returns a tuple of three resource sets: resources only in self, - resources only in other, and resources in both. Used (not very - efficiently) as the basis for most set operations on resource - sets. 
- """ - - assert not self.inherit - assert type(self) is type(other), "Type mismatch %r %r" % (type(self), type(other)) - set1 = type(self)(self) # clone and whack into canonical form - set2 = type(other)(other) # ditto - only1, only2, both = [], [], [] - while set1 or set2: - if set1 and (not set2 or set1[0].max < set2[0].min): - only1.append(set1.pop(0)) - elif set2 and (not set1 or set2[0].max < set1[0].min): - only2.append(set2.pop(0)) - elif set1[0].min < set2[0].min: - _rsplit(set1, set2[0]) - elif set2[0].min < set1[0].min: - _rsplit(set2, set1[0]) - elif set1[0].max < set2[0].max: - _rsplit(set2, set1[0]) - elif set2[0].max < set1[0].max: - _rsplit(set1, set2[0]) - else: - assert set1[0].min == set2[0].min and set1[0].max == set2[0].max - both.append(set1.pop(0)) - set2.pop(0) - return type(self)(only1), type(self)(only2), type(self)(both) - - def union(self, other): - """ - Set union for resource sets. - """ - - assert not self.inherit - assert type(self) is type(other), "Type mismatch: %r %r" % (type(self), type(other)) - set1 = type(self)(self) # clone and whack into canonical form - set2 = type(other)(other) # ditto - result = [] - while set1 or set2: - if set1 and (not set2 or set1[0].max < set2[0].min): - result.append(set1.pop(0)) - elif set2 and (not set1 or set2[0].max < set1[0].min): - result.append(set2.pop(0)) - else: - this = set1.pop(0) - that = set2.pop(0) - assert type(this) is type(that) - range_min = min(this.min, that.min) - range_max = max(this.max, that.max) - result.append(type(this)(range_min, range_max)) - while set1 and set1[0].max <= range_max: - assert set1[0].min >= range_min - del set1[0] - while set2 and set2[0].max <= range_max: - assert set2[0].min >= range_min - del set2[0] - return type(self)(result) - - __or__ = union - - def intersection(self, other): - """ - Set intersection for resource sets. 
- """ - return self._comm(other)[2] - - __and__ = intersection - - def difference(self, other): - """ - Set difference for resource sets. - """ - return self._comm(other)[0] - - __sub__ = difference - - def symmetric_difference(self, other): - """ - Set symmetric difference (XOR) for resource sets. - """ - com = self._comm(other) - return com[0] | com[1] - - __xor__ = symmetric_difference - - def contains(self, item): - """ - Set membership test for resource sets. - """ - assert not self.inherit - self.canonize() - if not self: - return False - if type(item) is type(self[0]): - range_min = item.min - range_max = item.max - else: - range_min = item - range_max = item - lo = 0 - hi = len(self) - while lo < hi: - mid = (lo + hi) / 2 - if self[mid].max < range_max: - lo = mid + 1 - else: - hi = mid - return lo < len(self) and self[lo].min <= range_min and self[lo].max >= range_max - - __contains__ = contains - - def issubset(self, other): - """ - Test whether self is a subset (possibly improper) of other. - """ - for i in self: - if not other.contains(i): - return False - return True - - __le__ = issubset - - def issuperset(self, other): - """ - Test whether self is a superset (possibly improper) of other. - """ - return other.issubset(self) - - __ge__ = issuperset - - def __lt__(self, other): - return not self.issuperset(other) - - def __gt__(self, other): - return not self.issubset(other) - - def __ne__(self, other): - """ - A set with the inherit bit set is always unequal to any other set, because - we can't know the answer here. This is also consistent with __nonzero__ - which returns True for inherit sets, and False for empty sets. - """ - return self.inherit or other.inherit or list.__ne__(self, other) - - def __eq__(self, other): - return not self.__ne__(other) - - def __nonzero__(self): - """ - Tests whether or not this set is empty. Note that sets with the inherit - bit set are considered non-empty, despite having zero length. 
- """ - return self.inherit or len(self) - - @classmethod - def from_sql(cls, sql, query, args = None): - """ - Create resource set from an SQL query. - - sql is an object that supports execute() and fetchall() methods - like a DB API 2.0 cursor object. - - query is an SQL query that returns a sequence of (min, max) pairs. - """ - - sql.execute(query, args) - return cls(ini = [cls.range_type(cls.range_type.datum_type(b), - cls.range_type.datum_type(e)) - for (b, e) in sql.fetchall()]) - - @classmethod - def from_django(cls, iterable): - """ - Create resource set from a Django query. - - iterable is something which returns (min, max) pairs. - """ - - return cls(ini = [cls.range_type(cls.range_type.datum_type(b), - cls.range_type.datum_type(e)) - for (b, e) in iterable]) - - @classmethod - def parse_str(cls, s): - """ - Parse resource set from text string (eg, XML attributes). This is - a backwards compatability wrapper, real functionality is now part - of the range classes. - """ - return cls.range_type.parse_str(s) - -class resource_set_as(resource_set): - """ - Autonomous System Number resource set. - """ - - ## @var range_type - # Type of range underlying this type of resource_set. - - range_type = resource_range_as - -class resource_set_ip(resource_set): - """ - (Generic) IP address resource set. - - This is a virtual class. You probably don't want to use it - directly. - """ - - def to_roa_prefix_set(self): - """ - Convert from a resource set to a ROA prefix set. - """ - prefix_ranges = [] - for r in self: - r.chop_into_prefixes(prefix_ranges) - return self.roa_prefix_set_type([ - self.roa_prefix_set_type.prefix_type(r.min, r.prefixlen()) - for r in prefix_ranges]) - -class resource_set_ipv4(resource_set_ip): - """ - IPv4 address resource set. - """ - - ## @var range_type - # Type of range underlying this type of resource_set. - - range_type = resource_range_ipv4 - -class resource_set_ipv6(resource_set_ip): - """ - IPv6 address resource set. 
- """ - - ## @var range_type - # Type of range underlying this type of resource_set. - - range_type = resource_range_ipv6 - -class resource_bag(object): - """ - Container to simplify passing around the usual triple of ASN, IPv4, - and IPv6 resource sets. - """ - - ## @var asn - # Set of Autonomous System Number resources. - - ## @var v4 - # Set of IPv4 resources. - - ## @var v6 - # Set of IPv6 resources. - - ## @var valid_until - # Expiration date of resources, for setting certificate notAfter field. - - def __init__(self, asn = None, v4 = None, v6 = None, valid_until = None): - self.asn = asn or resource_set_as() - self.v4 = v4 or resource_set_ipv4() - self.v6 = v6 or resource_set_ipv6() - self.valid_until = valid_until - - def oversized(self, other): - """ - True iff self is oversized with respect to other. - """ - return not self.asn.issubset(other.asn) or \ - not self.v4.issubset(other.v4) or \ - not self.v6.issubset(other.v6) - - def undersized(self, other): - """ - True iff self is undersized with respect to other. - """ - return not other.asn.issubset(self.asn) or \ - not other.v4.issubset(self.v4) or \ - not other.v6.issubset(self.v6) - - @classmethod - def from_inheritance(cls): - """ - Build a resource bag that just inherits everything from its - parent. - """ - self = cls() - self.asn = resource_set_as() - self.v4 = resource_set_ipv4() - self.v6 = resource_set_ipv6() - self.asn.inherit = True - self.v4.inherit = True - self.v6.inherit = True - return self - - @classmethod - def from_str(cls, text, allow_overlap = False): - """ - Parse a comma-separated text string into a resource_bag. Not - particularly efficient, fix that if and when it becomes an issue. - """ - asns = [] - v4s = [] - v6s = [] - for word in text.split(","): - if "." 
in word: - v4s.append(word) - elif ":" in word: - v6s.append(word) - else: - asns.append(word) - return cls(asn = resource_set_as(",".join(asns), allow_overlap) if asns else None, - v4 = resource_set_ipv4(",".join(v4s), allow_overlap) if v4s else None, - v6 = resource_set_ipv6(",".join(v6s), allow_overlap) if v6s else None) - - @classmethod - def from_POW_rfc3779(cls, resources): - """ - Build a resource_bag from data returned by - rpki.POW.X509.getRFC3779(). - - The conversion to long for v4 and v6 is (intended to be) - temporary: in the long run, we should be using rpki.POW.IPAddress - rather than long here. - """ - asn = inherit_token if resources[0] == "inherit" else [resource_range_as( r[0], r[1]) for r in resources[0] or ()] - v4 = inherit_token if resources[1] == "inherit" else [resource_range_ipv4(r[0], r[1]) for r in resources[1] or ()] - v6 = inherit_token if resources[2] == "inherit" else [resource_range_ipv6(r[0], r[1]) for r in resources[2] or ()] - return cls(resource_set_as(asn) if asn else None, - resource_set_ipv4(v4) if v4 else None, - resource_set_ipv6(v6) if v6 else None) - - def empty(self): - """ - True iff all resource sets in this bag are empty. - """ - return not self.asn and not self.v4 and not self.v6 - - def __nonzero__(self): - return not self.empty() - - def __eq__(self, other): - return self.asn == other.asn and \ - self.v4 == other.v4 and \ - self.v6 == other.v6 and \ - self.valid_until == other.valid_until - - def __ne__(self, other): - return not (self == other) - - def intersection(self, other): - """ - Compute intersection with another resource_bag. valid_until - attribute (if any) inherits from self. - """ - return self.__class__(self.asn & other.asn, - self.v4 & other.v4, - self.v6 & other.v6, - self.valid_until) - - __and__ = intersection - - def union(self, other): - """ - Compute union with another resource_bag. valid_until attribute - (if any) inherits from self. 
- """ - return self.__class__(self.asn | other.asn, - self.v4 | other.v4, - self.v6 | other.v6, - self.valid_until) - - __or__ = union - - def difference(self, other): - """ - Compute difference against another resource_bag. valid_until - attribute (if any) inherits from self - """ - return self.__class__(self.asn - other.asn, - self.v4 - other.v4, - self.v6 - other.v6, - self.valid_until) - - __sub__ = difference - - def symmetric_difference(self, other): - """ - Compute symmetric difference against another resource_bag. - valid_until attribute (if any) inherits from self - """ - return self.__class__(self.asn ^ other.asn, - self.v4 ^ other.v4, - self.v6 ^ other.v6, - self.valid_until) - - __xor__ = symmetric_difference - - def __str__(self): - s = "" - if self.asn: - s += "ASN: %s" % self.asn - if self.v4: - if s: - s += ", " - s += "V4: %s" % self.v4 - if self.v6: - if s: - s += ", " - s += "V6: %s" % self.v6 - return s - - def __iter__(self): - for r in self.asn: - yield r - for r in self.v4: - yield r - for r in self.v6: - yield r - -# Sadly, there are enough differences between RFC 3779 and the data -# structures in the latest proposed ROA format that we can't just use -# the RFC 3779 code for ROAs. So we need a separate set of classes -# that are similar in concept but different in detail, with conversion -# functions. Such is life. I suppose it might be possible to do this -# with multiple inheritance, but that's probably more bother than it's -# worth. - -class roa_prefix(object): - """ - ROA prefix. This is similar to the resource_range_ip class, but - differs in that it only represents prefixes, never ranges, and - includes the maximum prefix length as an additional value. - - This is a virtual class, you probably don't want to use it directly. - """ - - ## @var prefix - # The prefix itself, an IP address with bits beyond the prefix - # length zeroed. - - ## @var prefixlen - # (Minimum) prefix length. - - ## @var max_prefixlen - # Maxmimum prefix length. 
- - def __init__(self, prefix, prefixlen, max_prefixlen = None): - """ - Initialize a ROA prefix. max_prefixlen is optional and defaults - to prefixlen. max_prefixlen must not be smaller than prefixlen. - """ - if max_prefixlen is None: - max_prefixlen = prefixlen - assert max_prefixlen >= prefixlen, "Bad max_prefixlen: %d must not be shorter than %d" % (max_prefixlen, prefixlen) - self.prefix = prefix - self.prefixlen = prefixlen - self.max_prefixlen = max_prefixlen - - def __cmp__(self, other): - """ - Compare two ROA prefix objects. Comparision is based on prefix, - prefixlen, and max_prefixlen, in that order. - """ - assert self.__class__ is other.__class__ - return (cmp(self.prefix, other.prefix) or - cmp(self.prefixlen, other.prefixlen) or - cmp(self.max_prefixlen, other.max_prefixlen)) - - def __str__(self): - """ - Convert a ROA prefix to string format. - """ - if self.prefixlen == self.max_prefixlen: - return str(self.prefix) + "/" + str(self.prefixlen) - else: - return str(self.prefix) + "/" + str(self.prefixlen) + "-" + str(self.max_prefixlen) - - def to_resource_range(self): - """ - Convert this ROA prefix to the equivilent resource_range_ip - object. This is an irreversable transformation because it loses - the max_prefixlen attribute, nothing we can do about that. - """ - return self.range_type.make_prefix(self.prefix, self.prefixlen) - - def min(self): - """ - Return lowest address covered by prefix. - """ - return self.prefix - - def max(self): - """ - Return highest address covered by prefix. - """ - return self.prefix | ((1 << (self.prefix.bits - self.prefixlen)) - 1) - - def to_POW_roa_tuple(self): - """ - Convert a resource_range_ip to rpki.POW.ROA.setPrefixes() format. - """ - return self.prefix, self.prefixlen, self.max_prefixlen - - @classmethod - def parse_str(cls, x): - """ - Parse ROA prefix from text (eg, an XML attribute). 
- """ - r = re_prefix_with_maxlen.match(x) - if r: - return cls(rpki.POW.IPAddress(r.group(1)), int(r.group(2)), int(r.group(3))) - r = re_prefix.match(x) - if r: - return cls(rpki.POW.IPAddress(r.group(1)), int(r.group(2))) - raise rpki.exceptions.BadROAPrefix, 'Bad ROA prefix "%s"' % (x) - -class roa_prefix_ipv4(roa_prefix): - """ - IPv4 ROA prefix. - """ - - ## @var range_type - # Type of corresponding resource_range_ip. - - range_type = resource_range_ipv4 - -class roa_prefix_ipv6(roa_prefix): - """ - IPv6 ROA prefix. - """ - - ## @var range_type - # Type of corresponding resource_range_ip. - - range_type = resource_range_ipv6 - -class roa_prefix_set(list): - """ - Set of ROA prefixes, analogous to the resource_set_ip class. - """ - - def __init__(self, ini = None): - """ - Initialize a ROA prefix set. - """ - list.__init__(self) - if isinstance(ini, str) and len(ini): - self.extend(self.parse_str(s) for s in ini.split(",")) - elif isinstance(ini, (list, tuple)): - self.extend(ini) - else: - assert ini is None or ini == "", "Unexpected initializer: %s" % str(ini) - self.sort() - - def __str__(self): - """ - Convert a ROA prefix set to string format. - """ - return ",".join(str(x) for x in self) - - @classmethod - def parse_str(cls, s): - """ - Parse ROA prefix from text (eg, an XML attribute). - This method is a backwards compatability shim. - """ - return cls.prefix_type.parse_str(s) - - def to_resource_set(self): - """ - Convert a ROA prefix set to a resource set. This is an - irreversable transformation. We have to compute a union here - because ROA prefix sets can include overlaps, while RFC 3779 - resource sets cannot. This is ugly, and there is almost certainly - a more efficient way to do this, but start by getting the output - right before worrying about making it fast or pretty. 
- """ - r = self.resource_set_type() - s = self.resource_set_type() - s.append(None) - for p in self: - s[0] = p.to_resource_range() - r |= s - return r - - @classmethod - def from_sql(cls, sql, query, args = None): - """ - Create ROA prefix set from an SQL query. - - sql is an object that supports execute() and fetchall() methods - like a DB API 2.0 cursor object. - - query is an SQL query that returns a sequence of (prefix, - prefixlen, max_prefixlen) triples. - """ - - sql.execute(query, args) - return cls([cls.prefix_type(rpki.POW.IPAddress(x), int(y), int(z)) - for (x, y, z) in sql.fetchall()]) - - @classmethod - def from_django(cls, iterable): - """ - Create ROA prefix set from a Django query. - - iterable is something which returns (prefix, prefixlen, - max_prefixlen) triples. - """ - - return cls([cls.prefix_type(rpki.POW.IPAddress(x), int(y), int(z)) - for (x, y, z) in iterable]) - - def to_POW_roa_tuple(self): - """ - Convert ROA prefix set to form used by rpki.POW.ROA.setPrefixes(). - """ - if self: - return tuple(a.to_POW_roa_tuple() for a in self) - else: - return None - - -class roa_prefix_set_ipv4(roa_prefix_set): - """ - Set of IPv4 ROA prefixes. - """ - - ## @var prefix_type - # Type of underlying roa_prefix. - - prefix_type = roa_prefix_ipv4 - - ## @var resource_set_type - # Type of corresponding resource_set_ip class. - - resource_set_type = resource_set_ipv4 - -# Fix back link from resource_set to roa_prefix -resource_set_ipv4.roa_prefix_set_type = roa_prefix_set_ipv4 - -class roa_prefix_set_ipv6(roa_prefix_set): - """ - Set of IPv6 ROA prefixes. - """ - - ## @var prefix_type - # Type of underlying roa_prefix. - - prefix_type = roa_prefix_ipv6 - - ## @var resource_set_type - # Type of corresponding resource_set_ip class. 
- - resource_set_type = resource_set_ipv6 - -# Fix back link from resource_set to roa_prefix -resource_set_ipv6.roa_prefix_set_type = roa_prefix_set_ipv6 - -class roa_prefix_bag(object): - """ - Container to simplify passing around the combination of an IPv4 ROA - prefix set and an IPv6 ROA prefix set. - """ - - ## @var v4 - # Set of IPv4 prefixes. - - ## @var v6 - # Set of IPv6 prefixes. - - def __init__(self, v4 = None, v6 = None): - self.v4 = v4 or roa_prefix_set_ipv4() - self.v6 = v6 or roa_prefix_set_ipv6() - - def __eq__(self, other): - return self.v4 == other.v4 and self.v6 == other.v6 - - def __ne__(self, other): - return not (self == other) - - -# Test suite for set operations. - -if __name__ == "__main__": - - def testprefix(v): - return " (%s)" % v.to_roa_prefix_set() if isinstance(v, resource_set_ip) else "" - - def test1(t, s1, s2): - if isinstance(s1, str) and isinstance(s2, str): - print "x: ", s1 - print "y: ", s2 - r1 = t(s1) - r2 = t(s2) - print "x: ", r1, testprefix(r1) - print "y: ", r2, testprefix(r2) - v1 = r1._comm(r2) - v2 = r2._comm(r1) - assert v1[0] == v2[1] and v1[1] == v2[0] and v1[2] == v2[2] - for i in r1: assert i in r1 and i.min in r1 and i.max in r1 - for i in r2: assert i in r2 and i.min in r2 and i.max in r2 - for i in v1[0]: assert i in r1 and i not in r2 - for i in v1[1]: assert i not in r1 and i in r2 - for i in v1[2]: assert i in r1 and i in r2 - v1 = r1 | r2 - v2 = r2 | r1 - assert v1 == v2 - print "x|y:", v1, testprefix(v1) - v1 = r1 - r2 - v2 = r2 - r1 - print "x-y:", v1, testprefix(v1) - print "y-x:", v2, testprefix(v2) - v1 = r1 ^ r2 - v2 = r2 ^ r1 - assert v1 == v2 - print "x^y:", v1, testprefix(v1) - v1 = r1 & r2 - v2 = r2 & r1 - assert v1 == v2 - print "x&y:", v1, testprefix(v1) - - def test2(t, s1, s2): - print "x: ", s1 - print "y: ", s2 - r1 = t(s1) - r2 = t(s2) - print "x: ", r1 - print "y: ", r2 - print "x>y:", (r1 > r2) - print "xy:", (r1 > r2) - print "x \ - os.stat(os.path.join(self.rpki_root_dir, 
self.rpki_subject_cert)).st_mtime - - def get_subject_cert(self): - filename = os.path.join(self.rpki_root_dir, self.rpki_subject_cert) - try: - x = rpki.x509.X509(Auto_file = filename) - rpki.log.debug("Read subject cert %s" % filename) - return x - except IOError: - return None - - def set_subject_cert(self, cert): - filename = os.path.join(self.rpki_root_dir, self.rpki_subject_cert) - rpki.log.debug("Writing subject cert %s, SKI %s" % (filename, cert.hSKI())) - f = open(filename, "wb") - f.write(cert.get_DER()) - f.close() - - def del_subject_cert(self): - filename = os.path.join(self.rpki_root_dir, self.rpki_subject_cert) - rpki.log.debug("Deleting subject cert %s" % filename) - os.remove(filename) - - def get_subject_pkcs10(self): - try: - x = rpki.x509.PKCS10(Auto_file = self.rpki_subject_pkcs10) - rpki.log.debug("Read subject PKCS #10 %s" % self.rpki_subject_pkcs10) - return x - except IOError: - return None - - def set_subject_pkcs10(self, pkcs10): - rpki.log.debug("Writing subject PKCS #10 %s" % self.rpki_subject_pkcs10) - f = open(self.rpki_subject_pkcs10, "wb") - f.write(pkcs10.get_DER()) - f.close() - - def del_subject_pkcs10(self): - rpki.log.debug("Deleting subject PKCS #10 %s" % self.rpki_subject_pkcs10) - try: - os.remove(self.rpki_subject_pkcs10) - except OSError: - pass - - def issue_subject_cert_maybe(self, new_pkcs10): - now = rpki.sundial.now() - subject_cert = self.get_subject_cert() - old_pkcs10 = self.get_subject_pkcs10() - if new_pkcs10 is not None and new_pkcs10 != old_pkcs10: - self.set_subject_pkcs10(new_pkcs10) - if subject_cert is not None: - rpki.log.debug("PKCS #10 changed, regenerating subject certificate") - self.revoke_subject_cert(now) - subject_cert = None - if subject_cert is not None and subject_cert.getNotAfter() <= now + self.rpki_subject_regen: - rpki.log.debug("Subject certificate has reached expiration threshold, regenerating") - self.revoke_subject_cert(now) - subject_cert = None - if subject_cert is not None and 
self.root_newer_than_subject(): - rpki.log.debug("Root certificate has changed, regenerating subject") - self.revoke_subject_cert(now) - subject_cert = None - self.get_root_cert() - if subject_cert is not None: - return subject_cert - pkcs10 = old_pkcs10 if new_pkcs10 is None else new_pkcs10 - if pkcs10 is None: - rpki.log.debug("No PKCS #10 request, can't generate subject certificate yet") - return None - resources = self.rpki_root_cert.get_3779resources() - notAfter = now + self.rpki_subject_lifetime - rpki.log.info("Generating subject cert %s with resources %s, expires %s" % ( - self.rpki_base_uri + self.rpki_subject_cert, resources, notAfter)) - req_key = pkcs10.getPublicKey() - req_sia = pkcs10.get_SIA() - self.next_serial_number() - subject_cert = self.rpki_root_cert.issue( - keypair = self.rpki_root_key, - subject_key = req_key, - serial = self.serial_number, - sia = req_sia, - aia = self.rpki_root_cert_uri, - crldp = self.rpki_base_uri + self.rpki_root_crl, - resources = resources, - notBefore = now, - notAfter = notAfter) - self.set_subject_cert(subject_cert) - self.generate_crl_and_manifest(now) - return subject_cert - - def generate_crl_and_manifest(self, now): - subject_cert = self.get_subject_cert() - self.next_serial_number() - self.next_crl_number() - while self.revoked and self.revoked[0][1] + 2 * self.rpki_subject_regen < now: - del self.revoked[0] - crl = rpki.x509.CRL.generate( - keypair = self.rpki_root_key, - issuer = self.rpki_root_cert, - serial = self.crl_number, - thisUpdate = now, - nextUpdate = now + self.rpki_subject_regen, - revokedCertificates = self.revoked) - rpki.log.debug("Writing CRL %s" % os.path.join(self.rpki_root_dir, self.rpki_root_crl)) - f = open(os.path.join(self.rpki_root_dir, self.rpki_root_crl), "wb") - f.write(crl.get_DER()) - f.close() - manifest_content = [(self.rpki_root_crl, crl)] - if subject_cert is not None: - manifest_content.append((self.rpki_subject_cert, subject_cert)) - manifest_resources = 
rpki.resource_set.resource_bag.from_inheritance() - manifest_keypair = rpki.x509.RSA.generate() - manifest_cert = self.rpki_root_cert.issue( - keypair = self.rpki_root_key, - subject_key = manifest_keypair.get_public(), - serial = self.serial_number, - sia = (None, None, self.rpki_base_uri + self.rpki_root_manifest), - aia = self.rpki_root_cert_uri, - crldp = self.rpki_base_uri + self.rpki_root_crl, - resources = manifest_resources, - notBefore = now, - notAfter = now + self.rpki_subject_lifetime, - is_ca = False) - manifest = rpki.x509.SignedManifest.build( - serial = self.crl_number, - thisUpdate = now, - nextUpdate = now + self.rpki_subject_regen, - names_and_objs = manifest_content, - keypair = manifest_keypair, - certs = manifest_cert) - rpki.log.debug("Writing manifest %s" % os.path.join(self.rpki_root_dir, self.rpki_root_manifest)) - f = open(os.path.join(self.rpki_root_dir, self.rpki_root_manifest), "wb") - f.write(manifest.get_DER()) - f.close() - - def revoke_subject_cert(self, now): - self.revoked.append((self.get_subject_cert().getSerial(), now)) - - def compose_response(self, r_msg, pkcs10 = None): - subject_cert = self.issue_subject_cert_maybe(pkcs10) - rc = rpki.up_down.class_elt() - rc.class_name = self.rpki_class_name - rc.cert_url = rpki.up_down.multi_uri(self.rpki_root_cert_uri) - rc.from_resource_bag(self.rpki_root_cert.get_3779resources()) - rc.issuer = self.rpki_root_cert - r_msg.payload.classes.append(rc) - if subject_cert is not None: - rc.certs.append(rpki.up_down.certificate_elt()) - rc.certs[0].cert_url = rpki.up_down.multi_uri(self.rpki_base_uri + self.rpki_subject_cert) - rc.certs[0].cert = subject_cert - - def up_down_handler(self, query, path, cb): - try: - q_cms = cms_msg(DER = query) - q_msg = q_cms.unwrap((self.bpki_ta, self.child_bpki_cert)) - self.cms_timestamp = q_cms.check_replay(self.cms_timestamp, path) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - rpki.log.traceback() - return cb(400, reason = 
"Could not process PDU: %s" % e) - - def done(r_msg): - cb(200, body = cms_msg().wrap( - r_msg, self.rootd_bpki_key, self.rootd_bpki_cert, - self.rootd_bpki_crl if self.include_bpki_crl else None)) - - try: - q_msg.serve_top_level(None, done) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - rpki.log.traceback() - try: - done(q_msg.serve_error(e)) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - rpki.log.traceback() - cb(500, reason = "Could not process PDU: %s" % e) - - - def next_crl_number(self): - if self.crl_number is None: - try: - crl = rpki.x509.CRL(DER_file = os.path.join(self.rpki_root_dir, self.rpki_root_crl)) - self.crl_number = crl.getCRLNumber() - except: # pylint: disable=W0702 - self.crl_number = 0 - self.crl_number += 1 - return self.crl_number - - - def next_serial_number(self): - if self.serial_number is None: - subject_cert = self.get_subject_cert() - if subject_cert is not None: - self.serial_number = subject_cert.getSerial() + 1 - else: - self.serial_number = 0 - self.serial_number += 1 - return self.serial_number - - - def __init__(self): - - global rootd - rootd = self # Gross, but simpler than what we'd have to do otherwise - - self.rpki_root_cert = None - self.serial_number = None - self.crl_number = None - self.revoked = [] - self.cms_timestamp = None - - os.environ["TZ"] = "UTC" - time.tzset() - - parser = argparse.ArgumentParser(description = __doc__) - parser.add_argument("-c", "--config", - help = "override default location of configuration file") - parser.add_argument("-d", "--debug", action = "store_true", - help = "enable debugging mode") - parser.add_argument("-f", "--foreground", action = "store_true", - help = "do not daemonize") - parser.add_argument("--pidfile", - help = "override default location of pid file") - args = parser.parse_args() - - rpki.log.init("rootd", use_syslog = not args.debug) - - self.cfg = rpki.config.parser(args.config, "rootd") - 
self.cfg.set_global_flags() - - if not args.foreground and not args.debug: - rpki.daemonize.daemon(pidfile = args.pidfile) - - self.bpki_ta = rpki.x509.X509(Auto_update = self.cfg.get("bpki-ta")) - self.rootd_bpki_key = rpki.x509.RSA( Auto_update = self.cfg.get("rootd-bpki-key")) - self.rootd_bpki_cert = rpki.x509.X509(Auto_update = self.cfg.get("rootd-bpki-cert")) - self.rootd_bpki_crl = rpki.x509.CRL( Auto_update = self.cfg.get("rootd-bpki-crl")) - self.child_bpki_cert = rpki.x509.X509(Auto_update = self.cfg.get("child-bpki-cert")) - - self.http_server_host = self.cfg.get("server-host", "") - self.http_server_port = self.cfg.getint("server-port") - - self.rpki_class_name = self.cfg.get("rpki-class-name", "wombat") - - self.rpki_root_dir = self.cfg.get("rpki-root-dir") - self.rpki_base_uri = self.cfg.get("rpki-base-uri", "rsync://" + self.rpki_class_name + ".invalid/") - - self.rpki_root_key = rpki.x509.RSA(Auto_update = self.cfg.get("rpki-root-key")) - self.rpki_root_cert_file = self.cfg.get("rpki-root-cert") - self.rpki_root_cert_uri = self.cfg.get("rpki-root-cert-uri", self.rpki_base_uri + "root.cer") - - self.rpki_root_manifest = self.cfg.get("rpki-root-manifest", "root.mft") - self.rpki_root_crl = self.cfg.get("rpki-root-crl", "root.crl") - self.rpki_subject_cert = self.cfg.get("rpki-subject-cert", "child.cer") - self.rpki_subject_pkcs10 = self.cfg.get("rpki-subject-pkcs10", "child.pkcs10") - - self.rpki_subject_lifetime = rpki.sundial.timedelta.parse(self.cfg.get("rpki-subject-lifetime", "8w")) - self.rpki_subject_regen = rpki.sundial.timedelta.parse(self.cfg.get("rpki-subject-regen", self.rpki_subject_lifetime.convert_to_seconds() / 2)) - - self.include_bpki_crl = self.cfg.getboolean("include-bpki-crl", False) - - rpki.http.server(host = self.http_server_host, - port = self.http_server_port, - handlers = self.up_down_handler) diff --git a/rpkid/rpki/rpkic.py b/rpkid/rpki/rpkic.py deleted file mode 100644 index d5339f5b..00000000 --- a/rpkid/rpki/rpkic.py 
+++ /dev/null @@ -1,877 +0,0 @@ -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2013 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR -# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Command line configuration and control tool for rpkid et al. - -Type "help" at the inernal prompt, or run the program with the --help option for -an overview of the available commands; type "help foo" for (more) detailed help -on the "foo" command. -""" - -# NB: As of this writing, I'm trying really hard to avoid having this -# program depend on a Django settings.py file. This may prove to be a -# waste of time in the long run, but for for now, this means that one -# has to be careful about exactly how and when one imports Django -# modules, or anything that imports Django modules. Bottom line is -# that we don't import such modules until we need them. - -import os -import argparse -import sys -import time -import rpki.config -import rpki.sundial -import rpki.log -import rpki.http -import rpki.resource_set -import rpki.relaxng -import rpki.exceptions -import rpki.left_right -import rpki.x509 -import rpki.async -import rpki.version - -from rpki.cli import Cmd, BadCommandSyntax, parsecmd, cmdarg - -class BadPrefixSyntax(Exception): "Bad prefix syntax." 
-class CouldntTalkToDaemon(Exception): "Couldn't talk to daemon." -class BadXMLMessage(Exception): "Bad XML message." -class PastExpiration(Exception): "Expiration date has already passed." -class CantRunRootd(Exception): "Can't run rootd." - -module_doc = __doc__ - -class main(Cmd): - - prompt = "rpkic> " - - completedefault = Cmd.filename_complete - - # Top-level argparser, for stuff that one might want when starting - # up the interactive command loop. Not sure -i belongs here, but - # it's harmless so leave it here for the moment. - - top_argparser = argparse.ArgumentParser(add_help = False) - top_argparser.add_argument("-c", "--config", - help = "override default location of configuration file") - top_argparser.add_argument("-i", "--identity", "--handle", - help = "set initial entity handdle") - top_argparser.add_argument("--profile", - help = "enable profiling, saving data to PROFILE") - - # Argparser for non-interactive commands (no command loop). - - full_argparser = argparse.ArgumentParser(parents = [top_argparser], - description = module_doc) - argsubparsers = full_argparser.add_subparsers(title = "Commands", metavar = "") - - def __init__(self): - os.environ["TZ"] = "UTC" - time.tzset() - - # Try parsing just the arguments that make sense if we're - # going to be running an interactive command loop. If that - # parses everything, we're interactive, otherwise, it's either - # a non-interactive command or a parse error, so we let the full - # parser sort that out for us. 
- - args, argv = self.top_argparser.parse_known_args() - self.interactive = not argv - if not self.interactive: - args = self.full_argparser.parse_args() - - self.cfg_file = args.config - self.handle = args.identity - - if args.profile: - import cProfile - prof = cProfile.Profile() - try: - prof.runcall(self.main, args) - finally: - prof.dump_stats(args.profile) - print "Dumped profile data to %s" % args.profile - else: - self.main(args) - - def main(self, args): - rpki.log.init("rpkic", use_syslog = False) - self.read_config() - if self.interactive: - Cmd.__init__(self) - else: - args.func(self, args) - - def read_config(self): - global rpki # pylint: disable=W0602 - - try: - cfg = rpki.config.parser(self.cfg_file, "myrpki") - cfg.set_global_flags() - except IOError, e: - sys.exit("%s: %s" % (e.strerror, e.filename)) - - self.histfile = cfg.get("history_file", os.path.expanduser("~/.rpkic_history")) - self.autosync = cfg.getboolean("autosync", True, section = "rpkic") - - from django.conf import settings - - settings.configure( - DATABASES = { "default" : { - "ENGINE" : "django.db.backends.mysql", - "NAME" : cfg.get("sql-database", section = "irdbd"), - "USER" : cfg.get("sql-username", section = "irdbd"), - "PASSWORD" : cfg.get("sql-password", section = "irdbd"), - "HOST" : "", - "PORT" : "", - "OPTIONS" : { "init_command": "SET storage_engine=INNODB" }}}, - INSTALLED_APPS = ("rpki.irdb",), - ) - - import rpki.irdb # pylint: disable=W0621 - - try: - rpki.irdb.models.ca_certificate_lifetime = rpki.sundial.timedelta.parse( - cfg.get("bpki_ca_certificate_lifetime", section = "rpkic")) - except rpki.config.ConfigParser.Error: - pass - - try: - rpki.irdb.models.ee_certificate_lifetime = rpki.sundial.timedelta.parse( - cfg.get("bpki_ee_certificate_lifetime", section = "rpkic")) - except rpki.config.ConfigParser.Error: - pass - - try: - rpki.irdb.models.crl_interval = rpki.sundial.timedelta.parse( - cfg.get("bpki_crl_interval", section = "rpkic")) - except 
rpki.config.ConfigParser.Error: - pass - - import django.core.management - django.core.management.call_command("syncdb", verbosity = 0, load_initial_data = False) - - self.zoo = rpki.irdb.Zookeeper(cfg = cfg, handle = self.handle, logstream = sys.stdout) - - - def do_help(self, arg): - """ - List available commands with "help" or detailed help with "help cmd". - """ - - argv = arg.split() - - if not argv: - #return self.full_argparser.print_help() - return self.print_topics( - self.doc_header, - sorted(set(name[3:] for name in self.get_names() - if name.startswith("do_") - and getattr(self, name).__doc__)), - 15, 80) - - try: - return getattr(self, "help_" + argv[0])() - except AttributeError: - pass - - func = getattr(self, "do_" + argv[0], None) - - try: - return func.argparser.print_help() - except AttributeError: - pass - - try: - return self.stdout.write(func.__doc__ + "\n") - except AttributeError: - pass - - self.stdout.write((self.nohelp + "\n") % arg) - - - def irdb_handle_complete(self, manager, text, line, begidx, endidx): - return [obj.handle for obj in manager.all() if obj.handle and obj.handle.startswith(text)] - - - @parsecmd(argsubparsers, - cmdarg("handle", help = "new handle")) - def do_select_identity(self, args): - """ - Select an identity handle for use with later commands. - """ - - self.zoo.reset_identity(args.handle) - - def complete_select_identity(self, *args): - return self.irdb_handle_complete(rpki.irdb.ResourceHolderCA.objects, *args) - - - @parsecmd(argsubparsers) - def do_initialize(self, args): - """ - Initialize an RPKI installation. DEPRECATED. - - This command reads the configuration file, creates the BPKI and - EntityDB directories, generates the initial BPKI certificates, and - creates an XML file describing the resource-holding aspect of this - RPKI installation. 
- """ - - rootd_case = self.zoo.run_rootd and self.zoo.handle == self.zoo.cfg.get("handle") - - r = self.zoo.initialize() - r.save("%s.identity.xml" % self.zoo.handle, - None if rootd_case else sys.stdout) - - if rootd_case: - r = self.zoo.configure_rootd() - if r is not None: - r.save("%s.%s.repository-request.xml" % (self.zoo.handle, self.zoo.handle), sys.stdout) - - self.zoo.write_bpki_files() - - - @parsecmd(argsubparsers, - cmdarg("handle", help = "handle of entity to create")) - def do_create_identity(self, args): - """ - Create a new resource-holding entity. - - Returns XML file describing the new resource holder. - - This command is idempotent: calling it for a resource holder which - already exists returns the existing identity. - """ - - self.zoo.reset_identity(args.handle) - - r = self.zoo.initialize_resource_bpki() - r.save("%s.identity.xml" % self.zoo.handle, sys.stdout) - - - @parsecmd(argsubparsers) - def do_initialize_server_bpki(self, args): - """ - Initialize server BPKI portion of an RPKI installation. - - Reads server configuration from configuration file and creates the - server BPKI objects needed to start daemons. - """ - - self.zoo.initialize_server_bpki() - self.zoo.write_bpki_files() - - - @parsecmd(argsubparsers) - def do_update_bpki(self, args): - """ - Update BPKI certificates. Assumes an existing RPKI installation. - - Basic plan here is to reissue all BPKI certificates we can, right - now. In the long run we might want to be more clever about only - touching ones that need maintenance, but this will do for a start. - - We also reissue CRLs for all CAs. - - Most likely this should be run under cron. 
- """ - - self.zoo.update_bpki() - self.zoo.write_bpki_files() - try: - self.zoo.synchronize_bpki() - except Exception, e: - print "Couldn't push updated BPKI material into daemons: %s" % e - - - @parsecmd(argsubparsers, - cmdarg("--child_handle", help = "override default handle for new child"), - cmdarg("--valid_until", help = "override default validity interval"), - cmdarg("child_xml", help = "XML file containing child's identity")) - def do_configure_child(self, args): - """ - Configure a new child of this RPKI entity. - - This command extracts the child's data from an XML input file, - cross-certifies the child's resource-holding BPKI certificate, and - generates an XML output file describing the relationship between - the child and this parent, including this parent's BPKI data and - up-down protocol service URI. - """ - - r, child_handle = self.zoo.configure_child(args.child_xml, args.child_handle, args.valid_until) - r.save("%s.%s.parent-response.xml" % (self.zoo.handle, child_handle), sys.stdout) - self.zoo.synchronize_ca() - - - @parsecmd(argsubparsers, - cmdarg("child_handle", help = "handle of child to delete")) - def do_delete_child(self, args): - """ - Delete a child of this RPKI entity. - """ - - try: - self.zoo.delete_child(args.child_handle) - self.zoo.synchronize_ca() - except rpki.irdb.ResourceHolderCA.DoesNotExist: - print "No such resource holder \"%s\"" % self.zoo.handle - except rpki.irdb.Child.DoesNotExist: - print "No such child \"%s\"" % args.child_handle - - def complete_delete_child(self, *args): - return self.irdb_handle_complete(self.zoo.resource_ca.children, *args) - - - @parsecmd(argsubparsers, - cmdarg("--parent_handle", help = "override default handle for new parent"), - cmdarg("parent_xml", help = "XML file containing parent's response")) - def do_configure_parent(self, args): - """ - Configure a new parent of this RPKI entity. 
- - This command reads the parent's response XML, extracts the - parent's BPKI and service URI information, cross-certifies the - parent's BPKI data into this entity's BPKI, and checks for offers - or referrals of publication service. If a publication offer or - referral is present, we generate a request-for-service message to - that repository, in case the user wants to avail herself of the - referral or offer. - - We do NOT attempt automatic synchronization with rpkid at the - completion of this command, because synchronization at this point - will usually fail due to the repository not being set up yet. If - you know what you are doing and for some reason really want to - synchronize here, run the synchronize command yourself. - """ - - r, parent_handle = self.zoo.configure_parent(args.parent_xml, args.parent_handle) - r.save("%s.%s.repository-request.xml" % (self.zoo.handle, parent_handle), sys.stdout) - - - @parsecmd(argsubparsers, - cmdarg("parent_handle", help = "handle of parent to delete")) - def do_delete_parent(self, args): - """ - Delete a parent of this RPKI entity. - """ - - try: - self.zoo.delete_parent(args.parent_handle) - self.zoo.synchronize_ca() - except rpki.irdb.ResourceHolderCA.DoesNotExist: - print "No such resource holder \"%s\"" % self.zoo.handle - except rpki.irdb.Parent.DoesNotExist: - print "No such parent \"%s\"" % args.parent_handle - - def complete_delete_parent(self, *args): - return self.irdb_handle_complete(self.zoo.resource_ca.parents, *args) - - - @parsecmd(argsubparsers) - def do_configure_root(self, args): - """ - Configure the current resource holding identity as a root. - - This configures rpkid to talk to rootd as (one of) its parent(s). - Returns repository request XML file like configure_parent does. 
- """ - - r = self.zoo.configure_rootd() - if r is not None: - r.save("%s.%s.repository-request.xml" % (self.zoo.handle, self.zoo.handle), sys.stdout) - self.zoo.write_bpki_files() - - - @parsecmd(argsubparsers) - def do_delete_root(self, args): - """ - Delete local RPKI root as parent of the current entity. - - This tells the current rpkid identity () to stop talking to - rootd. - """ - - try: - self.zoo.delete_rootd() - self.zoo.synchronize_ca() - except rpki.irdb.ResourceHolderCA.DoesNotExist: - print "No such resource holder \"%s\"" % self.zoo.handle - except rpki.irdb.Rootd.DoesNotExist: - print "No associated rootd" - - - @parsecmd(argsubparsers, - cmdarg("--flat", help = "use flat publication scheme", action = "store_true"), - cmdarg("--sia_base", help = "override SIA base value"), - cmdarg("client_xml", help = "XML file containing client request")) - def do_configure_publication_client(self, args): - """ - Configure publication server to know about a new client. - - This command reads the client's request for service, - cross-certifies the client's BPKI data, and generates a response - message containing the repository's BPKI data and service URI. - """ - - r, client_handle = self.zoo.configure_publication_client(args.client_xml, args.sia_base, args.flat) - r.save("%s.repository-response.xml" % client_handle.replace("/", "."), sys.stdout) - try: - self.zoo.synchronize_pubd() - except rpki.irdb.Repository.DoesNotExist: - pass - - - @parsecmd(argsubparsers, - cmdarg("client_handle", help = "handle of client to delete")) - def do_delete_publication_client(self, args): - """ - Delete a publication client of this RPKI entity. 
- """ - - try: - self.zoo.delete_publication_client(args.client_handle) - self.zoo.synchronize_pubd() - except rpki.irdb.ResourceHolderCA.DoesNotExist: - print "No such resource holder \"%s\"" % self.zoo.handle - except rpki.irdb.Client.DoesNotExist: - print "No such client \"%s\"" % args.client_handle - - def complete_delete_publication_client(self, *args): - return self.irdb_handle_complete(self.zoo.server_ca.clients, *args) - - - @parsecmd(argsubparsers, - cmdarg("--parent_handle", help = "override default parent handle"), - cmdarg("repository_xml", help = "XML file containing repository response")) - def do_configure_repository(self, args): - """ - Configure a publication repository for this RPKI entity. - - This command reads the repository's response to this entity's - request for publication service, extracts and cross-certifies the - BPKI data and service URI, and links the repository data with the - corresponding parent data in our local database. - """ - - self.zoo.configure_repository(args.repository_xml, args.parent_handle) - self.zoo.synchronize_ca() - - - @parsecmd(argsubparsers, - cmdarg("repository_handle", help = "handle of repository to delete")) - def do_delete_repository(self, args): - """ - Delete a repository of this RPKI entity. - """ - - try: - self.zoo.delete_repository(args.repository_handle) - self.zoo.synchronize_ca() - except rpki.irdb.ResourceHolderCA.DoesNotExist: - print "No such resource holder \"%s\"" % self.zoo.handle - except rpki.irdb.Repository.DoesNotExist: - print "No such repository \"%s\"" % args.repository_handle - - def complete_delete_repository(self, *args): - return self.irdb_handle_complete(self.zoo.resource_ca.repositories, *args) - - - @parsecmd(argsubparsers) - def do_delete_identity(self, args): - """ - Delete the current RPKI identity (rpkid object). 
- """ - - try: - self.zoo.delete_self() - self.zoo.synchronize_deleted_ca() - except rpki.irdb.ResourceHolderCA.DoesNotExist: - print "No such resource holder \"%s\"" % self.zoo.handle - - - @parsecmd(argsubparsers, - cmdarg("--valid_until", help = "override default new validity interval"), - cmdarg("child_handle", help = "handle of child to renew")) - def do_renew_child(self, args): - """ - Update validity period for one child entity. - """ - - self.zoo.renew_children(args.child_handle, args.valid_until) - self.zoo.synchronize_ca() - if self.autosync: - self.zoo.run_rpkid_now() - - def complete_renew_child(self, *args): - return self.irdb_handle_complete(self.zoo.resource_ca.children, *args) - - - @parsecmd(argsubparsers, - cmdarg("--valid_until", help = "override default new validity interval")) - def do_renew_all_children(self, args): - """ - Update validity period for all child entities. - """ - - self.zoo.renew_children(None, args.valid_until) - self.zoo.synchronize_ca() - if self.autosync: - self.zoo.run_rpkid_now() - - - @parsecmd(argsubparsers, - cmdarg("prefixes_csv", help = "CSV file listing prefixes")) - def do_load_prefixes(self, args): - """ - Load prefixes into IRDB from CSV file. - """ - - self.zoo.load_prefixes(args.prefixes_csv, True) - if self.autosync: - self.zoo.run_rpkid_now() - - - @parsecmd(argsubparsers) - def do_show_child_resources(self, args): - """ - Show resources assigned to children. - """ - - for child in self.zoo.resource_ca.children.all(): - resources = child.resource_bag - print "Child:", child.handle - if resources.asn: - print " ASN:", resources.asn - if resources.v4: - print " IPv4:", resources.v4 - if resources.v6: - print " IPv6:", resources.v6 - - - @parsecmd(argsubparsers) - def do_show_roa_requests(self, args): - """ - Show ROA requests. 
- """ - - for roa_request in self.zoo.resource_ca.roa_requests.all(): - prefixes = roa_request.roa_prefix_bag - print "ASN: ", roa_request.asn - if prefixes.v4: - print " IPv4:", prefixes.v4 - if prefixes.v6: - print " IPv6:", prefixes.v6 - - - @parsecmd(argsubparsers) - def do_show_ghostbuster_requests(self, args): - """ - Show Ghostbuster requests. - """ - - for ghostbuster_request in self.zoo.resource_ca.ghostbuster_requests.all(): - print "Parent:", ghostbuster_request.parent or "*" - print ghostbuster_request.vcard - - - @parsecmd(argsubparsers) - def do_show_received_resources(self, args): - """ - Show resources received by this entity from its parent(s). - """ - - for pdu in self.zoo.call_rpkid( - rpki.left_right.list_received_resources_elt.make_pdu(self_handle = self.zoo.handle)): - - print "Parent: ", pdu.parent_handle - print " notBefore:", pdu.notBefore - print " notAfter: ", pdu.notAfter - print " URI: ", pdu.uri - print " SIA URI: ", pdu.sia_uri - print " AIA URI: ", pdu.aia_uri - print " ASN: ", pdu.asn - print " IPv4: ", pdu.ipv4 - print " IPv6: ", pdu.ipv6 - - - @parsecmd(argsubparsers) - def do_show_published_objects(self, args): - """ - Show published objects. - """ - - for pdu in self.zoo.call_rpkid( - rpki.left_right.list_published_objects_elt.make_pdu(self_handle = self.zoo.handle)): - - track = rpki.x509.uri_dispatch(pdu.uri)(Base64 = pdu.obj).tracking_data(pdu.uri) - child = pdu.child_handle - - if child is None: - print track - else: - print track, child - - - @parsecmd(argsubparsers) - def do_show_bpki(self, args): - """ - Show this entity's BPKI objects. 
- """ - - print "Self: ", self.zoo.resource_ca.handle - print " notBefore:", self.zoo.resource_ca.certificate.getNotBefore() - print " notAfter: ", self.zoo.resource_ca.certificate.getNotAfter() - print " Subject: ", self.zoo.resource_ca.certificate.getSubject() - print " SKI: ", self.zoo.resource_ca.certificate.hSKI() - for bsc in self.zoo.resource_ca.bscs.all(): - print "BSC: ", bsc.handle - print " notBefore:", bsc.certificate.getNotBefore() - print " notAfter: ", bsc.certificate.getNotAfter() - print " Subject: ", bsc.certificate.getSubject() - print " SKI: ", bsc.certificate.hSKI() - for parent in self.zoo.resource_ca.parents.all(): - print "Parent: ", parent.handle - print " notBefore:", parent.certificate.getNotBefore() - print " notAfter: ", parent.certificate.getNotAfter() - print " Subject: ", parent.certificate.getSubject() - print " SKI: ", parent.certificate.hSKI() - print " URL: ", parent.service_uri - for child in self.zoo.resource_ca.children.all(): - print "Child: ", child.handle - print " notBefore:", child.certificate.getNotBefore() - print " notAfter: ", child.certificate.getNotAfter() - print " Subject: ", child.certificate.getSubject() - print " SKI: ", child.certificate.hSKI() - for repository in self.zoo.resource_ca.repositories.all(): - print "Repository: ", repository.handle - print " notBefore:", repository.certificate.getNotBefore() - print " notAfter: ", repository.certificate.getNotAfter() - print " Subject: ", repository.certificate.getSubject() - print " SKI: ", repository.certificate.hSKI() - print " URL: ", repository.service_uri - - - @parsecmd(argsubparsers, - cmdarg("asns_csv", help = "CSV file listing ASNs")) - def do_load_asns(self, args): - """ - Load ASNs into IRDB from CSV file. 
- """ - - self.zoo.load_asns(args.asns_csv, True) - if self.autosync: - self.zoo.run_rpkid_now() - - - @parsecmd(argsubparsers, - cmdarg("roa_requests_csv", help = "CSV file listing ROA requests")) - def do_load_roa_requests(self, args): - """ - Load ROA requests into IRDB from CSV file. - """ - - self.zoo.load_roa_requests(args.roa_requests_csv) - if self.autosync: - self.zoo.run_rpkid_now() - - - @parsecmd(argsubparsers, - cmdarg("ghostbuster_requests", help = "file listing Ghostbuster requests as a sequence of VCards")) - def do_load_ghostbuster_requests(self, args): - """ - Load Ghostbuster requests into IRDB from file. - """ - - self.zoo.load_ghostbuster_requests(args.ghostbuster_requests) - if self.autosync: - self.zoo.run_rpkid_now() - - - @parsecmd(argsubparsers, - cmdarg("--valid_until", help = "override default validity interval"), - cmdarg("router_certificate_request_xml", help = "file containing XML router certificate request")) - def do_add_router_certificate_request(self, args): - """ - Load router certificate request(s) into IRDB from XML file. - """ - - self.zoo.add_router_certificate_request(args.router_certificate_request_xml, args.valid_until) - if self.autosync: - self.zoo.run_rpkid_now() - - @parsecmd(argsubparsers, - cmdarg("gski", help = "g(SKI) of router certificate request to delete")) - def do_delete_router_certificate_request(self, args): - """ - Delete a router certificate request from the IRDB. 
- """ - - try: - self.zoo.delete_router_certificate_request(args.gski) - if self.autosync: - self.zoo.run_rpkid_now() - except rpki.irdb.ResourceHolderCA.DoesNotExist: - print "No such resource holder \"%s\"" % self.zoo.handle - except rpki.irdb.EECertificateRequest.DoesNotExist: - print "No certificate request matching g(SKI) \"%s\"" % args.gski - - def complete_delete_router_certificate_request(self, text, line, begidx, endidx): - return [obj.gski for obj in self.zoo.resource_ca.ee_certificate_requests.all() - if obj.gski and obj.gski.startswith(text)] - - - @parsecmd(argsubparsers) - def do_show_router_certificate_requests(self, args): - """ - Show this entity's router certificate requests. - """ - - for req in self.zoo.resource_ca.ee_certificate_requests.all(): - print "%s %s %s %s" % (req.gski, req.valid_until, req.cn, req.sn) - - - # What about updates? Validity interval, change router-id, change - # ASNs. Not sure what this looks like yet, blunder ahead with the - # core code while mulling over the UI. - - - @parsecmd(argsubparsers) - def do_synchronize(self, args): - """ - Whack daemons to match IRDB. - - This command may be replaced by implicit synchronization embedded - in of other commands, haven't decided yet. - """ - - self.zoo.synchronize() - - - @parsecmd(argsubparsers) - def do_force_publication(self, args): - """ - Whack rpkid to force (re)publication of everything. - - This is not usually necessary, as rpkid automatically publishes - changes it makes, but this command can be useful occasionally when - a fault or configuration error has left rpkid holding data which - it has not been able to publish. - """ - - self.zoo.publish_world_now() - - - @parsecmd(argsubparsers) - def do_force_reissue(self, args): - """ - Whack rpkid to force reissuance of everything. 
- - This is not usually necessary, as rpkid reissues automatically - objects automatically as needed, but this command can be useful - occasionally when a fault or configuration error has prevented - rpkid from reissuing when it should have. - """ - - self.zoo.reissue() - - - @parsecmd(argsubparsers) - def do_up_down_rekey(self, args): - """ - Initiate a "rekey" operation. - - This tells rpkid to generate new keys for each certificate issued - to it via the up-down protocol. - - Rekeying is the first stage of a key rollover operation. You will - need to follow it up later with a "revoke" operation to clean up - the old keys - """ - - self.zoo.rekey() - - - @parsecmd(argsubparsers) - def do_up_down_revoke(self, args): - """ - Initiate a "revoke" operation. - - This tells rpkid to clean up old keys formerly used by - certificates issued to it via the up-down protocol. - - This is the cleanup stage of a key rollover operation. - """ - - self.zoo.revoke() - - - @parsecmd(argsubparsers) - def do_revoke_forgotten(self, args): - """ - Initiate a "revoke_forgotten" operation. - - This tells rpkid to ask its parent to revoke certificates for - which rpkid does not know the private keys. - - This should never happen during ordinary operation, but can happen - if rpkid is misconfigured or its database has been damaged, so we - need a way to resynchronize rpkid with its parent in such cases. - We could do this automatically, but as we don't know the precise - cause of the failure we don't know if it's recoverable locally - (eg, from an SQL backup), so we require a manual trigger before - discarding possibly-useful certificates. - """ - - self.zoo.revoke_forgotten() - - - @parsecmd(argsubparsers) - def do_clear_all_sql_cms_replay_protection(self, args): - """ - Tell rpkid and pubd to clear replay protection. - - This clears the replay protection timestamps stored in SQL for all - entities known to rpkid and pubd. 
This is a fairly blunt - instrument, but as we don't expect this to be necessary except in - the case of gross misconfiguration, it should suffice - """ - - self.zoo.clear_all_sql_cms_replay_protection() - - - @parsecmd(argsubparsers) - def do_version(self, args): - """ - Show current software version number. - """ - - print rpki.version.VERSION - - - @parsecmd(argsubparsers) - def do_list_self_handles(self, args): - """ - List all handles in this rpkid instance. - """ - - for ca in rpki.irdb.ResourceHolderCA.objects.all(): - print ca.handle - diff --git a/rpkid/rpki/rpkid.py b/rpkid/rpki/rpkid.py deleted file mode 100644 index d6163bee..00000000 --- a/rpkid/rpki/rpkid.py +++ /dev/null @@ -1,2500 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -RPKI CA engine. 
-""" - -import os -import time -import argparse -import sys -import re -import random -import base64 -import rpki.resource_set -import rpki.up_down -import rpki.left_right -import rpki.x509 -import rpki.sql -import rpki.http -import rpki.config -import rpki.exceptions -import rpki.relaxng -import rpki.log -import rpki.async -import rpki.daemonize -import rpki.rpkid_tasks - -class main(object): - """ - Main program for rpkid. - """ - - def __init__(self): - - os.environ["TZ"] = "UTC" - time.tzset() - - self.irdbd_cms_timestamp = None - self.irbe_cms_timestamp = None - self.task_current = None - self.task_queue = [] - - parser = argparse.ArgumentParser(description = __doc__) - parser.add_argument("-c", "--config", - help = "override default location of configuration file") - parser.add_argument("-d", "--debug", action = "store_true", - help = "enable debugging mode") - parser.add_argument("-f", "--foreground", action = "store_true", - help = "do not daemonize") - parser.add_argument("--pidfile", - help = "override default location of pid file") - parser.add_argument("--profile", - help = "enable profiling, saving data to PROFILE") - args = parser.parse_args() - - self.profile = args.profile - - rpki.log.init("rpkid", use_syslog = not args.debug) - - self.cfg = rpki.config.parser(args.config, "rpkid") - self.cfg.set_global_flags() - - if not args.foreground and not args.debug: - rpki.daemonize.daemon(pidfile = args.pidfile) - - if self.profile: - import cProfile - prof = cProfile.Profile() - try: - prof.runcall(self.main) - finally: - prof.dump_stats(self.profile) - rpki.log.info("Dumped profile data to %s" % self.profile) - else: - self.main() - - def main(self): - - startup_msg = self.cfg.get("startup-message", "") - if startup_msg: - rpki.log.info(startup_msg) - - if self.profile: - rpki.log.info("Running in profile mode with output to %s" % self.profile) - - self.sql = rpki.sql.session(self.cfg) - - self.bpki_ta = rpki.x509.X509(Auto_update = 
self.cfg.get("bpki-ta")) - self.irdb_cert = rpki.x509.X509(Auto_update = self.cfg.get("irdb-cert")) - self.irbe_cert = rpki.x509.X509(Auto_update = self.cfg.get("irbe-cert")) - self.rpkid_cert = rpki.x509.X509(Auto_update = self.cfg.get("rpkid-cert")) - self.rpkid_key = rpki.x509.RSA( Auto_update = self.cfg.get("rpkid-key")) - - self.irdb_url = self.cfg.get("irdb-url") - - self.http_server_host = self.cfg.get("server-host", "") - self.http_server_port = self.cfg.getint("server-port") - - self.publication_kludge_base = self.cfg.get("publication-kludge-base", "publication/") - - # Icky hack to let Iain do some testing quickly, should go away - # once we sort out whether we can make this change permanent. - # - # OK, the stuff to add router certificate support makes enough - # other changes that we're going to need a migration program in - # any case, so might as well throw the switch here too, or at - # least find out if it (still) works as expected. - - self.merge_publication_directories = self.cfg.getboolean("merge_publication_directories", - True) - - self.use_internal_cron = self.cfg.getboolean("use-internal-cron", True) - - self.initial_delay = random.randint(self.cfg.getint("initial-delay-min", 10), - self.cfg.getint("initial-delay-max", 120)) - - # Should be much longer in production - self.cron_period = rpki.sundial.timedelta(seconds = self.cfg.getint("cron-period", 120)) - self.cron_keepalive = rpki.sundial.timedelta(seconds = self.cfg.getint("cron-keepalive", 0)) - if not self.cron_keepalive: - self.cron_keepalive = self.cron_period * 4 - self.cron_timeout = None - - self.start_cron() - - rpki.http.server( - host = self.http_server_host, - port = self.http_server_port, - handlers = (("/left-right", self.left_right_handler), - ("/up-down/", self.up_down_handler), - ("/cronjob", self.cronjob_handler))) - - - def start_cron(self): - """ - Start clock for rpkid's internal cron process. 
- """ - - if self.use_internal_cron: - self.cron_timer = rpki.async.timer(handler = self.cron) - when = rpki.sundial.now() + rpki.sundial.timedelta(seconds = self.initial_delay) - rpki.log.debug("Scheduling initial cron pass at %s" % when) - self.cron_timer.set(when) - else: - rpki.log.debug("Not using internal clock, start_cron() call ignored") - - def irdb_query(self, callback, errback, *q_pdus, **kwargs): - """ - Perform an IRDB callback query. - """ - - rpki.log.trace() - - try: - q_types = tuple(type(q_pdu) for q_pdu in q_pdus) - - expected_pdu_count = kwargs.pop("expected_pdu_count", None) - assert len(kwargs) == 0 - - q_msg = rpki.left_right.msg.query() - q_msg.extend(q_pdus) - q_der = rpki.left_right.cms_msg().wrap(q_msg, self.rpkid_key, self.rpkid_cert) - - def unwrap(r_der): - try: - r_cms = rpki.left_right.cms_msg(DER = r_der) - r_msg = r_cms.unwrap((self.bpki_ta, self.irdb_cert)) - self.irdbd_cms_timestamp = r_cms.check_replay(self.irdbd_cms_timestamp, self.irdb_url) - if not r_msg.is_reply() or not all(type(r_pdu) in q_types for r_pdu in r_msg): - raise rpki.exceptions.BadIRDBReply( - "Unexpected response to IRDB query: %s" % r_cms.pretty_print_content()) - if expected_pdu_count is not None and len(r_msg) != expected_pdu_count: - assert isinstance(expected_pdu_count, (int, long)) - raise rpki.exceptions.BadIRDBReply( - "Expected exactly %d PDU%s from IRDB: %s" % ( - expected_pdu_count, "" if expected_pdu_count == 1 else "s", - r_cms.pretty_print_content())) - callback(r_msg) - except Exception, e: - errback(e) - - rpki.http.client( - url = self.irdb_url, - msg = q_der, - callback = unwrap, - errback = errback) - - except Exception, e: - errback(e) - - - def irdb_query_child_resources(self, self_handle, child_handle, callback, errback): - """ - Ask IRDB about a child's resources. 
- """ - - rpki.log.trace() - - q_pdu = rpki.left_right.list_resources_elt() - q_pdu.self_handle = self_handle - q_pdu.child_handle = child_handle - - def done(r_msg): - callback(rpki.resource_set.resource_bag( - asn = r_msg[0].asn, - v4 = r_msg[0].ipv4, - v6 = r_msg[0].ipv6, - valid_until = r_msg[0].valid_until)) - - self.irdb_query(done, errback, q_pdu, expected_pdu_count = 1) - - def irdb_query_roa_requests(self, self_handle, callback, errback): - """ - Ask IRDB about self's ROA requests. - """ - - rpki.log.trace() - - q_pdu = rpki.left_right.list_roa_requests_elt() - q_pdu.self_handle = self_handle - - self.irdb_query(callback, errback, q_pdu) - - def irdb_query_ghostbuster_requests(self, self_handle, parent_handles, callback, errback): - """ - Ask IRDB about self's ghostbuster record requests. - """ - - rpki.log.trace() - - q_pdus = [] - - for parent_handle in parent_handles: - q_pdu = rpki.left_right.list_ghostbuster_requests_elt() - q_pdu.self_handle = self_handle - q_pdu.parent_handle = parent_handle - q_pdus.append(q_pdu) - - self.irdb_query(callback, errback, *q_pdus) - - def irdb_query_ee_certificate_requests(self, self_handle, callback, errback): - """ - Ask IRDB about self's EE certificate requests. - """ - - rpki.log.trace() - - q_pdu = rpki.left_right.list_ee_certificate_requests_elt() - q_pdu.self_handle = self_handle - - self.irdb_query(callback, errback, q_pdu) - - def left_right_handler(self, query, path, cb): - """ - Process one left-right PDU. 
- """ - - rpki.log.trace() - - def done(r_msg): - reply = rpki.left_right.cms_msg().wrap(r_msg, self.rpkid_key, self.rpkid_cert) - self.sql.sweep() - cb(200, body = reply) - - try: - q_cms = rpki.left_right.cms_msg(DER = query) - q_msg = q_cms.unwrap((self.bpki_ta, self.irbe_cert)) - self.irbe_cms_timestamp = q_cms.check_replay(self.irbe_cms_timestamp, path) - if not q_msg.is_query(): - raise rpki.exceptions.BadQuery, "Message type is not query" - q_msg.serve_top_level(self, done) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - rpki.log.traceback() - cb(500, reason = "Unhandled exception %s: %s" % (e.__class__.__name__, e)) - - up_down_url_regexp = re.compile("/up-down/([-A-Z0-9_]+)/([-A-Z0-9_]+)$", re.I) - - def up_down_handler(self, query, path, cb): - """ - Process one up-down PDU. - """ - - rpki.log.trace() - - def done(reply): - self.sql.sweep() - cb(200, body = reply) - - try: - match = self.up_down_url_regexp.search(path) - if match is None: - raise rpki.exceptions.BadContactURL, "Bad URL path received in up_down_handler(): %s" % path - self_handle, child_handle = match.groups() - child = rpki.left_right.child_elt.sql_fetch_where1(self, "self.self_handle = %s AND child.child_handle = %s AND child.self_id = self.self_id", - (self_handle, child_handle), "self") - if child is None: - raise rpki.exceptions.ChildNotFound, "Could not find child %s of self %s in up_down_handler()" % (child_handle, self_handle) - child.serve_up_down(query, done) - except (rpki.async.ExitNow, SystemExit): - raise - except (rpki.exceptions.ChildNotFound, rpki.exceptions.BadContactURL), e: - rpki.log.warn(str(e)) - cb(400, reason = str(e)) - except Exception, e: - rpki.log.traceback() - cb(400, reason = "Could not process PDU: %s" % e) - - def checkpoint(self, force = False): - """ - Record that we were still alive when we got here, by resetting - keepalive timer. 
- """ - if force or self.cron_timeout is not None: - self.cron_timeout = rpki.sundial.now() + self.cron_keepalive - - def task_add(self, task): - """ - Add a task to the scheduler task queue, unless it's already queued. - """ - if task not in self.task_queue: - rpki.log.debug("Adding %r to task queue" % task) - self.task_queue.append(task) - return True - else: - rpki.log.debug("Task %r was already in the task queue" % task) - return False - - def task_next(self): - """ - Pull next task from the task queue and put it the deferred event - queue (we don't want to run it directly, as that could eventually - blow out our call stack). - """ - try: - self.task_current = self.task_queue.pop(0) - except IndexError: - self.task_current = None - else: - rpki.async.event_defer(self.task_current) - - def task_run(self): - """ - Run first task on the task queue, unless one is running already. - """ - if self.task_current is None: - self.task_next() - - def cron(self, cb = None): - """ - Periodic tasks. - """ - - rpki.log.trace() - - now = rpki.sundial.now() - - rpki.log.debug("Starting cron run") - - def done(): - self.sql.sweep() - self.cron_timeout = None - rpki.log.info("Finished cron run started at %s" % now) - if cb is not None: - cb() - - completion = rpki.rpkid_tasks.CompletionHandler(done) - try: - selves = rpki.left_right.self_elt.sql_fetch_all(self) - except Exception, e: - rpki.log.warn("Error pulling self_elts from SQL, maybe SQL server is down? 
(%s)" % e) - else: - for s in selves: - s.schedule_cron_tasks(completion) - nothing_queued = completion.count == 0 - - assert self.use_internal_cron or self.cron_timeout is None - - if self.cron_timeout is not None and self.cron_timeout < now: - rpki.log.warn("cron keepalive threshold %s has expired, breaking lock" % self.cron_timeout) - self.cron_timeout = None - - if self.use_internal_cron: - when = now + self.cron_period - rpki.log.debug("Scheduling next cron run at %s" % when) - self.cron_timer.set(when) - - if self.cron_timeout is None: - self.checkpoint(self.use_internal_cron) - self.task_run() - - elif self.use_internal_cron: - rpki.log.warn("cron already running, keepalive will expire at %s" % self.cron_timeout) - - if nothing_queued: - done() - - def cronjob_handler(self, query, path, cb): - """ - External trigger for periodic tasks. This is somewhat obsolete - now that we have internal timers, but the test framework still - uses it. - """ - - def done(): - cb(200, body = "OK") - - if self.use_internal_cron: - cb(500, reason = "Running cron internally") - else: - rpki.log.debug("Starting externally triggered cron") - self.cron(done) - -class ca_obj(rpki.sql.sql_persistent): - """ - Internal CA object. - """ - - sql_template = rpki.sql.template( - "ca", - "ca_id", - "last_crl_sn", - ("next_crl_update", rpki.sundial.datetime), - "last_issued_sn", - "last_manifest_sn", - ("next_manifest_update", rpki.sundial.datetime), - "sia_uri", - "parent_id", - "parent_resource_class") - - last_crl_sn = 0 - last_issued_sn = 0 - last_manifest_sn = 0 - - def __repr__(self): - return rpki.log.log_repr(self, repr(self.parent), self.parent_resource_class) - - @property - @rpki.sql.cache_reference - def parent(self): - """ - Fetch parent object to which this CA object links. - """ - return rpki.left_right.parent_elt.sql_fetch(self.gctx, self.parent_id) - - @property - def ca_details(self): - """ - Fetch all ca_detail objects that link to this CA object. 
- """ - return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s", (self.ca_id,)) - - @property - def pending_ca_details(self): - """ - Fetch the pending ca_details for this CA, if any. - """ - return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state = 'pending'", (self.ca_id,)) - - @property - def active_ca_detail(self): - """ - Fetch the active ca_detail for this CA, if any. - """ - return ca_detail_obj.sql_fetch_where1(self.gctx, "ca_id = %s AND state = 'active'", (self.ca_id,)) - - @property - def deprecated_ca_details(self): - """ - Fetch deprecated ca_details for this CA, if any. - """ - return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state = 'deprecated'", (self.ca_id,)) - - @property - def active_or_deprecated_ca_details(self): - """ - Fetch active and deprecated ca_details for this CA, if any. - """ - return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND (state = 'active' OR state = 'deprecated')", (self.ca_id,)) - - @property - def revoked_ca_details(self): - """ - Fetch revoked ca_details for this CA, if any. - """ - return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state = 'revoked'", (self.ca_id,)) - - @property - def issue_response_candidate_ca_details(self): - """ - Fetch ca_details which are candidates for consideration when - processing an up-down issue_response PDU. - """ - #return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND latest_ca_cert IS NOT NULL AND state != 'revoked'", (self.ca_id,)) - return ca_detail_obj.sql_fetch_where(self.gctx, "ca_id = %s AND state != 'revoked'", (self.ca_id,)) - - def construct_sia_uri(self, parent, rc): - """ - Construct the sia_uri value for this CA given configured - information and the parent's up-down protocol list_response PDU. 
- """ - - sia_uri = rc.suggested_sia_head and rc.suggested_sia_head.rsync() - if not sia_uri or not sia_uri.startswith(parent.sia_base): - sia_uri = parent.sia_base - if not sia_uri.endswith("/"): - raise rpki.exceptions.BadURISyntax, "SIA URI must end with a slash: %s" % sia_uri - # With luck this can go away sometime soon. - if self.gctx.merge_publication_directories: - return sia_uri - else: - return sia_uri + str(self.ca_id) + "/" - - def check_for_updates(self, parent, rc, cb, eb): - """ - Parent has signaled continued existance of a resource class we - already knew about, so we need to check for an updated - certificate, changes in resource coverage, revocation and reissue - with the same key, etc. - """ - - sia_uri = self.construct_sia_uri(parent, rc) - sia_uri_changed = self.sia_uri != sia_uri - if sia_uri_changed: - rpki.log.debug("SIA changed: was %s now %s" % (self.sia_uri, sia_uri)) - self.sia_uri = sia_uri - self.sql_mark_dirty() - - rc_resources = rc.to_resource_bag() - cert_map = dict((c.cert.get_SKI(), c) for c in rc.certs) - - def loop(iterator, ca_detail): - - self.gctx.checkpoint() - - rc_cert = cert_map.pop(ca_detail.public_key.get_SKI(), None) - - if rc_cert is None: - - rpki.log.warn("SKI %s in resource class %s is in database but missing from list_response to %s from %s, maybe parent certificate went away?" 
- % (ca_detail.public_key.gSKI(), rc.class_name, parent.self.self_handle, parent.parent_handle)) - publisher = publication_queue() - ca_detail.delete(ca = ca_detail.ca, publisher = publisher) - return publisher.call_pubd(iterator, eb) - - else: - - if ca_detail.state == "active" and ca_detail.ca_cert_uri != rc_cert.cert_url.rsync(): - rpki.log.debug("AIA changed: was %s now %s" % (ca_detail.ca_cert_uri, rc_cert.cert_url.rsync())) - ca_detail.ca_cert_uri = rc_cert.cert_url.rsync() - ca_detail.sql_mark_dirty() - - if ca_detail.state in ("pending", "active"): - - if ca_detail.state == "pending": - current_resources = rpki.resource_set.resource_bag() - else: - current_resources = ca_detail.latest_ca_cert.get_3779resources() - - if (ca_detail.state == "pending" or - sia_uri_changed or - ca_detail.latest_ca_cert != rc_cert.cert or - ca_detail.latest_ca_cert.getNotAfter() != rc_resources.valid_until or - current_resources.undersized(rc_resources) or - current_resources.oversized(rc_resources)): - return ca_detail.update( - parent = parent, - ca = self, - rc = rc, - sia_uri_changed = sia_uri_changed, - old_resources = current_resources, - callback = iterator, - errback = eb) - - iterator() - - def done(): - if cert_map: - rpki.log.warn("Unknown certificate SKI%s %s in resource class %s in list_response " - "to %s from %s, maybe you want to \"revoke_forgotten\"?" 
- % ("" if len(cert_map) == 1 else "s", - ", ".join(c.cert.gSKI() for c in cert_map.values()), - rc.class_name, parent.self.self_handle, parent.parent_handle)) - self.gctx.sql.sweep() - self.gctx.checkpoint() - cb() - - ca_details = self.issue_response_candidate_ca_details - - if True: - skis_parent = set(x.cert.gSKI() - for x in cert_map.itervalues()) - skis_me = set(x.latest_ca_cert.gSKI() - for x in ca_details - if x.latest_ca_cert is not None) - for ski in skis_parent & skis_me: - rpki.log.debug("Parent %s agrees that %s has SKI %s in resource class %s" - % (parent.parent_handle, parent.self.self_handle, ski, rc.class_name)) - for ski in skis_parent - skis_me: - rpki.log.debug("Parent %s thinks %s has SKI %s in resource class %s but I don't think so" - % (parent.parent_handle, parent.self.self_handle, ski, rc.class_name)) - for ski in skis_me - skis_parent: - rpki.log.debug("I think %s has SKI %s in resource class %s but parent %s doesn't think so" - % (parent.self.self_handle, ski, rc.class_name, parent.parent_handle)) - - if ca_details: - rpki.async.iterator(ca_details, loop, done) - else: - rpki.log.warn("Existing resource class %s to %s from %s with no certificates, rekeying" % - (rc.class_name, parent.self.self_handle, parent.parent_handle)) - self.gctx.checkpoint() - self.rekey(cb, eb) - - @classmethod - def create(cls, parent, rc, cb, eb): - """ - Parent has signaled existance of a new resource class, so we need - to create and set up a corresponding CA object. 
- """ - - self = cls() - self.gctx = parent.gctx - self.parent_id = parent.parent_id - self.parent_resource_class = rc.class_name - self.sql_store() - try: - self.sia_uri = self.construct_sia_uri(parent, rc) - except rpki.exceptions.BadURISyntax: - self.sql_delete() - raise - ca_detail = ca_detail_obj.create(self) - - def done(issue_response): - c = issue_response.payload.classes[0].certs[0] - rpki.log.debug("CA %r received certificate %s" % (self, c.cert_url)) - ca_detail.activate( - ca = self, - cert = c.cert, - uri = c.cert_url, - callback = cb, - errback = eb) - - rpki.log.debug("Sending issue request to %r from %r" % (parent, self.create)) - rpki.up_down.issue_pdu.query(parent, self, ca_detail, done, eb) - - def delete(self, parent, callback): - """ - The list of current resource classes received from parent does not - include the class corresponding to this CA, so we need to delete - it (and its little dog too...). - - All certs published by this CA are now invalid, so need to - withdraw them, the CRL, and the manifest from the repository, - delete all child_cert and ca_detail records associated with this - CA, then finally delete this CA itself. - """ - - def lose(e): - rpki.log.traceback() - rpki.log.warn("Could not delete CA %r, skipping: %s" % (self, e)) - callback() - - def done(): - rpki.log.debug("Deleting %r" % self) - self.sql_delete() - callback() - - publisher = publication_queue() - for ca_detail in self.ca_details: - ca_detail.delete(ca = self, publisher = publisher, allow_failure = True) - publisher.call_pubd(done, lose) - - def next_serial_number(self): - """ - Allocate a certificate serial number. - """ - self.last_issued_sn += 1 - self.sql_mark_dirty() - return self.last_issued_sn - - def next_manifest_number(self): - """ - Allocate a manifest serial number. - """ - self.last_manifest_sn += 1 - self.sql_mark_dirty() - return self.last_manifest_sn - - def next_crl_number(self): - """ - Allocate a CRL serial number. 
- """ - self.last_crl_sn += 1 - self.sql_mark_dirty() - return self.last_crl_sn - - def rekey(self, cb, eb): - """ - Initiate a rekey operation for this ca. Generate a new keypair. - Request cert from parent using new keypair. Mark result as our - active ca_detail. Reissue all child certs issued by this ca using - the new ca_detail. - """ - - rpki.log.trace() - - parent = self.parent - old_detail = self.active_ca_detail - new_detail = ca_detail_obj.create(self) - - def done(issue_response): - c = issue_response.payload.classes[0].certs[0] - rpki.log.debug("CA %r received certificate %s" % (self, c.cert_url)) - new_detail.activate( - ca = self, - cert = c.cert, - uri = c.cert_url, - predecessor = old_detail, - callback = cb, - errback = eb) - - rpki.log.debug("Sending issue request to %r from %r" % (parent, self.rekey)) - rpki.up_down.issue_pdu.query(parent, self, new_detail, done, eb) - - def revoke(self, cb, eb, revoke_all = False): - """ - Revoke deprecated ca_detail objects associated with this CA, or - all ca_details associated with this CA if revoke_all is set. - """ - - rpki.log.trace() - - def loop(iterator, ca_detail): - ca_detail.revoke(cb = iterator, eb = eb) - - ca_details = self.ca_details if revoke_all else self.deprecated_ca_details - - rpki.async.iterator(ca_details, loop, cb) - - def reissue(self, cb, eb): - """ - Reissue all current certificates issued by this CA. - """ - - ca_detail = self.active_ca_detail - if ca_detail: - ca_detail.reissue(cb, eb) - else: - cb() - -class ca_detail_obj(rpki.sql.sql_persistent): - """ - Internal CA detail object. 
- """ - - sql_template = rpki.sql.template( - "ca_detail", - "ca_detail_id", - ("private_key_id", rpki.x509.RSA), - ("public_key", rpki.x509.PublicKey), - ("latest_ca_cert", rpki.x509.X509), - ("manifest_private_key_id", rpki.x509.RSA), - ("manifest_public_key", rpki.x509.PublicKey), - ("latest_manifest_cert", rpki.x509.X509), - ("latest_manifest", rpki.x509.SignedManifest), - ("latest_crl", rpki.x509.CRL), - ("crl_published", rpki.sundial.datetime), - ("manifest_published", rpki.sundial.datetime), - "state", - "ca_cert_uri", - "ca_id") - - crl_published = None - manifest_published = None - latest_ca_cert = None - latest_crl = None - latest_manifest = None - ca_cert_uri = None - - def __repr__(self): - return rpki.log.log_repr(self, repr(self.ca), self.state, self.ca_cert_uri) - - def sql_decode(self, vals): - """ - Extra assertions for SQL decode of a ca_detail_obj. - """ - rpki.sql.sql_persistent.sql_decode(self, vals) - assert self.public_key is None or self.private_key_id is None or self.public_key.get_DER() == self.private_key_id.get_public_DER() - assert self.manifest_public_key is None or self.manifest_private_key_id is None or self.manifest_public_key.get_DER() == self.manifest_private_key_id.get_public_DER() - - @property - @rpki.sql.cache_reference - def ca(self): - """ - Fetch CA object to which this ca_detail links. - """ - return ca_obj.sql_fetch(self.gctx, self.ca_id) - - def fetch_child_certs(self, child = None, ski = None, unique = False, unpublished = None): - """ - Fetch all child_cert objects that link to this ca_detail. - """ - return rpki.rpkid.child_cert_obj.fetch(self.gctx, child, self, ski, unique, unpublished) - - @property - def child_certs(self): - """ - Fetch all child_cert objects that link to this ca_detail. - """ - return self.fetch_child_certs() - - def unpublished_child_certs(self, when): - """ - Fetch all unpublished child_cert objects linked to this ca_detail - with attempted publication dates older than when. 
- """ - return self.fetch_child_certs(unpublished = when) - - @property - def revoked_certs(self): - """ - Fetch all revoked_cert objects that link to this ca_detail. - """ - return revoked_cert_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,)) - - @property - def roas(self): - """ - Fetch all ROA objects that link to this ca_detail. - """ - return rpki.rpkid.roa_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,)) - - def unpublished_roas(self, when): - """ - Fetch all unpublished ROA objects linked to this ca_detail with - attempted publication dates older than when. - """ - return rpki.rpkid.roa_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s AND published IS NOT NULL and published < %s", (self.ca_detail_id, when)) - - @property - def ghostbusters(self): - """ - Fetch all Ghostbuster objects that link to this ca_detail. - """ - return rpki.rpkid.ghostbuster_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,)) - - @property - def ee_certificates(self): - """ - Fetch all EE certificate objects that link to this ca_detail. - """ - return rpki.rpkid.ee_cert_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s", (self.ca_detail_id,)) - - def unpublished_ghostbusters(self, when): - """ - Fetch all unpublished Ghostbusters objects linked to this - ca_detail with attempted publication dates older than when. - """ - return rpki.rpkid.ghostbuster_obj.sql_fetch_where(self.gctx, "ca_detail_id = %s AND published IS NOT NULL and published < %s", (self.ca_detail_id, when)) - - @property - def crl_uri(self): - """ - Return publication URI for this ca_detail's CRL. - """ - return self.ca.sia_uri + self.crl_uri_tail - - @property - def crl_uri_tail(self): - """ - Return tail (filename portion) of publication URI for this ca_detail's CRL. - """ - return self.public_key.gSKI() + ".crl" - - @property - def manifest_uri(self): - """ - Return publication URI for this ca_detail's manifest. 
- """ - return self.ca.sia_uri + self.public_key.gSKI() + ".mft" - - def has_expired(self): - """ - Return whether this ca_detail's certificate has expired. - """ - return self.latest_ca_cert.getNotAfter() <= rpki.sundial.now() - - def covers(self, target): - """ - Test whether this ca-detail covers a given set of resources. - """ - - assert not target.asn.inherit and not target.v4.inherit and not target.v6.inherit - me = self.latest_ca_cert.get_3779resources() - return target.asn <= me.asn and target.v4 <= me.v4 and target.v6 <= me.v6 - - def activate(self, ca, cert, uri, callback, errback, predecessor = None): - """ - Activate this ca_detail. - """ - - publisher = publication_queue() - - self.latest_ca_cert = cert - self.ca_cert_uri = uri.rsync() - self.generate_manifest_cert() - self.state = "active" - self.generate_crl(publisher = publisher) - self.generate_manifest(publisher = publisher) - self.sql_store() - - if predecessor is not None: - predecessor.state = "deprecated" - predecessor.sql_store() - for child_cert in predecessor.child_certs: - child_cert.reissue(ca_detail = self, publisher = publisher) - for roa in predecessor.roas: - roa.regenerate(publisher = publisher) - for ghostbuster in predecessor.ghostbusters: - ghostbuster.regenerate(publisher = publisher) - predecessor.generate_crl(publisher = publisher) - predecessor.generate_manifest(publisher = publisher) - - publisher.call_pubd(callback, errback) - - def delete(self, ca, publisher, allow_failure = False): - """ - Delete this ca_detail and all of the certs it issued. - - If allow_failure is true, we clean up as much as we can but don't - raise an exception. 
- """ - - repository = ca.parent.repository - handler = False if allow_failure else None - for child_cert in self.child_certs: - publisher.withdraw(cls = rpki.publication.certificate_elt, - uri = child_cert.uri, - obj = child_cert.cert, - repository = repository, - handler = handler) - child_cert.sql_mark_deleted() - for roa in self.roas: - roa.revoke(publisher = publisher, allow_failure = allow_failure, fast = True) - for ghostbuster in self.ghostbusters: - ghostbuster.revoke(publisher = publisher, allow_failure = allow_failure, fast = True) - try: - latest_manifest = self.latest_manifest - except AttributeError: - latest_manifest = None - if latest_manifest is not None: - publisher.withdraw(cls = rpki.publication.manifest_elt, - uri = self.manifest_uri, - obj = self.latest_manifest, - repository = repository, - handler = handler) - try: - latest_crl = self.latest_crl - except AttributeError: - latest_crl = None - if latest_crl is not None: - publisher.withdraw(cls = rpki.publication.crl_elt, - uri = self.crl_uri, - obj = self.latest_crl, - repository = repository, - handler = handler) - self.gctx.sql.sweep() - for cert in self.revoked_certs: # + self.child_certs - rpki.log.debug("Deleting %r" % cert) - cert.sql_delete() - rpki.log.debug("Deleting %r" % self) - self.sql_delete() - - def revoke(self, cb, eb): - """ - Request revocation of all certificates whose SKI matches the key - for this ca_detail. - - Tasks: - - - Request revocation of old keypair by parent. - - - Revoke all child certs issued by the old keypair. - - - Generate a final CRL, signed with the old keypair, listing all - the revoked certs, with a next CRL time after the last cert or - CRL signed by the old keypair will have expired. - - - Generate a corresponding final manifest. - - - Destroy old keypairs. - - - Leave final CRL and manifest in place until their nextupdate - time has passed. 
- """ - - ca = self.ca - parent = ca.parent - - def parent_revoked(r_msg): - - if r_msg.payload.ski != self.latest_ca_cert.gSKI(): - raise rpki.exceptions.SKIMismatch - - rpki.log.debug("Parent revoked %s, starting cleanup" % self.latest_ca_cert.gSKI()) - - crl_interval = rpki.sundial.timedelta(seconds = parent.self.crl_interval) - - nextUpdate = rpki.sundial.now() - - if self.latest_manifest is not None: - self.latest_manifest.extract_if_needed() - nextUpdate = nextUpdate.later(self.latest_manifest.getNextUpdate()) - - if self.latest_crl is not None: - nextUpdate = nextUpdate.later(self.latest_crl.getNextUpdate()) - - publisher = publication_queue() - - for child_cert in self.child_certs: - nextUpdate = nextUpdate.later(child_cert.cert.getNotAfter()) - child_cert.revoke(publisher = publisher) - - for roa in self.roas: - nextUpdate = nextUpdate.later(roa.cert.getNotAfter()) - roa.revoke(publisher = publisher) - - for ghostbuster in self.ghostbusters: - nextUpdate = nextUpdate.later(ghostbuster.cert.getNotAfter()) - ghostbuster.revoke(publisher = publisher) - - nextUpdate += crl_interval - self.generate_crl(publisher = publisher, nextUpdate = nextUpdate) - self.generate_manifest(publisher = publisher, nextUpdate = nextUpdate) - self.private_key_id = None - self.manifest_private_key_id = None - self.manifest_public_key = None - self.latest_manifest_cert = None - self.state = "revoked" - self.sql_mark_dirty() - publisher.call_pubd(cb, eb) - - rpki.log.debug("Asking parent to revoke CA certificate %s" % self.latest_ca_cert.gSKI()) - rpki.up_down.revoke_pdu.query(ca, self.latest_ca_cert.gSKI(), parent_revoked, eb) - - def update(self, parent, ca, rc, sia_uri_changed, old_resources, callback, errback): - """ - Need to get a new certificate for this ca_detail and perhaps frob - children of this ca_detail. 
- """ - - def issued(issue_response): - c = issue_response.payload.classes[0].certs[0] - rpki.log.debug("CA %r received certificate %s" % (self, c.cert_url)) - - if self.state == "pending": - return self.activate( - ca = ca, - cert = c.cert, - uri = c.cert_url, - callback = callback, - errback = errback) - - validity_changed = self.latest_ca_cert is None or self.latest_ca_cert.getNotAfter() != c.cert.getNotAfter() - - publisher = publication_queue() - - if self.latest_ca_cert != c.cert: - self.latest_ca_cert = c.cert - self.sql_mark_dirty() - self.generate_manifest_cert() - self.generate_crl(publisher = publisher) - self.generate_manifest(publisher = publisher) - - new_resources = self.latest_ca_cert.get_3779resources() - - if sia_uri_changed or old_resources.oversized(new_resources): - for child_cert in self.child_certs: - child_resources = child_cert.cert.get_3779resources() - if sia_uri_changed or child_resources.oversized(new_resources): - child_cert.reissue( - ca_detail = self, - resources = child_resources & new_resources, - publisher = publisher) - - if sia_uri_changed or validity_changed or old_resources.oversized(new_resources): - for roa in self.roas: - roa.update(publisher = publisher, fast = True) - - if sia_uri_changed or validity_changed: - for ghostbuster in self.ghostbusters: - ghostbuster.update(publisher = publisher, fast = True) - - publisher.call_pubd(callback, errback) - - rpki.log.debug("Sending issue request to %r from %r" % (parent, self.update)) - rpki.up_down.issue_pdu.query(parent, ca, self, issued, errback) - - @classmethod - def create(cls, ca): - """ - Create a new ca_detail object for a specified CA. 
- """ - self = cls() - self.gctx = ca.gctx - self.ca_id = ca.ca_id - self.state = "pending" - - self.private_key_id = rpki.x509.RSA.generate() - self.public_key = self.private_key_id.get_public() - - self.manifest_private_key_id = rpki.x509.RSA.generate() - self.manifest_public_key = self.manifest_private_key_id.get_public() - - self.sql_store() - return self - - def issue_ee(self, ca, resources, subject_key, sia, - cn = None, sn = None, notAfter = None, eku = None): - """ - Issue a new EE certificate. - """ - - if notAfter is None: - notAfter = self.latest_ca_cert.getNotAfter() - - return self.latest_ca_cert.issue( - keypair = self.private_key_id, - subject_key = subject_key, - serial = ca.next_serial_number(), - sia = sia, - aia = self.ca_cert_uri, - crldp = self.crl_uri, - resources = resources, - notAfter = notAfter, - is_ca = False, - cn = cn, - sn = sn, - eku = eku) - - def generate_manifest_cert(self): - """ - Generate a new manifest certificate for this ca_detail. - """ - - resources = rpki.resource_set.resource_bag.from_inheritance() - self.latest_manifest_cert = self.issue_ee( - ca = self.ca, - resources = resources, - subject_key = self.manifest_public_key, - sia = (None, None, self.manifest_uri)) - - def issue(self, ca, child, subject_key, sia, resources, publisher, child_cert = None): - """ - Issue a new certificate to a child. Optional child_cert argument - specifies an existing child_cert object to update in place; if not - specified, we create a new one. Returns the child_cert object - containing the newly issued cert. 
- """ - - self.check_failed_publication(publisher) - - assert child_cert is None or child_cert.child_id == child.child_id - - cert = self.latest_ca_cert.issue( - keypair = self.private_key_id, - subject_key = subject_key, - serial = ca.next_serial_number(), - aia = self.ca_cert_uri, - crldp = self.crl_uri, - sia = sia, - resources = resources, - notAfter = resources.valid_until) - - if child_cert is None: - child_cert = rpki.rpkid.child_cert_obj( - gctx = child.gctx, - child_id = child.child_id, - ca_detail_id = self.ca_detail_id, - cert = cert) - rpki.log.debug("Created new child_cert %r" % child_cert) - else: - child_cert.cert = cert - del child_cert.ca_detail - child_cert.ca_detail_id = self.ca_detail_id - rpki.log.debug("Reusing existing child_cert %r" % child_cert) - - child_cert.ski = cert.get_SKI() - child_cert.published = rpki.sundial.now() - child_cert.sql_store() - publisher.publish( - cls = rpki.publication.certificate_elt, - uri = child_cert.uri, - obj = child_cert.cert, - repository = ca.parent.repository, - handler = child_cert.published_callback) - self.generate_manifest(publisher = publisher) - return child_cert - - def generate_crl(self, publisher, nextUpdate = None): - """ - Generate a new CRL for this ca_detail. At the moment this is - unconditional, that is, it is up to the caller to decide whether a - new CRL is needed. 
- """ - - self.check_failed_publication(publisher) - - ca = self.ca - parent = ca.parent - crl_interval = rpki.sundial.timedelta(seconds = parent.self.crl_interval) - now = rpki.sundial.now() - - if nextUpdate is None: - nextUpdate = now + crl_interval - - certlist = [] - for revoked_cert in self.revoked_certs: - if now > revoked_cert.expires + crl_interval: - revoked_cert.sql_delete() - else: - certlist.append((revoked_cert.serial, revoked_cert.revoked)) - certlist.sort() - - self.latest_crl = rpki.x509.CRL.generate( - keypair = self.private_key_id, - issuer = self.latest_ca_cert, - serial = ca.next_crl_number(), - thisUpdate = now, - nextUpdate = nextUpdate, - revokedCertificates = certlist) - - self.crl_published = rpki.sundial.now() - self.sql_mark_dirty() - publisher.publish( - cls = rpki.publication.crl_elt, - uri = self.crl_uri, - obj = self.latest_crl, - repository = parent.repository, - handler = self.crl_published_callback) - - def crl_published_callback(self, pdu): - """ - Check result of CRL publication. - """ - pdu.raise_if_error() - self.crl_published = None - self.sql_mark_dirty() - - def generate_manifest(self, publisher, nextUpdate = None): - """ - Generate a new manifest for this ca_detail. 
- """ - - self.check_failed_publication(publisher) - - ca = self.ca - parent = ca.parent - crl_interval = rpki.sundial.timedelta(seconds = parent.self.crl_interval) - now = rpki.sundial.now() - uri = self.manifest_uri - - if nextUpdate is None: - nextUpdate = now + crl_interval - - if self.latest_manifest_cert is None or self.latest_manifest_cert.getNotAfter() < nextUpdate: - rpki.log.debug("Generating EE certificate for %s" % uri) - self.generate_manifest_cert() - rpki.log.debug("Latest CA cert notAfter %s, new %s EE notAfter %s" % ( - self.latest_ca_cert.getNotAfter(), uri, self.latest_manifest_cert.getNotAfter())) - - rpki.log.debug("Constructing manifest object list for %s" % uri) - objs = [(self.crl_uri_tail, self.latest_crl)] - objs.extend((c.uri_tail, c.cert) for c in self.child_certs) - objs.extend((r.uri_tail, r.roa) for r in self.roas if r.roa is not None) - objs.extend((g.uri_tail, g.ghostbuster) for g in self.ghostbusters) - objs.extend((e.uri_tail, e.cert) for e in self.ee_certificates) - - rpki.log.debug("Building manifest object %s" % uri) - self.latest_manifest = rpki.x509.SignedManifest.build( - serial = ca.next_manifest_number(), - thisUpdate = now, - nextUpdate = nextUpdate, - names_and_objs = objs, - keypair = self.manifest_private_key_id, - certs = self.latest_manifest_cert) - - rpki.log.debug("Manifest generation took %s" % (rpki.sundial.now() - now)) - - self.manifest_published = rpki.sundial.now() - self.sql_mark_dirty() - publisher.publish(cls = rpki.publication.manifest_elt, - uri = uri, - obj = self.latest_manifest, - repository = parent.repository, - handler = self.manifest_published_callback) - - def manifest_published_callback(self, pdu): - """ - Check result of manifest publication. - """ - pdu.raise_if_error() - self.manifest_published = None - self.sql_mark_dirty() - - def reissue(self, cb, eb): - """ - Reissue all current certificates issued by this ca_detail. 
- """ - - publisher = publication_queue() - self.check_failed_publication(publisher) - for roa in self.roas: - roa.regenerate(publisher, fast = True) - for ghostbuster in self.ghostbusters: - ghostbuster.regenerate(publisher, fast = True) - for ee_certificate in self.ee_certificates: - ee_certificate.reissue(publisher, force = True) - for child_cert in self.child_certs: - child_cert.reissue(self, publisher, force = True) - self.gctx.sql.sweep() - self.generate_manifest_cert() - self.sql_mark_dirty() - self.generate_crl(publisher = publisher) - self.generate_manifest(publisher = publisher) - self.gctx.sql.sweep() - publisher.call_pubd(cb, eb) - - def check_failed_publication(self, publisher, check_all = True): - """ - Check for failed publication of objects issued by this ca_detail. - - All publishable objects have timestamp fields recording time of - last attempted publication, and callback methods which clear these - timestamps once publication has succeeded. Our task here is to - look for objects issued by this ca_detail which have timestamps - set (indicating that they have not been published) and for which - the timestamps are not very recent (for some definition of very - recent -- intent is to allow a bit of slack in case pubd is just - being slow). In such cases, we want to retry publication. - - As an optimization, we can probably skip checking other products - if manifest and CRL have been published, thus saving ourselves - several complex SQL queries. Not sure yet whether this - optimization is worthwhile. - - For the moment we check everything without optimization, because - it simplifies testing. - - For the moment our definition of staleness is hardwired; this - should become configurable. 
- """ - - rpki.log.debug("Checking for failed publication for %r" % self) - - stale = rpki.sundial.now() - rpki.sundial.timedelta(seconds = 60) - repository = self.ca.parent.repository - - if self.latest_crl is not None and \ - self.crl_published is not None and \ - self.crl_published < stale: - rpki.log.debug("Retrying publication for %s" % self.crl_uri) - publisher.publish(cls = rpki.publication.crl_elt, - uri = self.crl_uri, - obj = self.latest_crl, - repository = repository, - handler = self.crl_published_callback) - - if self.latest_manifest is not None and \ - self.manifest_published is not None and \ - self.manifest_published < stale: - rpki.log.debug("Retrying publication for %s" % self.manifest_uri) - publisher.publish(cls = rpki.publication.manifest_elt, - uri = self.manifest_uri, - obj = self.latest_manifest, - repository = repository, - handler = self.manifest_published_callback) - - if not check_all: - return - - # Might also be able to return here if manifest and CRL are up to - # date, but let's avoid premature optimization - - for child_cert in self.unpublished_child_certs(stale): - rpki.log.debug("Retrying publication for %s" % child_cert) - publisher.publish( - cls = rpki.publication.certificate_elt, - uri = child_cert.uri, - obj = child_cert.cert, - repository = repository, - handler = child_cert.published_callback) - - for roa in self.unpublished_roas(stale): - rpki.log.debug("Retrying publication for %s" % roa) - publisher.publish( - cls = rpki.publication.roa_elt, - uri = roa.uri, - obj = roa.roa, - repository = repository, - handler = roa.published_callback) - - for ghostbuster in self.unpublished_ghostbusters(stale): - rpki.log.debug("Retrying publication for %s" % ghostbuster) - publisher.publish( - cls = rpki.publication.ghostbuster_elt, - uri = ghostbuster.uri, - obj = ghostbuster.ghostbuster, - repository = repository, - handler = ghostbuster.published_callback) - -class child_cert_obj(rpki.sql.sql_persistent): - """ - Certificate that 
has been issued to a child. - """ - - sql_template = rpki.sql.template( - "child_cert", - "child_cert_id", - ("cert", rpki.x509.X509), - "child_id", - "ca_detail_id", - "ski", - ("published", rpki.sundial.datetime)) - - def __repr__(self): - args = [self] - try: - args.append(self.uri) - except: - pass - return rpki.log.log_repr(*args) - - def __init__(self, gctx = None, child_id = None, ca_detail_id = None, cert = None): - """ - Initialize a child_cert_obj. - """ - rpki.sql.sql_persistent.__init__(self) - self.gctx = gctx - self.child_id = child_id - self.ca_detail_id = ca_detail_id - self.cert = cert - self.published = None - if child_id or ca_detail_id or cert: - self.sql_mark_dirty() - - @property - @rpki.sql.cache_reference - def child(self): - """ - Fetch child object to which this child_cert object links. - """ - return rpki.left_right.child_elt.sql_fetch(self.gctx, self.child_id) - - @property - @rpki.sql.cache_reference - def ca_detail(self): - """ - Fetch ca_detail object to which this child_cert object links. - """ - return ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) - - @ca_detail.deleter - def ca_detail(self): - try: - del self._ca_detail - except AttributeError: - pass - - @property - def uri_tail(self): - """ - Return the tail (filename) portion of the URI for this child_cert. - """ - return self.cert.gSKI() + ".cer" - - @property - def uri(self): - """ - Return the publication URI for this child_cert. - """ - return self.ca_detail.ca.sia_uri + self.uri_tail - - def revoke(self, publisher, generate_crl_and_manifest = True): - """ - Revoke a child cert. 
- """ - - ca_detail = self.ca_detail - ca = ca_detail.ca - rpki.log.debug("Revoking %r %r" % (self, self.uri)) - revoked_cert_obj.revoke(cert = self.cert, ca_detail = ca_detail) - publisher.withdraw( - cls = rpki.publication.certificate_elt, - uri = self.uri, - obj = self.cert, - repository = ca.parent.repository) - self.gctx.sql.sweep() - self.sql_delete() - if generate_crl_and_manifest: - ca_detail.generate_crl(publisher = publisher) - ca_detail.generate_manifest(publisher = publisher) - - def reissue(self, ca_detail, publisher, resources = None, sia = None, force = False): - """ - Reissue an existing child cert, reusing the public key. If the - child cert we would generate is identical to the one we already - have, we just return the one we already have. If we have to - revoke the old child cert when generating the new one, we have to - generate a new child_cert_obj, so calling code that needs the - updated child_cert_obj must use the return value from this method. - """ - - ca = ca_detail.ca - child = self.child - - old_resources = self.cert.get_3779resources() - old_sia = self.cert.get_SIA() - old_aia = self.cert.get_AIA()[0] - old_ca_detail = self.ca_detail - - needed = False - - if resources is None: - resources = old_resources - - if sia is None: - sia = old_sia - - assert resources.valid_until is not None and old_resources.valid_until is not None - - if resources.asn != old_resources.asn or resources.v4 != old_resources.v4 or resources.v6 != old_resources.v6: - rpki.log.debug("Resources changed for %r: old %s new %s" % (self, old_resources, resources)) - needed = True - - if resources.valid_until != old_resources.valid_until: - rpki.log.debug("Validity changed for %r: old %s new %s" % ( - self, old_resources.valid_until, resources.valid_until)) - needed = True - - if sia != old_sia: - rpki.log.debug("SIA changed for %r: old %r new %r" % (self, old_sia, sia)) - needed = True - - if ca_detail != old_ca_detail: - rpki.log.debug("Issuer changed for %r: old %r 
new %r" % (self, old_ca_detail, ca_detail)) - needed = True - - if ca_detail.ca_cert_uri != old_aia: - rpki.log.debug("AIA changed for %r: old %r new %r" % (self, old_aia, ca_detail.ca_cert_uri)) - needed = True - - must_revoke = old_resources.oversized(resources) or old_resources.valid_until > resources.valid_until - if must_revoke: - rpki.log.debug("Must revoke any existing cert(s) for %r" % self) - needed = True - - if not needed and force: - rpki.log.debug("No change needed for %r, forcing reissuance anyway" % self) - needed = True - - if not needed: - rpki.log.debug("No change to %r" % self) - return self - - if must_revoke: - for x in child.fetch_child_certs(ca_detail = ca_detail, ski = self.ski): - rpki.log.debug("Revoking child_cert %r" % x) - x.revoke(publisher = publisher) - ca_detail.generate_crl(publisher = publisher) - ca_detail.generate_manifest(publisher = publisher) - - child_cert = ca_detail.issue( - ca = ca, - child = child, - subject_key = self.cert.getPublicKey(), - sia = sia, - resources = resources, - child_cert = None if must_revoke else self, - publisher = publisher) - - rpki.log.debug("New child_cert %r uri %s" % (child_cert, child_cert.uri)) - - return child_cert - - @classmethod - def fetch(cls, gctx = None, child = None, ca_detail = None, ski = None, unique = False, unpublished = None): - """ - Fetch all child_cert objects matching a particular set of - parameters. This is a wrapper to consolidate various queries that - would otherwise be inline SQL WHERE expressions. In most cases - code calls this indirectly, through methods in other classes. 
- """ - - args = [] - where = [] - - if child: - where.append("child_id = %s") - args.append(child.child_id) - - if ca_detail: - where.append("ca_detail_id = %s") - args.append(ca_detail.ca_detail_id) - - if ski: - where.append("ski = %s") - args.append(ski) - - if unpublished is not None: - where.append("published IS NOT NULL AND published < %s") - args.append(unpublished) - - where = " AND ".join(where) - - gctx = gctx or (child and child.gctx) or (ca_detail and ca_detail.gctx) or None - - if unique: - return cls.sql_fetch_where1(gctx, where, args) - else: - return cls.sql_fetch_where(gctx, where, args) - - def published_callback(self, pdu): - """ - Publication callback: check result and mark published. - """ - pdu.raise_if_error() - self.published = None - self.sql_mark_dirty() - -class revoked_cert_obj(rpki.sql.sql_persistent): - """ - Tombstone for a revoked certificate. - """ - - sql_template = rpki.sql.template( - "revoked_cert", - "revoked_cert_id", - "serial", - "ca_detail_id", - ("revoked", rpki.sundial.datetime), - ("expires", rpki.sundial.datetime)) - - def __repr__(self): - return rpki.log.log_repr(self, repr(self.ca_detail), self.serial, self.revoked) - - def __init__(self, gctx = None, serial = None, revoked = None, expires = None, ca_detail_id = None): - """ - Initialize a revoked_cert_obj. - """ - rpki.sql.sql_persistent.__init__(self) - self.gctx = gctx - self.serial = serial - self.revoked = revoked - self.expires = expires - self.ca_detail_id = ca_detail_id - if serial or revoked or expires or ca_detail_id: - self.sql_mark_dirty() - - @property - @rpki.sql.cache_reference - def ca_detail(self): - """ - Fetch ca_detail object to which this revoked_cert_obj links. - """ - return ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) - - @classmethod - def revoke(cls, cert, ca_detail): - """ - Revoke a certificate. 
- """ - return cls( - serial = cert.getSerial(), - expires = cert.getNotAfter(), - revoked = rpki.sundial.now(), - gctx = ca_detail.gctx, - ca_detail_id = ca_detail.ca_detail_id) - -class roa_obj(rpki.sql.sql_persistent): - """ - Route Origin Authorization. - """ - - sql_template = rpki.sql.template( - "roa", - "roa_id", - "ca_detail_id", - "self_id", - "asn", - ("roa", rpki.x509.ROA), - ("cert", rpki.x509.X509), - ("published", rpki.sundial.datetime)) - - ca_detail_id = None - cert = None - roa = None - published = None - - @property - @rpki.sql.cache_reference - def self(self): - """ - Fetch self object to which this roa_obj links. - """ - return rpki.left_right.self_elt.sql_fetch(self.gctx, self.self_id) - - @property - @rpki.sql.cache_reference - def ca_detail(self): - """ - Fetch ca_detail object to which this roa_obj links. - """ - return rpki.rpkid.ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) - - @ca_detail.deleter - def ca_detail(self): - try: - del self._ca_detail - except AttributeError: - pass - - def sql_fetch_hook(self): - """ - Extra SQL fetch actions for roa_obj -- handle prefix lists. - """ - for version, datatype, attribute in ((4, rpki.resource_set.roa_prefix_set_ipv4, "ipv4"), - (6, rpki.resource_set.roa_prefix_set_ipv6, "ipv6")): - setattr(self, attribute, datatype.from_sql( - self.gctx.sql, - """ - SELECT prefix, prefixlen, max_prefixlen FROM roa_prefix - WHERE roa_id = %s AND version = %s - """, - (self.roa_id, version))) - - def sql_insert_hook(self): - """ - Extra SQL insert actions for roa_obj -- handle prefix lists. - """ - for version, prefix_set in ((4, self.ipv4), (6, self.ipv6)): - if prefix_set: - self.gctx.sql.executemany( - """ - INSERT roa_prefix (roa_id, prefix, prefixlen, max_prefixlen, version) - VALUES (%s, %s, %s, %s, %s) - """, - ((self.roa_id, x.prefix, x.prefixlen, x.max_prefixlen, version) - for x in prefix_set)) - - def sql_delete_hook(self): - """ - Extra SQL delete actions for roa_obj -- handle prefix lists. 
- """ - self.gctx.sql.execute("DELETE FROM roa_prefix WHERE roa_id = %s", (self.roa_id,)) - - def __repr__(self): - args = [self, self.asn, self.ipv4, self.ipv6] - try: - args.append(self.uri) - except: - pass - return rpki.log.log_repr(*args) - - def __init__(self, gctx = None, self_id = None, asn = None, ipv4 = None, ipv6 = None): - rpki.sql.sql_persistent.__init__(self) - self.gctx = gctx - self.self_id = self_id - self.asn = asn - self.ipv4 = ipv4 - self.ipv6 = ipv6 - - # Defer marking new ROA as dirty until .generate() has a chance to - # finish setup, otherwise we get SQL consistency errors. - # - #if self_id or asn or ipv4 or ipv6: self.sql_mark_dirty() - - def update(self, publisher, fast = False): - """ - Bring this roa_obj's ROA up to date if necesssary. - """ - - v4 = self.ipv4.to_resource_set() if self.ipv4 is not None else rpki.resource_set.resource_set_ipv4() - v6 = self.ipv6.to_resource_set() if self.ipv6 is not None else rpki.resource_set.resource_set_ipv6() - - if self.roa is None: - rpki.log.debug("%r doesn't exist, generating" % self) - return self.generate(publisher = publisher, fast = fast) - - ca_detail = self.ca_detail - - if ca_detail is None: - rpki.log.debug("%r has no associated ca_detail, generating" % self) - return self.generate(publisher = publisher, fast = fast) - - if ca_detail.state != "active": - rpki.log.debug("ca_detail associated with %r not active (state %s), regenerating" % (self, ca_detail.state)) - return self.regenerate(publisher = publisher, fast = fast) - - regen_time = self.cert.getNotAfter() - rpki.sundial.timedelta(seconds = self.self.regen_margin) - - if rpki.sundial.now() > regen_time: - rpki.log.debug("%r past threshold %s, regenerating" % (self, regen_time)) - return self.regenerate(publisher = publisher, fast = fast) - - ca_resources = ca_detail.latest_ca_cert.get_3779resources() - ee_resources = self.cert.get_3779resources() - - if ee_resources.oversized(ca_resources): - rpki.log.debug("%r oversized with respect 
to CA, regenerating" % self) - return self.regenerate(publisher = publisher, fast = fast) - - if ee_resources.v4 != v4 or ee_resources.v6 != v6: - rpki.log.debug("%r resources do not match EE, regenerating" % self) - return self.regenerate(publisher = publisher, fast = fast) - - if self.cert.get_AIA()[0] != ca_detail.ca_cert_uri: - rpki.log.debug("%r AIA changed, regenerating" % self) - return self.regenerate(publisher = publisher, fast = fast) - - def generate(self, publisher, fast = False): - """ - Generate a ROA. - - At present we have no way of performing a direct lookup from a - desired set of resources to a covering certificate, so we have to - search. This could be quite slow if we have a lot of active - ca_detail objects. Punt on the issue for now, revisit if - profiling shows this as a hotspot. - - Once we have the right covering certificate, we generate the ROA - payload, generate a new EE certificate, use the EE certificate to - sign the ROA payload, publish the result, then throw away the - private key for the EE cert, all per the ROA specification. This - implies that generating a lot of ROAs will tend to thrash - /dev/random, but there is not much we can do about that. - - If fast is set, we leave generating the new manifest for our - caller to handle, presumably at the end of a bulk operation. - """ - - if self.ipv4 is None and self.ipv6 is None: - raise rpki.exceptions.EmptyROAPrefixList - - # Ugly and expensive search for covering ca_detail, there has to - # be a better way, but it would require the ability to test for - # resource subsets in SQL. 
- - v4 = self.ipv4.to_resource_set() if self.ipv4 is not None else rpki.resource_set.resource_set_ipv4() - v6 = self.ipv6.to_resource_set() if self.ipv6 is not None else rpki.resource_set.resource_set_ipv6() - - ca_detail = self.ca_detail - if ca_detail is None or ca_detail.state != "active" or ca_detail.has_expired(): - rpki.log.debug("Searching for new ca_detail for ROA %r" % self) - ca_detail = None - for parent in self.self.parents: - for ca in parent.cas: - ca_detail = ca.active_ca_detail - assert ca_detail is None or ca_detail.state == "active" - if ca_detail is not None and not ca_detail.has_expired(): - resources = ca_detail.latest_ca_cert.get_3779resources() - if v4.issubset(resources.v4) and v6.issubset(resources.v6): - break - ca_detail = None - if ca_detail is not None: - break - else: - rpki.log.debug("Keeping old ca_detail for ROA %r" % self) - - if ca_detail is None: - raise rpki.exceptions.NoCoveringCertForROA, "Could not find a certificate covering %r" % self - - rpki.log.debug("Using new ca_detail %r for ROA %r, ca_detail_state %s" % ( - ca_detail, self, ca_detail.state)) - - ca = ca_detail.ca - resources = rpki.resource_set.resource_bag(v4 = v4, v6 = v6) - keypair = rpki.x509.RSA.generate() - - del self.ca_detail - self.ca_detail_id = ca_detail.ca_detail_id - self.cert = ca_detail.issue_ee( - ca = ca, - resources = resources, - subject_key = keypair.get_public(), - sia = (None, None, self.uri_from_key(keypair))) - self.roa = rpki.x509.ROA.build(self.asn, self.ipv4, self.ipv6, keypair, (self.cert,)) - self.published = rpki.sundial.now() - self.sql_store() - - rpki.log.debug("Generating %r URI %s" % (self, self.uri)) - publisher.publish( - cls = rpki.publication.roa_elt, - uri = self.uri, - obj = self.roa, - repository = ca.parent.repository, - handler = self.published_callback) - if not fast: - ca_detail.generate_manifest(publisher = publisher) - - - def published_callback(self, pdu): - """ - Check publication result. 
- """ - pdu.raise_if_error() - self.published = None - self.sql_mark_dirty() - - def revoke(self, publisher, regenerate = False, allow_failure = False, fast = False): - """ - Withdraw ROA associated with this roa_obj. - - In order to preserve make-before-break properties without - duplicating code, this method also handles generating a - replacement ROA when requested. - - If allow_failure is set, failing to withdraw the ROA will not be - considered an error. - - If fast is set, SQL actions will be deferred, on the assumption - that our caller will handle regenerating CRL and manifest and - flushing the SQL cache. - """ - - ca_detail = self.ca_detail - cert = self.cert - roa = self.roa - uri = self.uri - - rpki.log.debug("%s %r, ca_detail %r state is %s" % ( - "Regenerating" if regenerate else "Not regenerating", - self, ca_detail, ca_detail.state)) - - if regenerate: - self.generate(publisher = publisher, fast = fast) - - rpki.log.debug("Withdrawing %r %s and revoking its EE cert" % (self, uri)) - rpki.rpkid.revoked_cert_obj.revoke(cert = cert, ca_detail = ca_detail) - publisher.withdraw(cls = rpki.publication.roa_elt, uri = uri, obj = roa, - repository = ca_detail.ca.parent.repository, - handler = False if allow_failure else None) - - if not regenerate: - self.sql_mark_deleted() - - if not fast: - ca_detail.generate_crl(publisher = publisher) - ca_detail.generate_manifest(publisher = publisher) - self.gctx.sql.sweep() - - def regenerate(self, publisher, fast = False): - """ - Reissue ROA associated with this roa_obj. - """ - if self.ca_detail is None: - self.generate(publisher = publisher, fast = fast) - else: - self.revoke(publisher = publisher, regenerate = True, fast = fast) - - def uri_from_key(self, key): - """ - Return publication URI for a public key. - """ - return self.ca_detail.ca.sia_uri + key.gSKI() + ".roa" - - @property - def uri(self): - """ - Return the publication URI for this roa_obj's ROA. 
- """ - return self.ca_detail.ca.sia_uri + self.uri_tail - - @property - def uri_tail(self): - """ - Return the tail (filename portion) of the publication URI for this - roa_obj's ROA. - """ - return self.cert.gSKI() + ".roa" - - -class ghostbuster_obj(rpki.sql.sql_persistent): - """ - Ghostbusters record. - """ - - sql_template = rpki.sql.template( - "ghostbuster", - "ghostbuster_id", - "ca_detail_id", - "self_id", - "vcard", - ("ghostbuster", rpki.x509.Ghostbuster), - ("cert", rpki.x509.X509), - ("published", rpki.sundial.datetime)) - - ca_detail_id = None - cert = None - ghostbuster = None - published = None - vcard = None - - def __repr__(self): - args = [self] - try: - args.extend(self.vcard.splitlines()[2:-1]) - except: - pass - try: - args.append(self.uri) - except: - pass - return rpki.log.log_repr(*args) - - @property - @rpki.sql.cache_reference - def self(self): - """ - Fetch self object to which this ghostbuster_obj links. - """ - return rpki.left_right.self_elt.sql_fetch(self.gctx, self.self_id) - - @property - @rpki.sql.cache_reference - def ca_detail(self): - """ - Fetch ca_detail object to which this ghostbuster_obj links. - """ - return rpki.rpkid.ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) - - def __init__(self, gctx = None, self_id = None, ca_detail_id = None, vcard = None): - rpki.sql.sql_persistent.__init__(self) - self.gctx = gctx - self.self_id = self_id - self.ca_detail_id = ca_detail_id - self.vcard = vcard - - # Defer marking new ghostbuster as dirty until .generate() has a chance to - # finish setup, otherwise we get SQL consistency errors. - - def update(self, publisher, fast = False): - """ - Bring this ghostbuster_obj up to date if necesssary. 
- """ - - if self.ghostbuster is None: - rpki.log.debug("Ghostbuster record doesn't exist, generating") - return self.generate(publisher = publisher, fast = fast) - - regen_time = self.cert.getNotAfter() - rpki.sundial.timedelta(seconds = self.self.regen_margin) - - if rpki.sundial.now() > regen_time: - rpki.log.debug("%r past threshold %s, regenerating" % (self, regen_time)) - return self.regenerate(publisher = publisher, fast = fast) - - if self.cert.get_AIA()[0] != self.ca_detail.ca_cert_uri: - rpki.log.debug("%r AIA changed, regenerating" % self) - return self.regenerate(publisher = publisher, fast = fast) - - def generate(self, publisher, fast = False): - """ - Generate a Ghostbuster record - - Once we have the right covering certificate, we generate the - ghostbuster payload, generate a new EE certificate, use the EE - certificate to sign the ghostbuster payload, publish the result, - then throw away the private key for the EE cert. This is modeled - after the way we handle ROAs. - - If fast is set, we leave generating the new manifest for our - caller to handle, presumably at the end of a bulk operation. - """ - - ca_detail = self.ca_detail - ca = ca_detail.ca - - resources = rpki.resource_set.resource_bag.from_inheritance() - keypair = rpki.x509.RSA.generate() - - self.cert = ca_detail.issue_ee( - ca = ca, - resources = resources, - subject_key = keypair.get_public(), - sia = (None, None, self.uri_from_key(keypair))) - self.ghostbuster = rpki.x509.Ghostbuster.build(self.vcard, keypair, (self.cert,)) - self.published = rpki.sundial.now() - self.sql_store() - - rpki.log.debug("Generating Ghostbuster record %r" % self.uri) - publisher.publish( - cls = rpki.publication.ghostbuster_elt, - uri = self.uri, - obj = self.ghostbuster, - repository = ca.parent.repository, - handler = self.published_callback) - if not fast: - ca_detail.generate_manifest(publisher = publisher) - - def published_callback(self, pdu): - """ - Check publication result. 
- """ - pdu.raise_if_error() - self.published = None - self.sql_mark_dirty() - - def revoke(self, publisher, regenerate = False, allow_failure = False, fast = False): - """ - Withdraw Ghostbuster associated with this ghostbuster_obj. - - In order to preserve make-before-break properties without - duplicating code, this method also handles generating a - replacement ghostbuster when requested. - - If allow_failure is set, failing to withdraw the ghostbuster will not be - considered an error. - - If fast is set, SQL actions will be deferred, on the assumption - that our caller will handle regenerating CRL and manifest and - flushing the SQL cache. - """ - - ca_detail = self.ca_detail - cert = self.cert - ghostbuster = self.ghostbuster - uri = self.uri - - rpki.log.debug("%s %r, ca_detail %r state is %s" % ( - "Regenerating" if regenerate else "Not regenerating", - self, ca_detail, ca_detail.state)) - - if regenerate: - self.generate(publisher = publisher, fast = fast) - - rpki.log.debug("Withdrawing %r %s and revoking its EE cert" % (self, uri)) - rpki.rpkid.revoked_cert_obj.revoke(cert = cert, ca_detail = ca_detail) - publisher.withdraw(cls = rpki.publication.ghostbuster_elt, uri = uri, obj = ghostbuster, - repository = ca_detail.ca.parent.repository, - handler = False if allow_failure else None) - - if not regenerate: - self.sql_mark_deleted() - - if not fast: - ca_detail.generate_crl(publisher = publisher) - ca_detail.generate_manifest(publisher = publisher) - self.gctx.sql.sweep() - - def regenerate(self, publisher, fast = False): - """ - Reissue Ghostbuster associated with this ghostbuster_obj. - """ - if self.ghostbuster is None: - self.generate(publisher = publisher, fast = fast) - else: - self.revoke(publisher = publisher, regenerate = True, fast = fast) - - def uri_from_key(self, key): - """ - Return publication URI for a public key. 
- """ - return self.ca_detail.ca.sia_uri + key.gSKI() + ".gbr" - - @property - def uri(self): - """ - Return the publication URI for this ghostbuster_obj's ghostbuster. - """ - return self.ca_detail.ca.sia_uri + self.uri_tail - - @property - def uri_tail(self): - """ - Return the tail (filename portion) of the publication URI for this - ghostbuster_obj's ghostbuster. - """ - return self.cert.gSKI() + ".gbr" - - -class ee_cert_obj(rpki.sql.sql_persistent): - """ - EE certificate (router certificate or generic). - """ - - sql_template = rpki.sql.template( - "ee_cert", - "ee_cert_id", - "self_id", - "ca_detail_id", - "ski", - ("cert", rpki.x509.X509), - ("published", rpki.sundial.datetime)) - - def __repr__(self): - return rpki.log.log_repr(self, self.cert.getSubject(), self.uri) - - def __init__(self, gctx = None, self_id = None, ca_detail_id = None, cert = None): - rpki.sql.sql_persistent.__init__(self) - self.gctx = gctx - self.self_id = self_id - self.ca_detail_id = ca_detail_id - self.cert = cert - self.ski = None if cert is None else cert.get_SKI() - self.published = None - if self_id or ca_detail_id or cert: - self.sql_mark_dirty() - - @property - @rpki.sql.cache_reference - def self(self): - """ - Fetch self object to which this ee_cert_obj links. - """ - return rpki.left_right.self_elt.sql_fetch(self.gctx, self.self_id) - - @property - @rpki.sql.cache_reference - def ca_detail(self): - """ - Fetch ca_detail object to which this ee_cert_obj links. - """ - return rpki.rpkid.ca_detail_obj.sql_fetch(self.gctx, self.ca_detail_id) - - @ca_detail.deleter - def ca_detail(self): - try: - del self._ca_detail - except AttributeError: - pass - - @property - def gski(self): - """ - Calculate g(SKI), for ease of comparison with XML. - - Although, really, one has to ask why we don't just store g(SKI) - in rpkid.sql instead of ski.... 
- """ - return base64.urlsafe_b64encode(self.ski).rstrip("=") - - @gski.setter - def gski(self, val): - self.ski = base64.urlsafe_b64decode(s + ("=" * ((4 - len(s)) % 4))) - - @property - def uri(self): - """ - Return the publication URI for this ee_cert_obj. - """ - return self.ca_detail.ca.sia_uri + self.uri_tail - - @property - def uri_tail(self): - """ - Return the tail (filename portion) of the publication URI for this - ee_cert_obj. - """ - return self.cert.gSKI() + ".cer" - - @classmethod - def create(cls, ca_detail, subject_name, subject_key, resources, publisher, eku = None): - """ - Generate a new certificate and stuff it in a new ee_cert_obj. - """ - - cn, sn = subject_name.extract_cn_and_sn() - ca = ca_detail.ca - - cert = ca_detail.issue_ee( - ca = ca, - subject_key = subject_key, - sia = None, - resources = resources, - notAfter = resources.valid_until, - cn = cn, - sn = sn, - eku = eku) - - self = cls( - gctx = ca_detail.gctx, - self_id = ca.parent.self.self_id, - ca_detail_id = ca_detail.ca_detail_id, - cert = cert) - - publisher.publish( - cls = rpki.publication.certificate_elt, - uri = self.uri, - obj = self.cert, - repository = ca.parent.repository, - handler = self.published_callback) - - self.sql_store() - - ca_detail.generate_manifest(publisher = publisher) - - rpki.log.debug("New ee_cert %r" % self) - - return self - - def revoke(self, publisher, generate_crl_and_manifest = True): - """ - Revoke and withdraw an EE certificate. 
- """ - - ca_detail = self.ca_detail - ca = ca_detail.ca - rpki.log.debug("Revoking %r %r" % (self, self.uri)) - revoked_cert_obj.revoke(cert = self.cert, ca_detail = ca_detail) - publisher.withdraw(cls = rpki.publication.certificate_elt, - uri = self.uri, - obj = self.cert, - repository = ca.parent.repository) - self.gctx.sql.sweep() - self.sql_delete() - if generate_crl_and_manifest: - ca_detail.generate_crl(publisher = publisher) - ca_detail.generate_manifest(publisher = publisher) - - def reissue(self, publisher, ca_detail = None, resources = None, force = False): - """ - Reissue an existing EE cert, reusing the public key. If the EE - cert we would generate is identical to the one we already have, we - just return; if we need to reissue, we reuse this ee_cert_obj and - just update its contents, as the publication URI will not have - changed. - """ - - needed = False - - old_cert = self.cert - - old_ca_detail = self.ca_detail - if ca_detail is None: - ca_detail = old_ca_detail - - assert ca_detail.ca is old_ca_detail.ca - - old_resources = old_cert.get_3779resources() - if resources is None: - resources = old_resources - - assert resources.valid_until is not None and old_resources.valid_until is not None - - assert ca_detail.covers(resources) - - if ca_detail != self.ca_detail: - rpki.log.debug("ca_detail changed for %r: old %r new %r" % ( - self, self.ca_detail, ca_detail)) - needed = True - - if ca_detail.ca_cert_uri != old_cert.get_AIA()[0]: - rpki.log.debug("AIA changed for %r: old %s new %s" % ( - self, old_cert.get_AIA()[0], ca_detail.ca_cert_uri)) - needed = True - - if resources.valid_until != old_resources.valid_until: - rpki.log.debug("Validity changed for %r: old %s new %s" % ( - self, old_resources.valid_until, resources.valid_until)) - needed = True - - if resources.asn != old_resources.asn or resources.v4 != old_resources.v4 or resources.v6 != old_resources.v6: - rpki.log.debug("Resources changed for %r: old %s new %s" % ( - self, old_resources, 
resources)) - needed = True - - must_revoke = (old_resources.oversized(resources) or - old_resources.valid_until > resources.valid_until) - if must_revoke: - rpki.log.debug("Must revoke existing cert(s) for %r" % self) - needed = True - - if not needed and force: - rpki.log.debug("No change needed for %r, forcing reissuance anyway" % self) - needed = True - - if not needed: - rpki.log.debug("No change to %r" % self) - return - - cn, sn = self.cert.getSubject().extract_cn_and_sn() - - self.cert = ca_detail.issue_ee( - ca = ca_detail.ca, - subject_key = self.cert.getPublicKey(), - eku = self.cert.get_EKU(), - sia = None, - resources = resources, - notAfter = resources.valid_until, - cn = cn, - sn = sn) - - self.sql_mark_dirty() - - publisher.publish( - cls = rpki.publication.certificate_elt, - uri = self.uri, - obj = self.cert, - repository = ca_detail.ca.parent.repository, - handler = self.published_callback) - - if must_revoke: - revoked_cert_obj.revoke(cert = old_cert.cert, ca_detail = old_ca_detail) - - self.gctx.sql.sweep() - - if must_revoke: - ca_detail.generate_crl(publisher = publisher) - self.gctx.sql.sweep() - - ca_detail.generate_manifest(publisher = publisher) - - def published_callback(self, pdu): - """ - Publication callback: check result and mark published. - """ - pdu.raise_if_error() - self.published = None - self.sql_mark_dirty() - - -class publication_queue(object): - """ - Utility to simplify publication from within rpkid. - - General idea here is to accumulate a collection of objects to be - published, in one or more repositories, each potentially with its - own completion callback. Eventually we want to publish everything - we've accumulated, at which point we need to iterate over the - collection and do repository.call_pubd() for each repository. 
- """ - - replace = True - - def __init__(self): - self.clear() - - def clear(self): - self.repositories = {} - self.msgs = {} - self.handlers = {} - if self.replace: - self.uris = {} - - def _add(self, uri, obj, repository, handler, make_pdu): - rid = id(repository) - if rid not in self.repositories: - self.repositories[rid] = repository - self.msgs[rid] = rpki.publication.msg.query() - if self.replace and uri in self.uris: - rpki.log.debug("Removing publication duplicate <%s %r %r>" % (self.uris[uri].action, self.uris[uri].uri, self.uris[uri].payload)) - self.msgs[rid].remove(self.uris.pop(uri)) - pdu = make_pdu(uri = uri, obj = obj) - if handler is not None: - self.handlers[id(pdu)] = handler - pdu.tag = id(pdu) - self.msgs[rid].append(pdu) - if self.replace: - self.uris[uri] = pdu - - def publish(self, cls, uri, obj, repository, handler = None): - return self._add( uri, obj, repository, handler, cls.make_publish) - - def withdraw(self, cls, uri, obj, repository, handler = None): - return self._add( uri, obj, repository, handler, cls.make_withdraw) - - def call_pubd(self, cb, eb): - def loop(iterator, rid): - rpki.log.debug("Calling pubd[%r]" % self.repositories[rid]) - self.repositories[rid].call_pubd(iterator, eb, self.msgs[rid], self.handlers) - def done(): - self.clear() - cb() - rpki.async.iterator(self.repositories, loop, done) - - @property - def size(self): - return sum(len(self.msgs[rid]) for rid in self.repositories) - - def empty(self): - assert (not self.msgs) == (self.size == 0) - return not self.msgs diff --git a/rpkid/rpki/rpkid_tasks.py b/rpkid/rpki/rpkid_tasks.py deleted file mode 100644 index 04e1c0df..00000000 --- a/rpkid/rpki/rpkid_tasks.py +++ /dev/null @@ -1,750 +0,0 @@ -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2012--2013 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that 
the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR -# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -rpkid task objects. Split out from rpki.left_right and rpki.rpkid -because interactions with rpkid scheduler were getting too complicated. -""" - -import rpki.log -import rpki.rpkid -import rpki.async -import rpki.up_down -import rpki.sundial -import rpki.publication -import rpki.exceptions - -task_classes = () - -def queue_task(cls): - """ - Class decorator to add a new task class to task_classes. - """ - - global task_classes - task_classes += (cls,) - return cls - - -class CompletionHandler(object): - """ - Track one or more scheduled rpkid tasks and execute a callback when - the last of them terminates. - """ - - ## @var debug - # Debug logging. 
- - debug = False - - def __init__(self, cb): - self.cb = cb - self.tasks = set() - - def register(self, task): - if self.debug: - rpki.log.debug("Completion handler %r registering task %r" % (self, task)) - self.tasks.add(task) - task.register_completion(self.done) - - def done(self, task): - try: - self.tasks.remove(task) - except KeyError: - rpki.log.warn("Completion handler %r called with unregistered task %r, blundering onwards" % (self, task)) - else: - if self.debug: - rpki.log.debug("Completion handler %r called with registered task %r" % (self, task)) - if not self.tasks: - if self.debug: - rpki.log.debug("Completion handler %r finished, calling %r" % (self, self.cb)) - self.cb() - - @property - def count(self): - return len(self.tasks) - - -class AbstractTask(object): - """ - Abstract base class for rpkid scheduler task objects. This just - handles the scheduler hooks, real work starts in self.start. - - NB: This assumes that the rpki.rpkid.rpkid.task_* methods have been - rewritten to expect instances of subclasses of this class, rather - than expecting thunks to be wrapped up in the older version of this - class. Rewrite, rewrite, remove this comment when done, OK! - """ - - ## @var timeslice - # How long before a task really should consider yielding the CPU to - # let something else run. 
- - timeslice = rpki.sundial.timedelta(seconds = 15) - - def __init__(self, s, description = None): - self.self = s - self.description = description - self.completions = [] - self.continuation = None - self.due_date = None - self.clear() - - def __repr__(self): - return rpki.log.log_repr(self, self.description) - - def register_completion(self, completion): - self.completions.append(completion) - - def exit(self): - while self.completions: - self.completions.pop(0)(self) - self.clear() - self.due_date = None - self.self.gctx.task_next() - - def postpone(self, continuation): - self.continuation = continuation - self.due_date = None - self.self.gctx.task_add(self) - self.self.gctx.task_next() - - def __call__(self): - self.due_date = rpki.sundial.now() + self.timeslice - if self.continuation is None: - rpki.log.debug("Running task %r" % self) - self.clear() - self.start() - else: - rpki.log.debug("Restarting task %r at %r" % (self, self.continuation)) - continuation = self.continuation - self.continuation = None - continuation() - - @property - def overdue(self): - return rpki.sundial.now() > self.due_date - - def __getattr__(self, name): - return getattr(self.self, name) - - def start(self): - raise NotImplementedError - - def clear(self): - pass - - -@queue_task -class PollParentTask(AbstractTask): - """ - Run the regular client poll cycle with each of this self's - parents, in turn. 
- """ - - def clear(self): - self.parent_iterator = None - self.parent = None - self.ca_map = None - self.class_iterator = None - - def start(self): - rpki.log.trace() - self.gctx.checkpoint() - rpki.log.debug("Self %s[%d] polling parents" % (self.self_handle, self.self_id)) - rpki.async.iterator(self.parents, self.parent_loop, self.exit) - - def parent_loop(self, parent_iterator, parent): - self.parent_iterator = parent_iterator - self.parent = parent - rpki.up_down.list_pdu.query(parent, self.got_list, self.list_failed) - - def got_list(self, r_msg): - self.ca_map = dict((ca.parent_resource_class, ca) for ca in self.parent.cas) - self.gctx.checkpoint() - rpki.async.iterator(r_msg.payload.classes, self.class_loop, self.class_done) - - def list_failed(self, e): - rpki.log.traceback() - rpki.log.warn("Couldn't get resource class list from parent %r, skipping: %s (%r)" % ( - self.parent, e, e)) - self.parent_iterator() - - def class_loop(self, class_iterator, rc): - self.gctx.checkpoint() - self.class_iterator = class_iterator - try: - ca = self.ca_map.pop(rc.class_name) - except KeyError: - rpki.rpkid.ca_obj.create(self.parent, rc, class_iterator, self.class_create_failed) - else: - ca.check_for_updates(self.parent, rc, class_iterator, self.class_update_failed) - - def class_update_failed(self, e): - rpki.log.traceback() - rpki.log.warn("Couldn't update class, skipping: %s" % e) - self.class_iterator() - - def class_create_failed(self, e): - rpki.log.traceback() - rpki.log.warn("Couldn't create class, skipping: %s" % e) - self.class_iterator() - - def class_done(self): - rpki.async.iterator(self.ca_map.values(), self.ca_loop, self.ca_done) - - def ca_loop(self, iterator, ca): - self.gctx.checkpoint() - ca.delete(self.parent, iterator) - - def ca_done(self): - self.gctx.checkpoint() - self.gctx.sql.sweep() - self.parent_iterator() - - -@queue_task -class UpdateChildrenTask(AbstractTask): - """ - Check for updated IRDB data for all of this self's children and - issue 
new certs as necessary. Must handle changes both in - resources and in expiration date. - """ - - def clear(self): - self.now = None - self.rsn = None - self.publisher = None - self.iterator = None - self.child = None - self.child_certs = None - - def start(self): - rpki.log.trace() - self.gctx.checkpoint() - rpki.log.debug("Self %s[%d] updating children" % (self.self_handle, self.self_id)) - self.now = rpki.sundial.now() - self.rsn = self.now + rpki.sundial.timedelta(seconds = self.regen_margin) - self.publisher = rpki.rpkid.publication_queue() - rpki.async.iterator(self.children, self.loop, self.done) - - def loop(self, iterator, child): - self.gctx.checkpoint() - self.gctx.sql.sweep() - self.iterator = iterator - self.child = child - self.child_certs = child.child_certs - if self.overdue: - self.publisher.call_pubd(lambda: self.postpone(self.do_child), self.publication_failed) - else: - self.do_child() - - def do_child(self): - if self.child_certs: - self.gctx.irdb_query_child_resources(self.child.self.self_handle, self.child.child_handle, - self.got_resources, self.lose) - else: - self.iterator() - - def lose(self, e): - rpki.log.traceback() - rpki.log.warn("Couldn't update child %r, skipping: %s" % (self.child, e)) - self.iterator() - - def got_resources(self, irdb_resources): - try: - for child_cert in self.child_certs: - ca_detail = child_cert.ca_detail - ca = ca_detail.ca - if ca_detail.state == "active": - old_resources = child_cert.cert.get_3779resources() - new_resources = old_resources & irdb_resources & ca_detail.latest_ca_cert.get_3779resources() - old_aia = child_cert.cert.get_AIA()[0] - new_aia = ca_detail.ca_cert_uri - - if new_resources.empty(): - rpki.log.debug("Resources shrank to the null set, " - "revoking and withdrawing child %s certificate SKI %s" % ( - self.child.child_handle, child_cert.cert.gSKI())) - child_cert.revoke(publisher = self.publisher) - ca_detail.generate_crl(publisher = self.publisher) - ca_detail.generate_manifest(publisher 
= self.publisher) - - elif (old_resources != new_resources or - old_aia != new_aia or - (old_resources.valid_until < self.rsn and - irdb_resources.valid_until > self.now and - old_resources.valid_until != irdb_resources.valid_until)): - - rpki.log.debug("Need to reissue child %s certificate SKI %s" % ( - self.child.child_handle, child_cert.cert.gSKI())) - if old_resources != new_resources: - rpki.log.debug("Child %s SKI %s resources changed: old %s new %s" % ( - self.child.child_handle, child_cert.cert.gSKI(), old_resources, new_resources)) - if old_resources.valid_until != irdb_resources.valid_until: - rpki.log.debug("Child %s SKI %s validity changed: old %s new %s" % ( - self.child.child_handle, child_cert.cert.gSKI(), - old_resources.valid_until, irdb_resources.valid_until)) - - new_resources.valid_until = irdb_resources.valid_until - child_cert.reissue( - ca_detail = ca_detail, - resources = new_resources, - publisher = self.publisher) - - elif old_resources.valid_until < self.now: - rpki.log.debug("Child %s certificate SKI %s has expired: cert.valid_until %s, irdb.valid_until %s" - % (self.child.child_handle, child_cert.cert.gSKI(), - old_resources.valid_until, irdb_resources.valid_until)) - child_cert.sql_delete() - self.publisher.withdraw( - cls = rpki.publication.certificate_elt, - uri = child_cert.uri, - obj = child_cert.cert, - repository = ca.parent.repository) - ca_detail.generate_manifest(publisher = self.publisher) - - except (SystemExit, rpki.async.ExitNow): - raise - except Exception, e: - self.gctx.checkpoint() - self.lose(e) - else: - self.gctx.checkpoint() - self.gctx.sql.sweep() - self.iterator() - - def done(self): - self.gctx.checkpoint() - self.gctx.sql.sweep() - self.publisher.call_pubd(self.exit, self.publication_failed) - - def publication_failed(self, e): - rpki.log.traceback() - rpki.log.warn("Couldn't publish for %s, skipping: %s" % (self.self_handle, e)) - self.gctx.checkpoint() - self.exit() - - -@queue_task -class 
UpdateROAsTask(AbstractTask): - """ - Generate or update ROAs for this self. - """ - - def clear(self): - self.orphans = None - self.updates = None - self.publisher = None - self.ca_details = None - self.count = None - - def start(self): - rpki.log.trace() - self.gctx.checkpoint() - self.gctx.sql.sweep() - rpki.log.debug("Self %s[%d] updating ROAs" % (self.self_handle, self.self_id)) - - rpki.log.debug("Issuing query for ROA requests") - self.gctx.irdb_query_roa_requests(self.self_handle, self.got_roa_requests, self.roa_requests_failed) - - def got_roa_requests(self, roa_requests): - self.gctx.checkpoint() - rpki.log.debug("Received response to query for ROA requests") - - if self.gctx.sql.dirty: - rpki.log.warn("Unexpected dirty SQL cache, flushing") - self.gctx.sql.sweep() - - roas = {} - seen = set() - self.orphans = [] - self.updates = [] - self.publisher = rpki.rpkid.publication_queue() - self.ca_details = set() - - for roa in self.roas: - k = (roa.asn, str(roa.ipv4), str(roa.ipv6)) - if k not in roas: - roas[k] = roa - elif (roa.roa is not None and roa.cert is not None and roa.ca_detail is not None and roa.ca_detail.state == "active" and - (roas[k].roa is None or roas[k].cert is None or roas[k].ca_detail is None or roas[k].ca_detail.state != "active")): - self.orphans.append(roas[k]) - roas[k] = roa - else: - self.orphans.append(roa) - - for roa_request in roa_requests: - k = (roa_request.asn, str(roa_request.ipv4), str(roa_request.ipv6)) - if k in seen: - rpki.log.warn("Skipping duplicate ROA request %r" % roa_request) - else: - seen.add(k) - roa = roas.pop(k, None) - if roa is None: - roa = rpki.rpkid.roa_obj(self.gctx, self.self_id, roa_request.asn, roa_request.ipv4, roa_request.ipv6) - rpki.log.debug("Created new %r" % roa) - else: - rpki.log.debug("Found existing %r" % roa) - self.updates.append(roa) - - self.orphans.extend(roas.itervalues()) - - if self.overdue: - self.postpone(self.begin_loop) - else: - self.begin_loop() - - def begin_loop(self): - 
self.count = 0 - rpki.async.iterator(self.updates, self.loop, self.done, pop_list = True) - - def loop(self, iterator, roa): - self.gctx.checkpoint() - try: - roa.update(publisher = self.publisher, fast = True) - self.ca_details.add(roa.ca_detail) - self.gctx.sql.sweep() - except (SystemExit, rpki.async.ExitNow): - raise - except rpki.exceptions.NoCoveringCertForROA: - rpki.log.warn("No covering certificate for %r, skipping" % roa) - except Exception, e: - rpki.log.traceback() - rpki.log.warn("Could not update %r, skipping: %s" % (roa, e)) - self.count += 1 - if self.overdue: - self.publish(lambda: self.postpone(iterator)) - else: - iterator() - - def publish(self, done): - if not self.publisher.empty(): - for ca_detail in self.ca_details: - rpki.log.debug("Generating new CRL for %r" % ca_detail) - ca_detail.generate_crl(publisher = self.publisher) - rpki.log.debug("Generating new manifest for %r" % ca_detail) - ca_detail.generate_manifest(publisher = self.publisher) - self.ca_details.clear() - self.gctx.sql.sweep() - self.gctx.checkpoint() - self.publisher.call_pubd(done, self.publication_failed) - - def publication_failed(self, e): - rpki.log.traceback() - rpki.log.warn("Couldn't publish for %s, skipping: %s" % (self.self_handle, e)) - self.gctx.checkpoint() - self.exit() - - def done(self): - for roa in self.orphans: - try: - self.ca_details.add(roa.ca_detail) - roa.revoke(publisher = self.publisher, fast = True) - except (SystemExit, rpki.async.ExitNow): - raise - except Exception, e: - rpki.log.traceback() - rpki.log.warn("Could not revoke %r: %s" % (roa, e)) - self.gctx.sql.sweep() - self.gctx.checkpoint() - self.publish(self.exit) - - def roa_requests_failed(self, e): - rpki.log.traceback() - rpki.log.warn("Could not fetch ROA requests for %s, skipping: %s" % (self.self_handle, e)) - self.exit() - - -@queue_task -class UpdateGhostbustersTask(AbstractTask): - """ - Generate or update Ghostbuster records for this self. 
- - This was originally based on the ROA update code. It's possible - that both could benefit from refactoring, but at this point the - potential scaling issues for ROAs completely dominate structure of - the ROA code, and aren't relevant here unless someone is being - exceptionally silly. - """ - - def start(self): - rpki.log.trace() - self.gctx.checkpoint() - rpki.log.debug("Self %s[%d] updating Ghostbuster records" % (self.self_handle, self.self_id)) - - self.gctx.irdb_query_ghostbuster_requests(self.self_handle, - (p.parent_handle for p in self.parents), - self.got_ghostbuster_requests, - self.ghostbuster_requests_failed) - - def got_ghostbuster_requests(self, ghostbuster_requests): - - try: - self.gctx.checkpoint() - if self.gctx.sql.dirty: - rpki.log.warn("Unexpected dirty SQL cache, flushing") - self.gctx.sql.sweep() - - ghostbusters = {} - orphans = [] - publisher = rpki.rpkid.publication_queue() - ca_details = set() - seen = set() - - parents = dict((p.parent_handle, p) for p in self.parents) - - for ghostbuster in self.ghostbusters: - k = (ghostbuster.ca_detail_id, ghostbuster.vcard) - if ghostbuster.ca_detail.state != "active" or k in ghostbusters: - orphans.append(ghostbuster) - else: - ghostbusters[k] = ghostbuster - - for ghostbuster_request in ghostbuster_requests: - if ghostbuster_request.parent_handle not in parents: - rpki.log.warn("Unknown parent_handle %r in Ghostbuster request, skipping" % ghostbuster_request.parent_handle) - continue - k = (ghostbuster_request.parent_handle, ghostbuster_request.vcard) - if k in seen: - rpki.log.warn("Skipping duplicate Ghostbuster request %r" % ghostbuster_request) - continue - seen.add(k) - for ca in parents[ghostbuster_request.parent_handle].cas: - ca_detail = ca.active_ca_detail - if ca_detail is not None: - ghostbuster = ghostbusters.pop((ca_detail.ca_detail_id, ghostbuster_request.vcard), None) - if ghostbuster is None: - ghostbuster = rpki.rpkid.ghostbuster_obj(self.gctx, self.self_id, 
ca_detail.ca_detail_id, ghostbuster_request.vcard) - rpki.log.debug("Created new %r for %r" % (ghostbuster, ghostbuster_request.parent_handle)) - else: - rpki.log.debug("Found existing %r for %s" % (ghostbuster, ghostbuster_request.parent_handle)) - ghostbuster.update(publisher = publisher, fast = True) - ca_details.add(ca_detail) - - orphans.extend(ghostbusters.itervalues()) - for ghostbuster in orphans: - ca_details.add(ghostbuster.ca_detail) - ghostbuster.revoke(publisher = publisher, fast = True) - - for ca_detail in ca_details: - ca_detail.generate_crl(publisher = publisher) - ca_detail.generate_manifest(publisher = publisher) - - self.gctx.sql.sweep() - - self.gctx.checkpoint() - publisher.call_pubd(self.exit, self.publication_failed) - - except (SystemExit, rpki.async.ExitNow): - raise - except Exception, e: - rpki.log.traceback() - rpki.log.warn("Could not update Ghostbuster records for %s, skipping: %s" % (self.self_handle, e)) - self.exit() - - def publication_failed(self, e): - rpki.log.traceback() - rpki.log.warn("Couldn't publish Ghostbuster updates for %s, skipping: %s" % (self.self_handle, e)) - self.gctx.checkpoint() - self.exit() - - def ghostbuster_requests_failed(self, e): - rpki.log.traceback() - rpki.log.warn("Could not fetch Ghostbuster record requests for %s, skipping: %s" % (self.self_handle, e)) - self.exit() - - -@queue_task -class UpdateEECertificatesTask(AbstractTask): - """ - Generate or update EE certificates for this self. - - Not yet sure what kind of scaling constraints this task might have, - so keeping it simple for initial version, we can optimize later. 
- """ - - def start(self): - rpki.log.trace() - self.gctx.checkpoint() - rpki.log.debug("Self %s[%d] updating EE certificates" % (self.self_handle, self.self_id)) - - self.gctx.irdb_query_ee_certificate_requests(self.self_handle, - self.got_requests, - self.get_requests_failed) - - def got_requests(self, requests): - - try: - self.gctx.checkpoint() - if self.gctx.sql.dirty: - rpki.log.warn("Unexpected dirty SQL cache, flushing") - self.gctx.sql.sweep() - - publisher = rpki.rpkid.publication_queue() - - existing = dict() - for ee in self.ee_certificates: - gski = ee.gski - if gski not in existing: - existing[gski] = set() - existing[gski].add(ee) - - ca_details = set() - - for req in requests: - ees = existing.pop(req.gski, ()) - resources = rpki.resource_set.resource_bag( - asn = req.asn, - v4 = req.ipv4, - v6 = req.ipv6, - valid_until = req.valid_until) - covering = self.find_covering_ca_details(resources) - ca_details.update(covering) - - for ee in ees: - if ee.ca_detail in covering: - rpki.log.debug("Updating existing EE certificate for %s %s" % (req.gski, resources)) - ee.reissue( - resources = resources, - publisher = publisher) - covering.remove(ee.ca_detail) - else: - rpki.log.debug("Existing EE certificate for %s %s is no longer covered" % (req.gski, resources)) - ee.revoke(publisher = publisher) - - for ca_detail in covering: - rpki.log.debug("No existing EE certificate for %s %s" % (req.gski, resources)) - rpki.rpkid.ee_cert_obj.create( - ca_detail = ca_detail, - subject_name = rpki.x509.X501DN.from_cn(req.cn, req.sn), - subject_key = req.pkcs10.getPublicKey(), - resources = resources, - publisher = publisher, - eku = req.eku or None) - - # Anything left is an orphan - for ees in existing.values(): - for ee in ees: - ca_details.add(ee.ca_detail) - ee.revoke(publisher = publisher) - - self.gctx.sql.sweep() - - for ca_detail in ca_details: - ca_detail.generate_crl(publisher = publisher) - ca_detail.generate_manifest(publisher = publisher) - - 
self.gctx.sql.sweep() - - self.gctx.checkpoint() - publisher.call_pubd(self.exit, self.publication_failed) - - except (SystemExit, rpki.async.ExitNow): - raise - except Exception, e: - rpki.log.traceback() - rpki.log.warn("Could not update EE certificates for %s, skipping: %s" % (self.self_handle, e)) - self.exit() - - def publication_failed(self, e): - rpki.log.traceback() - rpki.log.warn("Couldn't publish EE certificate updates for %s, skipping: %s" % (self.self_handle, e)) - self.gctx.checkpoint() - self.exit() - - def get_requests_failed(self, e): - rpki.log.traceback() - rpki.log.warn("Could not fetch EE certificate requests for %s, skipping: %s" % (self.self_handle, e)) - self.exit() - - -@queue_task -class RegenerateCRLsAndManifestsTask(AbstractTask): - """ - Generate new CRLs and manifests as necessary for all of this self's - CAs. Extracting nextUpdate from a manifest is hard at the moment - due to implementation silliness, so for now we generate a new - manifest whenever we generate a new CRL - - This code also cleans up tombstones left behind by revoked ca_detail - objects, since we're walking through the relevant portions of the - database anyway. 
- """ - - def start(self): - rpki.log.trace() - self.gctx.checkpoint() - rpki.log.debug("Self %s[%d] regenerating CRLs and manifests" % (self.self_handle, self.self_id)) - - now = rpki.sundial.now() - crl_interval = rpki.sundial.timedelta(seconds = self.crl_interval) - regen_margin = max(self.gctx.cron_period * 2, crl_interval / 4) - publisher = rpki.rpkid.publication_queue() - - for parent in self.parents: - for ca in parent.cas: - try: - for ca_detail in ca.revoked_ca_details: - if now > ca_detail.latest_crl.getNextUpdate(): - ca_detail.delete(ca = ca, publisher = publisher) - for ca_detail in ca.active_or_deprecated_ca_details: - if now + regen_margin > ca_detail.latest_crl.getNextUpdate(): - ca_detail.generate_crl(publisher = publisher) - ca_detail.generate_manifest(publisher = publisher) - except (SystemExit, rpki.async.ExitNow): - raise - except Exception, e: - rpki.log.traceback() - rpki.log.warn("Couldn't regenerate CRLs and manifests for CA %r, skipping: %s" % (ca, e)) - - self.gctx.checkpoint() - self.gctx.sql.sweep() - publisher.call_pubd(self.exit, self.lose) - - def lose(self, e): - rpki.log.traceback() - rpki.log.warn("Couldn't publish updated CRLs and manifests for self %r, skipping: %s" % (self.self_handle, e)) - self.gctx.checkpoint() - self.exit() - - -@queue_task -class CheckFailedPublication(AbstractTask): - """ - Periodic check for objects we tried to publish but failed (eg, due - to pubd being down or unreachable). 
- """ - - def start(self): - rpki.log.trace() - publisher = rpki.rpkid.publication_queue() - for parent in self.parents: - for ca in parent.cas: - ca_detail = ca.active_ca_detail - if ca_detail is not None: - ca_detail.check_failed_publication(publisher) - self.gctx.checkpoint() - self.gctx.sql.sweep() - publisher.call_pubd(self.exit, self.publication_failed) - - def publication_failed(self, e): - rpki.log.traceback() - rpki.log.warn("Couldn't publish for %s, skipping: %s" % (self.self_handle, e)) - self.gctx.checkpoint() - self.exit() diff --git a/rpkid/rpki/sql.py b/rpkid/rpki/sql.py deleted file mode 100644 index c753278c..00000000 --- a/rpkid/rpki/sql.py +++ /dev/null @@ -1,424 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009-2013 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -SQL interface code. -""" - -import weakref - -from rpki.mysql_import import (MySQLdb, _mysql_exceptions) - -import rpki.x509 -import rpki.resource_set -import rpki.sundial -import rpki.log - -class session(object): - """ - SQL session layer. - """ - - ## @var ping_threshold - # Timeout after which we should issue a ping command before the real - # one. Intent is to keep the MySQL connection alive without pinging - # before every single command. - - ping_threshold = rpki.sundial.timedelta(seconds = 60) - - def __init__(self, cfg): - - self.username = cfg.get("sql-username") - self.database = cfg.get("sql-database") - self.password = cfg.get("sql-password") - - self.conv = MySQLdb.converters.conversions.copy() - self.conv.update({ - rpki.sundial.datetime : MySQLdb.converters.DateTime2literal, - MySQLdb.converters.FIELD_TYPE.DATETIME : rpki.sundial.datetime.DateTime_or_None }) - - self.cache = weakref.WeakValueDictionary() - self.dirty = set() - - self.connect() - - def connect(self): - self.db = MySQLdb.connect(user = self.username, - db = self.database, - passwd = self.password, - conv = self.conv) - self.cur = self.db.cursor() - self.db.autocommit(True) - self.timestamp = rpki.sundial.now() - - def close(self): - if self.cur: - self.cur.close() - self.cur = None - if self.db: - self.db.close() - self.db = None - - def _wrap_execute(self, func, query, args): - try: - now = rpki.sundial.now() - if now > self.timestamp + self.ping_threshold: - self.db.ping(True) - self.timestamp = now - return func(query, args) - except _mysql_exceptions.MySQLError: - if self.dirty: - rpki.log.warn("MySQL exception with dirty objects in SQL cache!") - raise 
- - def execute(self, query, args = None): - return self._wrap_execute(self.cur.execute, query, args) - - def executemany(self, query, args): - return self._wrap_execute(self.cur.executemany, query, args) - - def fetchall(self): - return self.cur.fetchall() - - def lastrowid(self): - return self.cur.lastrowid - - def cache_clear(self): - """ - Clear the SQL object cache. Shouldn't be necessary now that the - cache uses weak references, but should be harmless. - """ - rpki.log.debug("Clearing SQL cache") - self.assert_pristine() - self.cache.clear() - - def assert_pristine(self): - """ - Assert that there are no dirty objects in the cache. - """ - assert not self.dirty, "Dirty objects in SQL cache: %s" % self.dirty - - def sweep(self): - """ - Write any dirty objects out to SQL. - """ - for s in self.dirty.copy(): - #if s.sql_cache_debug: - rpki.log.debug("Sweeping (%s) %r" % ("deleting" if s.sql_deleted else "storing", s)) - if s.sql_deleted: - s.sql_delete() - else: - s.sql_store() - self.assert_pristine() - -class template(object): - """ - SQL template generator. - """ - - def __init__(self, table_name, index_column, *data_columns): - """ - Build a SQL template. 
- """ - type_map = dict((x[0], x[1]) for x in data_columns if isinstance(x, tuple)) - data_columns = tuple(isinstance(x, tuple) and x[0] or x for x in data_columns) - columns = (index_column,) + data_columns - self.table = table_name - self.index = index_column - self.columns = columns - self.map = type_map - self.select = "SELECT %s FROM %s" % (", ".join("%s.%s" % (table_name, c) for c in columns), table_name) - self.insert = "INSERT %s (%s) VALUES (%s)" % (table_name, - ", ".join(data_columns), - ", ".join("%(" + s + ")s" for s in data_columns)) - self.update = "UPDATE %s SET %s WHERE %s = %%(%s)s" % (table_name, - ", ".join(s + " = %(" + s + ")s" for s in data_columns), - index_column, - index_column) - self.delete = "DELETE FROM %s WHERE %s = %%s" % (table_name, index_column) - -class sql_persistent(object): - """ - Mixin for persistent class that needs to be stored in SQL. - """ - - ## @var sql_in_db - # Whether this object is already in SQL or not. - - sql_in_db = False - - ## @var sql_deleted - # Whether our cached copy of this object has been deleted. - - sql_deleted = False - - ## @var sql_debug - # Enable logging of SQL actions - - sql_debug = False - - ## @var sql_cache_debug - # Enable debugging of SQL cache actions - - sql_cache_debug = False - - @classmethod - def sql_fetch(cls, gctx, id): # pylint: disable=W0622 - """ - Fetch one object from SQL, based on its primary key. - - Since in this one case we know that the primary index is also the - cache key, we check for a cache hit directly in the hope of - bypassing the SQL lookup entirely. - - This method is usually called via a one-line class-specific - wrapper. As a convenience, we also accept an id of None, and just - return None in this case. 
- """ - - if id is None: - return None - assert isinstance(id, (int, long)), "id should be an integer, was %r" % type(id) - key = (cls, id) - if key in gctx.sql.cache: - return gctx.sql.cache[key] - else: - return cls.sql_fetch_where1(gctx, "%s = %%s" % cls.sql_template.index, (id,)) - - @classmethod - def sql_fetch_where1(cls, gctx, where, args = None, also_from = None): - """ - Fetch one object from SQL, based on an arbitrary SQL WHERE expression. - """ - results = cls.sql_fetch_where(gctx, where, args, also_from) - if len(results) == 0: - return None - elif len(results) == 1: - return results[0] - else: - raise rpki.exceptions.DBConsistancyError, \ - "Database contained multiple matches for %s where %s: %r" % \ - (cls.__name__, where % tuple(repr(a) for a in args), results) - - @classmethod - def sql_fetch_all(cls, gctx): - """ - Fetch all objects of this type from SQL. - """ - return cls.sql_fetch_where(gctx, None) - - @classmethod - def sql_fetch_where(cls, gctx, where, args = None, also_from = None): - """ - Fetch objects of this type matching an arbitrary SQL WHERE expression. - """ - if where is None: - assert args is None and also_from is None - if cls.sql_debug: - rpki.log.debug("sql_fetch_where(%r)" % cls.sql_template.select) - gctx.sql.execute(cls.sql_template.select) - else: - query = cls.sql_template.select - if also_from is not None: - query += "," + also_from - query += " WHERE " + where - if cls.sql_debug: - rpki.log.debug("sql_fetch_where(%r, %r)" % (query, args)) - gctx.sql.execute(query, args) - results = [] - for row in gctx.sql.fetchall(): - key = (cls, row[0]) - if key in gctx.sql.cache: - results.append(gctx.sql.cache[key]) - else: - results.append(cls.sql_init(gctx, row, key)) - return results - - @classmethod - def sql_init(cls, gctx, row, key): - """ - Initialize one Python object from the result of a SQL query. 
- """ - self = cls() - self.gctx = gctx - self.sql_decode(dict(zip(cls.sql_template.columns, row))) - gctx.sql.cache[key] = self - self.sql_in_db = True - self.sql_fetch_hook() - return self - - def sql_mark_dirty(self): - """ - Mark this object as needing to be written back to SQL. - """ - if self.sql_cache_debug and not self.sql_is_dirty: - rpki.log.debug("Marking %r SQL dirty" % self) - self.gctx.sql.dirty.add(self) - - def sql_mark_clean(self): - """ - Mark this object as not needing to be written back to SQL. - """ - if self.sql_cache_debug and self.sql_is_dirty: - rpki.log.debug("Marking %r SQL clean" % self) - self.gctx.sql.dirty.discard(self) - - @property - def sql_is_dirty(self): - """ - Query whether this object needs to be written back to SQL. - """ - return self in self.gctx.sql.dirty - - def sql_mark_deleted(self): - """ - Mark this object as needing to be deleted in SQL. - """ - self.sql_deleted = True - self.sql_mark_dirty() - - def sql_store(self): - """ - Store this object to SQL. - """ - args = self.sql_encode() - if not self.sql_in_db: - if self.sql_debug: - rpki.log.debug("sql_store(%r, %r)" % (self.sql_template.insert, args)) - self.gctx.sql.execute(self.sql_template.insert, args) - setattr(self, self.sql_template.index, self.gctx.sql.lastrowid()) - self.gctx.sql.cache[(self.__class__, self.gctx.sql.lastrowid())] = self - self.sql_insert_hook() - else: - if self.sql_debug: - rpki.log.debug("sql_store(%r, %r)" % (self.sql_template.update, args)) - self.gctx.sql.execute(self.sql_template.update, args) - self.sql_update_hook() - key = (self.__class__, getattr(self, self.sql_template.index)) - assert key in self.gctx.sql.cache and self.gctx.sql.cache[key] == self - self.sql_mark_clean() - self.sql_in_db = True - - def sql_delete(self): - """ - Delete this object from SQL. 
- """ - if self.sql_in_db: - id = getattr(self, self.sql_template.index) # pylint: disable=W0622 - if self.sql_debug: - rpki.log.debug("sql_delete(%r, %r)" % (self.sql_template.delete, id)) - self.sql_delete_hook() - self.gctx.sql.execute(self.sql_template.delete, id) - key = (self.__class__, id) - if self.gctx.sql.cache.get(key) == self: - del self.gctx.sql.cache[key] - self.sql_in_db = False - self.sql_mark_clean() - - def sql_encode(self): - """ - Convert object attributes into a dict for use with canned SQL - queries. This is a default version that assumes a one-to-one - mapping between column names in SQL and attribute names in Python. - If you need something fancier, override this. - """ - d = dict((a, getattr(self, a, None)) for a in self.sql_template.columns) - for i in self.sql_template.map: - if d.get(i) is not None: - d[i] = self.sql_template.map[i].to_sql(d[i]) - return d - - def sql_decode(self, vals): - """ - Initialize an object with values returned by self.sql_fetch(). - This is a default version that assumes a one-to-one mapping - between column names in SQL and attribute names in Python. If you - need something fancier, override this. - """ - for a in self.sql_template.columns: - if vals.get(a) is not None and a in self.sql_template.map: - setattr(self, a, self.sql_template.map[a].from_sql(vals[a])) - else: - setattr(self, a, vals[a]) - - def sql_fetch_hook(self): - """ - Customization hook. - """ - pass - - def sql_insert_hook(self): - """ - Customization hook. - """ - pass - - def sql_update_hook(self): - """ - Customization hook. - """ - self.sql_delete_hook() - self.sql_insert_hook() - - def sql_delete_hook(self): - """ - Customization hook. - """ - pass - - -def cache_reference(func): - """ - Decorator for use with property methods which just do an SQL lookup based on an ID. - Check for an existing reference to the object, just return that if we find it, - otherwise perform the SQL lookup. 
- - Not 100% certain this is a good idea, but I //think// it should work well with the - current weak reference SQL cache, so long as we create no circular references. - So don't do that. - """ - - attr_name = "_" + func.__name__ - - def wrapped(self): - try: - value = getattr(self, attr_name) - assert value is not None - except AttributeError: - value = func(self) - if value is not None: - setattr(self, attr_name, value) - return value - - wrapped.__name__ = func.__name__ - wrapped.__doc__ = func.__doc__ - wrapped.__dict__.update(func.__dict__) - - return wrapped diff --git a/rpkid/rpki/sql_schemas.py b/rpkid/rpki/sql_schemas.py deleted file mode 100644 index e57c7a7f..00000000 --- a/rpkid/rpki/sql_schemas.py +++ /dev/null @@ -1,319 +0,0 @@ -# Automatically generated, do not edit. - -## @var rpkid -## SQL schema rpkid -rpkid = '''-- $Id: rpkid.sql 5753 2014-04-05 19:24:26Z sra $ - --- Copyright (C) 2009--2011 Internet Systems Consortium ("ISC") --- --- Permission to use, copy, modify, and distribute this software for any --- purpose with or without fee is hereby granted, provided that the above --- copyright notice and this permission notice appear in all copies. --- --- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH --- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY --- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, --- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM --- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE --- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR --- PERFORMANCE OF THIS SOFTWARE. - --- Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") --- --- Permission to use, copy, modify, and distribute this software for any --- purpose with or without fee is hereby granted, provided that the above --- copyright notice and this permission notice appear in all copies. 
--- --- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH --- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY --- AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, --- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM --- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE --- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR --- PERFORMANCE OF THIS SOFTWARE. - --- SQL objects needed by the RPKI engine (rpkid.py). - --- DROP TABLE commands must be in correct (reverse dependency) order --- to satisfy FOREIGN KEY constraints. - -DROP TABLE IF EXISTS ee_cert; -DROP TABLE IF EXISTS ghostbuster; -DROP TABLE IF EXISTS roa_prefix; -DROP TABLE IF EXISTS roa; -DROP TABLE IF EXISTS revoked_cert; -DROP TABLE IF EXISTS child_cert; -DROP TABLE IF EXISTS child; -DROP TABLE IF EXISTS ca_detail; -DROP TABLE IF EXISTS ca; -DROP TABLE IF EXISTS parent; -DROP TABLE IF EXISTS repository; -DROP TABLE IF EXISTS bsc; -DROP TABLE IF EXISTS self; - -CREATE TABLE self ( - self_id SERIAL NOT NULL, - self_handle VARCHAR(255) NOT NULL, - use_hsm BOOLEAN NOT NULL DEFAULT FALSE, - crl_interval BIGINT UNSIGNED, - regen_margin BIGINT UNSIGNED, - bpki_cert LONGBLOB, - bpki_glue LONGBLOB, - PRIMARY KEY (self_id), - UNIQUE (self_handle) -) ENGINE=InnoDB; - -CREATE TABLE bsc ( - bsc_id SERIAL NOT NULL, - bsc_handle VARCHAR(255) NOT NULL, - private_key_id LONGBLOB, - pkcs10_request LONGBLOB, - hash_alg ENUM ('sha256'), - signing_cert LONGBLOB, - signing_cert_crl LONGBLOB, - self_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (bsc_id), - CONSTRAINT bsc_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - UNIQUE (self_id, bsc_handle) -) ENGINE=InnoDB; - -CREATE TABLE repository ( - repository_id SERIAL NOT NULL, - repository_handle VARCHAR(255) NOT NULL, - peer_contact_uri TEXT, - bpki_cert LONGBLOB, - bpki_glue LONGBLOB, - 
last_cms_timestamp DATETIME, - bsc_id BIGINT UNSIGNED NOT NULL, - self_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (repository_id), - CONSTRAINT repository_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - CONSTRAINT repository_bsc_id - FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, - UNIQUE (self_id, repository_handle) -) ENGINE=InnoDB; - -CREATE TABLE parent ( - parent_id SERIAL NOT NULL, - parent_handle VARCHAR(255) NOT NULL, - bpki_cms_cert LONGBLOB, - bpki_cms_glue LONGBLOB, - peer_contact_uri TEXT, - sia_base TEXT, - sender_name TEXT, - recipient_name TEXT, - last_cms_timestamp DATETIME, - self_id BIGINT UNSIGNED NOT NULL, - bsc_id BIGINT UNSIGNED NOT NULL, - repository_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (parent_id), - CONSTRAINT parent_repository_id - FOREIGN KEY (repository_id) REFERENCES repository (repository_id) ON DELETE CASCADE, - CONSTRAINT parent_bsc_id - FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, - CONSTRAINT parent_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - UNIQUE (self_id, parent_handle) -) ENGINE=InnoDB; - -CREATE TABLE ca ( - ca_id SERIAL NOT NULL, - last_crl_sn BIGINT UNSIGNED NOT NULL, - last_manifest_sn BIGINT UNSIGNED NOT NULL, - next_manifest_update DATETIME, - next_crl_update DATETIME, - last_issued_sn BIGINT UNSIGNED NOT NULL, - sia_uri TEXT, - parent_resource_class TEXT, - parent_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (ca_id), - CONSTRAINT ca_parent_id - FOREIGN KEY (parent_id) REFERENCES parent (parent_id) ON DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE ca_detail ( - ca_detail_id SERIAL NOT NULL, - public_key LONGBLOB, - private_key_id LONGBLOB, - latest_crl LONGBLOB, - crl_published DATETIME, - latest_ca_cert LONGBLOB, - manifest_private_key_id LONGBLOB, - manifest_public_key LONGBLOB, - latest_manifest_cert LONGBLOB, - latest_manifest LONGBLOB, - manifest_published DATETIME, - state ENUM ('pending', 'active', 
'deprecated', 'revoked') NOT NULL, - ca_cert_uri TEXT, - ca_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (ca_detail_id), - CONSTRAINT ca_detail_ca_id - FOREIGN KEY (ca_id) REFERENCES ca (ca_id) ON DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE child ( - child_id SERIAL NOT NULL, - child_handle VARCHAR(255) NOT NULL, - bpki_cert LONGBLOB, - bpki_glue LONGBLOB, - last_cms_timestamp DATETIME, - self_id BIGINT UNSIGNED NOT NULL, - bsc_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (child_id), - CONSTRAINT child_bsc_id - FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, - CONSTRAINT child_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - UNIQUE (self_id, child_handle) -) ENGINE=InnoDB; - -CREATE TABLE child_cert ( - child_cert_id SERIAL NOT NULL, - cert LONGBLOB NOT NULL, - published DATETIME, - ski TINYBLOB NOT NULL, - child_id BIGINT UNSIGNED NOT NULL, - ca_detail_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (child_cert_id), - CONSTRAINT child_cert_ca_detail_id - FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE, - CONSTRAINT child_cert_child_id - FOREIGN KEY (child_id) REFERENCES child (child_id) ON DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE revoked_cert ( - revoked_cert_id SERIAL NOT NULL, - serial BIGINT UNSIGNED NOT NULL, - revoked DATETIME NOT NULL, - expires DATETIME NOT NULL, - ca_detail_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (revoked_cert_id), - CONSTRAINT revoked_cert_ca_detail_id - FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE roa ( - roa_id SERIAL NOT NULL, - asn BIGINT UNSIGNED NOT NULL, - cert LONGBLOB NOT NULL, - roa LONGBLOB NOT NULL, - published DATETIME, - self_id BIGINT UNSIGNED NOT NULL, - ca_detail_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (roa_id), - CONSTRAINT roa_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - CONSTRAINT roa_ca_detail_id - FOREIGN KEY (ca_detail_id) 
REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE roa_prefix ( - prefix VARCHAR(40) NOT NULL, - prefixlen TINYINT UNSIGNED NOT NULL, - max_prefixlen TINYINT UNSIGNED NOT NULL, - version TINYINT UNSIGNED NOT NULL, - roa_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (roa_id, prefix, prefixlen, max_prefixlen), - CONSTRAINT roa_prefix_roa_id - FOREIGN KEY (roa_id) REFERENCES roa (roa_id) ON DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE ghostbuster ( - ghostbuster_id SERIAL NOT NULL, - vcard LONGBLOB NOT NULL, - cert LONGBLOB NOT NULL, - ghostbuster LONGBLOB NOT NULL, - published DATETIME, - self_id BIGINT UNSIGNED NOT NULL, - ca_detail_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (ghostbuster_id), - CONSTRAINT ghostbuster_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - CONSTRAINT ghostbuster_ca_detail_id - FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE ee_cert ( - ee_cert_id SERIAL NOT NULL, - ski BINARY(20) NOT NULL, - cert LONGBLOB NOT NULL, - published DATETIME, - self_id BIGINT UNSIGNED NOT NULL, - ca_detail_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (ee_cert_id), - CONSTRAINT ee_cert_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - CONSTRAINT ee_cert_ca_detail_id - FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE -) ENGINE=InnoDB; - --- Local Variables: --- indent-tabs-mode: nil --- End: -''' - -## @var pubd -## SQL schema pubd -pubd = '''-- $Id: pubd.sql 3465 2010-10-07 00:59:39Z sra $ - --- Copyright (C) 2009--2010 Internet Systems Consortium ("ISC") --- --- Permission to use, copy, modify, and distribute this software for any --- purpose with or without fee is hereby granted, provided that the above --- copyright notice and this permission notice appear in all copies. 
--- --- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH --- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY --- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, --- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM --- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE --- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR --- PERFORMANCE OF THIS SOFTWARE. - --- Copyright (C) 2008 American Registry for Internet Numbers ("ARIN") --- --- Permission to use, copy, modify, and distribute this software for any --- purpose with or without fee is hereby granted, provided that the above --- copyright notice and this permission notice appear in all copies. --- --- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH --- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY --- AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, --- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM --- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE --- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR --- PERFORMANCE OF THIS SOFTWARE. - --- SQL objects needed by pubd.py. - --- The config table is weird because we're really only using it --- to store one BPKI CRL, but putting this here lets us use a lot of --- existing machinery and the alternatives are whacky in other ways. 
- -DROP TABLE IF EXISTS client; -DROP TABLE IF EXISTS config; - -CREATE TABLE config ( - config_id SERIAL NOT NULL, - bpki_crl LONGBLOB, - PRIMARY KEY (config_id) -) ENGINE=InnoDB; - -CREATE TABLE client ( - client_id SERIAL NOT NULL, - client_handle VARCHAR(255) NOT NULL, - base_uri TEXT, - bpki_cert LONGBLOB, - bpki_glue LONGBLOB, - last_cms_timestamp DATETIME, - PRIMARY KEY (client_id), - UNIQUE (client_handle) -) ENGINE=InnoDB; - --- Local Variables: --- indent-tabs-mode: nil --- End: -''' - diff --git a/rpkid/rpki/sundial.py b/rpkid/rpki/sundial.py deleted file mode 100644 index 0825d61b..00000000 --- a/rpkid/rpki/sundial.py +++ /dev/null @@ -1,289 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Unified RPKI date/time handling, based on the standard Python datetime module. - -Module name chosen to sidestep a nightmare of import-related errors -that occur with the more obvious module names. - -List of arithmetic methods that require result casting was derived by -inspection of the datetime module, to wit: - - >>> import datetime - >>> for t in (datetime.datetime, datetime.timedelta): - ... for k in t.__dict__.keys(): - ... if k.startswith("__"): - ... print "%s.%s()" % (t.__name__, k) -""" - -import datetime as pydatetime -import re - -def now(): - """ - Get current timestamp. - """ - return datetime.utcnow() - -class ParseFailure(Exception): - """ - Parse failure constructing timedelta. - """ - -class datetime(pydatetime.datetime): - """ - RPKI extensions to standard datetime.datetime class. All work here - is in UTC, so we use naive datetime objects. - """ - - def totimestamp(self): - """ - Convert to seconds from epoch (like time.time()). Conversion - method is a bit silly, but avoids time module timezone whackiness. - """ - return int(self.strftime("%s")) - - @classmethod - def fromXMLtime(cls, x): - """ - Convert from XML time representation. - """ - if x is None: - return None - else: - return cls.strptime(x, "%Y-%m-%dT%H:%M:%SZ") - - def toXMLtime(self): - """ - Convert to XML time representation. - """ - return self.strftime("%Y-%m-%dT%H:%M:%SZ") - - def __str__(self): - return self.toXMLtime() - - @classmethod - def from_datetime(cls, x): - """ - Convert a datetime.datetime object into this subclass. This is - whacky due to the weird constructors for datetime. 
- """ - return cls.combine(x.date(), x.time()) - - def to_datetime(self): - """ - Convert to a datetime.datetime object. In most cases this - shouldn't be necessary, but convincing SQL interfaces to use - subclasses of datetime can be hard. - """ - return pydatetime.datetime(year = self.year, month = self.month, day = self.day, - hour = self.hour, minute = self.minute, second = self.second, - microsecond = 0, tzinfo = None) - - - @classmethod - def fromOpenSSL(cls, x): - """ - Convert from the format OpenSSL's command line tool uses into this - subclass. May require rewriting if we run into locale problems. - """ - if x.startswith("notBefore=") or x.startswith("notAfter="): - x = x.partition("=")[2] - return cls.strptime(x, "%b %d %H:%M:%S %Y GMT") - - @classmethod - def from_sql(cls, x): - """ - Convert from SQL storage format. - """ - return cls.from_datetime(x) - - def to_sql(self): - """ - Convert to SQL storage format. - """ - return self.to_datetime() - - def later(self, other): - """ - Return the later of two timestamps. - """ - return other if other > self else self - - def earlier(self, other): - """ - Return the earlier of two timestamps. - """ - return other if other < self else self - - def __add__(self, y): return _cast(pydatetime.datetime.__add__(self, y)) - def __radd__(self, y): return _cast(pydatetime.datetime.__radd__(self, y)) - def __rsub__(self, y): return _cast(pydatetime.datetime.__rsub__(self, y)) - def __sub__(self, y): return _cast(pydatetime.datetime.__sub__(self, y)) - - @classmethod - def DateTime_or_None(cls, s): - """ - MySQLdb converter. Parse as this class if we can, let the default - MySQLdb DateTime_or_None() converter deal with failure cases. 
- """ - - for sep in " T": - d, _, t = s.partition(sep) - if t: - try: - return cls(*[int(x) for x in d.split("-") + t.split(":")]) - except: - break - - from rpki.mysql_import import MySQLdb - return MySQLdb.times.DateTime_or_None(s) - -class timedelta(pydatetime.timedelta): - """ - Timedelta with text parsing. This accepts two input formats: - - - A simple integer, indicating a number of seconds. - - - A string of the form "uY vW wD xH yM zS" where u, v, w, x, y, and z - are integers and Y, W, D, H, M, and S indicate years, weeks, days, - hours, minutes, and seconds. All of the fields are optional, but - at least one must be specified. Eg,"3D4H" means "three days plus - four hours". - - There is no "months" format, because the definition of a month is too - fuzzy to be useful (what day is six months from August 30th?) - - Similarly, the "years" conversion may produce surprising results, as - "one year" in conventional English does not refer to a fixed interval - but rather a fixed (and in some cases undefined) offset within the - Gregorian calendar (what day is one year from February 29th?) 1Y as - implemented by this code refers to a specific number of seconds. - If you mean 365 days or 52 weeks, say that instead. - """ - - ## @var regexp - # Hideously ugly regular expression to parse the complex text form. - # Tags are intended for use with re.MatchObject.groupdict() and map - # directly to the keywords expected by the timedelta constructor. - - regexp = re.compile("\\s*".join(("^", - "(?:(?P\\d+)Y)?", - "(?:(?P\\d+)W)?", - "(?:(?P\\d+)D)?", - "(?:(?P\\d+)H)?", - "(?:(?P\\d+)M)?", - "(?:(?P\\d+)S)?", - "$")), - re.I) - - ## @var years_to_seconds - # Conversion factor from years to seconds (value furnished by the - # "units" program). - - years_to_seconds = 31556926 - - @classmethod - def parse(cls, arg): - """ - Parse text into a timedelta object. 
- """ - if not isinstance(arg, str): - return cls(seconds = arg) - elif arg.isdigit(): - return cls(seconds = int(arg)) - else: - match = cls.regexp.match(arg) - if match: - #return cls(**dict((k, int(v)) for (k, v) in match.groupdict().items() if v is not None)) - d = match.groupdict("0") - for k, v in d.iteritems(): - d[k] = int(v) - d["days"] += d.pop("weeks") * 7 - d["seconds"] += d.pop("years") * cls.years_to_seconds - return cls(**d) - else: - raise ParseFailure, "Couldn't parse timedelta %r" % (arg,) - - def convert_to_seconds(self): - """ - Convert a timedelta interval to seconds. - """ - return self.days * 24 * 60 * 60 + self.seconds - - @classmethod - def fromtimedelta(cls, x): - """ - Convert a datetime.timedelta object into this subclass. - """ - return cls(days = x.days, seconds = x.seconds, microseconds = x.microseconds) - - def __abs__(self): return _cast(pydatetime.timedelta.__abs__(self)) - def __add__(self, x): return _cast(pydatetime.timedelta.__add__(self, x)) - def __div__(self, x): return _cast(pydatetime.timedelta.__div__(self, x)) - def __floordiv__(self, x): return _cast(pydatetime.timedelta.__floordiv__(self, x)) - def __mul__(self, x): return _cast(pydatetime.timedelta.__mul__(self, x)) - def __neg__(self): return _cast(pydatetime.timedelta.__neg__(self)) - def __pos__(self): return _cast(pydatetime.timedelta.__pos__(self)) - def __radd__(self, x): return _cast(pydatetime.timedelta.__radd__(self, x)) - def __rdiv__(self, x): return _cast(pydatetime.timedelta.__rdiv__(self, x)) - def __rfloordiv__(self, x): return _cast(pydatetime.timedelta.__rfloordiv__(self, x)) - def __rmul__(self, x): return _cast(pydatetime.timedelta.__rmul__(self, x)) - def __rsub__(self, x): return _cast(pydatetime.timedelta.__rsub__(self, x)) - def __sub__(self, x): return _cast(pydatetime.timedelta.__sub__(self, x)) - -def _cast(x): - """ - Cast result of arithmetic operations back into correct subtype. 
- """ - if isinstance(x, pydatetime.datetime): - return datetime.from_datetime(x) - if isinstance(x, pydatetime.timedelta): - return timedelta.fromtimedelta(x) - return x - -if __name__ == "__main__": - - def test(t): - print - print "str: ", t - print "repr: ", repr(t) - print "seconds since epoch:", t.strftime("%s") - print "XMLtime: ", t.toXMLtime() - print - - print - print "Testing time conversion routines" - test(now()) - test(now() + timedelta(days = 30)) - test(now() + timedelta.parse("3d5s")) - test(now() + timedelta.parse(" 3d 5s ")) - test(now() + timedelta.parse("1y3d5h")) diff --git a/rpkid/rpki/up_down.py b/rpkid/rpki/up_down.py deleted file mode 100644 index d2ad85d3..00000000 --- a/rpkid/rpki/up_down.py +++ /dev/null @@ -1,732 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -RPKI "up-down" protocol. 
-""" - -import base64 -import lxml.etree -import rpki.resource_set -import rpki.x509 -import rpki.exceptions -import rpki.log -import rpki.xml_utils -import rpki.relaxng - -xmlns = "http://www.apnic.net/specs/rescerts/up-down/" - -nsmap = { None : xmlns } - -class base_elt(object): - """ - Generic PDU object. - - Virtual class, just provides some default methods. - """ - - def startElement(self, stack, name, attrs): - """ - Ignore startElement() if there's no specific handler. - - Some elements have no attributes and we only care about their - text content. - """ - pass - - def endElement(self, stack, name, text): - """ - Ignore endElement() if there's no specific handler. - - If we don't need to do anything else, just pop the stack. - """ - stack.pop() - - def make_elt(self, name, *attrs): - """ - Construct a element, copying over a set of attributes. - """ - elt = lxml.etree.Element("{%s}%s" % (xmlns, name), nsmap=nsmap) - for key in attrs: - val = getattr(self, key, None) - if val is not None: - elt.set(key, str(val)) - return elt - - def make_b64elt(self, elt, name, value): - """ - Construct a sub-element with Base64 text content. - """ - if value is not None and not value.empty(): - lxml.etree.SubElement(elt, "{%s}%s" % (xmlns, name), nsmap=nsmap).text = value.get_Base64() - - def serve_pdu(self, q_msg, r_msg, child, callback, errback): - """ - Default PDU handler to catch unexpected types. - """ - raise rpki.exceptions.BadQuery("Unexpected query type %s" % q_msg.type) - - def check_response(self): - """ - Placeholder for response checking. - """ - pass - -class multi_uri(list): - """ - Container for a set of URIs. - """ - - def __init__(self, ini): - """ - Initialize a set of URIs, which includes basic some syntax checking. 
- """ - list.__init__(self) - if isinstance(ini, (list, tuple)): - self[:] = ini - elif isinstance(ini, str): - self[:] = ini.split(",") - for s in self: - if s.strip() != s or "://" not in s: - raise rpki.exceptions.BadURISyntax("Bad URI \"%s\"" % s) - else: - raise TypeError - - def __str__(self): - """ - Convert a multi_uri back to a string representation. - """ - return ",".join(self) - - def rsync(self): - """ - Find first rsync://... URI in self. - """ - for s in self: - if s.startswith("rsync://"): - return s - return None - -class certificate_elt(base_elt): - """ - Up-Down protocol representation of an issued certificate. - """ - - def startElement(self, stack, name, attrs): - """ - Handle attributes of element. - """ - assert name == "certificate", "Unexpected name %s, stack %s" % (name, stack) - self.cert_url = multi_uri(attrs["cert_url"]) - self.req_resource_set_as = rpki.resource_set.resource_set_as(attrs.get("req_resource_set_as")) - self.req_resource_set_ipv4 = rpki.resource_set.resource_set_ipv4(attrs.get("req_resource_set_ipv4")) - self.req_resource_set_ipv6 = rpki.resource_set.resource_set_ipv6(attrs.get("req_resource_set_ipv6")) - - def endElement(self, stack, name, text): - """ - Handle text content of a element. - """ - assert name == "certificate", "Unexpected name %s, stack %s" % (name, stack) - self.cert = rpki.x509.X509(Base64 = text) - stack.pop() - - def toXML(self): - """ - Generate a element. - """ - elt = self.make_elt("certificate", "cert_url", - "req_resource_set_as", "req_resource_set_ipv4", "req_resource_set_ipv6") - elt.text = self.cert.get_Base64() - return elt - -class class_elt(base_elt): - """ - Up-Down protocol representation of a resource class. - """ - - issuer = None - - def __init__(self): - """ - Initialize class_elt. - """ - base_elt.__init__(self) - self.certs = [] - - def startElement(self, stack, name, attrs): - """ - Handle elements and their children. 
- """ - if name == "certificate": - cert = certificate_elt() - self.certs.append(cert) - stack.append(cert) - cert.startElement(stack, name, attrs) - elif name != "issuer": - assert name == "class", "Unexpected name %s, stack %s" % (name, stack) - self.class_name = attrs["class_name"] - self.cert_url = multi_uri(attrs["cert_url"]) - self.suggested_sia_head = attrs.get("suggested_sia_head") - self.resource_set_as = rpki.resource_set.resource_set_as(attrs["resource_set_as"]) - self.resource_set_ipv4 = rpki.resource_set.resource_set_ipv4(attrs["resource_set_ipv4"]) - self.resource_set_ipv6 = rpki.resource_set.resource_set_ipv6(attrs["resource_set_ipv6"]) - self.resource_set_notafter = rpki.sundial.datetime.fromXMLtime(attrs.get("resource_set_notafter")) - - def endElement(self, stack, name, text): - """ - Handle elements and their children. - """ - if name == "issuer": - self.issuer = rpki.x509.X509(Base64 = text) - else: - assert name == "class", "Unexpected name %s, stack %s" % (name, stack) - stack.pop() - - def toXML(self): - """ - Generate a element. - """ - elt = self.make_elt("class", "class_name", "cert_url", "resource_set_as", - "resource_set_ipv4", "resource_set_ipv6", - "resource_set_notafter", "suggested_sia_head") - elt.extend([i.toXML() for i in self.certs]) - self.make_b64elt(elt, "issuer", self.issuer) - return elt - - def to_resource_bag(self): - """ - Build a resource_bag from from this element. - """ - return rpki.resource_set.resource_bag(self.resource_set_as, - self.resource_set_ipv4, - self.resource_set_ipv6, - self.resource_set_notafter) - - def from_resource_bag(self, bag): - """ - Set resources of this class element from a resource_bag. - """ - self.resource_set_as = bag.asn - self.resource_set_ipv4 = bag.v4 - self.resource_set_ipv6 = bag.v6 - self.resource_set_notafter = bag.valid_until - -class list_pdu(base_elt): - """ - Up-Down protocol "list" PDU. 
- """ - - def toXML(self): - """Generate (empty) payload of "list" PDU.""" - return [] - - def serve_pdu(self, q_msg, r_msg, child, callback, errback): - """ - Serve one "list" PDU. - """ - - def handle(irdb_resources): - - r_msg.payload = list_response_pdu() - - if irdb_resources.valid_until < rpki.sundial.now(): - rpki.log.debug("Child %s's resources expired %s" % (child.child_handle, irdb_resources.valid_until)) - else: - for parent in child.parents: - for ca in parent.cas: - ca_detail = ca.active_ca_detail - if not ca_detail: - rpki.log.debug("No active ca_detail, can't issue to %s" % child.child_handle) - continue - resources = ca_detail.latest_ca_cert.get_3779resources() & irdb_resources - if resources.empty(): - rpki.log.debug("No overlap between received resources and what child %s should get ([%s], [%s])" % (child.child_handle, ca_detail.latest_ca_cert.get_3779resources(), irdb_resources)) - continue - rc = class_elt() - rc.class_name = str(ca.ca_id) - rc.cert_url = multi_uri(ca_detail.ca_cert_uri) - rc.from_resource_bag(resources) - for child_cert in child.fetch_child_certs(ca_detail = ca_detail): - c = certificate_elt() - c.cert_url = multi_uri(child_cert.uri) - c.cert = child_cert.cert - rc.certs.append(c) - rc.issuer = ca_detail.latest_ca_cert - r_msg.payload.classes.append(rc) - - callback() - - self.gctx.irdb_query_child_resources(child.self.self_handle, child.child_handle, handle, errback) - - @classmethod - def query(cls, parent, cb, eb): - """ - Send a "list" query to parent. - """ - try: - rpki.log.info('Sending "list" request to parent %s' % parent.parent_handle) - parent.query_up_down(cls(), cb, eb) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - eb(e) - -class class_response_syntax(base_elt): - """ - Syntax for Up-Down protocol "list_response" and "issue_response" PDUs. - """ - - def __init__(self): - """ - Initialize class_response_syntax. 
- """ - base_elt.__init__(self) - self.classes = [] - - def startElement(self, stack, name, attrs): - """ - Handle "list_response" and "issue_response" PDUs. - """ - assert name == "class", "Unexpected name %s, stack %s" % (name, stack) - c = class_elt() - self.classes.append(c) - stack.append(c) - c.startElement(stack, name, attrs) - - def toXML(self): - """Generate payload of "list_response" and "issue_response" PDUs.""" - return [c.toXML() for c in self.classes] - -class list_response_pdu(class_response_syntax): - """ - Up-Down protocol "list_response" PDU. - """ - pass - -class issue_pdu(base_elt): - """ - Up-Down protocol "issue" PDU. - """ - - def startElement(self, stack, name, attrs): - """ - Handle "issue" PDU. - """ - assert name == "request", "Unexpected name %s, stack %s" % (name, stack) - self.class_name = attrs["class_name"] - self.req_resource_set_as = rpki.resource_set.resource_set_as(attrs.get("req_resource_set_as")) - self.req_resource_set_ipv4 = rpki.resource_set.resource_set_ipv4(attrs.get("req_resource_set_ipv4")) - self.req_resource_set_ipv6 = rpki.resource_set.resource_set_ipv6(attrs.get("req_resource_set_ipv6")) - - def endElement(self, stack, name, text): - """ - Handle "issue" PDU. - """ - assert name == "request", "Unexpected name %s, stack %s" % (name, stack) - self.pkcs10 = rpki.x509.PKCS10(Base64 = text) - stack.pop() - - def toXML(self): - """ - Generate payload of "issue" PDU. - """ - elt = self.make_elt("request", "class_name", "req_resource_set_as", - "req_resource_set_ipv4", "req_resource_set_ipv6") - elt.text = self.pkcs10.get_Base64() - return [elt] - - def serve_pdu(self, q_msg, r_msg, child, callback, errback): - """ - Serve one issue request PDU. - """ - - # Subsetting not yet implemented, this is the one place where we - # have to handle it, by reporting that we're lame. 
- - if self.req_resource_set_as or \ - self.req_resource_set_ipv4 or \ - self.req_resource_set_ipv6: - raise rpki.exceptions.NotImplementedYet("req_* attributes not implemented yet, sorry") - - # Check the request - self.pkcs10.check_valid_request_ca() - ca = child.ca_from_class_name(self.class_name) - ca_detail = ca.active_ca_detail - if ca_detail is None: - raise rpki.exceptions.NoActiveCA("No active CA for class %r" % self.class_name) - - # Check current cert, if any - - def got_resources(irdb_resources): - - if irdb_resources.valid_until < rpki.sundial.now(): - raise rpki.exceptions.IRDBExpired("IRDB entry for child %s expired %s" % ( - child.child_handle, irdb_resources.valid_until)) - - resources = irdb_resources & ca_detail.latest_ca_cert.get_3779resources() - resources.valid_until = irdb_resources.valid_until - req_key = self.pkcs10.getPublicKey() - req_sia = self.pkcs10.get_SIA() - child_cert = child.fetch_child_certs(ca_detail = ca_detail, ski = req_key.get_SKI(), unique = True) - - # Generate new cert or regenerate old one if necessary - - publisher = rpki.rpkid.publication_queue() - - if child_cert is None: - child_cert = ca_detail.issue( - ca = ca, - child = child, - subject_key = req_key, - sia = req_sia, - resources = resources, - publisher = publisher) - else: - child_cert = child_cert.reissue( - ca_detail = ca_detail, - sia = req_sia, - resources = resources, - publisher = publisher) - - def done(): - c = certificate_elt() - c.cert_url = multi_uri(child_cert.uri) - c.cert = child_cert.cert - rc = class_elt() - rc.class_name = self.class_name - rc.cert_url = multi_uri(ca_detail.ca_cert_uri) - rc.from_resource_bag(resources) - rc.certs.append(c) - rc.issuer = ca_detail.latest_ca_cert - r_msg.payload = issue_response_pdu() - r_msg.payload.classes.append(rc) - callback() - - self.gctx.sql.sweep() - assert child_cert and child_cert.sql_in_db - publisher.call_pubd(done, errback) - - self.gctx.irdb_query_child_resources(child.self.self_handle, 
child.child_handle, got_resources, errback) - - @classmethod - def query(cls, parent, ca, ca_detail, callback, errback): - """ - Send an "issue" request to parent associated with ca. - """ - assert ca_detail is not None and ca_detail.state in ("pending", "active") - self = cls() - self.class_name = ca.parent_resource_class - self.pkcs10 = rpki.x509.PKCS10.create( - keypair = ca_detail.private_key_id, - is_ca = True, - caRepository = ca.sia_uri, - rpkiManifest = ca_detail.manifest_uri) - rpki.log.info('Sending "issue" request to parent %s' % parent.parent_handle) - parent.query_up_down(self, callback, errback) - -class issue_response_pdu(class_response_syntax): - """ - Up-Down protocol "issue_response" PDU. - """ - - def check_response(self): - """ - Check whether this looks like a reasonable issue_response PDU. - XML schema should be tighter for this response. - """ - if len(self.classes) != 1 or len(self.classes[0].certs) != 1: - raise rpki.exceptions.BadIssueResponse - -class revoke_syntax(base_elt): - """ - Syntax for Up-Down protocol "revoke" and "revoke_response" PDUs. - """ - - def startElement(self, stack, name, attrs): - """Handle "revoke" PDU.""" - self.class_name = attrs["class_name"] - self.ski = attrs["ski"] - - def toXML(self): - """Generate payload of "revoke" PDU.""" - return [self.make_elt("key", "class_name", "ski")] - -class revoke_pdu(revoke_syntax): - """ - Up-Down protocol "revoke" PDU. - """ - - def get_SKI(self): - """ - Convert g(SKI) encoding from PDU back to raw SKI. - """ - return base64.urlsafe_b64decode(self.ski + "=") - - def serve_pdu(self, q_msg, r_msg, child, cb, eb): - """ - Serve one revoke request PDU. 
- """ - - def done(): - r_msg.payload = revoke_response_pdu() - r_msg.payload.class_name = self.class_name - r_msg.payload.ski = self.ski - cb() - - ca = child.ca_from_class_name(self.class_name) - publisher = rpki.rpkid.publication_queue() - for ca_detail in ca.ca_details: - for child_cert in child.fetch_child_certs(ca_detail = ca_detail, ski = self.get_SKI()): - child_cert.revoke(publisher = publisher) - self.gctx.sql.sweep() - publisher.call_pubd(done, eb) - - @classmethod - def query(cls, ca, gski, cb, eb): - """ - Send a "revoke" request for certificate(s) named by gski to parent associated with ca. - """ - parent = ca.parent - self = cls() - self.class_name = ca.parent_resource_class - self.ski = gski - rpki.log.info('Sending "revoke" request for SKI %s to parent %s' % (gski, parent.parent_handle)) - parent.query_up_down(self, cb, eb) - -class revoke_response_pdu(revoke_syntax): - """ - Up-Down protocol "revoke_response" PDU. - """ - - pass - -class error_response_pdu(base_elt): - """ - Up-Down protocol "error_response" PDU. - """ - - codes = { - 1101 : "Already processing request", - 1102 : "Version number error", - 1103 : "Unrecognised request type", - 1201 : "Request - no such resource class", - 1202 : "Request - no resources allocated in resource class", - 1203 : "Request - badly formed certificate request", - 1301 : "Revoke - no such resource class", - 1302 : "Revoke - no such key", - 2001 : "Internal Server Error - Request not performed" } - - exceptions = { - rpki.exceptions.NoActiveCA : 1202, - (rpki.exceptions.ClassNameUnknown, revoke_pdu) : 1301, - rpki.exceptions.ClassNameUnknown : 1201, - (rpki.exceptions.NotInDatabase, revoke_pdu) : 1302 } - - def __init__(self, exception = None, request_payload = None): - """ - Initialize an error_response PDU from an exception object. 
- """ - base_elt.__init__(self) - if exception is not None: - rpki.log.debug("Constructing up-down error response from exception %s" % exception) - exception_type = type(exception) - request_type = None if request_payload is None else type(request_payload) - rpki.log.debug("Constructing up-down error response: exception_type %s, request_type %s" % ( - exception_type, request_type)) - if False: - self.status = self.exceptions.get((exception_type, request_type), - self.exceptions.get(exception_type, - 2001)) - else: - self.status = self.exceptions.get((exception_type, request_type)) - if self.status is None: - rpki.log.debug("No request-type-specific match, trying exception match") - self.status = self.exceptions.get(exception_type) - if self.status is None: - rpki.log.debug("No exception match either, defaulting") - self.status = 2001 - self.description = str(exception) - rpki.log.debug("Chosen status code: %s" % self.status) - - def endElement(self, stack, name, text): - """ - Handle "error_response" PDU. - """ - if name == "status": - code = int(text) - if code not in self.codes: - raise rpki.exceptions.BadStatusCode("%s is not a known status code" % code) - self.status = code - elif name == "description": - self.description = text - else: - assert name == "message", "Unexpected name %s, stack %s" % (name, stack) - stack.pop() - stack[-1].endElement(stack, name, text) - - def toXML(self): - """ - Generate payload of "error_response" PDU. - """ - assert self.status in self.codes - elt = self.make_elt("status") - elt.text = str(self.status) - payload = [elt] - if self.description: - elt = self.make_elt("description") - elt.text = str(self.description) - elt.set("{http://www.w3.org/XML/1998/namespace}lang", "en-US") - payload.append(elt) - return payload - - def check_response(self): - """ - Handle an error response. For now, just raise an exception, - perhaps figure out something more clever to do later. 
- """ - raise rpki.exceptions.UpstreamError(self.codes[self.status]) - -class message_pdu(base_elt): - """ - Up-Down protocol message wrapper PDU. - """ - - version = 1 - - name2type = { - "list" : list_pdu, - "list_response" : list_response_pdu, - "issue" : issue_pdu, - "issue_response" : issue_response_pdu, - "revoke" : revoke_pdu, - "revoke_response" : revoke_response_pdu, - "error_response" : error_response_pdu } - - type2name = dict((v, k) for k, v in name2type.items()) - - error_pdu_type = error_response_pdu - - def toXML(self): - """ - Generate payload of message PDU. - """ - elt = self.make_elt("message", "version", "sender", "recipient", "type") - elt.extend(self.payload.toXML()) - return elt - - def startElement(self, stack, name, attrs): - """ - Handle message PDU. - - Payload of the element varies depending on the "type" - attribute, so after some basic checks we have to instantiate the - right class object to handle whatever kind of PDU this is. - """ - assert name == "message", "Unexpected name %s, stack %s" % (name, stack) - assert self.version == int(attrs["version"]) - self.sender = attrs["sender"] - self.recipient = attrs["recipient"] - self.type = attrs["type"] - self.payload = self.name2type[attrs["type"]]() - stack.append(self.payload) - - def __str__(self): - """ - Convert a message PDU to a string. - """ - return lxml.etree.tostring(self.toXML(), pretty_print = True, encoding = "UTF-8") - - def serve_top_level(self, child, callback): - """ - Serve one message request PDU. 
- """ - - r_msg = message_pdu() - r_msg.sender = self.recipient - r_msg.recipient = self.sender - - def done(): - r_msg.type = self.type2name[type(r_msg.payload)] - callback(r_msg) - - def lose(e): - rpki.log.traceback() - callback(self.serve_error(e)) - - try: - self.log_query(child) - self.payload.serve_pdu(self, r_msg, child, done, lose) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - lose(e) - - def log_query(self, child): - """ - Log query we're handling. Separate method so rootd can override. - """ - rpki.log.info("Serving %s query from child %s [sender %s, recipient %s]" % (self.type, child.child_handle, self.sender, self.recipient)) - - def serve_error(self, exception): - """ - Generate an error_response message PDU. - """ - r_msg = message_pdu() - r_msg.sender = self.recipient - r_msg.recipient = self.sender - r_msg.payload = self.error_pdu_type(exception, self.payload) - r_msg.type = self.type2name[type(r_msg.payload)] - return r_msg - - @classmethod - def make_query(cls, payload, sender, recipient): - """ - Construct one message PDU. - """ - assert not cls.type2name[type(payload)].endswith("_response") - if sender is None: - sender = "tweedledee" - if recipient is None: - recipient = "tweedledum" - self = cls() - self.sender = sender - self.recipient = recipient - self.payload = payload - self.type = self.type2name[type(payload)] - return self - -class sax_handler(rpki.xml_utils.sax_handler): - """ - SAX handler for Up-Down protocol. - """ - - pdu = message_pdu - name = "message" - version = "1" - -class cms_msg(rpki.x509.XML_CMS_object): - """ - Class to hold a CMS-signed up-down PDU. 
- """ - - encoding = "UTF-8" - schema = rpki.relaxng.up_down - saxify = sax_handler.saxify - allow_extra_certs = True - allow_extra_crls = True diff --git a/rpkid/rpki/x509.py b/rpkid/rpki/x509.py deleted file mode 100644 index fb1a5a2b..00000000 --- a/rpkid/rpki/x509.py +++ /dev/null @@ -1,2031 +0,0 @@ -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2013 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -One X.509 implementation to rule them all... - -...and in the darkness hide the twisty maze of partially-overlapping -X.509 support packages in Python. - -Once upon a time we were using four separate partially-overlapping -implementions of X.509 and related protocols. Over the years this has -collapsed down to one, but the interface module we built on top of the -previous mess has itself become heavily embedded in the code base. So -this is a bit more complicated (not to mention baroque) than one might -expect for a module that had grown in a saner fashion. We clean up -bits of it from time to time. Some day this may all make sense. 
-""" - -import rpki.POW -import base64 -import lxml.etree -import os -import subprocess -import email.mime.application -import email.utils -import mailbox -import time -import rpki.exceptions -import rpki.resource_set -import rpki.oids -import rpki.sundial -import rpki.log -import rpki.async -import rpki.relaxng - -def base64_with_linebreaks(der): - """ - Encode DER (really, anything) as Base64 text, with linebreaks to - keep the result (sort of) readable. - """ - b = base64.b64encode(der) - n = len(b) - return "\n" + "\n".join(b[i : min(i + 64, n)] for i in xrange(0, n, 64)) + "\n" - -def looks_like_PEM(text): - """ - Guess whether text looks like a PEM encoding. - """ - - i = text.find("-----BEGIN ") - return i >= 0 and text.find("\n-----END ", i) > i - -def first_rsync_uri(xia): - """ - Find first rsync URI in a sequence of AIA or SIA URIs. - Returns the URI if found, otherwise None. - """ - - if xia is not None: - for uri in xia: - if uri.startswith("rsync://"): - return uri - return None - -class X501DN(object): - """ - Class to hold an X.501 Distinguished Name. - - This is nothing like a complete implementation, just enough for our - purposes. See RFC 5280 4.1.2.4 for the ASN.1 details. In brief: - - - A DN is a SEQUENCE OF RDNs. - - - A RDN is a SET OF AttributeAndValues; in practice, multi-value - RDNs are rare, so an RDN is almost always a set with a single - element. - - - An AttributeAndValue is a SEQUENCE consisting of a OID and a - value, where a whole bunch of things including both syntax and - semantics of the value are determined by the OID. - - - The value is some kind of ASN.1 string; there are far too many - encoding options options, most of which are either strongly - discouraged or outright forbidden by the PKIX profile, but which - persist for historical reasons. The only ones PKIX actually - likes are PrintableString and UTF8String, but there are nuances - and special cases where some of the others are required. 
- - The RPKI profile further restricts DNs to a single mandatory - CommonName attribute with a single optional SerialNumber attribute - (not to be confused with the certificate serial number). - - BPKI certificates should (we hope) follow the general PKIX guideline - but the ones we construct ourselves are likely to be relatively - simple. - """ - - def __str__(self): - return "".join("/" + "+".join("%s=%s" % (rpki.oids.oid2name(a[0]), a[1]) - for a in rdn) - for rdn in self.dn) - - def __cmp__(self, other): - return cmp(self.dn, other.dn) - - def __repr__(self): - return rpki.log.log_repr(self, str(self)) - - def _debug(self): - if False: - import traceback - for chunk in traceback.format_stack(limit = 5): - for line in chunk.splitlines(): - rpki.log.debug("== %s" % line) - rpki.log.debug("++ %r %r" % (self, self.dn)) - - @classmethod - def from_cn(cls, cn, sn = None): - assert isinstance(cn, (str, unicode)) - if isinstance(sn, (int, long)): - sn = "%08X" % sn - elif isinstance(sn, (str, unicode)): - assert all(c in "0123456789abcdefABCDEF" for c in sn) - sn = str(sn) - self = cls() - if sn is not None: - self.dn = (((rpki.oids.commonName, cn),), ((rpki.oids.serialNumber, sn),)) - else: - self.dn = (((rpki.oids.commonName, cn),),) - return self - - @classmethod - def from_POW(cls, t): - assert isinstance(t, tuple) - self = cls() - self.dn = t - return self - - def get_POW(self): - return self.dn - - def extract_cn_and_sn(self): - cn = None - sn = None - - for rdn in self.dn: - if len(rdn) == 1 and len(rdn[0]) == 2: - oid = rdn[0][0] - val = rdn[0][1] - if oid == rpki.oids.commonName and cn is None: - cn = val - continue - if oid == rpki.oids.serialNumber and sn is None: - sn = val - continue - raise rpki.exceptions.BadX510DN("Bad subject name: %s" % (self.dn,)) - - if cn is None: - raise rpki.exceptions.BadX510DN("Subject name is missing CN: %s" % (self.dn,)) - - return cn, sn - - -class DER_object(object): - """ - Virtual class to hold a generic DER object. 
- """ - - ## @var formats - # Formats supported in this object. This is kind of redundant now - # that we're down to a single ASN.1 package and everything supports - # the same DER and POW formats, it's mostly historical baggage from - # the days when we had three different ASN.1 encoders, each with its - # own low-level Python object format. Clean up, some day. - formats = ("DER", "POW") - - ## @var POW_class - # Class of underlying POW object. Concrete subclasses must supply this. - POW_class = None - - ## Other attributes that self.clear() should whack. - other_clear = () - - ## @var DER - # DER value of this object - DER = None - - ## @var failure_threshold - # Rate-limiting interval between whines about Auto_update objects. - failure_threshold = rpki.sundial.timedelta(minutes = 5) - - def empty(self): - """ - Test whether this object is empty. - """ - return all(getattr(self, a, None) is None for a in self.formats) - - def clear(self): - """ - Make this object empty. - """ - for a in self.formats + self.other_clear: - setattr(self, a, None) - self.filename = None - self.timestamp = None - self.lastfail = None - - def __init__(self, **kw): - """ - Initialize a DER_object. - """ - self.clear() - if len(kw): - self.set(**kw) - - def set(self, **kw): - """ - Set this object by setting one of its known formats. - - This method only allows one to set one format at a time. - Subsequent calls will clear the object first. The point of all - this is to let the object's internal converters handle mustering - the object into whatever format you need at the moment. 
- """ - - if len(kw) == 1: - name = kw.keys()[0] - if name in self.formats: - self.clear() - setattr(self, name, kw[name]) - return - if name == "PEM": - self.clear() - self._set_PEM(kw[name]) - return - if name == "Base64": - self.clear() - self.DER = base64.b64decode(kw[name]) - return - if name == "Auto_update": - self.filename = kw[name] - self.check_auto_update() - return - if name in ("PEM_file", "DER_file", "Auto_file"): - f = open(kw[name], "rb") - value = f.read() - f.close() - self.clear() - if name == "PEM_file" or (name == "Auto_file" and looks_like_PEM(value)): - self._set_PEM(value) - else: - self.DER = value - return - raise rpki.exceptions.DERObjectConversionError("Can't honor conversion request %r" % (kw,)) - - def check_auto_update(self): - """ - Check for updates to a DER object that auto-updates from a file. - """ - if self.filename is None: - return - try: - filename = self.filename - timestamp = os.stat(self.filename).st_mtime - if self.timestamp is None or self.timestamp < timestamp: - rpki.log.debug("Updating %s, timestamp %s" % (filename, rpki.sundial.datetime.fromtimestamp(timestamp))) - f = open(filename, "rb") - value = f.read() - f.close() - self.clear() - if looks_like_PEM(value): - self._set_PEM(value) - else: - self.DER = value - self.filename = filename - self.timestamp = timestamp - except (IOError, OSError), e: - now = rpki.sundial.now() - if self.lastfail is None or now > self.lastfail + self.failure_threshold: - rpki.log.warn("Could not auto_update %r (last failure %s): %s" % (self, self.lastfail, e)) - self.lastfail = now - else: - self.lastfail = None - - def check(self): - """ - Perform basic checks on a DER object. - """ - self.check_auto_update() - assert not self.empty() - - def _set_PEM(self, pem): - """ - Set the POW value of this object based on a PEM input value. - Subclasses may need to override this. 
- """ - assert self.empty() - self.POW = self.POW_class.pemRead(pem) - - def get_DER(self): - """ - Get the DER value of this object. - Subclasses may need to override this method. - """ - self.check() - if self.DER: - return self.DER - if self.POW: - self.DER = self.POW.derWrite() - return self.get_DER() - raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") - - def get_POW(self): - """ - Get the rpki.POW value of this object. - Subclasses may need to override this method. - """ - self.check() - if not self.POW: # pylint: disable=E0203 - self.POW = self.POW_class.derRead(self.get_DER()) - return self.POW - - def get_Base64(self): - """ - Get the Base64 encoding of the DER value of this object. - """ - return base64_with_linebreaks(self.get_DER()) - - def get_PEM(self): - """ - Get the PEM representation of this object. - """ - return self.get_POW().pemWrite() - - def __cmp__(self, other): - """ - Compare two DER-encoded objects. - """ - if self is None and other is None: - return 0 - elif self is None: - return -1 - elif other is None: - return 1 - elif isinstance(other, str): - return cmp(self.get_DER(), other) - else: - return cmp(self.get_DER(), other.get_DER()) - - def hSKI(self): - """ - Return hexadecimal string representation of SKI for this object. - Only work for subclasses that implement get_SKI(). - """ - ski = self.get_SKI() - return ":".join(("%02X" % ord(i) for i in ski)) if ski else "" - - def gSKI(self): - """ - Calculate g(SKI) for this object. Only work for subclasses - that implement get_SKI(). - """ - return base64.urlsafe_b64encode(self.get_SKI()).rstrip("=") - - def hAKI(self): - """ - Return hexadecimal string representation of AKI for this - object. Only work for subclasses that implement get_AKI(). - """ - aki = self.get_AKI() - return ":".join(("%02X" % ord(i) for i in aki)) if aki else "" - - def gAKI(self): - """ - Calculate g(AKI) for this object. Only work for subclasses - that implement get_AKI(). 
- """ - return base64.urlsafe_b64encode(self.get_AKI()).rstrip("=") - - def get_AKI(self): - """ - Get the AKI extension from this object, if supported. - """ - return self.get_POW().getAKI() - - def get_SKI(self): - """ - Get the SKI extension from this object, if supported. - """ - return self.get_POW().getSKI() - - def get_EKU(self): - """ - Get the Extended Key Usage extension from this object, if supported. - """ - return self.get_POW().getEKU() - - def get_SIA(self): - """ - Get the SIA extension from this object. Only works for subclasses - that support getSIA(). - """ - return self.get_POW().getSIA() - - def get_sia_directory_uri(self): - """ - Get SIA directory (id-ad-caRepository) URI from this object. - Only works for subclasses that support getSIA(). - """ - sia = self.get_POW().getSIA() - return None if sia is None else first_rsync_uri(sia[0]) - - def get_sia_manifest_uri(self): - """ - Get SIA manifest (id-ad-rpkiManifest) URI from this object. - Only works for subclasses that support getSIA(). - """ - sia = self.get_POW().getSIA() - return None if sia is None else first_rsync_uri(sia[1]) - - def get_sia_object_uri(self): - """ - Get SIA object (id-ad-signedObject) URI from this object. - Only works for subclasses that support getSIA(). - """ - sia = self.get_POW().getSIA() - return None if sia is None else first_rsync_uri(sia[2]) - - def get_AIA(self): - """ - Get the SIA extension from this object. Only works for subclasses - that support getAIA(). - """ - return self.get_POW().getAIA() - - def get_aia_uri(self): - """ - Get AIA (id-ad-caIssuers) URI from this object. - Only works for subclasses that support getAIA(). - """ - return first_rsync_uri(self.get_POW().getAIA()) - - def get_basicConstraints(self): - """ - Get the basicConstraints extension from this object. Only works - for subclasses that support getExtension(). 
- """ - return self.get_POW().getBasicConstraints() - - def is_CA(self): - """ - Return True if and only if object has the basicConstraints - extension and its cA value is true. - """ - basicConstraints = self.get_basicConstraints() - return basicConstraints is not None and basicConstraints[0] - - def get_3779resources(self): - """ - Get RFC 3779 resources as rpki.resource_set objects. - """ - resources = rpki.resource_set.resource_bag.from_POW_rfc3779(self.get_POW().getRFC3779()) - try: - resources.valid_until = self.getNotAfter() - except AttributeError: - pass - return resources - - @classmethod - def from_sql(cls, x): - """ - Convert from SQL storage format. - """ - return cls(DER = x) - - def to_sql(self): - """ - Convert to SQL storage format. - """ - return self.get_DER() - - def dumpasn1(self): - """ - Pretty print an ASN.1 DER object using cryptlib dumpasn1 tool. - Use a temporary file rather than popen4() because dumpasn1 uses - seek() when decoding ASN.1 content nested in OCTET STRING values. - """ - - ret = None - fn = "dumpasn1.%d.tmp" % os.getpid() - try: - f = open(fn, "wb") - f.write(self.get_DER()) - f.close() - p = subprocess.Popen(("dumpasn1", "-a", fn), stdout = subprocess.PIPE, stderr = subprocess.STDOUT) - ret = "\n".join(x for x in p.communicate()[0].splitlines() if x.startswith(" ")) - except Exception, e: - ret = "[Could not run dumpasn1: %s]" % e - finally: - os.unlink(fn) - return ret - - def tracking_data(self, uri): - """ - Return a string containing data we want to log when tracking how - objects move through the RPKI system. Subclasses may wrap this to - provide more information, but should make sure to include at least - this information at the start of the tracking line. 
- """ - try: - d = rpki.POW.Digest(rpki.POW.SHA1_DIGEST) - d.update(self.get_DER()) - return "%s %s %s" % (uri, self.creation_timestamp, - "".join(("%02X" % ord(b) for b in d.digest()))) - except: # pylint: disable=W0702 - return uri - - def __getstate__(self): - """ - Pickling protocol -- pickle the DER encoding. - """ - return self.get_DER() - - def __setstate__(self, state): - """ - Pickling protocol -- unpickle the DER encoding. - """ - self.set(DER = state) - -class X509(DER_object): - """ - X.509 certificates. - - This class is designed to hold all the different representations of - X.509 certs we're using and convert between them. X.509 support in - Python a nasty maze of half-cooked stuff (except perhaps for - cryptlib, which is just different). Users of this module should not - have to care about this implementation nightmare. - """ - - POW_class = rpki.POW.X509 - - def getIssuer(self): - """ - Get the issuer of this certificate. - """ - return X501DN.from_POW(self.get_POW().getIssuer()) - - def getSubject(self): - """ - Get the subject of this certificate. - """ - return X501DN.from_POW(self.get_POW().getSubject()) - - def getNotBefore(self): - """ - Get the inception time of this certificate. - """ - return self.get_POW().getNotBefore() - - def getNotAfter(self): - """ - Get the expiration time of this certificate. - """ - return self.get_POW().getNotAfter() - - def getSerial(self): - """ - Get the serial number of this certificate. - """ - return self.get_POW().getSerial() - - def getPublicKey(self): - """ - Extract the public key from this certificate. - """ - return PublicKey(POW = self.get_POW().getPublicKey()) - - def get_SKI(self): - """ - Get the SKI extension from this object. - """ - return self.get_POW().getSKI() - - def expired(self): - """ - Test whether this certificate has expired. 
- """ - return self.getNotAfter() <= rpki.sundial.now() - - def issue(self, keypair, subject_key, serial, sia, aia, crldp, notAfter, - cn = None, resources = None, is_ca = True, notBefore = None, - sn = None, eku = None): - """ - Issue an RPKI certificate. - """ - - assert aia is not None and crldp is not None - - assert eku is None or not is_ca - - return self._issue( - keypair = keypair, - subject_key = subject_key, - serial = serial, - sia = sia, - aia = aia, - crldp = crldp, - notBefore = notBefore, - notAfter = notAfter, - cn = cn, - sn = sn, - resources = resources, - is_ca = is_ca, - aki = self.get_SKI(), - issuer_name = self.getSubject(), - eku = eku) - - - @classmethod - def self_certify(cls, keypair, subject_key, serial, sia, notAfter, - cn = None, resources = None, notBefore = None, - sn = None): - """ - Generate a self-certified RPKI certificate. - """ - - ski = subject_key.get_SKI() - - if cn is None: - cn = "".join(("%02X" % ord(i) for i in ski)) - - return cls._issue( - keypair = keypair, - subject_key = subject_key, - serial = serial, - sia = sia, - aia = None, - crldp = None, - notBefore = notBefore, - notAfter = notAfter, - cn = cn, - sn = sn, - resources = resources, - is_ca = True, - aki = ski, - issuer_name = X501DN.from_cn(cn, sn), - eku = None) - - - @classmethod - def _issue(cls, keypair, subject_key, serial, sia, aia, crldp, notAfter, - cn, sn, resources, is_ca, aki, issuer_name, notBefore, eku): - """ - Common code to issue an RPKI certificate. 
- """ - - now = rpki.sundial.now() - ski = subject_key.get_SKI() - - if notBefore is None: - notBefore = now - - if cn is None: - cn = "".join(("%02X" % ord(i) for i in ski)) - - if now >= notAfter: - raise rpki.exceptions.PastNotAfter("notAfter value %s is already in the past" % notAfter) - - if notBefore >= notAfter: - raise rpki.exceptions.NullValidityInterval("notAfter value %s predates notBefore value %s" % - (notAfter, notBefore)) - - cert = rpki.POW.X509() - - cert.setVersion(2) - cert.setSerial(serial) - cert.setIssuer(issuer_name.get_POW()) - cert.setSubject(X501DN.from_cn(cn, sn).get_POW()) - cert.setNotBefore(notBefore) - cert.setNotAfter(notAfter) - cert.setPublicKey(subject_key.get_POW()) - cert.setSKI(ski) - cert.setAKI(aki) - cert.setCertificatePolicies((rpki.oids.id_cp_ipAddr_asNumber,)) - - if crldp is not None: - cert.setCRLDP((crldp,)) - - if aia is not None: - cert.setAIA((aia,)) - - if is_ca: - cert.setBasicConstraints(True, None) - cert.setKeyUsage(frozenset(("keyCertSign", "cRLSign"))) - - else: - cert.setKeyUsage(frozenset(("digitalSignature",))) - - assert sia is not None or not is_ca - - if sia is not None: - caRepository, rpkiManifest, signedObject = sia - cert.setSIA( - (caRepository,) if isinstance(caRepository, str) else caRepository, - (rpkiManifest,) if isinstance(rpkiManifest, str) else rpkiManifest, - (signedObject,) if isinstance(signedObject, str) else signedObject) - - if resources is not None: - cert.setRFC3779( - asn = ("inherit" if resources.asn.inherit else - ((r.min, r.max) for r in resources.asn)), - ipv4 = ("inherit" if resources.v4.inherit else - ((r.min, r.max) for r in resources.v4)), - ipv6 = ("inherit" if resources.v6.inherit else - ((r.min, r.max) for r in resources.v6))) - - if eku is not None: - assert not is_ca - cert.setEKU(eku) - - cert.sign(keypair.get_POW(), rpki.POW.SHA256_DIGEST) - - return cls(POW = cert) - - def bpki_cross_certify(self, keypair, source_cert, serial, notAfter, - now = None, 
pathLenConstraint = 0): - """ - Issue a BPKI certificate with values taking from an existing certificate. - """ - return self.bpki_certify( - keypair = keypair, - subject_name = source_cert.getSubject(), - subject_key = source_cert.getPublicKey(), - serial = serial, - notAfter = notAfter, - now = now, - pathLenConstraint = pathLenConstraint, - is_ca = True) - - @classmethod - def bpki_self_certify(cls, keypair, subject_name, serial, notAfter, - now = None, pathLenConstraint = None): - """ - Issue a self-signed BPKI CA certificate. - """ - return cls._bpki_certify( - keypair = keypair, - issuer_name = subject_name, - subject_name = subject_name, - subject_key = keypair.get_public(), - serial = serial, - now = now, - notAfter = notAfter, - pathLenConstraint = pathLenConstraint, - is_ca = True) - - def bpki_certify(self, keypair, subject_name, subject_key, serial, notAfter, is_ca, - now = None, pathLenConstraint = None): - """ - Issue a normal BPKI certificate. - """ - assert keypair.get_public() == self.getPublicKey() - return self._bpki_certify( - keypair = keypair, - issuer_name = self.getSubject(), - subject_name = subject_name, - subject_key = subject_key, - serial = serial, - now = now, - notAfter = notAfter, - pathLenConstraint = pathLenConstraint, - is_ca = is_ca) - - @classmethod - def _bpki_certify(cls, keypair, issuer_name, subject_name, subject_key, - serial, now, notAfter, pathLenConstraint, is_ca): - """ - Issue a BPKI certificate. This internal method does the real - work, after one of the wrapper methods has extracted the relevant - fields. 
- """ - - if now is None: - now = rpki.sundial.now() - - issuer_key = keypair.get_public() - - assert (issuer_key == subject_key) == (issuer_name == subject_name) - assert is_ca or issuer_name != subject_name - assert is_ca or pathLenConstraint is None - assert pathLenConstraint is None or (isinstance(pathLenConstraint, (int, long)) and - pathLenConstraint >= 0) - - cert = rpki.POW.X509() - cert.setVersion(2) - cert.setSerial(serial) - cert.setIssuer(issuer_name.get_POW()) - cert.setSubject(subject_name.get_POW()) - cert.setNotBefore(now) - cert.setNotAfter(notAfter) - cert.setPublicKey(subject_key.get_POW()) - cert.setSKI(subject_key.get_POW().calculateSKI()) - if issuer_key != subject_key: - cert.setAKI(issuer_key.get_POW().calculateSKI()) - if is_ca: - cert.setBasicConstraints(True, pathLenConstraint) - cert.sign(keypair.get_POW(), rpki.POW.SHA256_DIGEST) - return cls(POW = cert) - - @classmethod - def normalize_chain(cls, chain): - """ - Normalize a chain of certificates into a tuple of X509 objects. - Given all the glue certificates needed for BPKI cross - certification, it's easiest to allow sloppy arguments to the CMS - validation methods and provide a single method that normalizes the - allowed cases. So this method allows X509, None, lists, and - tuples, and returns a tuple of X509 objects. - """ - if isinstance(chain, cls): - chain = (chain,) - return tuple(x for x in chain if x is not None) - - @property - def creation_timestamp(self): - """ - Time at which this object was created. - """ - return self.getNotBefore() - -class PKCS10(DER_object): - """ - Class to hold a PKCS #10 request. - """ - - POW_class = rpki.POW.PKCS10 - - ## @var expected_ca_keyUsage - # KeyUsage extension flags expected for CA requests. - - expected_ca_keyUsage = frozenset(("keyCertSign", "cRLSign")) - - ## @var allowed_extensions - # Extensions allowed by RPKI profile. 
- - allowed_extensions = frozenset((rpki.oids.basicConstraints, - rpki.oids.keyUsage, - rpki.oids.subjectInfoAccess, - rpki.oids.extendedKeyUsage)) - - - def get_DER(self): - """ - Get the DER value of this certification request. - """ - self.check() - if self.DER: - return self.DER - if self.POW: - self.DER = self.POW.derWrite() - return self.get_DER() - raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") - - def get_POW(self): - """ - Get the rpki.POW value of this certification request. - """ - self.check() - if not self.POW: # pylint: disable=E0203 - self.POW = rpki.POW.PKCS10.derRead(self.get_DER()) - return self.POW - - def getSubject(self): - """ - Extract the subject name from this certification request. - """ - return X501DN.from_POW(self.get_POW().getSubject()) - - def getPublicKey(self): - """ - Extract the public key from this certification request. - """ - return PublicKey(POW = self.get_POW().getPublicKey()) - - def get_SKI(self): - """ - Compute SKI for public key from this certification request. - """ - return self.getPublicKey().get_SKI() - - - def check_valid_request_common(self): - """ - Common code for checking this certification requests to see - whether they conform to the RPKI certificate profile. - - Throws an exception if the request isn't valid, so if this method - returns at all, the request is ok. - - You probably don't want to call this directly, as it only performs - the checks that are common to all RPKI certificates. 
- """ - - if not self.get_POW().verify(): - raise rpki.exceptions.BadPKCS10("PKCS #10 signature check failed") - - ver = self.get_POW().getVersion() - - if ver != 0: - raise rpki.exceptions.BadPKCS10("PKCS #10 request has bad version number %s" % ver) - - ku = self.get_POW().getKeyUsage() - - if ku is not None and self.expected_ca_keyUsage != ku: - raise rpki.exceptions.BadPKCS10("PKCS #10 keyUsage doesn't match profile: %r" % ku) - - forbidden_extensions = self.get_POW().getExtensionOIDs() - self.allowed_extensions - - if forbidden_extensions: - raise rpki.exceptions.BadExtension("Forbidden extension%s in PKCS #10 certificate request: %s" % ( - "" if len(forbidden_extensions) == 1 else "s", - ", ".join(forbidden_extensions))) - - - def check_valid_request_ca(self): - """ - Check this certification request to see whether it's a valid - request for an RPKI CA certificate. - - Throws an exception if the request isn't valid, so if this method - returns at all, the request is ok. - """ - - self.check_valid_request_common() - - alg = self.get_POW().getSignatureAlgorithm() - bc = self.get_POW().getBasicConstraints() - eku = self.get_POW().getEKU() - sias = self.get_POW().getSIA() - - if alg != rpki.oids.sha256WithRSAEncryption: - raise rpki.exceptions.BadPKCS10("PKCS #10 has bad signature algorithm for CA: %s" % alg) - - if bc is None or not bc[0] or bc[1] is not None: - raise rpki.exceptions.BadPKCS10("PKCS #10 CA bad basicConstraints") - - if eku is not None: - raise rpki.exceptions.BadPKCS10("PKCS #10 CA EKU not allowed") - - if sias is None: - raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA missing") - - caRepository, rpkiManifest, signedObject = sias - - if signedObject: - raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA must not have id-ad-signedObject") - - if not caRepository: - raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA must have id-ad-caRepository") - - if not any(uri.startswith("rsync://") for uri in caRepository): - raise 
rpki.exceptions.BadPKCS10("PKCS #10 CA SIA id-ad-caRepository contains no rsync URIs") - - if any(uri.startswith("rsync://") and not uri.endswith("/") for uri in caRepository): - raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA id-ad-caRepository does not end with slash") - - if not rpkiManifest: - raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA must have id-ad-rpkiManifest") - - if not any(uri.startswith("rsync://") for uri in rpkiManifest): - raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA id-ad-rpkiManifest contains no rsync URIs") - - if any(uri.startswith("rsync://") and uri.endswith("/") for uri in rpkiManifest): - raise rpki.exceptions.BadPKCS10("PKCS #10 CA SIA id-ad-rpkiManifest ends with slash") - - - def check_valid_request_ee(self): - """ - Check this certification request to see whether it's a valid - request for an RPKI EE certificate. - - Throws an exception if the request isn't valid, so if this method - returns at all, the request is ok. - - We're a bit less strict here than we are for either CA - certificates or BGPSEC router certificates, because the profile is - less tightly nailed down for unspecified-use RPKI EE certificates. - Future specific purposes may impose tighter constraints. - - Note that this method does NOT apply to so-called "infrastructure" - EE certificates (eg, the EE certificates embedded in manifests and - ROAs); those are constrained fairly tightly, but they're also - generated internally so we don't need to check them as user or - protocol input. 
- """ - - self.check_valid_request_common() - - alg = self.get_POW().getSignatureAlgorithm() - bc = self.get_POW().getBasicConstraints() - sia = self.get_POW().getSIA() - - caRepository, rpkiManifest, signedObject = sia or (None, None, None) - - if alg not in (rpki.oids.sha256WithRSAEncryption, rpki.oids.ecdsa_with_SHA256): - raise rpki.exceptions.BadPKCS10("PKCS #10 has bad signature algorithm for EE: %s" % alg) - - if bc is not None and (bc[0] or bc[1] is not None): - raise rpki.exceptions.BadPKCS10("PKCS #10 EE has bad basicConstraints") - - if caRepository: - raise rpki.exceptions.BadPKCS10("PKCS #10 EE must not have id-ad-caRepository") - - if rpkiManifest: - raise rpki.exceptions.BadPKCS10("PKCS #10 EE must not have id-ad-rpkiManifest") - - if signedObject and not any(uri.startswith("rsync://") for uri in signedObject): - raise rpki.exceptions.BadPKCS10("PKCS #10 EE SIA id-ad-signedObject contains no rsync URIs") - - - def check_valid_request_router(self): - """ - Check this certification request to see whether it's a valid - request for a BGPSEC router certificate. - - Throws an exception if the request isn't valid, so if this method - returns at all, the request is ok. - - draft-ietf-sidr-bgpsec-pki-profiles 3.2 says follow RFC 6487 3 - except where explicitly overriden, and does not override for SIA. - But draft-ietf-sidr-bgpsec-pki-profiles also says that router - certificates don't get SIA, while RFC 6487 requires SIA. So what - do we do with SIA in PKCS #10 for router certificates? - - For the moment, ignore it, but make sure we don't include it in - the certificate when we get to the code that generates that. 
- """ - - self.check_valid_request_ee() - - alg = self.get_POW().getSignatureAlgorithm() - eku = self.get_POW().getEKU() - - if alg != rpki.oids.ecdsa_with_SHA256: - raise rpki.exceptions.BadPKCS10("PKCS #10 has bad signature algorithm for router: %s" % alg) - - # Not really clear to me whether PKCS #10 should have EKU or not, so allow - # either, but insist that it be the right one if present. - - if eku is not None and rpki.oids.id_kp_bgpsec_router not in eku: - raise rpki.exceptions.BadPKCS10("PKCS #10 router must have EKU") - - - @classmethod - def create(cls, keypair, exts = None, is_ca = False, - caRepository = None, rpkiManifest = None, signedObject = None, - cn = None, sn = None, eku = None): - """ - Create a new request for a given keypair. - """ - - assert exts is None, "Old calling sequence to rpki.x509.PKCS10.create()" - - if cn is None: - cn = "".join(("%02X" % ord(i) for i in keypair.get_SKI())) - - if isinstance(caRepository, str): - caRepository = (caRepository,) - - if isinstance(rpkiManifest, str): - rpkiManifest = (rpkiManifest,) - - if isinstance(signedObject, str): - signedObject = (signedObject,) - - req = rpki.POW.PKCS10() - req.setVersion(0) - req.setSubject(X501DN.from_cn(cn, sn).get_POW()) - req.setPublicKey(keypair.get_POW()) - - if is_ca: - req.setBasicConstraints(True, None) - req.setKeyUsage(cls.expected_ca_keyUsage) - - if caRepository or rpkiManifest or signedObject: - req.setSIA(caRepository, rpkiManifest, signedObject) - - if eku: - req.setEKU(eku) - - req.sign(keypair.get_POW(), rpki.POW.SHA256_DIGEST) - return cls(POW = req) - -## @var generate_insecure_debug_only_rsa_key -# Debugging hack to let us save throwaway RSA keys from one debug -# session to the next. DO NOT USE THIS IN PRODUCTION. 
- -generate_insecure_debug_only_rsa_key = None - -class insecure_debug_only_rsa_key_generator(object): - - def __init__(self, filename, keyno = 0): - try: - try: - import gdbm as dbm_du_jour - except ImportError: - import dbm as dbm_du_jour - self.keyno = long(keyno) - self.filename = filename - self.db = dbm_du_jour.open(filename, "c") - except: - rpki.log.warn("insecure_debug_only_rsa_key_generator initialization FAILED, hack inoperative") - raise - - def __call__(self): - k = str(self.keyno) - try: - v = rpki.POW.Asymmetric.derReadPrivate(self.db[k]) - except KeyError: - v = rpki.POW.Asymmetric.generateRSA(2048) - self.db[k] = v.derWritePrivate() - self.keyno += 1 - return v - - -class PrivateKey(DER_object): - """ - Class to hold a Public/Private key pair. - """ - - POW_class = rpki.POW.Asymmetric - - def get_DER(self): - """ - Get the DER value of this keypair. - """ - self.check() - if self.DER: - return self.DER - if self.POW: - self.DER = self.POW.derWritePrivate() - return self.get_DER() - raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") - - def get_POW(self): - """ - Get the rpki.POW value of this keypair. - """ - self.check() - if not self.POW: # pylint: disable=E0203 - self.POW = rpki.POW.Asymmetric.derReadPrivate(self.get_DER()) - return self.POW - - def get_PEM(self): - """ - Get the PEM representation of this keypair. - """ - return self.get_POW().pemWritePrivate() - - def _set_PEM(self, pem): - """ - Set the POW value of this keypair from a PEM string. - """ - assert self.empty() - self.POW = self.POW_class.pemReadPrivate(pem) - - def get_public_DER(self): - """ - Get the DER encoding of the public key from this keypair. - """ - return self.get_POW().derWritePublic() - - def get_SKI(self): - """ - Calculate the SKI of this keypair. - """ - return self.get_POW().calculateSKI() - - def get_public(self): - """ - Convert the public key of this keypair into a PublicKey object. 
- """ - return PublicKey(DER = self.get_public_DER()) - -class PublicKey(DER_object): - """ - Class to hold a public key. - """ - - POW_class = rpki.POW.Asymmetric - - def get_DER(self): - """ - Get the DER value of this public key. - """ - self.check() - if self.DER: - return self.DER - if self.POW: - self.DER = self.POW.derWritePublic() - return self.get_DER() - raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") - - def get_POW(self): - """ - Get the rpki.POW value of this public key. - """ - self.check() - if not self.POW: # pylint: disable=E0203 - self.POW = rpki.POW.Asymmetric.derReadPublic(self.get_DER()) - return self.POW - - def get_PEM(self): - """ - Get the PEM representation of this public key. - """ - return self.get_POW().pemWritePublic() - - def _set_PEM(self, pem): - """ - Set the POW value of this public key from a PEM string. - """ - assert self.empty() - self.POW = self.POW_class.pemReadPublic(pem) - - def get_SKI(self): - """ - Calculate the SKI of this public key. - """ - return self.get_POW().calculateSKI() - -class KeyParams(DER_object): - """ - Wrapper for OpenSSL's asymmetric key parameter classes. - """ - - POW_class = rpki.POW.AsymmetricParams - - @classmethod - def generateEC(cls, curve = rpki.POW.EC_P256_CURVE): - return cls(POW = rpki.POW.AsymmetricParams.generateEC(curve = curve)) - -class RSA(PrivateKey): - """ - Class to hold an RSA key pair. - """ - - @classmethod - def generate(cls, keylength = 2048, quiet = False): - """ - Generate a new keypair. - """ - if not quiet: - rpki.log.debug("Generating new %d-bit RSA key" % keylength) - if generate_insecure_debug_only_rsa_key is not None: - return cls(POW = generate_insecure_debug_only_rsa_key()) - else: - return cls(POW = rpki.POW.Asymmetric.generateRSA(keylength)) - -class ECDSA(PrivateKey): - """ - Class to hold an ECDSA key pair. - """ - - @classmethod - def generate(cls, params = None, quiet = False): - """ - Generate a new keypair. 
- """ - - if params is None: - if not quiet: - rpki.log.debug("Generating new ECDSA key parameters") - params = KeyParams.generateEC() - - assert isinstance(params, KeyParams) - - if not quiet: - rpki.log.debug("Generating new ECDSA key") - - return cls(POW = rpki.POW.Asymmetric.generateFromParams(params.get_POW())) - -class CMS_object(DER_object): - """ - Abstract class to hold a CMS object. - """ - - econtent_oid = rpki.oids.id_data - POW_class = rpki.POW.CMS - - ## @var dump_on_verify_failure - # Set this to True to get dumpasn1 dumps of ASN.1 on CMS verify failures. - - dump_on_verify_failure = True - - ## @var debug_cms_certs - # Set this to True to log a lot of chatter about CMS certificates. - - debug_cms_certs = False - - ## @var dump_using_dumpasn1 - # Set this to use external dumpasn1 program, which is prettier and - # more informative than OpenSSL's CMS text dump, but which won't - # work if the dumpasn1 program isn't installed. - - dump_using_dumpasn1 = False - - ## @var require_crls - # Set this to False to make CMS CRLs optional in the cases where we - # would otherwise require them. Some day this option should go away - # and CRLs should be uncondtionally mandatory in such cases. - - require_crls = False - - ## @var allow_extra_certs - # Set this to True to allow CMS messages to contain CA certificates. - - allow_extra_certs = False - - ## @var allow_extra_crls - # Set this to True to allow CMS messages to contain multiple CRLs. - - allow_extra_crls = False - - ## @var print_on_der_error - # Set this to True to log alleged DER when we have trouble parsing - # it, in case it's really a Perl backtrace or something. - - print_on_der_error = True - - def get_DER(self): - """ - Get the DER value of this CMS_object. 
- """ - self.check() - if self.DER: - return self.DER - if self.POW: - self.DER = self.POW.derWrite() - return self.get_DER() - raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") - - def get_POW(self): - """ - Get the rpki.POW value of this CMS_object. - """ - self.check() - if not self.POW: # pylint: disable=E0203 - self.POW = self.POW_class.derRead(self.get_DER()) - return self.POW - - def get_signingTime(self): - """ - Extract signingTime from CMS signed attributes. - """ - return self.get_POW().signingTime() - - def verify(self, ta): - """ - Verify CMS wrapper and store inner content. - """ - - try: - cms = self.get_POW() - except (rpki.async.ExitNow, SystemExit): - raise - except Exception: - if self.print_on_der_error: - rpki.log.debug("Problem parsing DER CMS message, might not really be DER: %r" % - self.get_DER()) - raise rpki.exceptions.UnparsableCMSDER - - if cms.eContentType() != self.econtent_oid: - raise rpki.exceptions.WrongEContentType("Got CMS eContentType %s, expected %s" % ( - cms.eContentType(), self.econtent_oid)) - - certs = [X509(POW = x) for x in cms.certs()] - crls = [CRL(POW = c) for c in cms.crls()] - - if self.debug_cms_certs: - for x in certs: - rpki.log.debug("Received CMS cert issuer %s subject %s SKI %s" % ( - x.getIssuer(), x.getSubject(), x.hSKI())) - for c in crls: - rpki.log.debug("Received CMS CRL issuer %r" % (c.getIssuer(),)) - - store = rpki.POW.X509Store() - - now = rpki.sundial.now() - - trusted_ee = None - - for x in X509.normalize_chain(ta): - if self.debug_cms_certs: - rpki.log.debug("CMS trusted cert issuer %s subject %s SKI %s" % ( - x.getIssuer(), x.getSubject(), x.hSKI())) - if x.getNotAfter() < now: - raise rpki.exceptions.TrustedCMSCertHasExpired("Trusted CMS certificate has expired", - "%s (%s)" % (x.getSubject(), x.hSKI())) - if not x.is_CA(): - if trusted_ee is None: - trusted_ee = x - else: - raise rpki.exceptions.MultipleCMSEECert("Multiple CMS EE certificates", *("%s (%s)" % 
( - x.getSubject(), x.hSKI()) for x in ta if not x.is_CA())) - store.addTrust(x.get_POW()) - - if trusted_ee: - if self.debug_cms_certs: - rpki.log.debug("Trusted CMS EE cert issuer %s subject %s SKI %s" % ( - trusted_ee.getIssuer(), trusted_ee.getSubject(), trusted_ee.hSKI())) - if len(certs) > 1 or (len(certs) == 1 and - (certs[0].getSubject() != trusted_ee.getSubject() or - certs[0].getPublicKey() != trusted_ee.getPublicKey())): - raise rpki.exceptions.UnexpectedCMSCerts("Unexpected CMS certificates", *("%s (%s)" % ( - x.getSubject(), x.hSKI()) for x in certs)) - if crls: - raise rpki.exceptions.UnexpectedCMSCRLs("Unexpected CRLs", *("%s (%s)" % ( - c.getIssuer(), c.hAKI()) for c in crls)) - - else: - untrusted_ee = [x for x in certs if not x.is_CA()] - if len(untrusted_ee) < 1: - raise rpki.exceptions.MissingCMSEEcert - if len(untrusted_ee) > 1 or (not self.allow_extra_certs and len(certs) > len(untrusted_ee)): - raise rpki.exceptions.UnexpectedCMSCerts("Unexpected CMS certificates", *("%s (%s)" % ( - x.getSubject(), x.hSKI()) for x in certs)) - if len(crls) < 1: - if self.require_crls: - raise rpki.exceptions.MissingCMSCRL - else: - rpki.log.warn("MISSING CMS CRL! 
Ignoring per self.require_crls setting") - if len(crls) > 1 and not self.allow_extra_crls: - raise rpki.exceptions.UnexpectedCMSCRLs("Unexpected CRLs", *("%s (%s)" % ( - c.getIssuer(), c.hAKI()) for c in crls)) - - for x in certs: - if x.getNotAfter() < now: - raise rpki.exceptions.CMSCertHasExpired("CMS certificate has expired", "%s (%s)" % ( - x.getSubject(), x.hSKI())) - - for c in crls: - if c.getNextUpdate() < now: - rpki.log.warn("Stale BPKI CMS CRL (%s %s %s)" % (c.getNextUpdate(), c.getIssuer(), c.hAKI())) - - try: - content = cms.verify(store) - except (rpki.async.ExitNow, SystemExit): - raise - except Exception: - if self.dump_on_verify_failure: - if self.dump_using_dumpasn1: - dbg = self.dumpasn1() - else: - dbg = cms.pprint() - rpki.log.warn("CMS verification failed, dumping ASN.1 (%d octets):" % len(self.get_DER())) - for line in dbg.splitlines(): - rpki.log.warn(line) - raise rpki.exceptions.CMSVerificationFailed("CMS verification failed") - - return content - - def extract(self): - """ - Extract and store inner content from CMS wrapper without verifying - the CMS. - - DANGER WILL ROBINSON!!! - - Do not use this method on unvalidated data. Use the verify() - method instead. - - If you don't understand this warning, don't use this method. - """ - - try: - cms = self.get_POW() - except (rpki.async.ExitNow, SystemExit): - raise - except Exception: - raise rpki.exceptions.UnparsableCMSDER - - if cms.eContentType() != self.econtent_oid: - raise rpki.exceptions.WrongEContentType("Got CMS eContentType %s, expected %s" % ( - cms.eContentType(), self.econtent_oid)) - - return cms.verify(rpki.POW.X509Store(), None, - (rpki.POW.CMS_NOCRL | rpki.POW.CMS_NO_SIGNER_CERT_VERIFY | - rpki.POW.CMS_NO_ATTR_VERIFY | rpki.POW.CMS_NO_CONTENT_VERIFY)) - - - def sign(self, keypair, certs, crls = None, no_certs = False): - """ - Sign and wrap inner content. 
- """ - - rpki.log.trace() - - if isinstance(certs, X509): - cert = certs - certs = () - else: - cert = certs[0] - certs = certs[1:] - - if crls is None: - crls = () - elif isinstance(crls, CRL): - crls = (crls,) - - if self.debug_cms_certs: - rpki.log.debug("Signing with cert issuer %s subject %s SKI %s" % ( - cert.getIssuer(), cert.getSubject(), cert.hSKI())) - for i, c in enumerate(certs): - rpki.log.debug("Additional cert %d issuer %s subject %s SKI %s" % ( - i, c.getIssuer(), c.getSubject(), c.hSKI())) - - self._sign(cert.get_POW(), - keypair.get_POW(), - [x.get_POW() for x in certs], - [c.get_POW() for c in crls], - rpki.POW.CMS_NOCERTS if no_certs else 0) - - @property - def creation_timestamp(self): - """ - Time at which this object was created. - """ - return self.get_signingTime() - - -class Wrapped_CMS_object(CMS_object): - """ - Abstract class to hold CMS objects wrapping non-DER content (eg, XML - or VCard). - - CMS-wrapped objects are a little different from the other DER_object - types because the signed object is CMS wrapping some other kind of - inner content. A Wrapped_CMS_object is the outer CMS wrapped object - so that the usual DER and PEM operations do the obvious things, and - the inner content is handle via separate methods. - """ - - other_clear = ("content",) - - def get_content(self): - """ - Get the inner content of this Wrapped_CMS_object. - """ - if self.content is None: - raise rpki.exceptions.CMSContentNotSet("Inner content of CMS object %r is not set" % self) - return self.content - - def set_content(self, content): - """ - Set the (inner) content of this Wrapped_CMS_object, clearing the wrapper. - """ - self.clear() - self.content = content - - def verify(self, ta): - """ - Verify CMS wrapper and store inner content. - """ - - self.decode(CMS_object.verify(self, ta)) - return self.get_content() - - def extract(self): - """ - Extract and store inner content from CMS wrapper without verifying - the CMS. - - DANGER WILL ROBINSON!!! 
- - Do not use this method on unvalidated data. Use the verify() - method instead. - - If you don't understand this warning, don't use this method. - """ - - self.decode(CMS_object.extract(self)) - return self.get_content() - - def extract_if_needed(self): - """ - Extract inner content if needed. See caveats for .extract(), do - not use unless you really know what you are doing. - """ - - if self.content is None: - self.extract() - - def _sign(self, cert, keypair, certs, crls, flags): - """ - Internal method to call POW to do CMS signature. This is split - out from the .sign() API method to handle differences in how - different CMS-based POW classes handle the inner content. - """ - - cms = self.POW_class() - cms.sign(cert, keypair, self.encode(), certs, crls, self.econtent_oid, flags) - self.POW = cms - - -class DER_CMS_object(CMS_object): - """ - Abstract class for CMS-based objects with DER-encoded content - handled by C-level subclasses of rpki.POW.CMS. - """ - - def _sign(self, cert, keypair, certs, crls, flags): - self.get_POW().sign(cert, keypair, certs, crls, self.econtent_oid, flags) - - - def extract_if_needed(self): - """ - Extract inner content if needed. See caveats for .extract(), do - not use unless you really know what you are doing. - """ - - try: - self.get_POW().getVersion() - except rpki.POW.NotVerifiedError: - self.extract() - - -class SignedManifest(DER_CMS_object): - """ - Class to hold a signed manifest. - """ - - econtent_oid = rpki.oids.id_ct_rpkiManifest - POW_class = rpki.POW.Manifest - - def getThisUpdate(self): - """ - Get thisUpdate value from this manifest. - """ - return self.get_POW().getThisUpdate() - - def getNextUpdate(self): - """ - Get nextUpdate value from this manifest. - """ - return self.get_POW().getNextUpdate() - - @classmethod - def build(cls, serial, thisUpdate, nextUpdate, names_and_objs, keypair, certs, version = 0): - """ - Build a signed manifest. 
- """ - - filelist = [] - for name, obj in names_and_objs: - d = rpki.POW.Digest(rpki.POW.SHA256_DIGEST) - d.update(obj.get_DER()) - filelist.append((name.rpartition("/")[2], d.digest())) - filelist.sort(key = lambda x: x[0]) - - obj = cls.POW_class() - obj.setVersion(version) - obj.setManifestNumber(serial) - obj.setThisUpdate(thisUpdate) - obj.setNextUpdate(nextUpdate) - obj.setAlgorithm(rpki.oids.id_sha256) - obj.addFiles(filelist) - - self = cls(POW = obj) - self.sign(keypair, certs) - return self - -class ROA(DER_CMS_object): - """ - Class to hold a signed ROA. - """ - - econtent_oid = rpki.oids.id_ct_routeOriginAttestation - POW_class = rpki.POW.ROA - - @classmethod - def build(cls, asn, ipv4, ipv6, keypair, certs, version = 0): - """ - Build a ROA. - """ - ipv4 = ipv4.to_POW_roa_tuple() if ipv4 else None - ipv6 = ipv6.to_POW_roa_tuple() if ipv6 else None - obj = cls.POW_class() - obj.setVersion(version) - obj.setASID(asn) - obj.setPrefixes(ipv4 = ipv4, ipv6 = ipv6) - self = cls(POW = obj) - self.sign(keypair, certs) - return self - - def tracking_data(self, uri): - """ - Return a string containing data we want to log when tracking how - objects move through the RPKI system. - """ - msg = DER_CMS_object.tracking_data(self, uri) - try: - self.extract_if_needed() - asn = self.get_POW().getASID() - text = [] - for prefixes in self.get_POW().getPrefixes(): - if prefixes is not None: - for prefix, prefixlen, maxprefixlen in prefixes: - if maxprefixlen is None or prefixlen == maxprefixlen: - text.append("%s/%s" % (prefix, prefixlen)) - else: - text.append("%s/%s-%s" % (prefix, prefixlen, maxprefixlen)) - text.sort() - msg = "%s %s %s" % (msg, asn, ",".join(text)) - except: # pylint: disable=W0702 - pass - return msg - -class DeadDrop(object): - """ - Dead-drop utility for storing copies of CMS messages for debugging or - audit. 
At the moment this uses Maildir mailbox format, as it has - approximately the right properties and a number of useful tools for - manipulating it already exist. - """ - - def __init__(self, name): - self.name = name - self.pid = os.getpid() - self.maildir = mailbox.Maildir(name, factory = None, create = True) - self.warned = False - - def dump(self, obj): - try: - now = time.time() - msg = email.mime.application.MIMEApplication(obj.get_DER(), "x-rpki") - msg["Date"] = email.utils.formatdate(now) - msg["Subject"] = "Process %s dump of %r" % (self.pid, obj) - msg["Message-ID"] = email.utils.make_msgid() - msg["X-RPKI-PID"] = str(self.pid) - msg["X-RPKI-Object"] = repr(obj) - msg["X-RPKI-Timestamp"] = "%f" % now - self.maildir.add(msg) - self.warned = False - except Exception, e: - if not self.warned: - rpki.log.warn("Could not write to mailbox %s: %s" % (self.name, e)) - self.warned = True - -class XML_CMS_object(Wrapped_CMS_object): - """ - Class to hold CMS-wrapped XML protocol data. - """ - - econtent_oid = rpki.oids.id_ct_xml - - ## @var dump_outbound_cms - # If set, we write all outbound XML-CMS PDUs to disk, for debugging. - # If set, value should be a DeadDrop object. - - dump_outbound_cms = None - - ## @var dump_inbound_cms - # If set, we write all inbound XML-CMS PDUs to disk, for debugging. - # If set, value should be a DeadDrop object. - - dump_inbound_cms = None - - ## @var check_inbound_schema - # If set, perform RelaxNG schema check on inbound messages. - - check_inbound_schema = True - - ## @var check_outbound_schema - # If set, perform RelaxNG schema check on outbound messages. - - check_outbound_schema = False - - def encode(self): - """ - Encode inner content for signing. - """ - return lxml.etree.tostring(self.get_content(), - pretty_print = True, - encoding = self.encoding, - xml_declaration = True) - - def decode(self, xml): - """ - Decode XML and set inner content. 
- """ - self.content = lxml.etree.fromstring(xml) - - def pretty_print_content(self): - """ - Pretty print XML content of this message. - """ - return lxml.etree.tostring(self.get_content(), - pretty_print = True, - encoding = self.encoding, - xml_declaration = True) - - def schema_check(self): - """ - Handle XML RelaxNG schema check. - """ - try: - self.schema.assertValid(self.get_content()) - except lxml.etree.DocumentInvalid: - rpki.log.error("PDU failed schema check") - for line in self.pretty_print_content().splitlines(): - rpki.log.warn(line) - raise - - def dump_to_disk(self, prefix): - """ - Write DER of current message to disk, for debugging. - """ - f = open(prefix + rpki.sundial.now().isoformat() + "Z.cms", "wb") - f.write(self.get_DER()) - f.close() - - def wrap(self, msg, keypair, certs, crls = None): - """ - Wrap an XML PDU in CMS and return its DER encoding. - """ - rpki.log.trace() - if self.saxify is None: - self.set_content(msg) - else: - self.set_content(msg.toXML()) - if self.check_outbound_schema: - self.schema_check() - self.sign(keypair, certs, crls) - if self.dump_outbound_cms: - self.dump_outbound_cms.dump(self) - return self.get_DER() - - def unwrap(self, ta): - """ - Unwrap a CMS-wrapped XML PDU and return Python objects. - """ - if self.dump_inbound_cms: - self.dump_inbound_cms.dump(self) - self.verify(ta) - if self.check_inbound_schema: - self.schema_check() - if self.saxify is None: - return self.get_content() - else: - return self.saxify(self.get_content()) # pylint: disable=E1102 - - def check_replay(self, timestamp, *context): - """ - Check CMS signing-time in this object against a recorded - timestamp. Raises an exception if the recorded timestamp is more - recent, otherwise returns the new timestamp. 
- """ - new_timestamp = self.get_signingTime() - if timestamp is not None and timestamp > new_timestamp: - if context: - context = " (" + " ".join(context) + ")" - raise rpki.exceptions.CMSReplay( - "CMS replay: last message %s, this message %s%s" % ( - timestamp, new_timestamp, context)) - return new_timestamp - - def check_replay_sql(self, obj, *context): - """ - Like .check_replay() but gets recorded timestamp from - "last_cms_timestamp" field of an SQL object and stores the new - timestamp back in that same field. - """ - obj.last_cms_timestamp = self.check_replay(obj.last_cms_timestamp, *context) - obj.sql_mark_dirty() - - ## @var saxify - # SAX handler hook. Subclasses can set this to a SAX handler, in - # which case .unwrap() will call it and return the result. - # Otherwise, .unwrap() just returns a verified element tree. - - saxify = None - -class SignedReferral(XML_CMS_object): - encoding = "us-ascii" - schema = rpki.relaxng.myrpki - saxify = None - -class Ghostbuster(Wrapped_CMS_object): - """ - Class to hold Ghostbusters record (CMS-wrapped VCard). This is - quite minimal because we treat the VCard as an opaque byte string - managed by the back-end. - """ - - econtent_oid = rpki.oids.id_ct_rpkiGhostbusters - - def encode(self): - """ - Encode inner content for signing. At the moment we're treating - the VCard as an opaque byte string, so no encoding needed here. - """ - return self.get_content() - - def decode(self, vcard): - """ - Decode XML and set inner content. At the moment we're treating - the VCard as an opaque byte string, so no encoding needed here. - """ - self.content = vcard - - @classmethod - def build(cls, vcard, keypair, certs): - """ - Build a Ghostbuster record. - """ - self = cls() - self.set_content(vcard) - self.sign(keypair, certs) - return self - - -class CRL(DER_object): - """ - Class to hold a Certificate Revocation List. - """ - - POW_class = rpki.POW.CRL - - def get_DER(self): - """ - Get the DER value of this CRL. 
- """ - self.check() - if self.DER: - return self.DER - if self.POW: - self.DER = self.POW.derWrite() - return self.get_DER() - raise rpki.exceptions.DERObjectConversionError("No conversion path to DER available") - - def get_POW(self): - """ - Get the rpki.POW value of this CRL. - """ - self.check() - if not self.POW: # pylint: disable=E0203 - self.POW = rpki.POW.CRL.derRead(self.get_DER()) - return self.POW - - def getThisUpdate(self): - """ - Get thisUpdate value from this CRL. - """ - return self.get_POW().getThisUpdate() - - def getNextUpdate(self): - """ - Get nextUpdate value from this CRL. - """ - return self.get_POW().getNextUpdate() - - def getIssuer(self): - """ - Get issuer value of this CRL. - """ - return X501DN.from_POW(self.get_POW().getIssuer()) - - def getCRLNumber(self): - """ - Get CRL Number value for this CRL. - """ - return self.get_POW().getCRLNumber() - - @classmethod - def generate(cls, keypair, issuer, serial, thisUpdate, nextUpdate, revokedCertificates, version = 1): - """ - Generate a new CRL. - """ - crl = rpki.POW.CRL() - crl.setVersion(version) - crl.setIssuer(issuer.getSubject().get_POW()) - crl.setThisUpdate(thisUpdate) - crl.setNextUpdate(nextUpdate) - crl.setAKI(issuer.get_SKI()) - crl.setCRLNumber(serial) - crl.addRevocations(revokedCertificates) - crl.sign(keypair.get_POW()) - return cls(POW = crl) - - @property - def creation_timestamp(self): - """ - Time at which this object was created. - """ - return self.getThisUpdate() - -## @var uri_dispatch_map -# Map of known URI filename extensions and corresponding classes. - -uri_dispatch_map = { - ".cer" : X509, - ".crl" : CRL, - ".gbr" : Ghostbuster, - ".mft" : SignedManifest, - ".mnf" : SignedManifest, - ".roa" : ROA, - } - -def uri_dispatch(uri): - """ - Return the Python class object corresponding to a given URI. 
- """ - return uri_dispatch_map[os.path.splitext(uri)[1]] diff --git a/rpkid/rpki/xml_utils.py b/rpkid/rpki/xml_utils.py deleted file mode 100644 index f254fd11..00000000 --- a/rpkid/rpki/xml_utils.py +++ /dev/null @@ -1,494 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -XML utilities. 
-""" - -import xml.sax -import lxml.sax -import lxml.etree -import rpki.exceptions - -class sax_handler(xml.sax.handler.ContentHandler): - """ - SAX handler for RPKI protocols. - - This class provides some basic amenities for parsing protocol XML of - the kind we use in the RPKI protocols, including whacking all the - protocol element text into US-ASCII, simplifying accumulation of - text fields, and hiding some of the fun relating to XML namespaces. - - General assumption: by the time this parsing code gets invoked, the - XML has already passed RelaxNG validation, so we only have to check - for errors that the schema can't catch, and we don't have to play as - many XML namespace games. - """ - - def __init__(self): - """ - Initialize SAX handler. - """ - xml.sax.handler.ContentHandler.__init__(self) - self.text = "" - self.stack = [] - - def startElementNS(self, name, qname, attrs): - """ - Redirect startElementNS() events to startElement(). - """ - return self.startElement(name[1], attrs) - - def endElementNS(self, name, qname): - """ - Redirect endElementNS() events to endElement(). - """ - return self.endElement(name[1]) - - def characters(self, content): - """ - Accumulate a chuck of element content (text). - """ - self.text += content - - def startElement(self, name, attrs): - """ - Handle startElement() events. - - We maintain a stack of nested elements under construction so that - we can feed events directly to the current element rather than - having to pass them through all the nesting elements. - - If the stack is empty, this event is for the outermost element, so - we call a virtual method to create the corresponding object and - that's the object we'll be returning as our final result. 
- """ - - a = dict() - for k, v in attrs.items(): - if isinstance(k, tuple): - if k == ("http://www.w3.org/XML/1998/namespace", "lang"): - k = "xml:lang" - else: - assert k[0] is None - k = k[1] - a[k.encode("ascii")] = v.encode("ascii") - if len(self.stack) == 0: - assert not hasattr(self, "result") - self.result = self.create_top_level(name, a) - self.stack.append(self.result) - self.stack[-1].startElement(self.stack, name, a) - - def endElement(self, name): - """ - Handle endElement() events. Mostly this means handling any - accumulated element text. - """ - text = self.text.encode("ascii").strip() - self.text = "" - self.stack[-1].endElement(self.stack, name, text) - - @classmethod - def saxify(cls, elt): - """ - Create a one-off SAX parser, parse an ETree, return the result. - """ - self = cls() - lxml.sax.saxify(elt, self) - return self.result - - def create_top_level(self, name, attrs): - """ - Handle top-level PDU for this protocol. - """ - assert name == self.name and attrs["version"] == self.version - return self.pdu() - -class base_elt(object): - """ - Virtual base class for XML message elements. The left-right and - publication protocols use this. At least for now, the up-down - protocol does not, due to different design assumptions. - """ - - ## @var attributes - # XML attributes for this element. - attributes = () - - ## @var elements - # XML elements contained by this element. - elements = () - - ## @var booleans - # Boolean attributes (value "yes" or "no") for this element. - booleans = () - - def startElement(self, stack, name, attrs): - """ - Default startElement() handler: just process attributes. - """ - if name not in self.elements: - assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) - self.read_attrs(attrs) - - def endElement(self, stack, name, text): - """ - Default endElement() handler: just pop the stack. 
- """ - assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) - stack.pop() - - def toXML(self): - """ - Default toXML() element generator. - """ - return self.make_elt() - - def read_attrs(self, attrs): - """ - Template-driven attribute reader. - """ - for key in self.attributes: - val = attrs.get(key, None) - if isinstance(val, str) and val.isdigit() and not key.endswith("_handle"): - val = long(val) - setattr(self, key, val) - for key in self.booleans: - setattr(self, key, attrs.get(key, False)) - - def make_elt(self): - """ - XML element constructor. - """ - elt = lxml.etree.Element("{%s}%s" % (self.xmlns, self.element_name), nsmap = self.nsmap) - for key in self.attributes: - val = getattr(self, key, None) - if val is not None: - elt.set(key, str(val)) - for key in self.booleans: - if getattr(self, key, False): - elt.set(key, "yes") - return elt - - def make_b64elt(self, elt, name, value): - """ - Constructor for Base64-encoded subelement. - """ - if value is not None and not value.empty(): - lxml.etree.SubElement(elt, "{%s}%s" % (self.xmlns, name), nsmap = self.nsmap).text = value.get_Base64() - - def __str__(self): - """ - Convert a base_elt object to string format. - """ - return lxml.etree.tostring(self.toXML(), pretty_print = True, encoding = "us-ascii") - - @classmethod - def make_pdu(cls, **kargs): - """ - Generic PDU constructor. - """ - self = cls() - for k, v in kargs.items(): - if isinstance(v, bool): - v = 1 if v else 0 - setattr(self, k, v) - return self - -class text_elt(base_elt): - """ - Virtual base class for XML message elements that contain text. - """ - - ## @var text_attribute - # Name of the class attribute that holds the text value. - text_attribute = None - - def endElement(self, stack, name, text): - """ - Extract text from parsed XML. - """ - base_elt.endElement(self, stack, name, text) - setattr(self, self.text_attribute, text) - - def toXML(self): - """ - Insert text into generated XML. 
- """ - elt = self.make_elt() - elt.text = getattr(self, self.text_attribute) or None - return elt - -class data_elt(base_elt): - """ - Virtual base class for PDUs that map to SQL objects. These objects - all implement the create/set/get/list/destroy action attribute. - """ - - def endElement(self, stack, name, text): - """ - Default endElement handler for SQL-based objects. This assumes - that sub-elements are Base64-encoded using the sql_template - mechanism. - """ - if name in self.elements: - elt_type = self.sql_template.map.get(name) - assert elt_type is not None, "Couldn't find element type for %s, stack %s" % (name, stack) - setattr(self, name, elt_type(Base64 = text)) - else: - assert name == self.element_name, "Unexpected name %s, stack %s" % (name, stack) - stack.pop() - - def toXML(self): - """ - Default element generator for SQL-based objects. This assumes - that sub-elements are Base64-encoded DER objects. - """ - elt = self.make_elt() - for i in self.elements: - self.make_b64elt(elt, i, getattr(self, i, None)) - return elt - - def make_reply(self, r_pdu = None): - """ - Construct a reply PDU. - """ - if r_pdu is None: - r_pdu = self.__class__() - self.make_reply_clone_hook(r_pdu) - handle_name = self.element_name + "_handle" - setattr(r_pdu, handle_name, getattr(self, handle_name, None)) - else: - self.make_reply_clone_hook(r_pdu) - for b in r_pdu.booleans: - setattr(r_pdu, b, False) - r_pdu.action = self.action - r_pdu.tag = self.tag - return r_pdu - - def make_reply_clone_hook(self, r_pdu): - """ - Overridable hook. - """ - pass - - def serve_fetch_one(self): - """ - Find the object on which a get, set, or destroy method should - operate. - """ - r = self.serve_fetch_one_maybe() - if r is None: - raise rpki.exceptions.NotFound - return r - - def serve_pre_save_hook(self, q_pdu, r_pdu, cb, eb): - """ - Overridable hook. - """ - cb() - - def serve_post_save_hook(self, q_pdu, r_pdu, cb, eb): - """ - Overridable hook. 
- """ - cb() - - def serve_create(self, r_msg, cb, eb): - """ - Handle a create action. - """ - - r_pdu = self.make_reply() - - def one(): - self.sql_store() - setattr(r_pdu, self.sql_template.index, getattr(self, self.sql_template.index)) - self.serve_post_save_hook(self, r_pdu, two, eb) - - def two(): - r_msg.append(r_pdu) - cb() - - oops = self.serve_fetch_one_maybe() - if oops is not None: - raise rpki.exceptions.DuplicateObject, "Object already exists: %r[%r] %r[%r]" % (self, getattr(self, self.element_name + "_handle"), - oops, getattr(oops, oops.element_name + "_handle")) - - self.serve_pre_save_hook(self, r_pdu, one, eb) - - def serve_set(self, r_msg, cb, eb): - """ - Handle a set action. - """ - - db_pdu = self.serve_fetch_one() - r_pdu = self.make_reply() - for a in db_pdu.sql_template.columns[1:]: - v = getattr(self, a, None) - if v is not None: - setattr(db_pdu, a, v) - db_pdu.sql_mark_dirty() - - def one(): - db_pdu.sql_store() - db_pdu.serve_post_save_hook(self, r_pdu, two, eb) - - def two(): - r_msg.append(r_pdu) - cb() - - db_pdu.serve_pre_save_hook(self, r_pdu, one, eb) - - def serve_get(self, r_msg, cb, eb): - """ - Handle a get action. - """ - r_pdu = self.serve_fetch_one() - self.make_reply(r_pdu) - r_msg.append(r_pdu) - cb() - - def serve_list(self, r_msg, cb, eb): - """ - Handle a list action for non-self objects. - """ - for r_pdu in self.serve_fetch_all(): - self.make_reply(r_pdu) - r_msg.append(r_pdu) - cb() - - def serve_destroy_hook(self, cb, eb): - """ - Overridable hook. - """ - cb() - - def serve_destroy(self, r_msg, cb, eb): - """ - Handle a destroy action. - """ - def done(): - db_pdu.sql_delete() - r_msg.append(self.make_reply()) - cb() - db_pdu = self.serve_fetch_one() - db_pdu.serve_destroy_hook(done, eb) - - def serve_dispatch(self, r_msg, cb, eb): - """ - Action dispatch handler. 
- """ - dispatch = { "create" : self.serve_create, - "set" : self.serve_set, - "get" : self.serve_get, - "list" : self.serve_list, - "destroy" : self.serve_destroy } - if self.action not in dispatch: - raise rpki.exceptions.BadQuery, "Unexpected query: action %s" % self.action - dispatch[self.action](r_msg, cb, eb) - - def unimplemented_control(self, *controls): - """ - Uniform handling for unimplemented control operations. - """ - unimplemented = [x for x in controls if getattr(self, x, False)] - if unimplemented: - raise rpki.exceptions.NotImplementedYet, "Unimplemented control %s" % ", ".join(unimplemented) - -class msg(list): - """ - Generic top-level PDU. - """ - - def startElement(self, stack, name, attrs): - """ - Handle top-level PDU. - """ - if name == "msg": - assert self.version == int(attrs["version"]) - self.type = attrs["type"] - else: - elt = self.pdus[name]() - self.append(elt) - stack.append(elt) - elt.startElement(stack, name, attrs) - - def endElement(self, stack, name, text): - """ - Handle top-level PDU. - """ - assert name == "msg", "Unexpected name %s, stack %s" % (name, stack) - assert len(stack) == 1 - stack.pop() - - def __str__(self): - """ - Convert msg object to string. - """ - return lxml.etree.tostring(self.toXML(), pretty_print = True, encoding = "us-ascii") - - def toXML(self): - """ - Generate top-level PDU. - """ - elt = lxml.etree.Element("{%s}msg" % (self.xmlns), nsmap = self.nsmap, version = str(self.version), type = self.type) - elt.extend([i.toXML() for i in self]) - return elt - - @classmethod - def query(cls, *args): - """ - Create a query PDU. - """ - self = cls(args) - self.type = "query" - return self - - @classmethod - def reply(cls, *args): - """ - Create a reply PDU. - """ - self = cls(args) - self.type = "reply" - return self - - def is_query(self): - """ - Is this msg a query? - """ - return self.type == "query" - - def is_reply(self): - """ - Is this msg a reply? 
- """ - return self.type == "reply" diff --git a/rpkid/rpkic b/rpkid/rpkic deleted file mode 100755 index 333a5eb7..00000000 --- a/rpkid/rpkic +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -# $Id$ -# -# Copyright (C) 2010-2011 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -if __name__ == "__main__": - import rpki.rpkic - rpki.rpkic.main() diff --git a/rpkid/rpkid b/rpkid/rpkid deleted file mode 100755 index a4cc6cd3..00000000 --- a/rpkid/rpkid +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python - -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -if __name__ == "__main__": - import rpki.rpkid - rpki.rpkid.main() diff --git a/rpkid/rpkid.sql b/rpkid/rpkid.sql deleted file mode 100644 index a7e3dc0a..00000000 --- a/rpkid/rpkid.sql +++ /dev/null @@ -1,250 +0,0 @@ --- $Id$ - --- Copyright (C) 2009--2011 Internet Systems Consortium ("ISC") --- --- Permission to use, copy, modify, and distribute this software for any --- purpose with or without fee is hereby granted, provided that the above --- copyright notice and this permission notice appear in all copies. --- --- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH --- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY --- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, --- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM --- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE --- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR --- PERFORMANCE OF THIS SOFTWARE. - --- Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") --- --- Permission to use, copy, modify, and distribute this software for any --- purpose with or without fee is hereby granted, provided that the above --- copyright notice and this permission notice appear in all copies. --- --- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH --- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY --- AND FITNESS. 
IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, --- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM --- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE --- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR --- PERFORMANCE OF THIS SOFTWARE. - --- SQL objects needed by the RPKI engine (rpkid.py). - --- DROP TABLE commands must be in correct (reverse dependency) order --- to satisfy FOREIGN KEY constraints. - -DROP TABLE IF EXISTS ee_cert; -DROP TABLE IF EXISTS ghostbuster; -DROP TABLE IF EXISTS roa_prefix; -DROP TABLE IF EXISTS roa; -DROP TABLE IF EXISTS revoked_cert; -DROP TABLE IF EXISTS child_cert; -DROP TABLE IF EXISTS child; -DROP TABLE IF EXISTS ca_detail; -DROP TABLE IF EXISTS ca; -DROP TABLE IF EXISTS parent; -DROP TABLE IF EXISTS repository; -DROP TABLE IF EXISTS bsc; -DROP TABLE IF EXISTS self; - -CREATE TABLE self ( - self_id SERIAL NOT NULL, - self_handle VARCHAR(255) NOT NULL, - use_hsm BOOLEAN NOT NULL DEFAULT FALSE, - crl_interval BIGINT UNSIGNED, - regen_margin BIGINT UNSIGNED, - bpki_cert LONGBLOB, - bpki_glue LONGBLOB, - PRIMARY KEY (self_id), - UNIQUE (self_handle) -) ENGINE=InnoDB; - -CREATE TABLE bsc ( - bsc_id SERIAL NOT NULL, - bsc_handle VARCHAR(255) NOT NULL, - private_key_id LONGBLOB, - pkcs10_request LONGBLOB, - hash_alg ENUM ('sha256'), - signing_cert LONGBLOB, - signing_cert_crl LONGBLOB, - self_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (bsc_id), - CONSTRAINT bsc_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - UNIQUE (self_id, bsc_handle) -) ENGINE=InnoDB; - -CREATE TABLE repository ( - repository_id SERIAL NOT NULL, - repository_handle VARCHAR(255) NOT NULL, - peer_contact_uri TEXT, - bpki_cert LONGBLOB, - bpki_glue LONGBLOB, - last_cms_timestamp DATETIME, - bsc_id BIGINT UNSIGNED NOT NULL, - self_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (repository_id), - CONSTRAINT repository_self_id - FOREIGN KEY (self_id) 
REFERENCES self (self_id) ON DELETE CASCADE, - CONSTRAINT repository_bsc_id - FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, - UNIQUE (self_id, repository_handle) -) ENGINE=InnoDB; - -CREATE TABLE parent ( - parent_id SERIAL NOT NULL, - parent_handle VARCHAR(255) NOT NULL, - bpki_cms_cert LONGBLOB, - bpki_cms_glue LONGBLOB, - peer_contact_uri TEXT, - sia_base TEXT, - sender_name TEXT, - recipient_name TEXT, - last_cms_timestamp DATETIME, - self_id BIGINT UNSIGNED NOT NULL, - bsc_id BIGINT UNSIGNED NOT NULL, - repository_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (parent_id), - CONSTRAINT parent_repository_id - FOREIGN KEY (repository_id) REFERENCES repository (repository_id) ON DELETE CASCADE, - CONSTRAINT parent_bsc_id - FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, - CONSTRAINT parent_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - UNIQUE (self_id, parent_handle) -) ENGINE=InnoDB; - -CREATE TABLE ca ( - ca_id SERIAL NOT NULL, - last_crl_sn BIGINT UNSIGNED NOT NULL, - last_manifest_sn BIGINT UNSIGNED NOT NULL, - next_manifest_update DATETIME, - next_crl_update DATETIME, - last_issued_sn BIGINT UNSIGNED NOT NULL, - sia_uri TEXT, - parent_resource_class TEXT, - parent_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (ca_id), - CONSTRAINT ca_parent_id - FOREIGN KEY (parent_id) REFERENCES parent (parent_id) ON DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE ca_detail ( - ca_detail_id SERIAL NOT NULL, - public_key LONGBLOB, - private_key_id LONGBLOB, - latest_crl LONGBLOB, - crl_published DATETIME, - latest_ca_cert LONGBLOB, - manifest_private_key_id LONGBLOB, - manifest_public_key LONGBLOB, - latest_manifest_cert LONGBLOB, - latest_manifest LONGBLOB, - manifest_published DATETIME, - state ENUM ('pending', 'active', 'deprecated', 'revoked') NOT NULL, - ca_cert_uri TEXT, - ca_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (ca_detail_id), - CONSTRAINT ca_detail_ca_id - FOREIGN KEY (ca_id) REFERENCES ca (ca_id) ON 
DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE child ( - child_id SERIAL NOT NULL, - child_handle VARCHAR(255) NOT NULL, - bpki_cert LONGBLOB, - bpki_glue LONGBLOB, - last_cms_timestamp DATETIME, - self_id BIGINT UNSIGNED NOT NULL, - bsc_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (child_id), - CONSTRAINT child_bsc_id - FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, - CONSTRAINT child_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - UNIQUE (self_id, child_handle) -) ENGINE=InnoDB; - -CREATE TABLE child_cert ( - child_cert_id SERIAL NOT NULL, - cert LONGBLOB NOT NULL, - published DATETIME, - ski TINYBLOB NOT NULL, - child_id BIGINT UNSIGNED NOT NULL, - ca_detail_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (child_cert_id), - CONSTRAINT child_cert_ca_detail_id - FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE, - CONSTRAINT child_cert_child_id - FOREIGN KEY (child_id) REFERENCES child (child_id) ON DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE revoked_cert ( - revoked_cert_id SERIAL NOT NULL, - serial BIGINT UNSIGNED NOT NULL, - revoked DATETIME NOT NULL, - expires DATETIME NOT NULL, - ca_detail_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (revoked_cert_id), - CONSTRAINT revoked_cert_ca_detail_id - FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE roa ( - roa_id SERIAL NOT NULL, - asn BIGINT UNSIGNED NOT NULL, - cert LONGBLOB NOT NULL, - roa LONGBLOB NOT NULL, - published DATETIME, - self_id BIGINT UNSIGNED NOT NULL, - ca_detail_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (roa_id), - CONSTRAINT roa_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - CONSTRAINT roa_ca_detail_id - FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE roa_prefix ( - prefix VARCHAR(40) NOT NULL, - prefixlen TINYINT UNSIGNED NOT NULL, - max_prefixlen TINYINT 
UNSIGNED NOT NULL, - version TINYINT UNSIGNED NOT NULL, - roa_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (roa_id, prefix, prefixlen, max_prefixlen), - CONSTRAINT roa_prefix_roa_id - FOREIGN KEY (roa_id) REFERENCES roa (roa_id) ON DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE ghostbuster ( - ghostbuster_id SERIAL NOT NULL, - vcard LONGBLOB NOT NULL, - cert LONGBLOB NOT NULL, - ghostbuster LONGBLOB NOT NULL, - published DATETIME, - self_id BIGINT UNSIGNED NOT NULL, - ca_detail_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (ghostbuster_id), - CONSTRAINT ghostbuster_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - CONSTRAINT ghostbuster_ca_detail_id - FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE ee_cert ( - ee_cert_id SERIAL NOT NULL, - ski BINARY(20) NOT NULL, - cert LONGBLOB NOT NULL, - published DATETIME, - self_id BIGINT UNSIGNED NOT NULL, - ca_detail_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (ee_cert_id), - CONSTRAINT ee_cert_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - CONSTRAINT ee_cert_ca_detail_id - FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE -) ENGINE=InnoDB; - --- Local Variables: --- indent-tabs-mode: nil --- End: diff --git a/rpkid/setup.py b/rpkid/setup.py deleted file mode 100644 index 653d2d31..00000000 --- a/rpkid/setup.py +++ /dev/null @@ -1,93 +0,0 @@ -# $Id$ -# -# Copyright (C) 2011-2013 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -from distutils.core import setup, Extension -from glob import glob -import setup_extensions - -try: - import setup_autoconf as autoconf - -except ImportError: - class autoconf: - "Fake autoconf object to let --help work without autoconf." - sbindir = libexecdir = datarootdir = sysconfdir = CFLAGS = LDFLAGS = LIBS = "" - -try: - from rpki.version import VERSION - -except ImportError: - VERSION = "0.0" - -# pylint: disable=W0622 - -setup(name = "rpkitoolkit", - version = VERSION, - description = "RPKI Toolkit", - license = "BSD", - url = "http://rpki.net/", - cmdclass = {"build_scripts" : setup_extensions.build_scripts, - "install_scripts" : setup_extensions.install_scripts}, - packages = ["rpki", - "rpki.POW", - "rpki.irdb", - "rpki.gui", - "rpki.gui.app", - "rpki.gui.cacheview", - "rpki.gui.api", - "rpki.gui.routeview"], - ext_modules = [Extension("rpki.POW._POW", ["ext/POW.c"], - extra_compile_args = autoconf.CFLAGS.split(), - extra_link_args = (autoconf.LDFLAGS + " " + - autoconf.LIBS).split())], - package_data = {"rpki.gui.app" : - ["migrations/*.py", - "static/*/*", - "templates/*.html", - "templates/*/*.html", - "templatetags/*.py"], - "rpki.gui.cacheview" : - ["templates/*/*.html"]}, - scripts = [(autoconf.sbindir, - ["rpkic", - "rpki-confgen", - "rpki-start-servers", - "rpki-sql-backup", - "rpki-sql-setup", - "portal-gui/scripts/rpki-manage", - "portal-gui/scripts/rpkigui-query-routes", - "irbe_cli"]), - (autoconf.libexecdir, - ["irdbd", - "pubd", - "rootd", - "rpkid", - "portal-gui/scripts/rpkigui-import-routes", - "portal-gui/scripts/rpkigui-check-expired", - "portal-gui/scripts/rpkigui-rcynic", - 
"portal-gui/scripts/rpkigui-apache-conf-gen"])], - data_files = [(autoconf.sysconfdir + "/rpki", - ["rpki-confgen.xml"]), - (autoconf.datarootdir + "/rpki/wsgi", - ["portal-gui/rpki.wsgi"]), - (autoconf.datarootdir + "/rpki/media/css", - glob("rpki/gui/app/static/css/*")), - (autoconf.datarootdir + "/rpki/media/js", - glob("rpki/gui/app/static/js/*")), - (autoconf.datarootdir + "/rpki/media/img", - glob("rpki/gui/app/static/img/*")), - (autoconf.datarootdir + "/rpki/upgrade-scripts", - glob("upgrade-scripts/*"))]) diff --git a/rpkid/setup_extensions.py b/rpkid/setup_extensions.py deleted file mode 100644 index 12b123aa..00000000 --- a/rpkid/setup_extensions.py +++ /dev/null @@ -1,94 +0,0 @@ -# $Id$ -# -# This module extends the stock distutils install_setup code to -# support installation into multiple target directories, with -# semantics similar to what distutils already supports for -# script_files. The bulk of this code is taken directly from the -# stock distutils package, with minor changes. As such, I consider -# this to be a derivative work of the distutils package for copyright -# purposes. - -from distutils.util import change_root, convert_path -from distutils.command.build_scripts import build_scripts as _build_scripts -from distutils.command.install_scripts import install_scripts as _install_scripts -from distutils import log -from stat import S_IMODE -import os - -class build_scripts(_build_scripts): - """ - Hacked version of distutils.build_scripts, designed to support - multiple target installation directories like install_data does. - - [(target_directory, [list_of_source_scripts]), ...] - - Most of the real work is in the companion hacked install_scripts, - but we need to tweak the list of source files that build_scripts - pulls out of the Distribution object. 
- """ - - def finalize_options(self): - _build_scripts.finalize_options(self) - self.scripts = [] - for script in self.distribution.scripts: - if isinstance(script, str): - self.scripts.append(script) - else: - self.scripts.extend(script[1]) - -class install_scripts(_install_scripts): - """ - Hacked version of distutils.install_scripts, designed to support - multiple target installation directories like install_data does. - - [(target_directory, [list_of_source_scripts]), ...] - - The code here is a tweaked combination of what the stock - install_scripts and install_data classes do. - """ - - user_options = _install_scripts.user_options + [ - ("root=", None, "install everything relative to this alternate root directory")] - - def initialize_options(self): - _install_scripts.initialize_options(self) - self.outfiles = [] - self.root = None - - def finalize_options (self): - self.set_undefined_options("build", - ("build_scripts", "build_dir")) - self.set_undefined_options("install", - ("install_scripts", "install_dir"), - ("root", "root"), - ("force", "force"), - ("skip_build", "skip_build")) - - def run(self): - if not self.skip_build: - self.run_command("build_scripts") - for script in self.distribution.scripts: - if isinstance(script, str): - fn = os.path.join(self.build_dir, os.path.basename(convert_path(script))) - out, _ = self.copy_file(fn, self.install_dir) - self.outfiles.append(out) - else: - dn = convert_path(script[0]) - if not os.path.isabs(dn): - dn = os.path.join(self.install_dir, dn) - elif self.root: - dn = change_root(self.root, dn) - self.mkpath(dn) - if not script[1]: - self.outfiles.append(dn) - else: - for s in script[1]: - fn = os.path.join(self.build_dir, os.path.basename(convert_path(s))) - out, _ = self.copy_file(fn, dn) - self.outfiles.append(out) - if os.name == "posix": - for fn in self.get_outputs(): - mode = S_IMODE(os.stat(fn).st_mode) | 0555 - log.info("changing mode of %s to %o", fn, mode) - if not self.dry_run: - os.chmod(fn, mode) 
diff --git a/rpkid/tests/Makefile.in b/rpkid/tests/Makefile.in deleted file mode 100644 index b63e8dc3..00000000 --- a/rpkid/tests/Makefile.in +++ /dev/null @@ -1,91 +0,0 @@ -# $Id$ - -PYTHON = @PYTHON@ -abs_top_builddir = @abs_top_builddir@ - -all: protocol-samples - -clean: - rm -rf smoketest.dir left-right-protocol-samples publication-protocol-samples yamltest.dir rcynic.xml rcynic-data - -protocol-samples: left-right-protocol-samples/.stamp publication-protocol-samples/.stamp - -left-right-protocol-samples/.stamp: left-right-protocol-samples.xml split-protocol-samples.xsl - rm -rf left-right-protocol-samples - mkdir left-right-protocol-samples - xsltproc --param verbose 0 --stringparam dir left-right-protocol-samples split-protocol-samples.xsl left-right-protocol-samples.xml - touch $@ - -publication-protocol-samples/.stamp: publication-protocol-samples.xml split-protocol-samples.xsl - rm -rf publication-protocol-samples - mkdir publication-protocol-samples - xsltproc --param verbose 0 --stringparam dir publication-protocol-samples split-protocol-samples.xsl publication-protocol-samples.xml - touch $@ - -parse-test: protocol-samples - ${PYTHON} xml-parse-test.py - -all-tests:: parse-test - -all-tests:: - ${PYTHON} smoketest.py smoketest.1.yaml - -all-tests:: - ${PYTHON} smoketest.py smoketest.2.yaml - -test all-tests:: - ${PYTHON} smoketest.py smoketest.3.yaml - -all-tests:: - ${PYTHON} smoketest.py smoketest.4.yaml - -all-tests:: - ${PYTHON} smoketest.py smoketest.5.yaml - -test all-tests:: - ${PYTHON} smoketest.py smoketest.6.yaml - -all-tests:: - ${PYTHON} smoketest.py smoketest.7.yaml - -profile: all - find smoketest.dir -name '*.prof' -delete - ${PYTHON} smoketest.py smoketest.2.yaml -p - for i in smoketest.dir/*.prof; do ${PYTHON} -c "import pstats;pstats.Stats('$$i').sort_stats('time').print_stats()"; done - -# This isn't a full exercise of the yamltest framework, but is -# probably as good as we can do under make. 
- -YAMLTEST_CONFIG = smoketest.1.yaml - -yamltest: - rm -rf yamltest.dir rcynic-data - ${PYTHON} sql-cleaner.py - ${PYTHON} yamltest.py ${YAMLTEST_CONFIG} - -YAMLCONF_CONFIG = ${YAMLTEST_CONFIG} - -yamlconf: - rm -rf yamltest.dir rcynic-data - ${PYTHON} sql-cleaner.py - ${PYTHON} yamlconf.py --loopback ${YAMLCONF_CONFIG} - @echo - ${PYTHON} yamltest.py --skip_config --synchronize ${YAMLCONF_CONFIG} - -yamltest-resume yamlconf-resume: - ${PYTHON} yamltest.py --skip_config ${YAMLCONF_CONFIG} - -yamlconf-profile: - rm -rf yamltest.dir rcynic-data - ${PYTHON} sql-cleaner.py - ${PYTHON} yamlconf.py --loopback --profile yamlconf.prof ${YAMLCONF_CONFIG} - @echo - ${PYTHON} yamltest.py --skip_config --synchronize --profile ${YAMLCONF_CONFIG} - -backup: - ${PYTHON} sql-dumper.py - tar cvvJf yamltest.backup.$$(TZ='' date +%Y.%m.%d.%H.%M.%S).txz screenlog.* yamltest.dir backup.*.sql - rm backup.*.sql - -distclean: clean - rm -f rcynic.xml Makefile diff --git a/rpkid/tests/left-right-protocol-samples.xml b/rpkid/tests/left-right-protocol-samples.xml deleted file mode 100644 index 7b97386d..00000000 --- a/rpkid/tests/left-right-protocol-samples.xml +++ /dev/null @@ -1,1093 +0,0 @@ - - - - - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - 
hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - 
DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - 
MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - 
YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - - - - - - - - MIIDHTCCAgWgAwIBAgIJAKUUCoKn9ovVMA0GCSqGSIb3DQEBBQUAMCYxJDAiBgNV - BAMTG1Rlc3QgQ2VydGlmaWNhdGUgQWxpY2UgUm9vdDAeFw0wNzA4MDExOTUzMDda - Fw0wNzA4MzExOTUzMDdaMCQxIjAgBgNVBAMTGVRlc3QgQ2VydGlmaWNhdGUgQWxp - Y2UgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDmLngkGT5kWsXd - IgLeV+5zNvcDt0+D4cds1cu+bw6Y/23z1+ooA8fU1gXQ28bl6ELM8WRLHgcntqzr - 5UX6S1xPdNfFYt8z4E1ZuvwCPsxcSwVdlYRvzAGNQivDpcJ75Mf5DTeDpr6wm7yn - 2pzxvQIet5djOX51RVGA3hOwCbhq2ceHs0ZruWG3T70H3Sa1ZVxP7m0DJlsSZa6v - 3oEeFOKZQlqrgeU74mJyLAGx/fNbIw+UBrvejfjZobIv985vQ06DZ5S2AquQ2bht - O/2bW3yqeOjH98YK0zlOpYtaZ2fyx4JLjHCspoki6+4W9UG+TuqdkB20mRsr25XT - 9kLuwIGZAgMBAAGjUDBOMAwGA1UdEwQFMAMBAf8wHQYDVR0OBBYEFF6I4IR33h/s - vOa4Vsw2icPo8TgaMB8GA1UdIwQYMBaAFG9ed1KlOQDyB+k7Yeb8LSjG5FDtMA0G - CSqGSIb3DQEBBQUAA4IBAQDVzBuGyXIq/rfMjoNKIHTUgppkc+FjS02cFASpB5mk - ksSpGWYHMZKlqz47qDi44KAG+kmPIPOT0em81+/VGeY1oizJyKSeNDhNboth5oTu - ShDr4flTQCoYvRxm1wh8WIIg09nwibzGztuV1XxtdzfQV5kK5bMBlDXnUfAYydsO - jc52x5f4tgdcfBhjnMzkCAx2kvw5Wp3NekkOKl5YYnPK++zT9IBwqrqJmsJvyLPO - vvqVBYkoBWRbmcy6wVU8JpYegNNgVRbi6zeAq33gS75m9uy+4z8Ql6DqVF0s/y+/ - 240tLCW62X98EzrALKsxhkqVZCtdc5HSRaOQr0K3I03S - - - - - - - cmVxdWVzdAo= - - - - - - - MIIDHTCCAgWgAwIBAgIJAKUUCoKn9ovVMA0GCSqGSIb3DQEBBQUAMCYxJDAiBgNV - BAMTG1Rlc3QgQ2VydGlmaWNhdGUgQWxpY2UgUm9vdDAeFw0wNzA4MDExOTUzMDda - Fw0wNzA4MzExOTUzMDdaMCQxIjAgBgNVBAMTGVRlc3QgQ2VydGlmaWNhdGUgQWxp - Y2UgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDmLngkGT5kWsXd - IgLeV+5zNvcDt0+D4cds1cu+bw6Y/23z1+ooA8fU1gXQ28bl6ELM8WRLHgcntqzr - 5UX6S1xPdNfFYt8z4E1ZuvwCPsxcSwVdlYRvzAGNQivDpcJ75Mf5DTeDpr6wm7yn - 2pzxvQIet5djOX51RVGA3hOwCbhq2ceHs0ZruWG3T70H3Sa1ZVxP7m0DJlsSZa6v - 3oEeFOKZQlqrgeU74mJyLAGx/fNbIw+UBrvejfjZobIv985vQ06DZ5S2AquQ2bht - O/2bW3yqeOjH98YK0zlOpYtaZ2fyx4JLjHCspoki6+4W9UG+TuqdkB20mRsr25XT - 9kLuwIGZAgMBAAGjUDBOMAwGA1UdEwQFMAMBAf8wHQYDVR0OBBYEFF6I4IR33h/s - 
vOa4Vsw2icPo8TgaMB8GA1UdIwQYMBaAFG9ed1KlOQDyB+k7Yeb8LSjG5FDtMA0G - CSqGSIb3DQEBBQUAA4IBAQDVzBuGyXIq/rfMjoNKIHTUgppkc+FjS02cFASpB5mk - ksSpGWYHMZKlqz47qDi44KAG+kmPIPOT0em81+/VGeY1oizJyKSeNDhNboth5oTu - ShDr4flTQCoYvRxm1wh8WIIg09nwibzGztuV1XxtdzfQV5kK5bMBlDXnUfAYydsO - jc52x5f4tgdcfBhjnMzkCAx2kvw5Wp3NekkOKl5YYnPK++zT9IBwqrqJmsJvyLPO - vvqVBYkoBWRbmcy6wVU8JpYegNNgVRbi6zeAq33gS75m9uy+4z8Ql6DqVF0s/y+/ - 240tLCW62X98EzrALKsxhkqVZCtdc5HSRaOQr0K3I03S - - - MIIBfjBoAgEBMA0GCSqGSIb3DQEBCwUAMCYxJDAiBgNVBAMTG1Rlc3QgQ2VydGlm - aWNhdGUgUklSIFNFTEYtMRcNMDgwNTAxMDQ1MjAxWhcNMDgwNTMxMDQ1MjAxWqAO - MAwwCgYDVR0UBAMCAQEwDQYJKoZIhvcNAQELBQADggEBACTbbaYh+f4EtXFIKPwH - K2NYq/MrhE2BnHDyA43siryddtac1E2bOtXPkC74nY5yGm4wZU07qPovJNGu1McG - J2hV2uUyAN00lJU3EikrS1ewz7vqjINar1ZUMDkh0wMYKLB9S8SdwNvCf1vcjshz - yasBRse9PCH1R0bmDaP8FZM47P55dKiijaN87HQKyZPOExFslnWH+Nr+mAF1xost - pwGcc3jreVZWbtQ2RdUDJYcNrSSCH8JYqd5ZgAYcE53xxy43rKcULz054GDFcS/B - rprwJgfrjkPttAl80cfrVOUl77ZFfFxzOeHCmQMl9VSoCxmWvnBCBBO4H7meJ7NO - gyc= - - - - - - - - - - - - - - - - MIIDHTCCAgWgAwIBAgIJAKUUCoKn9ovVMA0GCSqGSIb3DQEBBQUAMCYxJDAiBgNV - BAMTG1Rlc3QgQ2VydGlmaWNhdGUgQWxpY2UgUm9vdDAeFw0wNzA4MDExOTUzMDda - Fw0wNzA4MzExOTUzMDdaMCQxIjAgBgNVBAMTGVRlc3QgQ2VydGlmaWNhdGUgQWxp - Y2UgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDmLngkGT5kWsXd - IgLeV+5zNvcDt0+D4cds1cu+bw6Y/23z1+ooA8fU1gXQ28bl6ELM8WRLHgcntqzr - 5UX6S1xPdNfFYt8z4E1ZuvwCPsxcSwVdlYRvzAGNQivDpcJ75Mf5DTeDpr6wm7yn - 2pzxvQIet5djOX51RVGA3hOwCbhq2ceHs0ZruWG3T70H3Sa1ZVxP7m0DJlsSZa6v - 3oEeFOKZQlqrgeU74mJyLAGx/fNbIw+UBrvejfjZobIv985vQ06DZ5S2AquQ2bht - O/2bW3yqeOjH98YK0zlOpYtaZ2fyx4JLjHCspoki6+4W9UG+TuqdkB20mRsr25XT - 9kLuwIGZAgMBAAGjUDBOMAwGA1UdEwQFMAMBAf8wHQYDVR0OBBYEFF6I4IR33h/s - vOa4Vsw2icPo8TgaMB8GA1UdIwQYMBaAFG9ed1KlOQDyB+k7Yeb8LSjG5FDtMA0G - CSqGSIb3DQEBBQUAA4IBAQDVzBuGyXIq/rfMjoNKIHTUgppkc+FjS02cFASpB5mk - ksSpGWYHMZKlqz47qDi44KAG+kmPIPOT0em81+/VGeY1oizJyKSeNDhNboth5oTu - ShDr4flTQCoYvRxm1wh8WIIg09nwibzGztuV1XxtdzfQV5kK5bMBlDXnUfAYydsO - 
jc52x5f4tgdcfBhjnMzkCAx2kvw5Wp3NekkOKl5YYnPK++zT9IBwqrqJmsJvyLPO - vvqVBYkoBWRbmcy6wVU8JpYegNNgVRbi6zeAq33gS75m9uy+4z8Ql6DqVF0s/y+/ - 240tLCW62X98EzrALKsxhkqVZCtdc5HSRaOQr0K3I03S - - - - - - - - - - - - MIIDHTCCAgWgAwIBAgIJAKUUCoKn9ovVMA0GCSqGSIb3DQEBBQUAMCYxJDAiBgNV - BAMTG1Rlc3QgQ2VydGlmaWNhdGUgQWxpY2UgUm9vdDAeFw0wNzA4MDExOTUzMDda - Fw0wNzA4MzExOTUzMDdaMCQxIjAgBgNVBAMTGVRlc3QgQ2VydGlmaWNhdGUgQWxp - Y2UgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDmLngkGT5kWsXd - IgLeV+5zNvcDt0+D4cds1cu+bw6Y/23z1+ooA8fU1gXQ28bl6ELM8WRLHgcntqzr - 5UX6S1xPdNfFYt8z4E1ZuvwCPsxcSwVdlYRvzAGNQivDpcJ75Mf5DTeDpr6wm7yn - 2pzxvQIet5djOX51RVGA3hOwCbhq2ceHs0ZruWG3T70H3Sa1ZVxP7m0DJlsSZa6v - 3oEeFOKZQlqrgeU74mJyLAGx/fNbIw+UBrvejfjZobIv985vQ06DZ5S2AquQ2bht - O/2bW3yqeOjH98YK0zlOpYtaZ2fyx4JLjHCspoki6+4W9UG+TuqdkB20mRsr25XT - 9kLuwIGZAgMBAAGjUDBOMAwGA1UdEwQFMAMBAf8wHQYDVR0OBBYEFF6I4IR33h/s - vOa4Vsw2icPo8TgaMB8GA1UdIwQYMBaAFG9ed1KlOQDyB+k7Yeb8LSjG5FDtMA0G - CSqGSIb3DQEBBQUAA4IBAQDVzBuGyXIq/rfMjoNKIHTUgppkc+FjS02cFASpB5mk - ksSpGWYHMZKlqz47qDi44KAG+kmPIPOT0em81+/VGeY1oizJyKSeNDhNboth5oTu - ShDr4flTQCoYvRxm1wh8WIIg09nwibzGztuV1XxtdzfQV5kK5bMBlDXnUfAYydsO - jc52x5f4tgdcfBhjnMzkCAx2kvw5Wp3NekkOKl5YYnPK++zT9IBwqrqJmsJvyLPO - vvqVBYkoBWRbmcy6wVU8JpYegNNgVRbi6zeAq33gS75m9uy+4z8Ql6DqVF0s/y+/ - 240tLCW62X98EzrALKsxhkqVZCtdc5HSRaOQr0K3I03S - - - - - - - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - 
diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - 
Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - - - - - 
MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - 
XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - 
diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - 
G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - 
- - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - 
cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 
2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - 
b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - 
cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - MIIBrjCBlwIBATANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQDEygwQzQzMDRCNjVDNTlBRDk3 - QUNFRDMzNDY2REVFRDdDMTU0OEM4MTZEFw0wOTA5MjgyMDUxNDlaFw0wOTA5MjgyMTUxNDla - oDAwLjAfBgNVHSMEGDAWgBQMQwS2XFmtl6ztM0Zt7tfBVIyBbTALBgNVHRQEBAICAWkwDQYJ - KoZIhvcNAQELBQADggEBAIRT1nriWsvZO9QtLKEiO7SZE3YZqMqDXS1auGBxEZtcLyF93ct6 - dstbiWHXjlHLztgePXh970BsTYhiVbKKnZvS0pjI8a9vr9b2Dyia3QG8ArV0zXVKhAGku5v+ - RG9d+f/VH0GMQ8ITcjJOKOaK0afv1KmQsydAb8KLKAGhCs7zeuztQG2k3YP6BE2OOPYK9fYk - EGHz0kPGp/oD/PJJfmPlKD4Uk4mSvM6e5ksgKg0BnxoU9RMkWjAeGVxk0F+SDG5sPmCsVOgB - fBk4i7H945v/zs7bLLMJxTs8+ao4iCDuknjbGhjWmi9xrTXDtcCXx607rPDkJQcJE2WnRS/U - HIA= - - - MIIHBQYJKoZIhvcNAQcCoIIG9jCCBvICAQMxDTALBglghkgBZQMEAgEwggEfBgsqhkiG9w0B - CRABGqCCAQ4EggEKMIIBBgICAWoYDzIwMDkwOTI4MjA1MTQ5WhgPMjAwOTA5MjgyMTUxNDla - BglghkgBZQMEAgEwgdIwRBYfREVNRXRseFpyWmVzN1ROR2JlN1h3VlNNZ1cwLmNybAMhAPgd - nO/fVdSWmPrnxJAf4JXrf0J/dHv9en+Tsqrz4WjcMEQWH2xkdnhjSEdkcjNvS0hjUGotZ3Vr - bWV0TlJaMC5yb2EDIQAxseZlGDtfsvDOBv1X2ElR8k/V78ynwSBGM22F5DYXUTBEFh94b3BO - R2NzQl9wN2VhZllxWGF0bVZWOEhaZDAucm9hAyEAKIKdRZhS1tawepRzVXtdP1imh6zPymWp - dCjYJUDqzY2gggQLMIIEBzCCAu+gAwIBAgIBATANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQD - EygwQzQzMDRCNjVDNTlBRDk3QUNFRDMzNDY2REVFRDdDMTU0OEM4MTZEMB4XDTA5MDkxODIx - NDE1NFoXDTEwMDkxMzExMDcwOVowMzExMC8GA1UEAxMoMEQ3MjU5REEyNEY5OTRFNTVCN0E2 - NkQxMDBEOUE5REJFMURGODIwNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKwo - COG8kQMKuAVMOe5eTlasUpFQ2OT2VNmJtJ7NPQ1rLm1ahVUcTXUn7p+ikmJYpwFRACZ6htgF - n51RL43lm/pITTSVc1A6fofkrnoNImwMG8Pj8Z46H6kbJOM69NW5asjvA5DfSu73cltGHPRg - 
DQqt1k/3+aWqPWiYS7OGbQdDYGmy3T5VNlc+DBzyAM2VxNrLNF5Imv1NbfLw0Bp/gvayeApe - AjhjraCP7ZQxyXesLbBZrjQz1MXpi4DOZtY8gYwaMNgeU56jR9tpM5IDY5zSPHKZyJVvLQnT - iQfMKasHYMcFDtDrRH7t+YQlmt40uby0YsIIcv5FWJf1OBHnyYcCAwEAAaOCASQwggEgMB0G - A1UdDgQWBBQNclnaJPmU5Vt6ZtEA2anb4d+CBzAfBgNVHSMEGDAWgBQMQwS2XFmtl6ztM0Zt - 7tfBVIyBbTBaBgNVHR8EUzBRME+gTaBLhklyc3luYzovL2FyaW4ucnBraS5uZXQvYXJpbi9p - c2MvSVNDLTk0LVovMi9ERU1FdGx4WnJaZXM3VE5HYmU3WHdWU01nVzAuY3JsMFgGCCsGAQUF - BwEBBEwwSjBIBggrBgEFBQcwAoY8cnN5bmM6Ly9hcmluLnJwa2kubmV0L2FyaW4vMS9ERU1F - dGx4WnJaZXM3VE5HYmU3WHdWU01nVzAuY2VyMBgGA1UdIAEB/wQOMAwwCgYIKwYBBQUHDgIw - DgYDVR0PAQH/BAQDAgeAMA0GCSqGSIb3DQEBCwUAA4IBAQCZtr0XdjKRitItHi7UfUx6hTp2 - BOwoaNoOCEKw+dRMDYEgfRKbWSRd3gyVR3F1DV41aT1vDm56+WoN2Td1WEi2H0q22f1iIKuS - m0MkOpdqVZGOYHLTErv22XzDf7ifdGo3RkW7QOQ3D1n6Qraft5AB3aHskCofvPx3CBGFHKWh - N5HXnh+J/Bly2EwxPYs4yibx6K8stnxwIwsmo7DvjdPwv+VnrmIb7pxOpvqHjEQEs7Wy9Y47 - NP3Ym2YLwbIqAuN6F9kF7DeCanBt0HeFqMsOowz11ll1xBAwcpz/bxVwyAwWUoJNncoJCrjz - n2gPGYKqW80qgQwL8vBLFtBevZbyMYIBqjCCAaYCAQOAFA1yWdok+ZTlW3pm0QDZqdvh34IH - MAsGCWCGSAFlAwQCAaBrMBoGCSqGSIb3DQEJAzENBgsqhkiG9w0BCRABGjAcBgkqhkiG9w0B - CQUxDxcNMDkwOTI4MjA1MTQ5WjAvBgkqhkiG9w0BCQQxIgQgYA8+0xE+taAr6cM6tEAt4Wh6 - BWT8Xu76a6YSZt9hb2kwDQYJKoZIhvcNAQEBBQAEggEAmD/WNppqwMtpQw+RkqIbcg3HT7fg - RRK+ehJfcyCqP/t7vUu65cAcz02gbT3LHZDkqtGD9WjgjoxSVNrYiS+4TEQbt0AXrSQFqr9F - ud2eujpeGpx56VVYgE/Jef9MfiYXSjWj9oveEWR1OdRFVCn6TW6+t1n6OMTNhnDxYt9t4NZV - OCK95aHm9vi7d8CMZfPnZMQuXiNmHberYkxLu5LZJ84C2GqGbyBllkFp2KUGKoWgMyeKkk0q - yML8lQJAFAyjnXJ+doGbqfTUpVH4q4drqRb73WbL0zf/Z2HGwhDlTmsAdjparWdQcfXIVrJF - ynS1fab9XZfj+VtBFKjooDjaLw== - - - MIIGnQYJKoZIhvcNAQcCoIIGjjCCBooCAQMxDTALBglghkgBZQMEAgEwMQYLKoZIhvcNAQkQ - ARigIgQgMB4CAg3lMBgwFgQCAAEwEDAGAwQAwAUEMAYDBADABQWgggSTMIIEjzCCA3egAwIB - AgIBAjANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQDEygwQzQzMDRCNjVDNTlBRDk3QUNFRDMz - NDY2REVFRDdDMTU0OEM4MTZEMB4XDTA5MDkxODIyNTkzM1oXDTEwMDkxMzExMDcwOVowMzEx - MC8GA1UEAxMoOTVEQkYxNzA3MTlEQUY3QTBBMURDM0UzRkEwQkE0OTlFQjRENDU5RDCCASIw - 
DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALpn7TNbq1aYIa9fQG660Rz3dUfpx/dZEOJc - /PD5sxLSBCgcMJKGUb7RGajyI4pbIwVax1w+A4Ie38YjSl6p95FdwBMDX2w6OwePdLwDC+0R - zCf2p/F4Go79glYssEKjFGYvUDwm8SwJ3dr8XdlgdjbT4zIrMZj9SVOgreeNGOU+jcn8HvX3 - 94/0w49JDzPwKmHzGSlBTunzQ4pYZyZ2R+rDKuTJqRslEdD5KOFjNV2s4owWkhJzCermBj2u - CFExS+0Fc0In9Q3X5PcTMp2L1Gj4sdgc2Kbc8VTWU7kgF5M/15HThgRy+Ldx/b05w22mJV7L - 6yMkNGfRpn4CxLFD0U8CAwEAAaOCAawwggGoMB0GA1UdDgQWBBSV2/FwcZ2vegodw+P6C6SZ - 601FnTAfBgNVHSMEGDAWgBQMQwS2XFmtl6ztM0Zt7tfBVIyBbTBaBgNVHR8EUzBRME+gTaBL - hklyc3luYzovL2FyaW4ucnBraS5uZXQvYXJpbi9pc2MvSVNDLTk0LVovMi9ERU1FdGx4WnJa - ZXM3VE5HYmU3WHdWU01nVzAuY3JsMFgGCCsGAQUFBwEBBEwwSjBIBggrBgEFBQcwAoY8cnN5 - bmM6Ly9hcmluLnJwa2kubmV0L2FyaW4vMS9ERU1FdGx4WnJaZXM3VE5HYmU3WHdWU01nVzAu - Y2VyMBgGA1UdIAEB/wQOMAwwCgYIKwYBBQUHDgIwDgYDVR0PAQH/BAQDAgeAMGUGCCsGAQUF - BwELBFkwVzBVBggrBgEFBQcwC4ZJcnN5bmM6Ly9hcmluLnJwa2kubmV0L2FyaW4vaXNjL0lT - Qy05NC1aLzIvbGR2eGNIR2RyM29LSGNQai1ndWttZXROUlowLnJvYTAfBggrBgEFBQcBBwEB - /wQQMA4wDAQCAAEwBgMEAcAFBDANBgkqhkiG9w0BAQsFAAOCAQEAIjdpXxwxe9sK9RkqzvDP - to3zdDhpLf29XqKKkNhux2pXXeMcRR5vNN13mguuxaO/uQtrFTBgk8EJn7CfhmIoZpZClcAS - cvIyYSBbc/VspOdmPH2DnQGFjBk/qpEUs3W3Us5/w6x2BnjuUtRBj5eQQ5kydtHTy/URSX7i - K76ngiTsDL6e77UVu8KY+EutZU3b2HH73qfeP8L4HJ2rpm5tnHZEECcAHS20bhqTqyuECcuK - FBhQA2Ye8LtVg/CbZixZNqb5bfcCj72HzsZAKC57gzu/ROJ43wINcwgCkYUeWM8eoFJoYCaQ - z1avg/vDGBrZtkNQQJt9mXoxaJF25YEuRDGCAaowggGmAgEDgBSV2/FwcZ2vegodw+P6C6SZ - 601FnTALBglghkgBZQMEAgGgazAaBgkqhkiG9w0BCQMxDQYLKoZIhvcNAQkQARgwHAYJKoZI - hvcNAQkFMQ8XDTA5MDkxODIyNTkzM1owLwYJKoZIhvcNAQkEMSIEIEU7rNCYuAgPtZckEMP4 - MMUl4hMwvLJ4KWHgg1fZNkJMMA0GCSqGSIb3DQEBAQUABIIBAA6fqD9/VisrRFIqRbwFpG/B - fkmnZGPequD7JPgJR/O/7ofUe1yunPugdPoDe+bTrEaUfyj6xAcdXXwR2fKHF8HyCPMclqCB - aQNZH/nHnawrwOXem8qwnKRyn7hOXyKPxar4VIVg90JFttgaM/l9W++PV02KQS8GlFRymvpg - Eca4THQ5/VWe/3V5dAOEGFUl0/WAjYId+jYzF9oHKSeZTqWmpvDaX4Pc+xkydw18kQBsovnv - +N931gu2r5I/XB/MGgGvXNWozK7RuMn55i5hMqI2NQs+/b7/AQU0+/i3g7SlLA8iZwHq49U2 - ZXRCjLXcy0tQOWVsMnGfReN8oNDhHbc= - - - 
MIIGoQYJKoZIhvcNAQcCoIIGkjCCBo4CAQMxDTALBglghkgBZQMEAgEwMAYLKoZIhvcNAQkQ - ARigIQQfMB0CAgUAMBcwFQQCAAEwDzAFAwMAlRQwBgMEA8yYuKCCBJgwggSUMIIDfKADAgEC - AgEDMA0GCSqGSIb3DQEBCwUAMDMxMTAvBgNVBAMTKDBDNDMwNEI2NUM1OUFEOTdBQ0VEMzM0 - NjZERUVEN0MxNTQ4QzgxNkQwHhcNMDkwOTE4MjI1OTU1WhcNMTAwOTEzMTEwNzA5WjAzMTEw - LwYDVQQDEyhDNjhBNEQxOUNCMDFGRTlFREU2OUY2MkE1REFCNjY1NTVGMDc2NUREMIIBIjAN - BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2z9OLSVMT632SBjbKcwfnZtkIOeaFG8oRZKe - u6q7E3krOxXuK2Yxafz5d2+FJBBFHWSmtKuHpeR2rkUWOZlgIWny6u9hb7vzt2wvuEXjBI7H - Bn3sNgGOIgHyuWjUxWyy6gr1K4x437XaMUeMx7xy/82DSgqyK0298EoiPlg8wQau38WFx+FZ - cu2Bnf3prc2l3oSRKNPAE7l4P6DKnjy3VPQT6xCt5PEscVDFzkMeJXrGe48GwJzV0ZbUQHeZ - /eMAsWyZIp5K4kciU6A552ImLjim64HXaviyHiv0rHAUImBoK5AbnpH1yOZ93StqD0iFEJMF - HubclLRuJRFomX05DwIDAQABo4IBsTCCAa0wHQYDVR0OBBYEFMaKTRnLAf6e3mn2Kl2rZlVf - B2XdMB8GA1UdIwQYMBaAFAxDBLZcWa2XrO0zRm3u18FUjIFtMFoGA1UdHwRTMFEwT6BNoEuG - SXJzeW5jOi8vYXJpbi5ycGtpLm5ldC9hcmluL2lzYy9JU0MtOTQtWi8yL0RFTUV0bHhaclpl - czdUTkdiZTdYd1ZTTWdXMC5jcmwwWAYIKwYBBQUHAQEETDBKMEgGCCsGAQUFBzAChjxyc3lu - YzovL2FyaW4ucnBraS5uZXQvYXJpbi8xL0RFTUV0bHhaclplczdUTkdiZTdYd1ZTTWdXMC5j - ZXIwGAYDVR0gAQH/BA4wDDAKBggrBgEFBQcOAjAOBgNVHQ8BAf8EBAMCB4AwZQYIKwYBBQUH - AQsEWTBXMFUGCCsGAQUFBzALhklyc3luYzovL2FyaW4ucnBraS5uZXQvYXJpbi9pc2MvSVND - LTk0LVovMi94b3BOR2NzQl9wN2VhZllxWGF0bVZWOEhaZDAucm9hMCQGCCsGAQUFBwEHAQH/ - BBUwEzARBAIAATALAwMAlRQDBAPMmLgwDQYJKoZIhvcNAQELBQADggEBAMmzrOxl/SA7uEHR - 4D5jCMNFZaKkh9Shf2Uqg+JpD88BPVLdBDHmG7CmFSI42puZk76SIrXLjyaUv3kP4wKNXOug - c3/80bynPgT+25kTeJc5T4Th735fzJZantqfG+uBQmC2Rk5mihTAL1wweIBFBYcmjAWSmuo9 - N84XWOikQnkPLAsiX75mT1E2BZB5te6UruWHRtlMggNvE72zrZBYAhk+bCC5HdkAhwA3dah5 - SsMSOoGgniFoWlLq3COV+ga1OkJgYVRQHXGP7Fjh7YCU2yUygKaf5Yniqh1ijbjJvVz419QY - ZflO9//gP3IM5ClbnWR4RhzZFKJ4DGz+lDmHIugxggGqMIIBpgIBA4AUxopNGcsB/p7eafYq - XatmVV8HZd0wCwYJYIZIAWUDBAIBoGswGgYJKoZIhvcNAQkDMQ0GCyqGSIb3DQEJEAEYMBwG - CSqGSIb3DQEJBTEPFw0wOTA5MTgyMjU5NTVaMC8GCSqGSIb3DQEJBDEiBCC4ptBgQZ1Ktxau - 
h1foPe9MJiB8XZJ21ynmZ7BPTWLQVTANBgkqhkiG9w0BAQEFAASCAQBlAxAGN2Tcvi8tF5qk - ECahrFZn0qvOw9tQRKNwwC5SRUQWQcd6Pi7g0znLVS0Y5iOZB7QvHiuPXvVAR7cxwjRlEZy2 - kmERAbrq7ROweJjb9L5JsacRSWUfG7JQjdqMSGLOf3gqlidBnDrKlNIWfyGntpZZFmIGKo9X - 5U8PWrCGkb+2AZT/tpt0eMGRhdgGX0n987dEhUbU7k9dZZXA7ou/g1MSL2HHfH17mL9rQqzN - UwHopIkNlG0ljGy7xI2wjjcvUCDi0Ns/asqxlz6icHgXhrhLyZy3JlcjG7/v2dm0MdZLFg4m - FN/5lE6Ayt2VEDfVNRfMzD6ezxb8PZc2astn - - - - - - - - text string - - - - - - - diff --git a/rpkid/tests/myrpki-xml-parse-test.py b/rpkid/tests/myrpki-xml-parse-test.py deleted file mode 100644 index 10b9cd58..00000000 --- a/rpkid/tests/myrpki-xml-parse-test.py +++ /dev/null @@ -1,101 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Test parser and display tool for myrpki.xml files. 
-""" - -import lxml.etree, rpki.resource_set, base64, subprocess - -relaxng = lxml.etree.RelaxNG(file = "myrpki.rng") - -tree = lxml.etree.parse("myrpki.xml").getroot() - -if False: - print lxml.etree.tostring(tree, pretty_print = True, encoding = "us-ascii", xml_declaration = True) - -relaxng.assertValid(tree) - -def showitems(y): - if False: - for k, v in y.items(): - if v: - print " ", k, v - -def tag(t): - return "{http://www.hactrn.net/uris/rpki/myrpki/}" + t - -print "My handle:", tree.get("handle") - -print "Children:" -for x in tree.getiterator(tag("child")): - print " ", x - print " Handle:", x.get("handle") - print " ASNS: ", rpki.resource_set.resource_set_as(x.get("asns")) - print " IPv4: ", rpki.resource_set.resource_set_ipv4(x.get("v4")) - print " Valid: ", x.get("valid_until") - showitems(x) -print - -print "ROA requests:" -for x in tree.getiterator(tag("roa_request")): - print " ", x - print " ASN: ", x.get("asn") - print " IPv4:", rpki.resource_set.roa_prefix_set_ipv4(x.get("v4")) - print " IPv6:", rpki.resource_set.roa_prefix_set_ipv6(x.get("v6")) - showitems(x) -print - -def showpem(label, b64, kind): - cmd = ("openssl", kind, "-noout", "-text", "-inform", "DER") - if kind == "x509": - cmd += ("-certopt", "no_pubkey,no_sigdump") - p = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE) - text = p.communicate(input = base64.b64decode(b64))[0] - if p.returncode != 0: - raise subprocess.CalledProcessError(returncode = p.returncode, cmd = cmd) - print label, text - -for x in tree.getiterator(tag("child")): - cert = x.findtext(tag("bpki_certificate")) - if cert: - showpem("Child", cert, "x509") - -for x in tree.getiterator(tag("parent")): - print "Parent URI:", x.get("service_uri") - cert = x.findtext(tag("bpki_certificate")) - if cert: - showpem("Parent", cert, "x509") - -ca = tree.findtext(tag("bpki_ca_certificate")) -if ca: - showpem("CA", ca, "x509") - -bsc = tree.findtext(tag("bpki_bsc_certificate")) -if bsc: - showpem("BSC 
EE", bsc, "x509") - -repo = tree.findtext(tag("bpki_repository_certificate")) -if repo: - showpem("Repository", repo, "x509") - -req = tree.findtext(tag("bpki_bsc_pkcs10")) -if req: - showpem("BSC EE", req, "req") - -crl = tree.findtext(tag("bpki_crl")) -if crl: - showpem("CA", crl, "crl") diff --git a/rpkid/tests/old_irdbd.py b/rpkid/tests/old_irdbd.py deleted file mode 100644 index d258e4c0..00000000 --- a/rpkid/tests/old_irdbd.py +++ /dev/null @@ -1,19 +0,0 @@ -# $Id$ -# -# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -if __name__ == "__main__": - import rpki.old_irdbd - rpki.old_irdbd.main() diff --git a/rpkid/tests/old_irdbd.sql b/rpkid/tests/old_irdbd.sql deleted file mode 100644 index e773bb2e..00000000 --- a/rpkid/tests/old_irdbd.sql +++ /dev/null @@ -1,143 +0,0 @@ --- $Id$ - --- Copyright (C) 2009--2011 Internet Systems Consortium ("ISC") --- --- Permission to use, copy, modify, and distribute this software for any --- purpose with or without fee is hereby granted, provided that the above --- copyright notice and this permission notice appear in all copies. --- --- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH --- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY --- AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, --- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM --- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE --- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR --- PERFORMANCE OF THIS SOFTWARE. - --- Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") --- --- Permission to use, copy, modify, and distribute this software for any --- purpose with or without fee is hereby granted, provided that the above --- copyright notice and this permission notice appear in all copies. --- --- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH --- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY --- AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, --- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM --- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE --- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR --- PERFORMANCE OF THIS SOFTWARE. - --- SQL objects needed by irdbd.py. You only need this if you're using --- irdbd.py as your IRDB; if you have a "real" backend you can do --- anything you like so long as you implement the relevant portion of --- the left-right protocol. - --- DROP TABLE commands must be in correct (reverse dependency) order --- to satisfy FOREIGN KEY constraints. 
- -DROP TABLE IF EXISTS roa_request_prefix; -DROP TABLE IF EXISTS roa_request; -DROP TABLE IF EXISTS registrant_net; -DROP TABLE IF EXISTS registrant_asn; -DROP TABLE IF EXISTS registrant; -DROP TABLE IF EXISTS ghostbuster_request; -DROP TABLE IF EXISTS ee_certificate_asn; -DROP TABLE IF EXISTS ee_certificate_net; -DROP TABLE IF EXISTS ee_certificate; - -CREATE TABLE registrant ( - registrant_id SERIAL NOT NULL, - registrant_handle VARCHAR(255) NOT NULL, - registrant_name TEXT, - registry_handle VARCHAR(255), - valid_until DATETIME NOT NULL, - PRIMARY KEY (registrant_id), - UNIQUE (registry_handle, registrant_handle) -) ENGINE=InnoDB; - -CREATE TABLE registrant_asn ( - start_as BIGINT UNSIGNED NOT NULL, - end_as BIGINT UNSIGNED NOT NULL, - registrant_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (registrant_id, start_as, end_as), - CONSTRAINT registrant_asn_registrant_id - FOREIGN KEY (registrant_id) REFERENCES registrant (registrant_id) - ON DELETE CASCADE ON UPDATE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE registrant_net ( - start_ip VARCHAR(40) NOT NULL, - end_ip VARCHAR(40) NOT NULL, - version TINYINT UNSIGNED NOT NULL, - registrant_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (registrant_id, version, start_ip, end_ip), - CONSTRAINT registrant_net_registrant_id - FOREIGN KEY (registrant_id) REFERENCES registrant (registrant_id) - ON DELETE CASCADE ON UPDATE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE roa_request ( - roa_request_id SERIAL NOT NULL, - self_handle VARCHAR(255) NOT NULL, - asn BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (roa_request_id) -) ENGINE=InnoDB; - -CREATE TABLE roa_request_prefix ( - prefix VARCHAR(40) NOT NULL, - prefixlen TINYINT UNSIGNED NOT NULL, - max_prefixlen TINYINT UNSIGNED NOT NULL, - version TINYINT UNSIGNED NOT NULL, - roa_request_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (roa_request_id, prefix, prefixlen, max_prefixlen), - CONSTRAINT roa_request_prefix_roa_request_id - FOREIGN KEY (roa_request_id) REFERENCES roa_request 
(roa_request_id) - ON DELETE CASCADE ON UPDATE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE ghostbuster_request ( - ghostbuster_request_id SERIAL NOT NULL, - self_handle VARCHAR(255) NOT NULL, - parent_handle VARCHAR(255), - vcard LONGBLOB NOT NULL, - PRIMARY KEY (ghostbuster_request_id) -) ENGINE=InnoDB; - -CREATE TABLE ee_certificate ( - ee_certificate_id SERIAL NOT NULL, - self_handle VARCHAR(255) NOT NULL, - pkcs10 LONGBLOB NOT NULL, - gski VARCHAR(27) NOT NULL, - cn VARCHAR(64) NOT NULL, - sn VARCHAR(64), - eku TEXT NOT NULL, - valid_until DATETIME NOT NULL, - PRIMARY KEY (ee_certificate_id), - UNIQUE (self_handle, gski) -) ENGINE=InnoDB; - -CREATE TABLE ee_certificate_asn ( - start_as BIGINT UNSIGNED NOT NULL, - end_as BIGINT UNSIGNED NOT NULL, - ee_certificate_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (ee_certificate_id, start_as, end_as), - CONSTRAINT ee_certificate_asn_ee_certificate_id - FOREIGN KEY (ee_certificate_id) REFERENCES ee_certificate (ee_certificate_id) - ON DELETE CASCADE ON UPDATE CASCADE -) ENGINE=InnoDB; - -CREATE TABLE ee_certificate_net ( - version TINYINT UNSIGNED NOT NULL, - start_ip VARCHAR(40) NOT NULL, - end_ip VARCHAR(40) NOT NULL, - ee_certificate_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (ee_certificate_id, version, start_ip, end_ip), - CONSTRAINT ee_certificate_net_ee_certificate_id - FOREIGN KEY (ee_certificate_id) REFERENCES ee_certificate (ee_certificate_id) - ON DELETE CASCADE ON UPDATE CASCADE -) ENGINE=InnoDB; - --- Local Variables: --- indent-tabs-mode: nil --- End: diff --git a/rpkid/tests/publication-protocol-samples.xml b/rpkid/tests/publication-protocol-samples.xml deleted file mode 100644 index 96b095a7..00000000 --- a/rpkid/tests/publication-protocol-samples.xml +++ /dev/null @@ -1,370 +0,0 @@ - - - - - - - - MIIBezBlAgEBMA0GCSqGSIb3DQEBCwUAMCMxITAfBgNVBAMTGFRlc3QgQ2VydGlm - aWNhdGUgcHViZCBUQRcNMDgwNjAyMjE0OTQ1WhcNMDgwNzAyMjE0OTQ1WqAOMAww - CgYDVR0UBAMCAQEwDQYJKoZIhvcNAQELBQADggEBAFWCWgBl4ljVqX/CHo+RpqYt - 
vmKMnjPVflMXUB7i28RGP4DAq4l7deDU7Q82xEJyE4TXMWDWAV6UG6uUGum0VHWO - cj9ohqyiZUGfOsKg2hbwkETm8sAENOsi1yNdyKGk6jZ16aF5fubxQqZa1pdGCSac - 1/ZYC5sLLhEz3kmz+B9z9mXFVc5TgAh4dN3Gy5ftF8zZAFpDGnS4biCnRVqhGv6R - 0Lh/5xmii+ZU6kNDhbeMsjJg+ZOmtN+wMeHSIbjiy0WuuaZ3k2xSh0C94anrHBZA - vvCRhbazjR0Ef5OMZ5lcllw3uO8IHuoisHKkehy4Y0GySdj98fV+OuiRTH9vt/M= - - - - - - - - - - - - - - - - MIIBezBlAgEBMA0GCSqGSIb3DQEBCwUAMCMxITAfBgNVBAMTGFRlc3QgQ2VydGlm - aWNhdGUgcHViZCBUQRcNMDgwNjAyMjE0OTQ1WhcNMDgwNzAyMjE0OTQ1WqAOMAww - CgYDVR0UBAMCAQEwDQYJKoZIhvcNAQELBQADggEBAFWCWgBl4ljVqX/CHo+RpqYt - vmKMnjPVflMXUB7i28RGP4DAq4l7deDU7Q82xEJyE4TXMWDWAV6UG6uUGum0VHWO - cj9ohqyiZUGfOsKg2hbwkETm8sAENOsi1yNdyKGk6jZ16aF5fubxQqZa1pdGCSac - 1/ZYC5sLLhEz3kmz+B9z9mXFVc5TgAh4dN3Gy5ftF8zZAFpDGnS4biCnRVqhGv6R - 0Lh/5xmii+ZU6kNDhbeMsjJg+ZOmtN+wMeHSIbjiy0WuuaZ3k2xSh0C94anrHBZA - vvCRhbazjR0Ef5OMZ5lcllw3uO8IHuoisHKkehy4Y0GySdj98fV+OuiRTH9vt/M= - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 
3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - 
hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - MIIDGzCCAgOgAwIBAgIJAKi+/+wUhQlxMA0GCSqGSIb3DQEBBQUAMCQxIjAgBgNV - BAMTGVRlc3QgQ2VydGlmaWNhdGUgQm9iIFJvb3QwHhcNMDcwODAxMTk1MzEwWhcN - MDcwODMxMTk1MzEwWjAkMSIwIAYDVQQDExlUZXN0IENlcnRpZmljYXRlIEJvYiBS - b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArKYUtJaM5PH5917S - G2ACc7iBYdQO2HYyu8Gb6i9Q2Gxc3cWEX7RTBvgOL79pWf3GIdnoupzMnoZVtY3G - Ux2G/0WkmLui2TCeDhcfXdQ4rcp8J3V/6ESj+yuEPPOG8UN17mUKKgujrch6ZvgC - DO9AyOK/uXu+ABQXTPsn2pVe2EVh3V004ShLi8GKgVdqb/rW/6GTg0Xb/zLT6WWM - uT++6sXTlztJdQYkRamJvKfQDU1naC8mAkGf79Tba0xyBGAUII0GfREY6t4/+NAP - 2Yyb3xNlBqcJoTov0JfNKHZcCZePr79j7LK/hkZxxip+Na9xDpE+oQRV+DRukCRJ - diqg+wIDAQABo1AwTjAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTDEsXJe6pjAQD4 - ULlB7+GMDBlimTAfBgNVHSMEGDAWgBTDEsXJe6pjAQD4ULlB7+GMDBlimTANBgkq - hkiG9w0BAQUFAAOCAQEAWWkNcW6S1tKKqtzJsdfhjJiAAPQmOXJskv0ta/8f6Acg - cum1YieNdtT0n96P7CUHOWP8QBb91JzeewR7b6WJLwb1Offs3wNq3kk75pJe89r4 - XY39EZHhMW+Dv0PhIKu2CgD4LeyH1FVTQkF/QObGEmkn+s+HTsuzd1l2VLwcP1Sm - sqep6LAlFj62qqaIJzNeQ9NVkBqtkygnYlBOkaBTHfQTux3jYNpEo8JJB5e/WFdH - YyMNrG2xMOtIC7T4+IOHgT8PgrNhaeDg9ctewj0X8Qi9nI9nXeinicLX8vj6hdEq - 3ORv7RZMJNYqv1HQ3wUE2B7fCPFv7EUwzaCds1kgRQ== - - - - - - - - - - - - - - - - - MIIE+jCCA+KgAwIBAgIBDTANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQDEyhERjRBODAxN0U2 - NkE5RTkxNzJFNDYxMkQ4Q0Y0QzgzRjIzOERFMkEzMB4XDTA4MDUyMjE4MDUxMloXDTA4MDUy - NDE3NTQ1M1owMzExMC8GA1UEAxMoOEZCODIxOEYwNkU1MEFCNzAyQTdEOTZEQzhGMENEQ0Q4 - MjhGN0YxNzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMeziKp0k5nP7v6SZoNs - XIMQYRgNtC6Fr/9Xm/1yQHomiPqHUk47rHhGojYiK5AhkrwoYhkH4UjJl2iwklDYczXuaBU3 - 
F5qrKlZ4aZnjIxdlP7+hktVpeApL6yuJTUAYeC3UIxnLDVdD6phydZ/FOQluffiNDjzteCCv - oyOUatqt8WB+oND6LToHp028g1YUYLHG6mur0dPdcHOVXLSmUDuZ1HDz1nDuYvIVKjB/MpH9 - aW9XeaQ6ZFIlZVPwuuvI2brR+ThH7Gv27GL/o8qFdC300VQfoTZ+rKPGDE8K1cI906BL4kiw - x9z0oiDcE96QCz+B0vsjc9mGaA1jgAxlXWsCAwEAAaOCAhcwggITMB0GA1UdDgQWBBSPuCGP - BuUKtwKn2W3I8M3Ngo9/FzAfBgNVHSMEGDAWgBTfSoAX5mqekXLkYS2M9Mg/I43iozBVBgNV - HR8ETjBMMEqgSKBGhkRyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rlc3RiZWQvUklSLzEvMzBx - QUYtWnFucEZ5NUdFdGpQVElQeU9ONHFNLmNybDBFBggrBgEFBQcBAQQ5MDcwNQYIKwYBBQUH - MAKGKXJzeW5jOi8vbG9jYWxob3N0OjQ0MDAvdGVzdGJlZC9XT01CQVQuY2VyMBgGA1UdIAEB - /wQOMAwwCgYIKwYBBQUHDgIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwgZsG - CCsGAQUFBwELBIGOMIGLMDQGCCsGAQUFBzAFhihyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rl - c3RiZWQvUklSL1IwLzEvMFMGCCsGAQUFBzAKhkdyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rl - c3RiZWQvUklSL1IwLzEvajdnaGp3YmxDcmNDcDlsdHlQRE56WUtQZnhjLm1uZjAaBggrBgEF - BQcBCAEB/wQLMAmgBzAFAgMA/BUwPgYIKwYBBQUHAQcBAf8ELzAtMCsEAgABMCUDAwAKAzAO - AwUAwAACAQMFAcAAAiAwDgMFAsAAAiwDBQDAAAJkMA0GCSqGSIb3DQEBCwUAA4IBAQCEhuH7 - jtI2PJY6+zwv306vmCuXhtu9Lr2mmRw2ZErB8EMcb5xypMrNqMoKeu14K2x4a4RPJkK4yATh - M81FPNRsU5mM0acIRnAPtxjHvPME7PHN2w2nGLASRsZmaa+b8A7SSOxVcFURazENztppsolH - eTpm0cpLItK7mNpudUg1JGuFo94VLf1MnE2EqARG1vTsNhel/SM/UvOArCCOBvf0Gz7kSuup - DSZ7qx+LiDmtEsLdbGNQBiYPbLrDk41PHrxdx28qIj7ejZkRzNFw/3pi8/XK281h8zeHoFVu - 6ghRPy5dbOA4akX/KG6b8XIx0iwPYdLiDbdWFbtTdPcXBauY - - - - - - - - - - - - - - - - - - - - MIIBwzCBrAIBATANBgkqhkiG9w0BAQsFADAzMTEwLwYDVQQDEyhERjRBODAxN0U2NkE5RTkx - NzJFNDYxMkQ4Q0Y0QzgzRjIzOERFMkEzFw0wODA1MjIxODA0MTZaFw0wODA1MjIxODA1MTZa - MBQwEgIBAhcNMDgwNTIyMTc1ODQwWqAvMC0wHwYDVR0jBBgwFoAU30qAF+ZqnpFy5GEtjPTI - PyON4qMwCgYDVR0UBAMCAQYwDQYJKoZIhvcNAQELBQADggEBAKkM0Fb/pJpHVHWZyjp4wojH - W2KkvA/DFtBiz3moxocSnkDVP3QI19uVvqdC6nH3hJyFmsAMwULR0f1XU/V4j+X+FqYEl6Nv - p8zAEPIB4r8xbEFs7udRwXRAjkJmOQbv9aomF2i+d7jpTFVJxShZWOgsoGEhIy/aktKQrOIR - c4ZDrXpQwXVj2Y7+cGVfQ4gvnPOdlyLcnNovoegazATvA3EcidBNPWRg7XTCz0LVBEB7JgPd - 
nNyXRg35HdMEHBl7U9uUQJXP7S02oaQ1ehNDMfaJPgBBpQtAnM1lIzJfevd9+e4ywGsRpxAV - 8wxTXSPd1jwuKtS0kwrgsrQ8Ya85xUE= - - - - - - - - - - - - - - - - - - - - MIIHCgYJKoZIhvcNAQcCoIIG+zCCBvcCAQMxDTALBglghkgBZQMEAgEwggEeBgsqhkiG9w0B - CRABGqCCAQ0EggEJMIIBBQIBEhgPMjAwODA1MjIxODA1MTVaGA8yMDA4MDUyMjE4MDYxNVoG - CWCGSAFlAwQCATCB0jBEFh9ZbTVUTzRJYnlDb0pNZ3E2R2o4dG41Mng5U0UuY2VyAyEA4L8Z - WMyuhOx+o6kUfsRR++QjSaRaATy4UOeVtjvZVqYwRBYfWnRxbjB3NEVFbU9hclAzQmd1SUY3 - MDhhNTM4LmNlcgMhAGQI1gYJotxWmwzcmpLNFZJ656uWOjcPYANlbNz80xm8MEQWH2xxa1Vx - RHEwMDBESW9ZVjlybXdLTGdrN2F6by5jZXIDIQB7jRAEpkPvc4s4PX9vDvnTifj3BIE145FO - 1ne2kEejVqCCBBEwggQNMIIC9aADAgECAgEFMA0GCSqGSIb3DQEBCwUAMDMxMTAvBgNVBAMT - KDhGQjgyMThGMDZFNTBBQjcwMkE3RDk2REM4RjBDRENEODI4RjdGMTcwHhcNMDgwNTIyMTc1 - NzQ5WhcNMDgwNTI0MTc1NDUzWjAzMTEwLwYDVQQDEyhERkRBMjMyMUJENEVCMDNFQTE1RkUy - N0NGRkRGMEFGRkU1QjBFNjY4MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2/Gk - AHW5pDqye0+TvUp7sl0rVgmTfeHpVp18ypxvuovogVJgkjEtBEikfaFU0646wYD6JM6IJFJX - lWLWd7bVmhkWViKuZL0VmT2wpUToNHCLUGUQUVVX8R7oSHFdTArv2AqH+6yt0LmczDH1y2M6 - 2Tgkz9wZ9ryyuPx3VX4PkHzUMlkGFICj1fvyXkcAu8jBaxR9UME1c413TPaMi6lMh1HUmtVN - LJMP5+/SnwEAW/Z3dPClCFIgQXK3nAKPVzAIwADEiqhK7cSchhO7ikI1CVt0XzG4n7oaILc3 - Hq/DAxyiutw5GlkUlKPri2YJzJ3+H4P+TveSa/b02fVA5csm/QIDAQABo4IBKjCCASYwHQYD - VR0OBBYEFN/aIyG9TrA+oV/ifP/fCv/lsOZoMB8GA1UdIwQYMBaAFI+4IY8G5Qq3AqfZbcjw - zc2Cj38XMFgGA1UdHwRRME8wTaBLoEmGR3JzeW5jOi8vbG9jYWxob3N0OjQ0MDAvdGVzdGJl - ZC9SSVIvUjAvMS9qN2doandibENyY0NwOWx0eVBETnpZS1BmeGMuY3JsMGAGCCsGAQUFBwEB - BFQwUjBQBggrBgEFBQcwAoZEcnN5bmM6Ly9sb2NhbGhvc3Q6NDQwMC90ZXN0YmVkL1JJUi8x - L2o3Z2hqd2JsQ3JjQ3A5bHR5UEROellLUGZ4Yy5jZXIwGAYDVR0gAQH/BA4wDDAKBggrBgEF - BQcOAjAOBgNVHQ8BAf8EBAMCB4AwDQYJKoZIhvcNAQELBQADggEBADpsE9HfgVTgmX1WeJTE - fm87CXuOoGH85RFiAngSt5kR4gYCyadklOZ7Eta+ERUZVu4tcKO6sJOTuHPfVrAvR0VpgH+j - PvXboYWSfwJdi00BC28ScrVM2zarA7B10+J6Oq8tbFlAyVBkrbuPet/axmndBtGWhrBTynGl - nc/5L371Lxy6CrOYqXO0Qx3SrOKaailAe3zTIpHQeACqnPdL00zIBw/hVy/VNaH1wy+FmhAz - 
TsmsQUrMyovJcu/ry5w0KHlP8BTnqfykikCWR+Lw0VQHmpJGAbtrmsOeIbfLY1zl7A81lDAl - AG/ZH1DUdDOUIXMLHWur+D2rwjp7RL16LHYxggGqMIIBpgIBA4AU39ojIb1OsD6hX+J8/98K - /+Ww5mgwCwYJYIZIAWUDBAIBoGswGgYJKoZIhvcNAQkDMQ0GCyqGSIb3DQEJEAEaMBwGCSqG - SIb3DQEJBTEPFw0wODA1MjIxODA1MTVaMC8GCSqGSIb3DQEJBDEiBCBj/GjEQw3LgKPf5DTz - 8eu1fcp6/cQjqqne6ZqFkF42azANBgkqhkiG9w0BAQEFAASCAQBOY0uHNMwy/o1nFANSgha5 - PZxt8fz+wTrbeomCb+lxqQKq1clcSiQORVGc8NmqC8sS5OR3eTw/3qnK9yPHxz2UQ4hn1pBa - +Zy5veM61qMaXCw6w98EyNcvUfA1AkezAjkabfHQDs3o4Ezh49thXXyRcBoF+O6Lmi+LZbT2 - 4jvfFbaXW9zsb6/DaoDkeHnlk+YYgfSP4wOnkK5uqxtDW8QpMPq3GGdIp0oJDkzEdj7VsWIL - 9JP2mxxL8fTPVUyAPOmURYwYDXqhke2O9eVDiCYhrEfB8/84Rint4Cj8n5aCujnAtqtwxHpD - 0NRYO/V1MjhG+ARy1vRH1Dm0r92RBam3 - - - - - - - - - - - - - - - - - - - - MIIGmwYJKoZIhvcNAQcCoIIGjDCCBogCAQMxDTALBglghkgBZQMEAgEwKgYLKoZIhvcNAQkQ - ARigGwQZMBcCAgKaMBEwDwQCAAEwCTAHAwUACgMALKCCBJgwggSUMIIDfKADAgECAgEJMA0G - CSqGSIb3DQEBCwUAMDMxMTAvBgNVBAMTKDhGQjgyMThGMDZFNTBBQjcwMkE3RDk2REM4RjBD - RENEODI4RjdGMTcwHhcNMDgwNTIyMTc1ODI0WhcNMDgwNTI0MTc1NDUzWjAzMTEwLwYDVQQD - Eyg5NkE5MTRBODNBQjREMzQwQzhBMTg1N0RBRTZDMEEyRTA5M0I2QjNBMIIBIjANBgkqhkiG - 9w0BAQEFAAOCAQ8AMIIBCgKCAQEApoK50BjW5bcF4gsdaYhndtVADZvQk3RCsvuqDElF6uLi - 9BYQq/NHyDOIMyJtvCmzjdv3Y135n1sNO7YvssqHlt7dMfCQTD5ND1GpFnQLdWP7stWM5AbO - nJV6+PtDITUA/QHOli7Do0YCUgR6G+1QJsMu0DK+TRSzBJ6WP7WIYOBOOg3y/NKc1rkWhS1Q - dcQepbHgQYZHzzpjNDR6+oYVuhuUEWx1P6O4pv/p+tpE0SDua7jBjMywIYHkPQBecf2IX1RU - WNojB9dJlnRx5YUUneP2SvF2MrmdDbclgzwhf6alqD2OjiMuoBOG8yeTKcuhzCMnrFAklbst - 6x3Rnq9BswIDAQABo4IBsTCCAa0wHQYDVR0OBBYEFJapFKg6tNNAyKGFfa5sCi4JO2s6MB8G - A1UdIwQYMBaAFI+4IY8G5Qq3AqfZbcjwzc2Cj38XMFgGA1UdHwRRME8wTaBLoEmGR3JzeW5j - Oi8vbG9jYWxob3N0OjQ0MDAvdGVzdGJlZC9SSVIvUjAvMS9qN2doandibENyY0NwOWx0eVBE - TnpZS1BmeGMuY3JsMGAGCCsGAQUFBwEBBFQwUjBQBggrBgEFBQcwAoZEcnN5bmM6Ly9sb2Nh - bGhvc3Q6NDQwMC90ZXN0YmVkL1JJUi8xL2o3Z2hqd2JsQ3JjQ3A5bHR5UEROellLUGZ4Yy5j - ZXIwGAYDVR0gAQH/BA4wDDAKBggrBgEFBQcOAjAOBgNVHQ8BAf8EBAMCB4AwYwYIKwYBBQUH - 
AQsEVzBVMFMGCCsGAQUFBzALhkdyc3luYzovL2xvY2FsaG9zdDo0NDAwL3Rlc3RiZWQvUklS - L1IwLzEvbHFrVXFEcTAwMERJb1lWOXJtd0tMZ2s3YXpvLnJvYTAgBggrBgEFBQcBBwEB/wQR - MA8wDQQCAAEwBwMFAAoDACwwDQYJKoZIhvcNAQELBQADggEBAL8iHwsyGOYhhIf3nVuL361y - TOJSP8SR0mtQLHULPl+GkYk+5MRNWtL8ucTXFvniYJtOCXEGGEIO9eDXvkQIXQSz/qbF9URQ - fuf38ghRza257syVhal6UHTgCFYuRIO9CUjcU1vkWUxH05BBIHlYdtlIQbAG/mRsCPCEgSmG - bbQaomGlUOqmJMlKxLLcoAtz2vDrwVotgHyfS5h2mgINFjnlLcNLTci+sfs7/aQAkDYx7K98 - se/ZlMorvGkFNhHoOTcGIrWkYsfkbTygVwWRm278PaB3o4449Kvsg/gb8BZeHXRs68cr5Mcf - jP7Q6jeypjTgDBnwb1yzoJIKWszFuSgxggGqMIIBpgIBA4AUlqkUqDq000DIoYV9rmwKLgk7 - azowCwYJYIZIAWUDBAIBoGswGgYJKoZIhvcNAQkDMQ0GCyqGSIb3DQEJEAEYMBwGCSqGSIb3 - DQEJBTEPFw0wODA1MjIxNzU4MjRaMC8GCSqGSIb3DQEJBDEiBCDCyf9v9Wed515TRp2WwnyM - 1rk6dB///X+aqIym2e9jdTANBgkqhkiG9w0BAQEFAASCAQAFvzrHeRPW+wn4WSyoyBEq0zKS - Cyh5tu1qTR0NHs6Rr/p8Pk81P1HQLND/U+znJZKLWlO2niEHUXPIicPDYchbj8ApH9VxKA+1 - lCWllOzFAsYyZFr3/VNs9pVp2eT4F9eEYBrBVDSNrD72MMTlWm1T5MEXqltTJJOCKzUEX96x - 91iW6A+4erop7S8hpCnxqkTin4bFVreqYcGc4CC4bh+L9pPqJnURcEk7Qeu/WEHQBm38voB4 - S11qRZNrJMQ99oiJR7hXDIBm66HjGqoUL2gPCfpgJEVVnM9pVv2k889z4eTTck2Qj54gga2W - Xkvw4Je420aDx88s9T2+PqXcbZ4g - - - - - - - - - - - - - - - - - - - text string - - - - - - - diff --git a/rpkid/tests/rcynic.conf b/rpkid/tests/rcynic.conf deleted file mode 100644 index ea31fe58..00000000 --- a/rpkid/tests/rcynic.conf +++ /dev/null @@ -1,14 +0,0 @@ -# $Id$ -# -# rcynic configuration for looking at yamltest results. 
- -[rcynic] -xml-summary = rcynic.xml -jitter = 0 -use-links = yes -use-syslog = no -use-stderr = yes -log-level = log_debug -max-parallel-fetches = 32 - -trust-anchor-locator = yamltest.dir/root.tal diff --git a/rpkid/tests/revoke.yaml b/rpkid/tests/revoke.yaml deleted file mode 100644 index 2edb8335..00000000 --- a/rpkid/tests/revoke.yaml +++ /dev/null @@ -1,420 +0,0 @@ -# $Id: smoketest.1.yaml 3881 2011-06-17 18:32:54Z sra $ - -# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -name: RIR -crl_interval: 5m -regen_margin: 2m -valid_for: 2d -kids: - - name: R0 - kids: - - name: Alice - ipv4: 192.0.2.1-192.0.2.33 - asn: 64533 - roa_request: - - asn: 42 - ipv4: 192.0.2.32/32 - - name: Bob - ipv4: 192.0.2.44-192.0.2.100 - ipv4: 10.3.0.0/16 - roa_request: - - asn: 666 - ipv4: 10.3.0.44/32 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - rekey: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 - -- name: R0 - revoke: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- sleep 30 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - rekey: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - revoke: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . 
-type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- sleep 30 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - rekey: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - revoke: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- sleep 30 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - rekey: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - revoke: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- sleep 30 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - rekey: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . 
-type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - revoke: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- sleep 30 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - rekey: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - revoke: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- sleep 30 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - rekey: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - revoke: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- sleep 30 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . 
-type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - rekey: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - revoke: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- sleep 30 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - rekey: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - revoke: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- sleep 30 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - rekey: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . -type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- name: R0 - revoke: -- sleep 10 - ---- -- shell sleep 1; - dir=rcynic.`date +%s`.data; mkdir $dir; - cd rcynic-data; - pax -rwl . ../$dir; find . 
-type f -name '*.cer' | - sort | - xargs ../../../../utils/uri/uri -s - >../${dir%.data}.uris; - sleep 1 -- sleep 30 diff --git a/rpkid/tests/rootd.yaml b/rpkid/tests/rootd.yaml deleted file mode 100644 index 2ee5dcd4..00000000 --- a/rpkid/tests/rootd.yaml +++ /dev/null @@ -1,24 +0,0 @@ -# $Id$ ---- -version: 1 -posturl: https://localhost:4401/up-down/1 -recipient-id: "rootd" -sender-id: "RIR" - -cms-cert-file: RIR-RPKI-EE.cer -cms-key-file: RIR-RPKI-EE.key -cms-ca-cert-file: rootd-TA.cer -cms-cert-chain-file: [ RIR-RPKI-CA.cer ] - -ssl-cert-file: RIR-RPKI-EE.cer -ssl-key-file: RIR-RPKI-EE.key -ssl-ca-cert-file: rootd-TA.cer - -requests: - list: - type: list - issue: - type: issue - class: 1 - sia: - - rsync://localhost:4400/testbed/RIR/ diff --git a/rpkid/tests/rpki b/rpkid/tests/rpki deleted file mode 120000 index 8d289d0b..00000000 --- a/rpkid/tests/rpki +++ /dev/null @@ -1 +0,0 @@ -../rpki \ No newline at end of file diff --git a/rpkid/tests/smoketest.1.yaml b/rpkid/tests/smoketest.1.yaml deleted file mode 100644 index 914aaae4..00000000 --- a/rpkid/tests/smoketest.1.yaml +++ /dev/null @@ -1,89 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -name: RIR -crl_interval: 5m -regen_margin: 2m -valid_for: 2d -kids: - - name: R0 - ghostbuster: | - BEGIN:VCARD - VERSION:4.0 - FN:R0 - ORG:Organizational Entity - ADR;TYPE=WORK:;;42 Twisty Passage;Deep Cavern;WA;98666;U.S.A. - TEL;TYPE=VOICE,TEXT,WORK;VALUE=uri:tel:+1-666-555-1212 - TEL;TYPE=FAX,WORK;VALUE=uri:tel:+1-666-555-1213 - EMAIL:human@example.com - END:VCARD - kids: - - name: Alice - ipv4: 192.0.2.1-192.0.2.33 - asn: 64533 - roa_request: - - asn: 42 - ipv4: 192.0.2.32/32 - router_cert: - - router_id: 666 - asn: 42 - - name: Bob - ipv4: 192.0.2.44-192.0.2.100 - ipv4: 10.3.0.0/16 - roa_request: - - asn: 666 - ipv4: 10.3.0.44/32 - ---- -- shell set -x; - rtr_origin='python ../../../rtr-origin/rtr-origin.py'; - $rtr_origin --cronjob rcynic-data/authenticated && - $rtr_origin --show ---- -- name: R0 - rekey: ---- -- name: R0 - revoke: ---- -- name: Alice - valid_add: 10 -- name: R0 - roa_request_add: - - asn: 17 - ipv4: 10.3.0.1/32, 10.0.0.44/32 ---- -- shell set -x; - rtr_origin='python ../../../rtr-origin/rtr-origin.py'; - $rtr_origin --cronjob rcynic-data/authenticated && - $rtr_origin --show ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 diff --git a/rpkid/tests/smoketest.2.yaml b/rpkid/tests/smoketest.2.yaml deleted file mode 100644 index 0cdec650..00000000 --- a/rpkid/tests/smoketest.2.yaml +++ /dev/null @@ -1,126 +0,0 @@ -# $Id$ - -# Copyright (C) 2009 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# 
copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -name: RIR -valid_for: 2d -kids: - - name: R0 - kids: - - name: Alice - hosted_by: R0 - ipv4: 192.0.2.1-192.0.2.33 - asn: 64533 - - name: Bob - hosted_by: R0 - ipv4: 192.0.2.44-192.0.2.100 - - name: R1 - kids: - - name: Carol - hosted_by: R1 - ipv6: 2001:db8::44-2001:db8::100 - - name: Dave - hosted_by: R1 - ipv6: 2001:db8::10:0:44/128 - asn: 64544 - - name: R2 - kids: - - name: Elena - hosted_by: R2 - ipv4: 10.0.0.0/24 - ipv6: 2001:db8::a00:0/120 - - name: Frank - hosted_by: R2 - ipv4: 10.3.0.0/24 - ipv6: 2001:db8::a03:0/120 - - name: R3 - kids: - - name: Ginny - hosted_by: R3 - asn: 64534-64540 - - name: Harry - hosted_by: R3 - asn: 666-677 - - name: R4 - kids: - - name: Ilse - hosted_by: R4 - ipv4: 10.3.0.0/16 - - name: Jack - hosted_by: R4 - ipv4: 10.2.0.0/16 - - name: R5 - kids: - - name: Kari - hosted_by: R5 - asn: 222-233 - - name: Leon - hosted_by: R5 - asn: 244-255 - - name: R6 - kids: - - name: Mary - hosted_by: R6 - ipv4: 10.77.0.0/16 - - name: Neal - hosted_by: R6 - ipv4: 10.66.0.0/16 - - name: R7 - kids: - - name: Olga - hosted_by: R7 - ipv4: 10.88.0.0/16 - - name: Piet - hosted_by: R7 - ipv4: 10.99.0.0/16 - - name: R8 - kids: - - name: Qi - hosted_by: R8 - asn: 111-122 - - name: Rex - hosted_by: R8 - asn: 333-344 - - name: R9 - kids: - - name: Sandra - hosted_by: R9 - asn: 555-566 - - name: Thad - hosted_by: R9 - asn: 577-588 ---- -- name: Alice - add_as: 33 ---- -- name: Alice - sub_as: 33 ---- -- name: Alice - valid_for: 365d diff --git a/rpkid/tests/smoketest.3.yaml b/rpkid/tests/smoketest.3.yaml deleted file mode 100644 index e6a10a12..00000000 --- a/rpkid/tests/smoketest.3.yaml +++ /dev/null @@ -1,81 +0,0 @@ -# $Id$ - -# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -name: RIR -crl_interval: 2d -regen_margin: 1h -valid_for: 90d -kids: - - name: R0 - kids: - - name: Alice - ipv4: 192.0.2.0-192.0.2.33 - ipv6: 2002:0a00::/32 - asn: 64533 - roa_request: - - asn: 42 - ipv4: 192.0.2.0/30-32,192.0.2.32/32 - ipv6: 2002:0a00::/32-128 - - name: Bob - ipv4: 192.0.2.44-192.0.2.100 - ipv4: 10.3.0.0/16 - roa_request: - - asn: 666 - ipv4: 10.3.0.0/23 ---- -#- shell find publication -type f -name '*.roa' -# -print -exec ../../../utils/print_roa/print_roa {} \; -#- shell find publication -type f -name '*.mft' -# -print -exec ../../../utils/print_manifest/print_manifest {} \; -#--- -#- shell find publication -type f -name '*.roa' -# -print -exec ../../../utils/print_roa/print_roa {} \; -#- shell find publication -type f -name '*.mft' -# -print -exec ../../../utils/print_manifest/print_manifest {} \; -#--- -- shell set -x; - rtr_origin=../../../rtr-origin/rtr-origin; - $rtr_origin --cronjob rcynic-data/authenticated && - $rtr_origin --show ---- -- name: Alice - roa_request_del: - - asn: 42 - ipv4: 192.0.2.0/30-32,192.0.2.32/32 - ipv6: 2002:0a00::/32-128 - roa_request_add: - - asn: 666 - ipv4: 192.0.2.0/30-32,192.0.2.32/32 - ipv6: 2002:0a00::/32-128 ---- -- shell set -x; - rtr_origin=../../../rtr-origin/rtr-origin; - $rtr_origin --cronjob rcynic-data/authenticated && - $rtr_origin --show diff --git a/rpkid/tests/smoketest.4.yaml b/rpkid/tests/smoketest.4.yaml deleted file mode 100644 index c0d446bc..00000000 --- a/rpkid/tests/smoketest.4.yaml +++ /dev/null @@ -1,72 +0,0 @@ -# $Id$ - -# Copyright (C) 2011 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -# This is a test of what happens when certificates go missing in -# action, IRDB data expires, etc. Expected result: -# -# - RIR, R0, and Alice get certs -# - Bob gets no cert at all -# - RO and Alice have short-lived certs, which go away -# - Test ends with only RIR having a cert -# -# If run on a very slow machine, the 60 second expiration may have -# already passed by the time everything is up and running, in which -# case nobody but RIR will ever get any certs. 
-# -# The extra cycles with no sleep are deliberate, at one point we had a -# cycle where parent would issue a cert that had already expired, -# which led to a tight loop of revocation and reissuance every cycle; -# we're checking to make sure that doesn't happen anymore, although -# things should never get to that point because list_response should -# discourage the child from ever asking for a cert in the first place. - -name: RIR -valid_for: 60 -kids: - - name: R0 - kids: - - name: Alice - ipv4: 192.0.2.1-192.0.2.33 - asn: 64533 - - name: Bob - ipv4: 192.0.2.34-192.0.2.65 - valid_for: -10 ---- ---- ---- ---- ---- ---- -- sleep 30 ---- -- sleep 30 ---- ---- ---- diff --git a/rpkid/tests/smoketest.5.yaml b/rpkid/tests/smoketest.5.yaml deleted file mode 100644 index c6304dfc..00000000 --- a/rpkid/tests/smoketest.5.yaml +++ /dev/null @@ -1,65 +0,0 @@ -# $Id$ - -# Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -rootd: - lifetime: 2m30s -name: RIR -crl_interval: 1m30s -regen_margin: 2m -valid_for: 1h -kids: - - name: R0 - kids: - - name: Alice - ipv4: 192.0.2.1-192.0.2.33 - asn: 64533 - roa_request: - - asn: 42 - ipv4: 192.0.2.32/32 - - name: Bob - ipv4: 192.0.2.44-192.0.2.100 - ipv4: 10.3.0.0/16 - roa_request: - - asn: 666 - ipv4: 10.3.0.44/32 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 ---- -- sleep 30 diff --git a/rpkid/tests/smoketest.6.yaml b/rpkid/tests/smoketest.6.yaml deleted file mode 100644 index e8d65433..00000000 --- a/rpkid/tests/smoketest.6.yaml +++ /dev/null @@ -1,81 +0,0 @@ -# $Id$ - -# Copyright (C) 2009-2013 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -name: RIR -valid_for: 4w -kids: - - name: Alice - hosted_by: RIR - # - # To test immediate expiration - #valid_for: 5m - # - # To test what happens when we reach rgen_margin - #valid_for: 2w2h5m - #valid_for: 2w5m - # - kids: - - name: Betty - hosted_by: RIR - kids: - - name: Carol - hosted_by: RIR - ghostbuster: | - BEGIN:VCARD - VERSION:4.0 - FN:Carol Clever - EMAIL:carol@example.org - END:VCARD - kids: - - name: Dana - hosted_by: RIR - kids: - - name: Eve - hosted_by: RIR - kids: - - name: Fiona - hosted_by: RIR - ipv4: 192.0.2.1-192.0.2.33 - asn: 64533 - ipv6: 2001:db8::44-2001:db8::100 - roa_request: - - asn: 64533 - ipv6: 2001:db8::80/121 - ghostbusters: - - | - BEGIN:VCARD - VERSION:4.0 - FN:Fiona Fearless - EMAIL:fiona@example.org - END:VCARD - - | - BEGIN:VCARD - VERSION:4.0 - FN:Frank Fearless - EMAIL:frank@example.org - END:VCARD ---- -- name: Fiona - add_as: 33 ---- -- name: Fiona - sub_as: 33 ---- ---- ---- ---- -#- name: Fiona -# valid_for: 365d diff --git a/rpkid/tests/smoketest.7.yaml b/rpkid/tests/smoketest.7.yaml deleted file mode 100644 index fedd2fff..00000000 --- a/rpkid/tests/smoketest.7.yaml +++ /dev/null @@ -1,77 +0,0 @@ -# $Id$ - -# Copyright (C) 2009 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -# Test configuration to generate some initial ROAs for Pradosh to use -# in testing his code, until Randy has the live testbed set up. - -# Python code to remove overlaps in a set of prefixes (needed to take -# something like this peval output and feed it into smoketest.py): -# -# import rpki.resource_set -# -# r = rpki.resource_set.resource_set_ipv4() -# -# for p in prefixes: -# r = r.union(rpki.resource_set.resource_set_ipv4(p)) -# -# print r -# -# -# where "prefixes" is a sequence of prefixes in text form, eg, what -# you'd get if you applied .split(", ") to the peval output below. - - -# ran.psg.com:/usr/home/randy> peval as3130 -# ({198.180.152.0/24, 198.180.153.0/24, 198.180.152.0/25, 198.180.152.128/25, 198.180.150.0/24, 198.180.151.0/24, 198.133.206.0/24, 192.83.230.0/24, 147.28.0.0/16, 147.28.128.0/17, 147.28.128.0/18, 147.28.192.0/18, 147.28.192.0/19, 147.28.192.0/20, 147.28.192.0/21, 147.28.192.0/22, 147.28.192.0/23, 147.28.192.0/24, 147.28.192.0/25, 147.28.192.0/26, 147.28.128.0/19, 147.28.128.0/20, 147.28.128.0/21, 147.28.128.0/22, 147.28.128.0/23, 147.28.128.0/24, 147.28.128.0/25, 147.28.128.0/26}) -# -# ran.psg.com:/usr/home/randy> peval as2914 -# ({216.167.0.0/17, 216.105.240.0/24, 216.44.0.0/16, 216.42.0.0/16, 213.198.0.0/17, 213.130.32.0/19, 212.119.0.0/19, 212.105.160.0/19, 211.130.96.0/19, 211.1.32.0/19, 211.1.60.0/22, 211.1.48.0/23, 211.1.32.0/20, 210.175.160.0/19, 209.243.96.0/20, 209.243.70.0/23, 209.238.0.0/16, 209.227.0.0/17, 209.207.128.0/17, 209.189.0.0/17, 209.170.0.0/18, 209.168.0.0/17, 209.162.64.0/18, 209.157.0.0/16, 209.139.128.0/18, 209.139.0.0/17, 209.130.0.0/17, 209.124.0.0/19, 209.112.96.0/20, 209.107.64.0/19, 209.107.0.0/18, 209.75.0.0/16, 209.70.0.0/16, 209.69.0.0/16, 209.59.32.0/19, 209.57.0.0/16, 209.43.128.0/17, 209.41.0.0/18, 209.39.0.0/16, 209.24.0.0/16, 209.21.0.0/18, 208.55.0.0/16, 207.241.0.0/17, 207.207.128.0/19, 207.206.0.0/17, 207.201.128.0/18, 207.199.0.0/17, 207.198.128.0/17, 207.197.128.0/17, 207.196.0.0/17, 
207.195.240.0/20, 207.159.0.0/18, 207.158.192.0/18, 207.156.128.0/17, 207.153.128.0/17, 207.152.64.0/18, 207.150.128.0/19, 207.150.0.0/17, 207.137.0.0/16, 207.126.254.0/23, 207.126.240.0/21, 207.111.64.0/18, 207.97.0.0/17, 207.91.64.0/18, 207.71.64.0/18, 207.67.128.0/17, 207.58.0.0/17, 207.56.0.0/15, 207.55.192.0/19, 207.55.128.0/18, 207.33.0.0/16, 207.32.64.0/18, 207.31.192.0/18, 207.22.64.0/18, 207.21.128.0/18, 207.21.0.0/17, 207.20.0.0/16, 206.252.0.0/19, 206.239.0.0/16, 206.222.32.0/19, 206.213.64.0/18, 206.197.192.0/24, 206.197.81.0/24, 206.184.0.0/16, 206.183.192.0/19, 206.169.194.0/24, 206.169.195.0/24, 206.169.186.0/24, 206.169.41.0/24, 206.166.128.0/18, 206.163.192.0/19, 206.163.128.0/18, 206.163.0.0/17, 206.86.0.0/16, 206.82.32.0/19, 206.80.32.0/19, 206.68.0.0/15, 206.58.0.0/16, 206.55.0.0/18, 206.54.0.0/18, 206.52.0.0/16, 206.50.0.0/16, 206.14.0.0/16, 205.238.0.0/18, 205.212.0.0/16, 205.157.128.0/20, 205.153.56.0/22, 205.149.160.0/19, 205.146.0.0/16, 204.247.0.0/16, 204.245.128.0/17, 204.233.0.0/16, 204.227.160.0/19, 204.200.0.0/14, 204.194.176.0/21, 204.170.0.0/15, 204.156.128.0/19, 204.156.0.0/19, 204.142.0.0/15, 204.141.0.0/16, 204.108.0.0/23, 204.75.146.0/24, 204.68.197.0/24, 204.62.232.0/24, 204.57.32.0/19, 204.42.0.0/16, 204.0.0.0/14, 204.0.43.0/24, 203.215.136.0/23, 203.208.120.0/21, 203.208.120.0/22, 203.208.124.0/22, 203.208.120.0/23, 203.208.122.0/23, 203.208.124.0/23, 203.208.126.0/23, 203.208.120.0/24, 203.208.121.0/24, 203.208.122.0/24, 203.208.123.0/24, 203.208.124.0/24, 203.208.125.0/24, 203.208.126.0/24, 203.208.127.0/24, 203.208.80.0/21, 203.208.80.0/22, 203.208.84.0/22, 203.208.80.0/23, 203.208.82.0/23, 203.208.84.0/23, 203.208.86.0/23, 203.208.80.0/24, 203.208.81.0/24, 203.208.82.0/24, 203.208.83.0/24, 203.208.84.0/24, 203.208.85.0/24, 203.208.86.0/24, 203.208.87.0/24, 203.205.112.0/20, 203.131.240.0/20, 203.131.248.0/21, 203.105.64.0/19, 203.105.80.0/21, 203.105.72.0/22, 203.78.192.0/20, 203.33.3.0/24, 203.32.132.0/24, 203.20.71.0/24, 
203.12.225.0/24, 202.237.244.0/24, 202.163.134.0/24, 202.163.132.0/24, 202.163.128.0/22, 202.163.128.0/24, 202.163.129.0/24, 202.163.130.0/24, 202.163.131.0/24, 202.153.208.0/20, 202.69.224.0/20, 202.68.64.0/20, 202.68.64.0/21, 202.68.72.0/21, 202.68.64.0/22, 202.68.68.0/22, 202.68.72.0/22, 202.68.76.0/22, 202.68.64.0/23, 202.68.66.0/23, 202.68.68.0/23, 202.68.70.0/23, 202.68.72.0/23, 202.68.74.0/23, 202.68.76.0/23, 202.68.78.0/23, 202.68.64.0/24, 202.68.65.0/24, 202.68.66.0/24, 202.68.67.0/24, 202.68.68.0/24, 202.68.69.0/24, 202.68.70.0/24, 202.68.71.0/24, 202.68.72.0/24, 202.68.73.0/24, 202.68.74.0/24, 202.68.75.0/24, 202.68.76.0/24, 202.68.77.0/24, 202.68.78.0/24, 202.68.79.0/24, 202.47.16.0/20, 202.23.124.0/24, 200.15.0.0/16, 199.245.16.0/20, 199.240.0.0/16, 199.236.0.0/14, 199.234.0.0/16, 199.224.0.0/20, 199.217.128.0/17, 199.212.0.0/24, 199.201.197.0/24, 199.184.226.0/24, 199.184.212.0/24, 199.164.210.0/24, 199.103.128.0/17, 199.73.40.0/23, 199.73.32.0/21, 199.4.64.0/18, 198.252.194.0/23, 198.247.0.0/16, 198.232.16.0/24, 198.172.0.0/15, 198.170.0.0/15, 198.170.208.0/24, 198.138.0.0/15, 198.106.0.0/15, 198.104.0.0/16, 198.88.0.0/16, 198.87.0.0/16, 198.84.16.0/20, 198.66.0.0/16, 198.64.0.0/15, 198.63.0.0/16, 195.234.244.0/22, 192.220.0.0/16, 192.217.0.0/16, 192.204.0.0/16, 192.195.85.0/24, 192.159.82.0/24, 192.147.176.0/22, 192.147.175.0/24, 192.147.160.0/21, 192.108.74.0/23, 192.102.248.0/24, 192.80.12.0/22, 192.67.240.0/23, 192.67.236.0/22, 192.41.219.0/24, 192.41.171.0/24, 192.11.188.0/24, 170.253.0.0/16, 170.250.0.0/16, 170.249.64.0/19, 170.249.0.0/19, 168.143.0.0/16, 165.254.0.0/16, 164.162.0.0/16, 161.58.0.0/16, 159.230.128.0/20, 159.230.138.0/24, 157.238.0.0/16, 157.107.0.0/16, 154.37.0.0/16, 140.174.0.0/16, 131.103.0.0/16, 130.94.0.0/16, 130.94.60.0/24, 129.250.0.0/16, 129.192.196.0/22, 129.7.136.0/24, 128.242.0.0/16, 128.241.0.0/16, 128.241.83.0/29, 128.121.0.0/16, 125.56.144.0/21, 125.56.152.0/21, 124.40.0.0/18, 124.40.0.0/19, 124.40.32.0/19, 
122.255.80.0/20, 120.29.160.0/19, 120.29.144.0/21, 119.161.104.0/21, 118.215.168.0/21, 118.215.136.0/21, 118.215.64.0/21, 118.214.208.0/21, 118.214.216.0/21, 117.104.128.0/19, 117.104.64.0/18, 117.103.176.0/20, 116.51.16.0/21, 96.17.167.0/24, 96.17.157.0/24, 96.17.155.0/24, 96.17.32.0/20, 96.16.224.0/21, 96.16.232.0/21, 96.16.240.0/21, 96.16.248.0/21, 96.6.224.0/20, 96.6.176.0/20, 96.6.144.0/20, 96.6.40.0/24, 91.186.160.0/19, 89.238.138.0/24, 83.231.128.0/17, 82.112.96.0/19, 81.93.208.0/20, 81.93.176.0/20, 81.93.189.0/24, 81.25.192.0/20, 81.20.64.0/20, 81.19.96.0/20, 80.68.16.0/21, 72.247.200.0/21, 72.247.128.0/21, 72.247.125.0/24, 72.247.56.0/22, 72.247.52.0/22, 72.246.32.0/21, 69.192.96.0/20, 69.192.32.0/20, 69.192.48.0/20, 69.55.56.0/23, 69.41.176.0/21, 69.41.168.0/21, 69.41.166.0/23, 69.41.165.0/24, 69.41.160.0/24, 66.249.144.0/24, 66.187.28.0/24, 64.7.64.0/19, 62.73.160.0/19, 61.251.96.0/20, 61.213.160.0/19, 61.213.144.0/20, 61.200.80.0/20, 61.200.80.0/21, 61.200.88.0/21, 61.120.144.0/20, 61.120.144.0/21, 61.120.152.0/21, 61.114.112.0/20, 61.114.120.0/21, 61.114.112.0/22, 61.58.32.0/20, 61.28.200.0/24, 61.28.199.0/24, 60.254.153.0/24, 60.254.132.0/22, 59.151.184.0/22}) -# -# ran.psg.com:/usr/home/randy> peval as1239 -# ({207.7.0.0/18, 204.248.180.0/25, 204.241.122.0/24, 204.217.244.0/24, 203.98.192.0/19, 193.188.96.0/23, 192.77.142.0/24, 192.31.36.0/24, 192.31.32.0/22, 192.23.224.0/21, 192.23.208.0/20, 192.23.76.0/24, 192.23.75.0/24, 163.183.0.0/16, 157.245.70.0/24, 134.32.0.0/16, 129.87.0.0/16, 85.237.96.0/19, 72.246.128.0/20, 65.168.150.0/23, 65.168.149.0/24, 63.172.252.0/22, 63.171.143.128/25, 63.169.52.128/25}) -# -# ran.psg.com:/usr/home/randy> peval as701 -# ({208.91.236.0/22, 203.33.196.0/24, 203.27.251.0/24, 198.80.148.0/24, 198.80.131.0/24, 157.130.103.144/30, 140.222.224.0/24, 65.243.171.0/24, 63.122.162.212/30, 63.116.191.0/24, 63.81.136.0/24, 17.0.0.0/8, 17.128.0.0/9}) - - -name: Alice - -valid_for: 2d - -ipv4: 
17.0.0.0/8,59.151.184.0/22,60.254.132.0/22,60.254.153.0/24,61.28.199.0-61.28.200.255,61.58.32.0/20,61.114.112.0/20,61.120.144.0/20,61.200.80.0/20,61.213.144.0-61.213.191.255,61.251.96.0/20,62.73.160.0/19,63.81.136.0/24,63.116.191.0/24,63.122.162.212/30,63.169.52.128/25,63.171.143.128/25,63.172.252.0/22,64.7.64.0/19,65.168.149.0-65.168.151.255,65.243.171.0/24,66.187.28.0/24,66.249.144.0/24,69.41.160.0/24,69.41.165.0-69.41.183.255,69.55.56.0/23,69.192.32.0/19,69.192.96.0/20,72.246.32.0/21,72.246.128.0/20,72.247.52.0-72.247.59.255,72.247.125.0/24,72.247.128.0/21,72.247.200.0/21,80.68.16.0/21,81.19.96.0/20,81.20.64.0/20,81.25.192.0/20,81.93.176.0/20,81.93.208.0/20,82.112.96.0/19,83.231.128.0/17,85.237.96.0/19,89.238.138.0/24,91.186.160.0/19,96.6.40.0/24,96.6.144.0/20,96.6.176.0/20,96.6.224.0/20,96.16.224.0/19,96.17.32.0/20,96.17.155.0/24,96.17.157.0/24,96.17.167.0/24,116.51.16.0/21,117.103.176.0/20,117.104.64.0-117.104.159.255,118.214.208.0/20,118.215.64.0/21,118.215.136.0/21,118.215.168.0/21,119.161.104.0/21,120.29.144.0/21,120.29.160.0/19,122.255.80.0/20,124.40.0.0/18,125.56.144.0/20,128.121.0.0/16,128.241.0.0-128.242.255.255,129.7.136.0/24,129.87.0.0/16,129.192.196.0/22,129.250.0.0/16,130.94.0.0/16,131.103.0.0/16,134.32.0.0/16,140.174.0.0/16,140.222.224.0/24,147.28.0.0/16,154.37.0.0/16,157.107.0.0/16,157.130.103.144/30,157.238.0.0/16,157.245.70.0/24,159.230.128.0/20,161.58.0.0/16,163.183.0.0/16,164.162.0.0/16,165.254.0.0/16,168.143.0.0/16,170.249.0.0/19,170.249.64.0/19,170.250.0.0/16,170.253.0.0/16,192.11.188.0/24,192.23.75.0-192.23.76.255,192.23.208.0-192.23.231.255,192.31.32.0-192.31.36.255,192.41.171.0/24,192.41.219.0/24,192.67.236.0-192.67.241.255,192.77.142.0/24,192.80.12.0/22,192.83.230.0/24,192.102.248.0/24,192.108.74.0/23,192.147.160.0/21,192.147.175.0-192.147.179.255,192.159.82.0/24,192.195.85.0/24,192.204.0.0/16,192.217.0.0/16,192.220.0.0/16,193.188.96.0/23,195.234.244.0/22,198.63.0.0-198.66.255.255,198.80.131.0/24,198.80.148.0/24,198.84.16.0/20,198.87.0.0-
198.88.255.255,198.104.0.0/16,198.106.0.0/15,198.133.206.0/24,198.138.0.0/15,198.170.0.0-198.173.255.255,198.180.150.0-198.180.153.255,198.232.16.0/24,198.247.0.0/16,198.252.194.0/23,199.4.64.0/18,199.73.32.0-199.73.41.255,199.103.128.0/17,199.164.210.0/24,199.184.212.0/24,199.184.226.0/24,199.201.197.0/24,199.212.0.0/24,199.217.128.0/17,199.224.0.0/20,199.234.0.0/16,199.236.0.0-199.240.255.255,199.245.16.0/20,200.15.0.0/16,202.23.124.0/24,202.47.16.0/20,202.68.64.0/20,202.69.224.0/20,202.153.208.0/20,202.163.128.0-202.163.132.255,202.163.134.0/24,202.237.244.0/24,203.12.225.0/24,203.20.71.0/24,203.27.251.0/24,203.32.132.0/24,203.33.3.0/24,203.33.196.0/24,203.78.192.0/20,203.98.192.0/19,203.105.64.0/19,203.131.240.0/20,203.205.112.0/20,203.208.80.0/21,203.208.120.0/21,203.215.136.0/23,204.0.0.0/14,204.42.0.0/16,204.57.32.0/19,204.62.232.0/24,204.68.197.0/24,204.75.146.0/24,204.108.0.0/23,204.141.0.0-204.143.255.255,204.156.0.0/19,204.156.128.0/19,204.170.0.0/15,204.194.176.0/21,204.200.0.0/14,204.217.244.0/24,204.227.160.0/19,204.233.0.0/16,204.241.122.0/24,204.245.128.0/17,204.247.0.0/16,204.248.180.0/25,205.146.0.0/16,205.149.160.0/19,205.153.56.0/22,205.157.128.0/20,205.212.0.0/16,205.238.0.0/18,206.14.0.0/16,206.50.0.0/16,206.52.0.0/16,206.54.0.0/18,206.55.0.0/18,206.58.0.0/16,206.68.0.0/15,206.80.32.0/19,206.82.32.0/19,206.86.0.0/16,206.163.0.0-206.163.223.255,206.166.128.0/18,206.169.41.0/24,206.169.186.0/24,206.169.194.0/23,206.183.192.0/19,206.184.0.0/16,206.197.81.0/24,206.197.192.0/24,206.213.64.0/18,206.222.32.0/19,206.239.0.0/16,206.252.0.0/19,207.7.0.0/18,207.20.0.0-207.21.191.255,207.22.64.0/18,207.31.192.0/18,207.32.64.0/18,207.33.0.0/16,207.55.128.0-207.55.223.255,207.56.0.0-207.58.127.255,207.67.128.0/17,207.71.64.0/18,207.91.64.0/18,207.97.0.0/17,207.111.64.0/18,207.126.240.0/21,207.126.254.0/23,207.137.0.0/16,207.150.0.0-207.150.159.255,207.152.64.0/18,207.153.128.0/17,207.156.128.0/17,207.158.192.0-207.159.63.255,207.195.240.0-207.196.127.255,207
.197.128.0/17,207.198.128.0-207.199.127.255,207.201.128.0/18,207.206.0.0/17,207.207.128.0/19,207.241.0.0/17,208.55.0.0/16,208.91.236.0/22,209.21.0.0/18,209.24.0.0/16,209.39.0.0/16,209.41.0.0/18,209.43.128.0/17,209.57.0.0/16,209.59.32.0/19,209.69.0.0-209.70.255.255,209.75.0.0/16,209.107.0.0-209.107.95.255,209.112.96.0/20,209.124.0.0/19,209.130.0.0/17,209.139.0.0-209.139.191.255,209.157.0.0/16,209.162.64.0/18,209.168.0.0/17,209.170.0.0/18,209.189.0.0/17,209.207.128.0/17,209.227.0.0/17,209.238.0.0/16,209.243.70.0/23,209.243.96.0/20,210.175.160.0/19,211.1.32.0/19,211.130.96.0/19,212.105.160.0/19,212.119.0.0/19,213.130.32.0/19,213.198.0.0/17,216.42.0.0/16,216.44.0.0/16,216.105.240.0/24,216.167.0.0/17 - -roa_request: - - - asn: 3130 - ipv4: 198.180.152.0/24,198.180.153.0/24,198.180.152.0/25,198.180.152.128/25,198.180.150.0/24,198.180.151.0/24,198.133.206.0/24,192.83.230.0/24,147.28.0.0/16-24 - - - asn: 2914 - ipv4: 216.167.0.0/17,216.105.240.0/24,216.44.0.0/16,216.42.0.0/16,213.198.0.0/17,213.130.32.0/19,212.119.0.0/19,212.105.160.0/19,211.130.96.0/19,211.1.32.0/19,211.1.60.0/22,211.1.48.0/23,211.1.32.0/20,210.175.160.0/19,209.243.96.0/20,209.243.70.0/23,209.238.0.0/16,209.227.0.0/17,209.207.128.0/17,209.189.0.0/17,209.170.0.0/18,209.168.0.0/17,209.162.64.0/18,209.157.0.0/16,209.139.128.0/18,209.139.0.0/17,209.130.0.0/17,209.124.0.0/19,209.112.96.0/20,209.107.64.0/19,209.107.0.0/18,209.75.0.0/16,209.70.0.0/16,209.69.0.0/16,209.59.32.0/19,209.57.0.0/16,209.43.128.0/17,209.41.0.0/18,209.39.0.0/16,209.24.0.0/16,209.21.0.0/18,208.55.0.0/16,207.241.0.0/17,207.207.128.0/19,207.206.0.0/17,207.201.128.0/18,207.199.0.0/17,207.198.128.0/17,207.197.128.0/17,207.196.0.0/17,207.195.240.0/20,207.159.0.0/18,207.158.192.0/18,207.156.128.0/17,207.153.128.0/17,207.152.64.0/18,207.150.128.0/19,207.150.0.0/17,207.137.0.0/16,207.126.254.0/23,207.126.240.0/21,207.111.64.0/18,207.97.0.0/17,207.91.64.0/18,207.71.64.0/18,207.67.128.0/17,207.58.0.0/17,207.56.0.0/15,207.55.192.0/19,207.55.128.0/18,
207.33.0.0/16,207.32.64.0/18,207.31.192.0/18,207.22.64.0/18,207.21.128.0/18,207.21.0.0/17,207.20.0.0/16,206.252.0.0/19,206.239.0.0/16,206.222.32.0/19,206.213.64.0/18,206.197.192.0/24,206.197.81.0/24,206.184.0.0/16,206.183.192.0/19,206.169.194.0/24,206.169.195.0/24,206.169.186.0/24,206.169.41.0/24,206.166.128.0/18,206.163.192.0/19,206.163.128.0/18,206.163.0.0/17,206.86.0.0/16,206.82.32.0/19,206.80.32.0/19,206.68.0.0/15,206.58.0.0/16,206.55.0.0/18,206.54.0.0/18,206.52.0.0/16,206.50.0.0/16,206.14.0.0/16,205.238.0.0/18,205.212.0.0/16,205.157.128.0/20,205.153.56.0/22,205.149.160.0/19,205.146.0.0/16,204.247.0.0/16,204.245.128.0/17,204.233.0.0/16,204.227.160.0/19,204.200.0.0/14,204.194.176.0/21,204.170.0.0/15,204.156.128.0/19,204.156.0.0/19,204.142.0.0/15,204.141.0.0/16,204.108.0.0/23,204.75.146.0/24,204.68.197.0/24,204.62.232.0/24,204.57.32.0/19,204.42.0.0/16,204.0.0.0/14,204.0.43.0/24,203.215.136.0/23,203.208.120.0/21,203.208.120.0/22,203.208.124.0/22,203.208.120.0/23,203.208.122.0/23,203.208.124.0/23,203.208.126.0/23,203.208.120.0/24,203.208.121.0/24,203.208.122.0/24,203.208.123.0/24,203.208.124.0/24,203.208.125.0/24,203.208.126.0/24,203.208.127.0/24,203.208.80.0/21,203.208.80.0/22,203.208.84.0/22,203.208.80.0/23,203.208.82.0/23,203.208.84.0/23,203.208.86.0/23,203.208.80.0/24,203.208.81.0/24,203.208.82.0/24,203.208.83.0/24,203.208.84.0/24,203.208.85.0/24,203.208.86.0/24,203.208.87.0/24,203.205.112.0/20,203.131.240.0/20,203.131.248.0/21,203.105.64.0/19,203.105.80.0/21,203.105.72.0/22,203.78.192.0/20,203.33.3.0/24,203.32.132.0/24,203.20.71.0/24,203.12.225.0/24,202.237.244.0/24,202.163.134.0/24,202.163.132.0/24,202.163.128.0/22,202.163.128.0/24,202.163.129.0/24,202.163.130.0/24,202.163.131.0/24,202.153.208.0/20,202.69.224.0/20,202.68.64.0/20,202.68.64.0/21,202.68.72.0/21,202.68.64.0/22,202.68.68.0/22,202.68.72.0/22,202.68.76.0/22,202.68.64.0/23,202.68.66.0/23,202.68.68.0/23,202.68.70.0/23,202.68.72.0/23,202.68.74.0/23,202.68.76.0/23,202.68.78.0/23,202.68.64.0/24,202.68.65.
0/24,202.68.66.0/24,202.68.67.0/24,202.68.68.0/24,202.68.69.0/24,202.68.70.0/24,202.68.71.0/24,202.68.72.0/24,202.68.73.0/24,202.68.74.0/24,202.68.75.0/24,202.68.76.0/24,202.68.77.0/24,202.68.78.0/24,202.68.79.0/24,202.47.16.0/20,202.23.124.0/24,200.15.0.0/16,199.245.16.0/20,199.240.0.0/16,199.236.0.0/14,199.234.0.0/16,199.224.0.0/20,199.217.128.0/17,199.212.0.0/24,199.201.197.0/24,199.184.226.0/24,199.184.212.0/24,199.164.210.0/24,199.103.128.0/17,199.73.40.0/23,199.73.32.0/21,199.4.64.0/18,198.252.194.0/23,198.247.0.0/16,198.232.16.0/24,198.172.0.0/15,198.170.0.0/15,198.170.208.0/24,198.138.0.0/15,198.106.0.0/15,198.104.0.0/16,198.88.0.0/16,198.87.0.0/16,198.84.16.0/20,198.66.0.0/16,198.64.0.0/15,198.63.0.0/16,195.234.244.0/22,192.220.0.0/16,192.217.0.0/16,192.204.0.0/16,192.195.85.0/24,192.159.82.0/24,192.147.176.0/22,192.147.175.0/24,192.147.160.0/21,192.108.74.0/23,192.102.248.0/24,192.80.12.0/22,192.67.240.0/23,192.67.236.0/22,192.41.219.0/24,192.41.171.0/24,192.11.188.0/24,170.253.0.0/16,170.250.0.0/16,170.249.64.0/19,170.249.0.0/19,168.143.0.0/16,165.254.0.0/16,164.162.0.0/16,161.58.0.0/16,159.230.128.0/20,159.230.138.0/24,157.238.0.0/16,157.107.0.0/16,154.37.0.0/16,140.174.0.0/16,131.103.0.0/16,130.94.0.0/16,130.94.60.0/24,129.250.0.0/16,129.192.196.0/22,129.7.136.0/24,128.242.0.0/16,128.241.0.0/16,128.241.83.0/29,128.121.0.0/16,125.56.144.0/21,125.56.152.0/21,124.40.0.0/18,124.40.0.0/19,124.40.32.0/19,122.255.80.0/20,120.29.160.0/19,120.29.144.0/21,119.161.104.0/21,118.215.168.0/21,118.215.136.0/21,118.215.64.0/21,118.214.208.0/21,118.214.216.0/21,117.104.128.0/19,117.104.64.0/18,117.103.176.0/20,116.51.16.0/21,96.17.167.0/24,96.17.157.0/24,96.17.155.0/24,96.17.32.0/20,96.16.224.0/21,96.16.232.0/21,96.16.240.0/21,96.16.248.0/21,96.6.224.0/20,96.6.176.0/20,96.6.144.0/20,96.6.40.0/24,91.186.160.0/19,89.238.138.0/24,83.231.128.0/17,82.112.96.0/19,81.93.208.0/20,81.93.176.0/20,81.93.189.0/24,81.25.192.0/20,81.20.64.0/20,81.19.96.0/20,80.68.16.0/21,72.247.200.0
/21,72.247.128.0/21,72.247.125.0/24,72.247.56.0/22,72.247.52.0/22,72.246.32.0/21,69.192.96.0/20,69.192.32.0/20,69.192.48.0/20,69.55.56.0/23,69.41.176.0/21,69.41.168.0/21,69.41.166.0/23,69.41.165.0/24,69.41.160.0/24,66.249.144.0/24,66.187.28.0/24,64.7.64.0/19,62.73.160.0/19,61.251.96.0/20,61.213.160.0/19,61.213.144.0/20,61.200.80.0/20,61.200.80.0/21,61.200.88.0/21,61.120.144.0/20,61.120.144.0/21,61.120.152.0/21,61.114.112.0/20,61.114.120.0/21,61.114.112.0/22,61.58.32.0/20,61.28.200.0/24,61.28.199.0/24,60.254.153.0/24,60.254.132.0/22,59.151.184.0/22 - - - asn: 1239 - ipv4: 207.7.0.0/18,204.248.180.0/25,204.241.122.0/24,204.217.244.0/24,203.98.192.0/19,193.188.96.0/23,192.77.142.0/24,192.31.36.0/24,192.31.32.0/22,192.23.224.0/21,192.23.208.0/20,192.23.76.0/24,192.23.75.0/24,163.183.0.0/16,157.245.70.0/24,134.32.0.0/16,129.87.0.0/16,85.237.96.0/19,72.246.128.0/20,65.168.150.0/23,65.168.149.0/24,63.172.252.0/22,63.171.143.128/25,63.169.52.128/25 - - - asn: 701 - ipv4: 208.91.236.0/22,203.33.196.0/24,203.27.251.0/24,198.80.148.0/24,198.80.131.0/24,157.130.103.144/30,140.222.224.0/24,65.243.171.0/24,63.122.162.212/30,63.116.191.0/24,63.81.136.0/24,17.0.0.0/8,17.128.0.0/9 - ---- -- shell set -x; - find publication -type f -name '*.roa' - -print -exec ../../../utils/print_roa/print_roa {} \; - ; - rtr_origin=../../../rtr-origin/rtr-origin; - $rtr_origin --cronjob rcynic-data/authenticated && - $rtr_origin --show diff --git a/rpkid/tests/smoketest.8.yaml b/rpkid/tests/smoketest.8.yaml deleted file mode 100644 index cd6d1e7a..00000000 --- a/rpkid/tests/smoketest.8.yaml +++ /dev/null @@ -1,41 +0,0 @@ -# $Id$ - -# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -name: RIR -valid_for: 2d -kids: - - name: Alice - hosted_by: RIR - kids: - - name: Betty - hosted_by: RIR - kids: - - name: Carol - hosted_by: RIR - kids: - - name: Dana - hosted_by: RIR - kids: - - name: Eve - hosted_by: RIR - kids: - - name: Fiona - hosted_by: RIR - ipv4: 10.0.0.0/8 - asn: 64533 - roa_request: - - asn: 64533 - ipv4: 10.0.0.0/24 diff --git a/rpkid/tests/smoketest.9.yaml b/rpkid/tests/smoketest.9.yaml deleted file mode 100644 index 3efc4f08..00000000 --- a/rpkid/tests/smoketest.9.yaml +++ /dev/null @@ -1,849 +0,0 @@ -# $Id$ - -# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
-# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -name: RIR -crl_interval: 30s -regen_margin: 30s -valid_for: 2m -kids: - - name: R0 - kids: - - name: Alice - ipv4: 192.0.2.1-192.0.2.33 - asn: 64533 - roa_request: - - asn: 42 - ipv4: 192.0.2.32/32 - - name: Bob - ipv4: 192.0.2.44-192.0.2.100 - ipv4: 10.3.0.0/16 - roa_request: - - asn: 666 - ipv4: 10.3.0.44/32 ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m 
-- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - 
valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- 
name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - 
valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- 
name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m ---- -- sleep 15 -- name: RIR - valid_for: 2m -- name: R0 - valid_for: 2m -- name: Alice - valid_for: 2m -- name: Bob - valid_for: 2m diff --git a/rpkid/tests/smoketest.clean.sql b/rpkid/tests/smoketest.clean.sql deleted file mode 100644 index 9f5ff9fb..00000000 --- 
a/rpkid/tests/smoketest.clean.sql +++ /dev/null @@ -1,54 +0,0 @@ --- $Id$ - --- Copyright (C) 2009 Internet Systems Consortium ("ISC") --- --- Permission to use, copy, modify, and distribute this software for any --- purpose with or without fee is hereby granted, provided that the above --- copyright notice and this permission notice appear in all copies. --- --- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH --- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY --- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, --- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM --- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE --- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR --- PERFORMANCE OF THIS SOFTWARE. - --- Clean up databases left behind by smoketest.py et al. - -DROP DATABASE IF EXISTS irdb0; -DROP DATABASE IF EXISTS irdb1; -DROP DATABASE IF EXISTS irdb2; -DROP DATABASE IF EXISTS irdb3; -DROP DATABASE IF EXISTS irdb4; -DROP DATABASE IF EXISTS irdb5; -DROP DATABASE IF EXISTS irdb6; -DROP DATABASE IF EXISTS irdb7; -DROP DATABASE IF EXISTS irdb8; -DROP DATABASE IF EXISTS irdb9; -DROP DATABASE IF EXISTS irdb10; -DROP DATABASE IF EXISTS irdb11; -DROP DATABASE IF EXISTS rpki0; -DROP DATABASE IF EXISTS rpki1; -DROP DATABASE IF EXISTS rpki2; -DROP DATABASE IF EXISTS rpki3; -DROP DATABASE IF EXISTS rpki4; -DROP DATABASE IF EXISTS rpki5; -DROP DATABASE IF EXISTS rpki6; -DROP DATABASE IF EXISTS rpki7; -DROP DATABASE IF EXISTS rpki8; -DROP DATABASE IF EXISTS rpki9; -DROP DATABASE IF EXISTS rpki10; -DROP DATABASE IF EXISTS rpki11; -DROP DATABASE IF EXISTS pubd0; -DROP DATABASE IF EXISTS pubd1; -DROP DATABASE IF EXISTS pubd2; -DROP DATABASE IF EXISTS pubd3; -DROP DATABASE IF EXISTS pubd4; -DROP DATABASE IF EXISTS pubd5; -DROP DATABASE IF EXISTS pubd6; -DROP DATABASE IF EXISTS pubd7; -DROP DATABASE IF EXISTS pubd8; -DROP DATABASE 
IF EXISTS pubd9; -DROP DATABASE IF EXISTS pubd10; -DROP DATABASE IF EXISTS pubd11; diff --git a/rpkid/tests/smoketest.py b/rpkid/tests/smoketest.py deleted file mode 100644 index 28bedaa4..00000000 --- a/rpkid/tests/smoketest.py +++ /dev/null @@ -1,1630 +0,0 @@ -#!/usr/bin/env python - -""" -Test framework to configure and drive a collection of rpkid.py and -old_irdbd.py instances under control of a master script. - -yaml_file is a YAML description the tests to be run, and is intended -to be implementation-agnostic. - -CONFIG contains settings for various implementation-specific -things that don't belong in yaml_file. -""" - -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -# pylint: disable=W0621 - -import os -import yaml -import subprocess -import signal -import time -import argparse -import sys -import errno -import rpki.resource_set -import rpki.sundial -import rpki.x509 -import rpki.http -import rpki.log -import rpki.left_right -import rpki.config -import rpki.publication -import rpki.async - -from rpki.mysql_import import MySQLdb - -os.environ["TZ"] = "UTC" -time.tzset() - -parser = argparse.ArgumentParser(description = __doc__) -parser.add_argument("-c", "--config", - help = "configuration file") -parser.add_argument("--profile", action = "store_true", - help = "enable profiling") -parser.add_argument("-y", action = "store_true", - help = "ignored, present only for backwards compatability") -parser.add_argument("yaml_file", type = argparse.FileType("r"), - help = "YAML description of test network") -args = parser.parse_args() - -cfg = rpki.config.parser(args.config, "smoketest", allow_missing = True) - -# Load the YAML script early, so we can report errors ASAP - -yaml_script = [y for y in yaml.safe_load_all(args.yaml_file)] - -# Define port allocator early, so we can use it while reading config - -def allocate_port(): - """ - Allocate a TCP port number. - """ - global base_port - p = base_port - base_port += 1 - return p - -# Most filenames in the following are relative to the working directory. 
- -smoketest_name = cfg.get("smoketest_name", "smoketest") -smoketest_dir = cfg.get("smoketest_dir", smoketest_name + ".dir") - -irdb_db_pass = cfg.get("irdb_db_pass", "fnord") -rpki_db_pass = cfg.get("rpki_db_pass", "fnord") -pubd_db_pass = cfg.get("pubd_db_pass", "fnord") -pubd_db_name = cfg.get("pubd_db_name", "pubd0") -pubd_db_user = cfg.get("pubd_db_user", "pubd") - -base_port = int(cfg.get("base_port", "4400")) - -rsyncd_port = allocate_port() -rootd_port = allocate_port() -pubd_port = allocate_port() - -rsyncd_module = cfg.get("rsyncd_module", smoketest_name) -rootd_sia = cfg.get("rootd_sia", "rsync://localhost:%d/%s/" % (rsyncd_port, rsyncd_module)) - -rootd_name = cfg.get("rootd_name", "rootd") -rsyncd_name = cfg.get("rsyncd_name", "rsyncd") -rcynic_name = cfg.get("rcynic_name", "rcynic") -pubd_name = cfg.get("pubd_name", "pubd") - -prog_python = cfg.get("prog_python", sys.executable) -prog_rpkid = cfg.get("prog_rpkid", "../../rpkid") -prog_irdbd = cfg.get("prog_irdbd", "../old_irdbd.py") -prog_poke = cfg.get("prog_poke", "../testpoke.py") -prog_rootd = cfg.get("prog_rootd", "../../rootd") -prog_pubd = cfg.get("prog_pubd", "../../pubd") -prog_rsyncd = cfg.get("prog_rsyncd", "rsync") -prog_rcynic = cfg.get("prog_rcynic", "../../../rcynic/rcynic") -prog_openssl = cfg.get("prog_openssl", "../../../openssl/openssl/apps/openssl") - -rcynic_stats = cfg.get("rcynic_stats", "echo ; ../../../rcynic/rcynic-text %s.xml ; echo" % rcynic_name) - -rpki_sql_file = cfg.get("rpki_sql_file", "../rpkid.sql") -irdb_sql_file = cfg.get("irdb_sql_file", "old_irdbd.sql") -pub_sql_file = cfg.get("pub_sql_file", "../pubd.sql") - -startup_delay = int(cfg.get("startup_delay", "10")) - -rsyncd_dir = None -pubd_ta = None -pubd_irbe_key = None -pubd_irbe_cert = None -pubd_pubd_cert = None - -pubd_last_cms_time = None - -ecdsa_params = None - -class CantRekeyYAMLLeaf(Exception): - """ - Can't rekey YAML leaf. 
- """ - -class CouldntIssueBSCEECertificate(Exception): - """ - Couldn't issue BSC EE certificate - """ - -sql_conversions = MySQLdb.converters.conversions.copy() -sql_conversions.update({ - rpki.sundial.datetime : MySQLdb.converters.DateTime2literal, - MySQLdb.converters.FIELD_TYPE.DATETIME : rpki.sundial.datetime.DateTime_or_None }) - -def main(): - """ - Main program. - """ - - rpki.log.init(smoketest_name, use_syslog = False) - rpki.log.info("Starting") - - pubd_process = None - rootd_process = None - rsyncd_process = None - - rpki_sql = mangle_sql(rpki_sql_file) - irdb_sql = mangle_sql(irdb_sql_file) - pubd_sql = mangle_sql(pub_sql_file) - - rpki.log.info("Initializing test directory") - - # Connect to test directory, creating it if necessary - try: - os.chdir(smoketest_dir) - except OSError: - os.makedirs(smoketest_dir) - os.chdir(smoketest_dir) - - # Now that we're in the right directory, we can figure out whether - # we have a private openssl executable to use - global prog_openssl - if not os.path.exists(prog_openssl): - prog_openssl = "openssl" - - # Discard everything but keys, which take a while to generate. - # Apparently os.walk() can't tell the difference between directories - # and symlinks to directories, so we have to handle both. 
- for root, dirs, files in os.walk(".", topdown = False): - for fn in files: - if not fn.endswith(".key"): - os.remove(os.path.join(root, fn)) - for d in dirs: - try: - os.rmdir(os.path.join(root, d)) - except OSError, e: - if e.errno == errno.ENOTDIR: - os.remove(os.path.join(root, d)) - else: - raise - - rpki.log.info("Reading master YAML configuration") - y = yaml_script.pop(0) - - rpki.log.info("Constructing internal allocation database") - db = allocation_db(y) - - rpki.log.info("Constructing BPKI keys and certs for rootd") - setup_bpki_cert_chain(rootd_name, ee = ("RPKI",)) - - rpki.log.info("Constructing BPKI keys and certs for pubd") - setup_bpki_cert_chain(pubd_name, ee = ("PUBD", "IRBE")) - - - for a in db: - a.setup_bpki_certs() - - setup_publication(pubd_sql) - setup_rootd(db.root, y.get("rootd", {})) - setup_rsyncd() - setup_rcynic() - - for a in db.engines: - a.setup_conf_file() - a.setup_sql(rpki_sql, irdb_sql) - a.sync_sql() - - try: - - rpki.log.info("Starting rootd") - rootd_process = subprocess.Popen((prog_python, prog_rootd, "-d", "-c", rootd_name + ".conf")) - - rpki.log.info("Starting pubd") - pubd_process = subprocess.Popen((prog_python, prog_pubd, "-d", "-c", pubd_name + ".conf") + - (("-p", pubd_name + ".prof") if args.profile else ())) - - rpki.log.info("Starting rsyncd") - rsyncd_process = subprocess.Popen((prog_rsyncd, "--daemon", "--no-detach", "--config", rsyncd_name + ".conf")) - - # Start rpkid and irdbd instances - for a in db.engines: - a.run_daemons() - - # From this point on we'll be running event-driven, so the rest of - # the code until final exit is all closures. 
- - def start(): - rpki.async.iterator(db.engines, create_rpki_objects, created_rpki_objects) - - def create_rpki_objects(iterator, a): - a.create_rpki_objects(iterator) - - def created_rpki_objects(): - - # Set pubd's BPKI CRL - set_pubd_crl(yaml_loop) - - def yaml_loop(): - - # This is probably where we should be updating expired BPKI - # objects, particular CRLs - - rpki.log.info("Running cron for all RPKI engines") - rpki.async.iterator(db.engines, run_cron, run_yaml) - - def run_cron(iterator, a): - a.run_cron(iterator) - - def run_yaml(): - - # Run rcynic to check results - run_rcynic() - - # Apply next delta if we have one; otherwise, we're done. - if yaml_script: - rpki.log.info("Applying deltas") - db.apply_delta(yaml_script.pop(0), apply_delta_done) - else: - rpki.log.info("No more deltas to apply, done") - rpki.async.exit_event_loop() - - def apply_delta_done(): - - # Resync IRDBs - for a in db.engines: - a.sync_sql() - - # Loop until we run out of control YAML - yaml_loop() - - rpki.log.info("Sleeping %d seconds while daemons start up" % startup_delay) - rpki.async.timer(start).set(rpki.sundial.timedelta(seconds = startup_delay)) - rpki.async.event_loop() - - # At this point we have gone into event-driven code. - - rpki.log.info("Event loop exited normally") - - except Exception, e: - - rpki.log.info("Event loop exited with an exception: %r" % e) - rpki.log.traceback() - - finally: - - rpki.log.info("Cleaning up") - for a in db.engines: - a.kill_daemons() - for proc, name in ((rootd_process, "rootd"), - (pubd_process, "pubd"), - (rsyncd_process, "rsyncd")): - # pylint: disable=E1103 - if proc is not None and proc.poll() is None: - rpki.log.info("Killing %s, pid %s" % (name, proc.pid)) - try: - proc.terminate() - except OSError: - pass - if proc is not None: - rpki.log.info("Daemon %s, pid %s exited with code %s" % (name, proc.pid, proc.wait())) - -def cmd_sleep(cb, interval): - """ - Set an alarm, then wait for it to go off. 
- """ - howlong = rpki.sundial.timedelta.parse(interval) - rpki.log.info("Sleeping %r" % howlong) - rpki.async.timer(cb).set(howlong) - -def cmd_shell(cb, *cmd): - """ - Run a shell command. - """ - cmd = " ".join(cmd) - status = subprocess.call(cmd, shell = True) - rpki.log.info("Shell command returned status %d" % status) - cb() - -def cmd_echo(cb, *words): - """ - Echo some text to the log. - """ - rpki.log.note(" ".join(words)) - cb() - -## @var cmds -# Dispatch table for commands embedded in delta sections - -cmds = { "sleep" : cmd_sleep, - "shell" : cmd_shell, - "echo" : cmd_echo } - -class roa_request(object): - """ - Representation for a roa_request object. - """ - - def __init__(self, asn, ipv4, ipv6): - self.asn = asn - self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None - self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None - - def __eq__(self, other): - return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6 - - def __hash__(self): - v4 = tuple(self.v4) if self.v4 is not None else None - v6 = tuple(self.v6) if self.v6 is not None else None - return self.asn.__hash__() + v4.__hash__() + v6.__hash__() - - def __str__(self): - if self.v4 and self.v6: s = str(self.v4) + "," + str(self.v6) - elif self.v4: s = str(self.v4) - else: s = str(self.v6) - return "%s: %s" % (self.asn, s) - - @classmethod - def parse(cls, yaml): - return cls(yaml.get("asn"), yaml.get("ipv4"), yaml.get("ipv6")) - -class router_cert(object): - """ - Representation for a router_cert object. 
- """ - - _ecparams = None - - @classmethod - def ecparams(cls): - if cls._ecparams is None: - cls._ecparams = rpki.x509.KeyParams.generateEC() - return cls._ecparams - - def __init__(self, asn, router_id): - self.asn = rpki.resource_set.resource_set_as("".join(str(asn).split())) - self.router_id = router_id - self.keypair = rpki.x509.ECDSA.generate(self.ecparams()) - self.pkcs10 = rpki.x509.PKCS10.create(keypair = self.keypair) - self.gski = self.pkcs10.gSKI() - self.cn = "ROUTER-%08x" % self.asn[0].min - self.sn = "%08x" % self.router_id - self.eku = rpki.oids.id_kp_bgpsec_router - - def __eq__(self, other): - return self.asn == other.asn and self.sn == other.sn and self.gski == other.gski - - def __hash__(self): - v6 = tuple(self.v6) if self.v6 is not None else None - return tuple(self.asn).__hash__() + sn.__hash__() + self.gski.__hash__() - - def __str__(self): - return "%s: %s: %s" % (self.asn, self.cn, self.sn, self.gski) - - @classmethod - def parse(cls, yaml): - return cls(yaml.get("asn"), yaml.get("router_id")) - -class allocation_db(list): - """ - Representation of all the entities and allocations in the test - system. Almost everything is generated out of this database. - """ - - def __init__(self, yaml): - """ - Initialize database from the (first) YAML document. 
- """ - - list.__init__(self) - self.root = allocation(yaml, self) - assert self.root.is_root - if self.root.crl_interval is None: - self.root.crl_interval = rpki.sundial.timedelta.parse(cfg.get("crl_interval", "1d")).convert_to_seconds() - if self.root.regen_margin is None: - self.root.regen_margin = rpki.sundial.timedelta.parse(cfg.get("regen_margin", "1d")).convert_to_seconds() - for a in self: - if a.sia_base is None: - a.sia_base = (rootd_sia + "root/trunk/" if a.is_root else a.parent.sia_base) + a.name + "/" - if a.base.valid_until is None: - a.base.valid_until = a.parent.base.valid_until - if a.crl_interval is None: - a.crl_interval = a.parent.crl_interval - if a.regen_margin is None: - a.regen_margin = a.parent.regen_margin - a.client_handle = "/".join(a.sia_base.split("/")[4:]).rstrip("/") - self.root.closure() - self.map = dict((a.name, a) for a in self) - self.engines = [a for a in self if a.is_engine] - for i, a in enumerate(self.engines): - a.set_engine_number(i) - for a in self: - if a.is_hosted: - a.hosted_by = self.map[a.hosted_by] - a.hosted_by.hosts.append(a) - assert a.is_twig, "%s is not twig" % a.name - assert not a.hosted_by.is_hosted, "%s is hosted by a hosted entity" % a.name - - def apply_delta(self, delta, cb): - """ - Apply a delta or run a command. - """ - - def loop(iterator, d): - if isinstance(d, str): - c = d.split() - cmds[c[0]](iterator, *c[1:]) - else: - self.map[d["name"]].apply_delta(d, iterator) - - def done(): - self.root.closure() - cb() - - if delta is None: - cb() - else: - rpki.async.iterator(delta, loop, done) - - def dump(self): - """ - Print content of the database. 
- """ - for a in self: - print a - -class allocation(object): - - parent = None - irdb_db_name = None - irdb_port = None - rpki_db_name = None - rpki_port = None - crl_interval = None - regen_margin = None - last_cms_time = None - rpkid_process = None - irdbd_process = None - - def __init__(self, yaml, db, parent = None): - """ - Initialize one entity and insert it into the database. - """ - db.append(self) - self.name = yaml["name"] - self.parent = parent - self.kids = [allocation(k, db, self) for k in yaml.get("kids", ())] - valid_until = None - if "valid_until" in yaml: - valid_until = rpki.sundial.datetime.from_datetime(yaml.get("valid_until")) - if valid_until is None and "valid_for" in yaml: - valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(yaml["valid_for"]) - self.base = rpki.resource_set.resource_bag( - asn = rpki.resource_set.resource_set_as(yaml.get("asn")), - v4 = rpki.resource_set.resource_set_ipv4(yaml.get("ipv4")), - v6 = rpki.resource_set.resource_set_ipv6(yaml.get("ipv6")), - valid_until = valid_until) - self.sia_base = yaml.get("sia_base") - if "crl_interval" in yaml: - self.crl_interval = rpki.sundial.timedelta.parse(yaml["crl_interval"]).convert_to_seconds() - if "regen_margin" in yaml: - self.regen_margin = rpki.sundial.timedelta.parse(yaml["regen_margin"]).convert_to_seconds() - self.roa_requests = [roa_request.parse(y) for y in yaml.get("roa_request", yaml.get("route_origin", ()))] - for r in self.roa_requests: - if r.v4: - self.base.v4 |= r.v4.to_resource_set() - if r.v6: - self.base.v6 |= r.v6.to_resource_set() - self.router_certs = [router_cert.parse(y) for y in yaml.get("router_cert", ())] - for r in self.router_certs: - self.base.asn |= r.asn - self.hosted_by = yaml.get("hosted_by") - self.extra_conf = yaml.get("extra_conf", []) - self.hosts = [] - - def closure(self): - """ - Compute the transitive resource closure. 
- """ - resources = self.base - for kid in self.kids: - resources |= kid.closure() - self.resources = resources - return resources - - def apply_delta(self, yaml, cb): - """ - Apply deltas to this entity. - """ - - rpki.log.info("Applying delta: %s" % yaml) - - def loop(iterator, kv): - if kv[0] == "name": - iterator() - else: - getattr(self, "apply_" + kv[0])(kv[1], iterator) - - rpki.async.iterator(yaml.items(), loop, cb) - - def apply_add_as(self, text, cb): - self.base.asn |= rpki.resource_set.resource_set_as(text) - cb() - - def apply_add_v4(self, text, cb): - self.base.v4 |= rpki.resource_set.resource_set_ipv4(text) - cb() - - def apply_add_v6(self, text, cb): - self.base.v6 |= rpki.resource_set.resource_set_ipv6(text) - cb() - - def apply_sub_as(self, text, cb): - self.base.asn |= rpki.resource_set.resource_set_as(text) - cb() - - def apply_sub_v4(self, text, cb): - self.base.v4 |= rpki.resource_set.resource_set_ipv4(text) - cb() - - def apply_sub_v6(self, text, cb): - self.base.v6 |= rpki.resource_set.resource_set_ipv6(text) - cb() - - def apply_valid_until(self, stamp, cb): - self.base.valid_until = rpki.sundial.datetime.from_datetime(stamp) - cb() - - def apply_valid_for(self, text, cb): - self.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(text) - cb() - - def apply_valid_add(self, text, cb): - self.base.valid_until += rpki.sundial.timedelta.parse(text) - cb() - - def apply_valid_sub(self, text, cb): - self.base.valid_until -= rpki.sundial.timedelta.parse(text) - cb() - - def apply_roa_request_add(self, yaml, cb): - for y in yaml: - r = roa_request.parse(y) - if r not in self.roa_requests: - self.roa_requests.append(r) - cb() - - def apply_roa_request_del(self, yaml, cb): - for y in yaml: - r = roa_request.parse(y) - if r in self.roa_requests: - self.roa_requests.remove(r) - cb() - - def apply_router_cert_add(self, yaml, cb): - for y in yaml: - r = router_cert.parse(y) - if r not in self.router_certs: - self.router_certs.append(r) - 
cb() - - def apply_router_cert_del(self, yaml, cb): - for y in yaml: - r = router_cert.parse(y) - if r in self.router_certs: - self.router_certs.remove(r) - cb() - - def apply_rekey(self, target, cb): - - def done(e): - if isinstance(e, Exception): - rpki.log.traceback() - raise e - cb() - - if target is None: - rpki.log.info("Rekeying %s" % self.name) - self.call_rpkid([rpki.left_right.self_elt.make_pdu( - action = "set", self_handle = self.name, rekey = "yes")], cb = done) - else: - rpki.log.info("Rekeying %s %s" % (self.name, target)) - self.call_rpkid([rpki.left_right.parent_elt.make_pdu( - action = "set", self_handle = self.name, parent_handle = target, rekey = "yes")], cb = done) - - def apply_revoke(self, target, cb): - - def done(e): - if isinstance(e, Exception): - rpki.log.traceback() - raise e - cb() - - if target is None: - rpki.log.info("Revoking %s" % self.name) - self.call_rpkid([rpki.left_right.self_elt.make_pdu( - action = "set", self_handle = self.name, revoke = "yes")], cb = done) - else: - rpki.log.info("Revoking %s %s" % (self.name, target)) - self.call_rpkid([rpki.left_right.parent_elt.make_pdu( - action = "set", self_handle = self.name, parent_handle = target, revoke = "yes")], cb = done) - - def __str__(self): - s = self.name + "\n" - if self.resources.asn: s += " ASN: %s\n" % self.resources.asn - if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4 - if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6 - if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids) - if self.parent: s += " Up: %s\n" % self.parent.name - if self.sia_base: s += " SIA: %s\n" % self.sia_base - return s + "Until: %s\n" % self.resources.valid_until - - - @property - def is_root(self): - return self.parent is None - - @property - def is_twig(self): - return not self.is_root - - @property - def is_hosted(self): - return self.hosted_by is not None - - @property - def is_engine(self): - return not self.is_hosted - - def 
set_engine_number(self, n): - """ - Set the engine number for this entity. - """ - self.irdb_db_name = "irdb%d" % n - self.irdb_port = allocate_port() - self.rpki_db_name = "rpki%d" % n - self.rpki_port = allocate_port() - - def get_rpki_port(self): - """ - Get rpki port to use for this entity. - """ - if self.is_hosted: - assert self.hosted_by.rpki_port is not None - return self.hosted_by.rpki_port - else: - assert self.rpki_port is not None - return self.rpki_port - - def setup_bpki_certs(self): - """ - Create BPKI certificates for this entity. - """ - rpki.log.info("Constructing BPKI keys and certs for %s" % self.name) - setup_bpki_cert_chain(name = self.name, - ee = ("RPKI", "IRDB", "IRBE"), - ca = ("SELF",)) - self.rpkid_ta = rpki.x509.X509(PEM_file = self.name + "-TA.cer") - self.irbe_key = rpki.x509.RSA( PEM_file = self.name + "-IRBE.key") - self.irbe_cert = rpki.x509.X509(PEM_file = self.name + "-IRBE.cer") - self.rpkid_cert = rpki.x509.X509(PEM_file = self.name + "-RPKI.cer") - - def setup_conf_file(self): - """ - Write config files for this entity. - """ - rpki.log.info("Writing config files for %s" % self.name) - assert self.rpki_port is not None - d = { "my_name" : self.name, - "irdb_db_name" : self.irdb_db_name, - "irdb_db_pass" : irdb_db_pass, - "irdb_port" : self.irdb_port, - "rpki_db_name" : self.rpki_db_name, - "rpki_db_pass" : rpki_db_pass, - "rpki_port" : self.rpki_port } - f = open(self.name + ".conf", "w") - f.write(conf_fmt_1 % d) - for line in self.extra_conf: - f.write(line + "\n") - f.close() - - def setup_sql(self, rpki_sql, irdb_sql): - """ - Set up this entity's IRDB. 
- """ - rpki.log.info("Setting up MySQL for %s" % self.name) - db = MySQLdb.connect(user = "rpki", db = self.rpki_db_name, passwd = rpki_db_pass, - conv = sql_conversions) - cur = db.cursor() - db.autocommit(True) - for sql in rpki_sql: - try: - cur.execute(sql) - except Exception: - if "DROP TABLE IF EXISTS" not in sql.upper(): - raise - db.close() - db = MySQLdb.connect(user = "irdb", db = self.irdb_db_name, passwd = irdb_db_pass, - conv = sql_conversions) - cur = db.cursor() - db.autocommit(True) - for sql in irdb_sql: - try: - cur.execute(sql) - except Exception: - if "DROP TABLE IF EXISTS" not in sql.upper(): - raise - for s in [self] + self.hosts: - for kid in s.kids: - cur.execute("INSERT registrant (registrant_handle, registry_handle, valid_until) VALUES (%s, %s, %s)", - (kid.name, s.name, kid.resources.valid_until)) - db.close() - - def sync_sql(self): - """ - Whack this entity's IRDB to match our master database. We do this - once during setup, then do it again every time we apply a delta to - this entity. 
- """ - rpki.log.info("Updating MySQL data for IRDB %s" % self.name) - db = MySQLdb.connect(user = "irdb", db = self.irdb_db_name, passwd = irdb_db_pass, - conv = sql_conversions) - cur = db.cursor() - db.autocommit(True) - cur.execute("DELETE FROM registrant_asn") - cur.execute("DELETE FROM registrant_net") - cur.execute("DELETE FROM roa_request_prefix") - cur.execute("DELETE FROM roa_request") - cur.execute("DELETE FROM ee_certificate_asn") - cur.execute("DELETE FROM ee_certificate_net") - cur.execute("DELETE FROM ee_certificate") - - for s in [self] + self.hosts: - for kid in s.kids: - cur.execute("SELECT registrant_id FROM registrant WHERE registrant_handle = %s AND registry_handle = %s", - (kid.name, s.name)) - registrant_id = cur.fetchone()[0] - for as_range in kid.resources.asn: - cur.execute("INSERT registrant_asn (start_as, end_as, registrant_id) VALUES (%s, %s, %s)", - (as_range.min, as_range.max, registrant_id)) - for v4_range in kid.resources.v4: - cur.execute("INSERT registrant_net (start_ip, end_ip, version, registrant_id) VALUES (%s, %s, 4, %s)", - (v4_range.min, v4_range.max, registrant_id)) - for v6_range in kid.resources.v6: - cur.execute("INSERT registrant_net (start_ip, end_ip, version, registrant_id) VALUES (%s, %s, 6, %s)", - (v6_range.min, v6_range.max, registrant_id)) - cur.execute("UPDATE registrant SET valid_until = %s WHERE registrant_id = %s", - (kid.resources.valid_until, registrant_id)) - for r in s.roa_requests: - cur.execute("INSERT roa_request (self_handle, asn) VALUES (%s, %s)", - (s.name, r.asn)) - roa_request_id = cur.lastrowid - for version, prefix_set in ((4, r.v4), (6, r.v6)): - if prefix_set: - cur.executemany("INSERT roa_request_prefix " - "(roa_request_id, prefix, prefixlen, max_prefixlen, version) " - "VALUES (%s, %s, %s, %s, %s)", - ((roa_request_id, x.prefix, x.prefixlen, x.max_prefixlen, version) - for x in prefix_set)) - for r in s.router_certs: - cur.execute("INSERT ee_certificate (self_handle, pkcs10, gski, cn, sn, 
eku, valid_until) " - "VALUES (%s, %s, %s, %s, %s, %s, %s)", - (s.name, r.pkcs10.get_DER(), r.gski, r.cn, r.sn, r.eku, s.resources.valid_until)) - ee_certificate_id = cur.lastrowid - cur.executemany("INSERT ee_certificate_asn (ee_certificate_id, start_as, end_as) VALUES (%s, %s, %s)", - ((ee_certificate_id, a.min, a.max) for a in r.asn)) - db.close() - - def run_daemons(self): - """ - Run daemons for this entity. - """ - rpki.log.info("Running daemons for %s" % self.name) - self.rpkid_process = subprocess.Popen((prog_python, prog_rpkid, "-d", "-c", self.name + ".conf") + - (("-p", self.name + ".prof") if args.profile else ())) - self.irdbd_process = subprocess.Popen((prog_python, prog_irdbd, "-d", "-c", self.name + ".conf")) - - def kill_daemons(self): - """ - Kill daemons for this entity. - """ - # pylint: disable=E1103 - for proc, name in ((self.rpkid_process, "rpkid"), - (self.irdbd_process, "irdbd")): - if proc is not None and proc.poll() is None: - rpki.log.info("Killing daemon %s pid %s for %s" % (name, proc.pid, self.name)) - try: - proc.terminate() - except OSError: - pass - if proc is not None: - rpki.log.info("Daemon %s pid %s for %s exited with code %s" % ( - name, proc.pid, self.name, proc.wait())) - - def call_rpkid(self, pdus, cb): - """ - Send a left-right message to this entity's RPKI daemon and return - the response. - - If this entity is hosted (does not run its own RPKI daemon), all - of this happens with the hosting RPKI daemon. 
- """ - - rpki.log.info("Calling rpkid for %s" % self.name) - - if self.is_hosted: - rpki.log.info("rpkid %s is hosted by rpkid %s, switching" % (self.name, self.hosted_by.name)) - self = self.hosted_by - assert not self.is_hosted - - assert isinstance(pdus, (list, tuple)) - assert self.rpki_port is not None - - q_msg = rpki.left_right.msg.query(*pdus) - q_cms = rpki.left_right.cms_msg() - q_der = q_cms.wrap(q_msg, self.irbe_key, self.irbe_cert) - q_url = "http://localhost:%d/left-right" % self.rpki_port - - rpki.log.debug(q_cms.pretty_print_content()) - - def done(r_der): - rpki.log.info("Callback from rpkid %s" % self.name) - r_cms = rpki.left_right.cms_msg(DER = r_der) - r_msg = r_cms.unwrap((self.rpkid_ta, self.rpkid_cert)) - self.last_cms_time = r_cms.check_replay(self.last_cms_time, q_url) - rpki.log.debug(r_cms.pretty_print_content()) - assert r_msg.is_reply - for r_pdu in r_msg: - assert not isinstance(r_pdu, rpki.left_right.report_error_elt) - cb(r_msg) - - def lose(e): - raise - - rpki.http.client( - url = q_url, - msg = q_der, - callback = done, - errback = lose) - - def cross_certify(self, certificant, reverse = False): - """ - Cross-certify and return the resulting certificate. 
- """ - - if reverse: - certifier = certificant - certificant = self.name + "-SELF" - else: - certifier = self.name + "-SELF" - certfile = certifier + "-" + certificant + ".cer" - - rpki.log.info("Cross certifying %s into %s's BPKI (%s)" % (certificant, certifier, certfile)) - - child = rpki.x509.X509(Auto_file = certificant + ".cer") - parent = rpki.x509.X509(Auto_file = certifier + ".cer") - keypair = rpki.x509.RSA(Auto_file = certifier + ".key") - serial_file = certifier + ".srl" - - now = rpki.sundial.now() - notAfter = now + rpki.sundial.timedelta(days = 30) - - try: - f = open(serial_file, "r") - serial = f.read() - f.close() - serial = int(serial.splitlines()[0], 16) - except IOError: - serial = 1 - - x = parent.bpki_cross_certify( - keypair = keypair, - source_cert = child, - serial = serial, - notAfter = notAfter, - now = now) - - f = open(serial_file, "w") - f.write("%02x\n" % (serial + 1)) - f.close() - - f = open(certfile, "w") - f.write(x.get_PEM()) - f.close() - - rpki.log.debug("Cross certified %s:" % certfile) - rpki.log.debug(" Issuer %s [%s]" % (x.getIssuer(), x.hAKI())) - rpki.log.debug(" Subject %s [%s]" % (x.getSubject(), x.hSKI())) - return x - - def create_rpki_objects(self, cb): - """ - Create RPKI engine objects for this engine. - - Root node of the engine tree is special, it too has a parent but - that one is the magic self-signed micro engine. - - The rest of this is straightforward. There are a lot of objects - to create, but we can do batch them all into one honking PDU, then - issue one more PDU to set BSC EE certificates based on the PKCS - #10 requests we get back when we tell rpkid to generate BSC keys. 
- """ - - assert not self.is_hosted - - selves = [self] + self.hosts - - for i, s in enumerate(selves): - rpki.log.info("Creating RPKI objects for [%d] %s" % (i, s.name)) - - rpkid_pdus = [] - pubd_pdus = [] - - for s in selves: - - rpkid_pdus.append(rpki.left_right.self_elt.make_pdu( - action = "create", - self_handle = s.name, - crl_interval = s.crl_interval, - regen_margin = s.regen_margin, - bpki_cert = (s.cross_certify(s.hosted_by.name + "-TA", reverse = True) - if s.is_hosted else - rpki.x509.X509(Auto_file = s.name + "-SELF.cer")))) - - rpkid_pdus.append(rpki.left_right.bsc_elt.make_pdu( - action = "create", - self_handle = s.name, - bsc_handle = "b", - generate_keypair = True)) - - pubd_pdus.append(rpki.publication.client_elt.make_pdu( - action = "create", - client_handle = s.client_handle, - base_uri = s.sia_base, - bpki_cert = s.cross_certify(pubd_name + "-TA", reverse = True))) - - rpkid_pdus.append(rpki.left_right.repository_elt.make_pdu( - action = "create", - self_handle = s.name, - bsc_handle = "b", - repository_handle = "r", - bpki_cert = s.cross_certify(pubd_name + "-TA"), - peer_contact_uri = "http://localhost:%d/client/%s" % (pubd_port, s.client_handle))) - - for k in s.kids: - rpkid_pdus.append(rpki.left_right.child_elt.make_pdu( - action = "create", - self_handle = s.name, - child_handle = k.name, - bsc_handle = "b", - bpki_cert = s.cross_certify(k.name + "-SELF"))) - - if s.is_root: - rootd_cert = s.cross_certify(rootd_name + "-TA") - rpkid_pdus.append(rpki.left_right.parent_elt.make_pdu( - action = "create", - self_handle = s.name, - parent_handle = "rootd", - bsc_handle = "b", - repository_handle = "r", - sia_base = s.sia_base, - bpki_cms_cert = rootd_cert, - sender_name = s.name, - recipient_name = "rootd", - peer_contact_uri = "http://localhost:%s/" % rootd_port)) - else: - rpkid_pdus.append(rpki.left_right.parent_elt.make_pdu( - action = "create", - self_handle = s.name, - parent_handle = s.parent.name, - bsc_handle = "b", - 
repository_handle = "r", - sia_base = s.sia_base, - bpki_cms_cert = s.cross_certify(s.parent.name + "-SELF"), - sender_name = s.name, - recipient_name = s.parent.name, - peer_contact_uri = "http://localhost:%s/up-down/%s/%s" % (s.parent.get_rpki_port(), - s.parent.name, s.name))) - - def one(): - call_pubd(pubd_pdus, cb = two) - - def two(vals): - self.call_rpkid(rpkid_pdus, cb = three) - - def three(vals): - - bsc_dict = dict((b.self_handle, b) for b in vals if isinstance(b, rpki.left_right.bsc_elt)) - - bsc_pdus = [] - - for s in selves: - b = bsc_dict[s.name] - - rpki.log.info("Issuing BSC EE cert for %s" % s.name) - cmd = (prog_openssl, "x509", "-req", "-sha256", "-extfile", s.name + "-RPKI.conf", - "-extensions", "req_x509_ext", "-days", "30", - "-CA", s.name + "-SELF.cer", "-CAkey", s.name + "-SELF.key", "-CAcreateserial", "-text") - signer = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE) - signed = signer.communicate(input = b.pkcs10_request.get_PEM()) - if not signed[0]: - rpki.log.warn(signed[1]) - raise CouldntIssueBSCEECertificate, "Couldn't issue BSC EE certificate" - s.bsc_ee = rpki.x509.X509(PEM = signed[0]) - s.bsc_crl = rpki.x509.CRL(PEM_file = s.name + "-SELF.crl") - rpki.log.info("BSC EE cert for %s SKI %s" % (s.name, s.bsc_ee.hSKI())) - - bsc_pdus.append(rpki.left_right.bsc_elt.make_pdu( - action = "set", - self_handle = s.name, - bsc_handle = "b", - signing_cert = s.bsc_ee, - signing_cert_crl = s.bsc_crl)) - - self.call_rpkid(bsc_pdus, cb = four) - - def four(vals): - cb() - - one() - - def setup_yaml_leaf(self): - """ - Generate certificates and write YAML scripts for leaf nodes. - - We're cheating a bit here: properly speaking, we can't generate - issue or revoke requests without knowing the class, which is - generated on the fly, but at the moment the test case is - simplistic enough that the class will always be "1", so we just - wire in that value for now. 
- - Well, ok, we just broke that assumption. Now we do something even - nastier, just to eke a bit more life out of this kludge. This - really needs to be rewritten, but it may require a different tool - than testpoke. - """ - - if not os.path.exists(self.name + ".key"): - rpki.log.info("Generating RPKI key for %s" % self.name) - subprocess.check_call((prog_openssl, "genrsa", "-out", self.name + ".key", "2048" ), - stdout = subprocess.PIPE, stderr = subprocess.STDOUT) - ski = rpki.x509.RSA(PEM_file = self.name + ".key").gSKI() - - if self.parent.is_hosted: - parent_host = self.parent.hosted_by.name - else: - parent_host = self.parent.name - - self.cross_certify(self.parent.name + "-SELF") - self.cross_certify(parent_host + "-TA") - - rpki.log.info("Writing leaf YAML for %s" % self.name) - f = open(self.name + ".yaml", "w") - f.write(yaml_fmt_1 % { - "parent_name" : self.parent.name, - "parent_host" : parent_host, - "my_name" : self.name, - "http_port" : self.parent.get_rpki_port(), - "class_name" : 2 if self.parent.is_hosted else 1, - "sia" : self.sia_base, - "ski" : ski }) - f.close() - - def run_cron(self, cb): - """ - Trigger cron run for this engine. - """ - - rpki.log.info("Running cron for %s" % self.name) - - assert self.rpki_port is not None - - def done(result): - assert result == "OK", 'Expected "OK" result from cronjob, got %r' % result - cb() - - rpki.http.client( - url = "http://localhost:%d/cronjob" % self.rpki_port, - msg = "Run cron now, please", - callback = done, - errback = done) - - def run_yaml(self): - """ - Run YAML scripts for this leaf entity. Since we're not bothering - to check the class list returned by the list command, the issue - command may fail, so we treat failure of the list command as an - error, but only issue a warning when issue fails. 
- """ - - rpki.log.info("Running YAML for %s" % self.name) - subprocess.check_call((prog_python, prog_poke, "-y", self.name + ".yaml", "-r", "list")) - if subprocess.call((prog_python, prog_poke, "-y", self.name + ".yaml", "-r", "issue")) != 0: - rpki.log.warn("YAML issue command failed for %s, continuing" % self.name) - -def setup_bpki_cert_chain(name, ee = (), ca = ()): - """ - Build a set of BPKI certificates. - """ - s = "exec >/dev/null 2>&1\n" - #s = "set -x\n" - for kind in ("TA",) + ee + ca: - d = { "name" : name, - "kind" : kind, - "ca" : "false" if kind in ee else "true", - "openssl" : prog_openssl } - f = open("%(name)s-%(kind)s.conf" % d, "w") - f.write(bpki_cert_fmt_1 % d) - f.close() - if not os.path.exists("%(name)s-%(kind)s.key" % d): - s += bpki_cert_fmt_2 % d - s += bpki_cert_fmt_3 % d - d = { "name" : name, "openssl" : prog_openssl } - s += bpki_cert_fmt_4 % d - for kind in ee + ca: - d["kind"] = kind - s += bpki_cert_fmt_5 % d - for kind in ("TA",) + ca: - d["kind"] = kind - s += bpki_cert_fmt_6 % d - subprocess.check_call(s, shell = True) - -def setup_rootd(rpkid, rootd_yaml): - """ - Write the config files for rootd. - """ - rpkid.cross_certify(rootd_name + "-TA", reverse = True) - rpki.log.info("Writing config files for %s" % rootd_name) - d = { "rootd_name" : rootd_name, - "rootd_port" : rootd_port, - "rpkid_name" : rpkid.name, - "rootd_sia" : rootd_sia, - "rsyncd_dir" : rsyncd_dir, - "openssl" : prog_openssl, - "lifetime" : rootd_yaml.get("lifetime", "30d") } - f = open(rootd_name + ".conf", "w") - f.write(rootd_fmt_1 % d) - f.close() - s = "exec >/dev/null 2>&1\n" - #s = "set -x\n" - if not os.path.exists("root.key"): - s += rootd_fmt_2 % d - s += rootd_fmt_3 % d - subprocess.check_call(s, shell = True) - -def setup_rcynic(): - """ - Write the config file for rcynic. 
- """ - rpki.log.info("Config file for rcynic") - d = { "rcynic_name" : rcynic_name, - "rootd_name" : rootd_name, - "rootd_sia" : rootd_sia } - f = open(rcynic_name + ".conf", "w") - f.write(rcynic_fmt_1 % d) - f.close() - -def setup_rsyncd(): - """ - Write the config file for rsyncd. - """ - rpki.log.info("Config file for rsyncd") - d = { "rsyncd_name" : rsyncd_name, - "rsyncd_port" : rsyncd_port, - "rsyncd_module" : rsyncd_module, - "rsyncd_dir" : rsyncd_dir } - f = open(rsyncd_name + ".conf", "w") - f.write(rsyncd_fmt_1 % d) - f.close() - -def setup_publication(pubd_sql): - """ - Set up publication daemon. - """ - rpki.log.info("Configure publication daemon") - publication_dir = os.getcwd() + "/publication" - assert rootd_sia.startswith("rsync://") - global rsyncd_dir - rsyncd_dir = publication_dir + "/".join(rootd_sia.split("/")[4:]) - if not rsyncd_dir.endswith("/"): - rsyncd_dir += "/" - os.makedirs(rsyncd_dir + "root/trunk") - db = MySQLdb.connect(db = pubd_db_name, user = pubd_db_user, passwd = pubd_db_pass, - conv = sql_conversions) - cur = db.cursor() - db.autocommit(True) - for sql in pubd_sql: - try: - cur.execute(sql) - except Exception: - if "DROP TABLE IF EXISTS" not in sql.upper(): - raise - db.close() - d = { "pubd_name" : pubd_name, - "pubd_port" : pubd_port, - "pubd_db_name" : pubd_db_name, - "pubd_db_user" : pubd_db_user, - "pubd_db_pass" : pubd_db_pass, - "pubd_dir" : rsyncd_dir } - f = open(pubd_name + ".conf", "w") - f.write(pubd_fmt_1 % d) - f.close() - global pubd_ta - global pubd_irbe_key - global pubd_irbe_cert - global pubd_pubd_cert - pubd_ta = rpki.x509.X509(Auto_file = pubd_name + "-TA.cer") - pubd_irbe_key = rpki.x509.RSA( Auto_file = pubd_name + "-IRBE.key") - pubd_irbe_cert = rpki.x509.X509(Auto_file = pubd_name + "-IRBE.cer") - pubd_pubd_cert = rpki.x509.X509(Auto_file = pubd_name + "-PUBD.cer") - -def call_pubd(pdus, cb): - """ - Send a publication message to publication daemon and return the - response. 
- """ - rpki.log.info("Calling pubd") - q_msg = rpki.publication.msg.query(*pdus) - q_cms = rpki.publication.cms_msg() - q_der = q_cms.wrap(q_msg, pubd_irbe_key, pubd_irbe_cert) - q_url = "http://localhost:%d/control" % pubd_port - - rpki.log.debug(q_cms.pretty_print_content()) - - def call_pubd_cb(r_der): - global pubd_last_cms_time - r_cms = rpki.publication.cms_msg(DER = r_der) - r_msg = r_cms.unwrap((pubd_ta, pubd_pubd_cert)) - pubd_last_cms_time = r_cms.check_replay(pubd_last_cms_time, q_url) - rpki.log.debug(r_cms.pretty_print_content()) - assert r_msg.is_reply - for r_pdu in r_msg: - assert not isinstance(r_pdu, rpki.publication.report_error_elt) - cb(r_msg) - - def call_pubd_eb(e): - rpki.log.warn("Problem calling pubd: %s" % e) - rpki.log.traceback() - - rpki.http.client( - url = q_url, - msg = q_der, - callback = call_pubd_cb, - errback = call_pubd_eb) - -def set_pubd_crl(cb): - """ - Whack publication daemon's bpki_crl. This must be configured before - publication daemon starts talking to its clients, and must be - updated whenever we update the CRL. - """ - rpki.log.info("Setting pubd's BPKI CRL") - crl = rpki.x509.CRL(Auto_file = pubd_name + "-TA.crl") - call_pubd([rpki.publication.config_elt.make_pdu(action = "set", bpki_crl = crl)], cb = lambda ignored: cb()) - -last_rcynic_run = None - -def run_rcynic(): - """ - Run rcynic to see whether what was published makes sense. - """ - rpki.log.info("Running rcynic") - env = os.environ.copy() - env["TZ"] = "" - global last_rcynic_run - if int(time.time()) == last_rcynic_run: - time.sleep(1) - subprocess.check_call((prog_rcynic, "-c", rcynic_name + ".conf"), env = env) - subprocess.call(rcynic_stats, shell = True, env = env) - last_rcynic_run = int(time.time()) - os.link("%s.xml" % rcynic_name, "%s.%s.xml" % (rcynic_name, last_rcynic_run)) - -def mangle_sql(filename): - """ - Mangle an SQL file into a sequence of SQL statements. 
- """ - words = [] - f = open(filename) - for line in f: - words.extend(line.partition("--")[0].split()) - f.close() - return " ".join(words).strip(";").split(";") - -bpki_cert_fmt_1 = '''\ -[req] -distinguished_name = req_dn -x509_extensions = req_x509_ext -prompt = no -default_md = sha256 - -[req_dn] -CN = Test Certificate %(name)s %(kind)s - -[req_x509_ext] -basicConstraints = critical,CA:%(ca)s -subjectKeyIdentifier = hash -authorityKeyIdentifier = keyid:always - - -[ca] -default_ca = ca_default - -[ca_default] - -certificate = %(name)s-%(kind)s.cer -serial = %(name)s-%(kind)s.srl -private_key = %(name)s-%(kind)s.key -database = %(name)s-%(kind)s.idx -crlnumber = %(name)s-%(kind)s.cnm -default_crl_days = 30 -default_md = sha256 -''' - -bpki_cert_fmt_2 = '''\ -%(openssl)s genrsa -out %(name)s-%(kind)s.key 2048 && -''' - -bpki_cert_fmt_3 = '''\ -%(openssl)s req -new \ - -sha256 \ - -key %(name)s-%(kind)s.key \ - -out %(name)s-%(kind)s.req \ - -config %(name)s-%(kind)s.conf && -touch %(name)s-%(kind)s.idx && -echo >%(name)s-%(kind)s.cnm 01 && -''' - -bpki_cert_fmt_4 = '''\ -%(openssl)s x509 -req -sha256 \ - -in %(name)s-TA.req \ - -out %(name)s-TA.cer \ - -extfile %(name)s-TA.conf \ - -extensions req_x509_ext \ - -signkey %(name)s-TA.key \ - -days 60 -text \ -''' - -bpki_cert_fmt_5 = ''' && \ -%(openssl)s x509 -req \ - -sha256 \ - -in %(name)s-%(kind)s.req \ - -out %(name)s-%(kind)s.cer \ - -extfile %(name)s-%(kind)s.conf \ - -extensions req_x509_ext \ - -days 30 \ - -text \ - -CA %(name)s-TA.cer \ - -CAkey %(name)s-TA.key \ - -CAcreateserial \ -''' - -bpki_cert_fmt_6 = ''' && \ -%(openssl)s ca -batch \ - -gencrl \ - -out %(name)s-%(kind)s.crl \ - -config %(name)s-%(kind)s.conf \ -''' - -yaml_fmt_1 = '''--- -version: 1 -posturl: http://localhost:%(http_port)s/up-down/%(parent_name)s/%(my_name)s -recipient-id: "%(parent_name)s" -sender-id: "%(my_name)s" - -cms-cert-file: %(my_name)s-RPKI.cer -cms-key-file: %(my_name)s-RPKI.key -cms-ca-cert-file: %(my_name)s-TA.cer 
-cms-crl-file: %(my_name)s-TA.crl -cms-ca-certs-file: - - %(my_name)s-TA-%(parent_name)s-SELF.cer - -ssl-cert-file: %(my_name)s-RPKI.cer -ssl-key-file: %(my_name)s-RPKI.key -ssl-ca-cert-file: %(my_name)s-TA.cer -ssl-ca-certs-file: - - %(my_name)s-TA-%(parent_host)s-TA.cer - -# We're cheating here by hardwiring the class name - -requests: - list: - type: list - issue: - type: issue - class: %(class_name)s - sia: - - %(sia)s - cert-request-key-file: %(my_name)s.key - revoke: - type: revoke - class: %(class_name)s - ski: %(ski)s -''' - -conf_fmt_1 = '''\ - -[irdbd] - -startup-message = This is %(my_name)s irdbd - -sql-database = %(irdb_db_name)s -sql-username = irdb -sql-password = %(irdb_db_pass)s -bpki-ta = %(my_name)s-TA.cer -rpkid-cert = %(my_name)s-RPKI.cer -irdbd-cert = %(my_name)s-IRDB.cer -irdbd-key = %(my_name)s-IRDB.key -http-url = http://localhost:%(irdb_port)d/ -enable_tracebacks = yes - -[irbe_cli] - -rpkid-bpki-ta = %(my_name)s-TA.cer -rpkid-cert = %(my_name)s-RPKI.cer -rpkid-irbe-cert = %(my_name)s-IRBE.cer -rpkid-irbe-key = %(my_name)s-IRBE.key -rpkid-url = http://localhost:%(rpki_port)d/left-right -enable_tracebacks = yes - -[rpkid] - -startup-message = This is %(my_name)s rpkid - -sql-database = %(rpki_db_name)s -sql-username = rpki -sql-password = %(rpki_db_pass)s - -bpki-ta = %(my_name)s-TA.cer -rpkid-key = %(my_name)s-RPKI.key -rpkid-cert = %(my_name)s-RPKI.cer -irdb-cert = %(my_name)s-IRDB.cer -irbe-cert = %(my_name)s-IRBE.cer - -irdb-url = http://localhost:%(irdb_port)d/ - -server-host = localhost -server-port = %(rpki_port)d - -use-internal-cron = false -enable_tracebacks = yes -''' - -rootd_fmt_1 = '''\ - -[rootd] - -bpki-ta = %(rootd_name)s-TA.cer -rootd-bpki-cert = %(rootd_name)s-RPKI.cer -rootd-bpki-key = %(rootd_name)s-RPKI.key -rootd-bpki-crl = %(rootd_name)s-TA.crl -child-bpki-cert = %(rootd_name)s-TA-%(rpkid_name)s-SELF.cer - -server-port = %(rootd_port)s - -rpki-root-dir = %(rsyncd_dir)sroot -rpki-base-uri = %(rootd_sia)sroot/ 
-rpki-root-cert-uri = %(rootd_sia)sroot.cer - -rpki-root-key = root.key -rpki-root-cert = root.cer - -rpki-subject-pkcs10 = %(rootd_name)s.subject.pkcs10 -rpki-subject-lifetime = %(lifetime)s - -rpki-root-crl = root.crl -rpki-root-manifest = root.mft - -rpki-class-name = trunk -rpki-subject-cert = trunk.cer - -include-bpki-crl = yes -enable_tracebacks = yes - -[req] -default_bits = 2048 -encrypt_key = no -distinguished_name = req_dn -prompt = no -default_md = sha256 -default_days = 60 - -[req_dn] -CN = Completely Bogus Test Root (NOT FOR PRODUCTION USE) - -[req_x509_ext] -basicConstraints = critical,CA:true -subjectKeyIdentifier = hash -authorityKeyIdentifier = keyid:always - -[req_x509_rpki_ext] -basicConstraints = critical,CA:true -subjectKeyIdentifier = hash -keyUsage = critical,keyCertSign,cRLSign -subjectInfoAccess = @sia -sbgp-autonomousSysNum = critical,AS:0-4294967295 -sbgp-ipAddrBlock = critical,IPv4:0.0.0.0/0,IPv6:0::/0 -certificatePolicies = critical, @rpki_certificate_policy - -[sia] - -1.3.6.1.5.5.7.48.5;URI = %(rootd_sia)sroot/ -1.3.6.1.5.5.7.48.10;URI = %(rootd_sia)sroot/root.mft - -[rpki_certificate_policy] - -policyIdentifier = 1.3.6.1.5.5.7.14.2 -''' - -rootd_fmt_2 = '''\ -%(openssl)s genrsa -out root.key 2048 && -''' - -rootd_fmt_3 = '''\ -echo >%(rootd_name)s.tal %(rootd_sia)sroot.cer && -echo >>%(rootd_name)s.tal && -%(openssl)s rsa -pubout -in root.key | -awk '!/-----(BEGIN|END)/' >>%(rootd_name)s.tal && -%(openssl)s req -new -text -sha256 \ - -key root.key \ - -out %(rootd_name)s.req \ - -config %(rootd_name)s.conf \ - -extensions req_x509_rpki_ext && -%(openssl)s x509 -req -sha256 \ - -in %(rootd_name)s.req \ - -out root.cer \ - -outform DER \ - -extfile %(rootd_name)s.conf \ - -extensions req_x509_rpki_ext \ - -signkey root.key && -ln -f root.cer %(rsyncd_dir)s -''' - -rcynic_fmt_1 = '''\ -[rcynic] -xml-summary = %(rcynic_name)s.xml -jitter = 0 -use-links = yes -use-syslog = no -use-stderr = yes -log-level = log_debug -trust-anchor-locator 
= %(rootd_name)s.tal -''' - -rsyncd_fmt_1 = '''\ -port = %(rsyncd_port)d -address = localhost - -[%(rsyncd_module)s] -read only = yes -transfer logging = yes -use chroot = no -path = %(rsyncd_dir)s -comment = RPKI test -''' - -pubd_fmt_1 = '''\ -[pubd] - -sql-database = %(pubd_db_name)s -sql-username = %(pubd_db_user)s -sql-password = %(pubd_db_pass)s -bpki-ta = %(pubd_name)s-TA.cer -pubd-cert = %(pubd_name)s-PUBD.cer -pubd-key = %(pubd_name)s-PUBD.key -irbe-cert = %(pubd_name)s-IRBE.cer -server-host = localhost -server-port = %(pubd_port)d -publication-base = %(pubd_dir)s -enable_tracebacks = yes -''' - -main() diff --git a/rpkid/tests/smoketest.setup.sql b/rpkid/tests/smoketest.setup.sql deleted file mode 100644 index 326988f1..00000000 --- a/rpkid/tests/smoketest.setup.sql +++ /dev/null @@ -1,112 +0,0 @@ --- $Id$ --- --- Run this manually under the MySQL CLI to set up databases for testdb.py. --- testdb.py doesn't do this automatically because it requires privileges --- that smoketest.py doesn't (or at least shouldn't) have. - --- Copyright (C) 2009 Internet Systems Consortium ("ISC") --- --- Permission to use, copy, modify, and distribute this software for any --- purpose with or without fee is hereby granted, provided that the above --- copyright notice and this permission notice appear in all copies. --- --- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH --- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY --- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, --- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM --- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE --- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR --- PERFORMANCE OF THIS SOFTWARE. 
--- --- Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") --- --- Permission to use, copy, modify, and distribute this software for any --- purpose with or without fee is hereby granted, provided that the above --- copyright notice and this permission notice appear in all copies. --- --- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH --- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY --- AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, --- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM --- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE --- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR --- PERFORMANCE OF THIS SOFTWARE. - - -CREATE DATABASE irdb0; -CREATE DATABASE irdb1; -CREATE DATABASE irdb2; -CREATE DATABASE irdb3; -CREATE DATABASE irdb4; -CREATE DATABASE irdb5; -CREATE DATABASE irdb6; -CREATE DATABASE irdb7; -CREATE DATABASE irdb8; -CREATE DATABASE irdb9; -CREATE DATABASE irdb10; -CREATE DATABASE irdb11; - -CREATE DATABASE rpki0; -CREATE DATABASE rpki1; -CREATE DATABASE rpki2; -CREATE DATABASE rpki3; -CREATE DATABASE rpki4; -CREATE DATABASE rpki5; -CREATE DATABASE rpki6; -CREATE DATABASE rpki7; -CREATE DATABASE rpki8; -CREATE DATABASE rpki9; -CREATE DATABASE rpki10; -CREATE DATABASE rpki11; - -CREATE DATABASE pubd0; -CREATE DATABASE pubd1; -CREATE DATABASE pubd2; -CREATE DATABASE pubd3; -CREATE DATABASE pubd4; -CREATE DATABASE pubd5; -CREATE DATABASE pubd6; -CREATE DATABASE pubd7; -CREATE DATABASE pubd8; -CREATE DATABASE pubd9; -CREATE DATABASE pubd10; -CREATE DATABASE pubd11; - -GRANT ALL ON irdb0.* TO irdb@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON irdb1.* TO irdb@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON irdb2.* TO irdb@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON irdb3.* TO irdb@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON irdb4.* TO irdb@localhost 
IDENTIFIED BY 'fnord'; -GRANT ALL ON irdb5.* TO irdb@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON irdb6.* TO irdb@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON irdb7.* TO irdb@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON irdb8.* TO irdb@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON irdb9.* TO irdb@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON irdb10.* TO irdb@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON irdb11.* TO irdb@localhost IDENTIFIED BY 'fnord'; - -GRANT ALL ON rpki0.* TO rpki@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON rpki1.* TO rpki@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON rpki2.* TO rpki@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON rpki3.* TO rpki@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON rpki4.* TO rpki@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON rpki5.* TO rpki@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON rpki6.* TO rpki@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON rpki7.* TO rpki@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON rpki8.* TO rpki@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON rpki9.* TO rpki@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON rpki10.* TO rpki@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON rpki11.* TO rpki@localhost IDENTIFIED BY 'fnord'; - -GRANT ALL ON pubd0.* TO pubd@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON pubd1.* TO pubd@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON pubd2.* TO pubd@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON pubd3.* TO pubd@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON pubd4.* TO pubd@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON pubd5.* TO pubd@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON pubd6.* TO pubd@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON pubd7.* TO pubd@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON pubd8.* TO pubd@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON pubd9.* TO pubd@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON pubd10.* TO pubd@localhost IDENTIFIED BY 'fnord'; -GRANT ALL ON pubd11.* TO pubd@localhost IDENTIFIED BY 'fnord'; diff --git 
a/rpkid/tests/split-protocol-samples.xsl b/rpkid/tests/split-protocol-samples.xsl deleted file mode 100644 index 8800b6da..00000000 --- a/rpkid/tests/split-protocol-samples.xsl +++ /dev/null @@ -1,40 +0,0 @@ - - - - - . - - - - - - - - - Writing - - - Automatically generated, do not edit. - - - - - diff --git a/rpkid/tests/sql-cleaner.py b/rpkid/tests/sql-cleaner.py deleted file mode 100644 index c5b25ac2..00000000 --- a/rpkid/tests/sql-cleaner.py +++ /dev/null @@ -1,61 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -(Re)Initialize SQL tables used by these programs. 
-""" - -import rpki.config -import rpki.sql_schemas -from rpki.mysql_import import MySQLdb - -cfg = rpki.config.parser(None, "yamltest", allow_missing = True) - -for name in ("rpkid", "irdbd", "pubd"): - - username = cfg.get("%s_sql_username" % name, name[:4]) - password = cfg.get("%s_sql_password" % name, "fnord") - - schema = [] - for line in getattr(rpki.sql_schemas, name, "").splitlines(): - schema.extend(line.partition("--")[0].split()) - schema = " ".join(schema).strip(";").split(";") - schema = [statement.strip() for statement in schema if statement and "DROP TABLE" not in statement] - - db = MySQLdb.connect(user = username, passwd = password) - cur = db.cursor() - - cur.execute("SHOW DATABASES") - - databases = [r[0] for r in cur.fetchall() if r[0][:4] == name[:4] and r[0][4:].isdigit()] - - for database in databases: - - cur.execute("USE " + database) - - cur.execute("SHOW TABLES") - tables = [r[0] for r in cur.fetchall()] - - cur.execute("SET foreign_key_checks = 0") - for table in tables: - cur.execute("DROP TABLE %s" % table) - cur.execute("SET foreign_key_checks = 1") - - for statement in schema: - cur.execute(statement) - - cur.close() - db.close() diff --git a/rpkid/tests/sql-dumper.py b/rpkid/tests/sql-dumper.py deleted file mode 100644 index f4a7681d..00000000 --- a/rpkid/tests/sql-dumper.py +++ /dev/null @@ -1,43 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Dump backup copies of SQL tables used by these programs. -""" - -import subprocess -import rpki.config -from rpki.mysql_import import MySQLdb - -cfg = rpki.config.parser(None, "yamltest", allow_missing = True) - -for name in ("rpkid", "irdbd", "pubd"): - - username = cfg.get("%s_sql_username" % name, name[:4]) - password = cfg.get("%s_sql_password" % name, "fnord") - - cmd = ["mysqldump", "-u", username, "-p" + password, "--databases"] - - db = MySQLdb.connect(user = username, passwd = password) - cur = db.cursor() - - cur.execute("SHOW DATABASES") - cmd.extend(r[0] for r in cur.fetchall() if r[0][:4] == name[:4] and r[0][4:].isdigit()) - - cur.close() - db.close() - - subprocess.check_call(cmd, stdout = open("backup.%s.sql" % name, "w")) diff --git a/rpkid/tests/testpoke.py b/rpkid/tests/testpoke.py deleted file mode 100644 index fd5ab206..00000000 --- a/rpkid/tests/testpoke.py +++ /dev/null @@ -1,152 +0,0 @@ -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -Trivial RPKI up-down protocol client, for testing. - -Configuration file is YAML to be compatable with APNIC rpki_poke.pl tool. -""" - -import os -import time -import argparse -import sys -import yaml -import rpki.resource_set -import rpki.up_down -import rpki.left_right -import rpki.x509 -import rpki.http -import rpki.config -import rpki.exceptions -import rpki.relaxng -import rpki.log -import rpki.async - -os.environ["TZ"] = "UTC" -time.tzset() - -parser = argparse.ArgumentParser(description = __doc__) -parser.add_argument("-y", "--yaml", required = True, type = argparse.FileType("r"), - help = "configuration file") -parser.add_argument("-r", "--request", - help = "request name") -parser.add_argument("-d", "--debug", - help = "enable debugging") -args = parser.parse_args() - -rpki.log.init("testpoke") - -if args.debug: - rpki.log.set_trace(True) - -yaml_data = yaml.load(args.yaml) - -yaml_cmd = args.request - -if yaml_cmd is None and len(yaml_data["requests"]) == 1: - yaml_cmd = yaml_data["requests"].keys()[0] - -yaml_req = yaml_data["requests"][yaml_cmd] - -def get_PEM(name, cls, y = yaml_data): - if name in y: - return cls(PEM = y[name]) - if name + "-file" in y: - return cls(PEM_file = y[name + "-file"]) - return None - -def get_PEM_chain(name, cert = None): - chain = [] - if cert is not None: - chain.append(cert) - if name in yaml_data: - chain.extend([rpki.x509.X509(PEM = x) for x in yaml_data[name]]) - elif name + "-file" in yaml_data: - chain.extend([rpki.x509.X509(PEM_file = x) for x in yaml_data[name + "-file"]]) - return chain - -def query_up_down(q_pdu): - q_msg = rpki.up_down.message_pdu.make_query( - payload 
= q_pdu, - sender = yaml_data["sender-id"], - recipient = yaml_data["recipient-id"]) - q_der = rpki.up_down.cms_msg().wrap(q_msg, cms_key, cms_certs, cms_crl) - - def done(r_der): - global last_cms_timestamp - r_cms = rpki.up_down.cms_msg(DER = r_der) - r_msg = r_cms.unwrap([cms_ta] + cms_ca_certs) - last_cms_timestamp = r_cms.check_replay(last_cms_timestamp) - print r_cms.pretty_print_content() - try: - r_msg.payload.check_response() - except (rpki.async.ExitNow, SystemExit): - raise - except Exception, e: - fail(e) - - rpki.http.want_persistent_client = False - - rpki.http.client( - msg = q_der, - url = yaml_data["posturl"], - callback = done, - errback = fail) - -def do_list(): - query_up_down(rpki.up_down.list_pdu()) - -def do_issue(): - q_pdu = rpki.up_down.issue_pdu() - req_key = get_PEM("cert-request-key", rpki.x509.RSA, yaml_req) or cms_key - q_pdu.class_name = yaml_req["class"] - q_pdu.pkcs10 = rpki.x509.PKCS10.create( - keypair = req_key, - is_ca = True, - caRepository = yaml_req["sia"][0], - rpkiManifest = yaml_req["sia"][0] + req_key.gSKI() + ".mft") - query_up_down(q_pdu) - -def do_revoke(): - q_pdu = rpki.up_down.revoke_pdu() - q_pdu.class_name = yaml_req["class"] - q_pdu.ski = yaml_req["ski"] - query_up_down(q_pdu) - -dispatch = { "list" : do_list, "issue" : do_issue, "revoke" : do_revoke } - -def fail(e): # pylint: disable=W0621 - rpki.log.traceback(args.debug) - sys.exit("Testpoke failed: %s" % e) - -cms_ta = get_PEM("cms-ca-cert", rpki.x509.X509) -cms_cert = get_PEM("cms-cert", rpki.x509.X509) -cms_key = get_PEM("cms-key", rpki.x509.RSA) -cms_crl = get_PEM("cms-crl", rpki.x509.CRL) -cms_certs = get_PEM_chain("cms-cert-chain", cms_cert) -cms_ca_certs = get_PEM_chain("cms-ca-certs") - -last_cms_timestamp = None - -try: - dispatch[yaml_req["type"]]() - rpki.async.event_loop() -except Exception, e: - fail(e) diff --git a/rpkid/tests/testpoke.xsl b/rpkid/tests/testpoke.xsl deleted file mode 100644 index 91658b0b..00000000 --- a/rpkid/tests/testpoke.xsl 
+++ /dev/null @@ -1,78 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -----BEGIN CERTIFICATE----- - - - -----END CERTIFICATE----- - - - - diff --git a/rpkid/tests/testpoke.yaml b/rpkid/tests/testpoke.yaml deleted file mode 100644 index f2b2c618..00000000 --- a/rpkid/tests/testpoke.yaml +++ /dev/null @@ -1,24 +0,0 @@ ---- -# $Id$ - -version: 1 -posturl: http://localhost:4433/up-down/1 -recipient-id: wombat -sender-id: "1" - -cms-cert-file: biz-certs/Frank-EE.cer -cms-key-file: biz-certs/Frank-EE.key -cms-ca-cert-file: biz-certs/Bob-Root.cer -cms-cert-chain-file: [ biz-certs/Frank-CA.cer ] - -requests: - list: - type: list - issue: - type: issue - class: 1 - sia: [ "rsync://bandicoot.invalid/some/where/" ] - revoke: - type: revoke - class: 1 - ski: "CB5K6APY-4KcGAW9jaK_cVPXKX0" diff --git a/rpkid/tests/up-down-protocol-samples/Makefile b/rpkid/tests/up-down-protocol-samples/Makefile deleted file mode 100644 index 10ee791a..00000000 --- a/rpkid/tests/up-down-protocol-samples/Makefile +++ /dev/null @@ -1,11 +0,0 @@ -XMLLINT = xmllint --noout --relaxng -JING = java -jar /usr/local/share/java/classes/jing.jar -SCHEMA = ../up-down-medium-schema.rng - -all: jing xmllint - -jing: - ${JING} ${SCHEMA} *.xml - -xmllint: - ${XMLLINT} ${SCHEMA} *.xml diff --git a/rpkid/tests/up-down-protocol-samples/error_response.xml b/rpkid/tests/up-down-protocol-samples/error_response.xml deleted file mode 100644 index 83af6649..00000000 --- a/rpkid/tests/up-down-protocol-samples/error_response.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - 2001 - [Readable text] - diff --git a/rpkid/tests/up-down-protocol-samples/issue1.xml b/rpkid/tests/up-down-protocol-samples/issue1.xml deleted file mode 100644 index 4b8366f9..00000000 --- a/rpkid/tests/up-down-protocol-samples/issue1.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - - MIICYTCCAUkCAQAwHDEaMBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWMwggEiMA0G - CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIi6ElZd/uon9Ur1IKGhr6DXWzPOng - 
KdOJIOlRSWcsQ9qgLNREs5YUqQd3YLlvAe+OVKV0rFpn+DBNEPmsn7h1YQv253zq - m1yYeks+xOJZQtMZyg9YDrfIgk7lu6z9kuWIsvxkz244OxiD/OemrvuQNtDhyk2Q - QQ8POyrADNl7fehQE/YJc4Kj0uO7ggiHf9K7Dg56KLYlArXZUfwzMkdH/89/vO4A - AbsFXi4Dmq2VO8rCxodkdDmqWWuu4KdRGgfyjkyOZS/f8pm64LaKT8AgcnmYAI8N - UBM90T6Mvdx0qTOoVh0xeHznAp6NChQSbdM3x3rwhBD+/k0olyZuCIWhAgMBAAGg - ADANBgkqhkiG9w0BAQUFAAOCAQEAj9bYIVfREySBzUhQSlbNi9kfdXgivC/4A7pn - b4sMm081S05u0QLhyh1XNF/L3/U5yVElVHE8xobM/CuAkXpy7N5GSYj2T28Fmn77 - 1y/xdGg6Jp26OkbrqY3gjQAaMigYg9/6tPAc9fgLiQAJLUUYb2hRqaqu4Ze8RrxU - RsnVpAHWYDFWJhNqEp8eErzAVLqxpmoYJKgmpK6TKyYKuf8+xf3Rlkb4+iu2FotR - DQrmcd6jmMjp9xLejDEuoPgcfpVP2CB1jUCAIW7yE7+a7vj9Mop1gs61zP8y/p2V - rVnXgEy93WZLjQt1D29oKhlcFGtCG4nqIBCDAWVuz/LGACB85w== - - diff --git a/rpkid/tests/up-down-protocol-samples/issue2.xml b/rpkid/tests/up-down-protocol-samples/issue2.xml deleted file mode 100644 index a991cbcd..00000000 --- a/rpkid/tests/up-down-protocol-samples/issue2.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - MIICYTCCAUkCAQAwHDEaMBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWMwggEiMA0G - CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIi6ElZd/uon9Ur1IKGhr6DXWzPOng - KdOJIOlRSWcsQ9qgLNREs5YUqQd3YLlvAe+OVKV0rFpn+DBNEPmsn7h1YQv253zq - m1yYeks+xOJZQtMZyg9YDrfIgk7lu6z9kuWIsvxkz244OxiD/OemrvuQNtDhyk2Q - QQ8POyrADNl7fehQE/YJc4Kj0uO7ggiHf9K7Dg56KLYlArXZUfwzMkdH/89/vO4A - AbsFXi4Dmq2VO8rCxodkdDmqWWuu4KdRGgfyjkyOZS/f8pm64LaKT8AgcnmYAI8N - UBM90T6Mvdx0qTOoVh0xeHznAp6NChQSbdM3x3rwhBD+/k0olyZuCIWhAgMBAAGg - ADANBgkqhkiG9w0BAQUFAAOCAQEAj9bYIVfREySBzUhQSlbNi9kfdXgivC/4A7pn - b4sMm081S05u0QLhyh1XNF/L3/U5yVElVHE8xobM/CuAkXpy7N5GSYj2T28Fmn77 - 1y/xdGg6Jp26OkbrqY3gjQAaMigYg9/6tPAc9fgLiQAJLUUYb2hRqaqu4Ze8RrxU - RsnVpAHWYDFWJhNqEp8eErzAVLqxpmoYJKgmpK6TKyYKuf8+xf3Rlkb4+iu2FotR - DQrmcd6jmMjp9xLejDEuoPgcfpVP2CB1jUCAIW7yE7+a7vj9Mop1gs61zP8y/p2V - rVnXgEy93WZLjQt1D29oKhlcFGtCG4nqIBCDAWVuz/LGACB85w== - - diff --git a/rpkid/tests/up-down-protocol-samples/issue_response.xml b/rpkid/tests/up-down-protocol-samples/issue_response.xml deleted file mode 100644 index 
1ee34f62..00000000 --- a/rpkid/tests/up-down-protocol-samples/issue_response.xml +++ /dev/null @@ -1,117 +0,0 @@ - - - - - MIID3jCCAsagAwIBAgIBAzANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU - IEVOVElUWSBMSVIzMB4XDTA3MDgwMTE0NDgyMloXDTA4MDczMTE0NDgyMlowHDEa - MBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWEwggEiMA0GCSqGSIb3DQEBAQUAA4IB - DwAwggEKAoIBAQDmS614KGvmUBtlgdWNK1Z3zbvJR6CqMrAsrB/x5JArwjNv51Ox - 0B2rBSedt6HuqE/IWzYj4xLkUVknzf16qtxWBaFzq3ndPIKyj6757MA2OOYCqv2J - YCFSW7YzgHXlf/2sbuzUmiYvfihFFilHffOKctXkZfr0VG+uSDNiwTLxK4MzNmNg - nrzH55ldUdrNL4+DRyCe6cyjcsByvUktxFLqb9pCRnGQx69/n8fdC5aWPEWfwOpl - akPj85LV4XPAbiD1F+XRWNohs+kMTfDovXy374HJ9XDPqCB94mr5G2apyHHWMvhy - PYOZGQ0Ma+n4ks0zF4ZqPa8NBZSrHNQspEXLAgMBAAGjggEqMIIBJjAPBgNVHRMB - Af8EBTADAQH/MB0GA1UdDgQWBBQJ8BQLefsL/6jvVLnsPrmL0Muc7DAfBgNVHSME - GDAWgBSYvgT/gNGrlTmqPfIOZ30AraP9xTAOBgNVHQ8BAf8EBAMCAQYwQgYIKwYB - BQUHAQsENjA0MDIGCCsGAQUFBzAFhiZyc3luYzovL3dvbWJhdHMtci11cy5oYWN0 - cm4ubmV0L0lTUDVhLzBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAKGKHJzeW5j - Oi8vd29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvTElSMy5jZXIwOQYIKwYBBQUHAQcB - Af8EKjAoMAwEAgABMAYDBAAKAAAwGAQCAAIwEgMQACABDbgAAAAAAAAAAAoAADAN - BgkqhkiG9w0BAQUFAAOCAQEAkzKZYt3F6vAfWFAQN9M5N9n2klEmL9b9b4K4Vmv9 - DPNCBFbtZytCAphWB/FILS60MrvXHCcUoOmtOx3+Cw5D3yKX8Y9z2HbWmw2/7iDo - dxejgwGzI0OFa79vzC5pRwVz9CFFlMiuIShBFpHuSElmWmcxcQTJSXGU1fSGXHvG - Pv6RHSGzFJhUrW5RKOmoIrqk0JyM49R8IRAM+aMA+MOfALRTNAavW0pDlcuy+4wY - AIYRKF4k4ZDYZ9gA/LYnH56xvpEXwRE1bpxgUC5n8wQrdIn5/pJz3R5EgWe4CGOo - n/SMvEfe8d+LEc0C7LmtCwYoDOKENoOF809GVkbV9fjL8w== - - - MIID3jCCAsagAwIBAgIBAjANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU - IEVOVElUWSBMSVIzMB4XDTA3MDgwMTE0NDgyMFoXDTA4MDczMTE0NDgyMFowHDEa - MBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWIwggEiMA0GCSqGSIb3DQEBAQUAA4IB - DwAwggEKAoIBAQC/j1nY/PodBHApznsBZCFA3FxD/kyviMhim76cco+KpTSKOyON - m4pPv2asaHGc/WhZ9b+fTS611uP6vfNgU1y3EayVC8CHzZmelFeN7AW436r8jjjT - D2VtCWDy4ZiBcthRPkGRsxCV9fXQ+eVcoYX6cSaF49FMAn8U4h5KipZontYWpe+t - tYNizSN0fIJWtNE0U1qKemGfrlRb7/lW3odrQpK8SfS1wzUHShhH0pLGHBZ0dLHp - 
OTxTEgWd69ycciuXTSchd5Z9TM55DPunuJlrZiAuVpxEtONegMR9eKG0BfcgfSYe - RL9daRU8eiRnvbm1CA8zTa87Lee5qx0r1vtzAgMBAAGjggEqMIIBJjAPBgNVHRMB - Af8EBTADAQH/MB0GA1UdDgQWBBRss2WU/safSlCdTYtAGqH9lxeXkjAfBgNVHSME - GDAWgBSYvgT/gNGrlTmqPfIOZ30AraP9xTAOBgNVHQ8BAf8EBAMCAQYwQgYIKwYB - BQUHAQsENjA0MDIGCCsGAQUFBzAFhiZyc3luYzovL3dvbWJhdHMtci11cy5oYWN0 - cm4ubmV0L0lTUDViLzBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAKGKHJzeW5j - Oi8vd29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvTElSMy5jZXIwOQYIKwYBBQUHAQcB - Af8EKjAoMAwEAgABMAYDBAAKAwAwGAQCAAIwEgMQACABDbgAAAAAAAAAAAoDADAN - BgkqhkiG9w0BAQUFAAOCAQEARNgVrXF+6W7sMytC7YyKSt+CpJGZV7AvzKNZKv8k - xazhefrXkrpyK0caz4BtCHbptZFgNR/dDOC9M3wn0PcRTh9ISgW8beNfut16uj1F - fZdylJvNMXa4lt/wfRbzKqPicusCH0nutkRIW2mZuLuAO8v1vKr4umgZU+z/rXWu - glEA7OeBwmvPoqKixbgER5GtnTNySKIVVa1DUo/2CaPT/YjT48P0zXHoy6rnNgcn - 2emkoegzzS2cN+5I5I+O8IRnZInqmiPgEgElgEFw+rg6xw23yax5Nyqx12J56tt0 - tPWGhrYe1dCwKZajWKn3P9+NMcGQ0d8bw/QU+B3RyVeVfw== - - - MIIDxjCCAq6gAwIBAgIBATANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU - IEVOVElUWSBMSVIzMB4XDTA3MDgwMTE0NDgxOFoXDTA4MDczMTE0NDgxOFowHDEa - MBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWMwggEiMA0GCSqGSIb3DQEBAQUAA4IB - DwAwggEKAoIBAQDIi6ElZd/uon9Ur1IKGhr6DXWzPOngKdOJIOlRSWcsQ9qgLNRE - s5YUqQd3YLlvAe+OVKV0rFpn+DBNEPmsn7h1YQv253zqm1yYeks+xOJZQtMZyg9Y - DrfIgk7lu6z9kuWIsvxkz244OxiD/OemrvuQNtDhyk2QQQ8POyrADNl7fehQE/YJ - c4Kj0uO7ggiHf9K7Dg56KLYlArXZUfwzMkdH/89/vO4AAbsFXi4Dmq2VO8rCxodk - dDmqWWuu4KdRGgfyjkyOZS/f8pm64LaKT8AgcnmYAI8NUBM90T6Mvdx0qTOoVh0x - eHznAp6NChQSbdM3x3rwhBD+/k0olyZuCIWhAgMBAAGjggESMIIBDjAPBgNVHRMB - Af8EBTADAQH/MB0GA1UdDgQWBBQth8Ga+FgrvcL4fjBHs6mIN8nrRjAfBgNVHSME - GDAWgBSYvgT/gNGrlTmqPfIOZ30AraP9xTAOBgNVHQ8BAf8EBAMCAQYwQgYIKwYB - BQUHAQsENjA0MDIGCCsGAQUFBzAFhiZyc3luYzovL3dvbWJhdHMtci11cy5oYWN0 - cm4ubmV0L0lTUDVjLzBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAKGKHJzeW5j - Oi8vd29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvTElSMy5jZXIwIQYIKwYBBQUHAQgB - Af8EEjAQoA4wDDAKAgMA/BYCAwD8HDANBgkqhkiG9w0BAQUFAAOCAQEAUGsbhHfl - kwhe3EIkhnARJPgRkWgPCJtJ9konhROT7VlJ0Pim0kSrJWlBWUCLeKvSjQmowP4g - 
SddHxN4ZoXnSjb0pCDeomrZeViVQ2hxH6L/tHkl5SIEHl9MvFOe8junvgpq9GGAI - CFcibkW7Gp9p4A+GQkns0l9v+wGwuVZmqmJk4YBo7hHZRbg6/IFL1MD3HKeXmn33 - lCwFhjUuDIMXRbY/1k5fui13QUolN7mLSk60NvXJ94Tga68c1eCIpapvhxAYw69G - 7mOX42aYu1FnidZNj7Lt9jOuW0REHlavrG17HxP5loTuCNtLH1ZIrJcO7rUz9C0D - YqMybYWFUqZHyg== - - - MIIEFTCCAv2gAwIBAgIBDjANBgkqhkiG9w0BAQUFADAaMRgwFgYDVQQDEw9URVNU - IEVOVElUWSBSSVIwHhcNMDcwODAxMTQ0ODE4WhcNMDgwNzMxMTQ0ODE4WjAbMRkw - FwYDVQQDExBURVNUIEVOVElUWSBMSVIzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A - MIIBCgKCAQEAoyFXYWSvERjUy96m3K3ZLA8PWJ9+yIVVESZMfPBraBqeagyP5tw9 - g1gqzHesGXNvWvNuJKzNGtwdC0xE9W2LChc9hvno/uZg5Z9AauWU6JpWFxccq8GM - N0ArVb8sXtyNyiV/il/u+xaG6+AI0ybl43DFDGv7G49rXPbiSlilNQHqBRs+zoS+ - tT9tGBZLaOV5TIh9tqVlozrCMtytj4oF7vbpeoDaEqkPWrXS0zGsPtMZJS0o3nls - zv13ZtXjL6nL+YWMILuihiPwk5UgBHjHxwem/vD0RbvPeCvdzpwIpUZoEEzXBWJs - hlotfwY4wk27RIcAQ3nSj/NrsvRcHLloAQIDAQABo4IBYzCCAV8wDwYDVR0TAQH/ - BAUwAwEB/zAdBgNVHQ4EFgQUmL4E/4DRq5U5qj3yDmd9AK2j/cUwHwYDVR0jBBgw - FoAU+7inozZICqCf8C7ci2i8s1xFJdcwDgYDVR0PAQH/BAQDAgEGMEEGCCsGAQUF - BwELBDUwMzAxBggrBgEFBQcwBYYlcnN5bmM6Ly93b21iYXRzLXItdXMuaGFjdHJu - Lm5ldC9MSVIzLzBDBggrBgEFBQcBAQQ3MDUwMwYIKwYBBQUHMAKGJ3JzeW5jOi8v - d29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvUklSLmNlcjAhBggrBgEFBQcBCAEB/wQS - MBCgDjAMMAoCAwD8FgIDAPwcMFEGCCsGAQUFBwEHAQH/BEIwQDASBAIAATAMAwQA - CgAAAwQACgMAMCoEAgACMCQDEAAgAQ24AAAAAAAAAAAKAAADEAAgAQ24AAAAAAAA - AAAKAwAwDQYJKoZIhvcNAQEFBQADggEBAEhmCa7kUuozB6aST0Gd2XStJBcR1oWI - 8mZS5WEOjnjbVvuryDEb0fLs3x2HgCHZgZ7IAOg31cNxJpc1Ff6ZYEG+m3LpkcG/ - yOMllfOVK8RQSY+nKuya2fm2J3dCOKogEjBW20HwxNd1WgFLrDaOTR9V+iROBPKs - 3ppMPp6ksPqEqDU/3N3bLHROIISlFwWHilXuTK5ZAnzncDIQnm+zUuxI/0d3v6Fp - 8VxVlNBHqzo0VpakZOkxwqo01qJRsoxVaIxeetGNQ4noPhtj6bEM4Y8xDS9f3R7o - eEHUSTnKonMemm/AB3KZnjwL7rkL2FI1ThmDRO3Z3lprbThjThJF8EU= - - - diff --git a/rpkid/tests/up-down-protocol-samples/list.xml b/rpkid/tests/up-down-protocol-samples/list.xml deleted file mode 100644 index 01a803f3..00000000 --- a/rpkid/tests/up-down-protocol-samples/list.xml +++ /dev/null @@ -1,6 +0,0 @@ 
- - diff --git a/rpkid/tests/up-down-protocol-samples/list_response.xml b/rpkid/tests/up-down-protocol-samples/list_response.xml deleted file mode 100644 index 09634955..00000000 --- a/rpkid/tests/up-down-protocol-samples/list_response.xml +++ /dev/null @@ -1,171 +0,0 @@ - - - - - MIID3jCCAsagAwIBAgIBAzANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU - IEVOVElUWSBMSVIzMB4XDTA3MDgwMTE0NDgyMloXDTA4MDczMTE0NDgyMlowHDEa - MBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWEwggEiMA0GCSqGSIb3DQEBAQUAA4IB - DwAwggEKAoIBAQDmS614KGvmUBtlgdWNK1Z3zbvJR6CqMrAsrB/x5JArwjNv51Ox - 0B2rBSedt6HuqE/IWzYj4xLkUVknzf16qtxWBaFzq3ndPIKyj6757MA2OOYCqv2J - YCFSW7YzgHXlf/2sbuzUmiYvfihFFilHffOKctXkZfr0VG+uSDNiwTLxK4MzNmNg - nrzH55ldUdrNL4+DRyCe6cyjcsByvUktxFLqb9pCRnGQx69/n8fdC5aWPEWfwOpl - akPj85LV4XPAbiD1F+XRWNohs+kMTfDovXy374HJ9XDPqCB94mr5G2apyHHWMvhy - PYOZGQ0Ma+n4ks0zF4ZqPa8NBZSrHNQspEXLAgMBAAGjggEqMIIBJjAPBgNVHRMB - Af8EBTADAQH/MB0GA1UdDgQWBBQJ8BQLefsL/6jvVLnsPrmL0Muc7DAfBgNVHSME - GDAWgBSYvgT/gNGrlTmqPfIOZ30AraP9xTAOBgNVHQ8BAf8EBAMCAQYwQgYIKwYB - BQUHAQsENjA0MDIGCCsGAQUFBzAFhiZyc3luYzovL3dvbWJhdHMtci11cy5oYWN0 - cm4ubmV0L0lTUDVhLzBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAKGKHJzeW5j - Oi8vd29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvTElSMy5jZXIwOQYIKwYBBQUHAQcB - Af8EKjAoMAwEAgABMAYDBAAKAAAwGAQCAAIwEgMQACABDbgAAAAAAAAAAAoAADAN - BgkqhkiG9w0BAQUFAAOCAQEAkzKZYt3F6vAfWFAQN9M5N9n2klEmL9b9b4K4Vmv9 - DPNCBFbtZytCAphWB/FILS60MrvXHCcUoOmtOx3+Cw5D3yKX8Y9z2HbWmw2/7iDo - dxejgwGzI0OFa79vzC5pRwVz9CFFlMiuIShBFpHuSElmWmcxcQTJSXGU1fSGXHvG - Pv6RHSGzFJhUrW5RKOmoIrqk0JyM49R8IRAM+aMA+MOfALRTNAavW0pDlcuy+4wY - AIYRKF4k4ZDYZ9gA/LYnH56xvpEXwRE1bpxgUC5n8wQrdIn5/pJz3R5EgWe4CGOo - n/SMvEfe8d+LEc0C7LmtCwYoDOKENoOF809GVkbV9fjL8w== - - - MIID3jCCAsagAwIBAgIBAjANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU - IEVOVElUWSBMSVIzMB4XDTA3MDgwMTE0NDgyMFoXDTA4MDczMTE0NDgyMFowHDEa - MBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWIwggEiMA0GCSqGSIb3DQEBAQUAA4IB - DwAwggEKAoIBAQC/j1nY/PodBHApznsBZCFA3FxD/kyviMhim76cco+KpTSKOyON - 
m4pPv2asaHGc/WhZ9b+fTS611uP6vfNgU1y3EayVC8CHzZmelFeN7AW436r8jjjT - D2VtCWDy4ZiBcthRPkGRsxCV9fXQ+eVcoYX6cSaF49FMAn8U4h5KipZontYWpe+t - tYNizSN0fIJWtNE0U1qKemGfrlRb7/lW3odrQpK8SfS1wzUHShhH0pLGHBZ0dLHp - OTxTEgWd69ycciuXTSchd5Z9TM55DPunuJlrZiAuVpxEtONegMR9eKG0BfcgfSYe - RL9daRU8eiRnvbm1CA8zTa87Lee5qx0r1vtzAgMBAAGjggEqMIIBJjAPBgNVHRMB - Af8EBTADAQH/MB0GA1UdDgQWBBRss2WU/safSlCdTYtAGqH9lxeXkjAfBgNVHSME - GDAWgBSYvgT/gNGrlTmqPfIOZ30AraP9xTAOBgNVHQ8BAf8EBAMCAQYwQgYIKwYB - BQUHAQsENjA0MDIGCCsGAQUFBzAFhiZyc3luYzovL3dvbWJhdHMtci11cy5oYWN0 - cm4ubmV0L0lTUDViLzBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAKGKHJzeW5j - Oi8vd29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvTElSMy5jZXIwOQYIKwYBBQUHAQcB - Af8EKjAoMAwEAgABMAYDBAAKAwAwGAQCAAIwEgMQACABDbgAAAAAAAAAAAoDADAN - BgkqhkiG9w0BAQUFAAOCAQEARNgVrXF+6W7sMytC7YyKSt+CpJGZV7AvzKNZKv8k - xazhefrXkrpyK0caz4BtCHbptZFgNR/dDOC9M3wn0PcRTh9ISgW8beNfut16uj1F - fZdylJvNMXa4lt/wfRbzKqPicusCH0nutkRIW2mZuLuAO8v1vKr4umgZU+z/rXWu - glEA7OeBwmvPoqKixbgER5GtnTNySKIVVa1DUo/2CaPT/YjT48P0zXHoy6rnNgcn - 2emkoegzzS2cN+5I5I+O8IRnZInqmiPgEgElgEFw+rg6xw23yax5Nyqx12J56tt0 - tPWGhrYe1dCwKZajWKn3P9+NMcGQ0d8bw/QU+B3RyVeVfw== - - - MIIDxjCCAq6gAwIBAgIBATANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU - IEVOVElUWSBMSVIzMB4XDTA3MDgwMTE0NDgxOFoXDTA4MDczMTE0NDgxOFowHDEa - MBgGA1UEAxMRVEVTVCBFTlRJVFkgSVNQNWMwggEiMA0GCSqGSIb3DQEBAQUAA4IB - DwAwggEKAoIBAQDIi6ElZd/uon9Ur1IKGhr6DXWzPOngKdOJIOlRSWcsQ9qgLNRE - s5YUqQd3YLlvAe+OVKV0rFpn+DBNEPmsn7h1YQv253zqm1yYeks+xOJZQtMZyg9Y - DrfIgk7lu6z9kuWIsvxkz244OxiD/OemrvuQNtDhyk2QQQ8POyrADNl7fehQE/YJ - c4Kj0uO7ggiHf9K7Dg56KLYlArXZUfwzMkdH/89/vO4AAbsFXi4Dmq2VO8rCxodk - dDmqWWuu4KdRGgfyjkyOZS/f8pm64LaKT8AgcnmYAI8NUBM90T6Mvdx0qTOoVh0x - eHznAp6NChQSbdM3x3rwhBD+/k0olyZuCIWhAgMBAAGjggESMIIBDjAPBgNVHRMB - Af8EBTADAQH/MB0GA1UdDgQWBBQth8Ga+FgrvcL4fjBHs6mIN8nrRjAfBgNVHSME - GDAWgBSYvgT/gNGrlTmqPfIOZ30AraP9xTAOBgNVHQ8BAf8EBAMCAQYwQgYIKwYB - BQUHAQsENjA0MDIGCCsGAQUFBzAFhiZyc3luYzovL3dvbWJhdHMtci11cy5oYWN0 - cm4ubmV0L0lTUDVjLzBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAKGKHJzeW5j - 
Oi8vd29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvTElSMy5jZXIwIQYIKwYBBQUHAQgB - Af8EEjAQoA4wDDAKAgMA/BYCAwD8HDANBgkqhkiG9w0BAQUFAAOCAQEAUGsbhHfl - kwhe3EIkhnARJPgRkWgPCJtJ9konhROT7VlJ0Pim0kSrJWlBWUCLeKvSjQmowP4g - SddHxN4ZoXnSjb0pCDeomrZeViVQ2hxH6L/tHkl5SIEHl9MvFOe8junvgpq9GGAI - CFcibkW7Gp9p4A+GQkns0l9v+wGwuVZmqmJk4YBo7hHZRbg6/IFL1MD3HKeXmn33 - lCwFhjUuDIMXRbY/1k5fui13QUolN7mLSk60NvXJ94Tga68c1eCIpapvhxAYw69G - 7mOX42aYu1FnidZNj7Lt9jOuW0REHlavrG17HxP5loTuCNtLH1ZIrJcO7rUz9C0D - YqMybYWFUqZHyg== - - - MIIEFTCCAv2gAwIBAgIBDjANBgkqhkiG9w0BAQUFADAaMRgwFgYDVQQDEw9URVNU - IEVOVElUWSBSSVIwHhcNMDcwODAxMTQ0ODE4WhcNMDgwNzMxMTQ0ODE4WjAbMRkw - FwYDVQQDExBURVNUIEVOVElUWSBMSVIzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A - MIIBCgKCAQEAoyFXYWSvERjUy96m3K3ZLA8PWJ9+yIVVESZMfPBraBqeagyP5tw9 - g1gqzHesGXNvWvNuJKzNGtwdC0xE9W2LChc9hvno/uZg5Z9AauWU6JpWFxccq8GM - N0ArVb8sXtyNyiV/il/u+xaG6+AI0ybl43DFDGv7G49rXPbiSlilNQHqBRs+zoS+ - tT9tGBZLaOV5TIh9tqVlozrCMtytj4oF7vbpeoDaEqkPWrXS0zGsPtMZJS0o3nls - zv13ZtXjL6nL+YWMILuihiPwk5UgBHjHxwem/vD0RbvPeCvdzpwIpUZoEEzXBWJs - hlotfwY4wk27RIcAQ3nSj/NrsvRcHLloAQIDAQABo4IBYzCCAV8wDwYDVR0TAQH/ - BAUwAwEB/zAdBgNVHQ4EFgQUmL4E/4DRq5U5qj3yDmd9AK2j/cUwHwYDVR0jBBgw - FoAU+7inozZICqCf8C7ci2i8s1xFJdcwDgYDVR0PAQH/BAQDAgEGMEEGCCsGAQUF - BwELBDUwMzAxBggrBgEFBQcwBYYlcnN5bmM6Ly93b21iYXRzLXItdXMuaGFjdHJu - Lm5ldC9MSVIzLzBDBggrBgEFBQcBAQQ3MDUwMwYIKwYBBQUHMAKGJ3JzeW5jOi8v - d29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvUklSLmNlcjAhBggrBgEFBQcBCAEB/wQS - MBCgDjAMMAoCAwD8FgIDAPwcMFEGCCsGAQUFBwEHAQH/BEIwQDASBAIAATAMAwQA - CgAAAwQACgMAMCoEAgACMCQDEAAgAQ24AAAAAAAAAAAKAAADEAAgAQ24AAAAAAAA - AAAKAwAwDQYJKoZIhvcNAQEFBQADggEBAEhmCa7kUuozB6aST0Gd2XStJBcR1oWI - 8mZS5WEOjnjbVvuryDEb0fLs3x2HgCHZgZ7IAOg31cNxJpc1Ff6ZYEG+m3LpkcG/ - yOMllfOVK8RQSY+nKuya2fm2J3dCOKogEjBW20HwxNd1WgFLrDaOTR9V+iROBPKs - 3ppMPp6ksPqEqDU/3N3bLHROIISlFwWHilXuTK5ZAnzncDIQnm+zUuxI/0d3v6Fp - 8VxVlNBHqzo0VpakZOkxwqo01qJRsoxVaIxeetGNQ4noPhtj6bEM4Y8xDS9f3R7o - eEHUSTnKonMemm/AB3KZnjwL7rkL2FI1ThmDRO3Z3lprbThjThJF8EU= - - - - - 
MIIDzDCCArSgAwIBAgIBCTANBgkqhkiG9w0BAQUFADAbMRkwFwYDVQQDExBURVNU - IEVOVElUWSBMSVIxMB4XDTA3MDgwMTE0NDgyMloXDTA4MDczMTE0NDgyMlowGzEZ - MBcGA1UEAxMQVEVTVCBFTlRJVFkgSVNQMjCCASIwDQYJKoZIhvcNAQEBBQADggEP - ADCCAQoCggEBANB338Qhrxtaa6inKNdDyJttJdiNf5Er45X9kmCsFBLXI2iFSw7b - K+Y44EjbGDePQMCQWA4/CWdfjj8EdQZgkkLz5EUENZVd6SJCLPZcpn15jOEIGXw1 - nTr95/+bKbXuiUfMDYOg4XOvHwmEqAuDzHmIv3wdc9arQhtkmlwZgyud5a1MWAV2 - lXAj7qXAMcqip8gdHvLJ8j04gsJT5VSG8nyxc+Hc6YZzCKxZO74vWMFCxYAYjDoK - KjL2/ijQKFKDxjBpUZBZGZvT1MLgUmrBTlmaGOR4Llf5fytddijJycV+5UOhm2jS - Bhy+P2n5wvqeT2jPY2/bbfxnNcCxbgo37DMCAwEAAaOCARkwggEVMA8GA1UdEwEB - /wQFMAMBAf8wHQYDVR0OBBYEFHOyFhrN3NcwYA/6gZX4ovVOlfOtMB8GA1UdIwQY - MBaAFIqUF/lT8luUVFbfdlETKfZxGaizMA4GA1UdDwEB/wQEAwIBBjBBBggrBgEF - BQcBCwQ1MDMwMQYIKwYBBQUHMAWGJXJzeW5jOi8vd29tYmF0cy1yLXVzLmhhY3Ry - bi5uZXQvSVNQMi8wRAYIKwYBBQUHAQEEODA2MDQGCCsGAQUFBzAChihyc3luYzov - L3dvbWJhdHMtci11cy5oYWN0cm4ubmV0L0xJUjEuY2VyMCkGCCsGAQUFBwEHAQH/ - BBowGDAWBAIAATAQMA4DBQLAAAIsAwUAwAACZDANBgkqhkiG9w0BAQUFAAOCAQEA - CvG1rzj5fZOV1Oq/SO+NYzxOHIA9egYgQg4NUpmqSz6v17RhR0+3tPfMmzxepTs8 - ut23KieOG7RcPGvR2f/CEvedgrrPdTS81wu01qhPWJNqriN6N+Mu8XCK3fUO+t+w - PxLUWqwzrRUcpdy+CMOOGg81Eg7e77iAeJCp648AChUdBRI6HTfp9PlKd25pJ7fj - f654MpKGbTkWXllPkxC1sL4cJUcq4o+Sn1zAKkjXUwAUjp6G6s+mIWZQiZU5Pv8n - lYXvPciYf83+wTBllLGtSFyut8hk6WmiB8rC1/5jS96pJaGRSxejqd0r99GlPre+ - QgMe2TRfFuM1esod7j1M1Q== - - - MIID9jCCAt6gAwIBAgIBEDANBgkqhkiG9w0BAQUFADAaMRgwFgYDVQQDEw9URVNU - IEVOVElUWSBSSVIwHhcNMDcwODAxMTQ0ODE4WhcNMDgwNzMxMTQ0ODE4WjAbMRkw - FwYDVQQDExBURVNUIEVOVElUWSBMSVIxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A - MIIBCgKCAQEAr10c+dm71QHhWzbMUfb9hldgqp7H7E4Fr/tRXHrCWMSoV64UYum8 - tnJ9z0nISkCCSvQ+MLWUJZ5seIFXQ9aFAo3RnLXXNC/iqX0YJ7VHmkIWyJB/lizd - uJgXH3diSggALeBzDDk3ug+nWVlMfM3iXNeYNhBsiD5FmaaIL/Z/MUm6QisTecKy - 8QnZrTekQbZtRqEYBaBTB47gmLLR/Wdod2TV8/4dIjaeJloaqhiUwyx+mq++LJ1e - dSxJ1jcrBh/MY5d+7ixfZ69NYj56HwzhHgLy0gZ1rj8RvI4PE2Q4FDYdXQLsr2XV - uWj0ImYr70dbrTvyr7ZxDJRWinwBNvA6PwIDAQABo4IBRDCCAUAwDwYDVR0TAQH/ - 
BAUwAwEB/zAdBgNVHQ4EFgQUipQX+VPyW5RUVt92URMp9nEZqLMwHwYDVR0jBBgw - FoAU+7inozZICqCf8C7ci2i8s1xFJdcwDgYDVR0PAQH/BAQDAgEGMEEGCCsGAQUF - BwELBDUwMzAxBggrBgEFBQcwBYYlcnN5bmM6Ly93b21iYXRzLXItdXMuaGFjdHJu - Lm5ldC9MSVIxLzBDBggrBgEFBQcBAQQ3MDUwMwYIKwYBBQUHMAKGJ3JzeW5jOi8v - d29tYmF0cy1yLXVzLmhhY3Rybi5uZXQvUklSLmNlcjAaBggrBgEFBQcBCAEB/wQL - MAmgBzAFAgMA/BUwOQYIKwYBBQUHAQcBAf8EKjAoMCYEAgABMCAwDgMFAMAAAgED - BQHAAAIgMA4DBQLAAAIsAwUAwAACZDANBgkqhkiG9w0BAQUFAAOCAQEAcn3dpGAj - ceSZKAuaulzTl0ty64mBPBGFjCXtebJQpeiuDjd0+SyhvpaDNUANNvkyFnQlnPcP - zUZHjrnNrAx+06yEXvYx9KnyBc2C1+DXOySbxxXR253CHZL3Gam4oWcK+z0jOgWD - KQVQ4wAnqYD+u1HxPjsMmK7x7tETckZkj0syTs9kzxqlsTSm8F8Y+ES7E+qNXyR9 - OxVgjr70vdgEp9AQftMQZ781SclWz7eLe5sXC1TuIct1sD6NssHGfCaxfFipSjEk - zeU/pZodfREUQSrlVbbb9HU0N59eHfGGKvZ0vojhuWPOrVzpPJGKTI20aQPn+VJ5 - KH3Nf1ICSa7Vxw== - - - diff --git a/rpkid/tests/up-down-protocol-samples/revoke.xml b/rpkid/tests/up-down-protocol-samples/revoke.xml deleted file mode 100644 index eb4b3efb..00000000 --- a/rpkid/tests/up-down-protocol-samples/revoke.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - diff --git a/rpkid/tests/up-down-protocol-samples/revoke_response.xml b/rpkid/tests/up-down-protocol-samples/revoke_response.xml deleted file mode 100644 index 9f4ebacc..00000000 --- a/rpkid/tests/up-down-protocol-samples/revoke_response.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - diff --git a/rpkid/tests/xml-parse-test.py b/rpkid/tests/xml-parse-test.py deleted file mode 100644 index 42b54695..00000000 --- a/rpkid/tests/xml-parse-test.py +++ /dev/null @@ -1,119 +0,0 @@ -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -import glob, lxml.etree, lxml.sax -import rpki.up_down, rpki.left_right, rpki.publication, rpki.relaxng - -verbose = False - -def test(fileglob, rng, sax_handler, encoding, tester = None): - files = glob.glob(fileglob) - files.sort() - for f in files: - print "" - handler = sax_handler() - elt_in = lxml.etree.parse(f).getroot() - if verbose: - print "" - print lxml.etree.tostring(elt_in, pretty_print = True, encoding = encoding, xml_declaration = True) - rng.assertValid(elt_in) - lxml.sax.saxify(elt_in, handler) - elt_out = handler.result.toXML() - if verbose: - print "" - print lxml.etree.tostring(elt_out, pretty_print = True, encoding = encoding, xml_declaration = True) - rng.assertValid(elt_out) - if (tester): - tester(elt_in, elt_out, handler.result) - if verbose: - print - -def pprint(pairs): - if verbose: - for thing, name in pairs: - if thing is not None: - print "[%s]" % name - print thing.get_POW().pprint() - -def ud_tester(elt_in, elt_out, msg): - assert isinstance(msg, rpki.up_down.message_pdu) - if isinstance(msg.payload, rpki.up_down.list_response_pdu): - for c in msg.payload.classes: - pprint([(c.certs[i].cert, ("%s certificate #%d" % (c.class_name, i))) for i in xrange(len(c.certs))] + [(c.issuer, ("%s issuer" % c.class_name))]) - -def lr_tester(elt_in, elt_out, msg): - assert isinstance(msg, rpki.left_right.msg) - for obj in msg: - if isinstance(obj, rpki.left_right.self_elt): - pprint(((obj.bpki_cert, "BPKI cert"), - (obj.bpki_glue, "BPKI glue"))) - if isinstance(obj, rpki.left_right.bsc_elt): - pprint(((obj.signing_cert, "Signing certificate"), - (obj.signing_cert_crl, "Signing certificate CRL"))) - # (obj.pkcs10_request, "PKCS #10 request") - if isinstance(obj, rpki.left_right.parent_elt): - pprint(((obj.bpki_cms_cert, "CMS certificate"), - (obj.bpki_cms_glue, "CMS glue"))) - if isinstance(obj, (rpki.left_right.child_elt, rpki.left_right.repository_elt)): - pprint(((obj.bpki_cert, "Certificate"), - (obj.bpki_glue, "Glue"))) - -def 
pp_tester(elt_in, elt_out, msg): - assert isinstance(msg, rpki.publication.msg) - for obj in msg: - if isinstance(obj, rpki.publication.client_elt): - pprint(((obj.bpki_cert, "BPKI cert"), - (obj.bpki_glue, "BPKI glue"))) - if isinstance(obj, rpki.publication.certificate_elt): - pprint(((obj.payload, "RPKI cert"),)) - if isinstance(obj, rpki.publication.crl_elt): - pprint(((obj.payload, "RPKI CRL"),)) - if isinstance(obj, rpki.publication.manifest_elt): - pprint(((obj.payload, "RPKI manifest"),)) - if isinstance(obj, rpki.publication.roa_elt): - pprint(((obj.payload, "ROA"),)) - -test(fileglob = "up-down-protocol-samples/*.xml", - rng = rpki.relaxng.up_down, - sax_handler = rpki.up_down.sax_handler, - encoding = "utf-8", - tester = ud_tester) - -test(fileglob = "left-right-protocol-samples/*.xml", - rng = rpki.relaxng.left_right, - sax_handler = rpki.left_right.sax_handler, - encoding = "us-ascii", - tester = lr_tester) - -test(fileglob = "publication-protocol-samples/*.xml", - rng = rpki.relaxng.publication, - sax_handler = rpki.publication.sax_handler, - encoding = "us-ascii", - tester = pp_tester) diff --git a/rpkid/tests/yamlconf.py b/rpkid/tests/yamlconf.py deleted file mode 100644 index 3c71d3cd..00000000 --- a/rpkid/tests/yamlconf.py +++ /dev/null @@ -1,794 +0,0 @@ -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -Test configuration tool, using the same YAML test description format -as smoketest.py and yamltest.py, but doing just the IRDB configuration -for a massive testbed, via direct use of the rpki.irdb library code. - -For most purposes, you don't want this, but when building a -configuration for tens or hundreds of thousands of elements, being -able to do the initial configuration stage quickly can help a lot. -""" - -# pylint: disable=W0702,W0621,W0602 - -import subprocess -import re -import os -import sys -import yaml -import time -import argparse -import rpki.resource_set -import rpki.sundial -import rpki.config -import rpki.log -import rpki.csv_utils -import rpki.x509 -import rpki.sql_schemas - -from rpki.mysql_import import MySQLdb - -section_regexp = re.compile(r"\s*\[\s*(.+?)\s*\]\s*$") -variable_regexp = re.compile(r"\s*([-a-zA-Z0-9_]+)\s*=\s*(.+?)\s*$") - -flat_publication = False -only_one_pubd = True -yaml_file = None -loopback = False -quiet = False -dns_suffix = None -mysql_rootuser = None -mysql_rootpass = None -publication_base = None -publication_root = None - -# The SQL username mismatch between rpkid/examples/rpki.conf and -# rpkid/tests/smoketest.setup.sql is completely stupid and really -# should be cleaned up at some point...but not today, at least not as -# part of writing this program. These default values are wired into -# yamltest to match smoketest.setup.sql, so wire them in here too but -# in a more obvious way. 
- -config_overrides = { - "irdbd_sql_username" : "irdb", "irdbd_sql_password" : "fnord", - "rpkid_sql_username" : "rpki", "rpkid_sql_password" : "fnord", - "pubd_sql_username" : "pubd", "pubd_sql_password" : "fnord" } - -def cleanpath(*names): - return os.path.normpath(os.path.join(*names)) - -this_dir = os.getcwd() -test_dir = None -rpki_conf = None - -class roa_request(object): - """ - Representation of a ROA request. - """ - - def __init__(self, asn, ipv4, ipv6): - self.asn = asn - self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None - self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None - - def __eq__(self, other): - return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6 - - def __hash__(self): - v4 = tuple(self.v4) if self.v4 is not None else None - v6 = tuple(self.v6) if self.v6 is not None else None - return self.asn.__hash__() + v4.__hash__() + v6.__hash__() - - def __str__(self): - if self.v4 and self.v6: - return "%s: %s,%s" % (self.asn, self.v4, self.v6) - else: - return "%s: %s" % (self.asn, self.v4 or self.v6) - - @classmethod - def parse(cls, y): - return cls(y.get("asn"), y.get("ipv4"), y.get("ipv6")) - -class allocation_db(list): - """ - Allocation database. 
- """ - - def __init__(self, y): - list.__init__(self) - self.root = allocation(y, self) - assert self.root.is_root - if self.root.crl_interval is None: - self.root.crl_interval = 60 * 60 - if self.root.regen_margin is None: - self.root.regen_margin = 24 * 60 * 60 - if self.root.base.valid_until is None: - self.root.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 2) - for a in self: - if a.base.valid_until is None: - a.base.valid_until = a.parent.base.valid_until - if a.crl_interval is None: - a.crl_interval = a.parent.crl_interval - if a.regen_margin is None: - a.regen_margin = a.parent.regen_margin - self.root.closure() - self.map = dict((a.name, a) for a in self) - for a in self: - if a.is_hosted: - a.hosted_by = self.map[a.hosted_by] - a.hosted_by.hosts.append(a) - assert not a.is_root and not a.hosted_by.is_hosted - - def dump(self): - for a in self: - a.dump() - - -class allocation(object): - """ - One entity in our allocation database. Every entity in the database - is assumed to hold resources. Entities that don't have the - hosted_by property run their own copies of rpkid, irdbd, and pubd. 
- """ - - base_port = 4400 - base_engine = -1 - parent = None - crl_interval = None - regen_margin = None - engine = -1 - rpkid_port = 4404 - irdbd_port = 4403 - pubd_port = 4402 - rootd_port = 4401 - rsync_port = 873 - - @classmethod - def allocate_port(cls): - cls.base_port += 1 - return cls.base_port - - @classmethod - def allocate_engine(cls): - cls.base_engine += 1 - return cls.base_engine - - def __init__(self, y, db, parent = None): - db.append(self) - self.name = y["name"] - self.parent = parent - self.kids = [allocation(k, db, self) for k in y.get("kids", ())] - valid_until = None - if "valid_until" in y: - valid_until = rpki.sundial.datetime.from_datetime(y.get("valid_until")) - if valid_until is None and "valid_for" in y: - valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(y["valid_for"]) - self.base = rpki.resource_set.resource_bag( - asn = rpki.resource_set.resource_set_as(y.get("asn")), - v4 = rpki.resource_set.resource_set_ipv4(y.get("ipv4")), - v6 = rpki.resource_set.resource_set_ipv6(y.get("ipv6")), - valid_until = valid_until) - if "crl_interval" in y: - self.crl_interval = rpki.sundial.timedelta.parse(y["crl_interval"]).convert_to_seconds() - if "regen_margin" in y: - self.regen_margin = rpki.sundial.timedelta.parse(y["regen_margin"]).convert_to_seconds() - if "ghostbusters" in y: - self.ghostbusters = y.get("ghostbusters") - elif "ghostbuster" in y: - self.ghostbusters = [y.get("ghostbuster")] - else: - self.ghostbusters = [] - self.roa_requests = [roa_request.parse(r) for r in y.get("roa_request", ())] - for r in self.roa_requests: - if r.v4: - self.base.v4 |= r.v4.to_resource_set() - if r.v6: - self.base.v6 |= r.v6.to_resource_set() - self.hosted_by = y.get("hosted_by") - self.hosts = [] - if not self.is_hosted: - self.engine = self.allocate_engine() - if loopback and not self.is_hosted: - self.rpkid_port = self.allocate_port() - self.irdbd_port = self.allocate_port() - if loopback and self.runs_pubd: - self.pubd_port = 
self.allocate_port() - self.rsync_port = self.allocate_port() - if loopback and self.is_root: - self.rootd_port = self.allocate_port() - - def closure(self): - resources = self.base - for kid in self.kids: - resources |= kid.closure() - self.resources = resources - return resources - - @property - def hostname(self): - if loopback: - return "localhost" - elif dns_suffix: - return self.name + "." + dns_suffix.lstrip(".") - else: - return self.name - - @property - def rsync_server(self): - if loopback: - return "%s:%s" % (self.pubd.hostname, self.pubd.rsync_port) - else: - return self.pubd.hostname - - def dump(self): - if not quiet: - print str(self) - - def __str__(self): - s = self.name + ":\n" - if self.resources.asn: s += " ASNs: %s\n" % self.resources.asn - if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4 - if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6 - if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids) - if self.parent: s += " Up: %s\n" % self.parent.name - if self.is_hosted: s += " Host: %s\n" % self.hosted_by.name - if self.hosts: s += " Hosts: %s\n" % ", ".join(h.name for h in self.hosts) - for r in self.roa_requests: s += " ROA: %s\n" % r - if not self.is_hosted: s += " IPort: %s\n" % self.irdbd_port - if self.runs_pubd: s += " PPort: %s\n" % self.pubd_port - if not self.is_hosted: s += " RPort: %s\n" % self.rpkid_port - if self.runs_pubd: s += " SPort: %s\n" % self.rsync_port - if self.is_root: s += " TPort: %s\n" % self.rootd_port - return s + " Until: %s\n" % self.resources.valid_until - - @property - def is_root(self): - return self.parent is None - - @property - def is_hosted(self): - return self.hosted_by is not None - - @property - def runs_pubd(self): - return self.is_root or not (self.is_hosted or only_one_pubd) - - def path(self, *names): - return cleanpath(test_dir, self.host.name, *names) - - def csvout(self, fn): - path = self.path(fn) - if not quiet: - print "Writing", path - return 
rpki.csv_utils.csv_writer(path) - - def up_down_url(self): - return "http://%s:%d/up-down/%s/%s" % (self.parent.host.hostname, - self.parent.host.rpkid_port, - self.parent.name, - self.name) - - def dump_asns(self, fn): - with self.csvout(fn) as f: - for k in self.kids: - f.writerows((k.name, a) for a in k.resources.asn) - - def dump_prefixes(self, fn): - with self.csvout(fn) as f: - for k in self.kids: - f.writerows((k.name, p) for p in (k.resources.v4 + k.resources.v6)) - - def dump_roas(self, fn): - with self.csvout(fn) as f: - for g1, r in enumerate(self.roa_requests): - f.writerows((p, r.asn, "G%08d%08d" % (g1, g2)) - for g2, p in enumerate((r.v4 + r.v6 if r.v4 and r.v6 else r.v4 or r.v6 or ()))) - - def dump_ghostbusters(self, fn): - if self.ghostbusters: - path = self.path(fn) - if not quiet: - print "Writing", path - with open(path, "w") as f: - for i, g in enumerate(self.ghostbusters): - if i > 0: - f.write("\n") - f.write(g) - - @property - def pubd(self): - s = self - while not s.runs_pubd: - s = s.parent - return s - - @property - def client_handle(self): - path = [] - s = self - if not flat_publication: - while not s.runs_pubd: - path.append(s) - s = s.parent - path.append(s) - return ".".join(i.name for i in reversed(path)) - - @property - def host(self): - return self.hosted_by or self - - @property - def publication_base_directory(self): - if not loopback and publication_base is not None: - return publication_base - else: - return self.path("publication") - - @property - def publication_root_directory(self): - if not loopback and publication_root is not None: - return publication_root - else: - return self.path("publication.root") - - def dump_conf(self): - - r = dict( - handle = self.name, - run_rpkid = str(not self.is_hosted), - run_pubd = str(self.runs_pubd), - run_rootd = str(self.is_root), - irdbd_sql_username = "irdb", - rpkid_sql_username = "rpki", - rpkid_server_host = self.hostname, - rpkid_server_port = str(self.rpkid_port), - 
irdbd_server_host = "localhost", - irdbd_server_port = str(self.irdbd_port), - rootd_server_port = str(self.rootd_port), - pubd_sql_username = "pubd", - pubd_server_host = self.pubd.hostname, - pubd_server_port = str(self.pubd.pubd_port), - publication_rsync_server = self.rsync_server) - - if loopback: - r.update( - irdbd_sql_database = self.irdb_name, - rpkid_sql_database = "rpki%d" % self.engine, - pubd_sql_database = "pubd%d" % self.engine, - bpki_servers_directory = self.path(), - publication_base_directory = self.publication_base_directory) - - r.update(config_overrides) - - with open(self.path("rpki.conf"), "w") as f: - f.write("# Automatically generated, do not edit\n") - if not quiet: - print "Writing", f.name - - section = None - for line in open(rpki_conf): - m = section_regexp.match(line) - if m: - section = m.group(1) - m = variable_regexp.match(line) - option = m.group(1) if m and section == "myrpki" else None - if option and option in r: - line = "%s = %s\n" % (option, r[option]) - f.write(line) - - def dump_rsyncd(self): - lines = [] - if self.runs_pubd: - lines.extend(( - "# Automatically generated, do not edit", - "port = %d" % self.rsync_port, - "address = %s" % self.hostname, - "log file = rsyncd.log", - "read only = yes", - "use chroot = no", - "[rpki]", - "path = %s" % self.publication_base_directory, - "comment = RPKI test")) - if self.is_root: - assert self.runs_pubd - lines.extend(( - "[root]", - "path = %s" % self.publication_root_directory, - "comment = RPKI test root")) - if lines: - with open(self.path("rsyncd.conf"), "w") as f: - if not quiet: - print "Writing", f.name - f.writelines(line + "\n" for line in lines) - - @property - def irdb_name(self): - return "irdb%d" % self.host.engine - - @property - def irdb(self): - prior_name = self.zoo.handle - return rpki.irdb.database( - self.irdb_name, - on_entry = lambda: self.zoo.reset_identity(self.name), - on_exit = lambda: self.zoo.reset_identity(prior_name)) - - def syncdb(self): - import 
django.core.management - assert not self.is_hosted - django.core.management.call_command("syncdb", - database = self.irdb_name, - load_initial_data = False, - interactive = False, - verbosity = 0) - - def hire_zookeeper(self): - assert not self.is_hosted - self._zoo = rpki.irdb.Zookeeper( - cfg = rpki.config.parser(self.path("rpki.conf")), - logstream = None if quiet else sys.stdout) - - @property - def zoo(self): - return self.host._zoo - - def dump_root(self): - - assert self.is_root and not self.is_hosted - - root_resources = rpki.resource_set.resource_bag( - asn = rpki.resource_set.resource_set_as("0-4294967295"), - v4 = rpki.resource_set.resource_set_ipv4("0.0.0.0/0"), - v6 = rpki.resource_set.resource_set_ipv6("::/0")) - - root_key = rpki.x509.RSA.generate(quiet = True) - - root_uri = "rsync://%s/rpki/" % self.rsync_server - - root_sia = (root_uri, root_uri + "root.mft", None) - - root_cert = rpki.x509.X509.self_certify( - keypair = root_key, - subject_key = root_key.get_public(), - serial = 1, - sia = root_sia, - notAfter = rpki.sundial.now() + rpki.sundial.timedelta(days = 365), - resources = root_resources) - - with open(self.path("publication.root", "root.cer"), "wb") as f: - f.write(root_cert.get_DER()) - - with open(self.path("root.key"), "wb") as f: - f.write(root_key.get_DER()) - - with open(cleanpath(test_dir, "root.tal"), "w") as f: - f.write("rsync://%s/root/root.cer\n\n%s" % ( - self.rsync_server, root_key.get_public().get_Base64())) - - def mkdir(self, *path): - path = self.path(*path) - if not quiet: - print "Creating directory", path - os.makedirs(path) - - def dump_sql(self): - if not self.is_hosted: - with open(self.path("rpkid.sql"), "w") as f: - if not quiet: - print "Writing", f.name - f.write(rpki.sql_schemas.rpkid) - if self.runs_pubd: - with open(self.path("pubd.sql"), "w") as f: - if not quiet: - print "Writing", f.name - f.write(rpki.sql_schemas.pubd) - if not self.is_hosted: - username = config_overrides["irdbd_sql_username"] - 
password = config_overrides["irdbd_sql_password"] - cmd = ("mysqldump", "-u", username, "-p" + password, self.irdb_name) - with open(self.path("irdbd.sql"), "w") as f: - if not quiet: - print "Writing", f.name - subprocess.check_call(cmd, stdout = f) - - -def pre_django_sql_setup(needed): - - username = config_overrides["irdbd_sql_username"] - password = config_overrides["irdbd_sql_password"] - - # If we have the MySQL root password, just blow away and recreate - # the required databases. Otherwise, check for missing databases, - # then blow away all tables in the required databases. In either - # case, we assume that the Django syncdb code will populate - # databases as necessary, all we need to do here is provide empty - # databases for the Django code to fill in. - - if mysql_rootpass is not None: - if mysql_rootpass: - db = MySQLdb.connect(user = mysql_rootuser, passwd = mysql_rootpass) - else: - db = MySQLdb.connect(user = mysql_rootuser) - cur = db.cursor() - for database in needed: - try: - cur.execute("DROP DATABASE IF EXISTS %s" % database) - except: - pass - cur.execute("CREATE DATABASE %s" % database) - cur.execute("GRANT ALL ON %s.* TO %s@localhost IDENTIFIED BY %%s" % ( - database, username), (password,)) - - else: - db = MySQLdb.connect(user = username, passwd = password) - cur = db.cursor() - cur.execute("SHOW DATABASES") - existing = set(r[0] for r in cur.fetchall()) - if needed - existing: - sys.stderr.write("The following databases are missing:\n") - for database in sorted(needed - existing): - sys.stderr.write(" %s\n" % database) - sys.stderr.write("Please create them manually or put MySQL root password in my config file\n") - sys.exit("Missing databases and MySQL root password not known, can't continue") - for database in needed: - db.select_db(database) - cur.execute("SHOW TABLES") - tables = [r[0] for r in cur.fetchall()] - cur.execute("SET foreign_key_checks = 0") - for table in tables: - cur.execute("DROP TABLE %s" % table) - 
cur.execute("SET foreign_key_checks = 1") - - cur.close() - db.commit() - db.close() - -class timestamp(object): - - def __init__(self, *args): - self.count = 0 - self.start = self.tick = rpki.sundial.now() - - def __call__(self, *args): - now = rpki.sundial.now() - if not quiet: - print "[Count %s last %s total %s now %s]" % ( - self.count, now - self.tick, now - self.start, now) - self.tick = now - self.count += 1 - - -def main(): - - global flat_publication - global config_overrides - global only_one_pubd - global loopback - global dns_suffix - global mysql_rootuser - global mysql_rootpass - global yaml_file - global test_dir - global rpki_conf - global publication_base - global publication_root - global quiet - - os.environ["TZ"] = "UTC" - time.tzset() - - parser = argparse.ArgumentParser(description = "yamlconf") - parser.add_argument("-c", "--config", help = "configuration file") - parser.add_argument("--dns_suffix", - help = "DNS suffix to add to hostnames") - parser.add_argument("-l", "--loopback", action = "store_true", - help = "Configure for use with yamltest on localhost") - parser.add_argument("-f", "--flat_publication", action = "store_true", - help = "Use flat publication model") - parser.add_argument("-q", "--quiet", action = "store_true", - help = "Work more quietly") - parser.add_argument("--profile", - help = "Filename for profile output") - parser.add_argument("yaml_file", type = argparse.FileType("r"), - help = "YAML file describing network to build") - args = parser.parse_args() - - dns_suffix = args.dns_suffix - loopback = args.loopback - flat_publication = args.flat_publication - quiet = args.quiet - yaml_file = args.yaml_file - - rpki.log.init("yamlconf", use_syslog = False) - - # Allow optional config file for this tool to override default - # passwords: this is mostly so that I can show a complete working - # example without publishing my own server's passwords. 
- - cfg = rpki.config.parser(args.config, "yamlconf", allow_missing = True) - try: - cfg.set_global_flags() - except: - pass - - # Use of "yamltest.dir" is deliberate: intent is for what we write to - # be usable with "yamltest --skip_config". - - only_one_pubd = cfg.getboolean("only_one_pubd", True) - test_dir = cfg.get("test_directory", cleanpath(this_dir, "yamltest.dir")) - rpki_conf = cfg.get("rpki_conf", cleanpath(this_dir, "..", "examples/rpki.conf")) - mysql_rootuser = cfg.get("mysql_rootuser", "root") - - try: - mysql_rootpass = cfg.get("mysql_rootpass") - except: - pass - - try: - publication_base = cfg.get("publication_base") - except: - pass - - try: - publication_root = cfg.get("publication_root") - except: - pass - - for k in ("rpkid_sql_password", "irdbd_sql_password", "pubd_sql_password", - "rpkid_sql_username", "irdbd_sql_username", "pubd_sql_username"): - if cfg.has_option(k): - config_overrides[k] = cfg.get(k) - - if args.profile: - import cProfile - prof = cProfile.Profile() - try: - prof.runcall(body) - finally: - prof.dump_stats(args.profile) - if not quiet: - print - print "Dumped profile data to %s" % args.profile - else: - body() - -def body(): - - global rpki - - ts = timestamp() - - for root, dirs, files in os.walk(test_dir, topdown = False): - for fn in files: - os.unlink(os.path.join(root, fn)) - for d in dirs: - os.rmdir(os.path.join(root, d)) - - if not quiet: - print - print "Reading YAML", yaml_file.name - - db = allocation_db(yaml.safe_load_all(yaml_file).next()) - - # Show what we loaded - - #db.dump() - - # Do pre-Django SQL setup - - pre_django_sql_setup(set(d.irdb_name for d in db if not d.is_hosted)) - - # Now ready for fun with multiple databases in Django! 
- - # https://docs.djangoproject.com/en/1.4/topics/db/multi-db/ - # https://docs.djangoproject.com/en/1.4/topics/db/sql/ - - database_template = { - "ENGINE" : "django.db.backends.mysql", - "USER" : config_overrides["irdbd_sql_username"], - "PASSWORD" : config_overrides["irdbd_sql_password"], - "HOST" : "", - "PORT" : "", - "OPTIONS" : { "init_command": "SET storage_engine=INNODB" }} - - databases = dict((d.irdb_name, - dict(database_template, NAME = d.irdb_name)) - for d in db if not d.is_hosted) - - databases["default"] = databases[db.root.irdb_name] - - from django.conf import settings - - settings.configure( - DATABASES = databases, - DATABASE_ROUTERS = ["rpki.irdb.router.DBContextRouter"], - INSTALLED_APPS = ("rpki.irdb",)) - - import rpki.irdb - - rpki.irdb.models.ca_certificate_lifetime = rpki.sundial.timedelta(days = 3652 * 2) - rpki.irdb.models.ee_certificate_lifetime = rpki.sundial.timedelta(days = 3652) - - ts() - - for d in db: - if not quiet: - print - print "Configuring", d.name - - if not d.is_hosted: - d.mkdir() - if d.runs_pubd: - d.mkdir("publication") - if d.is_root: - d.mkdir("publication.root") - - if not d.is_hosted: - d.dump_conf() - d.dump_rsyncd() - - d.dump_asns("%s.asns.csv" % d.name) - d.dump_prefixes("%s.prefixes.csv" % d.name) - d.dump_roas("%s.roas.csv" % d.name) - d.dump_ghostbusters("%s.ghostbusters.vcard" % d.name) - - if not d.is_hosted: - if not quiet: - print "Initializing SQL" - d.syncdb() - if not quiet: - print "Hiring zookeeper" - d.hire_zookeeper() - - with d.irdb: - if not quiet: - print "Creating identity" - x = d.zoo.initialize() - - if d.is_root: - if not quiet: - print "Creating RPKI root certificate and TAL" - d.dump_root() - x = d.zoo.configure_rootd() - - else: - with d.parent.irdb: - x = d.parent.zoo.configure_child(x.file)[0] - x = d.zoo.configure_parent(x.file)[0] - - with d.pubd.irdb: - x = d.pubd.zoo.configure_publication_client(x.file, flat = flat_publication)[0] - d.zoo.configure_repository(x.file) - - if 
loopback and not d.is_hosted: - with d.irdb: - d.zoo.write_bpki_files() - - ts() - - if not loopback: - if not quiet: - print - for d in db: - d.dump_sql() - -if __name__ == "__main__": - main() diff --git a/rpkid/tests/yamltest-test-all.sh b/rpkid/tests/yamltest-test-all.sh deleted file mode 100644 index 8daea04e..00000000 --- a/rpkid/tests/yamltest-test-all.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/sh - -# $Id$ - -# Copyright (C) 2009-2013 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -set -x - -export TZ=UTC - -test -z "$STY" && exec screen -L sh $0 - -screen -X split -screen -X focus - -: ${runtime=900} - -for yaml in smoketest.*.yaml -do - settitle "$yaml: Starting" - rm -rf test rcynic-data - python sql-cleaner.py - now=$(date +%s) - finish=$(($now + $runtime)) - title="$yaml: will finish at $(date -r $finish)" - settitle "$title" - screen sh -c "settitle '$title'; exec python yamltest.py -p yamltest.pid $yaml" - date - sleep 180 - date - while test $(date +%s) -lt $finish - do - sleep 30 - date - ../../rcynic/rcynic - ../../rcynic/rcynic-text rcynic.xml - ../../utils/scan_roas/scan_roas rcynic-data/authenticated - date - echo "$title" - done - if test -r yamltest.pid - then - kill -INT $(cat yamltest.pid) - sleep 30 - fi - make backup -done diff --git a/rpkid/tests/yamltest.py b/rpkid/tests/yamltest.py deleted file mode 100644 index 5eb3bd2f..00000000 --- a/rpkid/tests/yamltest.py +++ /dev/null @@ -1,875 +0,0 @@ -#!/usr/bin/env python - -""" -Test framework, using the same YAML test description format as -smoketest.py, but using the rpkic.py tool to do all the back-end -work. Reads YAML file, generates .csv and .conf files, runs daemons -and waits for one of them to exit. -""" - -# $Id$ -# -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -# Much of the YAML handling code lifted from smoketest.py. -# -# Still to do: -# -# - Implement smoketest.py-style delta actions, that is, modify the -# allocation database under control of the YAML file, dump out new -# .csv files, and run rpkic.py again to feed resulting changes into -# running daemons. -# - -# pylint: disable=W0702,W0621 - -import subprocess -import re -import os -import argparse -import sys -import yaml -import signal -import time -import lxml.etree -import rpki.resource_set -import rpki.sundial -import rpki.config -import rpki.log -import rpki.csv_utils -import rpki.x509 -import rpki.relaxng - -# Nasty regular expressions for parsing config files. Sadly, while -# the Python ConfigParser supports writing config files, it does so in -# such a limited way that it's easier just to hack this ourselves. - -section_regexp = re.compile(r"\s*\[\s*(.+?)\s*\]\s*$") -variable_regexp = re.compile(r"\s*([-a-zA-Z0-9_]+)\s*=\s*(.+?)\s*$") - -def cleanpath(*names): - """ - Construct normalized pathnames. - """ - return os.path.normpath(os.path.join(*names)) - -# Pathnames for various things we need - -this_dir = os.getcwd() -test_dir = cleanpath(this_dir, "yamltest.dir") -rpkid_dir = cleanpath(this_dir, "..") - -prog_rpkic = cleanpath(rpkid_dir, "rpkic") -prog_rpkid = cleanpath(rpkid_dir, "rpkid") -prog_irdbd = cleanpath(rpkid_dir, "irdbd") -prog_pubd = cleanpath(rpkid_dir, "pubd") -prog_rootd = cleanpath(rpkid_dir, "rootd") - -class roa_request(object): - """ - Representation of a ROA request. 
- """ - - def __init__(self, asn, ipv4, ipv6): - self.asn = asn - self.v4 = rpki.resource_set.roa_prefix_set_ipv4("".join(ipv4.split())) if ipv4 else None - self.v6 = rpki.resource_set.roa_prefix_set_ipv6("".join(ipv6.split())) if ipv6 else None - - def __eq__(self, other): - return self.asn == other.asn and self.v4 == other.v4 and self.v6 == other.v6 - - def __hash__(self): - v4 = tuple(self.v4) if self.v4 is not None else None - v6 = tuple(self.v6) if self.v6 is not None else None - return self.asn.__hash__() + v4.__hash__() + v6.__hash__() - - def __str__(self): - if self.v4 and self.v6: - return "%s: %s,%s" % (self.asn, self.v4, self.v6) - else: - return "%s: %s" % (self.asn, self.v4 or self.v6) - - @classmethod - def parse(cls, y): - """ - Parse a ROA request from YAML format. - """ - return cls(y.get("asn"), y.get("ipv4"), y.get("ipv6")) - - -class router_cert(object): - """ - Representation for a router_cert object. - """ - - _ecparams = None - - @classmethod - def ecparams(cls): - if cls._ecparams is None: - cls._ecparams = rpki.x509.KeyParams.generateEC() - return cls._ecparams - - def __init__(self, asn, router_id): - self.asn = rpki.resource_set.resource_set_as("".join(str(asn).split())) - self.router_id = router_id - self.keypair = rpki.x509.ECDSA.generate(self.ecparams()) - self.pkcs10 = rpki.x509.PKCS10.create(keypair = self.keypair) - self.gski = self.pkcs10.gSKI() - - def __eq__(self, other): - return self.asn == other.asn and self.router_id == other.router_id and self.gski == other.gski - - def __hash__(self): - v6 = tuple(self.v6) if self.v6 is not None else None - return tuple(self.asn).__hash__() + self.router_id.__hash__() + self.gski.__hash__() - - def __str__(self): - return "%s: %s: %s" % (self.asn, self.router_id, self.gski) - - @classmethod - def parse(cls, yaml): - return cls(yaml.get("asn"), yaml.get("router_id")) - -class allocation_db(list): - """ - Our allocation database. 
- """ - - def __init__(self, yaml): - list.__init__(self) - self.root = allocation(yaml, self) - assert self.root.is_root - if self.root.crl_interval is None: - self.root.crl_interval = 60 * 60 - if self.root.regen_margin is None: - self.root.regen_margin = 24 * 60 * 60 - if self.root.base.valid_until is None: - self.root.base.valid_until = rpki.sundial.now() + rpki.sundial.timedelta(days = 2) - for a in self: - if a.base.valid_until is None: - a.base.valid_until = a.parent.base.valid_until - if a.crl_interval is None: - a.crl_interval = a.parent.crl_interval - if a.regen_margin is None: - a.regen_margin = a.parent.regen_margin - self.root.closure() - self.map = dict((a.name, a) for a in self) - for a in self: - if a.is_hosted: - a.hosted_by = self.map[a.hosted_by] - a.hosted_by.hosts.append(a) - assert not a.is_root and not a.hosted_by.is_hosted - - def dump(self): - """ - Show contents of allocation database. - """ - for a in self: - a.dump() - - -class allocation(object): - """ - One entity in our allocation database. Every entity in the database - is assumed to hold resources, so needs at least rpkic services. - Entities that don't have the hosted_by property run their own copies - of rpkid, irdbd, and pubd, so they also need myirbe services. - """ - - base_port = None - parent = None - crl_interval = None - regen_margin = None - rootd_port = None - engine = -1 - rpkid_port = -1 - irdbd_port = -1 - pubd_port = -1 - rsync_port = -1 - rootd_port = -1 - rpkic_counter = 0L - - @classmethod - def allocate_port(cls): - """ - Allocate a TCP port. - """ - cls.base_port += 1 - return cls.base_port - - base_engine = -1 - - @classmethod - def allocate_engine(cls): - """ - Allocate an engine number, mostly used to construct MySQL database - names. 
- """ - cls.base_engine += 1 - return cls.base_engine - - def __init__(self, yaml, db, parent = None): - db.append(self) - self.name = yaml["name"] - self.parent = parent - self.kids = [allocation(k, db, self) for k in yaml.get("kids", ())] - valid_until = None - if "valid_until" in yaml: - valid_until = rpki.sundial.datetime.from_datetime(yaml.get("valid_until")) - if valid_until is None and "valid_for" in yaml: - valid_until = rpki.sundial.now() + rpki.sundial.timedelta.parse(yaml["valid_for"]) - self.base = rpki.resource_set.resource_bag( - asn = rpki.resource_set.resource_set_as(yaml.get("asn")), - v4 = rpki.resource_set.resource_set_ipv4(yaml.get("ipv4")), - v6 = rpki.resource_set.resource_set_ipv6(yaml.get("ipv6")), - valid_until = valid_until) - if "crl_interval" in yaml: - self.crl_interval = rpki.sundial.timedelta.parse(yaml["crl_interval"]).convert_to_seconds() - if "regen_margin" in yaml: - self.regen_margin = rpki.sundial.timedelta.parse(yaml["regen_margin"]).convert_to_seconds() - self.roa_requests = [roa_request.parse(y) for y in yaml.get("roa_request", yaml.get("route_origin", ()))] - self.router_certs = [router_cert.parse(y) for y in yaml.get("router_cert", ())] - if "ghostbusters" in yaml: - self.ghostbusters = yaml.get("ghostbusters") - elif "ghostbuster" in yaml: - self.ghostbusters = [yaml.get("ghostbuster")] - else: - self.ghostbusters = [] - for r in self.roa_requests: - if r.v4: - self.base.v4 |= r.v4.to_resource_set() - if r.v6: - self.base.v6 |= r.v6.to_resource_set() - for r in self.router_certs: - self.base.asn |= r.asn - self.hosted_by = yaml.get("hosted_by") - self.hosts = [] - if not self.is_hosted: - self.engine = self.allocate_engine() - self.rpkid_port = self.allocate_port() - self.irdbd_port = self.allocate_port() - if self.runs_pubd: - self.pubd_port = self.allocate_port() - self.rsync_port = self.allocate_port() - if self.is_root: - self.rootd_port = self.allocate_port() - - def closure(self): - """ - Compute resource closure of 
this node and its children, to avoid a - lot of tedious (and error-prone) duplication in the YAML file. - """ - resources = self.base - for kid in self.kids: - resources |= kid.closure() - self.resources = resources - return resources - - def dump(self): - """ - Show content of this allocation node. - """ - print str(self) - - def __str__(self): - s = self.name + ":\n" - if self.resources.asn: s += " ASNs: %s\n" % self.resources.asn - if self.resources.v4: s += " IPv4: %s\n" % self.resources.v4 - if self.resources.v6: s += " IPv6: %s\n" % self.resources.v6 - if self.kids: s += " Kids: %s\n" % ", ".join(k.name for k in self.kids) - if self.parent: s += " Up: %s\n" % self.parent.name - if self.is_hosted: s += " Host: %s\n" % self.hosted_by.name - if self.hosts: s += " Hosts: %s\n" % ", ".join(h.name for h in self.hosts) - for r in self.roa_requests: s += " ROA: %s\n" % r - if not self.is_hosted: s += " IPort: %s\n" % self.irdbd_port - if self.runs_pubd: s += " PPort: %s\n" % self.pubd_port - if not self.is_hosted: s += " RPort: %s\n" % self.rpkid_port - if self.runs_pubd: s += " SPort: %s\n" % self.rsync_port - if self.is_root: s += " TPort: %s\n" % self.rootd_port - return s + " Until: %s\n" % self.resources.valid_until - - @property - def is_root(self): - """ - Is this the root node? - """ - return self.parent is None - - @property - def is_hosted(self): - """ - Is this entity hosted? - """ - return self.hosted_by is not None - - @property - def runs_pubd(self): - """ - Does this entity run a pubd? - """ - return self.is_root or not (self.is_hosted or only_one_pubd) - - def path(self, *names): - """ - Construct pathnames in this entity's test directory. - """ - return cleanpath(test_dir, self.host.name, *names) - - def csvout(self, fn): - """ - Open and log a CSV output file. - """ - path = self.path(fn) - print "Writing", path - return rpki.csv_utils.csv_writer(path) - - def up_down_url(self): - """ - Construct service URL for this node's parent. 
- """ - return "http://localhost:%d/up-down/%s/%s" % (self.parent.host.rpkid_port, - self.parent.name, - self.name) - - def dump_asns(self): - """ - Write Autonomous System Numbers CSV file. - """ - fn = "%s.asns.csv" % d.name - if not args.skip_config: - f = self.csvout(fn) - for k in self.kids: - f.writerows((k.name, a) for a in k.resources.asn) - f.close() - if not args.stop_after_config: - self.run_rpkic("load_asns", fn) - - def dump_prefixes(self): - """ - Write prefixes CSV file. - """ - fn = "%s.prefixes.csv" % d.name - if not args.skip_config: - f = self.csvout(fn) - for k in self.kids: - f.writerows((k.name, p) for p in (k.resources.v4 + k.resources.v6)) - f.close() - if not args.stop_after_config: - self.run_rpkic("load_prefixes", fn) - - def dump_roas(self): - """ - Write ROA CSV file. - """ - fn = "%s.roas.csv" % d.name - if not args.skip_config: - f = self.csvout(fn) - for g1, r in enumerate(self.roa_requests): - f.writerows((p, r.asn, "G%08d%08d" % (g1, g2)) - for g2, p in enumerate((r.v4 + r.v6 if r.v4 and r.v6 else r.v4 or r.v6 or ()))) - f.close() - if not args.stop_after_config: - self.run_rpkic("load_roa_requests", fn) - - def dump_ghostbusters(self): - """ - Write Ghostbusters vCard file. - """ - if self.ghostbusters: - fn = "%s.ghostbusters.vcard" % d.name - if not args.skip_config: - path = self.path(fn) - print "Writing", path - f = open(path, "w") - for i, g in enumerate(self.ghostbusters): - if i: - f.write("\n") - f.write(g) - f.close() - if not args.stop_after_config: - self.run_rpkic("load_ghostbuster_requests", fn) - - def dump_router_certificates(self): - """ - Write EE certificates (router certificates, etc). 
- """ - if self.router_certs: - fn = "%s.routercerts.xml" % d.name - if not args.skip_config: - path = self.path(fn) - print "Writing", path - xmlns = "{http://www.hactrn.net/uris/rpki/router-certificate/}" - xml = lxml.etree.Element(xmlns + "router_certificate_requests", version = "1") - for r in self.router_certs: - x = lxml.etree.SubElement(xml, xmlns + "router_certificate_request", - router_id = str(r.router_id), - asn = str(r.asn), - valid_until = str(self.resources.valid_until)) - x.text = r.pkcs10.get_Base64() - rpki.relaxng.router_certificate.assertValid(xml) - lxml.etree.ElementTree(xml).write(path, pretty_print = True) - if not args.stop_after_config: - self.run_rpkic("add_router_certificate_request", fn) - - @property - def pubd(self): - """ - Walk up tree until we find somebody who runs pubd. - """ - s = self - while not s.runs_pubd: - s = s.parent - return s - - @property - def client_handle(self): - """ - Work out what pubd configure_publication_client will call us. - """ - path = [] - s = self - if not args.flat_publication: - while not s.runs_pubd: - path.append(s) - s = s.parent - path.append(s) - return ".".join(i.name for i in reversed(path)) - - @property - def host(self): - return self.hosted_by or self - - def dump_conf(self): - """ - Write configuration file for OpenSSL and RPKI tools. 
- """ - - r = dict( - handle = self.name, - run_rpkid = str(not self.is_hosted), - run_pubd = str(self.runs_pubd), - run_rootd = str(self.is_root), - irdbd_sql_database = "irdb%d" % self.engine, - irdbd_sql_username = "irdb", - rpkid_sql_database = "rpki%d" % self.engine, - rpkid_sql_username = "rpki", - rpkid_server_host = "localhost", - rpkid_server_port = str(self.rpkid_port), - irdbd_server_host = "localhost", - irdbd_server_port = str(self.irdbd_port), - rootd_server_port = str(self.rootd_port), - pubd_sql_database = "pubd%d" % self.engine, - pubd_sql_username = "pubd", - pubd_server_host = "localhost", - pubd_server_port = str(self.pubd.pubd_port), - publication_rsync_server = "localhost:%s" % self.pubd.rsync_port, - bpki_servers_directory = self.path(), - publication_base_directory = self.path("publication"), - shared_sql_password = "fnord") - - r.update(config_overrides) - - f = open(self.path("rpki.conf"), "w") - f.write("# Automatically generated, do not edit\n") - print "Writing", f.name - - section = None - for line in open(cleanpath(rpkid_dir, "examples/rpki.conf")): - m = section_regexp.match(line) - if m: - section = m.group(1) - m = variable_regexp.match(line) - option = m.group(1) if m and section == "myrpki" else None - if option and option in r: - line = "%s = %s\n" % (option, r[option]) - f.write(line) - - f.close() - - def dump_rsyncd(self): - """ - Write rsyncd configuration file. 
- """ - - if self.runs_pubd: - f = open(self.path("rsyncd.conf"), "w") - print "Writing", f.name - f.writelines(s + "\n" for s in - ("# Automatically generated, do not edit", - "port = %d" % self.rsync_port, - "address = localhost", - "[rpki]", - "log file = rsyncd.log", - "read only = yes", - "use chroot = no", - "path = %s" % self.path("publication"), - "comment = RPKI test", - "[root]", - "log file = rsyncd_root.log", - "read only = yes", - "use chroot = no", - "path = %s" % self.path("publication.root"), - "comment = RPKI test root")) - f.close() - - @classmethod - def next_rpkic_counter(cls): - cls.rpkic_counter += 10000 - return str(cls.rpkic_counter) - - def run_rpkic(self, *argv): - """ - Run rpkic for this entity. - """ - cmd = [prog_rpkic, "-i", self.name, "-c", self.path("rpki.conf")] - if args.profile: - cmd.append("--profile") - cmd.append(self.path("rpkic.%s.prof" % rpki.sundial.now())) - cmd.extend(str(a) for a in argv if a is not None) - print 'Running "%s"' % " ".join(cmd) - env = os.environ.copy() - env["YAMLTEST_RPKIC_COUNTER"] = self.next_rpkic_counter() - subprocess.check_call(cmd, cwd = self.host.path(), env = env) - - def run_python_daemon(self, prog): - """ - Start a Python daemon and return a subprocess.Popen object - representing the running daemon. - """ - basename = os.path.splitext(os.path.basename(prog))[0] - cmd = [prog, "-d", "-c", self.path("rpki.conf")] - if args.profile and basename != "rootd": - cmd.append("--profile") - cmd.append(self.path(basename + ".prof")) - log = basename + ".log" - p = subprocess.Popen(cmd, - cwd = self.path(), - stdout = open(self.path(log), "w"), - stderr = subprocess.STDOUT) - print 'Running %s for %s: pid %d process %r' % (" ".join(cmd), self.name, p.pid, p) - return p - - def run_rpkid(self): - """ - Run rpkid. - """ - return self.run_python_daemon(prog_rpkid) - - def run_irdbd(self): - """ - Run irdbd. - """ - return self.run_python_daemon(prog_irdbd) - - def run_pubd(self): - """ - Run pubd. 
- """ - return self.run_python_daemon(prog_pubd) - - def run_rootd(self): - """ - Run rootd. - """ - return self.run_python_daemon(prog_rootd) - - def run_rsyncd(self): - """ - Run rsyncd. - """ - p = subprocess.Popen(("rsync", "--daemon", "--no-detach", "--config", "rsyncd.conf"), - cwd = self.path()) - print "Running rsyncd for %s: pid %d process %r" % (self.name, p.pid, p) - return p - -def create_root_certificate(db_root): - - print "Creating rootd RPKI root certificate" - - root_resources = rpki.resource_set.resource_bag( - asn = rpki.resource_set.resource_set_as("0-4294967295"), - v4 = rpki.resource_set.resource_set_ipv4("0.0.0.0/0"), - v6 = rpki.resource_set.resource_set_ipv6("::/0")) - - root_key = rpki.x509.RSA.generate(quiet = True) - - root_uri = "rsync://localhost:%d/rpki/" % db_root.pubd.rsync_port - - root_sia = (root_uri, root_uri + "root.mft", None) - - root_cert = rpki.x509.X509.self_certify( - keypair = root_key, - subject_key = root_key.get_public(), - serial = 1, - sia = root_sia, - notAfter = rpki.sundial.now() + rpki.sundial.timedelta(days = 365), - resources = root_resources) - - f = open(db_root.path("publication.root/root.cer"), "wb") - f.write(root_cert.get_DER()) - f.close() - - f = open(db_root.path("root.key"), "wb") - f.write(root_key.get_DER()) - f.close() - - f = open(os.path.join(test_dir, "root.tal"), "w") - f.write("rsync://localhost:%d/root/root.cer\n\n" % db_root.pubd.rsync_port) - f.write(root_key.get_public().get_Base64()) - f.close() - - - -os.environ["TZ"] = "UTC" -time.tzset() - -parser = argparse.ArgumentParser(description = __doc__) -parser.add_argument("-c", "--config", - help = "configuration file") -parser.add_argument("-f", "--flat_publication", action = "store_true", - help = "disable hierarchical publication") -parser.add_argument("-k", "--keep_going", action = "store_true", - help = "keep going until all subprocesses exit") -parser.add_argument("-p", "--pidfile", - help = "save pid to this file") 
-parser.add_argument("--skip_config", action = "store_true", - help = "skip over configuration phase") -parser.add_argument("--stop_after_config", action = "store_true", - help = "stop after configuration phase") -parser.add_argument("--synchronize", action = "store_true", - help = "synchronize IRDB with daemons") -parser.add_argument("--profile", action = "store_true", - help = "enable profiling") -parser.add_argument("yaml_file", type = argparse.FileType("r"), - help = "YAML description of test network") -args = parser.parse_args() - -try: - - if args.pidfile is not None: - open(args.pidfile, "w").write("%s\n" % os.getpid()) - - rpki.log.init("yamltest", use_syslog = False) - - # Allow optional config file for this tool to override default - # passwords: this is mostly so that I can show a complete working - # example without publishing my own server's passwords. - - cfg = rpki.config.parser(args.config, "yamltest", allow_missing = True) - - only_one_pubd = cfg.getboolean("only_one_pubd", True) - allocation.base_port = cfg.getint("base_port", 4400) - - config_overrides = dict( - (k, cfg.get(k)) - for k in ("rpkid_sql_password", "irdbd_sql_password", "pubd_sql_password", - "rpkid_sql_username", "irdbd_sql_username", "pubd_sql_username") - if cfg.has_option(k)) - - # Start clean, maybe - - if not args.skip_config: - for root, dirs, files in os.walk(test_dir, topdown = False): - for fn in files: - os.unlink(os.path.join(root, fn)) - for d in dirs: - os.rmdir(os.path.join(root, d)) - - # Read first YAML doc in file and process as compact description of - # test layout and resource allocations. Ignore subsequent YAML docs, - # they're for smoketest.py, not this script. 
- - db = allocation_db(yaml.safe_load_all(args.yaml_file).next()) - - # Show what we loaded - - #db.dump() - - if args.skip_config: - - print "Skipping pre-daemon configuration, assuming you already did that" - - else: - - # Set up each entity in our test, create publication directories, - # and initialize server BPKI. - - for d in db: - if not d.is_hosted: - os.makedirs(d.path()) - d.dump_conf() - if d.runs_pubd: - os.makedirs(d.path("publication")) - d.dump_rsyncd() - if d.is_root: - os.makedirs(d.path("publication.root")) - d.run_rpkic("initialize_server_bpki") - - # Initialize resource holding BPKI and generate self-descriptor - # for each entity. - - for d in db: - d.run_rpkic("create_identity", d.name) - - # Create RPKI root certificate. - - create_root_certificate(db.root) - - # Set up rootd. - - db.root.run_rpkic("configure_root") - - # From here on we need to pay attention to initialization order. We - # used to do all the pre-configure_daemons stuff before running any - # of the daemons, but that doesn't work right in hosted cases, so we - # have to interleave configuration with starting daemons, just as - # one would in the real world for this sort of thing. 
- - progs = [] - - try: - - for d in db: - - if not d.is_hosted: - print - print "Running daemons for", d.name - if d.is_root: - progs.append(d.run_rootd()) - progs.append(d.run_irdbd()) - progs.append(d.run_rpkid()) - if d.runs_pubd: - progs.append(d.run_pubd()) - progs.append(d.run_rsyncd()) - - if args.synchronize or not args.skip_config: - - print - print "Giving daemons time to start up" - time.sleep(20) - assert all(p.poll() is None for p in progs) - - if args.skip_config: - - print - print "Skipping configure_*, you'll have to do that yourself if needed" - - else: - - for d in db: - - print - print "Configuring", d.name - print - if d.is_root: - assert not d.is_hosted - d.run_rpkic("configure_publication_client", - "--flat" if args.flat_publication else None, - d.path("%s.%s.repository-request.xml" % (d.name, d.name))) - print - d.run_rpkic("configure_repository", - d.path("%s.repository-response.xml" % d.client_handle)) - print - else: - d.parent.run_rpkic("configure_child", - "--valid_until", d.resources.valid_until, - d.path("%s.identity.xml" % d.name)) - print - d.run_rpkic("configure_parent", - d.parent.path("%s.%s.parent-response.xml" % (d.parent.name, d.name))) - print - d.pubd.run_rpkic("configure_publication_client", - "--flat" if args.flat_publication else None, - d.path("%s.%s.repository-request.xml" % (d.name, d.parent.name))) - print - d.run_rpkic("configure_repository", - d.pubd.path("%s.repository-response.xml" % d.client_handle)) - print - - print - print "Done with initial configuration" - print - - if args.synchronize: - print - print "Synchronizing" - print - for d in db: - if not d.is_hosted: - d.run_rpkic("synchronize") - - if args.synchronize or not args.skip_config: - print - print "Loading CSV files" - print - for d in db: - d.dump_asns() - d.dump_prefixes() - d.dump_roas() - d.dump_ghostbusters() - d.dump_router_certificates() - - # Wait until something terminates. 
- - if not args.stop_after_config or args.keep_going: - print - print "Waiting for daemons to exit" - signal.signal(signal.SIGCHLD, lambda *dont_care: None) - while (any(p.poll() is None for p in progs) - if args.keep_going else - all(p.poll() is None for p in progs)): - signal.pause() - - finally: - - print - print "Shutting down" - print - - signal.signal(signal.SIGCHLD, signal.SIG_DFL) - - if args.profile: - how_long = 300 - else: - how_long = 30 - - how_often = how_long / 2 - - for i in xrange(how_long): - if i % how_often == 0: - for p in progs: - if p.poll() is None: - print "Politely nudging pid %d" % p.pid - p.terminate() - print - if all(p.poll() is not None for p in progs): - break - time.sleep(1) - - for p in progs: - if p.poll() is None: - print "Pulling the plug on pid %d" % p.pid - p.kill() - - for p in progs: - print "Program pid %d %r returned %d" % (p.pid, p, p.wait()) - -finally: - if args.pidfile is not None: - os.unlink(args.pidfile) diff --git a/rpkid/up-down-schema.rnc b/rpkid/up-down-schema.rnc deleted file mode 100644 index a603b8fe..00000000 --- a/rpkid/up-down-schema.rnc +++ /dev/null @@ -1,113 +0,0 @@ -# $Id$ -# -# RelaxNG schema for the up-down protocol, extracted from RFC 6492. -# -# Copyright (c) 2012 IETF Trust and the persons identified as authors -# of the code. All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in -# the documentation and/or other materials provided with the -# distribution. 
-# -# * Neither the name of Internet Society, IETF or IETF Trust, nor the -# names of specific contributors, may be used to endorse or promote -# products derived from this software without specific prior written -# permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
- -default namespace = "http://www.apnic.net/specs/rescerts/up-down/" - -grammar { - resource_set_as = xsd:string { maxLength="512000" pattern="[\-,0-9]*" } - resource_set_ip4 = xsd:string { maxLength="512000" pattern="[\-,/.0-9]*" } - resource_set_ip6 = xsd:string { maxLength="512000" pattern="[\-,/:0-9a-fA-F]*" } - - class_name = xsd:token { minLength="1" maxLength="1024" } - ski = xsd:token { minLength="27" maxLength="1024" } - label = xsd:token { minLength="1" maxLength="1024" } - cert_url = xsd:string { minLength="10" maxLength="4096" } - base64_binary = xsd:base64Binary { minLength="4" maxLength="512000" } - - start = element message { - attribute version { xsd:positiveInteger { maxInclusive="1" } }, - attribute sender { label }, - attribute recipient { label }, - payload - } - - payload |= attribute type { "list" }, list_request - payload |= attribute type { "list_response"}, list_response - payload |= attribute type { "issue" }, issue_request - payload |= attribute type { "issue_response"}, issue_response - payload |= attribute type { "revoke" }, revoke_request - payload |= attribute type { "revoke_response"}, revoke_response - payload |= attribute type { "error_response"}, error_response - - list_request = empty - list_response = class* - - class = element class { - attribute class_name { class_name }, - attribute cert_url { cert_url }, - attribute resource_set_as { resource_set_as }, - attribute resource_set_ipv4 { resource_set_ip4 }, - attribute resource_set_ipv6 { resource_set_ip6 }, - attribute resource_set_notafter { xsd:dateTime }, - attribute suggested_sia_head { xsd:anyURI { maxLength="1024" pattern="rsync://.+"} }?, - element certificate { - attribute cert_url { cert_url }, - attribute req_resource_set_as { resource_set_as }?, - attribute req_resource_set_ipv4 { resource_set_ip4 }?, - attribute req_resource_set_ipv6 { resource_set_ip6 }?, - base64_binary - }*, - element issuer { base64_binary } - } - - issue_request = element request { - attribute 
class_name { class_name }, - attribute req_resource_set_as { resource_set_as }?, - attribute req_resource_set_ipv4 { resource_set_ip4 }?, - attribute req_resource_set_ipv6 { resource_set_ip6 }?, - base64_binary - } - issue_response = class - - revoke_request = revocation - revoke_response = revocation - - revocation = element key { - attribute class_name { class_name }, - attribute ski { ski } - } - - error_response = - element status { xsd:positiveInteger { maxInclusive="9999" } }, - element description { attribute xml:lang { xsd:language }, xsd:string { maxLength="1024" } }* -} - -# Local Variables: -# indent-tabs-mode: nil -# comment-start: "# " -# comment-start-skip: "#[ \t]*" -# End: diff --git a/rpkid/up-down-schema.rng b/rpkid/up-down-schema.rng deleted file mode 100644 index 5368fa65..00000000 --- a/rpkid/up-down-schema.rng +++ /dev/null @@ -1,277 +0,0 @@ - - - - - - 512000 - [\-,0-9]* - - - - - 512000 - [\-,/.0-9]* - - - - - 512000 - [\-,/:0-9a-fA-F]* - - - - - 1 - 1024 - - - - - 27 - 1024 - - - - - 1 - 1024 - - - - - 10 - 4096 - - - - - 4 - 512000 - - - - - - - 1 - - - - - - - - - - - - - - list - - - - - - list_response - - - - - - issue - - - - - - issue_response - - - - - - revoke - - - - - - revoke_response - - - - - - error_response - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1024 - rsync://.+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 9999 - - - - - - - - - 1024 - - - - - - diff --git a/rpkid/upgrade-scripts/upgrade-rpkid-to-0.5709.py b/rpkid/upgrade-scripts/upgrade-rpkid-to-0.5709.py deleted file mode 100644 index aa8e3ec1..00000000 --- a/rpkid/upgrade-scripts/upgrade-rpkid-to-0.5709.py +++ /dev/null @@ -1,38 +0,0 @@ -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided 
that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Upgrade RPKI SQL databases to schema expected by 0.5709. - -This code is evaluated in the context of rpki-sql-setup's -do_apply_upgrades() function and has access to its variables. -""" - -db.cur.execute(""" - CREATE TABLE ee_cert ( - ee_cert_id SERIAL NOT NULL, - ski BINARY(20) NOT NULL, - cert LONGBLOB NOT NULL, - published DATETIME, - self_id BIGINT UNSIGNED NOT NULL, - ca_detail_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (ee_cert_id), - CONSTRAINT ee_cert_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - CONSTRAINT ee_cert_ca_detail_id - FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE - ) ENGINE=InnoDB -""") diff --git a/rtr-origin/Makefile.in b/rtr-origin/Makefile.in deleted file mode 100644 index daa18009..00000000 --- a/rtr-origin/Makefile.in +++ /dev/null @@ -1,63 +0,0 @@ -# $Id$ - -BASE = rtr-origin -SRC = ${BASE}.py -BIN = ${BASE} - -INSTALL = @INSTALL@ -PYTHON = @PYTHON@ -AWK = @AWK@ - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -libdir = @libdir@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -RTR_ORIGIN_INSTALL_TARGETS = @RCYNIC_INSTALL_TARGETS@ - -RPKI_RTR_PORT = 43779 - 
-SCAN_ROAS = ${bindir}/scan_roas - - -all: ${BIN} - -clean: - rm -f ${BIN} - -install: all ${RTR_ORIGIN_INSTALL_TARGETS} - -install-binary: - if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -m 555 -d ${DESTDIR}${bindir}; fi - ${INSTALL} -m 555 ${BIN} ${DESTDIR}${bindir}/${BIN} - -deinstall uninstall: - rm -f ${DESTDIR}${bindir}/${BIN} - -distclean: clean - rm -rf current sockets *.ax *.ix.* - rm -f Makefile - -${BIN} : ${SRC} - AC_PYTHON_INTERPRETER='${PYTHON}' AC_SCAN_ROAS='${SCAN_ROAS}' \ - ${PYTHON} ${abs_top_srcdir}/buildtools/make-rcynic-script.py <${SRC} >$@ - chmod a+x $@ - -test: - @true - -.FORCE: - -# Platform-specific rules below here. - -@RTR_ORIGIN_MAKE_RULES@ diff --git a/rtr-origin/README b/rtr-origin/README deleted file mode 100644 index dae53010..00000000 --- a/rtr-origin/README +++ /dev/null @@ -1,11 +0,0 @@ -$Id$ - -Sample implementation of draft-ymbk-rpki-rtr-protocol. - -See: - -- The primary documentation at http://trac.rpki.net/ - -- The PDF manual in ../doc/manual.pdf, or - -- The flat text page ../doc/doc.RPKI.RP.rpki-rtr diff --git a/rtr-origin/rtr-origin.py b/rtr-origin/rtr-origin.py deleted file mode 100755 index f37d2ce0..00000000 --- a/rtr-origin/rtr-origin.py +++ /dev/null @@ -1,2278 +0,0 @@ -#!/usr/bin/env python - -# Router origin-authentication rpki-router protocol implementation. See -# draft-ietf-sidr-rpki-rtr in fine Internet-Draft repositories near you. -# -# Run the program with the --help argument for usage information, or see -# documentation for the *_main() functions. -# -# -# $Id$ -# -# Copyright (C) 2009-2013 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -import sys -import os -import struct -import time -import glob -import socket -import fcntl -import signal -import syslog -import errno -import asyncore -import asynchat -import subprocess -import traceback -import getopt -import bisect -import random -import base64 - - -# Debugging only, should be False in production -disable_incrementals = False - -# Whether to log backtraces -backtrace_on_exceptions = False - -class IgnoreThisRecord(Exception): - pass - - -class timestamp(int): - """ - Wrapper around time module. - """ - - def __new__(cls, x): - return int.__new__(cls, x) - - @classmethod - def now(cls, delta = 0): - return cls(time.time() + delta) - - def __str__(self): - return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(self)) - - -class ipaddr(object): - """ - IP addresses. - """ - - def __init__(self, string = None, value = None): - assert (string is None) != (value is None) - if string is not None: - value = socket.inet_pton(self.af, string) - assert len(value) == self.size - self.value = value - - def __str__(self): - return socket.inet_ntop(self.af, self.value) - - def __cmp__(self, other): - return cmp(self.value, other.value) - -class v4addr(ipaddr): - af = socket.AF_INET - size = 4 - -class v6addr(ipaddr): - af = socket.AF_INET6 - size = 16 - -def read_current(): - """ - Read current serial number and nonce. Return None for both if - serial and nonce not recorded. 
For backwards compatibility, treat - file containing just a serial number as having a nonce of zero. - """ - try: - f = open("current", "r") - values = tuple(int(s) for s in f.read().split()) - f.close() - return values[0], values[1] - except IndexError: - return values[0], 0 - except IOError: - return None, None - -def write_current(serial, nonce): - """ - Write serial number and nonce. - """ - tmpfn = "current.%d.tmp" % os.getpid() - try: - f = open(tmpfn, "w") - f.write("%d %d\n" % (serial, nonce)) - f.close() - os.rename(tmpfn, "current") - finally: - try: - os.unlink(tmpfn) - except: - pass - - -def new_nonce(): - """ - Create and return a new nonce value. - """ - if force_zero_nonce: - return 0 - try: - return int(random.SystemRandom().getrandbits(16)) - except NotImplementedError: - return int(random.getrandbits(16)) - - -class read_buffer(object): - """ - Wrapper around synchronous/asynchronous read state. - """ - - def __init__(self): - self.buffer = "" - - def update(self, need, callback): - """ - Update count of needed bytes and callback, then dispatch to callback. - """ - self.need = need - self.callback = callback - return self.callback(self) - - def available(self): - """ - How much data do we have available in this buffer? - """ - return len(self.buffer) - - def needed(self): - """ - How much more data does this buffer need to become ready? - """ - return self.need - self.available() - - def ready(self): - """ - Is this buffer ready to read yet? - """ - return self.available() >= self.need - - def get(self, n): - """ - Hand some data to the caller. - """ - b = self.buffer[:n] - self.buffer = self.buffer[n:] - return b - - def put(self, b): - """ - Accumulate some data. - """ - self.buffer += b - - def retry(self): - """ - Try dispatching to the callback again. - """ - return self.callback(self) - -class PDUException(Exception): - """ - Parent exception type for exceptions that signal particular protocol - errors. 
String value of exception instance will be the message to - put in the error_report PDU, error_report_code value of exception - will be the numeric code to use. - """ - - def __init__(self, msg = None, pdu = None): - assert msg is None or isinstance(msg, (str, unicode)) - self.error_report_msg = msg - self.error_report_pdu = pdu - - def __str__(self): - return self.error_report_msg or self.__class__.__name__ - - def make_error_report(self): - return error_report(errno = self.error_report_code, - errmsg = self.error_report_msg, - errpdu = self.error_report_pdu) - -class UnsupportedProtocolVersion(PDUException): - error_report_code = 4 - -class UnsupportedPDUType(PDUException): - error_report_code = 5 - -class CorruptData(PDUException): - error_report_code = 0 - -class pdu(object): - """ - Object representing a generic PDU in the rpki-router protocol. - Real PDUs are subclasses of this class. - """ - - version = 0 # Protocol version - - _pdu = None # Cached when first generated - - header_struct = struct.Struct("!BBHL") - - def __cmp__(self, other): - return cmp(self.to_pdu(), other.to_pdu()) - - def check(self): - """ - Check attributes to make sure they're within range. 
- """ - pass - - @classmethod - def read_pdu(cls, reader): - return reader.update(need = cls.header_struct.size, callback = cls.got_header) - - @classmethod - def got_header(cls, reader): - if not reader.ready(): - return None - assert reader.available() >= cls.header_struct.size - version, pdu_type, whatever, length = cls.header_struct.unpack(reader.buffer[:cls.header_struct.size]) - if version != cls.version: - raise UnsupportedProtocolVersion( - "Received PDU version %d, expected %d" % (version, cls.version)) - if pdu_type not in cls.pdu_map: - raise UnsupportedPDUType( - "Received unsupported PDU type %d" % pdu_type) - if length < 8: - raise CorruptData( - "Received PDU with length %d, which is too short to be valid" % length) - self = cls.pdu_map[pdu_type]() - return reader.update(need = length, callback = self.got_pdu) - - def consume(self, client): - """ - Handle results in test client. Default behavior is just to print - out the PDU. - """ - blather(self) - - def send_file(self, server, filename): - """ - Send a content of a file as a cache response. Caller should catch IOError. - """ - f = open(filename, "rb") - server.push_pdu(cache_response(nonce = server.current_nonce)) - server.push_file(f) - server.push_pdu(end_of_data(serial = server.current_serial, nonce = server.current_nonce)) - - def send_nodata(self, server): - """ - Send a nodata error. - """ - server.push_pdu(error_report(errno = error_report.codes["No Data Available"], errpdu = self)) - -class pdu_with_serial(pdu): - """ - Base class for PDUs consisting of just a serial number and nonce. 
- """ - - header_struct = struct.Struct("!BBHLL") - - def __init__(self, serial = None, nonce = None): - if serial is not None: - assert isinstance(serial, int) - self.serial = serial - if nonce is not None: - assert isinstance(nonce, int) - self.nonce = nonce - - def __str__(self): - return "[%s, serial #%d nonce %d]" % (self.__class__.__name__, self.serial, self.nonce) - - def to_pdu(self): - """ - Generate the wire format PDU. - """ - if self._pdu is None: - self._pdu = self.header_struct.pack(self.version, self.pdu_type, self.nonce, - self.header_struct.size, self.serial) - return self._pdu - - def got_pdu(self, reader): - if not reader.ready(): - return None - b = reader.get(self.header_struct.size) - version, pdu_type, self.nonce, length, self.serial = self.header_struct.unpack(b) - if length != 12: - raise CorruptData("PDU length of %d can't be right" % length, pdu = self) - assert b == self.to_pdu() - return self - -class pdu_nonce(pdu): - """ - Base class for PDUs consisting of just a nonce. - """ - - header_struct = struct.Struct("!BBHL") - - def __init__(self, nonce = None): - if nonce is not None: - assert isinstance(nonce, int) - self.nonce = nonce - - def __str__(self): - return "[%s, nonce %d]" % (self.__class__.__name__, self.nonce) - - def to_pdu(self): - """ - Generate the wire format PDU. - """ - if self._pdu is None: - self._pdu = self.header_struct.pack(self.version, self.pdu_type, self.nonce, self.header_struct.size) - return self._pdu - - def got_pdu(self, reader): - if not reader.ready(): - return None - b = reader.get(self.header_struct.size) - version, pdu_type, self.nonce, length = self.header_struct.unpack(b) - if length != 8: - raise CorruptData("PDU length of %d can't be right" % length, pdu = self) - assert b == self.to_pdu() - return self - -class pdu_empty(pdu): - """ - Base class for empty PDUs. 
- """ - - header_struct = struct.Struct("!BBHL") - - def __str__(self): - return "[%s]" % self.__class__.__name__ - - def to_pdu(self): - """ - Generate the wire format PDU for this prefix. - """ - if self._pdu is None: - self._pdu = self.header_struct.pack(self.version, self.pdu_type, 0, self.header_struct.size) - return self._pdu - - def got_pdu(self, reader): - if not reader.ready(): - return None - b = reader.get(self.header_struct.size) - version, pdu_type, zero, length = self.header_struct.unpack(b) - if zero != 0: - raise CorruptData("Must-be-zero field isn't zero" % length, pdu = self) - if length != 8: - raise CorruptData("PDU length of %d can't be right" % length, pdu = self) - assert b == self.to_pdu() - return self - -class serial_notify(pdu_with_serial): - """ - Serial Notify PDU. - """ - - pdu_type = 0 - - def consume(self, client): - """ - Respond to a serial_notify message with either a serial_query or - reset_query, depending on what we already know. - """ - blather(self) - if client.current_serial is None or client.current_nonce != self.nonce: - client.push_pdu(reset_query()) - elif self.serial != client.current_serial: - client.push_pdu(serial_query(serial = client.current_serial, nonce = client.current_nonce)) - else: - blather("[Notify did not change serial number, ignoring]") - -class serial_query(pdu_with_serial): - """ - Serial Query PDU. - """ - - pdu_type = 1 - - def serve(self, server): - """ - Received a serial query, send incremental transfer in response. - If client is already up to date, just send an empty incremental - transfer. 
- """ - blather(self) - if server.get_serial() is None: - self.send_nodata(server) - elif server.current_nonce != self.nonce: - log("[Client requested wrong nonce, resetting client]") - server.push_pdu(cache_reset()) - elif server.current_serial == self.serial: - blather("[Client is already current, sending empty IXFR]") - server.push_pdu(cache_response(nonce = server.current_nonce)) - server.push_pdu(end_of_data(serial = server.current_serial, nonce = server.current_nonce)) - elif disable_incrementals: - server.push_pdu(cache_reset()) - else: - try: - self.send_file(server, "%d.ix.%d" % (server.current_serial, self.serial)) - except IOError: - server.push_pdu(cache_reset()) - -class reset_query(pdu_empty): - """ - Reset Query PDU. - """ - - pdu_type = 2 - - def serve(self, server): - """ - Received a reset query, send full current state in response. - """ - blather(self) - if server.get_serial() is None: - self.send_nodata(server) - else: - try: - fn = "%d.ax" % server.current_serial - self.send_file(server, fn) - except IOError: - server.push_pdu(error_report(errno = error_report.codes["Internal Error"], - errpdu = self, errmsg = "Couldn't open %s" % fn)) - -class cache_response(pdu_nonce): - """ - Cache Response PDU. - """ - - pdu_type = 3 - - def consume(self, client): - """ - Handle cache_response. - """ - blather(self) - if self.nonce != client.current_nonce: - blather("[Nonce changed, resetting]") - client.cache_reset() - -class end_of_data(pdu_with_serial): - """ - End of Data PDU. - """ - - pdu_type = 7 - - def consume(self, client): - """ - Handle end_of_data response. - """ - blather(self) - client.end_of_data(self.serial, self.nonce) - -class cache_reset(pdu_empty): - """ - Cache reset PDU. - """ - - pdu_type = 8 - - def consume(self, client): - """ - Handle cache_reset response, by issuing a reset_query. - """ - blather(self) - client.cache_reset() - client.push_pdu(reset_query()) - -class prefix(pdu): - """ - Object representing one prefix. 
This corresponds closely to one PDU - in the rpki-router protocol, so closely that we use lexical ordering - of the wire format of the PDU as the ordering for this class. - - This is a virtual class, but the .from_text() constructor - instantiates the correct concrete subclass (ipv4_prefix or - ipv6_prefix) depending on the syntax of its input text. - """ - - header_struct = struct.Struct("!BB2xLBBBx") - asnum_struct = struct.Struct("!L") - - @staticmethod - def from_text(asnum, addr): - """ - Construct a prefix from its text form. - """ - cls = ipv6_prefix if ":" in addr else ipv4_prefix - self = cls() - self.asn = long(asnum) - p, l = addr.split("/") - self.prefix = self.addr_type(string = p) - if "-" in l: - self.prefixlen, self.max_prefixlen = tuple(int(i) for i in l.split("-")) - else: - self.prefixlen = self.max_prefixlen = int(l) - self.announce = 1 - self.check() - return self - - def __str__(self): - plm = "%s/%s-%s" % (self.prefix, self.prefixlen, self.max_prefixlen) - return "%s %8s %-32s %s" % ("+" if self.announce else "-", self.asn, plm, - ":".join(("%02X" % ord(b) for b in self.to_pdu()))) - - def show(self): - blather("# Class: %s" % self.__class__.__name__) - blather("# ASN: %s" % self.asn) - blather("# Prefix: %s" % self.prefix) - blather("# Prefixlen: %s" % self.prefixlen) - blather("# MaxPrefixlen: %s" % self.max_prefixlen) - blather("# Announce: %s" % self.announce) - - def consume(self, client): - """ - Handle one incoming prefix PDU - """ - blather(self) - client.consume_prefix(self) - - def check(self): - """ - Check attributes to make sure they're within range. 
- """ - if self.announce not in (0, 1): - raise CorruptData("Announce value %d is neither zero nor one" % self.announce, pdu = self) - if self.prefixlen < 0 or self.prefixlen > self.addr_type.size * 8: - raise CorruptData("Implausible prefix length %d" % self.prefixlen, pdu = self) - if self.max_prefixlen < self.prefixlen or self.max_prefixlen > self.addr_type.size * 8: - raise CorruptData("Implausible max prefix length %d" % self.max_prefixlen, pdu = self) - pdulen = self.header_struct.size + self.addr_type.size + self.asnum_struct.size - if len(self.to_pdu()) != pdulen: - raise CorruptData("Expected %d byte PDU, got %d" % (pdulen, len(self.to_pdu())), pdu = self) - - def to_pdu(self, announce = None): - """ - Generate the wire format PDU for this prefix. - """ - if announce is not None: - assert announce in (0, 1) - elif self._pdu is not None: - return self._pdu - pdulen = self.header_struct.size + self.addr_type.size + self.asnum_struct.size - pdu = (self.header_struct.pack(self.version, self.pdu_type, pdulen, - announce if announce is not None else self.announce, - self.prefixlen, self.max_prefixlen) + - self.prefix.value + - self.asnum_struct.pack(self.asn)) - if announce is None: - assert self._pdu is None - self._pdu = pdu - return pdu - - def got_pdu(self, reader): - if not reader.ready(): - return None - b1 = reader.get(self.header_struct.size) - b2 = reader.get(self.addr_type.size) - b3 = reader.get(self.asnum_struct.size) - version, pdu_type, length, self.announce, self.prefixlen, self.max_prefixlen = self.header_struct.unpack(b1) - if length != len(b1) + len(b2) + len(b3): - raise CorruptData("Got PDU length %d, expected %d" % (length, len(b1) + len(b2) + len(b3)), pdu = self) - self.prefix = self.addr_type(value = b2) - self.asn = self.asnum_struct.unpack(b3)[0] - assert b1 + b2 + b3 == self.to_pdu() - return self - - @staticmethod - def from_bgpdump(line, rib_dump): - try: - assert isinstance(rib_dump, bool) - fields = line.split("|") - - # Parse 
prefix, including figuring out IP protocol version - cls = ipv6_prefix if ":" in fields[5] else ipv4_prefix - self = cls() - self.timestamp = timestamp(fields[1]) - p, l = fields[5].split("/") - self.prefix = self.addr_type(p) - self.prefixlen = self.max_prefixlen = int(l) - - # Withdrawals don't have AS paths, so be careful - assert fields[2] == "B" if rib_dump else fields[2] in ("A", "W") - if fields[2] == "W": - self.asn = 0 - self.announce = 0 - else: - self.announce = 1 - if not fields[6] or "{" in fields[6] or "(" in fields[6]: - raise IgnoreThisRecord - a = fields[6].split()[-1] - if "." in a: - a = [int(s) for s in a.split(".")] - if len(a) != 2 or a[0] < 0 or a[0] > 65535 or a[1] < 0 or a[1] > 65535: - log("Bad dotted ASNum %r, ignoring record" % fields[6]) - raise IgnoreThisRecord - a = (a[0] << 16) | a[1] - else: - a = int(a) - self.asn = a - - self.check() - return self - - except IgnoreThisRecord: - raise - - except Exception, e: - log("Ignoring line %r: %s" % (line, e)) - raise IgnoreThisRecord - -class ipv4_prefix(prefix): - """ - IPv4 flavor of a prefix. - """ - pdu_type = 4 - addr_type = v4addr - -class ipv6_prefix(prefix): - """ - IPv6 flavor of a prefix. - """ - pdu_type = 6 - addr_type = v6addr - -class router_key(pdu): - """ - Router Key PDU. - """ - - pdu_type = 9 - - header_struct = struct.Struct("!BBBxL20sL") - - @classmethod - def from_text(cls, asnum, gski, key): - """ - Construct a router key from its text form. 
- """ - - self = cls() - self.asn = long(asnum) - self.ski = base64.urlsafe_b64decode(gski + "=") - self.key = base64.b64decode(key) - self.announce = 1 - self.check() - return self - - def __str__(self): - return "%s %8s %-32s %s" % ("+" if self.announce else "-", self.asn, - base64.urlsafe_b64encode(self.ski).rstrip("="), - ":".join(("%02X" % ord(b) for b in self.to_pdu()))) - - def consume(self, client): - """ - Handle one incoming Router Key PDU - """ - - blather(self) - client.consume_routerkey(self) - - def check(self): - """ - Check attributes to make sure they're within range. - """ - - if self.announce not in (0, 1): - raise CorruptData("Announce value %d is neither zero nor one" % self.announce, pdu = self) - if len(self.ski) != 20: - raise CorruptData("Implausible SKI length %d" % len(self.ski), pdu = self) - pdulen = self.header_struct.size + len(self.key) - if len(self.to_pdu()) != pdulen: - raise CorruptData("Expected %d byte PDU, got %d" % (pdulen, len(self.to_pdu())), pdu = self) - - def to_pdu(self, announce = None): - if announce is not None: - assert announce in (0, 1) - elif self._pdu is not None: - return self._pdu - pdulen = self.header_struct.size + len(self.key) - pdu = (self.header_struct.pack(self.version, - self.pdu_type, - announce if announce is not None else self.announce, - pdulen, - self.ski, - self.asn) - + self.key) - if announce is None: - assert self._pdu is None - self._pdu = pdu - return pdu - - def got_pdu(self, reader): - if not reader.ready(): - return None - header = reader.get(self.header_struct.size) - version, pdu_type, self.announce, length, self.ski, self.asn = self.header_struct.unpack(header) - remaining = length - self.header_struct.size - if remaining <= 0: - raise CorruptData("Got PDU length %d, minimum is %d" % (length, self.header_struct.size + 1), pdu = self) - self.key = reader.get(remaining) - assert header + self.key == self.to_pdu() - return self - - -class error_report(pdu): - """ - Error Report PDU. 
- """ - - pdu_type = 10 - - header_struct = struct.Struct("!BBHL") - string_struct = struct.Struct("!L") - - errors = { - 2 : "No Data Available" } - - fatal = { - 0 : "Corrupt Data", - 1 : "Internal Error", - 3 : "Invalid Request", - 4 : "Unsupported Protocol Version", - 5 : "Unsupported PDU Type", - 6 : "Withdrawal of Unknown Record", - 7 : "Duplicate Announcement Received" } - - assert set(errors) & set(fatal) == set() - - errors.update(fatal) - - codes = dict((v, k) for k, v in errors.items()) - - def __init__(self, errno = None, errpdu = None, errmsg = None): - assert errno is None or errno in self.errors - self.errno = errno - self.errpdu = errpdu - self.errmsg = errmsg if errmsg is not None or errno is None else self.errors[errno] - - def __str__(self): - return "[%s, error #%s: %r]" % (self.__class__.__name__, self.errno, self.errmsg) - - def to_counted_string(self, s): - return self.string_struct.pack(len(s)) + s - - def read_counted_string(self, reader, remaining): - assert remaining >= self.string_struct.size - n = self.string_struct.unpack(reader.get(self.string_struct.size))[0] - assert remaining >= self.string_struct.size + n - return n, reader.get(n), (remaining - self.string_struct.size - n) - - def to_pdu(self): - """ - Generate the wire format PDU for this error report. 
- """ - if self._pdu is None: - assert isinstance(self.errno, int) - assert not isinstance(self.errpdu, error_report) - p = self.errpdu - if p is None: - p = "" - elif isinstance(p, pdu): - p = p.to_pdu() - assert isinstance(p, str) - pdulen = self.header_struct.size + self.string_struct.size * 2 + len(p) + len(self.errmsg) - self._pdu = self.header_struct.pack(self.version, self.pdu_type, self.errno, pdulen) - self._pdu += self.to_counted_string(p) - self._pdu += self.to_counted_string(self.errmsg.encode("utf8")) - return self._pdu - - def got_pdu(self, reader): - if not reader.ready(): - return None - header = reader.get(self.header_struct.size) - version, pdu_type, self.errno, length = self.header_struct.unpack(header) - remaining = length - self.header_struct.size - self.pdulen, self.errpdu, remaining = self.read_counted_string(reader, remaining) - self.errlen, self.errmsg, remaining = self.read_counted_string(reader, remaining) - if length != self.header_struct.size + self.string_struct.size * 2 + self.pdulen + self.errlen: - raise CorruptData("Got PDU length %d, expected %d" % ( - length, self.header_struct.size + self.string_struct.size * 2 + self.pdulen + self.errlen)) - assert (header - + self.to_counted_string(self.errpdu) - + self.to_counted_string(self.errmsg.encode("utf8")) - == self.to_pdu()) - return self - - def serve(self, server): - """ - Received an error_report from client. Not much we can do beyond - logging it, then killing the connection if error was fatal. 
- """ - log(self) - if self.errno in self.fatal: - log("[Shutting down due to reported fatal protocol error]") - sys.exit(1) - -pdu.pdu_map = dict((p.pdu_type, p) for p in (ipv4_prefix, ipv6_prefix, serial_notify, serial_query, reset_query, - cache_response, end_of_data, cache_reset, router_key, error_report)) - -class pdu_set(list): - """ - Object representing a set of PDUs, that is, one versioned and - (theoretically) consistant set of prefixes and router keys extracted - from rcynic's output. - """ - - @classmethod - def _load_file(cls, filename): - """ - Low-level method to read pdu_set from a file. - """ - self = cls() - f = open(filename, "rb") - r = read_buffer() - while True: - p = pdu.read_pdu(r) - while p is None: - b = f.read(r.needed()) - if b == "": - assert r.available() == 0 - return self - r.put(b) - p = r.retry() - self.append(p) - - @staticmethod - def seq_ge(a, b): - return ((a - b) % (1 << 32)) < (1 << 31) - - -class axfr_set(pdu_set): - """ - Object representing a complete set of PDUs, that is, one versioned - and (theoretically) consistant set of prefixes and router - certificates extracted from rcynic's output, all with the announce - field set. - """ - - @classmethod - def parse_rcynic(cls, rcynic_dir): - """ - Parse ROAS and router certificates fetched (and validated!) by - rcynic to create a new axfr_set. We use the scan_roas and - scan_routercerts utilities to parse the ASN.1, although we may go - back to parsing the files directly using the rpki.POW library code - some day. - """ - - self = cls() - self.serial = timestamp.now() - - try: - p = subprocess.Popen((scan_roas, rcynic_dir), stdout = subprocess.PIPE) - for line in p.stdout: - line = line.split() - asn = line[1] - self.extend(prefix.from_text(asn, addr) for addr in line[2:]) - except OSError, e: - sys.exit("Could not run %s, check your $PATH variable? 
(%s)" % (scan_roas, e)) - - try: - p = subprocess.Popen((scan_routercerts, rcynic_dir), stdout = subprocess.PIPE) - for line in p.stdout: - line = line.split() - gski = line[0] - key = line[-1] - self.extend(router_key.from_text(asn, gski, key) for asn in line[1:-1]) - except OSError, e: - sys.exit("Could not run %s, check your $PATH variable? (%s)" % (scan_routercerts, e)) - - self.sort() - for i in xrange(len(self) - 2, -1, -1): - if self[i] == self[i + 1]: - del self[i + 1] - return self - - @classmethod - def load(cls, filename): - """ - Load an axfr_set from a file, parse filename to obtain serial. - """ - fn1, fn2 = os.path.basename(filename).split(".") - assert fn1.isdigit() and fn2 == "ax" - self = cls._load_file(filename) - self.serial = timestamp(fn1) - return self - - def filename(self): - """ - Generate filename for this axfr_set. - """ - return "%d.ax" % self.serial - - @classmethod - def load_current(cls): - """ - Load current axfr_set. Return None if can't. - """ - serial = read_current()[0] - if serial is None: - return None - try: - return cls.load("%d.ax" % serial) - except IOError: - return None - - def save_axfr(self): - """ - Write axfr__set to file with magic filename. - """ - f = open(self.filename(), "wb") - for p in self: - f.write(p.to_pdu()) - f.close() - - def destroy_old_data(self): - """ - Destroy old data files, presumably because our nonce changed and - the old serial numbers are no longer valid. - """ - for i in glob.iglob("*.ix.*"): - os.unlink(i) - for i in glob.iglob("*.ax"): - if i != self.filename(): - os.unlink(i) - - def mark_current(self): - """ - Save current serial number and nonce, creating new nonce if - necessary. Creating a new nonce triggers cleanup of old state, as - the new nonce invalidates all old serial numbers. 
- """ - old_serial, nonce = read_current() - if old_serial is None or self.seq_ge(old_serial, self.serial): - blather("Creating new nonce and deleting stale data") - nonce = new_nonce() - self.destroy_old_data() - write_current(self.serial, nonce) - - def save_ixfr(self, other): - """ - Comparing this axfr_set with an older one and write the resulting - ixfr_set to file with magic filename. Since we store pdu_sets - in sorted order, computing the difference is a trivial linear - comparison. - """ - f = open("%d.ix.%d" % (self.serial, other.serial), "wb") - old = other - new = self - len_old = len(old) - len_new = len(new) - i_old = i_new = 0 - while i_old < len_old and i_new < len_new: - if old[i_old] < new[i_new]: - f.write(old[i_old].to_pdu(announce = 0)) - i_old += 1 - elif old[i_old] > new[i_new]: - f.write(new[i_new].to_pdu(announce = 1)) - i_new += 1 - else: - i_old += 1 - i_new += 1 - for i in xrange(i_old, len_old): - f.write(old[i].to_pdu(announce = 0)) - for i in xrange(i_new, len_new): - f.write(new[i].to_pdu(announce = 1)) - f.close() - - def show(self): - """ - Print this axfr_set. 
- """ - blather("# AXFR %d (%s)" % (self.serial, self.serial)) - for p in self: - blather(p) - - @staticmethod - def read_bgpdump(filename): - assert filename.endswith(".bz2") - blather("Reading %s" % filename) - bunzip2 = subprocess.Popen(("bzip2", "-c", "-d", filename), stdout = subprocess.PIPE) - bgpdump = subprocess.Popen(("bgpdump", "-m", "-"), stdin = bunzip2.stdout, stdout = subprocess.PIPE) - return bgpdump.stdout - - @classmethod - def parse_bgpdump_rib_dump(cls, filename): - assert os.path.basename(filename).startswith("ribs.") - self = cls() - self.serial = None - for line in cls.read_bgpdump(filename): - try: - pfx = prefix.from_bgpdump(line, rib_dump = True) - except IgnoreThisRecord: - continue - self.append(pfx) - self.serial = pfx.timestamp - if self.serial is None: - sys.exit("Failed to parse anything useful from %s" % filename) - self.sort() - for i in xrange(len(self) - 2, -1, -1): - if self[i] == self[i + 1]: - del self[i + 1] - return self - - def parse_bgpdump_update(self, filename): - assert os.path.basename(filename).startswith("updates.") - for line in self.read_bgpdump(filename): - try: - pfx = prefix.from_bgpdump(line, rib_dump = False) - except IgnoreThisRecord: - continue - announce = pfx.announce - pfx.announce = 1 - i = bisect.bisect_left(self, pfx) - if announce: - if i >= len(self) or pfx != self[i]: - self.insert(i, pfx) - else: - while i < len(self) and pfx.prefix == self[i].prefix and pfx.prefixlen == self[i].prefixlen: - del self[i] - self.serial = pfx.timestamp - -class ixfr_set(pdu_set): - """ - Object representing an incremental set of PDUs, that is, the - differences between one versioned and (theoretically) consistant set - of prefixes and router certificates extracted from rcynic's output - and another, with the announce fields set or cleared as necessary to - indicate the changes. - """ - - @classmethod - def load(cls, filename): - """ - Load an ixfr_set from a file, parse filename to obtain serials. 
- """ - fn1, fn2, fn3 = os.path.basename(filename).split(".") - assert fn1.isdigit() and fn2 == "ix" and fn3.isdigit() - self = cls._load_file(filename) - self.from_serial = timestamp(fn3) - self.to_serial = timestamp(fn1) - return self - - def filename(self): - """ - Generate filename for this ixfr_set. - """ - return "%d.ix.%d" % (self.to_serial, self.from_serial) - - def show(self): - """ - Print this ixfr_set. - """ - blather("# IXFR %d (%s) -> %d (%s)" % (self.from_serial, self.from_serial, - self.to_serial, self.to_serial)) - for p in self: - blather(p) - -class file_producer(object): - """ - File-based producer object for asynchat. - """ - - def __init__(self, handle, buffersize): - self.handle = handle - self.buffersize = buffersize - - def more(self): - return self.handle.read(self.buffersize) - -class pdu_channel(asynchat.async_chat): - """ - asynchat subclass that understands our PDUs. This just handles - network I/O. Specific engines (client, server) should be subclasses - of this with methods that do something useful with the resulting - PDUs. - """ - - def __init__(self, conn = None): - asynchat.async_chat.__init__(self, conn) - self.reader = read_buffer() - - def start_new_pdu(self): - """ - Start read of a new PDU. - """ - try: - p = pdu.read_pdu(self.reader) - while p is not None: - self.deliver_pdu(p) - p = pdu.read_pdu(self.reader) - except PDUException, e: - self.push_pdu(e.make_error_report()) - self.close_when_done() - else: - assert not self.reader.ready() - self.set_terminator(self.reader.needed()) - - def collect_incoming_data(self, data): - """ - Collect data into the read buffer. - """ - self.reader.put(data) - - def found_terminator(self): - """ - Got requested data, see if we now have a PDU. If so, pass it - along, then restart cycle for a new PDU. 
- """ - p = self.reader.retry() - if p is None: - self.set_terminator(self.reader.needed()) - else: - self.deliver_pdu(p) - self.start_new_pdu() - - def push_pdu(self, pdu): - """ - Write PDU to stream. - """ - try: - self.push(pdu.to_pdu()) - except OSError, e: - if e.errno != errno.EAGAIN: - raise - - def push_file(self, f): - """ - Write content of a file to stream. - """ - try: - self.push_with_producer(file_producer(f, self.ac_out_buffer_size)) - except OSError, e: - if e.errno != errno.EAGAIN: - raise - - def log(self, msg): - """ - Intercept asyncore's logging. - """ - log(msg) - - def log_info(self, msg, tag = "info"): - """ - Intercept asynchat's logging. - """ - log("asynchat: %s: %s" % (tag, msg)) - - def handle_error(self): - """ - Handle errors caught by asyncore main loop. - """ - c, e = sys.exc_info()[:2] - if backtrace_on_exceptions or e == 0: - for line in traceback.format_exc().splitlines(): - log(line) - else: - log("[Exception: %s: %s]" % (c.__name__, e)) - log("[Exiting after unhandled exception]") - sys.exit(1) - - def init_file_dispatcher(self, fd): - """ - Kludge to plug asyncore.file_dispatcher into asynchat. Call from - subclass's __init__() method, after calling - pdu_channel.__init__(), and don't read this on a full stomach. - """ - self.connected = True - self._fileno = fd - self.socket = asyncore.file_wrapper(fd) - self.add_channel() - flags = fcntl.fcntl(fd, fcntl.F_GETFL, 0) - flags = flags | os.O_NONBLOCK - fcntl.fcntl(fd, fcntl.F_SETFL, flags) - - def handle_close(self): - """ - Exit when channel closed. - """ - asynchat.async_chat.handle_close(self) - sys.exit(0) - -class server_write_channel(pdu_channel): - """ - Kludge to deal with ssh's habit of sometimes (compile time option) - invoking us with two unidirectional pipes instead of one - bidirectional socketpair. All the server logic is in the - server_channel class, this class just deals with sending the - server's output to a different file descriptor. 
- """ - - def __init__(self): - """ - Set up stdout. - """ - pdu_channel.__init__(self) - self.init_file_dispatcher(sys.stdout.fileno()) - - def readable(self): - """ - This channel is never readable. - """ - return False - -class server_channel(pdu_channel): - """ - Server protocol engine, handles upcalls from pdu_channel to - implement protocol logic. - """ - - def __init__(self): - """ - Set up stdin and stdout as connection and start listening for - first PDU. - """ - pdu_channel.__init__(self) - self.init_file_dispatcher(sys.stdin.fileno()) - self.writer = server_write_channel() - self.get_serial() - self.start_new_pdu() - - def writable(self): - """ - This channel is never writable. - """ - return False - - def push(self, data): - """ - Redirect to writer channel. - """ - return self.writer.push(data) - - def push_with_producer(self, producer): - """ - Redirect to writer channel. - """ - return self.writer.push_with_producer(producer) - - def push_pdu(self, pdu): - """ - Redirect to writer channel. - """ - return self.writer.push_pdu(pdu) - - def push_file(self, f): - """ - Redirect to writer channel. - """ - return self.writer.push_file(f) - - def deliver_pdu(self, pdu): - """ - Handle received PDU. - """ - pdu.serve(self) - - def get_serial(self): - """ - Read, cache, and return current serial number, or None if we can't - find the serial number file. The latter condition should never - happen, but maybe we got started in server mode while the cronjob - mode instance is still building its database. - """ - self.current_serial, self.current_nonce = read_current() - return self.current_serial - - def check_serial(self): - """ - Check for a new serial number. - """ - old_serial = self.current_serial - return old_serial != self.get_serial() - - def notify(self, data = None): - """ - Cronjob instance kicked us, send a notify message. 
- """ - if self.check_serial() is not None: - self.push_pdu(serial_notify(serial = self.current_serial, nonce = self.current_nonce)) - else: - log("Cronjob kicked me without a valid current serial number") - -class client_channel(pdu_channel): - """ - Client protocol engine, handles upcalls from pdu_channel. - """ - - current_serial = None - current_nonce = None - sql = None - host = None - port = None - cache_id = None - - def __init__(self, sock, proc, killsig, host, port): - self.killsig = killsig - self.proc = proc - self.host = host - self.port = port - pdu_channel.__init__(self, conn = sock) - self.start_new_pdu() - - @classmethod - def ssh(cls, host, port): - """ - Set up ssh connection and start listening for first PDU. - """ - args = ("ssh", "-p", port, "-s", host, "rpki-rtr") - blather("[Running ssh: %s]" % " ".join(args)) - s = socket.socketpair() - return cls(sock = s[1], - proc = subprocess.Popen(args, executable = "/usr/bin/ssh", - stdin = s[0], stdout = s[0], close_fds = True), - killsig = signal.SIGKILL, - host = host, port = port) - - @classmethod - def tcp(cls, host, port): - """ - Set up TCP connection and start listening for first PDU. - """ - blather("[Starting raw TCP connection to %s:%s]" % (host, port)) - try: - addrinfo = socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM) - except socket.error, e: - blather("[socket.getaddrinfo() failed: %s]" % e) - else: - for ai in addrinfo: - af, socktype, proto, cn, sa = ai - blather("[Trying addr %s port %s]" % sa[:2]) - try: - s = socket.socket(af, socktype, proto) - except socket.error, e: - blather("[socket.socket() failed: %s]" % e) - continue - try: - s.connect(sa) - except socket.error, e: - blather("[socket.connect() failed: %s]" % e) - s.close() - continue - return cls(sock = s, proc = None, killsig = None, - host = host, port = port) - sys.exit(1) - - @classmethod - def loopback(cls, host, port): - """ - Set up loopback connection and start listening for first PDU. 
- """ - s = socket.socketpair() - blather("[Using direct subprocess kludge for testing]") - argv = [sys.executable, sys.argv[0], "--server"] - if "--syslog" in sys.argv: - argv.extend(("--syslog", sys.argv[sys.argv.index("--syslog") + 1])) - return cls(sock = s[1], - proc = subprocess.Popen(argv, stdin = s[0], stdout = s[0], close_fds = True), - killsig = signal.SIGINT, - host = host, port = port) - - @classmethod - def tls(cls, host, port): - """ - Set up TLS connection and start listening for first PDU. - - NB: This uses OpenSSL's "s_client" command, which does not - check server certificates properly, so this is not suitable for - production use. Fixing this would be a trivial change, it just - requires using a client program which does check certificates - properly (eg, gnutls-cli, or stunnel's client mode if that works - for such purposes this week). - """ - args = ("openssl", "s_client", "-tls1", "-quiet", "-connect", "%s:%s" % (host, port)) - blather("[Running: %s]" % " ".join(args)) - s = socket.socketpair() - return cls(sock = s[1], - proc = subprocess.Popen(args, stdin = s[0], stdout = s[0], close_fds = True), - killsig = signal.SIGKILL, - host = host, port = port) - - def setup_sql(self, sqlname): - """ - Set up an SQLite database to contain the table we receive. If - necessary, we will create the database. 
- """ - import sqlite3 - missing = not os.path.exists(sqlname) - self.sql = sqlite3.connect(sqlname, detect_types = sqlite3.PARSE_DECLTYPES) - self.sql.text_factory = str - cur = self.sql.cursor() - cur.execute("PRAGMA foreign_keys = on") - if missing: - cur.execute(''' - CREATE TABLE cache ( - cache_id INTEGER PRIMARY KEY NOT NULL, - host TEXT NOT NULL, - port TEXT NOT NULL, - nonce INTEGER, - serial INTEGER, - updated INTEGER, - UNIQUE (host, port))''') - cur.execute(''' - CREATE TABLE prefix ( - cache_id INTEGER NOT NULL - REFERENCES cache(cache_id) - ON DELETE CASCADE - ON UPDATE CASCADE, - asn INTEGER NOT NULL, - prefix TEXT NOT NULL, - prefixlen INTEGER NOT NULL, - max_prefixlen INTEGER NOT NULL, - UNIQUE (cache_id, asn, prefix, prefixlen, max_prefixlen))''') - - cur.execute(''' - CREATE TABLE routerkey ( - cache_id INTEGER NOT NULL - REFERENCES cache(cache_id) - ON DELETE CASCADE - ON UPDATE CASCADE, - asn INTEGER NOT NULL, - ski TEXT NOT NULL, - key TEXT NOT NULL, - UNIQUE (cache_id, asn, ski), - UNIQUE (cache_id, asn, key))''') - - cur.execute("SELECT cache_id, nonce, serial FROM cache WHERE host = ? AND port = ?", - (self.host, self.port)) - try: - self.cache_id, self.current_nonce, self.current_serial = cur.fetchone() - except TypeError: - cur.execute("INSERT INTO cache (host, port) VALUES (?, ?)", (self.host, self.port)) - self.cache_id = cur.lastrowid - self.sql.commit() - - def cache_reset(self): - """ - Handle cache_reset actions. - """ - self.current_serial = None - if self.sql: - cur = self.sql.cursor() - cur.execute("DELETE FROM prefix WHERE cache_id = ?", (self.cache_id,)) - cur.execute("UPDATE cache SET serial = NULL WHERE cache_id = ?", (self.cache_id,)) - - def end_of_data(self, serial, nonce): - """ - Handle end_of_data actions. 
- """ - self.current_serial = serial - self.current_nonce = nonce - if self.sql: - self.sql.execute("UPDATE cache SET serial = ?, nonce = ?, updated = datetime('now') WHERE cache_id = ?", - (serial, nonce, self.cache_id)) - self.sql.commit() - - def consume_prefix(self, prefix): - """ - Handle one prefix PDU. - """ - if self.sql: - values = (self.cache_id, prefix.asn, str(prefix.prefix), prefix.prefixlen, prefix.max_prefixlen) - if prefix.announce: - self.sql.execute("INSERT INTO prefix (cache_id, asn, prefix, prefixlen, max_prefixlen) " - "VALUES (?, ?, ?, ?, ?)", - values) - else: - self.sql.execute("DELETE FROM prefix " - "WHERE cache_id = ? AND asn = ? AND prefix = ? AND prefixlen = ? AND max_prefixlen = ?", - values) - - - def consume_routerkey(self, routerkey): - """ - Handle one Router Key PDU. - """ - - if self.sql: - values = (self.cache_id, routerkey.asn, - base64.urlsafe_b64encode(routerkey.ski).rstrip("="), - base64.b64encode(routerkey.key)) - if routerkey.announce: - self.sql.execute("INSERT INTO routerkey (cache_id, asn, ski, key) " - "VALUES (?, ?, ?, ?)", - values) - else: - self.sql.execute("DELETE FROM routerkey " - "WHERE cache_id = ? AND asn = ? AND (ski = ? OR key = ?)", - values) - - - def deliver_pdu(self, pdu): - """ - Handle received PDU. - """ - pdu.consume(self) - - def push_pdu(self, pdu): - """ - Log outbound PDU then write it to stream. - """ - blather(pdu) - pdu_channel.push_pdu(self, pdu) - - def cleanup(self): - """ - Force clean up this client's child process. If everything goes - well, child will have exited already before this method is called, - but we may need to whack it with a stick if something breaks. - """ - if self.proc is not None and self.proc.returncode is None: - try: - os.kill(self.proc.pid, self.killsig) - except OSError: - pass - - def handle_close(self): - """ - Intercept close event so we can log it, then shut down. 
- """ - blather("Server closed channel") - pdu_channel.handle_close(self) - -class kickme_channel(asyncore.dispatcher): - """ - asyncore dispatcher for the PF_UNIX socket that cronjob mode uses to - kick servers when it's time to send notify PDUs to clients. - """ - - def __init__(self, server): - asyncore.dispatcher.__init__(self) - self.server = server - self.sockname = "%s.%d" % (kickme_base, os.getpid()) - self.create_socket(socket.AF_UNIX, socket.SOCK_DGRAM) - try: - self.bind(self.sockname) - os.chmod(self.sockname, 0660) - except socket.error, e: - log("Couldn't bind() kickme socket: %r" % e) - self.close() - except OSError, e: - log("Couldn't chmod() kickme socket: %r" % e) - - def writable(self): - """ - This socket is read-only, never writable. - """ - return False - - def handle_connect(self): - """ - Ignore connect events (not very useful on datagram socket). - """ - pass - - def handle_read(self): - """ - Handle receipt of a datagram. - """ - data = self.recv(512) - self.server.notify(data) - - def cleanup(self): - """ - Clean up this dispatcher's socket. - """ - self.close() - try: - os.unlink(self.sockname) - except: - pass - - def log(self, msg): - """ - Intercept asyncore's logging. - """ - log(msg) - - def log_info(self, msg, tag = "info"): - """ - Intercept asyncore's logging. - """ - log("asyncore: %s: %s" % (tag, msg)) - - def handle_error(self): - """ - Handle errors caught by asyncore main loop. - """ - c, e = sys.exc_info()[:2] - if backtrace_on_exceptions or e == 0: - for line in traceback.format_exc().splitlines(): - log(line) - else: - log("[Exception: %s: %s]" % (c.__name__, e)) - log("[Exiting after unhandled exception]") - sys.exit(1) - - -def hostport_tag(): - """ - Construct hostname/address + port when we're running under a - protocol we understand well enough to do that. This is all - kludgery. Just grit your teeth, or perhaps just close your eyes. 
- """ - - proto = None - - if proto is None: - try: - host, port = socket.fromfd(0, socket.AF_INET, socket.SOCK_STREAM).getpeername() - proto = "tcp" - except: - pass - - if proto is None: - try: - host, port = socket.fromfd(0, socket.AF_INET6, socket.SOCK_STREAM).getpeername()[0:2] - proto = "tcp" - except: - pass - - if proto is None: - try: - host, port = os.environ["SSH_CONNECTION"].split()[0:2] - proto = "ssh" - except: - pass - - if proto is None: - try: - host, port = os.environ["REMOTE_HOST"], os.getenv("REMOTE_PORT") - proto = "ssl" - except: - pass - - if proto is None: - return "" - elif not port: - return "/%s/%s" % (proto, host) - elif ":" in host: - return "/%s/%s.%s" % (proto, host, port) - else: - return "/%s/%s:%s" % (proto, host, port) - - -def kick_all(serial): - """ - Kick any existing server processes to wake them up. - """ - - try: - os.stat(kickme_dir) - except OSError: - blather('# Creating directory "%s"' % kickme_dir) - os.makedirs(kickme_dir) - - msg = "Good morning, serial %d is ready" % serial - sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) - for name in glob.iglob("%s.*" % kickme_base): - try: - blather("# Kicking %s" % name) - sock.sendto(msg, name) - except socket.error: - try: - blather("# Failed to kick %s, probably dead socket, attempting cleanup" % name) - os.unlink(name) - except Exception, e: - blather("# Couldn't unlink suspected dead socket %s: %s" % (name, e)) - except Exception, e: - log("# Failed to kick %s and don't understand why: %s" % (name, e)) - sock.close() - -def cronjob_main(argv): - """ - Run this mode right after rcynic to do the real work of groveling - through the ROAs that rcynic collects and translating that data into - the form used in the rpki-router protocol. This mode prepares both - full dumps (AXFR) and incremental dumps against a specific prior - version (IXFR). [Terminology here borrowed from DNS, as is much of - the protocol design.] 
Finally, this mode kicks any active servers, - so that they can notify their clients that a new version is - available. - - Run this in the directory where you want to write its output files, - which should also be the directory in which you run this program in - --server mode. - - This mode takes one argument on the command line, which specifies - the directory name of rcynic's authenticated output tree (normally - $somewhere/rcynic-data/authenticated/). - """ - - if len(argv) != 1: - sys.exit("Expected one argument, got %r" % (argv,)) - - old_ixfrs = glob.glob("*.ix.*") - - current = read_current()[0] - cutoff = timestamp.now(-(24 * 60 * 60)) - for f in glob.iglob("*.ax"): - t = timestamp(int(f.split(".")[0])) - if t < cutoff and t != current: - blather("# Deleting old file %s, timestamp %s" % (f, t)) - os.unlink(f) - - pdus = axfr_set.parse_rcynic(argv[0]) - if pdus == axfr_set.load_current(): - blather("# No change, new version not needed") - sys.exit() - pdus.save_axfr() - for axfr in glob.iglob("*.ax"): - if axfr != pdus.filename(): - pdus.save_ixfr(axfr_set.load(axfr)) - pdus.mark_current() - - blather("# New serial is %d (%s)" % (pdus.serial, pdus.serial)) - - kick_all(pdus.serial) - - old_ixfrs.sort() - for ixfr in old_ixfrs: - try: - blather("# Deleting old file %s" % ixfr) - os.unlink(ixfr) - except OSError: - pass - -def show_main(argv): - """ - Display dumps created by --cronjob mode in textual form. - Intended only for debugging. - - This mode takes no command line arguments. Run it in the directory - where you ran --cronjob mode. - """ - - if argv: - sys.exit("Unexpected arguments: %r" % (argv,)) - - g = glob.glob("*.ax") - g.sort() - for f in g: - axfr_set.load(f).show() - - g = glob.glob("*.ix.*") - g.sort() - for f in g: - ixfr_set.load(f).show() - -def server_main(argv): - """ - Implement the server side of the rpkk-router protocol. 
Other than - one PF_UNIX socket inode, this doesn't write anything to disk, so it - can be run with minimal privileges. Most of the hard work has - already been done in --cronjob mode, so all that this mode has to do - is serve up the results. - - In production use this server should run under sshd. The subsystem - mechanism in sshd does not allow us to pass arguments on the command - line, so setting this up might require a wrapper script, but in - production use you will probably want to lock down the public key - used to authenticate the ssh session so that it can only run this - one command, in which case you can just specify the full command - including any arguments in the authorized_keys file. - - Unless you do something special, sshd will have this program running - in whatever it thinks is the home directory associated with the - username given in the ssh prototocol setup, so it may be easiest to - set this up so that the home directory sshd puts this program into - is the one where --cronjob left its files for this mode to pick up. - - This mode must be run in the directory where you ran --cronjob mode. - - This mode takes one optional argument: if provided, the argument is - the name of a directory to which the program should chdir() on - startup; this may simplify setup when running under inetd. - - The server is event driven, so everything interesting happens in the - channel classes. - """ - - blather("[Starting]") - if len(argv) > 1: - sys.exit("Unexpected arguments: %r" % (argv,)) - if argv: - try: - os.chdir(argv[0]) - except OSError, e: - sys.exit(e) - kickme = None - try: - server = server_channel() - kickme = kickme_channel(server = server) - asyncore.loop(timeout = None) - except KeyboardInterrupt: - sys.exit(0) - finally: - if kickme is not None: - kickme.cleanup() - - -def listener_tcp_main(argv): - """ - Simple plain-TCP listener. 
Listens on a specified TCP port, upon - receiving a connection, forks the process and starts child executing - at server_main(). - - First argument (required) is numeric port number. - - Second argument (optional) is directory, like --server. - - NB: plain-TCP is completely insecure. We only implement this - because it's all that the routers currently support. In theory, we - will all be running TCP-AO in the future, at which point this will - go away. - """ - - # Perhaps we should daemonize? Deal with that later. - - if len(argv) > 2: - sys.exit("Unexpected arguments: %r" % (argv,)) - try: - port = int(argv[0]) if argv[0].isdigit() else socket.getservbyname(argv[0], "tcp") - except: - sys.exit("Couldn't parse port number on which to listen") - if len(argv) > 1: - try: - os.chdir(argv[1]) - except OSError, e: - sys.exit(e) - listener = None - try: - listener = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) - listener.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0) - except: - if listener is not None: - listener.close() - listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - try: - listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - except AttributeError: - pass - listener.bind(("", port)) - listener.listen(5) - blather("[Listening on port %s]" % port) - while True: - s, ai = listener.accept() - blather("[Received connection from %r]" % (ai,)) - pid = os.fork() - if pid == 0: - os.dup2(s.fileno(), 0) - os.dup2(s.fileno(), 1) - s.close() - #os.closerange(3, os.sysconf("SC_OPEN_MAX")) - global log_tag - log_tag = "rtr-origin/server" + hostport_tag() - syslog.closelog() - syslog.openlog(log_tag, syslog.LOG_PID, syslog_facility) - server_main(()) - sys.exit() - else: - blather("[Spawned server %d]" % pid) - try: - while True: - pid, status = os.waitpid(0, os.WNOHANG) - if pid: - blather("[Server %s exited]" % pid) - else: - break - except: - pass - - -def client_main(argv): 
- """ - Toy client, intended only for debugging. - - This program takes one or more arguments. The first argument - determines what kind of connection it should open to the server, the - remaining arguments are connection details specific to this - particular type of connection. - - If the first argument is "loopback", the client will run a copy of - the server directly in a subprocess, and communicate with it via a - PF_UNIX socket pair. This sub-mode takes no further arguments. - - If the first argument is "ssh", the client will attempt to run ssh - in as subprocess to connect to the server using the ssh subsystem - mechanism as specified for this protocol. The remaining arguments - should be a hostname (or IP address in a form acceptable to ssh) and - a TCP port number. - - If the first argument is "tcp", the client will attempt to open a - direct (and completely insecure!) TCP connection to the server. - The remaining arguments should be a hostname (or IP address) and - a TCP port number. - - If the first argument is "tls", the client will attempt to open a - TLS connection to the server. The remaining arguments should be a - hostname (or IP address) and a TCP port number. - - An optional final name is the name of a file containing a SQLite - database in which to store the received table. If specified, this - database will be created if missing. 
- """ - - blather("[Startup]") - client = None - if not argv: - argv = ["loopback"] - proto = argv[0] - if proto == "loopback" and len(argv) in (1, 2): - constructor = client_channel.loopback - host, port = "", "" - sqlname = None if len(argv) == 1 else argv[1] - elif proto in ("ssh", "tcp", "tls") and len(argv) in (3, 4): - constructor = getattr(client_channel, proto) - host, port = argv[1:3] - sqlname = None if len(argv) == 3 else argv[3] - else: - sys.exit("Unexpected arguments: %s" % " ".join(argv)) - - try: - client = constructor(host, port) - if sqlname: - client.setup_sql(sqlname) - while True: - if client.current_serial is None or client.current_nonce is None: - client.push_pdu(reset_query()) - else: - client.push_pdu(serial_query(serial = client.current_serial, nonce = client.current_nonce)) - wakeup = time.time() + 600 - while True: - remaining = wakeup - time.time() - if remaining < 0: - break - asyncore.loop(timeout = remaining, count = 1) - - except KeyboardInterrupt: - sys.exit(0) - finally: - if client is not None: - client.cleanup() - -def bgpdump_convert_main(argv): - """ - Simulate route origin data from a set of BGP dump files. - - * DANGER WILL ROBINSON! * - * DEBUGGING AND TEST USE ONLY! * - - argv is an ordered list of filenames. Each file must be a BGP RIB - dumps, a BGP UPDATE dumps, or an AXFR dump in the format written by - this program's --cronjob command. The first file must be a RIB dump - or AXFR dump, it cannot be an UPDATE dump. Output will be a set of - AXFR and IXFR files with timestamps derived from the BGP dumps, - which can be used as input to this program's --server command for - test purposes. SUCH DATA PROVIDE NO SECURITY AT ALL. - - You have been warned. 
- """ - - first = True - db = None - axfrs = [] - - for filename in argv: - - if filename.endswith(".ax"): - blather("Reading %s" % filename) - db = axfr_set.load(filename) - - elif os.path.basename(filename).startswith("ribs."): - db = axfr_set.parse_bgpdump_rib_dump(filename) - db.save_axfr() - - elif not first: - assert db is not None - db.parse_bgpdump_update(filename) - db.save_axfr() - - else: - sys.exit("First argument must be a RIB dump or .ax file, don't know what to do with %s" % filename) - - blather("DB serial now %d (%s)" % (db.serial, db.serial)) - if first and read_current() == (None, None): - db.mark_current() - first = False - - for axfr in axfrs: - blather("Loading %s" % axfr) - ax = axfr_set.load(axfr) - blather("Computing changes from %d (%s) to %d (%s)" % (ax.serial, ax.serial, db.serial, db.serial)) - db.save_ixfr(ax) - del ax - - axfrs.append(db.filename()) - - -def bgpdump_select_main(argv): - """ - Simulate route origin data from a set of BGP dump files. - - * DANGER WILL ROBINSON! * - * DEBUGGING AND TEST USE ONLY! * - - Set current serial number to correspond to an .ax file created by - converting BGP dump files. SUCH DATA PROVIDE NO SECURITY AT ALL. - - You have been warned. - """ - - serial = None - try: - head, sep, tail = os.path.basename(argv[0]).partition(".") - if len(argv) == 1 and head.isdigit() and sep == "." and tail == "ax": - serial = timestamp(head) - except: - pass - if serial is None: - sys.exit("Argument must be name of a .ax file") - - nonce = read_current()[1] - if nonce is None: - nonce = new_nonce() - - write_current(serial, nonce) - kick_all(serial) - - -class bgpsec_replay_clock(object): - """ - Internal clock for replaying BGP dump files. - - * DANGER WILL ROBINSON! * - * DEBUGGING AND TEST USE ONLY! * - - This class replaces the normal on-disk serial number mechanism with - an in-memory version based on pre-computed data. - bgpdump_server_main() uses this hack to replay historical data for - testing purposes. 
DO NOT USE THIS IN PRODUCTION. - - You have been warned. - """ - - def __init__(self): - self.timestamps = [timestamp(int(f.split(".")[0])) for f in glob.iglob("*.ax")] - self.timestamps.sort() - self.offset = self.timestamps[0] - int(time.time()) - self.nonce = new_nonce() - - def __nonzero__(self): - return len(self.timestamps) > 0 - - def now(self): - return timestamp.now(self.offset) - - def read_current(self): - now = self.now() - while len(self.timestamps) > 1 and now >= self.timestamps[1]: - del self.timestamps[0] - return self.timestamps[0], self.nonce - - def siesta(self): - now = self.now() - if len(self.timestamps) <= 1: - return None - elif now < self.timestamps[1]: - return self.timestamps[1] - now - else: - return 1 - - -def bgpdump_server_main(argv): - """ - Simulate route origin data from a set of BGP dump files. - - * DANGER WILL ROBINSON! * - * DEBUGGING AND TEST USE ONLY! * - - This is a clone of server_main() which replaces the external serial - number updates triggered via the kickme channel by cronjob_main with - an internal clocking mechanism to replay historical test data. - - DO NOT USE THIS IN PRODUCTION. - - You have been warned. - """ - - blather("[Starting]") - if len(argv) > 1: - sys.exit("Unexpected arguments: %r" % (argv,)) - if argv: - try: - os.chdir(argv[0]) - except OSError, e: - sys.exit(e) - # - # Yes, this really does replace a global function with a bound - # method to our clock object. Fun stuff, huh? 
- # - global read_current - clock = bgpsec_replay_clock() - read_current = clock.read_current - # - try: - server = server_channel() - old_serial = server.get_serial() - blather("[Starting at serial %d (%s)]" % (old_serial, old_serial)) - while clock: - new_serial = server.get_serial() - if old_serial != new_serial: - blather("[Serial bumped from %d (%s) to %d (%s)]" % (old_serial, old_serial, new_serial, new_serial)) - server.notify() - old_serial = new_serial - asyncore.loop(timeout = clock.siesta(), count = 1) - except KeyboardInterrupt: - sys.exit(0) - -# Figure out where the scan_roas utility program is today -try: - # Set from autoconf - scan_roas = ac_scan_roas -except NameError: - # Source directory - scan_roas = os.path.normpath(os.path.join(sys.path[0], "..", "utils", - "scan_roas", "scan_roas")) -# If that didn't work, use $PATH and hope for the best -if not os.path.exists(scan_roas): - scan_roas = "scan_roas" - -# Same thing for scan_routercerts -try: - # Set from autoconf - scan_routercerts = ac_scan_routercerts -except NameError: - # Source directory - scan_routercerts = os.path.normpath(os.path.join(sys.path[0], "..", "utils", - "scan_routercerts", "scan_routercerts")) -if not os.path.exists(scan_routercerts): - scan_routercerts = "scan_routercerts" - -force_zero_nonce = False - -kickme_dir = "sockets" -kickme_base = os.path.join(kickme_dir, "kickme") - -main_dispatch = { - "cronjob" : cronjob_main, - "client" : client_main, - "server" : server_main, - "show" : show_main, - "listener_tcp" : listener_tcp_main, - "bgpdump_convert" : bgpdump_convert_main, - "bgpdump_select" : bgpdump_select_main, - "bgpdump_server" : bgpdump_server_main } - -def usage(msg = None): - f = sys.stderr if msg else sys.stdout - f.write("Usage: %s [options] --mode [arguments]\n" % sys.argv[0]) - f.write("\n") - f.write("where options are zero or more of:\n") - f.write("\n") - f.write("--syslog facility.warning_priority[.info_priority]\n") - f.write("\n") - 
f.write("--zero-nonce\n") - f.write("\n") - f.write("and --mode is one of:\n") - f.write("\n") - for name, func in main_dispatch.iteritems(): - f.write("--%s:\n" % name) - f.write(func.__doc__) - f.write("\n") - sys.exit(msg) - -if __name__ == "__main__": - - os.environ["TZ"] = "UTC" - time.tzset() - - mode = None - - syslog_facility, syslog_warning, syslog_info = syslog.LOG_DAEMON, syslog.LOG_WARNING, syslog.LOG_INFO - - opts, argv = getopt.getopt(sys.argv[1:], "hs:z?", ["help", "syslog=", "zero-nonce"] + main_dispatch.keys()) - for o, a in opts: - if o in ("-h", "--help", "-?"): - usage() - elif o in ("-z", "--zero-nonce"): - force_zero_nonce = True - elif o in ("-s", "--syslog"): - try: - a = [getattr(syslog, "LOG_" + i.upper()) for i in a.split(".")] - if len(a) == 2: - a.append(a[1]) - syslog_facility, syslog_warning, syslog_info = a - if syslog_facility < 8 or syslog_warning >= 8 or syslog_info >= 8: - raise ValueError - except: - usage("Bad value specified for --syslog option") - elif len(o) > 2 and o[2:] in main_dispatch: - if mode is not None: - sys.exit("Conflicting modes specified") - mode = o[2:] - - if mode is None: - usage("No mode specified") - - log_tag = "rtr-origin/" + mode - - if mode in ("server", "bgpdump_server"): - log_tag += hostport_tag() - - if mode in ("cronjob", "server" , "bgpdump_server"): - syslog.openlog(log_tag, syslog.LOG_PID, syslog_facility) - def log(msg): - return syslog.syslog(syslog_warning, str(msg)) - def blather(msg): - return syslog.syslog(syslog_info, str(msg)) - - elif mode == "show": - def log(msg): - try: - os.write(sys.stdout.fileno(), "%s\n" % msg) - except OSError, e: - if e.errno != errno.EPIPE: - raise - blather = log - - else: - def log(msg): - sys.stderr.write("%s %s[%d]: %s\n" % (time.strftime("%F %T"), log_tag, os.getpid(), msg)) - blather = log - - main_dispatch[mode](argv) diff --git a/rtr-origin/rules.darwin.mk b/rtr-origin/rules.darwin.mk deleted file mode 100644 index 1230db92..00000000 --- 
a/rtr-origin/rules.darwin.mk +++ /dev/null @@ -1,9 +0,0 @@ -# $Id$ - -install-always: install-binary - -install-postconf: install-listener - -install-listener: - @echo "No rule for $@ on this platform (yet), you'll have to do that yourself if it matters." - diff --git a/rtr-origin/rules.freebsd.mk b/rtr-origin/rules.freebsd.mk deleted file mode 100644 index df99da47..00000000 --- a/rtr-origin/rules.freebsd.mk +++ /dev/null @@ -1,37 +0,0 @@ -# $Id$ - -install-always: install-binary - -install-postconf: install-listener - -install-listener: .FORCE - @if /usr/bin/egrep -q '^rpki-rtr' /etc/services ; \ - then \ - echo "You already have a /etc/services entry for rpki-rtr, so I will use it."; \ - elif echo >>/etc/services "rpki-rtr ${RPKI_RTR_PORT}/tcp #RFC 6810" ; \ - then \ - echo "Added rpki-rtr to /etc/services."; \ - else \ - echo "Adding rpki-rtr to /etc/services failed, please fix this, then try again."; \ - exit 1; \ - fi - @if /usr/bin/egrep -q "rpki-rtr[ ]+stream[ ]+tcp[ ]" /etc/inetd.conf; \ - then \ - echo "You already have an inetd.conf entry for rpki-rtr on TCPv4, so I will use it."; \ - elif echo >>/etc/inetd.conf "rpki-rtr stream tcp nowait rpkirtr /usr/local/bin/rtr-origin rtr-origin --server /var/rcynic/rpki-rtr"; \ - then \ - echo "Added rpki-rtr for TCPv4 to /etc/inetd.conf."; \ - else \ - echo "Adding rpki-rtr for TCPv4 to /etc/inetd.conf failed, please fix this, then try again."; \ - exit 1; \ - fi - @if /usr/bin/egrep -q "rpki-rtr[ ]+stream[ ]+tcp6[ ]" /etc/inetd.conf; \ - then \ - echo "You already have an inetd.conf entry for rpki-rtr on TCPv6, so I will use it."; \ - elif echo >>/etc/inetd.conf "rpki-rtr stream tcp6 nowait rpkirtr /usr/local/bin/rtr-origin rtr-origin --server /var/rcynic/rpki-rtr"; \ - then \ - echo "Added rpki-rtr for TCPv6 to /etc/inetd.conf."; \ - else \ - echo "Adding rpki-rtr for TCPv6 to /etc/inetd.conf failed, please fix this, then try again."; \ - exit 1; \ - fi diff --git a/rtr-origin/rules.linux.mk 
b/rtr-origin/rules.linux.mk deleted file mode 100644 index 23f90f69..00000000 --- a/rtr-origin/rules.linux.mk +++ /dev/null @@ -1,29 +0,0 @@ -# $Id$ - -install-always: install-binary install-listener - -install-postconf: - @true - -# Only need to make listener if not already present - -install-listener: ${DESTDIR}/etc/xinetd.d/rpki-rtr - -${DESTDIR}/etc/xinetd.d/rpki-rtr: - @${AWK} 'BEGIN { \ - print "service rpki-rtr"; \ - print "{"; \ - print " type = UNLISTED"; \ - print " flags = IPv4"; \ - print " socket_type = stream"; \ - print " protocol = tcp"; \ - print " port = ${RPKI_RTR_PORT}"; \ - print " wait = no"; \ - print " user = rpkirtr"; \ - print " server = ${bindir}/${BIN}"; \ - print " server_args = --server /var/rcynic/rpki-rtr"; \ - print "}"; \ - }' >xinetd.rpki-rtr - ${INSTALL} -d ${DESTDIR}/etc/xinetd.d - ${INSTALL} -m 644 xinetd.rpki-rtr $@ - rm xinetd.rpki-rtr diff --git a/rtr-origin/rules.unknown.mk b/rtr-origin/rules.unknown.mk deleted file mode 100644 index fb16e93a..00000000 --- a/rtr-origin/rules.unknown.mk +++ /dev/null @@ -1,8 +0,0 @@ -# $Id$ - -install-always: install-binary - -install-postconf: install-listener - -install-listener: - @echo "Don't know how to make $@ on this platform"; exit 1 diff --git a/rtr-origin/server.sh b/rtr-origin/server.sh deleted file mode 100755 index 7ccf2f38..00000000 --- a/rtr-origin/server.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh - -# -# Wrapper for rtr-origin.py in server mode, for testing. -# -# In production we would probably want to handle all of this either -# directly in the Python code or in the command= setting for a -# particular ssh key, but for initial testing it's simpler to run a -# shall script to change to the right directory and supply any -# necessary command line arguments. -# -# Be warned that almost any error here will cause the subsystem to -# fail mysteriously, leaving behind naught but a SIGCHILD log message -# from sshd as this script dies. 
- -cd /u/sra/rpki/subvert-rpki.hactrn.net/rtr-origin - -exec /usr/local/bin/python rtr-origin.py --server diff --git a/rtr-origin/sshd.conf b/rtr-origin/sshd.conf deleted file mode 100644 index 0124fc4c..00000000 --- a/rtr-origin/sshd.conf +++ /dev/null @@ -1,23 +0,0 @@ -# $Id$ -# -# sshd config file for testing. Invoke thusly: -# -# /usr/sbin/sshd -f /u/sra/rpki/subvert-rpki.hactrn.net/rtr-origin/sshd.conf -d - -Port 2222 -Protocol 2 -ListenAddress 127.0.0.1 -ListenAddress ::1 -HostKey /u/sra/rpki/subvert-rpki.hactrn.net/rtr-origin/ssh_host_rsa_key -PermitRootLogin no -PubkeyAuthentication yes -AuthorizedKeysFile /u/sra/rpki/subvert-rpki.hactrn.net/rtr-origin/authorized_keys -PasswordAuthentication no -PermitEmptyPasswords no -ChallengeResponseAuthentication no -UsePAM no -AllowTcpForwarding no -X11Forwarding no -UseDNS no -PidFile /u/sra/rpki/subvert-rpki.hactrn.net/rtr-origin/sshd.pid -Subsystem rpki-rtr /u/sra/rpki/subvert-rpki.hactrn.net/rtr-origin/server.sh diff --git a/schemas/relaxng/left-right-schema.rnc b/schemas/relaxng/left-right-schema.rnc new file mode 100644 index 00000000..b46adeb5 --- /dev/null +++ b/schemas/relaxng/left-right-schema.rnc @@ -0,0 +1,323 @@ +# $Id$ +# +# RelaxNG schema for RPKI left-right protocol. +# +# Copyright (C) 2012--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2011 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +default namespace = "http://www.hactrn.net/uris/rpki/left-right-spec/" + +# Top level PDU + +start = element msg { + attribute version { xsd:positiveInteger { maxInclusive="1" } }, + ( (attribute type { "query" }, query_elt*) | + (attribute type { "reply" }, reply_elt*) ) +} + +# PDUs allowed in a query +query_elt |= self_query +query_elt |= bsc_query +query_elt |= parent_query +query_elt |= child_query +query_elt |= repository_query +query_elt |= list_roa_requests_query +query_elt |= list_ghostbuster_requests_query +query_elt |= list_ee_certificate_requests_query +query_elt |= list_resources_query +query_elt |= list_published_objects_query +query_elt |= list_received_resources_query + +# PDUs allowed in a reply +reply_elt |= self_reply +reply_elt |= bsc_reply +reply_elt |= parent_reply +reply_elt |= child_reply +reply_elt |= repository_reply +reply_elt |= list_resources_reply +reply_elt |= list_roa_requests_reply +reply_elt |= list_ghostbuster_requests_reply +reply_elt |= list_ee_certificate_requests_reply +reply_elt |= list_published_objects_reply +reply_elt |= list_received_resources_reply +reply_elt |= report_error_reply + +# Tag attributes for bulk operations +tag = attribute tag { xsd:token {maxLength="1024" } }? + +# Combinations of action and type attributes used in later definitions. +# The same patterns repeat in most of the elements in this protocol. 
+ctl_create = attribute action { "create" }, tag +ctl_set = attribute action { "set" }, tag +ctl_get = attribute action { "get" }, tag +ctl_list = attribute action { "list" }, tag +ctl_destroy = attribute action { "destroy" }, tag + +# Base64 encoded DER stuff +base64 = xsd:base64Binary { maxLength="512000" } + +# Base definition for all fields that are really just SQL primary indices +#sql_id = xsd:nonNegativeInteger + +# ...except that fields containing SQL primary indicies don't belong +# in this protocol, so they're turninging into handles. +# Length restriction is a MySQL implementation issue. +# Handles are case-insensitive (because SQL is, among other reasons). +object_handle = xsd:string { maxLength="255" pattern="[\-_A-Za-z0-9]+" } + +# URIs +uri = xsd:anyURI { maxLength="4096" } + +# Name fields imported from up-down protocol +up_down_name = xsd:token { maxLength="1024" } + +# Resource lists +asn_list = xsd:string { maxLength="512000" pattern="[\-,0-9]*" } +ipv4_list = xsd:string { maxLength="512000" pattern="[\-,0-9/.]*" } +ipv6_list = xsd:string { maxLength="512000" pattern="[\-,0-9/:a-fA-F]*" } + +# element + +self_bool = (attribute rekey { "yes" }?, + attribute reissue { "yes" }?, + attribute revoke { "yes" }?, + attribute run_now { "yes" }?, + attribute publish_world_now { "yes" }?, + attribute revoke_forgotten { "yes" }?, + attribute clear_replay_protection { "yes" }?) + +self_payload = (attribute use_hsm { "yes" | "no" }?, + attribute crl_interval { xsd:positiveInteger }?, + attribute regen_margin { xsd:positiveInteger }?, + element bpki_cert { base64 }?, + element bpki_glue { base64 }?) 
+ +self_handle = attribute self_handle { object_handle } + +self_query |= element self { ctl_create, self_handle, self_bool, self_payload } +self_reply |= element self { ctl_create, self_handle } +self_query |= element self { ctl_set, self_handle, self_bool, self_payload } +self_reply |= element self { ctl_set, self_handle } +self_query |= element self { ctl_get, self_handle } +self_reply |= element self { ctl_get, self_handle, self_payload } +self_query |= element self { ctl_list } +self_reply |= element self { ctl_list, self_handle, self_payload } +self_query |= element self { ctl_destroy, self_handle } +self_reply |= element self { ctl_destroy, self_handle } + +# element. Key parameters hardwired for now. + +bsc_bool = ((attribute generate_keypair { "yes" }, + attribute key_type { "rsa" }?, + attribute hash_alg { "sha256" }?, + attribute key_length { "2048" }?)?) + +bsc_handle = attribute bsc_handle { object_handle } + +bsc_payload = (element signing_cert { base64 }?, + element signing_cert_crl { base64 }?) + +bsc_readonly = element pkcs10_request { base64 }? 
+ +bsc_query |= element bsc { ctl_create, self_handle, bsc_handle, bsc_bool, bsc_payload } +bsc_reply |= element bsc { ctl_create, self_handle, bsc_handle, bsc_readonly } +bsc_query |= element bsc { ctl_set, self_handle, bsc_handle, bsc_bool, bsc_payload } +bsc_reply |= element bsc { ctl_set, self_handle, bsc_handle, bsc_readonly } +bsc_query |= element bsc { ctl_get, self_handle, bsc_handle } +bsc_reply |= element bsc { ctl_get, self_handle, bsc_handle, bsc_payload, bsc_readonly } +bsc_query |= element bsc { ctl_list, self_handle } +bsc_reply |= element bsc { ctl_list, self_handle, bsc_handle, bsc_payload, bsc_readonly } +bsc_query |= element bsc { ctl_destroy, self_handle, bsc_handle } +bsc_reply |= element bsc { ctl_destroy, self_handle, bsc_handle } + +# element + +parent_handle = attribute parent_handle { object_handle } + +parent_bool = (attribute rekey { "yes" }?, + attribute reissue { "yes" }?, + attribute revoke { "yes" }?, + attribute revoke_forgotten { "yes" }?, + attribute clear_replay_protection { "yes" }?) + +parent_payload = (attribute peer_contact_uri { uri }?, + attribute sia_base { uri }?, + bsc_handle?, + repository_handle?, + attribute sender_name { up_down_name }?, + attribute recipient_name { up_down_name }?, + element bpki_cms_cert { base64 }?, + element bpki_cms_glue { base64 }?) 
+ +parent_query |= element parent { ctl_create, self_handle, parent_handle, parent_bool, parent_payload } +parent_reply |= element parent { ctl_create, self_handle, parent_handle } +parent_query |= element parent { ctl_set, self_handle, parent_handle, parent_bool, parent_payload } +parent_reply |= element parent { ctl_set, self_handle, parent_handle } +parent_query |= element parent { ctl_get, self_handle, parent_handle } +parent_reply |= element parent { ctl_get, self_handle, parent_handle, parent_payload } +parent_query |= element parent { ctl_list, self_handle } +parent_reply |= element parent { ctl_list, self_handle, parent_handle, parent_payload } +parent_query |= element parent { ctl_destroy, self_handle, parent_handle } +parent_reply |= element parent { ctl_destroy, self_handle, parent_handle } + +# element + +child_handle = attribute child_handle { object_handle } + +child_bool = (attribute reissue { "yes" }?, + attribute clear_replay_protection { "yes" }?) + +child_payload = (bsc_handle?, + element bpki_cert { base64 }?, + element bpki_glue { base64 }?) 
+ +child_query |= element child { ctl_create, self_handle, child_handle, child_bool, child_payload } +child_reply |= element child { ctl_create, self_handle, child_handle } +child_query |= element child { ctl_set, self_handle, child_handle, child_bool, child_payload } +child_reply |= element child { ctl_set, self_handle, child_handle } +child_query |= element child { ctl_get, self_handle, child_handle } +child_reply |= element child { ctl_get, self_handle, child_handle, child_payload } +child_query |= element child { ctl_list, self_handle } +child_reply |= element child { ctl_list, self_handle, child_handle, child_payload } +child_query |= element child { ctl_destroy, self_handle, child_handle } +child_reply |= element child { ctl_destroy, self_handle, child_handle } + +# element + +repository_handle = attribute repository_handle { object_handle } + +repository_bool = attribute clear_replay_protection { "yes" }? + +repository_payload = (attribute peer_contact_uri { uri }?, + bsc_handle?, + element bpki_cert { base64 }?, + element bpki_glue { base64 }?) 
+ +repository_query |= element repository { ctl_create, self_handle, repository_handle, repository_bool, repository_payload } +repository_reply |= element repository { ctl_create, self_handle, repository_handle } +repository_query |= element repository { ctl_set, self_handle, repository_handle, repository_bool, repository_payload } +repository_reply |= element repository { ctl_set, self_handle, repository_handle } +repository_query |= element repository { ctl_get, self_handle, repository_handle } +repository_reply |= element repository { ctl_get, self_handle, repository_handle, repository_payload } +repository_query |= element repository { ctl_list, self_handle } +repository_reply |= element repository { ctl_list, self_handle, repository_handle, repository_payload } +repository_query |= element repository { ctl_destroy, self_handle, repository_handle } +repository_reply |= element repository { ctl_destroy, self_handle, repository_handle } + +# element + +list_resources_query = element list_resources { + tag, self_handle, child_handle +} + +list_resources_reply = element list_resources { + tag, self_handle, child_handle, + attribute valid_until { xsd:dateTime { pattern=".*Z" } }, + attribute asn { asn_list }?, + attribute ipv4 { ipv4_list }?, + attribute ipv6 { ipv6_list }? +} + +# element + +list_roa_requests_query = element list_roa_requests { + tag, self_handle +} + +list_roa_requests_reply = element list_roa_requests { + tag, self_handle, + attribute asn { xsd:nonNegativeInteger }, + attribute ipv4 { ipv4_list }?, + attribute ipv6 { ipv6_list }? 
+} + +# element + +list_ghostbuster_requests_query = element list_ghostbuster_requests { + tag, self_handle, parent_handle +} + +list_ghostbuster_requests_reply = element list_ghostbuster_requests { + tag, self_handle, parent_handle, + xsd:string +} + +# element + +list_ee_certificate_requests_query = element list_ee_certificate_requests { + tag, self_handle +} + +list_ee_certificate_requests_reply = element list_ee_certificate_requests { + tag, self_handle, + attribute gski { xsd:token { minLength="27" maxLength="27" } }, + attribute valid_until { xsd:dateTime { pattern=".*Z" } }, + attribute asn { asn_list }?, + attribute ipv4 { ipv4_list }?, + attribute ipv6 { ipv6_list }?, + attribute cn { xsd:string { maxLength="64" pattern="[\-0-9A-Za-z_ ]+" } }?, + attribute sn { xsd:string { maxLength="64" pattern="[0-9A-Fa-f]+" } }?, + attribute eku { xsd:string { maxLength="512000" pattern="[.,0-9]+" } }?, + element pkcs10 { base64 } +} + +# element + +list_published_objects_query = element list_published_objects { + tag, self_handle +} + +list_published_objects_reply = element list_published_objects { + tag, self_handle, + attribute uri { uri }, + attribute child_handle { object_handle }?, + base64 +} + +# element + +list_received_resources_query = element list_received_resources { + tag, self_handle +} + +list_received_resources_reply = element list_received_resources { + tag, self_handle, parent_handle, + attribute notBefore { xsd:dateTime { pattern=".*Z" } }, + attribute notAfter { xsd:dateTime { pattern=".*Z" } }, + attribute uri { uri }, + attribute sia_uri { uri }, + attribute aia_uri { uri }, + attribute asn { asn_list }?, + attribute ipv4 { ipv4_list }?, + attribute ipv6 { ipv6_list }? +} + +# element + +error = xsd:token { maxLength="1024" } + +report_error_reply = element report_error { + tag, self_handle?, + attribute error_code { error }, + xsd:string { maxLength="512000" }? 
+} + +# Local Variables: +# indent-tabs-mode: nil +# comment-start: "# " +# comment-start-skip: "#[ \t]*" +# End: diff --git a/schemas/relaxng/left-right-schema.rng b/schemas/relaxng/left-right-schema.rng new file mode 100644 index 00000000..6c3d2f1a --- /dev/null +++ b/schemas/relaxng/left-right-schema.rng @@ -0,0 +1,1089 @@ + + + + + + + + + 1 + + + + + + query + + + + + + + + reply + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1024 + + + + + + + + create + + + + + + set + + + + + + get + + + + + + list + + + + + + destroy + + + + + + + 512000 + + + + + + + 255 + [\-_A-Za-z0-9]+ + + + + + + 4096 + + + + + + 1024 + + + + + + 512000 + [\-,0-9]* + + + + + 512000 + [\-,0-9/.]* + + + + + 512000 + [\-,0-9/:a-fA-F]* + + + + + + + yes + + + + + yes + + + + + yes + + + + + yes + + + + + yes + + + + + yes + + + + + yes + + + + + + + + yes + no + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + yes + + + + rsa + + + + + sha256 + + + + + 2048 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + yes + + + + + yes + + + + + yes + + + + + yes + + + + + yes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + yes + + + + + yes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + yes + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + .*Z + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 27 + 27 + + + + + .*Z + + + + + + + + + + + + + + + + + + + + + 64 + [\-0-9A-Za-z_ ]+ + + + + + + + 64 + [0-9A-Fa-f]+ + + + + + + + 512000 + [.,0-9]+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + .*Z + + + + + .*Z + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1024 + + + + + + + + + + + + + + 512000 + + + + + + diff --git a/schemas/relaxng/myrpki.rnc b/schemas/relaxng/myrpki.rnc new file mode 100644 index 00000000..156ab0d5 --- /dev/null +++ b/schemas/relaxng/myrpki.rnc @@ -0,0 +1,164 @@ +# $Id$ +# +# RelaxNG schema for MyRPKI XML messages. +# +# This message protocol is on its way out, as we're in the process of +# moving on from the user interface model that produced it, but even +# after we finish replacing it we'll still need the schema for a while +# to validate old messages when upgrading. +# +# libxml2 (including xmllint) only groks the XML syntax of RelaxNG, so +# run the compact syntax through trang to get XML syntax. +# +# Copyright (C) 2009-2011 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +default namespace = "http://www.hactrn.net/uris/rpki/myrpki/" + +version = "2" + +base64 = xsd:base64Binary { maxLength="512000" } +object_handle = xsd:string { maxLength="255" pattern="[\-_A-Za-z0-9]+" } +pubd_handle = xsd:string { maxLength="255" pattern="[\-_A-Za-z0-9/]+" } +uri = xsd:anyURI { maxLength="4096" } +asn = xsd:positiveInteger +asn_list = xsd:string { maxLength="512000" pattern="[\-,0-9]+" } +ipv4_list = xsd:string { maxLength="512000" pattern="[\-,0-9/.]+" } +ipv6_list = xsd:string { maxLength="512000" pattern="[\-,0-9/:a-fA-F]+" } +timestamp = xsd:dateTime { pattern=".*Z" } + +# Message formate used between configure_resources and +# configure_daemons. + +start |= element myrpki { + attribute version { version }, + attribute handle { object_handle }, + attribute service_uri { uri }?, + element roa_request { + attribute asn { asn }, + attribute v4 { ipv4_list }, + attribute v6 { ipv6_list } + }*, + element child { + attribute handle { object_handle }, + attribute valid_until { timestamp }, + attribute asns { asn_list }?, + attribute v4 { ipv4_list }?, + attribute v6 { ipv6_list }?, + element bpki_certificate { base64 }? + }*, + element parent { + attribute handle { object_handle }, + attribute service_uri { uri }?, + attribute myhandle { object_handle }?, + attribute sia_base { uri }?, + element bpki_cms_certificate { base64 }? + }*, + element repository { + attribute handle { object_handle }, + attribute service_uri { uri }?, + element bpki_certificate { base64 }? + }*, + element bpki_ca_certificate { base64 }?, + element bpki_crl { base64 }?, + element bpki_bsc_certificate { base64 }?, + element bpki_bsc_pkcs10 { base64 }? 
+} + +# Format of an identity.xml file. + +start |= element identity { + attribute version { version }, + attribute handle { object_handle }, + element bpki_ta { base64 } +} + +# Format of element used in referrals. The Base64 +# text is a (q. v.) element signed with CMS. + +authorization = element authorization { + attribute referrer { pubd_handle }, + base64 +} + +# Format of element used in referrals. + +contact_info = element contact_info { + attribute uri { uri }?, + xsd:string +} + +# Variant payload portion of a element. + +repository_payload = ( + (attribute type { "none" }) | + (attribute type { "offer" }) | + (attribute type { "referral" }, authorization, contact_info) +) + +# element (response from configure_child). + +start |= element parent { + attribute version { version }, + attribute valid_until { timestamp }?, + attribute service_uri { uri }?, + attribute child_handle { object_handle }, + attribute parent_handle { object_handle }, + element bpki_resource_ta { base64 }, + element bpki_child_ta { base64 }, + element repository { repository_payload }? +} + +# element, types offer and referral +# (input to configure_publication_client). + +start |= element repository { + attribute version { version }, + attribute handle { object_handle }, + attribute parent_handle { object_handle }, + repository_payload, + element bpki_client_ta { base64 } +} + +# element, confirmation type (output of +# configure_publication_client). + +start |= element repository { + attribute version { version }, + attribute type { "confirmed" }, + attribute parent_handle { object_handle }, + attribute client_handle { pubd_handle }, + attribute service_uri { uri }, + attribute sia_base { uri }, + element bpki_server_ta { base64 }, + element bpki_client_ta { base64 }, + authorization?, + contact_info? +} + +# element. This is the entirety of a separate message +# which is signed with CMS then included ase the Base64 content of an +# element in the main message. 
+ +start |= element referral { + attribute version { version }, + attribute authorized_sia_base { uri }, + base64 +} + +# Local Variables: +# indent-tabs-mode: nil +# comment-start: "# " +# comment-start-skip: "#[ \t]*" +# End: diff --git a/schemas/relaxng/myrpki.rng b/schemas/relaxng/myrpki.rng new file mode 100644 index 00000000..07b8beb3 --- /dev/null +++ b/schemas/relaxng/myrpki.rng @@ -0,0 +1,379 @@ + + + + + 2 + + + + 512000 + + + + + 255 + [\-_A-Za-z0-9]+ + + + + + 255 + [\-_A-Za-z0-9/]+ + + + + + 4096 + + + + + + + + 512000 + [\-,0-9]+ + + + + + 512000 + [\-,0-9/.]+ + + + + + 512000 + [\-,0-9/:a-fA-F]+ + + + + + .*Z + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + none + + + offer + + + + referral + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + confirmed + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/schemas/relaxng/publication-schema.rnc b/schemas/relaxng/publication-schema.rnc new file mode 100644 index 00000000..4353ae80 --- /dev/null +++ b/schemas/relaxng/publication-schema.rnc @@ -0,0 +1,137 @@ +# $Id$ +# +# RelaxNG schema for RPKI publication protocol. +# +# Copyright (C) 2012--2014 Dragon Research Labs ("DRL") +# Portions copyright (C) 2009--2011 Internet Systems Consortium ("ISC") +# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notices and this permission notice appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL, +# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +default namespace = "http://www.hactrn.net/uris/rpki/publication-spec/" + +# Top level PDU + +start = element msg { + attribute version { xsd:positiveInteger { maxInclusive="1" } }, + ( (attribute type { "query" }, query_elt*) | + (attribute type { "reply" }, reply_elt*) ) +} + +# PDUs allowed in a query +query_elt = ( config_query | client_query | certificate_query | crl_query | + manifest_query | roa_query | ghostbuster_query ) + +# PDUs allowed in a reply +reply_elt = ( config_reply | client_reply | certificate_reply | crl_reply | + manifest_reply | roa_reply | ghostbuster_reply | report_error_reply ) + +# Tag attributes for bulk operations +tag = attribute tag { xsd:token {maxLength="1024" } } + +# Base64 encoded DER stuff +#base64 = xsd:base64Binary { maxLength="512000" } +# +# Sadly, it turns out that CRLs can in fact get longer than this for an active CA. +# Remove length limit for now, think about whether to put it back later. +base64 = xsd:base64Binary + +# Publication URLs +uri_t = xsd:anyURI { maxLength="4096" } +uri = attribute uri { uri_t } + +# Handles on remote objects (replaces passing raw SQL IDs). NB: +# Unlike the up-down protocol, handles in this protocol allow "/" as a +# hierarchy delimiter. 
+object_handle = xsd:string { maxLength="255" pattern="[\-_A-Za-z0-9/]+" } + +# element (use restricted to repository operator) +# config_handle attribute, create, list, and destroy commands omitted deliberately, see code for details + +config_payload = (element bpki_crl { base64 }?) + +config_query |= element config { attribute action { "set" }, tag?, config_payload } +config_reply |= element config { attribute action { "set" }, tag? } +config_query |= element config { attribute action { "get" }, tag? } +config_reply |= element config { attribute action { "get" }, tag?, config_payload } + +# element (use restricted to repository operator) + +client_handle = attribute client_handle { object_handle } + +client_bool = attribute clear_replay_protection { "yes" }? + +client_payload = (attribute base_uri { uri_t }?, element bpki_cert { base64 }?, element bpki_glue { base64 }?) + +client_query |= element client { attribute action { "create" }, tag?, client_handle, client_bool, client_payload } +client_reply |= element client { attribute action { "create" }, tag?, client_handle } +client_query |= element client { attribute action { "set" }, tag?, client_handle, client_bool, client_payload } +client_reply |= element client { attribute action { "set" }, tag?, client_handle } +client_query |= element client { attribute action { "get" }, tag?, client_handle } +client_reply |= element client { attribute action { "get" }, tag?, client_handle, client_payload } +client_query |= element client { attribute action { "list" }, tag? 
} +client_reply |= element client { attribute action { "list" }, tag?, client_handle, client_payload } +client_query |= element client { attribute action { "destroy" }, tag?, client_handle } +client_reply |= element client { attribute action { "destroy" }, tag?, client_handle } + +# element + +certificate_query |= element certificate { attribute action { "publish" }, tag?, uri, base64 } +certificate_reply |= element certificate { attribute action { "publish" }, tag?, uri } +certificate_query |= element certificate { attribute action { "withdraw" }, tag?, uri } +certificate_reply |= element certificate { attribute action { "withdraw" }, tag?, uri } + +# element + +crl_query |= element crl { attribute action { "publish" }, tag?, uri, base64 } +crl_reply |= element crl { attribute action { "publish" }, tag?, uri } +crl_query |= element crl { attribute action { "withdraw" }, tag?, uri } +crl_reply |= element crl { attribute action { "withdraw" }, tag?, uri } + +# element + +manifest_query |= element manifest { attribute action { "publish" }, tag?, uri, base64 } +manifest_reply |= element manifest { attribute action { "publish" }, tag?, uri } +manifest_query |= element manifest { attribute action { "withdraw" }, tag?, uri } +manifest_reply |= element manifest { attribute action { "withdraw" }, tag?, uri } + +# element + +roa_query |= element roa { attribute action { "publish" }, tag?, uri, base64 } +roa_reply |= element roa { attribute action { "publish" }, tag?, uri } +roa_query |= element roa { attribute action { "withdraw" }, tag?, uri } +roa_reply |= element roa { attribute action { "withdraw" }, tag?, uri } + +# element + +ghostbuster_query |= element ghostbuster { attribute action { "publish" }, tag?, uri, base64 } +ghostbuster_reply |= element ghostbuster { attribute action { "publish" }, tag?, uri } +ghostbuster_query |= element ghostbuster { attribute action { "withdraw" }, tag?, uri } +ghostbuster_reply |= element ghostbuster { attribute action { "withdraw" }, 
tag?, uri } + +# element + +error = xsd:token { maxLength="1024" } + +report_error_reply = element report_error { + tag?, + attribute error_code { error }, + xsd:string { maxLength="512000" }? +} + +# Local Variables: +# indent-tabs-mode: nil +# comment-start: "# " +# comment-start-skip: "#[ \t]*" +# End: diff --git a/schemas/relaxng/publication-schema.rng b/schemas/relaxng/publication-schema.rng new file mode 100644 index 00000000..63fb6ea5 --- /dev/null +++ b/schemas/relaxng/publication-schema.rng @@ -0,0 +1,574 @@ + + + + + + + + + 1 + + + + + + query + + + + + + + + reply + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1024 + + + + + + + + + + + 4096 + + + + + + + + + + + 255 + [\-_A-Za-z0-9/]+ + + + + + + + + + + + + + + set + + + + + + + + + + + set + + + + + + + + + + get + + + + + + + + + + get + + + + + + + + + + + + + + + + + yes + + + + + + + + + + + + + + + + + + + + + + + + create + + + + + + + + + + + + + create + + + + + + + + + + + set + + + + + + + + + + + + + set + + + + + + + + + + + get + + + + + + + + + + + get + + + + + + + + + + + + list + + + + + + + + + + list + + + + + + + + + + + + destroy + + + + + + + + + + + destroy + + + + + + + + + + + + publish + + + + + + + + + + + + publish + + + + + + + + + + + withdraw + + + + + + + + + + + withdraw + + + + + + + + + + + + publish + + + + + + + + + + + + publish + + + + + + + + + + + withdraw + + + + + + + + + + + withdraw + + + + + + + + + + + + publish + + + + + + + + + + + + publish + + + + + + + + + + + withdraw + + + + + + + + + + + withdraw + + + + + + + + + + + + publish + + + + + + + + + + + + publish + + + + + + + + + + + withdraw + + + + + + + + + + + withdraw + + + + + + + + + + + + publish + + + + + + + + + + + + publish + + + + + + + + + + + withdraw + + + + + + + + + + + withdraw + + + + + + + + + + + 1024 + + + + + + + + + + + + + 512000 + + + + + + diff --git a/schemas/relaxng/router-certificate-schema.rnc 
b/schemas/relaxng/router-certificate-schema.rnc new file mode 100644 index 00000000..8cc325ce --- /dev/null +++ b/schemas/relaxng/router-certificate-schema.rnc @@ -0,0 +1,61 @@ +# $Id$ +# +# RelaxNG schema for BGPSEC router certificate interchange format. +# +# At least for now, this is a trivial encapsulation of a PKCS #10 +# request, a set (usually containing exactly one member) of autonomous +# system numbers, and a router-id. Be warned that this could change +# radically by the time we have any real operational understanding of +# how these things will be used, this is just our current best guess +# to let us move forward on initial coding. +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +default namespace = "http://www.hactrn.net/uris/rpki/router-certificate/" + +version = "1" +base64 = xsd:base64Binary { maxLength="512000" } +router_id = xsd:unsignedInt +asn_list = xsd:string { maxLength="512000" pattern="[0-9][\-,0-9]*" } +timestamp = xsd:dateTime { pattern=".*Z" } + +# Core payload used in this schema. 
+ +payload = ( + attribute router_id { router_id }, + attribute asn { asn_list }, + attribute valid_until { timestamp }?, + base64 +) + +# We allow two forms, one with a wrapper to allow multiple requests in +# a single file, one without for brevity; the version attribute goes +# in the outermost element in either case. + +start |= element router_certificate_request { + attribute version { version }, + payload +} + +start |= element router_certificate_requests { + attribute version { version }, + element router_certificate_request { payload }* +} + +# Local Variables: +# indent-tabs-mode: nil +# comment-start: "# " +# comment-start-skip: "#[ \t]*" +# End: diff --git a/schemas/relaxng/router-certificate-schema.rng b/schemas/relaxng/router-certificate-schema.rng new file mode 100644 index 00000000..d8be9eda --- /dev/null +++ b/schemas/relaxng/router-certificate-schema.rng @@ -0,0 +1,98 @@ + + + + + 1 + + + + 512000 + + + + + + + + 512000 + [0-9][\-,0-9]* + + + + + .*Z + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/schemas/relaxng/up-down-schema.rnc b/schemas/relaxng/up-down-schema.rnc new file mode 100644 index 00000000..a603b8fe --- /dev/null +++ b/schemas/relaxng/up-down-schema.rnc @@ -0,0 +1,113 @@ +# $Id$ +# +# RelaxNG schema for the up-down protocol, extracted from RFC 6492. +# +# Copyright (c) 2012 IETF Trust and the persons identified as authors +# of the code. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. 
+# +# * Neither the name of Internet Society, IETF or IETF Trust, nor the +# names of specific contributors, may be used to endorse or promote +# products derived from this software without specific prior written +# permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+ +default namespace = "http://www.apnic.net/specs/rescerts/up-down/" + +grammar { + resource_set_as = xsd:string { maxLength="512000" pattern="[\-,0-9]*" } + resource_set_ip4 = xsd:string { maxLength="512000" pattern="[\-,/.0-9]*" } + resource_set_ip6 = xsd:string { maxLength="512000" pattern="[\-,/:0-9a-fA-F]*" } + + class_name = xsd:token { minLength="1" maxLength="1024" } + ski = xsd:token { minLength="27" maxLength="1024" } + label = xsd:token { minLength="1" maxLength="1024" } + cert_url = xsd:string { minLength="10" maxLength="4096" } + base64_binary = xsd:base64Binary { minLength="4" maxLength="512000" } + + start = element message { + attribute version { xsd:positiveInteger { maxInclusive="1" } }, + attribute sender { label }, + attribute recipient { label }, + payload + } + + payload |= attribute type { "list" }, list_request + payload |= attribute type { "list_response"}, list_response + payload |= attribute type { "issue" }, issue_request + payload |= attribute type { "issue_response"}, issue_response + payload |= attribute type { "revoke" }, revoke_request + payload |= attribute type { "revoke_response"}, revoke_response + payload |= attribute type { "error_response"}, error_response + + list_request = empty + list_response = class* + + class = element class { + attribute class_name { class_name }, + attribute cert_url { cert_url }, + attribute resource_set_as { resource_set_as }, + attribute resource_set_ipv4 { resource_set_ip4 }, + attribute resource_set_ipv6 { resource_set_ip6 }, + attribute resource_set_notafter { xsd:dateTime }, + attribute suggested_sia_head { xsd:anyURI { maxLength="1024" pattern="rsync://.+"} }?, + element certificate { + attribute cert_url { cert_url }, + attribute req_resource_set_as { resource_set_as }?, + attribute req_resource_set_ipv4 { resource_set_ip4 }?, + attribute req_resource_set_ipv6 { resource_set_ip6 }?, + base64_binary + }*, + element issuer { base64_binary } + } + + issue_request = element request { + attribute 
class_name { class_name }, + attribute req_resource_set_as { resource_set_as }?, + attribute req_resource_set_ipv4 { resource_set_ip4 }?, + attribute req_resource_set_ipv6 { resource_set_ip6 }?, + base64_binary + } + issue_response = class + + revoke_request = revocation + revoke_response = revocation + + revocation = element key { + attribute class_name { class_name }, + attribute ski { ski } + } + + error_response = + element status { xsd:positiveInteger { maxInclusive="9999" } }, + element description { attribute xml:lang { xsd:language }, xsd:string { maxLength="1024" } }* +} + +# Local Variables: +# indent-tabs-mode: nil +# comment-start: "# " +# comment-start-skip: "#[ \t]*" +# End: diff --git a/schemas/relaxng/up-down-schema.rng b/schemas/relaxng/up-down-schema.rng new file mode 100644 index 00000000..5368fa65 --- /dev/null +++ b/schemas/relaxng/up-down-schema.rng @@ -0,0 +1,277 @@ + + + + + + 512000 + [\-,0-9]* + + + + + 512000 + [\-,/.0-9]* + + + + + 512000 + [\-,/:0-9a-fA-F]* + + + + + 1 + 1024 + + + + + 27 + 1024 + + + + + 1 + 1024 + + + + + 10 + 4096 + + + + + 4 + 512000 + + + + + + + 1 + + + + + + + + + + + + + + list + + + + + + list_response + + + + + + issue + + + + + + issue_response + + + + + + revoke + + + + + + revoke_response + + + + + + error_response + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1024 + rsync://.+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 9999 + + + + + + + + + 1024 + + + + + + diff --git a/schemas/sql/pubd.sql b/schemas/sql/pubd.sql new file mode 100644 index 00000000..3a58ec00 --- /dev/null +++ b/schemas/sql/pubd.sql @@ -0,0 +1,59 @@ +-- $Id$ + +-- Copyright (C) 2009--2010 Internet Systems Consortium ("ISC") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this 
permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- Copyright (C) 2008 American Registry for Internet Numbers ("ARIN") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- SQL objects needed by pubd.py. + +-- The config table is weird because we're really only using it +-- to store one BPKI CRL, but putting this here lets us use a lot of +-- existing machinery and the alternatives are whacky in other ways. 
+ +DROP TABLE IF EXISTS client; +DROP TABLE IF EXISTS config; + +CREATE TABLE config ( + config_id SERIAL NOT NULL, + bpki_crl LONGBLOB, + PRIMARY KEY (config_id) +) ENGINE=InnoDB; + +CREATE TABLE client ( + client_id SERIAL NOT NULL, + client_handle VARCHAR(255) NOT NULL, + base_uri TEXT, + bpki_cert LONGBLOB, + bpki_glue LONGBLOB, + last_cms_timestamp DATETIME, + PRIMARY KEY (client_id), + UNIQUE (client_handle) +) ENGINE=InnoDB; + +-- Local Variables: +-- indent-tabs-mode: nil +-- End: diff --git a/schemas/sql/rpkid.sql b/schemas/sql/rpkid.sql new file mode 100644 index 00000000..a7e3dc0a --- /dev/null +++ b/schemas/sql/rpkid.sql @@ -0,0 +1,250 @@ +-- $Id$ + +-- Copyright (C) 2009--2011 Internet Systems Consortium ("ISC") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") +-- +-- Permission to use, copy, modify, and distribute this software for any +-- purpose with or without fee is hereby granted, provided that the above +-- copyright notice and this permission notice appear in all copies. +-- +-- THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH +-- REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +-- AND FITNESS. 
IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, +-- INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +-- LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +-- OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +-- PERFORMANCE OF THIS SOFTWARE. + +-- SQL objects needed by the RPKI engine (rpkid.py). + +-- DROP TABLE commands must be in correct (reverse dependency) order +-- to satisfy FOREIGN KEY constraints. + +DROP TABLE IF EXISTS ee_cert; +DROP TABLE IF EXISTS ghostbuster; +DROP TABLE IF EXISTS roa_prefix; +DROP TABLE IF EXISTS roa; +DROP TABLE IF EXISTS revoked_cert; +DROP TABLE IF EXISTS child_cert; +DROP TABLE IF EXISTS child; +DROP TABLE IF EXISTS ca_detail; +DROP TABLE IF EXISTS ca; +DROP TABLE IF EXISTS parent; +DROP TABLE IF EXISTS repository; +DROP TABLE IF EXISTS bsc; +DROP TABLE IF EXISTS self; + +CREATE TABLE self ( + self_id SERIAL NOT NULL, + self_handle VARCHAR(255) NOT NULL, + use_hsm BOOLEAN NOT NULL DEFAULT FALSE, + crl_interval BIGINT UNSIGNED, + regen_margin BIGINT UNSIGNED, + bpki_cert LONGBLOB, + bpki_glue LONGBLOB, + PRIMARY KEY (self_id), + UNIQUE (self_handle) +) ENGINE=InnoDB; + +CREATE TABLE bsc ( + bsc_id SERIAL NOT NULL, + bsc_handle VARCHAR(255) NOT NULL, + private_key_id LONGBLOB, + pkcs10_request LONGBLOB, + hash_alg ENUM ('sha256'), + signing_cert LONGBLOB, + signing_cert_crl LONGBLOB, + self_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (bsc_id), + CONSTRAINT bsc_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + UNIQUE (self_id, bsc_handle) +) ENGINE=InnoDB; + +CREATE TABLE repository ( + repository_id SERIAL NOT NULL, + repository_handle VARCHAR(255) NOT NULL, + peer_contact_uri TEXT, + bpki_cert LONGBLOB, + bpki_glue LONGBLOB, + last_cms_timestamp DATETIME, + bsc_id BIGINT UNSIGNED NOT NULL, + self_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (repository_id), + CONSTRAINT repository_self_id + FOREIGN KEY (self_id) 
REFERENCES self (self_id) ON DELETE CASCADE, + CONSTRAINT repository_bsc_id + FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, + UNIQUE (self_id, repository_handle) +) ENGINE=InnoDB; + +CREATE TABLE parent ( + parent_id SERIAL NOT NULL, + parent_handle VARCHAR(255) NOT NULL, + bpki_cms_cert LONGBLOB, + bpki_cms_glue LONGBLOB, + peer_contact_uri TEXT, + sia_base TEXT, + sender_name TEXT, + recipient_name TEXT, + last_cms_timestamp DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + bsc_id BIGINT UNSIGNED NOT NULL, + repository_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (parent_id), + CONSTRAINT parent_repository_id + FOREIGN KEY (repository_id) REFERENCES repository (repository_id) ON DELETE CASCADE, + CONSTRAINT parent_bsc_id + FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, + CONSTRAINT parent_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + UNIQUE (self_id, parent_handle) +) ENGINE=InnoDB; + +CREATE TABLE ca ( + ca_id SERIAL NOT NULL, + last_crl_sn BIGINT UNSIGNED NOT NULL, + last_manifest_sn BIGINT UNSIGNED NOT NULL, + next_manifest_update DATETIME, + next_crl_update DATETIME, + last_issued_sn BIGINT UNSIGNED NOT NULL, + sia_uri TEXT, + parent_resource_class TEXT, + parent_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ca_id), + CONSTRAINT ca_parent_id + FOREIGN KEY (parent_id) REFERENCES parent (parent_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE ca_detail ( + ca_detail_id SERIAL NOT NULL, + public_key LONGBLOB, + private_key_id LONGBLOB, + latest_crl LONGBLOB, + crl_published DATETIME, + latest_ca_cert LONGBLOB, + manifest_private_key_id LONGBLOB, + manifest_public_key LONGBLOB, + latest_manifest_cert LONGBLOB, + latest_manifest LONGBLOB, + manifest_published DATETIME, + state ENUM ('pending', 'active', 'deprecated', 'revoked') NOT NULL, + ca_cert_uri TEXT, + ca_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ca_detail_id), + CONSTRAINT ca_detail_ca_id + FOREIGN KEY (ca_id) REFERENCES ca (ca_id) ON 
DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE child ( + child_id SERIAL NOT NULL, + child_handle VARCHAR(255) NOT NULL, + bpki_cert LONGBLOB, + bpki_glue LONGBLOB, + last_cms_timestamp DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + bsc_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (child_id), + CONSTRAINT child_bsc_id + FOREIGN KEY (bsc_id) REFERENCES bsc (bsc_id) ON DELETE CASCADE, + CONSTRAINT child_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + UNIQUE (self_id, child_handle) +) ENGINE=InnoDB; + +CREATE TABLE child_cert ( + child_cert_id SERIAL NOT NULL, + cert LONGBLOB NOT NULL, + published DATETIME, + ski TINYBLOB NOT NULL, + child_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (child_cert_id), + CONSTRAINT child_cert_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE, + CONSTRAINT child_cert_child_id + FOREIGN KEY (child_id) REFERENCES child (child_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE revoked_cert ( + revoked_cert_id SERIAL NOT NULL, + serial BIGINT UNSIGNED NOT NULL, + revoked DATETIME NOT NULL, + expires DATETIME NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (revoked_cert_id), + CONSTRAINT revoked_cert_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE roa ( + roa_id SERIAL NOT NULL, + asn BIGINT UNSIGNED NOT NULL, + cert LONGBLOB NOT NULL, + roa LONGBLOB NOT NULL, + published DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (roa_id), + CONSTRAINT roa_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + CONSTRAINT roa_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE roa_prefix ( + prefix VARCHAR(40) NOT NULL, + prefixlen TINYINT UNSIGNED NOT NULL, + max_prefixlen TINYINT 
UNSIGNED NOT NULL, + version TINYINT UNSIGNED NOT NULL, + roa_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (roa_id, prefix, prefixlen, max_prefixlen), + CONSTRAINT roa_prefix_roa_id + FOREIGN KEY (roa_id) REFERENCES roa (roa_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE ghostbuster ( + ghostbuster_id SERIAL NOT NULL, + vcard LONGBLOB NOT NULL, + cert LONGBLOB NOT NULL, + ghostbuster LONGBLOB NOT NULL, + published DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ghostbuster_id), + CONSTRAINT ghostbuster_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + CONSTRAINT ghostbuster_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +CREATE TABLE ee_cert ( + ee_cert_id SERIAL NOT NULL, + ski BINARY(20) NOT NULL, + cert LONGBLOB NOT NULL, + published DATETIME, + self_id BIGINT UNSIGNED NOT NULL, + ca_detail_id BIGINT UNSIGNED NOT NULL, + PRIMARY KEY (ee_cert_id), + CONSTRAINT ee_cert_self_id + FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, + CONSTRAINT ee_cert_ca_detail_id + FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE +) ENGINE=InnoDB; + +-- Local Variables: +-- indent-tabs-mode: nil +-- End: diff --git a/scripts/analyze-rcynic-history.py b/scripts/analyze-rcynic-history.py deleted file mode 100644 index b72d0741..00000000 --- a/scripts/analyze-rcynic-history.py +++ /dev/null @@ -1,290 +0,0 @@ -# $Id$ -# -# Copyright (C) 2011-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Parse traffic data out of rynic XML output, whack it a bit, print some -summaries and run gnuplot to draw some pictures. -""" - -plot_all_hosts = False - -window_hours = 72 - -import mailbox -import sys -import urlparse -import os -import datetime -import subprocess -import shelve - -from xml.etree.cElementTree import (ElementTree as ElementTree, - fromstring as ElementTreeFromString) - -def parse_utc(s): - return datetime.datetime.strptime(s, "%Y-%m-%dT%H:%M:%SZ") - -class Rsync_History(object): - """ - An Rsync_History object represents one rsync connection. - """ - - def __init__(self, elt): - self.error = elt.get("error") - self.uri = elt.text.strip() - self.hostname = urlparse.urlparse(self.uri).hostname or None - self.elapsed = parse_utc(elt.get("finished")) - parse_utc(elt.get("started")) - -class Host(object): - """ - A host object represents all the data collected for one host. Note - that it (usually) contains a list of all the sessions in which this - host appears. 
- """ - - def __init__(self, hostname, session_id): - self.hostname = hostname - self.session_id = session_id - self.elapsed = datetime.timedelta(0) - self.connection_count = 0 - self.dead_connections = 0 - self.uris = set() - self.total_connection_time = datetime.timedelta(0) - - def add_rsync_history(self, h): - self.connection_count += 1 - self.elapsed += h.elapsed - self.dead_connections += int(h.error is not None) - self.total_connection_time += h.elapsed - - def add_uri(self, u): - self.uris.add(u) - - def finalize(self): - self.object_count = len(self.uris) - del self.uris - - @property - def failed(self): - return 1 if self.dead_connections else 0 - - @property - def seconds_per_object(self): - if self.failed: - return None - else: - return float(self.elapsed.days * 24 * 60 * 60 + - self.elapsed.seconds + - self.elapsed.microseconds / 10**6) / float(self.object_count) - - @property - def objects_per_connection(self): - if self.failed: - return None - else: - return float(self.object_count) / float(self.connection_count) - - @property - def average_connection_time(self): - return float(self.total_connection_time.days * 24 * 60 * 60 + - self.total_connection_time.seconds + - self.total_connection_time.microseconds / 10**6) / float(self.connection_count) - - class Format(object): - - def __init__(self, attr, title, fmt, ylabel = ""): - self.attr = attr - self.title = title - self.width = len(title) - int("%" in fmt) - self.fmt = "%%%d%s" % (self.width, fmt) - self.oops = "*" * self.width - self.ylabel = ylabel - - def __call__(self, obj): - try: - value = getattr(obj, self.attr) - return None if value is None else self.fmt % value - except ZeroDivisionError: - return self.oops - - format = (Format("connection_count", "Connections", "d", "Connections To Repository (Per Session)"), - Format("object_count", "Objects", "d", "Objects In Repository (Distinct URIs Per Session)"), - Format("objects_per_connection", "Objects/Connection", ".3f", "Objects In Repository / 
Connections To Repository"), - Format("seconds_per_object", "Seconds/Object", ".3f", "Seconds To Transfer / Object (Average Per Session)"), - Format("failure_rate_running", "Failure Rate", ".3f%%", "Sessions With Failed Connections Within Last %d Hours" % window_hours), - Format("average_connection_time", "Average Connection", ".3f", "Seconds / Connection (Average Per Session)"), - Format("hostname", "Hostname", "s")) - - format_dict = dict((fmt.attr, fmt) for fmt in format) - - def format_field(self, name): - result = self.format_dict[name](self) - return None if result is None else result.strip() - -class Session(dict): - """ - A session corresponds to one XML file. This is a dictionary of Host - objects, keyed by hostname. - """ - - def __init__(self, session_id, msg_key): - self.session_id = session_id - self.msg_key = msg_key - self.date = parse_utc(session_id) - self.calculated_failure_history = False - - @property - def hostnames(self): - return set(self.iterkeys()) - - def get_plot_row(self, name, hostnames): - return (self.session_id,) + tuple(self[h].format_field(name) if h in self else "" for h in hostnames) - - def add_rsync_history(self, h): - if h.hostname not in self: - self[h.hostname] = Host(h.hostname, self.session_id) - self[h.hostname].add_rsync_history(h) - - def add_uri(self, u): - h = urlparse.urlparse(u).hostname - if h and h in self: - self[h].add_uri(u) - - def finalize(self): - for h in self.itervalues(): - h.finalize() - - def calculate_failure_history(self, sessions): - start = self.date - datetime.timedelta(hours = window_hours) - sessions = tuple(s for s in sessions if s.date <= self.date and s.date > start) - for hostname, h in self.iteritems(): - i = n = 0 - for s in sessions: - if hostname in s: - i += s[hostname].failed - n += 1 - h.failure_rate_running = float(100 * i) / n - self.calculated_failure_history = True - -def plotter(f, hostnames, field, logscale = False): - plotlines = sorted(session.get_plot_row(field, hostnames) for 
session in sessions) - title = Host.format_dict[field].title - ylabel = Host.format_dict[field].ylabel - n = len(hostnames) + 1 - assert all(n == len(plotline) for plotline in plotlines) - if "%%" in Host.format_dict[field].fmt: - f.write('set format y "%.0f%%"\n') - else: - f.write('set format y\n') - if logscale: - f.write("set logscale y\n") - else: - f.write("unset logscale y\n") - f.write(""" - set xdata time - set timefmt '%Y-%m-%dT%H:%M:%SZ' - #set format x '%m/%d' - #set format x '%b%d' - #set format x '%Y-%m-%d' - set format x '%Y-%m' - #set title '""" + title + """' - set ylabel '""" + ylabel + """' - plot""" + ",".join(" '-' using 1:2 with linespoints pointinterval 500 title '%s'" % h for h in hostnames) + "\n") - for i in xrange(1, n): - for plotline in plotlines: - if plotline[i] is not None: - f.write("%s %s\n" % (plotline[0], plotline[i].rstrip("%"))) - f.write("e\n") - -def plot_hosts(hostnames, fields): - for field in fields: - for logscale in (False, True): - gnuplot = subprocess.Popen(("gnuplot",), stdin = subprocess.PIPE) - gnuplot.stdin.write("set terminal pdf\n") - gnuplot.stdin.write("set output '%s/%s-%s.pdf'\n" % (outdir, field, "log" if logscale else "linear")) - plotter(gnuplot.stdin, hostnames, field, logscale = logscale) - gnuplot.stdin.close() - gnuplot.wait() - -outdir = "images" - -if not os.path.exists(outdir): - os.makedirs(outdir) - -mb = mailbox.Maildir("/u/sra/rpki/rcynic-xml", factory = None, create = False) - -if sys.platform == "darwin": # Sigh - shelf = shelve.open("rcynic-xml", "c") -else: - shelf = shelve.open("rcynic-xml.db", "c") - -sessions = [] - -latest = None -parsed = 0 - -for i, key in enumerate(mb.iterkeys(), 1): - sys.stderr.write("\r%s %d/%d/%d..." % ("|\\-/"[i & 3], parsed, i, len(mb))) - - if key in shelf: - session = shelf[key] - - else: - assert not mb[key].is_multipart() - input = ElementTreeFromString(mb[key].get_payload()) - date = input.get("date") - sys.stderr.write("%s..." 
% date) - session = Session(date, key) - for elt in input.findall("rsync_history"): - session.add_rsync_history(Rsync_History(elt)) - for elt in input.findall("validation_status"): - if elt.get("generation") == "current": - session.add_uri(elt.text.strip()) - session.finalize() - shelf[key] = session - parsed += 1 - - sessions.append(session) - if latest is None or session.session_id > latest.session_id: - latest = session - -sys.stderr.write("\n") - -shelf.sync() - -for session in sessions: - if not getattr(session, "calculated_failure_history", False): - session.calculate_failure_history(sessions) - shelf[session.msg_key] = session - -if plot_all_hosts: - hostnames = sorted(reduce(lambda x, y: x | y, - (s.hostnames for s in sessions), - set())) - -else: - hostnames = ("rpki.apnic.net", "rpki.ripe.net", "localcert.ripe.net", - "repository.lacnic.net", "rpki.afrinic.net", "rpki.arin.net", - "arin.rpki.net", "repo0.rpki.net", "rgnet.rpki.net") - -plot_hosts(hostnames, [fmt.attr for fmt in Host.format if fmt.attr != "hostname"]) - -if latest is not None: - f = open("rcynic.xml", "wb") - f.write(mb[latest.msg_key].get_payload()) - f.close() - -shelf.close() diff --git a/scripts/analyze-transition.py b/scripts/analyze-transition.py deleted file mode 100644 index e2125dfb..00000000 --- a/scripts/analyze-transition.py +++ /dev/null @@ -1,88 +0,0 @@ -# $Id$ -# -# Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Compare rcynic.xml files, tell the user what became invalid, and why. -""" - -import sys - -try: - from lxml.etree import ElementTree -except ImportError: - from xml.etree.ElementTree import ElementTree - -class Object(object): - - def __init__(self, session, uri): - self.session = session - self.uri = uri - self.labels = [] - - def add(self, label): - self.labels.append(label) - - def __cmp__(self, other): - return cmp(self.labels, other.labels) - - @property - def accepted(self): - return "object_accepted" in self.labels - -class Session(dict): - - def __init__(self, name): - self.name = name - tree = ElementTree(file = name) - labels = tuple((elt.tag.strip(), elt.text.strip()) for elt in tree.find("labels")) - self.labels = tuple(pair[0] for pair in labels) - self.descrs = dict(labels) - self.date = tree.getroot().get("date") - for elt in tree.findall("validation_status"): - status = elt.get("status") - uri = elt.text.strip() - if status.startswith("rsync_transfer_") or elt.get("generation") != "current": - continue - if uri not in self: - self[uri] = Object(self, uri) - self[uri].add(status) - -skip_labels = frozenset(("object_accepted", "object_rejected")) - -old_db = new_db = None - -for arg in sys.argv[1:]: - - old_db = new_db - new_db = Session(arg) - if old_db is None: - continue - - old_uris = frozenset(old_db) - new_uris = frozenset(new_db) - - for uri in sorted(old_uris - new_uris): - print new_db.date, uri, "dropped" - - for uri in sorted(old_uris & new_uris): - old = old_db[uri] - new = new_db[uri] - if old.accepted and not new.accepted: - print new_db.date, uri, "invalid" - labels = frozenset(new.labels) - 
frozenset(old.labels) - skip_labels - for label in new.labels: - if label in labels: - print " ", new_db.descrs[label] diff --git a/scripts/apnic-to-csv.py b/scripts/apnic-to-csv.py deleted file mode 100644 index 62293a51..00000000 --- a/scripts/apnic-to-csv.py +++ /dev/null @@ -1,55 +0,0 @@ -# $Id$ -# -# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Parse APNIC "Extended Allocation and Assignment" reports and write -out (just) the RPKI-relevant fields in myrpki-format CSV syntax. 
-""" - -from rpki.csv_utils import csv_writer -from rpki.ipaddrs import v4addr - -asns = csv_writer("asns.csv") -prefixes = csv_writer("prefixes.csv") - -for line in open("delegated-apnic-extended-latest"): - - line = line.rstrip() - - if not line.startswith("apnic|") or line.endswith("|summary"): - continue - - try: - registry, cc, rectype, start, value, date, status, opaque_id = line.split("|") - except ValueError: - continue - - if not opaque_id: - continue - - assert registry == "apnic" - - if rectype == "asn": - asns.writerow((opaque_id, "%s-%s" % (start, int(start) + int(value) - 1))) - - elif rectype == "ipv4": - prefixes.writerow((opaque_id, "%s-%s" % (start, v4addr(v4addr(start) + long(value) - 1)))) - - elif rectype == "ipv6": - prefixes.writerow((opaque_id, "%s/%s" % (start, value))) - -asns.close() -prefixes.close() diff --git a/scripts/application-x-rpki-mailcap-handler.sh b/scripts/application-x-rpki-mailcap-handler.sh deleted file mode 100755 index 891987d9..00000000 --- a/scripts/application-x-rpki-mailcap-handler.sh +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/sh - -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -# Given the Maildir dump format, one can use Mutt as a viewer with two -# tweaks: -# -# 1) Add to ~/.muttrc -# -# auto_view application/x-rpki -# -# 2) Add to ~/.mailcap -# -# application/x-rpki; /path/to/this/script.sh ; copiousoutput -# -# "copiousoutput" is required by mutt to enable auto_view (inline -# display) behavior. -# -# This script could do fancier things (pretty XML formatting, -# verification checks of the CMS, etcetera) if anybody cared. -# For the moment the main use for this script is debugging. - -# We have to jump through some hoops to figure out where our OpenSSL -# binary is. If you have already installed an OpenSSL binary that -# understands CMS, feel free to use that instead. - -#exec 2>&1; set -x - -: ${AWK=/usr/bin/awk} -: ${OPENSSL=$(/usr/bin/dirname $0)/../openssl/openssl/apps/openssl} -: ${SPLITBASE64=$(/usr/bin/dirname $0)/splitbase64.xsl} -: ${XMLINDENT=/usr/local/bin/xmlindent} -: ${XMLLINT=/usr/local/bin/xmllint} -: ${XSLTPROC=/usr/local/bin/xsltproc} - -# This produces prettier output, but also hangs sometimes, apparently some xmlindent bug dealing with really long XML attributes -#OPENSSL_CONF=/dev/null $OPENSSL cms -verify -nosigs -noverify -inform DER 2>/dev/null | $XSLTPROC $SPLITBASE64 - | $XMLINDENT -i 2 | $AWK NF - -# So we do this instead -OPENSSL_CONF=/dev/null $OPENSSL cms -verify -nosigs -noverify -inform DER 2>/dev/null | $XSLTPROC $SPLITBASE64 - | $XMLLINT -format - diff --git a/scripts/arin-to-csv.py b/scripts/arin-to-csv.py deleted file mode 100644 index a4e7ffc3..00000000 --- a/scripts/arin-to-csv.py +++ /dev/null @@ -1,114 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Parse an ARIN database research dump and write out (just) the -RPKI-relevant fields in myrpki-format CSV syntax. - -NB: The input data for this script comes from ARIN under an agreement -that allows research use but forbids redistribution, so if you think -you need a copy of the data, please talk to ARIN about it, not us. - -Input format used to be RPSL WHOIS dump, but ARIN recently went Java, -so we have to parse a 3.5GB XML "document". Credit to Liza Daly for -explaining the incantations needed to convince lxml to do this nicely, -see: http://www.ibm.com/developerworks/xml/library/x-hiperfparse/ -""" - -import sys -import lxml.etree - -from rpki.csv_utils import csv_writer - -def ns(tag): - return "{http://www.arin.net/bulkwhois/core/v1}" + tag - -tag_asn = ns("asn") -tag_net = ns("net") -tag_org = ns("org") -tag_poc = ns("poc") -tag_orgHandle = ns("orgHandle") -tag_netBlock = ns("netBlock") -tag_type = ns("type") -tag_startAddress = ns("startAddress") -tag_endAddress = ns("endAddress") -tag_startAsNumber = ns("startAsNumber") -tag_endAsNumber = ns("endAsNumber") - -def find(node, tag): - return node.findtext(tag).strip() - -def do_asn(node): - asns.writerow((find(node, tag_orgHandle), - "%s-%s" % (find(node, tag_startAsNumber), - find(node, tag_endAsNumber)))) - -erx_table = { - "AF" : "afrinic", - "AP" : "apnic", - "AR" : "arin", - "AV" : "arin", - "FX" : "afrinic", - "LN" : "lacnic", - "LX" : "lacnic", - "PV" : "apnic", - "PX" : "apnic", - "RN" : "ripe", - "RV" : 
"ripe", - "RX" : "ripe" } - -def do_net(node): - handle = find(node, tag_orgHandle) - for netblock in node.iter(tag_netBlock): - tag = find(netblock, tag_type) - startAddress = find(netblock, tag_startAddress) - endAddress = find(netblock, tag_endAddress) - if not startAddress.endswith(".000") and not startAddress.endswith(":0000"): - continue - if not endAddress.endswith(".255") and not endAddress.endswith(":FFFF"): - continue - if tag in ("DS", "DA", "IU"): - prefixes.writerow((handle, "%s-%s" % (startAddress, endAddress))) - elif tag in erx_table: - erx.writerow((erx_table[tag], "%s-%s" % (startAddress, endAddress))) - -dispatch = { tag_asn : do_asn, tag_net : do_net } - -asns = csv_writer("asns.csv") -prefixes = csv_writer("prefixes.csv") -erx = csv_writer("erx.csv") - -root = None - -for event, node in lxml.etree.iterparse(sys.stdin): - - if root is None: - root = node - while root.getparent() is not None: - root = root.getparent() - - if node.getparent() is root: - - if node.tag in dispatch: - dispatch[node.tag](node) - - node.clear() - while node.getprevious() is not None: - del node.getparent()[0] - -asns.close() -prefixes.close() -erx.close() diff --git a/scripts/cross_certify.py b/scripts/cross_certify.py deleted file mode 100644 index fab7743b..00000000 --- a/scripts/cross_certify.py +++ /dev/null @@ -1,74 +0,0 @@ -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2009--2012 Internet Systems Consortium ("ISC") -# Portions copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL, ISC, AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL, -# ISC, OR ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -""" -Cross-certification tool to issue a new certificate based on an old -one that was issued by somebody else. The point of the exercise is to -end up with a valid certificate in our own BPKI which has the same -subject name and subject public key as the one we're replacing. -""" - -import os -import sys -import time -import argparse -import rpki.x509 -import rpki.sundial - -os.environ["TZ"] = "UTC" -time.tzset() - -parser = argparse.ArgumentParser(description = __doc__) -parser.add_argument("-i", "--in", required = True, dest = "input", - type = lambda s: rpki.x509.X509(Auto_file = s), - help = "input certificate") -parser.add_argument("-c", "--ca", required = True, - type = lambda s: rpki.x509.X509(Auto_file = s), - help = "issuing certificate") -parser.add_argument("-k", "--key", required = True, - type = lambda s: rpki.x509.RSA(Auto_file = s), - help = "private key of issuing certificate") -parser.add_argument("-s", "--serial", required = True, - help = "serial number file") -parser.add_argument("-o", "--out", - help = "output filename") -parser.add_argument("-l", "--lifetime", - type = rpki.sundial.timedelta, default = "30d", - help = "lifetime of generated certificate") -args = parser.parse_args() - -now = rpki.sundial.now() -notAfter = now + args.lifetime - -try: - with open(args.serial, "r") as f: - serial = int(f.read().splitlines()[0], 16) -except IOError: - serial = 1 - -cert = args.ca.cross_certify(args.key, args.input, serial, notAfter, now) - -with open(args.serial, "w") as f: - f.write("%02x\n" % (serial + 1)) - -if args.out is None: - sys.stdout.write(cert.get_PEM()) -else: - with open(args.out, "w") as f: - 
f.write(cert.get_PEM()) diff --git a/scripts/csvgrep.py b/scripts/csvgrep.py deleted file mode 100644 index 68bdd259..00000000 --- a/scripts/csvgrep.py +++ /dev/null @@ -1,72 +0,0 @@ -# $Id$ -# -# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Utility to simplify finding handles in one of the pseudo-RIR databases. - -Usage: python csvgrep.py datum [datum ...] - -where each datum is an ASN, IP address, or IP prefix. - -ASNs are recognized by being pure integers; IP addreses are recognized -by having dots (IPv4) or colons (IPv6). - -After eating all of the command line arguments, we search asns.csv for -any ASNs given, and prefixes.csv for any prefixes given. 
-""" - -import sys -from rpki.resource_set import resource_set_as, resource_set_ipv4, resource_set_ipv6 -from rpki.csv_utils import csv_reader - -asn = resource_set_as() -ipv4 = resource_set_ipv4() -ipv6 = resource_set_ipv6() - -for datum in sys.argv[1:]: - if datum.replace("-", "").isdigit(): - t = asn - else: - t = ipv6 if ":" in datum else ipv4 - if "-" not in datum and "/" not in datum: - datum = datum + "-" + datum - try: - t.append(t.parse_str(datum)) - except: - print "Error attempting to parse", datum - raise - -#print "Looking for: ASNs %s IPv4 %s IPv6 %s" % (asn, ipv4, ipv6) - -def matches(set1, datum): - set2 = set1.__class__(datum) - if set1.intersection(set2): - return set2 - else: - return False - -if asn: - for h, a in csv_reader("asns.csv", columns = 2): - m = matches(asn, a) - if m: - print h, m - -if ipv4 or ipv6: - for h, a in csv_reader("prefixes.csv", columns = 2): - t = ipv6 if ":" in a else ipv4 - m = t and matches(t, a) - if m: - print h, m diff --git a/scripts/expand-roa-prefixes.py b/scripts/expand-roa-prefixes.py deleted file mode 100644 index ae34ea0a..00000000 --- a/scripts/expand-roa-prefixes.py +++ /dev/null @@ -1,79 +0,0 @@ -# $Id$ -# -# Copyright (C) 2011 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -""" -I got tired of trying to explain in English how the maxLength macro -hack works in ROAs, so this is an attempt to explain it as code. - -Given one or more ROA prefix sets on the command line, this script -prints out the expansion as a list of prefixes. -""" - -import sys -import rpki.resource_set -import rpki.ipaddrs - -class NotAPrefix(Exception): - """ - Address is not a proper prefix. - """ - -class address_range(object): - """ - Iterator for rpki.ipaddrs address objects. - """ - - def __init__(self, start, stop, step): - self.addr = start - self.stop = stop - self.step = step - self.type = type(start) - - def __iter__(self): - while self.addr < self.stop: - yield self.addr - self.addr = self.type(self.addr + self.step) - -def main(argv): - - prefix_sets = [] - for arg in argv: - if ":" in arg: - prefix_sets.extend(rpki.resource_set.roa_prefix_set_ipv6(arg)) - else: - prefix_sets.extend(rpki.resource_set.roa_prefix_set_ipv4(arg)) - - for prefix_set in prefix_sets: - sys.stdout.write("%s expands to:\n" % prefix_set) - - prefix_type = prefix_set.range_type.datum_type - prefix_min = prefix_set.prefix - prefix_max = prefix_set.prefix + (1L << (prefix_type.bits - prefix_set.prefixlen)) - - for prefixlen in xrange(prefix_set.prefixlen, prefix_set.max_prefixlen + 1): - - step = (1L << (prefix_type.bits - prefixlen)) - mask = step - 1 - - for addr in address_range(prefix_min, prefix_max, step): - if (addr & mask) != 0: - raise NotAPrefix, "%s is not a /%d prefix" % (addr, prefixlen) - sys.stdout.write(" %s/%d\n" % (addr, prefixlen)) - - sys.stdout.write("\n") - -if __name__ == "__main__": - main(sys.argv[1:] if len(sys.argv) > 1 else ["18.0.0.0/8-24"]) diff --git a/scripts/extract-key.py b/scripts/extract-key.py deleted file mode 100644 index b85c3d55..00000000 --- a/scripts/extract-key.py +++ /dev/null @@ -1,64 +0,0 @@ -# $Id$ - -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2008 American Registry for Internet Numbers ("ARIN") -# 
-# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND AND ARIN DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR -# ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Extract a private key from rpkid's database. - -This is a debugging tool. rpkid goes to some trouble not to expose -private keys, which is correct for normal operation, but for debugging -it is occasionally useful to be able to extract the private key from -MySQL. This script is just a convenience, it doesn't enable anything -that couldn't be done via the mysql command line tool. - -While we're at this we also extract the corresponding certificate. 
-""" - -import os -import time -import argparse -import sys -import MySQLdb -import rpki.x509 - -os.environ["TZ"] = "UTC" -time.tzset() - -parser = argparse.ArgumentParser(description = __doc__) -parser.add_argument("-s", "--self", required = True, help = "self handle") -parser.add_argument("-b", "--bsc", required = True, help = "BSC handle") -parser.add_argument("-u", "--user", required = True, help = "MySQL user name") -parser.add_argument("-d", "--db", required = True, help = "MySQL database name") -parser.add_argument("-p", "--password", required = True, help = "MySQL password") -args = parser.parse_args() - -cur = MySQLdb.connect(user = args.user, db = args.db, passwd = args.password).cursor() - -cur.execute( - """ - SELECT bsc.private_key_id, bsc.signing_cert - FROM bsc, self - WHERE self.self_handle = %s AND self.self_id = bsc.self_id AND bsc_handle = %s - """, - (args.self, args.bsc)) - -key, cer = cur.fetchone() - -print rpki.x509.RSA(DER = key).get_PEM() - -if cer: - print rpki.x509.X509(DER = cer).get_PEM() diff --git a/scripts/fakerootd.py b/scripts/fakerootd.py deleted file mode 100644 index 6275a2a9..00000000 --- a/scripts/fakerootd.py +++ /dev/null @@ -1,50 +0,0 @@ -# $Id$ -# -# Copyright (C) 2011 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -""" -Hack to fake a catatonic rootd, for testing. -""" - -import sys -import socket -import datetime -import signal - -port = int(sys.argv[1]) if len(sys.argv) > 1 else 4405 -limit = int(sys.argv[2]) if len(sys.argv) > 2 else 5 - -print "Listening on port", port - -s4 = socket.socket(socket.AF_INET, socket.SOCK_STREAM) -s4.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) -s4.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) -s4.bind(('', port)) -s4.listen(limit) - -s6 = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) -s6.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) -s6.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) -s6.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) -s6.bind(('::1', port)) -s6.listen(limit) - -print "Going to sleep at", datetime.datetime.utcnow() - -try: - signal.pause() -except KeyboardInterrupt: - sys.exit(0) - diff --git a/scripts/find-roa-expiration.py b/scripts/find-roa-expiration.py deleted file mode 100644 index 1401dc42..00000000 --- a/scripts/find-roa-expiration.py +++ /dev/null @@ -1,61 +0,0 @@ -# $Id$ -# -# Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -""" -Look for ROAs for particular prefixes, like find_roa, then, for each -ROA we find, dig out the expiration times of all the certificates -involved in the authorization chain, all the way back to the root. -""" - -import sys -import subprocess -import rpki.POW - -def filename_to_uri(filename): - if not filename.startswith(sys.argv[1]): - raise ValueError - return "rsync://" + filename[len(sys.argv[1]):].lstrip("/") - -def uri_to_filename(uri): - if not uri.startswith("rsync://"): - raise ValueError - return sys.argv[1] + "/" + uri[len("rsync://"):] - -def get_aia(x): - for i in xrange(x.countExtensions()): - ext = x.getExtension(i) - if ext[0] == "authorityInfoAccess": - return ext[2][ext[2].index("rsync://"):] - return None - -for line in subprocess.check_output(["find_roa"] + sys.argv[1:]).splitlines(): - - words = line.split() - fn = words.pop() - del words[-1] - print " ".join(words) - - x = rpki.POW.CMS.derReadFile(fn).certs()[0] - uri = get_aia(x) - print x.getNotAfter(), filename_to_uri(fn) - - while uri: - fn = uri_to_filename(uri) - x = rpki.POW.X509.derReadFile(fn) - print x.getNotAfter(), uri - uri = get_aia(x) - - print diff --git a/scripts/format-application-x-rpki.py b/scripts/format-application-x-rpki.py deleted file mode 100644 index 184103f9..00000000 --- a/scripts/format-application-x-rpki.py +++ /dev/null @@ -1,132 +0,0 @@ -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL DRL OR -# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Take the basic application/x-rpki messages that rpkid and friends -log and translate them into a text version that's easier to search, -without losing any of the original data. We use MH for the output -format because nmh makes a handy viewer. -""" - -import email.mime -import email.mime.application -import email.mime.text -import email.mime.multipart -import email.utils -import email.encoders -import mailbox -import rpki.POW -import lxml.etree -import argparse -import sys -import base64 - -parser = argparse.ArgumentParser(description = __doc__) -parser.add_argument("-i", "--input", required = True, - help = "input Maildir") -parser.add_argument("-m", "--mark", action = "store_true", - help = "mark seen messages") -parser.add_argument("-k", "--kill", action = "store_true", - help = "kill seen messages") -parser.add_argument("-o", "--output", required = True, - help = "output MH folder") -parser.add_argument("-t", "--tag", - default = "{http://www.apnic.net/specs/rescerts/up-down/}message", - help = "XML namespace tag for an input message") -parser.add_argument("-u", "--unseen", action = "store_true", - help = "only process unseen messages") -args = parser.parse_args() - -def pprint_cert(b64): - return rpki.POW.X509.derRead(base64.b64decode(b64)).pprint() - -def up_down(): - msg["X-RPKI-Up-Down-Type"] = xml.get("type") - msg["X-RPKI-Up-Down-Sender"] = xml.get("sender") - msg["X-RPKI-Up-Down-Recipient"] = xml.get("recipient") - msg["Subject"] = "Up-down %s %s => %s" % (xml.get("type"), xml.get("sender"), xml.get("recipient")) - for x in xml: - if x.tag.endswith("class"): - for y in x: - if y.tag.endswith("certificate") or 
y.tag.endswith("issuer"): - msg.attach(email.mime.text.MIMEText(pprint_cert(y.text))) - -def left_right(): - msg["X-RPKI-Left-Right-Type"] = xml.get("type") - msg["Subject"] = "Left-right %s" % xml.get("type") - -def publication(): - msg["X-RPKI-Left-Right-Type"] = xml.get("type") - msg["Subject"] = "Publication %s" % xml.get("type") - -dispatch = { "{http://www.apnic.net/specs/rescerts/up-down/}message" : up_down, - "{http://www.hactrn.net/uris/rpki/left-right-spec/}msg" : left_right, - "{http://www.hactrn.net/uris/rpki/publication-spec/}msg" : publication } - -def fix_headers(): - if "X-RPKI-PID" in srcmsg or "X-RPKI-Object" in srcmsg: - msg["X-RPKI-PID"] = srcmsg["X-RPKI-PID"] - msg["X-RPKI-Object"] = srcmsg["X-RPKI-Object"] - else: - words = srcmsg["Subject"].split() - msg["X-RPKI-PID"] = words[1] - msg["X-RPKI-Object"] = " ".join(words[4:]) - -destination = None -source = None -try: - destination = mailbox.MH(args.output, factory = None, create = True) - source = mailbox.Maildir(args.input, factory = None) - - for srckey, srcmsg in source.iteritems(): - if args.unseen and "S" in srcmsg.get_flags(): - continue - assert not srcmsg.is_multipart() and srcmsg.get_content_type() == "application/x-rpki" - payload = srcmsg.get_payload(decode = True) - cms = rpki.POW.CMS.derRead(payload) - txt = cms.verify(rpki.POW.X509Store(), None, rpki.POW.CMS_NOCRL | rpki.POW.CMS_NO_SIGNER_CERT_VERIFY | rpki.POW.CMS_NO_ATTR_VERIFY | rpki.POW.CMS_NO_CONTENT_VERIFY) - xml = lxml.etree.fromstring(txt) - tag = xml.tag - if args.tag and tag != args.tag: - continue - msg = email.mime.multipart.MIMEMultipart("related") - msg["X-RPKI-Tag"] = tag - for i in ("Date", "Message-ID", "X-RPKI-Timestamp"): - msg[i] = srcmsg[i] - fix_headers() - if tag in dispatch: - dispatch[tag]() - if "Subject" not in msg: - msg["Subject"] = srcmsg["Subject"] - msg.attach(email.mime.text.MIMEText(txt)) - msg.attach(email.mime.application.MIMEApplication(payload, "x-rpki")) - msg.epilogue = "\n" # Force trailing 
newline - key = destination.add(msg) - print "Added", key - if args.kill: - del source[srckey] - elif args.mark: - srcmsg.set_subdir("cur") - srcmsg.add_flag("S") - source[srckey] = srcmsg - -finally: - if destination: - destination.close() - if source: - source.close() diff --git a/scripts/gc_summary.awk b/scripts/gc_summary.awk deleted file mode 100644 index b3b1bc6a..00000000 --- a/scripts/gc_summary.awk +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/awk -f - -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -# Use gnuplot to graph interesting data from gc_summary lines in rpkid logs. - -BEGIN { - target = ENVIRON["TARGET"] ? ENVIRON["TARGET"] : "tuple"; - outtype = ENVIRON["OUTTYPE"] ? ENVIRON["OUTTYPE"] : "png"; - outname = ENVIRON["OUTNAME"] ? ENVIRON["OUTNAME"] : ""; - print "set xdata time"; - print "set timefmt '%Y-%m-%dT%H:%M:%S'"; - #print "set format x '%d%b'"; - print "set format x '%T'"; - print "set key right bottom"; - if (outname) { - print "set terminal", outtype; - print "set output '" outname "." 
outtype "'"; - print "set term png size 1024,1024"; - } - if (ARGC <= 2) { - print "plot '-' using 1:2 with linespoints title 'rpkid use of", target, "objects'"; - } else { - cmd = "plot '-' using 1:2 with linespoints title '" ARGV[1] "'"; - for (i = 2; i < ARGC; i++) - cmd = cmd ", '-' using 1:2 with linespoints title '" ARGV[i] "'"; - print cmd; - } -} - -FILENAME != filename && filename { - print "e"; -} - -FILENAME != filename { - print "#", FILENAME - filename = FILENAME; - proc = ""; -} - -$6 == target && proc != $3 && proc { - print ""; -} - -$6 == target && proc != $3 { - proc = $3; -} - -$6 == target { - print "#", $0; - print $1 "T" $2, $5; -} - -END { - print "e"; - if (!outname) - print "pause mouse any"; -} diff --git a/scripts/gc_summary.py b/scripts/gc_summary.py deleted file mode 100644 index 1f6987bf..00000000 --- a/scripts/gc_summary.py +++ /dev/null @@ -1,112 +0,0 @@ -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -# Use gnuplot to graph interesting data from gc_summary lines in rpkid logs. 
- -import sys, os, time - -class datapoint(object): - - outtype = os.getenv("OUTTYPE", "png") - outname = os.getenv("OUTNAME", "") - timefmt = os.getenv("TIMEFMT", "%T") - pretend = os.getenv("PRETEND_EVERYTHING_CHANGED", False) - threshold = int(os.getenv("THRESHOLD", "100")) - - raw = [] - filenames = [] - - def __init__(self, filename, timestamp, process, count, typesig, line): - self.filename = filename - self.timestamp = timestamp - self.process = process - self.count = count - self.typesig = typesig - self.line = line - self.key = "%s %s" % (filename, typesig) - self.raw.append(self) - if filename not in self.filenames: - self.filenames.append(filename) - - def __cmp__(self, other): - c = cmp(self.key, other.key) - return c if c else cmp(self.timestamp, other.timestamp) - - @classmethod - def plot(cls): - - print "# [%s] Looking for interesting records" % time.strftime("%T") - changed = {} - for i in cls.raw: - if i.key not in changed: - changed[i.key] = set() - changed[i.key].add(i.count) - if cls.pretend: - changed = set(changed.iterkeys()) - else: - changed = set(k for k, v in changed.iteritems() if max(v) - min(v) > cls.threshold) - - if not changed: - print "# [%s] Apparently nothing worth reporting" % time.strftime("%T") - print "print 'Nothing to plot'" - return - - print "# [%s] Header" % time.strftime("%T") - print "set xdata time" - print "set timefmt '%Y-%m-%dT%H:%M:%S'" - print "set format x '%s'" % cls.timefmt - print "set key right bottom" - if cls.outname: - print "set terminal", cls.outtype - print "set output '%s.%s'" % (cls.outname, cls.outtype) - print "set term png size 1024,1024" - print "plot", ", ".join("'-' using 1:2 with linespoints title '%s'" % i for i in changed) - - print "# [%s] Sorting" % time.strftime("%T") - cls.raw.sort() - - key = None - proc = None - for i in cls.raw: - if i.key not in changed: - continue - if key is not None and i.key != key: - print "e" - elif proc is not None and i.process != proc: - print "" - key = 
i.key - proc = i.process - print "#", i.key, i.line - print i.timestamp, i.count - print "e" - if not cls.outname: - print "pause mouse any" - -for filename in sys.argv[1:]: - print "# [%s] Reading %s" % (time.strftime("%T"), filename) - for line in open(filename): - if "gc_summary:" in line: - word = line.split(None, 6) - if word[4].isdigit() and word[5].startswith("(") and word[5].endswith(")"): - datapoint(filename = filename, - timestamp = word[0] + "T" + word[1], - process = word[2], - count = int(word[4]), - typesig = word[5], - line = line.strip()) - -print "# [%s] Plotting" % time.strftime("%T") -datapoint.plot() diff --git a/scripts/generate-ripe-root-cert.py b/scripts/generate-ripe-root-cert.py deleted file mode 100644 index 3407bc51..00000000 --- a/scripts/generate-ripe-root-cert.py +++ /dev/null @@ -1,57 +0,0 @@ -# $Id$ -# -# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Parse IANA XML data and write out just what we need to generate a root -cert for Pseudo-RIPE. 
-""" - -import sys -import lxml.etree -from rpki.csv_utils import csv_writer - -def iterate_xml(filename, tag): - return lxml.etree.parse(filename).getroot().getiterator(tag) - -def ns(tag): - return "{http://www.iana.org/assignments}" + tag - -tag_description = ns("description") -tag_designation = ns("designation") -tag_record = ns("record") -tag_number = ns("number") -tag_prefix = ns("prefix") - -asns = csv_writer("asns.csv") -prefixes = csv_writer("prefixes.csv") - -for record in iterate_xml("as-numbers.xml", tag_record): - if record.findtext(tag_description) == "Assigned by RIPE NCC": - asns.writerow(("RIPE", record.findtext(tag_number))) - -for record in iterate_xml("ipv4-address-space.xml", tag_record): - if record.findtext(tag_designation) in ("RIPE NCC", "Administered by RIPE NCC"): - prefix = record.findtext(tag_prefix) - p, l = prefix.split("/") - assert l == "8", "Violated /8 assumption: %r" % prefix - prefixes.writerow(("RIPE", "%d.0.0.0/8" % int(p))) - -for record in iterate_xml("ipv6-unicast-address-assignments.xml", tag_record): - if record.findtext(tag_description) == "RIPE NCC": - prefixes.writerow(("RIPE", record.findtext(tag_prefix))) - -asns.close() -prefixes.close() diff --git a/scripts/gski.py b/scripts/gski.py deleted file mode 100644 index 083a59c8..00000000 --- a/scripts/gski.py +++ /dev/null @@ -1,21 +0,0 @@ -# $Id$ - -# Copyright (C) 2007--2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -import rpki.x509, sys - -for file in sys.argv[1:]: - cert = rpki.x509.X509(Auto_file = file) - print cert.gSKI(), cert.hSKI(), file diff --git a/scripts/guess-roas-from-routeviews.py b/scripts/guess-roas-from-routeviews.py deleted file mode 100644 index d8fb9c4c..00000000 --- a/scripts/guess-roas-from-routeviews.py +++ /dev/null @@ -1,63 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Pull RFC 3779 resources from a cert, attempt to mine routeviews (via -DNS, using the dnspython toolkit) for what the ROAs might look like -for the addresses found in the cert. - -This doesn't handle IPv6, because neither, apparently, does the -routeviews DNS interface. Oh well. - -NB: this is wild-assed guessing at best. Even if the routeviews data -were signed, which it is not, you have no particular reason to believe -it. Do not use output of this script production. Sanity check. -Beware of dog. 
If you issue ROAs using this script and your wallpaper -peels, your cat explodes, or your children turn blue, it's your own -fault for using this script. You have been warned. -""" - -import sys -import dns.resolver -import rpki.x509 -from rpki.ipaddrs import v4addr -from rpki.resource_set import roa_prefix_ipv4, resource_set_ipv4, resource_range_ipv4 - -roas = [] - -for filename in sys.argv[1:]: - resources = rpki.x509.X509(Auto_file = filename).get_3779resources().v4 - - while resources: - labels = str(resources[0].min).split(".") - labels.reverse() - - try: - for answer in dns.resolver.query(".".join(labels) + ".asn.routeviews.org", "txt"): - asn, prefix, prefixlen = answer.strings - roa_prefix = roa_prefix_ipv4(v4addr(prefix), long(prefixlen)) - roa = "%s\t%s\t%s" % (roa_prefix, long(asn), filename) - if roa not in roas: - roas.append(roa) - resources = resources.difference(resource_set_ipv4([roa_prefix.to_resource_range()])) - - except dns.resolver.NXDOMAIN: - resources = resources.difference(resource_set_ipv4([resource_range_ipv4(resources[0].min, v4addr(resources[0].min + 256))])) - -roas.sort() - -for roa in roas: - print roa diff --git a/scripts/iana-to-csv.py b/scripts/iana-to-csv.py deleted file mode 100644 index f803a21e..00000000 --- a/scripts/iana-to-csv.py +++ /dev/null @@ -1,85 +0,0 @@ -# $Id$ -# -# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Parse IANA XML data. -""" - -import sys -import lxml.etree -from rpki.csv_utils import csv_reader, csv_writer -from rpki.resource_set import resource_bag - -def iterate_xml(filename, tag): - return lxml.etree.parse(filename).getroot().getiterator(tag) - -def ns(tag): - return "{http://www.iana.org/assignments}" + tag - -tag_description = ns("description") -tag_designation = ns("designation") -tag_record = ns("record") -tag_number = ns("number") -tag_prefix = ns("prefix") -tag_status = ns("status") - -handles = {} -rirs = { "legacy" : resource_bag() } - -for rir in ("AfriNIC", "APNIC", "ARIN", "LACNIC", "RIPE NCC"): - handle = rir.split()[0].lower() - handles[rir] = handles["Assigned by %s" % rir] = handles["Administered by %s" % rir] = handle - rirs[handle] = resource_bag() - -asns = csv_writer("asns.csv") -prefixes = csv_writer("prefixes.csv") - -for record in iterate_xml("as-numbers.xml", tag_record): - description = record.findtext(tag_description) - if description in handles: - asns.writerow((handles[description], record.findtext(tag_number))) - -for record in iterate_xml("ipv4-address-space.xml", tag_record): - designation = record.findtext(tag_designation) - if record.findtext(tag_status) != "RESERVED": - prefix, prefixlen = [int(i) for i in record.findtext(tag_prefix).split("/")] - if prefixlen != 8: - raise ValueError("%s violated /8 assumption" % record.findtext(tag_prefix)) - rirs[handles.get(designation, "legacy")] |= resource_bag.from_str("%d.0.0.0/8" % prefix) - -for record in iterate_xml("ipv6-unicast-address-assignments.xml", tag_record): - description = record.findtext(tag_description) - if record.findtext(tag_description) 
in handles: - rirs[handles[description]] |= resource_bag.from_str(record.findtext(tag_prefix)) - -erx = list(csv_reader("erx.csv")) -assert all(r in rirs for r, p in erx) - -erx_overrides = resource_bag.from_str(",".join(p for r, p in erx), allow_overlap = True) - -for rir in rirs: - if rir != "legacy": - rirs[rir] -= erx_overrides - rirs[rir] |= resource_bag.from_str(",".join(p for r, p in erx if r == rir), allow_overlap = True) - -for rir, bag in rirs.iteritems(): - for p in bag.v4: - prefixes.writerow((rir, p)) - for p in bag.v6: - prefixes.writerow((rir, p)) - -asns.close() -prefixes.close() diff --git a/scripts/missing-oids.py b/scripts/missing-oids.py deleted file mode 100644 index 16316eac..00000000 --- a/scripts/missing-oids.py +++ /dev/null @@ -1,38 +0,0 @@ -# $Id$ -# -# Copyright (C) 2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Figure out what OIDs from rpki.oids are missing from dumpasn1's database. 
-""" - -import rpki.POW.pkix, rpki.oids - -need_header = True - -for oid, name in rpki.oids.oid2name.items(): - try: - rpki.POW.pkix.oid2obj(oid) - except: - o = rpki.POW.pkix.Oid() - o.set(oid) - if need_header: - print - print "# Local additions" - need_header = False - print - print "OID =", " ".join(("%02X" % ord(c)) for c in o.toString()) - print "Comment = RPKI project" - print "Description =", name, "(" + " ".join((str(i) for i in oid)) + ")" diff --git a/scripts/object-dates.py b/scripts/object-dates.py deleted file mode 100644 index b99441d6..00000000 --- a/scripts/object-dates.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python -# $Id$ - -# Extract notBefore, notAfter, thisUpdate and nextUpdate dates from -# RPKI objects. - -# Copyright (C) 2013--2014 Dragon Research Labs ("DRL") -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -import sys -import os.path -import rpki.POW - -extract_flags = (rpki.POW.CMS_NOCRL | - rpki.POW.CMS_NO_SIGNER_CERT_VERIFY | - rpki.POW.CMS_NO_ATTR_VERIFY | - rpki.POW.CMS_NO_CONTENT_VERIFY) - -def get_mft(fn): - cms = rpki.POW.Manifest.derReadFile(fn) - cms.verify(rpki.POW.X509Store(), None, extract_flags) - return cms, cms.certs()[0] - -def get_roa(fn): - return None, rpki.POW.CMS.derReadFile(fn).certs()[0] - -def get_gbr(fn): - return None, rpki.POW.CMS.derReadFile(fn).certs()[0] - -def get_crl(fn): - return rpki.POW.CRL.derReadFile(fn), None - -def get_cer(fn): - return None, rpki.POW.X509.derReadFile(fn) - -dispatch = dict(mft = get_mft, - roa = get_roa, - gbr = get_gbr, - crl = get_crl, - cer = get_cer) - -for fn in sys.argv[1:]: - obj, cer = dispatch[os.path.splitext(fn)[1][1:]](fn) - print fn - if cer is not None: - print "notBefore: ", cer.getNotBefore() - if obj is not None: - print "thisUpdate:", obj.getThisUpdate() - print "nextUpdate:", obj.getNextUpdate() - if cer is not None: - print "notAfter: ", cer.getNotAfter() - print diff --git a/scripts/pcap-to-xml.sh b/scripts/pcap-to-xml.sh deleted file mode 100644 index 73c30880..00000000 --- a/scripts/pcap-to-xml.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh - -# $Id$ -# -# Copyright (C) 2011 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -# Zeroeth cut at a packet decoder for RPKI up-down / left-right / -# publication traffic captured off the wire. Needs work, not suitable -# for general use, depends on a bunch of external programs that I -# happen to have installed...but has been useful to me. - -for p in *.pcap -do - tcptrace -e $p - for i in *.dat - do - j=${i%_contents.dat} - sed '1,/^ $/d' $i >$j.der - openssl cms -verify -noverify -inform DER -in $j.der | xmlindent > $j.xml - k=$(dumpasn1 -a $j.der 2>/dev/null | awk 'BEGIN {FS = "[ \t/:]+"} /signingTime/ {nr = NR + 2} NR == nr {print $6 "-" $5 "-" $4 "T" $7 ":" $8 ":" $9 "Z"}') - mv $j.der $k.$j.der - mv $j.xml $k.$j.xml - rm $i - done -done diff --git a/scripts/print-profile.py b/scripts/print-profile.py deleted file mode 100644 index 081d2602..00000000 --- a/scripts/print-profile.py +++ /dev/null @@ -1,20 +0,0 @@ -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium, Inc. ("ISC") -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -import pstats, glob - -for f in glob.iglob("*.prof"): - pstats.Stats(f).sort_stats("time").print_stats(50) diff --git a/scripts/rcynic-diff.py b/scripts/rcynic-diff.py deleted file mode 100644 index 327a7b71..00000000 --- a/scripts/rcynic-diff.py +++ /dev/null @@ -1,114 +0,0 @@ -# $Id$ -# -# Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Diff a series of rcynic.xml files, sort of. 
-""" - -import sys - -try: - from lxml.etree import ElementTree -except ImportError: - from xml.etree.ElementTree import ElementTree - -show_backup_generation = False -show_rsync_transfer = False - -class Object(object): - - def __init__(self, session, uri, generation): - self.session = session - self.uri = uri - self.generation = generation - self.labels = [] - - def add(self, label): - self.labels.append(label) - - def __cmp__(self, other): - return cmp(self.labels, other.labels) - -def show(old = None, new = None): - assert old is not None or new is not None - assert old is None or new is None or old.uri == new.uri - if old is None: - obj = new - labels = ["+" + label for label in new.labels] - elif new is None: - obj = old - labels = ["-" + label for label in old.labels] - else: - obj = new - labels = [] - for label in new.session.labels: - if label in new.labels and label in old.labels: - labels.append(label) - elif label in new.labels: - labels.append("+" + label) - elif label in old.labels: - labels.append("-" + label) - labels = " ".join(labels) - if show_backup_generation: - print " ", obj.uri, obj.generation, labels - else: - print " ", obj.uri, labels - -class Session(dict): - - def __init__(self, name): - self.name = name - tree = ElementTree(file = name) - self.labels = [elt.tag.strip() for elt in tree.find("labels")] - for elt in tree.findall("validation_status"): - generation = elt.get("generation") - status = elt.get("status") - uri = elt.text.strip() - if not show_rsync_transfer and status.startswith("rsync_transfer_"): - continue - if show_backup_generation: - key = (uri, generation) - elif generation == "backup": - continue - else: - key = uri - if key not in self: - self[key] = Object(self, uri, generation) - self[key].add(status) - -old_db = new_db = None - -for arg in sys.argv[1:]: - - old_db = new_db - new_db = Session(arg) - - if old_db is None: - continue - - only_old = set(old_db) - set(new_db) - only_new = set(new_db) - set(old_db) - 
changed = set(key for key in (set(old_db) & set(new_db)) if old_db[key] != new_db[key]) - - if only_old or changed or only_new: - print "Comparing", old_db.name, "with", new_db.name - for key in sorted(only_old): - show(old = old_db[key]) - for key in sorted(changed): - show(old = old_db[key], new = new_db[key]) - for key in sorted(only_new): - show(new = new_db[key]) - print diff --git a/scripts/rcynic-lta b/scripts/rcynic-lta deleted file mode 100755 index 4c55db92..00000000 --- a/scripts/rcynic-lta +++ /dev/null @@ -1,1055 +0,0 @@ -#!/usr/local/bin/python - -# $Id$ - -# Copyright (C) 2013 Dragon Research Labs ("DRL") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -######################################################################## -# -# DANGER WILL ROBINSON -# -# This is a PROTOTYPE of a local trust anchor mechanism. At the -# moment, it DOES NOT WORK by any sane standard of measurement. It -# produces output, but there is no particular reason to believe said -# output is useful, and fairly good reason to believe that it is not. -# -# With luck, this may eventually mutate into something useful. For -# now, just leave it alone unless you really know what you are doing, -# in which case, on your head be it. 
-# -# YOU HAVE BEEN WARNED -# -######################################################################## - -import os -import sys -import yaml -import glob -import time -import shutil -import base64 -import socket -import sqlite3 -import weakref -import rpki.POW -import rpki.x509 -import rpki.sundial -import rpki.resource_set - -# Teach SQLite3 about our data types. - -sqlite3.register_adapter(rpki.POW.IPAddress, - lambda x: buffer("_" + x.toBytes())) - -sqlite3.register_converter("RangeVal", - lambda s: long(s) if s.isdigit() else rpki.POW.IPAddress.fromBytes(s[1:])) - -sqlite3.register_adapter(rpki.x509.X501DN, str) - - -class main(object): - - tal_directory = None - constraints = None - rcynic_input = None - rcynic_output = None - tals = None - keyfile = None - - ltakey = None - ltacer = None - - ltauri = "rsync://localhost/lta" - ltasia = ltauri + "/" - ltaaia = ltauri + ".cer" - ltamft = ltauri + "/lta.mft" - ltacrl = ltauri + "/lta.crl" - - cer_delta = rpki.sundial.timedelta(days = 7) - crl_delta = rpki.sundial.timedelta(hours = 1) - - all_mentioned_resources = rpki.resource_set.resource_bag() - - - def __init__(self): - print "Parsing YAML" - self.parse_yaml() - print - print "Parsing TALs" - self.parse_tals() - print - print "Creating DB" - self.rpdb = RPDB(self.db_name) - print - print "Creating CA" - self.create_ca() - print - print "Loading DB" - self.rpdb.load(self.rcynic_input) - print - print "Processing adds and drops" - self.process_add_drop() - print - print "Processing deletions" - self.process_constraint_deletions() - print - print "Re-parenting TAs" - self.re_parent_tas() - print - print "Generating CRL and manifest" - self.generate_crl_and_manifest() - print - print "Committing final changes to DB" - self.rpdb.commit() - print - print "Dumping para-objects" - self.rpdb.dump_paras(self.rcynic_output) - print - print "Closing DB" - self.rpdb.close() - - - def create_ca(self): - self.serial = Serial() - self.ltakey = rpki.x509.RSA.generate(quiet = 
True) - cer = OutgoingX509.self_certify( - cn = "%s LTA Root Certificate" % socket.getfqdn(), - keypair = self.ltakey, - subject_key = self.ltakey.get_RSApublic(), - serial = self.serial(), - sia = (self.ltasia, self.ltamft, None), - notAfter = rpki.sundial.now() + self.cer_delta, - resources = rpki.resource_set.resource_bag.from_str("0-4294967295,0.0.0.0/0,::/0")) - subject_id = self.rpdb.find_keyname(cer.getSubject(), cer.get_SKI()) - self.rpdb.cur.execute("INSERT INTO outgoing (der, fn2, subject, issuer, uri, key) " - "VALUES (?, 'cer', ?, ?, ?, ?)", - (buffer(cer.get_DER()), subject_id, subject_id, self.ltaaia, - buffer(self.ltakey.get_DER()))) - self.ltacer = self.rpdb.find_outgoing_by_id(self.rpdb.cur.lastrowid) - - - def parse_yaml(self, fn = "rcynic-lta.yaml"): - y = yaml.safe_load(open(fn, "r")) - self.db_name = y["db-name"] - self.tal_directory = y["tal-directory"] - self.rcynic_input = y["rcynic-input"] - self.rcynic_output = y["rcynic-output"] - self.keyfile = y["keyfile"] - self.constraints = [Constraint(yc) for yc in y["constraints"]] - - - def parse_tals(self): - self.tals = {} - for fn in glob.iglob(os.path.join(self.tal_directory, "*.tal")): - with open(fn, "r") as f: - uri = f.readline().strip() - key = rpki.POW.Asymmetric.derReadPublic(base64.b64decode(f.read())) - self.tals[uri] = key - - - @staticmethod - def show_candidates(constraint, candidates): - print - print "Constraint:", repr(constraint) - print "Resources: ", constraint.mentioned_resources - for i, candidate in enumerate(candidates): - print " Candidate #%d id %d depth %d name %s uri %s" % ( - i, candidate.rowid, - candidate.depth, - candidate.subject_name, - candidate.uri) - if constraint.mentioned_resources <= candidate.resources: - print " Matched" - #print " Constraint resources:", constraint.mentioned_resources - #print " Candidate resources: ", candidate.resources - break - else: - print " No match" - - - def process_add_drop(self): - # - # We probably need to create the output 
root before running this, - # otherwise there's a chance that an "add" constraint will yield - # no viable candidate parent. Not likely to happen with current - # test setup where several of our roots claim 0/0. - # - for constraint in self.constraints: - candidates = self.rpdb.find_by_resource_bag(constraint.mentioned_resources) - candidates.sort(reverse = True, key = lambda candidate: candidate.depth) - #self.show_candidates(constraint, candidates) - constraint.drop(candidates) - constraint.add(candidates) - - - def process_constraint_deletions(self): - for obj in self.rpdb.find_by_resource_bag(self.all_mentioned_resources): - self.add_para(obj, obj.resources - self.all_mentioned_resources) - - - def re_parent_tas(self): - for uri, key in self.tals.iteritems(): - for ta in self.rpdb.find_by_ski_or_uri(key.calculateSKI(), uri): - if ta.para_obj is None: - self.add_para(ta, ta.resources - self.all_mentioned_resources) - - - def add_para(self, obj, resources): - return self.rpdb.add_para( - obj = obj, - resources = resources, - serial = self.serial, - ltacer = self.ltacer, - ltasia = self.ltasia, - ltaaia = self.ltaaia, - ltamft = self.ltamft, - ltacrl = self.ltacrl, - ltakey = self.ltakey) - - - def generate_crl_and_manifest(self): - thisUpdate = rpki.sundial.now() - nextUpdate = thisUpdate + self.crl_delta - serial = self.serial() - issuer = self.ltacer.getSubject() - aki = buffer(self.ltacer.get_SKI()) - - crl = OutgoingCRL.generate( - keypair = self.ltakey, - issuer = self.ltacer, - serial = serial, - thisUpdate = thisUpdate, - nextUpdate = nextUpdate, - revokedCertificates = ()) - - issuer_id = self.rpdb.find_keyname(issuer, aki) - - self.rpdb.cur.execute("INSERT INTO outgoing (der, fn2, subject, issuer, uri) " - "VALUES (?, 'crl', NULL, ?, ?)", - (buffer(crl.get_DER()), issuer_id, self.ltacrl)) - crl = self.rpdb.find_outgoing_by_id(self.rpdb.cur.lastrowid) - - key = rpki.x509.RSA.generate(quiet = True) - - cer = self.ltacer.issue( - keypair = self.ltakey, - 
subject_key = key.get_RSApublic(), - serial = serial, - sia = (None, None, self.ltamft), - aia = self.ltaaia, - crldp = self.ltacrl, - resources = rpki.resource_set.resource_bag.from_inheritance(), - notAfter = self.ltacer.getNotAfter(), - is_ca = False) - - # Temporary kludge, need more general solution but that requires - # more refactoring than I feel like doing this late in the day. - # - names_and_objs = [(uri, OutgoingObject.create(fn2 = fn2, der = der, uri = uri, - rpdb = None, rowid = None, - subject_id = None, issuer_id = None)) - for fn2, der, uri in - self.rpdb.cur.execute("SELECT fn2, der, uri FROM outgoing WHERE issuer = ?", - (self.ltacer.rowid,))] - - mft = OutgoingSignedManifest.build( - serial = serial, - thisUpdate = thisUpdate, - nextUpdate = nextUpdate, - names_and_objs = names_and_objs, - keypair = key, - certs = cer) - - subject_id = self.rpdb.find_keyname(cer.getSubject(), cer.get_SKI()) - - self.rpdb.cur.execute("INSERT INTO outgoing (der, fn2, subject, issuer, uri, key) " - "VALUES (?, 'mft', ?, ?, ?, ?)", - (buffer(mft.get_DER()), subject_id, issuer_id, self.ltamft, buffer(key.get_DER()))) - - - @staticmethod - def parse_xki(s): - """ - Parse text form of an SKI or AKI. We accept two encodings: - colon-delimited hexadecimal, and URL-safe Base64. The former is - what OpenSSL prints in its text representation of SKI and AKI - extensions; the latter is the g(SKI) value that some RPKI CA engines - (including rpkid) use when constructing filenames. - - In either case, we check that the decoded result contains the right - number of octets to be a SHA-1 hash. 
- """ - - if ":" in s: - b = "".join(chr(int(c, 16)) for c in s.split(":")) - else: - b = base64.urlsafe_b64decode(s + ("=" * (4 - len(s) % 4))) - if len(b) != 20: - raise RuntimeError("Bad length for SHA1 xKI value: %r" % s) - return b - - - -class Serial(object): - - def __init__(self): - self.value = long(time.time()) << 32 - - def __call__(self): - self.value += 1 - return self.value - - -class ConstrainedObject(object): - # I keep expecting the classes derived from this to have some common - # methods, but so far it hasn't happened. Clean up eventually if not. - pass - -class ConstrainedROA(ConstrainedObject): - - def __init__(self, constraint, y): - self.constraint = constraint - self.asn = long(y["asn"]) if y is not None else None - self.maxlen = long(y["maxlen"]) if y is not None and "maxlen" in y else None - - def drop(self, candidates): - for candidate in candidates: - if isinstance(candidate, IncomingROA) and \ - self.constraint.mentioned_resources == candidate.resources and \ - (self.asn is None or self.asn == candidate.get_POW().getASID()): - print "Dropping ROA %r" % candidate - candidate.disposition = "delete" - - def add(self, candidates): - assert self.asn is not None - for candidate in candidates: - if isinstance(candidate, IncomingX509) and self.constraint.mentioned_resources <= candidate.resources: - print "Should add ROA %s %s\nunder candidate %s (depth %s resources %s)" % ( - self.asn, self.constraint.prefixes, candidate.subject_name, candidate.depth, candidate.resources) - break - -class ConstrainedGBR(ConstrainedObject): - - def __init__(self, constraint, y): - self.constraint = constraint - self.vcard = y - - def drop(self, candidates): - for candidate in candidates: - if isinstance(candidate, IncomingX509) and self.constraint.mentioned_resources == candidate.resources: - print "Dropping GBRs directly under %r" % candidate - for gbr in candidate.find_children("gbr"): - print "Dropping GBR %r" % gbr - gbr.disposition = "delete" - - def 
add(self, candidates): - assert self.vcard is not None - for candidate in candidates: - if isinstance(candidate, IncomingX509) and self.constraint.mentioned_resources <= candidate.resources: - print "Should add GBR\n%s\nunder candidate %s (depth %s resources %s)" % ( - "\n".join((" " * 4) + line for line in self.vcard.splitlines()), - candidate.subject_name, candidate.depth, candidate.resources) - break - -class ConstrainedRTR(ConstrainedObject): - - def __init__(self, constraint, y): - self.constraint = constraint - self.key = y["key"] if y is not None else None - self.subject = y["subject"] if y is not None else None - - def add(self, candidates): - raise NotImplementedError - - def drop(self, candidates): - for candidate in candidates: - if isinstance(candidate, IncomingX509) and not candidate.is_ca and \ - self.constraint.mentioned_resources == candidate.resources and \ - (self.subject is None or candidate.getSubject() == self.subject): - print "Dropping RTR certificate %r" % candidate - candidate.disposition = "delete" - -class Constraint(object): - - dispatch = dict(roa = ConstrainedROA, - gbr = ConstrainedGBR, - rtr = ConstrainedRTR) - - def __init__(self, y): - self.y = y # Mostly for debugging. I think. 
- self.prefixes = rpki.resource_set.resource_bag.from_str(str(y.get("prefix", ""))) - self.asns = rpki.resource_set.resource_bag.from_str(str(y.get("asn", ""))) - self.init_drops(y.get("drop", ())) - self.init_adds( y.get("add", ())) - - def init_drops(self, drops): - if drops == "all": - self.drops = tuple(d(self, None) for d in self.dispatch.itervalues()) - else: - dd = [] - for d in (drops if isinstance(drops, (list, tuple)) else [drops]): - if isinstance(d, str): - dd.append(self.dispatch[d[:-1]](self, None)) - elif isinstance(d, dict) and len(d) == 1: - dd.append(self.dispatch[d.keys()[0]](self, d.values()[0])) - else: - raise ValueError("Unexpected drop clause " + repr(drops)) - self.drops = tuple(dd) - - def init_adds(self, adds): - if not all(isinstance(a, dict) and len(a) == 1 for a in adds): - raise ValueError("Expected list of single-entry mappings, got " + repr(adds)) - self.adds = tuple(self.dispatch[a.keys()[0]](self, a.values()[0]) for a in adds) - - def drop(self, candidates): - for d in self.drops: - d.drop(candidates) - - def add(self, candidates): - for a in self.adds: - a.add(candidates) - - def __repr__(self): - return "<%s:%s %r>" % (self.__class__.__module__, self.__class__.__name__, self.y) - - @property - def mentioned_resources(self): - return self.prefixes | self.asns - - -class BaseObject(object): - """ - Mixin to add some SQL-related methods to classes derived from - rpki.x509.DER_object. 
- """ - - _rpdb = None - _rowid = None - _fn2 = None - _fn2map = None - _uri = None - _subject_id = None - _issuer_id = None - - @property - def rowid(self): - return self._rowid - - @property - def para_resources(self): - return self.resources if self.para_obj is None else self.para_obj.resources - - @property - def fn2(self): - return self._fn2 - - @property - def uri(self): - return self._uri - - @classmethod - def setfn2map(cls, **map): - cls._fn2map = map - for k, v in map.iteritems(): - v._fn2 = k - - @classmethod - def create(cls, rpdb, rowid, fn2, der, uri, subject_id, issuer_id): - self = cls._fn2map[fn2]() - if der is not None: - self.set(DER = der) - self._rpdb = rpdb - self._rowid = rowid - self._uri = uri - self._subject_id = subject_id - self._issuer_id = issuer_id - return self - - @property - def subject_id(self): - return self._subject_id - - @property - def subject_name(self): - return self._rpdb.find_keyname_by_id(self._subject_id)[0] - - @property - def issuer_id(self): - return self._issuer_id - - @property - def issuer_name(self): - return self._rpdb.find_keyname_by_id(self._subject_id)[0] - - -class IncomingObject(BaseObject): - - _depth = None - _is_ca = False - _disposition = None - - @property - def para_obj(self): - if getattr(self, "_para_id", None) is None: - self._rpdb.cur.execute("SELECT replacement FROM incoming WHERE id = ?", (self.rowid,)) - self._para_id = self._rpdb.cur.fetchone()[0] - return self._rpdb.find_outgoing_by_id(self._para_id) - - @para_obj.setter - def para_obj(self, value): - if value is None: - self._rpdb.cur.execute("DELETE FROM outgoing WHERE id IN (SELECT replacement FROM incoming WHERE id = ?)", - (self.rowid,)) - try: - del self._para_id - except AttributeError: - pass - else: - assert isinstance(value.rowid, int) - self._rpdb.cur.execute("UPDATE incoming SET replacement = ? 
WHERE id = ?", (value.rowid, self.rowid)) - self._para_id = value.rowid - - @property - def disposition(self): - if self._disposition is None: - self._disposition = self._rpdb.cur.execute("SELECT disposition FROM incoming " - "WHERE id = ?", (self.rowid,)).fetchone()[0] - return self._disposition - - @disposition.setter - def disposition(self, value): - self._rpdb.cur.execute("UPDATE incoming SET disposition = ? WHERE id = ?", (value, self.rowid)) - self._disposition = value - - @classmethod - def fromFile(cls, fn): - return cls._fn2map[os.path.splitext(fn)[1][1:]](DER_file = fn) - - @classmethod - def create(cls, rpdb, rowid, fn2, der, uri, subject_id, issuer_id, depth = None, is_ca = False): - assert der is not None - self = super(IncomingObject, cls).create(rpdb, rowid, fn2, der, uri, subject_id, issuer_id) - self._depth = depth - self._is_ca = is_ca - return self - - @property - def depth(self): - return self._depth - - @property - def is_ca(self): - return self._is_ca - - @property - def issuer(self): - if self._issuer_id is None or self._issuer_id == self._subject_id: - return None - return self._rpdb.find_incoming_by_id(self._issuer_id) - - -class OutgoingObject(BaseObject): - - @property - def orig_obj(self): - if getattr(self, "_orig_id", None) is None: - self._rpdb.cur.execute("SELECT id FROM incoming WHERE replacement = ?", (self.rowid,)) - r = self._rpdb.cur.fetchone() - self._orig_id = None if r is None else r[0] - return self._rpdb.find_incoming_by_id(self._orig_id) - - -class BaseX509(rpki.x509.X509): - - @property - def resources(self): - r = self.get_3779resources() - r.valid_until = None - return r - - def find_children(self, fn2 = None): - return self._rpdb._find_results(fn2, "WHERE issuer = ?", [self.subject_id]) - - -class BaseCRL(rpki.x509.CRL): - - @property - def resources(self): - return None - - -class CommonCMS(object): - - @property - def resources(self): - r = rpki.x509.X509(POW = self.get_POW().certs()[0]).get_3779resources() - 
r.valid_until = None - return r - - -class BaseSignedManifest (rpki.x509.SignedManifest, CommonCMS): pass -class BaseROA (rpki.x509.ROA, CommonCMS): pass -class BaseGhostbuster (rpki.x509.Ghostbuster, CommonCMS): pass - -class IncomingX509 (BaseX509, IncomingObject): pass -class IncomingCRL (BaseCRL, IncomingObject): pass -class IncomingSignedManifest (BaseSignedManifest, IncomingObject): pass -class IncomingROA (BaseROA, IncomingObject): pass -class IncomingGhostbuster (BaseGhostbuster, IncomingObject): pass - -class OutgoingX509 (BaseX509, OutgoingObject): pass -class OutgoingCRL (BaseCRL, OutgoingObject): pass -class OutgoingSignedManifest (BaseSignedManifest, OutgoingObject): pass -class OutgoingROA (BaseROA, OutgoingObject): pass -class OutgoingGhostbuster (BaseGhostbuster, OutgoingObject): pass - -IncomingObject.setfn2map(cer = IncomingX509, - crl = IncomingCRL, - mft = IncomingSignedManifest, - roa = IncomingROA, - gbr = IncomingGhostbuster) - -OutgoingObject.setfn2map(cer = OutgoingX509, - crl = OutgoingCRL, - mft = OutgoingSignedManifest, - roa = OutgoingROA, - gbr = OutgoingGhostbuster) - - -class RPDB(object): - """ - Relying party database. 
- """ - - def __init__(self, db_name): - - try: - os.unlink(db_name) - except: - pass - - self.db = sqlite3.connect(db_name, detect_types = sqlite3.PARSE_DECLTYPES) - self.db.text_factory = str - self.cur = self.db.cursor() - - self.incoming_cache = weakref.WeakValueDictionary() - self.outgoing_cache = weakref.WeakValueDictionary() - - self.cur.executescript(''' - PRAGMA foreign_keys = on; - - CREATE TABLE keyname ( - id INTEGER PRIMARY KEY NOT NULL, - name TEXT NOT NULL, - keyid BLOB NOT NULL, - UNIQUE (name, keyid)); - - CREATE TABLE incoming ( - id INTEGER PRIMARY KEY NOT NULL, - der BLOB NOT NULL, - fn2 TEXT NOT NULL - CHECK (fn2 IN ('cer', 'crl', 'mft', 'roa', 'gbr')), - uri TEXT NOT NULL, - depth INTEGER, - is_ca BOOLEAN NOT NULL DEFAULT 0, - disposition TEXT NOT NULL - DEFAULT 'keep' - CHECK (disposition IN ('keep', 'delete', 'replace')), - subject INTEGER - REFERENCES keyname(id) - ON DELETE RESTRICT - ON UPDATE RESTRICT, - issuer INTEGER NOT NULL - REFERENCES keyname(id) - ON DELETE RESTRICT - ON UPDATE RESTRICT, - replacement INTEGER - REFERENCES outgoing(id) - ON DELETE SET NULL - ON UPDATE SET NULL, - UNIQUE (der), - UNIQUE (subject, issuer), - CHECK ((subject IS NULL) == (fn2 == 'crl'))); - - CREATE TABLE outgoing ( - id INTEGER PRIMARY KEY NOT NULL, - der BLOB, - key BLOB, - fn2 TEXT NOT NULL - CHECK (fn2 IN ('cer', 'crl', 'mft', 'roa', 'gbr')), - uri TEXT NOT NULL, - subject INTEGER - REFERENCES keyname(id) - ON DELETE RESTRICT - ON UPDATE RESTRICT, - issuer INTEGER NOT NULL - REFERENCES keyname(id) - ON DELETE RESTRICT - ON UPDATE RESTRICT, - UNIQUE (subject, issuer), - CHECK ((key IS NULL) == (fn2 == 'crl')), - CHECK ((subject IS NULL) == (fn2 == 'crl'))); - - CREATE TABLE range ( - id INTEGER NOT NULL - REFERENCES incoming(id) - ON DELETE CASCADE - ON UPDATE CASCADE, - min RangeVal NOT NULL, - max RangeVal NOT NULL, - UNIQUE (id, min, max)); - - ''') - - - def load(self, rcynic_input, spinner = 100): - - start = rpki.sundial.now() - nobj = 0 - - 
for root, dirs, files in os.walk(rcynic_input): - for fn in files: - fn = os.path.join(root, fn) - - try: - obj = IncomingObject.fromFile(fn) - except: - if spinner: - sys.stderr.write("\r") - sys.stderr.write("Couldn't read %s, skipping\n" % fn) - continue - - if spinner and nobj % spinner == 0: - sys.stderr.write("\r%s %d %s..." % ("|\\-/"[(nobj/spinner) & 3], nobj, rpki.sundial.now() - start)) - - nobj += 1 - - if obj.fn2 == "crl": - ski = None - aki = buffer(obj.get_AKI()) - cer = None - bag = None - issuer = obj.getIssuer() - subject = None - is_ca = False - - else: - if obj.fn2 == "cer": - cer = obj - else: - cer = rpki.x509.X509(POW = obj.get_POW().certs()[0]) - issuer = cer.getIssuer() - subject = cer.getSubject() - ski = buffer(cer.get_SKI()) - aki = cer.get_AKI() - if aki is None: - assert subject == issuer - aki = ski - else: - aki = buffer(aki) - bag = cer.get_3779resources() - is_ca = cer.is_CA() - - der = buffer(obj.get_DER()) - uri = "rsync://" + fn[len(rcynic_input) + 1:] - - self.cur.execute("SELECT id FROM incoming WHERE der = ?", (der,)) - r = self.cur.fetchone() - - if r is not None: - rowid = r[0] - - else: - subject_id = None if ski is None else self.find_keyname(subject, ski) - issuer_id = self.find_keyname(issuer, aki) - - self.cur.execute("INSERT INTO incoming (der, fn2, subject, issuer, uri, is_ca) " - "VALUES (?, ?, ?, ?, ?, ?)", - (der, obj.fn2, subject_id, issuer_id, uri, is_ca)) - rowid = self.cur.lastrowid - - if bag is not None: - for rset in (bag.asn, bag.v4, bag.v6): - if rset is not None: - self.cur.executemany("REPLACE INTO range (id, min, max) VALUES (?, ?, ?)", - ((rowid, i.min, i.max) for i in rset)) - - if spinner: - sys.stderr.write("\r= %d objects in %s.\n" % (nobj, rpki.sundial.now() - start)) - - self.cur.execute("UPDATE incoming SET depth = 0 WHERE subject = issuer") - - for depth in xrange(1, 500): - - self.cur.execute("SELECT COUNT(*) FROM incoming WHERE depth IS NULL") - if self.cur.fetchone()[0] == 0: - break - - if 
spinner: - sys.stderr.write("\rSetting depth %d..." % depth) - - self.cur.execute(""" - UPDATE incoming SET depth = ? - WHERE depth IS NULL - AND issuer IN (SELECT subject FROM incoming WHERE depth = ?) - """, - (depth, depth - 1)) - - else: - if spinner: - sys.stderr.write("\rSetting depth %d is absurd, giving up, " % depth) - - if spinner: - sys.stderr.write("\nCommitting...") - - self.db.commit() - - if spinner: - sys.stderr.write("done.\n") - - - def add_para(self, obj, resources, serial, ltacer, ltasia, ltaaia, ltamft, ltacrl, ltakey): - - assert isinstance(obj, IncomingX509) - - if obj.para_obj is not None: - resources &= obj.para_obj.resources - - obj.para_obj = None - - if not resources: - return - - pow = obj.get_POW() - - x = rpki.POW.X509() - - x.setVersion( pow.getVersion()) - x.setSubject( pow.getSubject()) - x.setNotBefore( pow.getNotBefore()) - x.setNotAfter( pow.getNotAfter()) - x.setPublicKey( pow.getPublicKey()) - x.setSKI( pow.getSKI()) - x.setBasicConstraints( pow.getBasicConstraints()) - x.setKeyUsage( pow.getKeyUsage()) - x.setCertificatePolicies( pow.getCertificatePolicies()) - x.setSIA( *pow.getSIA()) - - x.setIssuer( ltacer.get_POW().getIssuer()) - x.setAKI( ltacer.get_POW().getSKI()) - x.setAIA( (ltaaia,)) - x.setCRLDP( (ltacrl,)) - - x.setSerial( serial()) - x.setRFC3779( - asn = ((r.min, r.max) for r in resources.asn), - ipv4 = ((r.min, r.max) for r in resources.v4), - ipv6 = ((r.min, r.max) for r in resources.v6)) - - x.sign(ltakey.get_POW(), rpki.POW.SHA256_DIGEST) - cer = OutgoingX509(POW = x) - - ski = buffer(cer.get_SKI()) - aki = buffer(cer.get_AKI()) - bag = cer.get_3779resources() - issuer = cer.getIssuer() - subject = cer.getSubject() - der = buffer(cer.get_DER()) - uri = ltasia + cer.gSKI() + ".cer" - - # This will want to change when we start generating replacement keys for everything. - # This should really be a keypair, not just a public key, same comment. 
- # - key = buffer(pow.getPublicKey().derWritePublic()) - - subject_id = self.find_keyname(subject, ski) - issuer_id = self.find_keyname(issuer, aki) - - self.cur.execute("INSERT INTO outgoing (der, fn2, subject, issuer, uri, key) " - "VALUES (?, 'cer', ?, ?, ?, ?)", - (der, subject_id, issuer_id, uri, key)) - rowid = self.cur.lastrowid - self.cur.execute("UPDATE incoming SET replacement = ? WHERE id = ?", - (rowid, obj.rowid)) - - # Fix up _orig_id and _para_id here? Maybe later. - - #self.db.commit() - - - def dump_paras(self, rcynic_output): - shutil.rmtree(rcynic_output, ignore_errors = True) - rsync = "rsync://" - for der, uri in self.cur.execute("SELECT der, uri FROM outgoing"): - assert uri.startswith(rsync) - fn = os.path.join(rcynic_output, uri[len(rsync):]) - dn = os.path.dirname(fn) - if not os.path.exists(dn): - os.makedirs(dn) - with open(fn, "wb") as f: - #print ">> Writing", f.name - f.write(der) - - - def find_keyname(self, name, keyid): - keys = (name, buffer(keyid)) - self.cur.execute("SELECT id FROM keyname WHERE name = ? 
AND keyid = ?", keys) - result = self.cur.fetchone() - if result is None: - self.cur.execute("INSERT INTO keyname (name, keyid) VALUES (?, ?)", keys) - result = self.cur.lastrowid - else: - result = result[0] - return result - - - def find_keyname_by_id(self, rowid): - self.cur.execute("SELECT name, keyid FROM keyname WHERE id = ?", (rowid,)) - result = self.cur.fetchone() - return (None, None) if result is None else result - - - def find_incoming_by_id(self, rowid): - if rowid is None: - return None - if rowid in self.incoming_cache: - return self.incoming_cache[rowid] - r = self._find_results(None, "WHERE id = ?", [rowid]) - assert len(r) < 2 - return r[0] if r else None - - - def find_outgoing_by_id(self, rowid): - if rowid is None: - return None - if rowid in self.outgoing_cache: - return self.outgoing_cache[rowid] - self.cur.execute("SELECT fn2, der, key, uri, subject, issuer FROM outgoing WHERE id = ?", (rowid,)) - r = self.cur.fetchone() - if r is None: - return None - fn2, der, key, uri, subject_id, issuer_id = r - obj = OutgoingObject.create(rpdb = self, rowid = rowid, fn2 = fn2, der = der, uri = uri, - subject_id = subject_id, issuer_id = issuer_id) - self.outgoing_cache[rowid] = obj - return obj - - - def find_by_ski_or_uri(self, ski, uri): - if not ski and not uri: - return [] - j = "" - w = [] - a = [] - if ski: - j = "JOIN keyname ON incoming.subject = keyname.id" - w.append("keyname.keyid = ?") - a.append(buffer(ski)) - if uri: - w.append("incoming.uri = ?") - a.append(uri) - return self._find_results(None, "%s WHERE %s" % (j, " AND ".join(w)), a) - - - # It's easiest to understand overlap conditions by understanding - # non-overlap then inverting and and applying De Morgan's law. - # Ranges A and B do not overlap if: A.min > B.max or B.min > A.max; - # therefore A and B do overlap if: A.min <= B.max and B.min <= A.max. 
- - def find_by_range(self, range_min, range_max = None, fn2 = None): - if range_max is None: - range_max = range_min - if isinstance(range_min, (str, unicode)): - range_min = long(range_min) if range_min.isdigit() else rpki.POW.IPAddress(range_min) - if isinstance(range_max, (str, unicode)): - range_max = long(range_max) if range_max.isdigit() else rpki.POW.IPAddress(range_max) - assert isinstance(range_min, (int, long, rpki.POW.IPAddress)) - assert isinstance(range_max, (int, long, rpki.POW.IPAddress)) - return self._find_results(fn2, - "JOIN range ON incoming.id = range.id " - "WHERE ? <= range.max AND ? >= range.min", - [range_min, range_max]) - - - def find_by_resource_bag(self, bag, fn2 = None): - assert bag.asn or bag.v4 or bag.v6 - qset = [] - aset = [] - for rset in (bag.asn, bag.v4, bag.v6): - if rset: - for r in rset: - qset.append("(? <= max AND ? >= min)") - aset.append(r.min) - aset.append(r.max) - return self._find_results( - fn2, - """ - JOIN range ON incoming.id = range.id - WHERE - """ + (" OR ".join(qset)), - aset) - - - def _find_results(self, fn2, query, args = None): - if args is None: - args = [] - if fn2 is not None: - query += " AND fn2 = ?" 
- args.append(fn2) - results = [] - for rowid, fn2, der, uri, subject_id, issuer_id, depth, is_ca in self.cur.execute( - ''' - SELECT DISTINCT - incoming.id, incoming.fn2, - incoming.der, incoming.uri, - incoming.subject, incoming.issuer, - incoming.depth, incoming.is_ca - FROM incoming - ''' + query, args): - if rowid in self.incoming_cache: - obj = self.incoming_cache[rowid] - assert obj.rowid == rowid - else: - obj = IncomingObject.create(rpdb = self, rowid = rowid, fn2 = fn2, der = der, uri = uri, - subject_id = subject_id, issuer_id = issuer_id, depth = depth, - is_ca = is_ca) - self.incoming_cache[rowid] = obj - results.append(obj) - return results - - - def commit(self): - self.db.commit() - - - def close(self): - self.commit() - self.cur.close() - self.db.close() - -if __name__ == "__main__": - #profile = None - profile = "rcynic-lta.prof" - if profile: - import cProfile - prof = cProfile.Profile() - try: - prof.runcall(main) - finally: - prof.dump_stats(profile) - sys.stderr.write("Dumped profile data to %s\n" % profile) - else: - main() - diff --git a/scripts/rcynic-lta.yaml b/scripts/rcynic-lta.yaml deleted file mode 100644 index ab17a56c..00000000 --- a/scripts/rcynic-lta.yaml +++ /dev/null @@ -1,69 +0,0 @@ -db-name: - /u/sra/rpki/subvert-rpki.hactrn.net/trunk/scripts/rcynic-lta.db - -rcynic-input: - /u/sra/rpki/subvert-rpki.hactrn.net/trunk/rcynic/rcynic-data/authenticated - -rcynic-output: - /u/sra/rpki/subvert-rpki.hactrn.net/trunk/rcynic/rcynic-data/lta-unauthenticated - -tal-directory: - /u/sra/rpki/subvert-rpki.hactrn.net/trunk/rcynic/sample-trust-anchors - -keyfile: - /u/sra/rpki/subvert-rpki.hactrn.net/trunk/scripts/rcynic-lta.key - -common: - - - &VCARD-1 | - BEGIN:VCARD - VERSION:4.0 - FN:R0 - ORG:Organizational Entity - ADR;TYPE=WORK:;;42 Twisty Passage;Deep Cavern;WA;98666;U.S.A. 
- TEL;TYPE=VOICE,TEXT,WORK;VALUE=uri:tel:+1-666-555-1212 - TEL;TYPE=FAX,WORK;VALUE=uri:tel:+1-666-555-1213 - EMAIL:human@example.com - END:VCARD - - &GBR-1 { gbr: *VCARD-1 } - - - &VCARD-2 | - BEGIN:VCARD - VERSION:4.0 - ORG:Epilogue Technology Corporation - EMAIL:sra@epilogue.com - END:VCARD - - &GBR-2 { gbr: *VCARD-2 } - - - &ROA-666 { roa: { asn: 666 } } - -constraints: - - # Need something for a drop test, sorry Randy - - prefix: 147.28.224.0/19 - drop: roas - - - prefix: 10.0.0.0/8 - add: [ { roa: { asn: 666, maxlen: 16 }}, *GBR-1 ] - - - prefix: 192.168.0.0/16 - drop: all - - - asn: 666 - add: [ *GBR-1 ] - - - prefix: 128.224.0.0/16 - drop: all - add: [ *GBR-2 ] - - - prefix: 128.224.1.0/24 - add: [ *GBR-2, *ROA-666 ] - - - prefix: 128.224.2.0/24 - add: [ *GBR-2, *ROA-666 ] - - - prefix: 149.20.0.0/16 - add: [ *ROA-666 ] - - - prefix: 2001:4f8:3:d::/64 - add: [ *ROA-666 ] diff --git a/scripts/repo0-testbed-daily b/scripts/repo0-testbed-daily deleted file mode 100755 index 576464c4..00000000 --- a/scripts/repo0-testbed-daily +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/sh - -# $Id: daily 602 2013-06-02 18:00:25Z sra $ - -home=/home/sra/rpki.testbed - -exec >>$home/logs/daily.log 2>&1 - -set -x - -date - -cd $home - -/usr/local/sbin/rpkic update_bpki - -/usr/local/bin/svn update -/usr/local/bin/svn add --force . -/usr/local/bin/svn ci --message 'Daily auto update' -/usr/local/bin/svn update diff --git a/scripts/repo0-testbed-monthly b/scripts/repo0-testbed-monthly deleted file mode 100755 index b7ada110..00000000 --- a/scripts/repo0-testbed-monthly +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/sh - -# $Id: monthly 602 2013-06-02 18:00:25Z sra $ - -home=/home/sra/rpki.testbed - -exec >>$home/logs/monthly.log 2>&1 - -set -x - -date - -cd $home - -for identity in iana afrinic apnic arin lacnic legacy ripe -do - /usr/local/sbin/rpkic -i $identity renew_all_children -done - -/usr/local/bin/svn update -/usr/local/bin/svn add --force . 
-/usr/local/bin/svn ci --message 'Monthly auto update' -/usr/local/bin/svn update diff --git a/scripts/repo0-testbed-weekly b/scripts/repo0-testbed-weekly deleted file mode 100755 index 6f1f8ead..00000000 --- a/scripts/repo0-testbed-weekly +++ /dev/null @@ -1,96 +0,0 @@ -#!/bin/sh - -# $Id: weekly 756 2013-11-21 22:54:28Z sra $ -# -# Run weekly periodic IR back-end tasks. - -home=/home/sra/rpki.testbed - -top=/home/sra/subvert-rpki.hactrn.net/trunk - -exec >>$home/logs/weekly.log 2>&1 -set -x -date - -export OPENSSL_CONF=/dev/null -for openssl in $top/openssl/openssl/apps/openssl /usr/local/bin/openssl -do - test -x $openssl && break -done - -## Download all input files. See the fetch script for the current -## list of files, but for a long time now it's been: -## -## http://www.iana.org/assignments/as-numbers/as-numbers.xml -## http://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.xml -## http://www.iana.org/assignments/ipv6-unicast-address-assignments/ipv6-unicast-address-assignments.xml -## ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.aut-num.gz -## ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inetnum.gz -## ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.inet6num.gz -## ftp://ftp.ripe.net/pub/stats/ripencc/membership/alloclist.txt -## ftp://ftp.apnic.net/public/stats/apnic/delegated-apnic-extended-latest -## -## Along with an ARIN bulkwhois dump which we get under a research NDA -## and retrieve via a mechanism that I'm not going to describe here. - -/bin/sh -x $home/scripts/fetch - -## Process ARIN data first -- we need erx.csv, which comes from ARIN. - -cd $home/arin -/usr/local/bin/unzip -p arin_db.zip arin_db.xml | -/usr/local/bin/python $top/scripts/arin-to-csv.py -/usr/local/bin/python $top/scripts/translate-handles.py asns.csv prefixes.csv - -## Process IANA data, now that we have erx.csv. - -cd $home/iana -/usr/local/bin/python $top/scripts/iana-to-csv.py - -## Process APNIC data. 
- -cd $home/apnic -/usr/local/bin/python $top/scripts/apnic-to-csv.py -/usr/local/bin/python $top/scripts/translate-handles.py asns.csv prefixes.csv - -## Process RIPE data. RIPE's database is a horror, the less said -## about it the better. -## -## Somewhere along the line we seem to have stopped even trying to -## generate the ASN database for RIPE, not sure why. I've restored it -## here, guess we'll find out if there was a reason why we disabled it. - -cd $home/ripe -/usr/local/bin/python $top/scripts/ripe-asns-to-csv.py -/usr/bin/awk -f $top/scripts/ripe-prefixes-to-csv.awk alloclist.txt | -/bin/cat extra-prefixes.csv - | -/usr/bin/sort -uo prefixes.csv -/usr/local/bin/python $top/scripts/translate-handles.py asns.csv prefixes.csv -/usr/bin/sort -uo prefixes.csv prefixes.csv - -## Not yet doing anything for AfriNIC, LACNIC, or Legacy. - -## Generate root certificate. This is a lot simpler now that we're pretending to be the One True Root. - -cd $home/root -$openssl req -new -x509 -days 90 -set_serial $(/bin/date -u +%s) -config root.conf -out root.cer -key root.key -outform DER -/bin/cp -fp root.cer root.cer.dup && -/bin/mv -f root.cer.dup /home/pubd/publication/root.cer - -## Whack all the files into subversion. - -cd $home -/usr/local/bin/svn update -/usr/local/bin/svn add --force . -/usr/local/bin/svn ci --message 'Weekly auto update' -/usr/local/bin/svn update - -## Feed all the new data into the IRDB. 
- -for entity in iana afrinic apnic arin lacnic legacy ripe -do - for resource in asns prefixes - do - /bin/test -r $entity/$resource.csv && - /usr/local/sbin/rpkic --identity $entity load_$resource $entity/$resource.csv - done -done diff --git a/scripts/ripe-asns-to-csv.py b/scripts/ripe-asns-to-csv.py deleted file mode 100644 index 50251ce8..00000000 --- a/scripts/ripe-asns-to-csv.py +++ /dev/null @@ -1,108 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Parse a WHOIS research dump and write out (just) the RPKI-relevant -fields in myrpki-format CSV syntax. - -NB: The input data for this script is publicly available via FTP, but -you'll have to fetch the data from RIPE yourself, and be sure to see -the terms and conditions referenced by the data file header comments. 
-""" - -import gzip -from rpki.csv_utils import csv_writer - -class Handle(dict): - - want_tags = () - - debug = False - - def set(self, tag, val): - if tag in self.want_tags: - self[tag] = "".join(val.split(" ")) - - def check(self): - for tag in self.want_tags: - if not tag in self: - return False - if self.debug: - self.log() - return True - - def __repr__(self): - return "<%s %s>" % (self.__class__.__name__, - " ".join("%s:%s" % (tag, self.get(tag, "?")) - for tag in self.want_tags)) - - def log(self): - print repr(self) - - def finish(self, ctx): - self.check() - -class aut_num(Handle): - want_tags = ("aut-num", "mnt-by", "as-name") - - def set(self, tag, val): - if tag == "aut-num" and val.startswith("AS"): - val = val[2:] - Handle.set(self, tag, val) - - def finish(self, ctx): - if self.check(): - ctx.asns.writerow((self["mnt-by"], self["aut-num"])) - -class main(object): - - types = dict((x.want_tags[0], x) for x in (aut_num,)) - - - def finish_statement(self, done): - if self.statement: - tag, sep, val = self.statement.partition(":") - assert sep, "Couldn't find separator in %r" % self.statement - tag = tag.strip().lower() - val = val.strip().upper() - if self.cur is None: - self.cur = self.types[tag]() if tag in self.types else False - if self.cur is not False: - self.cur.set(tag, val) - if done and self.cur: - self.cur.finish(self) - self.cur = None - - filenames = ("ripe.db.aut-num.gz",) - - def __init__(self): - self.asns = csv_writer("asns.csv") - for fn in self.filenames: - f = gzip.open(fn) - self.statement = "" - self.cur = None - for line in f: - line = line.expandtabs().partition("#")[0].rstrip("\n") - if line and not line[0].isalpha(): - self.statement += line[1:] if line[0] == "+" else line - else: - self.finish_statement(not line) - self.statement = line - self.finish_statement(True) - f.close() - self.asns.close() - -main() diff --git a/scripts/ripe-prefixes-to-csv.awk b/scripts/ripe-prefixes-to-csv.awk deleted file mode 100644 index 
37327484..00000000 --- a/scripts/ripe-prefixes-to-csv.awk +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/awk -f -# $Id$ - -# ftp -pa ftp://ftp.ripe.net/pub/stats/ripencc/membership/alloclist.txt - -function done() { - for (i = 1; i <= n_allocs; i++) - print handle "\t" alloc[i]; - n_allocs = 0; -} - -/^[a-z]/ { - done(); - handle = $0; - nr = NR; -} - -NR == nr + 1 { - name = $0; -} - -NR > nr + 2 && NF > 1 && $2 !~ /:/ { - split($2, a, "/"); - len = a[2]; - split(a[1], a, /[.]/); - for (i = length(a); i < 4; i++) - a[i+1] = 0; - alloc[++n_allocs] = sprintf("%d.%d.%d.%d/%d", a[1], a[2], a[3], a[4], len); -} - -NR > nr + 2 && NF > 1 && $2 ~ /:/ { - alloc[++n_allocs] = $2; -} - -END { - done(); -} diff --git a/scripts/ripe-to-csv.awk b/scripts/ripe-to-csv.awk deleted file mode 100644 index 5325574f..00000000 --- a/scripts/ripe-to-csv.awk +++ /dev/null @@ -1,124 +0,0 @@ -#!/usr/bin/awk -f - -# Parse a WHOIS research dump and write out (just) the RPKI-relevant -# fields in myrpki-format CSV syntax. -# -# Unfortunately, unlike the ARIN and APNIC databases, the RIPE database -# doesn't really have any useful concept of an organizational handle. -# More precisely, while it has handles out the wazoo, none of them are -# useful as a reliable grouping mechanism for tracking which set of -# resources are held by a particular organization. So, instead of being -# able to track all of an organization's resources with a single handle -# as we can in the ARIN and APNIC databases, the best we can do with the -# RIPE database is to track individual resources, each with its own -# resource handle. Well, for prefixes -- ASN entries behave more like -# in the ARIN and APNIC databases. -# -# This is an AWK script rather than a Python script because it is a -# fairly simple stream parser that has to process a ridiculous amount -# of text. AWK turns out to be significantly faster for this. 
-# -# NB: The input data for this script is publicly available via FTP, but -# you'll have to fetch the data from RIPE yourself, and be sure to see -# the terms and conditions referenced by the data file header comments. -# -# $Id$ -# -# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -# On input, ":" is the most useful delimiter -# On output, we want tab-delimited text. -BEGIN { - FS = "[ \t]*:"; - OFS = "\t"; -} - -# Clean up trailing whitespace. -{ - sub(/[ \t]+$/, ""); -} - -# Continuation line: strip comment, if any, then append value, if any, -# to what we had from previous line(s). -/^[^A-Z]/ { - sub(/[ \t]*#.*$/, ""); - if (NF) - val = val $0; - next; -} - -# Anything other than line continuation terminates the previous line, -# so if we were working on a line, we're done with it now, process it. -key { - do_line(); -} - -# Non-empty line and we have no tag, this must be start of a new block. -NF && !tag { - tag = $1; -} - -# One of the tags we care about, clean up and save the data. -/^(AS-NAME|AUT-NUM|INET6NUM|INETNUM|MNT-BY|NETNAME|STATUS):/ { - key = $1; - sub(/^[^ \t]+:/, ""); - sub(/[ \t]*#.*$/, ""); - val = $0; -} - -# Blank line and we have something, process it. 
-!NF && tag { - do_block(); -} - -# End of file, process final data, if any. -END { - do_line(); - do_block(); -} - -# Handle one line, after line icky RPSL continuation. -function do_line() { - gsub(/[ \t]/, "", val); - if (key && val) - tags[key] = val; - key = ""; - val = ""; -} - -# Dispatch to handle known block types, then clean up so we can start -# a new block. -function do_block() { - if (tag == "INETNUM" || tag == "INET6NUM") - do_prefix(); - else if (tag == "AUT-NUM") - do_asn(); - delete tags; - tag = ""; -} - -# Handle an AUT-NUM block: extract the ASN, use MNT-BY as the handle. -function do_asn() { - sub(/^AS/, "", tags[tag]); - if (tags["MNT-BY"] && tags[tag]) - print tags["MNT-BY"], tags[tag] >"asns.csv"; -} - -# Handle an INETNUM or INET6NUM block: check for the status values we -# care about, use NETNAME as the handle. -function do_prefix() { - if (tags["STATUS"] ~ /^ASSIGNED(P[AI])$/ && tags["NETNAME"] && tags[tag]) - print tags["NETNAME"], tags[tag] >"prefixes.csv"; -} diff --git a/scripts/ripe-to-csv.py b/scripts/ripe-to-csv.py deleted file mode 100644 index b864345b..00000000 --- a/scripts/ripe-to-csv.py +++ /dev/null @@ -1,138 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -""" -Parse a WHOIS research dump and write out (just) the RPKI-relevant -fields in myrpki-format CSV syntax. - -Unfortunately, unlike the ARIN and APNIC databases, the RIPE database -doesn't really have any useful concept of an organizational handle. -More precisely, while it has handles out the wazoo, none of them are -useful as a reliable grouping mechanism for tracking which set of -resources are held by a particular organization. So, instead of being -able to track all of an organization's resources with a single handle -as we can in the ARIN and APNIC databases, the best we can do with the -RIPE database is to track individual resources, each with its own -resource handle. Well, for prefixes -- ASN entries behave more like -in the ARIN and APNIC databases. - -Feh. - -NB: The input data for this script is publicly available via FTP, but -you'll have to fetch the data from RIPE yourself, and be sure to see -the terms and conditions referenced by the data file header comments. -""" - -import gzip -from rpki.csv_utils import csv_writer - -class Handle(dict): - - want_tags = () - - want_status = ("ASSIGNED", "ASSIGNEDPA", "ASSIGNEDPI") - - debug = False - - def set(self, tag, val): - if tag in self.want_tags: - self[tag] = "".join(val.split(" ")) - - def check(self): - for tag in self.want_tags: - if not tag in self: - return False - if self.debug: - self.log() - return True - - def __repr__(self): - return "<%s %s>" % (self.__class__.__name__, - " ".join("%s:%s" % (tag, self.get(tag, "?")) - for tag in self.want_tags)) - - def log(self): - print repr(self) - - def finish(self, ctx): - self.check() - -class aut_num(Handle): - want_tags = ("aut-num", "mnt-by") # "as-name" - - def set(self, tag, val): - if tag == "aut-num" and val.startswith("AS"): - val = val[2:] - Handle.set(self, tag, val) - - def finish(self, ctx): - if self.check(): - ctx.asns.writerow((self["mnt-by"], self["aut-num"])) - -class inetnum(Handle): - want_tags = ("inetnum", "netname", "status") 
# "mnt-by" - - def finish(self, ctx): - if self.check() and self["status"] in self.want_status: - ctx.prefixes.writerow((self["netname"], self["inetnum"])) - -class inet6num(Handle): - want_tags = ("inet6num", "netname", "status") # "mnt-by" - - def finish(self, ctx): - if self.check() and self["status"] in self.want_status: - ctx.prefixes.writerow((self["netname"], self["inet6num"])) - -class main(object): - - types = dict((x.want_tags[0], x) for x in (aut_num, inetnum, inet6num)) - - def finish_statement(self, done): - if self.statement: - tag, sep, val = self.statement.partition(":") - assert sep, "Couldn't find separator in %r" % self.statement - tag = tag.strip().lower() - val = val.strip().upper() - if self.cur is None: - self.cur = self.types[tag]() if tag in self.types else False - if self.cur is not False: - self.cur.set(tag, val) - if done and self.cur: - self.cur.finish(self) - self.cur = None - - filenames = ("ripe.db.aut-num.gz", "ripe.db.inet6num.gz", "ripe.db.inetnum.gz") - - def __init__(self): - self.asns = csv_writer("asns.csv") - self.prefixes = csv_writer("prefixes.csv") - for fn in self.filenames: - f = gzip.open(fn) - self.statement = "" - self.cur = None - for line in f: - line = line.expandtabs().partition("#")[0].rstrip("\n") - if line and not line[0].isalpha(): - self.statement += line[1:] if line[0] == "+" else line - else: - self.finish_statement(not line) - self.statement = line - self.finish_statement(True) - f.close() - self.asns.close() - self.prefixes.close() - -main() diff --git a/scripts/roa-to-irr.py b/scripts/roa-to-irr.py deleted file mode 100644 index 01b2aac8..00000000 --- a/scripts/roa-to-irr.py +++ /dev/null @@ -1,159 +0,0 @@ -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# Portions copyright (C) 2010--2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright 
notices and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR -# ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -# DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA -# OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Generate IRR route and route6 objects from ROAs. -""" - -import os -import socket -import sys -import argparse -import errno -import time -import rpki.x509 - -args = None - -class route(object): - """ - Interesting parts of a route object. - """ - - def __init__(self, label, uri, asnum, date, prefix, prefixlen, max_prefixlen): - self.label = label - self.uri = uri - self.asn = asnum - self.date = date - self.prefix = prefix - self.prefixlen = prefixlen - self.max_prefixlen = self.prefixlen if max_prefixlen is None else max_prefixlen - - def __cmp__(self, other): - result = cmp(self.asn, other.asn) - if result == 0: - result = cmp(self.prefix, other.prefix) - if result == 0: - result = cmp(self.prefixlen, other.prefixlen) - if result == 0: - result = cmp(self.max_prefixlen, other.max_prefixlen) - if result == 0: - result = cmp(self.date, other.date) - return result - - def __str__(self): - lines = ( - "%-14s%s/%s" % (self.label, self.prefix, self.prefixlen), - "descr: %s/%s-%s" % (self.prefix, self.prefixlen, self.max_prefixlen), - "origin: AS%d" % self.asn, - "notify: %s" % args.notify, - "mnt-by: %s" % args.mnt_by, - "changed: %s %s" % (args.changed_by, self.date), - "source: %s" % args.source, - "override: %s" % args.password if args.password is not None else None, - "") - return "\n".join(line for line in lines if line is not None) - - def write(self, output_directory): - name = 
"%s-%s-%s-AS%d-%s" % (self.prefix, self.prefixlen, self.max_prefixlen, self.asn, self.date) - with open(os.path.join(output_directory, name), "w") as f: - f.write(str(self)) - - -class route_list(list): - """ - A list of route objects. - """ - - def __init__(self, rcynic_dir): - for root, dirs, files in os.walk(rcynic_dir): - for f in files: - if f.endswith(".roa"): - path = os.path.join(root, f) - uri = "rsync://" + path[len(rcynic_dir):].lstrip("/") - roa = rpki.x509.ROA(DER_file = path) - roa.extract() - assert roa.get_POW().getVersion() == 0, "ROA version is %d, expected 0" % roa.get_POW().getVersion() - asnum = roa.get_POW().getASID() - notBefore = roa.get_POW().certs()[0].getNotBefore().strftime("%Y%m%d") - v4, v6 = roa.get_POW().getPrefixes() - if v4 is not None: - for prefix, prefixlen, max_prefixlen in v4: - self.append(route("route:", uri, asnum, notBefore, prefix, prefixlen, max_prefixlen)) - if v6 is not None: - for prefix, prefixlen, max_prefixlen in v6: - self.append(route("route6:", uri, asnum, notBefore, prefix, prefixlen, max_prefixlen)) - self.sort() - for i in xrange(len(self) - 2, -1, -1): - if self[i] == self[i + 1]: - del self[i + 1] - -def email_header(f): - if args.email: - f.write("\n".join(( - "From %s" % args.email_from, - "Date: %s" % time.strftime("%d %b %Y %T %z"), - "From: %s" % args.email_from, - "Subject: Fake email header to make irr_rpsl_submit happy", - "Message-Id: <%s.%s@%s>" % (os.getpid(), time.time(), socket.gethostname()), - "", ""))) - -def main(): - - global args - whoami = "%s@%s" % (os.getlogin(), socket.gethostname()) - - parser = argparse.ArgumentParser(description = __doc__) - parser.add_argument("-c", "--changed_by", default = whoami, - help = "override \"changed:\" value") - parser.add_argument("-f", "--from", dest = "email_from", default = whoami, - help = "override \"from:\" header when using --email") - parser.add_argument("-m", "--mnt_by", default = "MAINT-RPKI", - help = "override \"mnt-by:\" value") - 
parser.add_argument("-n", "--notify", default = whoami, - help = "override \"notify:\" value") - parser.add_argument("-p", "--password", - help = "specify \"override:\" password") - parser.add_argument("-s", "--source", default = "RPKI", - help = "override \"source:\" value") - group = parser.add_mutually_exclusive_group() - group.add_argument("-e", "--email", action = "store_true", - help = "generate fake RFC 822 header suitable for piping to irr_rpsl_submit") - group.add_argument("-o", "--output", - help = "write route and route6 objects to directory OUTPUT, one object per file") - parser.add_argument("authenticated_directory", - help = "directory tree containing authenticated rcynic output") - args = parser.parse_args() - - if not os.path.isdir(args.authenticated_directory): - sys.exit("\"%s\" is not a directory" % args.authenticated_directory) - - routes = route_list(args.authenticated_directory) - - if args.output: - if not os.path.isdir(args.output): - os.makedirs(args.output) - for r in routes: - r.write(args.output) - else: - email_header(sys.stdout) - for r in routes: - sys.stdout.write("%s\n" % r) - -if __name__ == "__main__": - main() diff --git a/scripts/rp-sqlite b/scripts/rp-sqlite deleted file mode 100755 index ee43096d..00000000 --- a/scripts/rp-sqlite +++ /dev/null @@ -1,425 +0,0 @@ -#!/usr/local/bin/python - -# $Id$ - -# Copyright (C) 2013 Dragon Research Labs ("DRL") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -# Preliminary script to work out what's involved in building an -# SQLite3 database of RP objects. We haven't bothered with this until -# now in rcynic, because we mostly just walk the filesystem tree, but -# LTA and some of the ideas Tim is playing with require a lot of -# lookups based on things that are not the URIs we use as filenames, -# so some kind of indexing may become necessary. Given the complexity -# of building any kind of real index over RFC 3779 resources, -# otherwise fine lightweight tools like the Python shelve library -# probably won't cut it here, and I don't want to add a dependency on -# MySQL on the RP side (yet?), so let's see what we can do with SQLite3. 
- -import os -import sys -import yaml -import base64 -import sqlite3 -import weakref -import rpki.POW -import rpki.x509 -import rpki.resource_set - -sqlite3.register_adapter(rpki.POW.IPAddress, - lambda x: buffer("_" + x.toBytes())) - -sqlite3.register_converter("RangeVal", - lambda s: long(s) if s.isdigit() else rpki.POW.IPAddress.fromBytes(s[1:])) - - -def main(): - rpdb = RPDB() - rpdb.load() - test(rpdb) - rpdb.close() - -def test(rpdb): - fn2s = [None] + rpdb.fn2map.keys() - - print - print "Testing YAML parsing" - parse_yaml(rpdb) - - print - print "Looking for certificates without AKI" - for r in rpdb.find_by_aki(None, "cer"): - print r, r.uris - print - print "Testing range functions" - for fn2 in fn2s: - if fn2 is not None: - print - print "Restricting search to type", fn2 - print - print "Looking for range that should include adrilankha and psg again" - for r in rpdb.find_by_range("147.28.0.19", "147.28.0.62", fn2): - print r, r.uris - print - print "Looking for range that should include adrilankha" - for r in rpdb.find_by_range("147.28.0.19", "147.28.0.19", fn2): - print r, r.uris - print - print "Looking for range that should include ASN 3130" - for r in rpdb.find_by_range(3130, 3130, fn2): - print r, r.uris - print - print "Moving on to resource sets" - for fn2 in fn2s: - if fn2 is not None: - print - print "Restricting search to type", fn2 - for expr in ("147.28.0.19-147.28.0.62", - "3130", - "2001:418:1::19/128", - "147.28.0.19-147.28.0.62,198.180.150.50/32", - "3130,147.28.0.19-147.28.0.62,198.180.150.50/32", - "2001:418:1::62/128,198.180.150.50/32,2001:418:8006::50/128", - "147.28.0.19-147.28.0.62,2001:418:1::19/128,2001:418:1::62/128,198.180.150.50/32,2001:418:8006::50/128"): - print - print "Trying", expr - for r in rpdb.find_by_resource_bag(rpki.resource_set.resource_bag.from_str(expr), fn2): - print r, r.uris - - -def parse_xki(s): - """ - Parse text form of an SKI or AKI. 
We accept two encodings: - colon-delimited hexadecimal, and URL-safe Base64. The former is - what OpenSSL prints in its text representation of SKI and AKI - extensions; the latter is the g(SKI) value that some RPKI CA engines - (including rpkid) use when constructing filenames. - - In either case, we check that the decoded result contains the right - number of octets to be a SHA-1 hash. - """ - - if ":" in s: - b = "".join(chr(int(c, 16)) for c in s.split(":")) - else: - b = base64.urlsafe_b64decode(s + ("=" * (4 - len(s) % 4))) - if len(b) != 20: - raise RuntimeError("Bad length for SHA1 xKI value: %r" % s) - return b - - -def parse_yaml(rpdb, fn = "rp-sqlite.yaml"): - yy = yaml.safe_load(open(fn, "r")) - for y in yy: - - ski = None - uri = None - obj = set() - - print - - if "ski" in y: - ski = parse_xki(y["ski"]) - obj.update(rpdb.find_by_ski(ski)) - if "uri" in y: - uri = y["uri"] - obj.update(rpdb.find_by_uri(uri)) - if len(obj) == 1: - obj = obj.pop() - else: - raise RuntimeError("Constraint entry must name a unique object using SKI, URI, or both (%r, %r, %r)" % ( - ski, uri, obj)) - - print "URI:", uri - print "SKI:", " ".join("%02X" % ord(c) for c in ski), "(" + y["ski"] + ")" - - new_resources = old_resources = obj.get_3779resources() - - if "set" in y: - new_resources = rpki.resource_set.resource_bag.from_str(y["set"]) - - if "add" in y: - new_resources = new_resources | rpki.resource_set.resource_bag.from_str(y["add"]) - - if "sub" in y: - new_resources = new_resources - rpki.resource_set.resource_bag.from_str(y["sub"]) - - if new_resources == old_resources: - print "No resource change, skipping" - continue - - print "Old:", old_resources - print "New:", new_resources - print "Add:", new_resources - old_resources - print "Sub:", old_resources - new_resources - - # See draft-ietf-sidr-ltamgmt-08.txt for real processing details, but overview: - # - # - Process constraints file as above to determine list of target - # certificates (2.1). 
May need to add more fields to YAML hash - # for things like CP, CRLDP, etc, although I'm not entirely sure - # yet which of those it really makes sense to tweak via - # constraints. - # - # - Use resources from selected target certificates to determine - # which additional certificates we need to reissue to remove those - # resources (2.2, "perforation"). In theory we already have SQL - # that will just locate all of these for us. - # - # - Figure out which trust anchors to process (2.3, TA - # re-parenting); we can look in SQL for NULL AKI, but that's just - # a hint, we either have to verify that rcynic accepted those TAs - # or we have to look at the TALs. Looking at TALs is probably - # easier. - # - # At some point we probably need to parse the constraints file into - # Constraints objects or something like that, except that we may - # really need something more general that will accomodate - # perforation and TA reparenting as well. Figure out and refactor - # as we go along, most likely. - - -class RPDB(object): - """ - Relying party database. - - For now just wire in the database name and rcynic root, fix this - later if overall approach seems usable. Might even end up just - being an in-memory SQL database, who knows? 
- """ - - fn2map = dict(cer = rpki.x509.X509, - crl = rpki.x509.CRL, - mft = rpki.x509.SignedManifest, - roa = rpki.x509.ROA, - gbr = rpki.x509.Ghostbuster) - - def __init__(self, db_name = "rp-sqlite.db", delete_old_db = True): - - if delete_old_db: - try: - os.unlink(db_name) - except: - pass - - exists = os.path.exists(db_name) - - self.db = sqlite3.connect(db_name, detect_types = sqlite3.PARSE_DECLTYPES) - self.db.text_factory = str - self.cur = self.db.cursor() - - self.cache = weakref.WeakValueDictionary() - - if exists: - return - - self.cur.executescript(''' - PRAGMA foreign_keys = on; - - CREATE TABLE object ( - id INTEGER PRIMARY KEY NOT NULL, - der BLOB NOT NULL, - fn2 TEXT NOT NULL, - ski BLOB, - aki BLOB, - inherits BOOLEAN NOT NULL, - UNIQUE (der)); - - CREATE TABLE uri ( - id INTEGER NOT NULL, - uri TEXT NOT NULL, - UNIQUE (uri), - FOREIGN KEY (id) REFERENCES object(id) - ON DELETE CASCADE - ON UPDATE CASCADE); - - CREATE INDEX uri_index ON uri(id); - - CREATE TABLE range ( - id INTEGER NOT NULL, - min RangeVal NOT NULL, - max RangeVal NOT NULL, - UNIQUE (id, min, max), - FOREIGN KEY (id) REFERENCES object(id) - ON DELETE CASCADE - ON UPDATE CASCADE); - - CREATE INDEX range_index ON range(min, max); - ''') - - def load(self, - rcynic_root = os.path.expanduser("~/rpki/subvert-rpki.hactrn.net/trunk/" - "rcynic/rcynic-data/unauthenticated"), - spinner = 100): - - nobj = 0 - - for root, dirs, files in os.walk(rcynic_root): - for fn in files: - fn = os.path.join(root, fn) - fn2 = os.path.splitext(fn)[1][1:] - - try: - obj = self.fn2map[fn2](DER_file = fn) - except: - continue - - if spinner and nobj % spinner == 0: - sys.stderr.write("\r%s %d..." 
% ("|\\-/"[(nobj/spinner) & 3], nobj)) - - nobj += 1 - - inherits = False - - if fn2 == "crl": - ski = None - aki = buffer(obj.get_AKI()) - cer = None - bag = None - - else: - if fn2 == "cer": - cer = obj - else: - cer = rpki.x509.X509(POW = obj.get_POW().certs()[0]) - ski = buffer(cer.get_SKI()) - try: - aki = buffer(cer.get_AKI()) - except: - aki = None - bag = cer.get_3779resources() - inherits = bag.asn.inherit or bag.v4.inherit or bag.v6.inherit - - der = buffer(obj.get_DER()) - uri = "rsync://" + fn[len(rcynic_root) + 1:] - - try: - self.cur.execute("INSERT INTO object (der, fn2, ski, aki, inherits) VALUES (?, ?, ?, ?, ?)", - (der, fn2, ski, aki, inherits)) - rowid = self.cur.lastrowid - - except sqlite3.IntegrityError: - self.cur.execute("SELECT id FROM object WHERE der = ? AND fn2 = ?", (der, fn2)) - rows = self.cur.fetchall() - rowid = rows[0][0] - assert len(rows) == 1 - - else: - if bag is not None: - for rset in (bag.asn, bag.v4, bag.v6): - if rset is not None: - self.cur.executemany("REPLACE INTO range (id, min, max) VALUES (?, ?, ?)", - ((rowid, i.min, i.max) for i in rset)) - - self.cur.execute("INSERT INTO uri (id, uri) VALUES (?, ?)", - (rowid, uri)) - - if spinner: - sys.stderr.write("\r= %d objects, committing..." % nobj) - - self.db.commit() - - if spinner: - sys.stderr.write("done.\n") - - - def find_by_ski(self, ski, fn2 = None): - if ski is None: - return self._find_results(fn2, "SELECT id, fn2, der FROM object WHERE ski IS NULL") - else: - return self._find_results(fn2, "SELECT id, fn2, der FROM object WHERE ski = ?", [buffer(ski)]) - - def find_by_aki(self, aki, fn2 = None): - if aki is None: - return self._find_results(fn2, "SELECT id, fn2, der FROM object WHERE aki IS NULL") - else: - return self._find_results(fn2, "SELECT id, fn2, der FROM object WHERE aki = ?", [buffer(aki)]) - - def find_by_uri(self, uri): - return self._find_results(None, "SELECT object.id, fn2, der FROM object, uri WHERE uri.uri = ? 
AND object.id = uri.id", [uri]) - - - # It's easiest to understand overlap conditions by understanding - # non-overlap then inverting and and applying De Morgan's law. Ranges - # A and B do not overlap if either A.min > B.max or A.max < B.min; - # therefore they do overlap if A.min <= B.max and A.max >= B.min. - - def find_by_range(self, range_min, range_max = None, fn2 = None): - if range_max is None: - range_max = range_min - if isinstance(range_min, (str, unicode)): - range_min = long(range_min) if range_min.isdigit() else rpki.POW.IPAddress(range_min) - if isinstance(range_max, (str, unicode)): - range_max = long(range_max) if range_max.isdigit() else rpki.POW.IPAddress(range_max) - assert isinstance(range_min, (int, long, rpki.POW.IPAddress)) - assert isinstance(range_max, (int, long, rpki.POW.IPAddress)) - return self._find_results( - fn2, - """ - SELECT object.id, fn2, der FROM object, range - WHERE ? <= max AND ? >= min AND object.id = range.id - """, - [range_min, range_max]) - - - def find_by_resource_bag(self, bag, fn2 = None): - assert bag.asn or bag.v4 or bag.v6 - qset = [] - aset = [] - for rset in (bag.asn, bag.v4, bag.v6): - if rset: - for r in rset: - qset.append("(? <= max AND ? >= min)") - aset.append(r.min) - aset.append(r.max) - return self._find_results( - fn2, - """ - SELECT object.id, fn2, der FROM object, range - WHERE object.id = range.id AND (%s) - """ % (" OR ".join(qset)), - aset) - - - def _find_results(self, fn2, query, args = None): - if args is None: - args = [] - if fn2 is not None: - assert fn2 in self.fn2map - query += " AND fn2 = ?" 
- args.append(fn2) - query += " GROUP BY object.id" - results = [] - self.cur.execute(query, args) - selections = self.cur.fetchall() - for rowid, fn2, der in selections: - if rowid in self.cache: - obj = self.cache[rowid] - else: - obj = self.fn2map[fn2](DER = der) - self.cur.execute("SELECT uri FROM uri WHERE id = ?", (rowid,)) - obj.uris = [u[0] for u in self.cur.fetchall()] - obj.uri = obj.uris[0] if len(obj.uris) == 1 else None - self.cache[rowid] = obj - results.append(obj) - return results - - - def close(self): - self.cur.close() - self.db.close() - - -if __name__ == "__main__": - main() diff --git a/scripts/rp-sqlite.yaml b/scripts/rp-sqlite.yaml deleted file mode 100644 index 4c0a0b8d..00000000 --- a/scripts/rp-sqlite.yaml +++ /dev/null @@ -1,53 +0,0 @@ -- ski: B8:14:5D:13:53:7D:AE:6E:E2:E3:95:84:A8:99:EB:7D:1A:7D:E5:DF - uri: rsync://repo0.rpki.net/rpki/root.cer - add: 10.0.0.44/32 - -- ski: A2:B3:2A:99:20:07:7A:E9:A4:9F:3F:02:F2:32:F9:3D:54:F8:7E:78 - uri: rsync://repo0.rpki.net/rpki/root/iana.cer - sub: 10.0.0.0/8 - -- ski: 3NYgwt_HYV91MeCGLWdUL4uq65M - uri: rsync://repo0.rpki.net/rpki/root/iana/5/3NYgwt_HYV91MeCGLWdUL4uq65M.cer - add: 10.0.0.0/8 - -- ski: 3YFTaQOUkPQfhckX_ikYzy0mR9s - uri: rsync://repo0.rpki.net/rpki/root/iana/5/3YFTaQOUkPQfhckX_ikYzy0mR9s.cer - -- ski: 7ew1d5WFCSfhd8lnpmjwOohS_DQ - uri: rsync://repo0.rpki.net/rpki/root/iana/5/7ew1d5WFCSfhd8lnpmjwOohS_DQ.cer - -- ski: PWxDsvUgDdeloE3eQfceV8vbIyg - uri: rsync://repo0.rpki.net/rpki/root/iana/5/PWxDsvUgDdeloE3eQfceV8vbIyg.cer - -- ski: WnOFfpqobEKxzmvddJue3PXsEIQ - uri: rsync://repo0.rpki.net/rpki/root/iana/5/WnOFfpqobEKxzmvddJue3PXsEIQ.cer - -- ski: m6TQTKwKYyVva-Yq__I-Zz25Vns - uri: rsync://repo0.rpki.net/rpki/root/iana/5/m6TQTKwKYyVva-Yq__I-Zz25Vns.cer - -- ski: 8U5wQ47aZuuOXYPGX5BPvlcTfNg - uri: rsync://repo0.rpki.net/rpki/root/iana/ripe/8/8U5wQ47aZuuOXYPGX5BPvlcTfNg.cer - -- ski: bccxGl4Xl4ur3nd1fwQ-1QIwtNA - uri: 
rsync://repo0.rpki.net/rpki/root/iana/ripe/8/bccxGl4Xl4ur3nd1fwQ-1QIwtNA.cer - -- ski: zbXifbEkZNmOVhYZTCZFfLPxhjM - uri: rsync://repo0.rpki.net/rpki/root/iana/ripe/8/zbXifbEkZNmOVhYZTCZFfLPxhjM.cer - -- ski: XYjTToHopYme-hlwgUyUyYRD_VQ - uri: rsync://repo0.rpki.net/rpki/root/iana/arin/6/XYjTToHopYme-hlwgUyUyYRD_VQ.cer - -- ski: _3I3i3uVmnliCinb2STR2MaxuU8 - uri: rsync://repo0.rpki.net/rpki/root/iana/arin/6/_3I3i3uVmnliCinb2STR2MaxuU8.cer - -- ski: qdtoqOMCNSOdYuNNC7ya3dUaPl4 - uri: rsync://repo0.rpki.net/rpki/root/iana/arin/6/qdtoqOMCNSOdYuNNC7ya3dUaPl4.cer - -- ski: yq4x9MteoM0DQYTh6NLbbmf--QY - uri: rsync://repo0.rpki.net/rpki/root/iana/arin/6/yq4x9MteoM0DQYTh6NLbbmf--QY.cer - -- ski: j2TDGclJnZ7mXpZCQJS0cfkOL34 - uri: rsync://repo0.rpki.net/rpki/root/iana/apnic/7/j2TDGclJnZ7mXpZCQJS0cfkOL34.cer - -- ski: yRk89OOx4yyJHJ6Z1JLLnk0_oDc - uri: rsync://repo0.rpki.net/rpki/root/iana/apnic/7/yRk89OOx4yyJHJ6Z1JLLnk0_oDc.cer diff --git a/scripts/rpki b/scripts/rpki deleted file mode 120000 index 168548eb..00000000 --- a/scripts/rpki +++ /dev/null @@ -1 +0,0 @@ -../rpkid/rpki \ No newline at end of file diff --git a/scripts/rpkidemo b/scripts/rpkidemo deleted file mode 100755 index fdb4e1bb..00000000 --- a/scripts/rpkidemo +++ /dev/null @@ -1,495 +0,0 @@ -#!/usr/bin/env python - -""" -Hosted GUI client startup script, for workshops, etc. - -As of when this is run, we assume that the tarball (contents TBD and -perhaps changing from one workshop to another) have been unpacked, -that we are on some Unix-like machine, and that we are executing in -a Python interpreter. We have to check anything else we care about. - -In what we hope is the most common case, this script should be run -with no options. - -$Id$ - -Copyright (C) 2010 Internet Systems Consortium ("ISC") - -Permission to use, copy, modify, and distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -""" - -# Check Python version before doing anything else - -import sys - -python_version = sys.version_info[:2] - -have_ssl_module = python_version >= (2, 6) - -if python_version == (2, 5): - print """ - WARNING WARNING WARNING - - You are running Python version 2.5, which does not include - real SSL support. This means that sessions created by this - script will be vulnerable to monkey-in-the-middle attacks. - - Python 2.6 does not have this problem. - """ - while True: - answer = raw_input("Are you SURE you want to proceed? (yes/NO) ").strip().lower() - if answer in ("", "n", "no"): - sys.exit("You have chosen wisely") - elif answer in ("y", "yes"): - print "You have been warned" - break - else: - print 'Please answer "yes" or "no"' - -elif have_ssl_module: - try: - import ssl - except ImportError: - sys.exit("You're running Python 2.6+, but I can't find the ssl module, so you have no SSL support at all, argh!") - -else: - sys.exit("Sorry, this script requires Python 2.6+, I seem to be running in %s" % sys.version) - -# Ok, it's safe to import the other stuff we need now - -import os, subprocess, webbrowser, urllib2, getpass, re, errno, time, email.utils, httplib, socket, getopt, urllib, cookielib -import tempfile -from xml.etree.ElementTree import fromstring as ElementFromString - -def save(filename, data): - """ - Save data to a file. 
- """ - - tempname = "%s.%d.tmp" % (filename, os.getpid()) - f = open(tempname, "w") - f.write(data) - f.close() - os.rename(tempname, filename) - -def save_error(err): - """ - Save the data from the file-like object "f" into a temporary file - and open a web browser to view the result. - """ - - with tempfile.NamedTemporaryFile(prefix = "rpkidemo-error", suffix = ".html", delete = False) as tmpf: - tmpf.write(err.read()) - - # Save filename for use outside the with statement. This ensures - # the file is properly flushed prior to invoking the web browser. - fname = tmpf.name - - sys.stderr.write("errors saved in %s\n" % fname) - webbrowser.open("file://" + fname) - -class CSV_File(object): - """ - A CSV file that's being maintained by the GUI but being monitored, - downloaded, and used here. - """ - - def __init__(self, filename, url): - self.filename = filename - self.url = url - try: - self.timestamp = os.stat(filename).st_mtime - except: - self.store(0, "") - - def last_modified(self): - """ - Return CSV file timestamp formatted for use with HTTP. - """ - return email.utils.formatdate(self.timestamp, False, True) - - def store(self, timestamp, data): - """ - Save CSV file, and record new timestamp. - """ - save(self.filename, data) - self.timestamp = timestamp - os.utime(self.filename, (time.time(), timestamp)) - - -class AbstractHTTPSConnection(httplib.HTTPSConnection): - """ - Customization of httplib.HTTPSConnection to enable certificate - validation. - - This is an abstract class; subclass must set trust_anchor to the - filename of a anchor file in the format that the ssl module - expects. 
- """ - - trust_anchor = None - - def connect(self): - assert self.trust_anchor is not None - sock = socket.create_connection((self.host, self.port), self.timeout) - if getattr(self, "_tunnel_host", None): - self.sock = sock - self._tunnel() - self.sock = ssl.wrap_socket(sock, - keyfile = self.key_file, - certfile = self.cert_file, - cert_reqs = ssl.CERT_REQUIRED, - ssl_version = ssl.PROTOCOL_TLSv1, - ca_certs = self.trust_anchor) - - -class main(object): - """ - Main program. - """ - - # Environmental parameters - - top = os.path.realpath(os.path.join((sys.path[0] or "."), "..")) - cwd = os.getcwd() - - # Parameters that we might want to get from a config file. - # Just wire them all in for the moment. - - base_url = "https://demo.rpki.net/" - myrpki_url = base_url + "rpki/" - auth_url = myrpki_url + "demo/login" - example_myrpki_cfg = "%s/rpkid/examples/rpki.conf" % top - working_dir = "%s/rpkidemo-data" % cwd - myrpki_py = "%s/rpkid/myrpki.py" % top - user_agent = "RPKIDemo" - delay = 15 - trust_anchor = "%s/scripts/rpkidemo.pem" % top - - openssl = None - - def setup_openssl(self): - """ - Find a usable version of OpenSSL, or build one if we must. 
- """ - - def scrape(*args): - return subprocess.Popen(args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT).communicate()[0] - - def usable_openssl(f): - return f is not None and os.path.exists(f) and "-ss_cert" in scrape(f, "ca", "-?") and "Usage cms" in scrape(f, "cms", "-?") - - for d in os.environ["PATH"].split(":"): - f = os.path.join(d, "openssl") - if usable_openssl(f): - self.openssl = f - break - - if self.openssl is None: - print "Couldn't find usable openssl on path, attempting to build one" - subprocess.check_call(("./configure",), cwd = self.top) - subprocess.check_call(("make",), cwd = os.path.join(self.top, "openssl")) - self.openssl = os.path.join(self.top, "openssl", "openssl", "apps", "openssl") - print "Done building openssl" - print - - if usable_openssl(self.openssl): - print "Using", self.openssl - else: - sys.exit("Could not find or build usable version of openssl, giving up") - - @staticmethod - def setup_utc(): - """ - This script thinks in UTC. - """ - - os.environ["TZ"] = "UTC" - time.tzset() - - def setup_username(self): - """ - Get username and password for web interface, construct urllib2 - "opener" tailored for our use, perform an initial GET (ignoring - result, other than exceptions) to test the username and password. 
- """ - - print "I need to know your username and password on the Django GUI server to proceed" - - while True: - - try: - self.username = raw_input("Username: ") - self.password = getpass.getpass() - - handlers = [] - - self.cookiejar = cookielib.CookieJar() - handlers.append(urllib2.HTTPCookieProcessor(self.cookiejar)) - - if have_ssl_module: - - class HTTPSConnection(AbstractHTTPSConnection): - trust_anchor = self.trust_anchor - - class HTTPSHandler(urllib2.HTTPSHandler): - def https_open(self, req): - return self.do_open(HTTPSConnection, req) - - handlers.append(HTTPSHandler) - - self.opener = urllib2.build_opener(*handlers) - - # Test login credentials - resp = self.opener.open(self.auth_url) # GET - - r = self.opener.open(urllib2.Request( - url = self.auth_url, - data = urllib.urlencode({ "username" : self.username, - "password" : self.password, - "csrfmiddlewaretoken" : self.csrftoken() }), - headers = { "Referer" : self.auth_url, - "User-Agent" : self.user_agent})) # POST - return - - except urllib2.URLError, e: - print "Could not log in to server: %s" % e - print "Please try again" - save_error(e) - - def csrftoken(self): - """ - Pull Django's CSFR token from cookie database. - - Django's login form requires the "csrfmiddlewaretoken." It turns out - this is the same value as the "csrftoken" cookie, so we don't need - to bother parsing the form. - """ - - return [c.value for c in self.cookiejar if c.name == "csrftoken"][0] - - def setup_working_directory(self): - """ - Create working directory and move to it. 
- """ - - try: - print "Creating", self.working_dir - os.mkdir(self.working_dir) - except OSError, e: - if e.errno != errno.EEXIST: - raise - print self.working_dir, "already exists, reusing it" - os.chdir(self.working_dir) - - def setup_config_file(self): - """ - Generate rpki.conf - """ - - if os.path.exists("rpki.conf"): - print "You already have a rpki.conf file, so I will use it" - return - - print "Generating rpki.conf" - section_regexp = re.compile("\s*\[\s*(.+?)\s*\]\s*$") - variable_regexp = re.compile("\s*([-a-zA-Z0-9_]+)\s*=\s*(.+?)\s*$") - f = open("rpki.conf", "w") - f.write("# Automatically generated, do not edit\n") - section = None - for line in open(self.example_myrpki_cfg): - m = section_regexp.match(line) - if m: - section = m.group(1) - m = variable_regexp.match(line) - option = m.group(1) if m and section == "myrpki" else None - value = m.group(2) if option else None - if option == "handle": - line = "handle = %s\n" % self.username - if option == "openssl": - line = "openssl = %s\n" % self.openssl - if option in ("run_rpkid", "run_pubd", "run_rootd") and value != "false": - line = "%s = false\n" % option - f.write(line) - f.close() - - def myrpki(self, *cmd): - """ - Run a myrpki command. - """ - return subprocess.check_call((sys.executable, self.myrpki_py) + cmd) - - def upload(self, url, filename): - """ - Upload filename to URL, return result. - """ - - url = "%s%s/%s" % (self.myrpki_url, url, self.username) - data = open(filename).read() - print "Uploading", filename, "to", url - post_data = urllib.urlencode({ - "content" : data, - "csrfmiddlewaretoken" : self.csrftoken() }) # POST - try: - return self.opener.open(urllib2.Request(url, post_data, { - "User-Agent" : self.user_agent, - "Referer" : url})) - except urllib2.HTTPError, e: - sys.stderr.write("Problem uploading to URL %s\n" % url) - save_error(e) - raise - - def update(self): - """ - Run configure_resources, upload result, download updated result. 
- """ - - self.myrpki("configure_resources") - r = self.upload("demo/myrpki-xml", "myrpki.xml") - save("myrpki.xml", r.read()) - - def setup_csv_files(self): - """ - Create CSV file objects and synchronize timestamps. - """ - - self.csv_files = [ - CSV_File("asns.csv", "demo/down/asns/%s" % self.username), - CSV_File("prefixes.csv", "demo/down/prefixes/%s" % self.username), - CSV_File("roas.csv", "demo/down/roas/%s" % self.username) ] - - def upload_for_response(self, url, path): - """ - Upload an XML file to the requested URL and wait for for the server - to signal that a response is ready. - """ - - self.upload(url, path) - - print """ - Waiting for response to upload. This may require action by a human - being on the server side, so it may take a while, please be patient. - """ - - while True: - try: - return self.opener.open(urllib2.Request( - "%s%s/%s" % (self.myrpki_url, url, self.username), - None, - { "User-Agent" : self.user_agent })) - except urllib2.HTTPError, e: - # Portal GUI uses response code 503 to signal "not ready" - if e.code != 503: - sys.stderr.write("Problem getting response from %s: %s\n" % (url, e)) - save_error(e) - raise - time.sleep(self.delay) - - def setup_parent(self): - """ - Upload the user's identity.xml and wait for the portal gui to send - back the parent.xml response. - """ - - r = self.upload_for_response("demo/parent-request", "entitydb/identity.xml") - parent_data = r.read() - save("parent.xml", parent_data) - self.myrpki("configure_parent", "parent.xml") - - # Extract the parent_handle from the xml response and save it for use by - # setup_repository() - self.parent_handle = ElementFromString(parent_data).get("parent_handle") - - def setup_repository(self): - """ - Upload the repository referral to the portal-gui and wait the - response from the repository operator. 
- """ - - r = self.upload_for_response("demo/repository-request", "entitydb/repositories/%s.xml" % self.parent_handle) - save("repository.xml", r.read()) - self.myrpki("configure_repository", "repository.xml") - - def poll(self, csv_file): - """ - Poll for new version of a CSV file, save if changed, return - boolean indicating whether file has changed. - """ - - try: - url = self.myrpki_url + csv_file.url - r = self.opener.open(urllib2.Request(url, None, { - "If-Modified-Since" : csv_file.last_modified(), - "User-Agent" : self.user_agent })) - timestamp = time.mktime(r.info().getdate("Last-Modified")) - csv_file.store(timestamp, r.read()) - return True - except urllib2.HTTPError, e: - if e.code == 304: # 304 == "Not Modified" - return False - else: - sys.stderr.write("Problem polling URL %s\n" % url) - save_error(e) - raise - - def poll_loop(self): - """ - Loop forever, polling for updates. - """ - - while True: - changed = False - for csv_file in self.csv_files: - if self.poll(csv_file): - changed = True - if changed: - self.update() - time.sleep(self.delay) - - def getopt(self): - """ - Parse options. 
- """ - opts, argv = getopt.getopt(sys.argv[1:], "hi?", ["help"]) - for o, a in opts: - if o in ("-h", "--help", "-?"): - print __doc__ - sys.exit(0) - if argv: - sys.exit("Unexpected arguments %r" % (argv,)) - - def __init__(self): - self.getopt() - self.setup_utc() - self.setup_openssl() - self.setup_username() - self.setup_working_directory() - self.setup_config_file() - self.setup_csv_files() - self.myrpki("initialize") - self.setup_parent() - self.setup_repository() - self.update() - self.update() - - webbrowser.open(self.myrpki_url) - - self.poll_loop() - -main() - -# Local Variables: -# mode:python -# End: - -# vim:sw=2 ts=8 expandtab diff --git a/scripts/rpkidemo.pem b/scripts/rpkidemo.pem deleted file mode 100644 index f96fdb9e..00000000 --- a/scripts/rpkidemo.pem +++ /dev/null @@ -1,23 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDxjCCAy+gAwIBAgIJALc3/mkoVFOMMA0GCSqGSIb3DQEBBQUAMIGaMR4wHAYD -VQQDExVSR25ldC9QU0duZXQgU2VjdXJpdHkxCzAJBgNVBAYTAlVTMQswCQYDVQQI -EwJXQTEaMBgGA1UEBxMRQmFpbmJyaWRnZSBJc2xhbmQxEzARBgNVBAoTClJHbmV0 -LCBMTEMxDzANBgNVBAsTBlBTR25ldDEcMBoGCSqGSIb3DQEJARYNcmFuZHlAcHNn -LmNvbTAeFw0xMTAyMjcwNDMxMzhaFw0yMTAyMjQwNDMxMzhaMIGaMR4wHAYDVQQD -ExVSR25ldC9QU0duZXQgU2VjdXJpdHkxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJX -QTEaMBgGA1UEBxMRQmFpbmJyaWRnZSBJc2xhbmQxEzARBgNVBAoTClJHbmV0LCBM -TEMxDzANBgNVBAsTBlBTR25ldDEcMBoGCSqGSIb3DQEJARYNcmFuZHlAcHNnLmNv -bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAw/lD46076mS4vDHI943dlNPh -KN1EY0wnx7nDga7jh4x8U5gt+MdvdH+kRABR7KVjkaUYKcd+DlAQeNLYXXkXOECz -yN90PgADWucPij6GJn7o9xwNsh2yAhICzqc19RgiKPVJL9V5xWBwKgzkpGG7dcUm -97TjZwhj8B8xcHjVbJ8CAwEAAaOCARAwggEMMAwGA1UdEwQFMAMBAf8wCwYDVR0P -BAQDAgEGMB0GA1UdDgQWBBRUmkatFo7oAUl5SJqUCfAC0LpkgDCBzwYDVR0jBIHH -MIHEgBRUmkatFo7oAUl5SJqUCfAC0LpkgKGBoKSBnTCBmjEeMBwGA1UEAxMVUkdu -ZXQvUFNHbmV0IFNlY3VyaXR5MQswCQYDVQQGEwJVUzELMAkGA1UECBMCV0ExGjAY -BgNVBAcTEUJhaW5icmlkZ2UgSXNsYW5kMRMwEQYDVQQKEwpSR25ldCwgTExDMQ8w -DQYDVQQLEwZQU0duZXQxHDAaBgkqhkiG9w0BCQEWDXJhbmR5QHBzZy5jb22CCQC3 
-N/5pKFRTjDANBgkqhkiG9w0BAQUFAAOBgQBHBN06mk++v2fb3DnDiwt0Xqna4oNH -LNN69VaKLHhQ8J0shO4386E9ejLTutWO5VCdRim3a7WuneYSIncFBY76ddipWmuK -chitDDRUn/ccx4pkPoiHBGqthMqSbNGVsamAMOAJzzPyGXdur46wpzz6DtWObsQg -2/Q6evShgNYmtg== ------END CERTIFICATE----- diff --git a/scripts/rrd-rcynic-history.py b/scripts/rrd-rcynic-history.py deleted file mode 100644 index 8a0d50a8..00000000 --- a/scripts/rrd-rcynic-history.py +++ /dev/null @@ -1,201 +0,0 @@ -# $Id$ -# -# Copyright (C) 2011-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Parse traffic data out of rynic XML output, whack it a bit, and stuff -it into one or more RRDs (Round Robin Databases -- see rrdtool). - -Haven't decided yet whether to draw the resulting pictures here or -elsewhere. - -This is an initial adaptation of analyze-rcynic-history.py, which uses -gnuplot and a shelve database. It's also my first attempt to do -anything with rrdtool, so no doubt I'll get half of it wrong. 
-""" - -import mailbox -import sys -import urlparse -import os -import time - -from xml.etree.cElementTree import (ElementTree as ElementTree, - fromstring as ElementTreeFromString) - -os.putenv("TZ", "UTC") -time.tzset() - -def parse_utc(s): - return int(time.mktime(time.strptime(s, "%Y-%m-%dT%H:%M:%SZ"))) - -class Host(object): - """ - A host object represents all the data collected for one host for a given session. - """ - - def __init__(self, hostname, timestamp): - self.hostname = hostname - self.timestamp = timestamp - self.elapsed = 0 - self.connections = 0 - self.failures = 0 - self.uris = set() - - def add_connection(self, elt): - self.elapsed += parse_utc(elt.get("finished")) - parse_utc(elt.get("started")) - self.connections += 1 - if elt.get("error") is not None: - self.failures += 1 - - def add_object_uri(self, u): - self.uris.add(u) - - @property - def failed(self): - return 1 if self.failures > 0 else 0 - - @property - def objects(self): - return len(self.uris) - - field_table = (("timestamp", None, None, None), - ("connections", "GAUGE", "Connections", "FF0000"), - ("objects", "GAUGE", "Objects", "00FF00"), - ("elapsed", "GAUGE", "Fetch Time", "0000FF"), - ("failed", "ABSOLUTE", "Failed", "00FFFF")) - - @property - def field_values(self): - return tuple(str(getattr(self, field[0])) for field in self.field_table) - - @classmethod - def field_ds_specifiers(cls, heartbeat = 24 * 60 * 60, minimum = 0, maximum = "U"): - return ["DS:%s:%s:%s:%s:%s" % (field[0], field[1], heartbeat, minimum, maximum) - for field in cls.field_table if field[1] is not None] - - @classmethod - def field_graph_specifiers(cls, hostname): - result = [] - for field in cls.field_table: - if field[1] is not None: - result.append("DEF:%s=%s.rrd:%s:AVERAGE" % (field[0], hostname, field[0])) - result.append("'LINE1:%s#%s:%s'" % (field[0], field[3], field[2])) - return result - - def save(self, rrdtable): - rrdtable.add(self.hostname, self.field_values) - -class Session(dict): - """ - 
A session corresponds to one XML file. This is a dictionary of Host - objects, keyed by hostname. - """ - - def __init__(self, timestamp): - dict.__init__(self) - self.timestamp = timestamp - - @property - def hostnames(self): - return set(self.iterkeys()) - - def add_connection(self, elt): - hostname = urlparse.urlparse(elt.text.strip()).hostname - if hostname not in self: - self[hostname] = Host(hostname, self.timestamp) - self[hostname].add_connection(elt) - - def add_object_uri(self, u): - h = urlparse.urlparse(u).hostname - if h and h in self: - self[h].add_object_uri(u) - - def save(self, rrdtable): - for h in self.itervalues(): - h.save(rrdtable) - -class RRDTable(dict): - """ - Final data we're going to be sending to rrdtool. We need to buffer - it until we're done because we have to sort it. Might be easier - just to sort the maildir, then again it might be easier to get rid - of the maildir too once we're dealing with current data. We'll see. - """ - - def __init__(self, rrdtool = sys.stdout): - dict.__init__(self) - self.rrdtool = rrdtool - - def add(self, hostname, data): - if hostname not in self: - self[hostname] = [] - self[hostname].append(data) - - def sort(self): - for data in self.itervalues(): - data.sort() - - @property - def oldest(self): - return min(min(datum[0] for datum in data) for data in self.itervalues()) - - rras = tuple("RRA:AVERAGE:0.5:%s:9600" % steps for steps in (1, 4, 24)) - - def create(self): - start = self.oldest - ds_list = Host.field_ds_specifiers() - ds_list.extend(self.rras) - for hostname in self: - if not os.path.exists("%s.rrd" % hostname): - self.rrdtool("create %s.rrd --start %s --step 3600 %s\n" % (hostname, start, " ".join(ds_list))) - - def update(self): - for hostname, data in self.iteritems(): - for datum in data: - self.rrdtool("update %s.rrd %s\n" % (hostname, ":".join(str(d) for d in datum))) - - def graph(self): - for hostname in self: - self.rrdtool("graph %s.png --start -90d %s\n" % (hostname, " 
".join(Host.field_graph_specifiers(hostname)))) - -mb = mailbox.Maildir("/u/sra/rpki/rcynic-xml", factory = None, create = False) - -rrdtable = RRDTable() - -for i, key in enumerate(mb.iterkeys(), 1): - sys.stderr.write("\r%s %d/%d..." % ("|\\-/"[i & 3], i, len(mb))) - - assert not mb[key].is_multipart() - input = ElementTreeFromString(mb[key].get_payload()) - date = input.get("date") - sys.stderr.write("%s..." % date) - session = Session(parse_utc(date)) - for elt in input.findall("rsync_history"): - session.add_connection(elt) - for elt in input.findall("validation_status"): - if elt.get("generation") == "current": - session.add_object_uri(elt.text.strip()) - session.save(rrdtable) - - # XXX - #if i > 4: break - -sys.stderr.write("\n") - -rrdtable.create() -rrdtable.sort() -rrdtable.update() -rrdtable.graph() diff --git a/scripts/setup-rootd.sh b/scripts/setup-rootd.sh deleted file mode 100644 index 41a271b8..00000000 --- a/scripts/setup-rootd.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh - -# -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -# Setting up rootd requires cross-certifying rpkid's resource-holding -# BPKI trust anchor under the BPKI trust anchor that rootd uses. This -# script handles that, albiet in a very ugly way. 
-# -# Filenames are wired in, you might need to change these if you've -# done something more complicated. - -export RANDFILE=.OpenSSL.whines.unless.I.set.this -export BPKI_DIRECTORY=`pwd`/bpki/servers - -openssl=../openssl/openssl/apps/openssl - -$openssl ca -notext -batch -config rpki.conf \ - -ss_cert bpki/resources/ca.cer \ - -out $BPKI_DIRECTORY/child.cer \ - -extensions ca_x509_ext_xcert0 - -$openssl x509 -noout -text -in $BPKI_DIRECTORY/child.cer diff --git a/scripts/show-cms-ee-certs.sh b/scripts/show-cms-ee-certs.sh deleted file mode 100644 index 4f5168c7..00000000 --- a/scripts/show-cms-ee-certs.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh - -# -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -openssl=../openssl/openssl/apps/openssl - -for i in "$@" -do - $openssl cms -verify -noverify -out /dev/null -inform DER -certsout /dev/stdout -in $1 | - $openssl x509 -noout -text -certopt no_sigdump,no_pubkey -done diff --git a/scripts/show-tracking-data.py b/scripts/show-tracking-data.py deleted file mode 100644 index 07e0a144..00000000 --- a/scripts/show-tracking-data.py +++ /dev/null @@ -1,39 +0,0 @@ -# $Id$ -# -# Copyright (C) 2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Show tracking data for a bunch of objects retrieved by rcynic. - -This script takes one required argument, which is the name of a -directory tree containing the validated outpt of an rcynic run. If -you follow the default naming scheme this will be -/some/where/rcynic-data/authenticated. 
-""" - -import os -import sys -import rpki.x509 -import rpki.sundial - -rcynic_dir = sys.argv[1] - -for root, dirs, files in os.walk(rcynic_dir): - for f in files: - path = os.path.join(root, f) - date = rpki.sundial.datetime.utcfromtimestamp(os.stat(path).st_mtime) - uri = "rsync://" + path[len(rcynic_dir):].lstrip("/") - obj = rpki.x509.uri_dispatch(uri)(DER_file = path) - print date, obj.tracking_data(uri) diff --git a/scripts/signed-object-dates.py b/scripts/signed-object-dates.py deleted file mode 100644 index fefd9448..00000000 --- a/scripts/signed-object-dates.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python -# $Id$ - -# Extract notBefore, and notAfter values from an RPKI signed object; -# if the object is a manifest, also extract thisUpdate and nextUpdate. - -# Copyright (C) 2013 Dragon Research Labs ("DRL") -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. 
- -import sys -import rpki.POW - -extract_flags = (rpki.POW.CMS_NOCRL | - rpki.POW.CMS_NO_SIGNER_CERT_VERIFY | - rpki.POW.CMS_NO_ATTR_VERIFY | - rpki.POW.CMS_NO_CONTENT_VERIFY) - -for fn in sys.argv[1:]: - cls = rpki.POW.Manifest if fn.endswith(".mft") else rpki.POW.CMS - cms = cls.derReadFile(fn) - cer = cms.certs()[0] - print fn - print " notBefore: ", cer.getNotBefore() - if fn.endswith(".mft"): - cms.verify(rpki.POW.X509Store(), None, extract_flags) - print " thisUpdate:", cms.getThisUpdate() - print " nextUpdate:", cms.getNextUpdate() - print " notAfter: ", cer.getNotAfter() - print diff --git a/scripts/splitbase64.xsl b/scripts/splitbase64.xsl deleted file mode 100644 index 2b2d2fb1..00000000 --- a/scripts/splitbase64.xsl +++ /dev/null @@ -1,66 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/scripts/testbed-rootcert.py b/scripts/testbed-rootcert.py deleted file mode 100644 index 0716be2f..00000000 --- a/scripts/testbed-rootcert.py +++ /dev/null @@ -1,66 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Generate config for a test RPKI root certificate for resources -specified in asns.csv and prefixes.csv. 
- -This script is separate from arin-to-csv.py so that we can convert on -the fly rather than having to pull the entire database into memory. -""" - -import sys -from rpki.csv_utils import csv_reader - -if len(sys.argv) not in (2, 4): - sys.exit("Usage: %s holder [asns.csv prefixes.csv]" % sys.argv[0]) - -print '''\ -[req] -default_bits = 2048 -default_md = sha256 -distinguished_name = req_dn -prompt = no -encrypt_key = no - -[req_dn] -CN = Pseudo-%(HOLDER)s testbed root RPKI certificate - -[x509v3_extensions] -basicConstraints = critical,CA:true -subjectKeyIdentifier = hash -keyUsage = critical,keyCertSign,cRLSign -subjectInfoAccess = 1.3.6.1.5.5.7.48.5;URI:rsync://%(holder)s.rpki.net/rpki/%(holder)s/,1.3.6.1.5.5.7.48.10;URI:rsync://%(holder)s.rpki.net/rpki/%(holder)s/root.mft -certificatePolicies = critical,1.3.6.1.5.5.7.14.2 -sbgp-autonomousSysNum = critical,@rfc3779_asns -sbgp-ipAddrBlock = critical,@rfc3997_addrs - -[rfc3779_asns] -''' % { "holder" : sys.argv[1].lower(), - "HOLDER" : sys.argv[1].upper() } - -for i, asn in enumerate(asn for handle, asn in csv_reader(sys.argv[2] if len(sys.argv) > 2 else "asns.csv", columns = 2)): - print "AS.%d = %s" % (i, asn) - -print '''\ - -[rfc3997_addrs] - -''' - -for i, prefix in enumerate(prefix for handle, prefix in csv_reader(sys.argv[3] if len(sys.argv) > 2 else "prefixes.csv", columns = 2)): - v = 6 if ":" in prefix else 4 - print "IPv%d.%d = %s" % (v, i, prefix) diff --git a/scripts/translate-handles.py b/scripts/translate-handles.py deleted file mode 100644 index 49848277..00000000 --- a/scripts/translate-handles.py +++ /dev/null @@ -1,49 +0,0 @@ -# $Id$ -# -# Copyright (C) 2010-2012 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Translate handles from the ones provided in a database dump into the -ones we use in our testbed. This has been broken out into a separate -program for two reasons: - -- Conversion of some of the RIR data is a very slow process, and it's - both annoying and unnecessary to run it every time we add a new - participant to the testbed. - -- This handle translation business now has fingers into half a dozen - scripts, so it needs refactoring in any case, either as a common - library function or as a separate script. - -This program takes a list of .CSV files on its command line, and -rewrites them as needed after performing the translation. 
-""" - -import os -import sys -from rpki.csv_utils import csv_reader, csv_writer - -translations = dict((src, dst) for src, dst in csv_reader("translations.csv", columns = 2)) - -for filename in sys.argv[1:]: - - f = csv_writer(filename) - - for cols in csv_reader(filename): - if cols[0] in translations: - cols[0] = translations[cols[0]] - f.writerow(cols) - - f.close() diff --git a/scripts/upgrade-add-ghostbusters.py b/scripts/upgrade-add-ghostbusters.py deleted file mode 100644 index a8c8a92b..00000000 --- a/scripts/upgrade-add-ghostbusters.py +++ /dev/null @@ -1,73 +0,0 @@ -# $Id$ -# -# Copyright (C) 2009--2011 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Add SQL tables needed for Ghostbusters support. 
-Most of the code here lifted from rpki-sql-setup.py -""" - -import getopt, sys, rpki.config, warnings - -from rpki.mysql_import import MySQLdb - -def fix(name, *statements): - db = MySQLdb.connect(db = cfg.get("sql-database", section = name), - user = cfg.get("sql-username", section = name), - passwd = cfg.get("sql-password", section = name)) - cur = db.cursor() - for statement in statements: - cur.execute(statement) - db.commit() - db.close() - -cfg_file = None - -opts, argv = getopt.getopt(sys.argv[1:], "c:h?", ["config=", "help"]) -for o, a in opts: - if o in ("-h", "--help", "-?"): - print __doc__ - sys.exit(0) - if o in ("-c", "--config"): - cfg_file = a - -cfg = rpki.config.parser(cfg_file, "myrpki") - -fix("irdbd", """ - CREATE TABLE ghostbuster_request ( - ghostbuster_request_id SERIAL NOT NULL, - self_handle VARCHAR(40) NOT NULL, - parent_handle VARCHAR(40), - vcard LONGBLOB NOT NULL, - PRIMARY KEY (ghostbuster_request_id) - ) ENGINE=InnoDB; -""") - -fix("rpkid", """ - CREATE TABLE ghostbuster ( - ghostbuster_id SERIAL NOT NULL, - vcard LONGBLOB NOT NULL, - cert LONGBLOB NOT NULL, - ghostbuster LONGBLOB NOT NULL, - published DATETIME, - self_id BIGINT UNSIGNED NOT NULL, - ca_detail_id BIGINT UNSIGNED NOT NULL, - PRIMARY KEY (ghostbuster_id), - CONSTRAINT ghostbuster_self_id - FOREIGN KEY (self_id) REFERENCES self (self_id) ON DELETE CASCADE, - CONSTRAINT ghostbuster_ca_detail_id - FOREIGN KEY (ca_detail_id) REFERENCES ca_detail (ca_detail_id) ON DELETE CASCADE - ) ENGINE=InnoDB; -""") diff --git a/scripts/verify-bpki.sh b/scripts/verify-bpki.sh deleted file mode 100755 index 0e36d796..00000000 --- a/scripts/verify-bpki.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/sh - -# $Id$ -# -# Copyright (C) 2009-2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -# Tests of generated BPKI certificates. Kind of cheesy, but does test -# the basic stuff. - -exec 2>&1 - -for bpki in bpki/* -do - crls=$(find $bpki -name '*.crl') - - # Check that CRLs verify properly - for crl in $crls - do - echo -n "$crl: " - openssl crl -CAfile $bpki/ca.cer -noout -in $crl - done - - # Check that issued certificates verify properly - cat $bpki/ca.cer $crls | openssl verify -crl_check -CAfile /dev/stdin $(find $bpki -name '*.cer' ! -name 'ca.cer' ! -name '*.cacert.cer') - -done - -# Check that cross-certified BSC certificates verify properly -if test -d bpki/servers -then - cat bpki/servers/xcert.*.cer | openssl verify -verbose -CAfile bpki/servers/ca.cer -untrusted /dev/stdin bpki/resources/bsc.*.cer -fi diff --git a/scripts/whack-ripe-asns.py b/scripts/whack-ripe-asns.py deleted file mode 100644 index 9c702271..00000000 --- a/scripts/whack-ripe-asns.py +++ /dev/null @@ -1,83 +0,0 @@ -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Fix problems in asns.csv generated from RIPE's database. - -RIPE's database contains inconsistancies, overlaps, and format errors -that make it impossible to feed the output of ripe-to-csv.awk directly -into testbed-rootcert.py without OpenSSL rejecting the resulting -root.conf. This script takes a brute force approach to fixing this: -it converts all ASNs to range form, runs the resulting file through -the unix sort program to put the data into canonical order, then reads -it back, merging overlaps, and writing the result in a form acceptable -to testbed-rootcert.py. - -Since we're doing all this anyway, the script also merges adjacent -blocks. - -Ordinarily, it would be dangerous to have the same program act as both -the source and sink of a pipeline, particularly for such a large data -set, as the chance of deadlock would approach 100%, but in this case -we know that the sort program must consume and buffer (somehow) all of -its input before writing a single line of output, so a single script -can safely act as a filter both before and after sort. 
-""" - -import sys, subprocess - -sorter = subprocess.Popen(("sort", "-T.", "-n"), - stdin = subprocess.PIPE, - stdout = subprocess.PIPE) - -for line in sys.stdin: - handle, asn = line.split() - - if "-" in asn: - range_min, range_max = asn.split("-") - else: - range_min, range_max = asn, asn - - sorter.stdin.write("%d %d\n" % (long(range_min), long(range_max))) - -sorter.stdin.close() - -prev_min = None -prev_max = None - -def show(): - if prev_min and prev_max: - sys.stdout.write("x\t%s-%s\n" % (prev_min, prev_max)) - -for line in sorter.stdout: - this_min, this_max = line.split() - this_min = long(this_min) - this_max = long(this_max) - - if prev_min and prev_max and prev_max + 1 >= this_min: - prev_min = min(prev_min, this_min) - prev_max = max(prev_max, this_max) - - else: - show() - prev_min = this_min - prev_max = this_max - -show() - -sorter.stdout.close() - -sys.exit(sorter.wait()) diff --git a/scripts/whack-ripe-prefixes.py b/scripts/whack-ripe-prefixes.py deleted file mode 100644 index 52ea3f18..00000000 --- a/scripts/whack-ripe-prefixes.py +++ /dev/null @@ -1,101 +0,0 @@ -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Fix problems in prefixes.csv generated from RIPE's database. 
- -RIPE's database contains inconsistancies, overlaps, and format errors -that make it impossible to feed the output of ripe-to-csv.awk directly -into testbed-rootcert.py without OpenSSL rejecting the resulting -root.conf. This script takes a brute force approach to fixing this: -it converts all prefixes and address ranges into pairs of unsigned -decimal integers representing range min and range max, runs the -resulting 3+ million entry file through the unix sort program to put -the data into canonical order, then reads it back, merging overlaps -and converting everything back to ranges of IP addresses, and writing -the result in a form acceptable to testbed-rootcert.py. - -Since we're doing all this anyway, the script also merges adjacent -address blocks, not because OpenSSL can't handle them (it can) but -because doing so cuts out a lot of unnecessary I/O. - -Ordinarily, it would be dangerous to have the same program act as both -the source and sink of a pipeline, particularly for such a large data -set, as the chance of deadlock would approach 100%, but in this case -we know that the sort program must consume and buffer (somehow) all of -its input before writing a single line of output, so a single script -can safely act as a filter both before and after sort. 
-""" - -import sys -import subprocess -import rpki.ipaddrs - -sorter = subprocess.Popen(("sort", "-T.", "-n"), - stdin = subprocess.PIPE, - stdout = subprocess.PIPE) - -for line in sys.stdin: - handle, prefix = line.split() - - if "-" in prefix: - range_min, range_max = prefix.split("-") - range_min = rpki.ipaddrs.parse(range_min) - range_max = rpki.ipaddrs.parse(range_max) - - else: - address, length = prefix.split("/") - address = rpki.ipaddrs.parse(address) - mask = (1L << (address.bits - int(length))) - 1 - range_min = address & ~mask - range_max = address | mask - - sorter.stdin.write("%d %d\n" % (long(range_min), long(range_max))) - -sorter.stdin.close() - -prev_min = None -prev_max = None - -def address(number): - if number > 0xffffffff: - return rpki.ipaddrs.v6addr(number) - else: - return rpki.ipaddrs.v4addr(number) - -def show(): - if prev_min and prev_max: - sys.stdout.write("x\t%s-%s\n" % (address(prev_min), address(prev_max))) - -for line in sorter.stdout: - this_min, this_max = line.split() - this_min = long(this_min) - this_max = long(this_max) - - if prev_min and prev_max and prev_max + 1 >= this_min: - prev_min = min(prev_min, this_min) - prev_max = max(prev_max, this_max) - - else: - show() - prev_min = this_min - prev_max = this_max - -show() - -sorter.stdout.close() - -sys.exit(sorter.wait()) diff --git a/scripts/x509-dot.py b/scripts/x509-dot.py deleted file mode 100644 index 42e1543a..00000000 --- a/scripts/x509-dot.py +++ /dev/null @@ -1,170 +0,0 @@ -# $Id$ - -""" -Generate .dot description of a certificate tree. - -Copyright (C) 2009-2012 Internet Systems Consortium ("ISC") - -Permission to use, copy, modify, and distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. - -Portions copyright (C) 2008 American Registry for Internet Numbers ("ARIN") - -Permission to use, copy, modify, and distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. 
-""" - -import rpki.POW, sys, glob, os - -class x509(object): - - ski = None - aki = None - - show_file = False - show_ski = False - show_aki = False - show_issuer = True - show_subject = True - - cn_only = True - - subjects = {} - - def __init__(self, filename): - - while filename.startswith("./"): - filename = filename[2:] - - self.filename = filename - - f = open(filename, "rb") - text = f.read() - f.close() - - if "-----BEGIN" in text: - self.pow = rpki.POW.X509.pemRead(text) - else: - self.pow = rpki.POW.X509.derRead(text) - - - try: - self.ski = ":".join(["%02X" % ord(i) for i in self.pow.getSKI()]) - except: - pass - - try: - self.aki = ":".join(["%02X" % ord(i) for i in self.pow.getAKI()]) - except: - pass - - self.subject = self.canonize(self.pow.getSubject()) - self.issuer = self.canonize(self.pow.getIssuer()) - - if self.subject in self.subjects: - self.subjects[self.subject].append(self) - else: - self.subjects[self.subject] = [self] - - def canonize(self, name): - - # Probably should just use rpki.x509.X501DN class here. 
- - try: - if self.cn_only and name[0][0][0] == "2.5.4.3": - return name[0][0][1] - except: - pass - - return name - - def set_node(self, node): - - self.node = node - - def dot(self): - - label = [] - - if self.show_issuer: - label.append(("Issuer", self.issuer)) - - if self.show_subject: - label.append(("Subject", self.subject)) - - if self.show_file: - label.append(("File", self.filename)) - - if self.show_aki: - label.append(("AKI", self.aki)) - - if self.show_ski: - label.append(("SKI", self.ski)) - - print "#", repr(label) - - if len(label) > 1: - print '%s [shape = record, label = "{%s}"];' % (self.node, "|".join("{%s|%s}" % (x, y) for x, y in label if y is not None)) - else: - print '%s [label = "%s"];' % (self.node, label[0][1]) - - for issuer in self.subjects.get(self.issuer, ()): - - if issuer is self: - print "# Issuer is self" - issuer = None - - if issuer is not None and self.aki is not None and self.ski is not None and self.aki == self.ski: - print "# Self-signed" - issuer = None - - if issuer is not None and self.aki is not None and issuer.ski is not None and self.aki != issuer.ski: - print "# AKI does not match issuer SKI" - issuer = None - - if issuer is not None: - print "%s -> %s;" % (issuer.node, self.node) - - print - -certs = [] - -for topdir in sys.argv[1:] or ["."]: - for dirpath, dirnames, filenames in os.walk(topdir): - certs += [x509(dirpath + "/" + filename) for filename in filenames if filename.endswith(".cer")] - -for i, cert in enumerate(certs): - cert.set_node("cert_%d" % i) - -print """\ -digraph certificates { - -rotate = 90; -#size = "11,8.5"; -splines = true; -ratio = fill; - -""" - -for cert in certs: - cert.dot() - -print "}" diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..653d2d31 --- /dev/null +++ b/setup.py @@ -0,0 +1,93 @@ +# $Id$ +# +# Copyright (C) 2011-2013 Internet Systems Consortium ("ISC") +# +# Permission to use, copy, modify, and distribute this software for any +# purpose with or without fee 
is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +from distutils.core import setup, Extension +from glob import glob +import setup_extensions + +try: + import setup_autoconf as autoconf + +except ImportError: + class autoconf: + "Fake autoconf object to let --help work without autoconf." + sbindir = libexecdir = datarootdir = sysconfdir = CFLAGS = LDFLAGS = LIBS = "" + +try: + from rpki.version import VERSION + +except ImportError: + VERSION = "0.0" + +# pylint: disable=W0622 + +setup(name = "rpkitoolkit", + version = VERSION, + description = "RPKI Toolkit", + license = "BSD", + url = "http://rpki.net/", + cmdclass = {"build_scripts" : setup_extensions.build_scripts, + "install_scripts" : setup_extensions.install_scripts}, + packages = ["rpki", + "rpki.POW", + "rpki.irdb", + "rpki.gui", + "rpki.gui.app", + "rpki.gui.cacheview", + "rpki.gui.api", + "rpki.gui.routeview"], + ext_modules = [Extension("rpki.POW._POW", ["ext/POW.c"], + extra_compile_args = autoconf.CFLAGS.split(), + extra_link_args = (autoconf.LDFLAGS + " " + + autoconf.LIBS).split())], + package_data = {"rpki.gui.app" : + ["migrations/*.py", + "static/*/*", + "templates/*.html", + "templates/*/*.html", + "templatetags/*.py"], + "rpki.gui.cacheview" : + ["templates/*/*.html"]}, + scripts = [(autoconf.sbindir, + ["rpkic", + "rpki-confgen", + "rpki-start-servers", + "rpki-sql-backup", + "rpki-sql-setup", + "portal-gui/scripts/rpki-manage", + 
"portal-gui/scripts/rpkigui-query-routes", + "irbe_cli"]), + (autoconf.libexecdir, + ["irdbd", + "pubd", + "rootd", + "rpkid", + "portal-gui/scripts/rpkigui-import-routes", + "portal-gui/scripts/rpkigui-check-expired", + "portal-gui/scripts/rpkigui-rcynic", + "portal-gui/scripts/rpkigui-apache-conf-gen"])], + data_files = [(autoconf.sysconfdir + "/rpki", + ["rpki-confgen.xml"]), + (autoconf.datarootdir + "/rpki/wsgi", + ["portal-gui/rpki.wsgi"]), + (autoconf.datarootdir + "/rpki/media/css", + glob("rpki/gui/app/static/css/*")), + (autoconf.datarootdir + "/rpki/media/js", + glob("rpki/gui/app/static/js/*")), + (autoconf.datarootdir + "/rpki/media/img", + glob("rpki/gui/app/static/img/*")), + (autoconf.datarootdir + "/rpki/upgrade-scripts", + glob("upgrade-scripts/*"))]) diff --git a/setup_extensions.py b/setup_extensions.py new file mode 100644 index 00000000..12b123aa --- /dev/null +++ b/setup_extensions.py @@ -0,0 +1,94 @@ +# $Id$ +# +# This module extends the stock distutils install_setup code to +# support installation into multiple target directories, with +# semantics similar to what distutils already supports for +# script_files. The bulk of this code is taken directly from the +# stock distutils package, with minor changes. As such, I consider +# this to be a derivative work of the distutils package for copyright +# purposes. + +from distutils.util import change_root, convert_path +from distutils.command.build_scripts import build_scripts as _build_scripts +from distutils.command.install_scripts import install_scripts as _install_scripts +from distutils import log +from stat import S_IMODE +import os + +class build_scripts(_build_scripts): + """ + Hacked version of distutils.build_scripts, designed to support + multiple target installation directories like install_data does. + + [(target_directory, [list_of_source_scripts]), ...] 
+ + Most of the real work is in the companion hacked install_scripts, + but we need to tweak the list of source files that build_scripts + pulls out of the Distribution object. + """ + + def finalize_options(self): + _build_scripts.finalize_options(self) + self.scripts = [] + for script in self.distribution.scripts: + if isinstance(script, str): + self.scripts.append(script) + else: + self.scripts.extend(script[1]) + +class install_scripts(_install_scripts): + """ + Hacked version of distutils.install_scripts, designed to support + multiple target installation directories like install_data does. + + [(target_directory, [list_of_source_scripts]), ...] + + The code here is a tweaked combination of what the stock + install_scripts and install_data classes do. + """ + + user_options = _install_scripts.user_options + [ + ("root=", None, "install everything relative to this alternate root directory")] + + def initialize_options(self): + _install_scripts.initialize_options(self) + self.outfiles = [] + self.root = None + + def finalize_options (self): + self.set_undefined_options("build", + ("build_scripts", "build_dir")) + self.set_undefined_options("install", + ("install_scripts", "install_dir"), + ("root", "root"), + ("force", "force"), + ("skip_build", "skip_build")) + + def run(self): + if not self.skip_build: + self.run_command("build_scripts") + for script in self.distribution.scripts: + if isinstance(script, str): + fn = os.path.join(self.build_dir, os.path.basename(convert_path(script))) + out, _ = self.copy_file(fn, self.install_dir) + self.outfiles.append(out) + else: + dn = convert_path(script[0]) + if not os.path.isabs(dn): + dn = os.path.join(self.install_dir, dn) + elif self.root: + dn = change_root(self.root, dn) + self.mkpath(dn) + if not script[1]: + self.outfiles.append(dn) + else: + for s in script[1]: + fn = os.path.join(self.build_dir, os.path.basename(convert_path(s))) + out, _ = self.copy_file(fn, dn) + self.outfiles.append(out) + if os.name == 
"posix": + for fn in self.get_outputs(): + mode = S_IMODE(os.stat(fn).st_mode) | 0555 + log.info("changing mode of %s to %o", fn, mode) + if not self.dry_run: + os.chmod(fn, mode) diff --git a/utils/Makefile.in b/utils/Makefile.in deleted file mode 100644 index c89fdff5..00000000 --- a/utils/Makefile.in +++ /dev/null @@ -1,9 +0,0 @@ -# $Id$ - -SUBDIRS = uri print_rpki_manifest print_roa hashdir find_roa scan_roas scan_routercerts - -all clean test distclean install deinstall uninstall:: - @for i in ${SUBDIRS}; do echo "Making $@ in $$i"; (cd $$i && ${MAKE} $@); done - -distclean:: - rm -f Makefile diff --git a/utils/README b/utils/README deleted file mode 100644 index edbd793b..00000000 --- a/utils/README +++ /dev/null @@ -1,12 +0,0 @@ -$Id$ - -A collection of small RPKI utility programs which can be combined in -various useful ways by relying parties or by rpkid test scripts. - -See: - -- The primary documentation at http://trac.rpki.net/ - -- The PDF manual in ../doc/manual.pdf, or - -- The flat text page ../doc/doc.RPKI.Utils diff --git a/utils/find_roa/Makefile.in b/utils/find_roa/Makefile.in deleted file mode 100644 index 36c68e01..00000000 --- a/utils/find_roa/Makefile.in +++ /dev/null @@ -1,56 +0,0 @@ -# $Id$ - -NAME = find_roa - -BIN = ${NAME} -SRC = ${NAME}.c -OBJ = ${NAME}.o - -CFLAGS = @CFLAGS@ -LDFLAGS = @LDFLAGS@ -LIBS = @LIBS@ - -INSTALL = @INSTALL@ -m 555 - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -libdir = @libdir@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all: ${BIN} - -clean: - rm -rf ${BIN} ${OBJ} ${BIN}.dSYM - -${BIN}: ${SRC} - ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} - - -ROA_DIR = ${abs_top_builddir}/rcynic/rcynic-data/authenticated - -TEST_ARGS = ${ROA_DIR} 10.3.0.44 10.2.0.6 
10.0.0.0/24 - -test: ${BIN} -# if test -d ${ROA_DIR}; then ./${BIN} ${TEST_ARGS} ; else :; fi - if test -d ${ROA_DIR}; then sh ./test_roa.sh ${TEST_ARGS} ; else :; fi - -install: all - if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi - ${INSTALL} ${BIN} ${DESTDIR}${bindir} - -deinstall uninstall: - rm -f ${DESTDIR}${bindir}/${BIN} - -distclean: clean - rm -rf hashed-pem-dir - rm -f Makefile diff --git a/utils/find_roa/find_roa.c b/utils/find_roa/find_roa.c deleted file mode 100644 index a14242c8..00000000 --- a/utils/find_roa/find_roa.c +++ /dev/null @@ -1,356 +0,0 @@ -/* - * Copyright (C) 2014 Dragon Research Labs ("DRL") - * Portions copyright (C) 2006--2008 American Registry for Internet Numbers ("ARIN") - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notices and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ARIN DISCLAIM ALL - * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR - * ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL - * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA - * OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER - * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR - * PERFORMANCE OF THIS SOFTWARE. - */ - -/* $Id$ */ - -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include - -#ifndef FILENAME_MAX -#define FILENAME_MAX 1024 -#endif - -#ifndef ADDR_RAW_BUF_LEN -#define ADDR_RAW_BUF_LEN 16 -#endif - - - -/* - * Error handling. 
- */ - -#define _lose(_msg_, _file_) \ - do { \ - if (_file_) \ - fprintf(stderr, "%s:%d: %s: %s\n", __FILE__, __LINE__, _msg_, _file_); \ - else \ - fprintf(stderr, "%s:%d: %s\n", __FILE__, __LINE__, _msg_); \ - fprintf(stderr, "%s: %s\n", _msg_, _file_); \ - } while (0) - -#define lose(_msg_, _file_) \ - do { \ - _lose(_msg_, _file_); \ - goto done; \ - } while (0) - -#define lose_errno(_msg_, _file_) \ - do { \ - _lose(_msg_, _file_); \ - perror(NULL); \ - goto done; \ - } while (0) - -#define lose_openssl(_msg_, _file_) \ - do { \ - _lose(_msg_, _file_); \ - ERR_print_errors_fp(stderr); \ - goto done; \ - } while (0) - - -/* - * Extract a ROA prefix from the ASN.1 bitstring encoding. - */ -static int extract_roa_prefix(unsigned char *addr, - unsigned *prefixlen, - const ASN1_BIT_STRING *bs, - const unsigned afi) -{ - unsigned length; - - switch (afi) { - case IANA_AFI_IPV4: length = 4; break; - case IANA_AFI_IPV6: length = 16; break; - default: return 0; - } - - if (bs->length < 0 || bs->length > length) - return 0; - - if (bs->length > 0) { - memcpy(addr, bs->data, bs->length); - if ((bs->flags & 7) != 0) { - unsigned char mask = 0xFF >> (8 - (bs->flags & 7)); - addr[bs->length - 1] &= ~mask; - } - } - - memset(addr + bs->length, 0, length - bs->length); - - *prefixlen = (bs->length * 8) - (bs->flags & 7); - - return 1; -} - -/* - * Check str for a trailing suffix. - */ -static int has_suffix(const char *str, const char *suffix) -{ - size_t len_str, len_suffix; - assert(str != NULL && suffix != NULL); - len_str = strlen(str); - len_suffix = strlen(suffix); - return len_str >= len_suffix && !strcmp(str + len_str - len_suffix, suffix); -} - -/* - * Handle one object. 
- */ -static void file_handler(const char *filename, const unsigned prefix_afi, const unsigned char *prefix, const unsigned long prefixlen) -{ - unsigned char roa_prefix[ADDR_RAW_BUF_LEN]; - unsigned roa_prefixlen, roa_maxprefixlen, plen; - CMS_ContentInfo *cms = NULL; - BIO *b = NULL; - ROA *r = NULL; - int i, j, k, n; - unsigned long asid; - - if (!(b = BIO_new_file(filename, "rb"))) - lose_openssl("Couldn't open CMS file", filename); - - if ((cms = d2i_CMS_bio(b, NULL)) == NULL) - lose_openssl("Couldn't read CMS file", filename); - - BIO_free(b); - - if ((b = BIO_new(BIO_s_mem())) == NULL) - lose_openssl("Couldn't open ROA", filename); - - if (CMS_verify(cms, NULL, NULL, NULL, b, CMS_NOCRL | CMS_NO_SIGNER_CERT_VERIFY | CMS_NO_ATTR_VERIFY | CMS_NO_CONTENT_VERIFY) <= 0) - lose_openssl("Couldn't parse ROA CMS", filename); - - if ((r = ASN1_item_d2i_bio(ASN1_ITEM_rptr(ROA), b, NULL)) == NULL) - lose_openssl("Couldn't parse ROA", filename); - - asid = (unsigned long) ASN1_INTEGER_get(r->asID); - - for (i = 0; i < sk_ROAIPAddressFamily_num(r->ipAddrBlocks); i++) { - ROAIPAddressFamily *f = sk_ROAIPAddressFamily_value(r->ipAddrBlocks, i); - - /* - * AFI must match, SAFI must be null - */ - if (f->addressFamily->length != 2 || - prefix_afi != ((f->addressFamily->data[0] << 8) | (f->addressFamily->data[1]))) - continue; - - for (j = 0; j < sk_ROAIPAddress_num(f->addresses); j++) { - ROAIPAddress *a = sk_ROAIPAddress_value(f->addresses, j); - - if (!extract_roa_prefix(roa_prefix, &roa_prefixlen, a->IPAddress, prefix_afi)) - lose("Malformed ROA", filename); - - /* - * If the prefix we're looking for is bigger than the ROA - * prefix, the ROA can't possibly cover. - */ - if (prefixlen < roa_prefixlen) - continue; - - if (a->maxLength) - roa_maxprefixlen = ASN1_INTEGER_get(a->maxLength); - else - roa_maxprefixlen = roa_prefixlen; - - /* - * If the prefix we're looking for is smaller than the smallest - * allowed slice of the ROA prefix, the ROA can't possibly - * cover. 
- */ - if (prefixlen > roa_maxprefixlen) - continue; - - /* - * If we get this far, we have to compare prefixes. - */ - assert(roa_prefixlen <= ADDR_RAW_BUF_LEN * 8); - plen = prefixlen < roa_prefixlen ? prefixlen : roa_prefixlen; - k = 0; - while (plen >= 8 && prefix[k] == roa_prefix[k]) { - plen -= 8; - k++; - } - if (plen > 8 || ((prefix[k] ^ roa_prefix[k]) & (0xFF << (8 - plen))) != 0) - continue; - - /* - * If we get here, we have a match. - */ - printf("ASN %lu prefix ", asid); - switch (prefix_afi) { - case IANA_AFI_IPV4: - printf("%u.%u.%u.%u", prefix[0], prefix[1], prefix[2], prefix[3]); - break; - case IANA_AFI_IPV6: - for (n = 16; n > 1 && prefix[n-1] == 0x00 && prefix[n-2] == 0x00; n -= 2) - ; - for (k = 0; k < n; k += 2) - printf("%x%s", (prefix[k] << 8) | prefix[k+1], (k < 14 ? ":" : "")); - if (k < 16) - printf(":"); - break; - } - printf("/%lu ROA %s\n", prefixlen, filename); - goto done; - } - } - - done: - BIO_free(b); - CMS_ContentInfo_free(cms); - ROA_free(r); -} - -/* - * Walk a directory tree - */ -static int handle_directory(const char *name, const unsigned prefix_afi, const unsigned char *prefix, const unsigned long prefixlen) -{ - char path[FILENAME_MAX]; - struct dirent *d; - size_t len; - DIR *dir; - int ret = 0, need_slash; - - assert(name); - len = strlen(name); - assert(len > 0 && len < sizeof(path)); - need_slash = name[len - 1] != '/'; - - if ((dir = opendir(name)) == NULL) - lose_errno("Couldn't open directory", name); - - while ((d = readdir(dir)) != NULL) { - if (!strcmp(d->d_name, ".") || !strcmp(d->d_name, "..")) - continue; - if (len + strlen(d->d_name) + need_slash >= sizeof(path)) - lose("Constructed path name too long", d->d_name); - strcpy(path, name); - if (need_slash) - strcat(path, "/"); - strcat(path, d->d_name); - switch (d->d_type) { - case DT_DIR: - if (!handle_directory(path, prefix_afi, prefix, prefixlen)) - lose("Directory walk failed", path); - continue; - default: - if (has_suffix(path, ".roa")) - 
file_handler(path, prefix_afi, prefix, prefixlen); - continue; - } - } - - ret = 1; - - done: - if (dir) - closedir(dir); - return ret; -} - -static void usage (const char *jane, const int code) -{ - fprintf(code ? stderr : stdout, "usage: %s authtree prefix [prefix...]\n", jane); - exit(code); -} - -int main (int argc, char *argv[]) -{ - unsigned char prefix[ADDR_RAW_BUF_LEN]; - unsigned long prefixlen; - unsigned afi; - char *s = NULL, *p = NULL; - int i, len, ret = 1; - - if (argc == 2 && (!strcmp(argv[1], "-h") || !strcmp(argv[1], "--help"))) - usage(argv[0], 0); - - if (argc < 3) - usage(argv[0], 1); - - OpenSSL_add_all_algorithms(); - ERR_load_crypto_strings(); - - for (i = 2; i < argc; i++) { - - if ((s = strdup(argv[i])) == NULL) - lose("Couldn't strdup()", argv[i]); - - if ((p = strchr(s, '/')) != NULL) - *p++ = '\0'; - - len = a2i_ipadd(prefix, s); - - switch (len) { - case 4: afi = IANA_AFI_IPV4; break; - case 16: afi = IANA_AFI_IPV6; break; - default: lose("Unknown AFI", argv[i]); - } - - if (p) { - if (*p == '\0' || - (prefixlen = strtoul(p, &p, 10)) == ULONG_MAX || - *p != '\0' || - prefixlen > ADDR_RAW_BUF_LEN * 8) - lose("Bad prefix length", argv[i]); - } else { - prefixlen = len * 8; - } - - assert(prefixlen <= ADDR_RAW_BUF_LEN * 8); - - free(s); - p = s = NULL; - - if (!handle_directory(argv[1], afi, prefix, prefixlen)) - goto done; - - } - - ret = 0; - - done: - if (s) - free(s); - return ret; -} diff --git a/utils/find_roa/test_roa.sh b/utils/find_roa/test_roa.sh deleted file mode 100644 index 43d20898..00000000 --- a/utils/find_roa/test_roa.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/sh - -# -# Copyright (C) 2008 American Registry for Internet Numbers ("ARIN") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND ARIN DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# $Id$ - -auth_dir="${1?"usage: $0 authenticated_certificate_tree prefix [prefix...]"}" - -rm -rf hashed-pem-dir -mkdir hashed-pem-dir - -../hashdir/hashdir "$auth_dir" hashed-pem-dir >/dev/null - -./find_roa "$@" | awk ' - $1 == "ASN" && $3 == "prefix" && $5 == "ROA" { - print ""; - print "Found match:" - print; - print "Verifying certificate chain and signatures:" - roa = $6; - if (!system("../../openssl/openssl/apps/openssl cms -verify -inform DER -out /dev/null -CApath hashed-pem-dir -in " roa)) - system("../print_roa/print_roa " roa); - }' diff --git a/utils/hashdir/Makefile.in b/utils/hashdir/Makefile.in deleted file mode 100644 index c0cf448a..00000000 --- a/utils/hashdir/Makefile.in +++ /dev/null @@ -1,55 +0,0 @@ -# $Id$ - -NAME = hashdir - -BIN = ${NAME} -SRC = ${NAME}.c -OBJ = ${NAME}.o - -CFLAGS = @CFLAGS@ -LDFLAGS = @LDFLAGS@ -LIBS = @LIBS@ - -INSTALL = @INSTALL@ -m 555 - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -libdir = @libdir@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all: ${BIN} - -clean:: - rm -rf ${BIN} ${OBJ} ${BIN}.dSYM - -${BIN}: ${SRC} - ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} - -INPUT = ${abs_top_builddir}/rcynic/rcynic-data/authenticated -OUTPUT = hashed-pem-dir - -test: ${BIN} - if test 
-d ${INPUT}; then rm -rf ${OUTPUT} && mkdir ${OUTPUT} && ./hashdir ${INPUT} ${OUTPUT}; else :; fi - -clean:: - rm -rf ${OUTPUT} - -install: all - if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi - ${INSTALL} ${BIN} ${DESTDIR}${bindir} - -deinstall uninstall: - rm -f ${DESTDIR}${bindir}/${BIN} - -distclean: clean - rm -f Makefile diff --git a/utils/hashdir/hashdir.c b/utils/hashdir/hashdir.c deleted file mode 100644 index 1b5b0f46..00000000 --- a/utils/hashdir/hashdir.c +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright (C) 2014 Dragon Research Labs ("DRL") - * Portions copyright (C) 2006--2008 American Registry for Internet Numbers ("ARIN") - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notices and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ARIN DISCLAIM ALL - * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR - * ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL - * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA - * OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER - * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR - * PERFORMANCE OF THIS SOFTWARE. - */ - -/* $Id$ */ - -/* - * Read a directory tree of DER certificates and CRLs and copy - * them into a PEM format directory with names in the hash format - * that OpenSSL's lookup routines expect. - */ - -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include - -#ifndef FILENAME_MAX -#define FILENAME_MAX 1024 -#endif - -static int verbose = 1; - -/* - * Error handling. 
- */ - -#define _lose(_msg_, _file_) \ - do { \ - fprintf(stderr, "%s: %s\n", _msg_, _file_); \ - } while (0) - -#define lose(_msg_, _file_) \ - do { \ - _lose(_msg_, _file_); \ - goto done; \ - } while (0) - -#define lose_errno(_msg_, _file_) \ - do { \ - _lose(_msg_, _file_); \ - perror(NULL); \ - goto done; \ - } while (0) - -#define lose_openssl(_msg_, _file_) \ - do { \ - _lose(_msg_, _file_); \ - ERR_print_errors_fp(stderr); \ - goto done; \ - } while (0) - -/* - * Check str for a trailing suffix. - */ -static int has_suffix(const char *str, const char *suffix) -{ - size_t len_str, len_suffix; - assert(str != NULL && suffix != NULL); - len_str = strlen(str); - len_suffix = strlen(suffix); - return len_str >= len_suffix && !strcmp(str + len_str - len_suffix, suffix); -} - -/* - * Handle one object. - */ -static void file_handler(const char *filename, const char *targetdir) -{ - char path[FILENAME_MAX]; - unsigned long hash; - const char *fmt; - X509_CRL *crl = NULL; - X509 *cer = NULL; - BIO *b = NULL; - int i, is_crl; - - if (has_suffix(filename, ".cer")) - is_crl = 0; - else if (has_suffix(filename, ".crl")) - is_crl = 1; - else - return; /* Ignore if neither certificate nor CRL */ - - if (verbose) - printf("Reading %s\n", filename); - - if (!(b = BIO_new_file(filename, "rb"))) - lose_openssl("Couldn't open input file", filename); - - if (is_crl - ? 
!(crl = d2i_X509_CRL_bio(b, NULL)) - : !(cer = d2i_X509_bio(b, NULL))) - lose_openssl("Couldn't read DER object", filename); - - BIO_free(b); - b = NULL; - - if (is_crl) { - hash = X509_NAME_hash(X509_CRL_get_issuer(crl)); - fmt = "%s/%08lx.r%d"; - } else { - hash = X509_subject_name_hash(cer); - fmt = "%s/%08lx.%d"; - } - - for (i = 0; i < INT_MAX; i++) - if (snprintf(path, sizeof(path), fmt, targetdir, hash, i) == sizeof(path)) - lose("Path too long", filename); - else if (access(path, F_OK)) - break; - if (i == INT_MAX) - lose("No pathname available", filename); - - if (verbose) - printf("Writing %s\n", path); - - if (!(b = BIO_new_file(path, "w"))) - lose_openssl("Couldn't open output file", path); - - if (is_crl - ? !PEM_write_bio_X509_CRL(b, crl) - : !PEM_write_bio_X509(b, cer)) - lose_openssl("Couldn't write PEM object", path); - - done: - X509_free(cer); - X509_CRL_free(crl); - BIO_free(b); -} - -/* - * Walk a directory tree - */ -static int handle_directory(const char *name, const char *targetdir) -{ - char path[FILENAME_MAX]; - struct dirent *d; - size_t len; - DIR *dir; - int ret = 0, need_slash; - - assert(name); - len = strlen(name); - assert(len > 0 && len < sizeof(path)); - need_slash = name[len - 1] != '/'; - - if ((dir = opendir(name)) == NULL) - lose_errno("Couldn't open directory", name); - - while ((d = readdir(dir)) != NULL) { - if (!strcmp(d->d_name, ".") || !strcmp(d->d_name, "..")) - continue; - if (len + strlen(d->d_name) + need_slash >= sizeof(path)) - lose("Constructed path name too long", d->d_name); - strcpy(path, name); - if (need_slash) - strcat(path, "/"); - strcat(path, d->d_name); - switch (d->d_type) { - case DT_DIR: - if (!handle_directory(path, targetdir)) - lose("Directory walk failed", path); - continue; - default: - file_handler(path, targetdir); - continue; - } - } - - ret = 1; - - done: - if (dir) - closedir(dir); - return ret; -} - -static void usage (const char *jane, const int code) -{ - fprintf(code ? 
stderr : stdout, "usage: %s input-directory output-directory\n", jane); - exit(code); -} - -int main(int argc, char *argv[]) -{ - if (argc == 2 && (!strcmp(argv[1], "-h") || !strcmp(argv[1], "--help"))) - usage(argv[0], 0); - - if (argc != 3) - usage(argv[0], 1); - - return !handle_directory(argv[1], argv[2]); -} diff --git a/utils/print_roa/Makefile.in b/utils/print_roa/Makefile.in deleted file mode 100644 index 5999b351..00000000 --- a/utils/print_roa/Makefile.in +++ /dev/null @@ -1,52 +0,0 @@ -# $Id$ - -NAME = print_roa - -BIN = ${NAME} -SRC = ${NAME}.c -OBJ = ${NAME}.o - -CFLAGS = @CFLAGS@ -LDFLAGS = @LDFLAGS@ -LIBS = @LIBS@ - -INSTALL = @INSTALL@ -m 555 - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -libdir = @libdir@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all: ${BIN} - -clean: - rm -rf ${BIN} ${OBJ} ${BIN}.dSYM - -${BIN}: ${SRC} - ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} - -ROA_DIR = ${abs_top_builddir}/rpkid/tests/smoketest.dir/publication - -test: all - -date -u +'now: %Y%m%d%H%M%SZ' - if test -d ${ROA_DIR}; then find ${ROA_DIR} -type f -name '*.roa' -print -exec ./${BIN} {} \; ; else :; fi - -install: all - if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi - ${INSTALL} ${BIN} ${DESTDIR}${bindir} - -deinstall uninstall: - rm -f ${DESTDIR}${bindir}/${BIN} - -distclean: clean - rm -f Makefile diff --git a/utils/print_roa/print_roa.c b/utils/print_roa/print_roa.c deleted file mode 100644 index c88fc092..00000000 --- a/utils/print_roa/print_roa.c +++ /dev/null @@ -1,384 +0,0 @@ -/* - * Copyright (C) 2014 Dragon Research Labs ("DRL") - * Portions copyright (C) 2008 American Registry for Internet Numbers ("ARIN") - * - * Permission to use, copy, modify, and distribute this 
software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notices and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ARIN DISCLAIM ALL - * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR - * ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL - * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA - * OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER - * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR - * PERFORMANCE OF THIS SOFTWARE. - */ - -/* $Id$ */ - -/* - * Decoder test for ROAs. - * - * NB: This does -not- check the CMS signatures, just the encoding. - */ - -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include - -/* - * How much buffer space do we need for a raw address? - */ -#define ADDR_RAW_BUF_LEN 16 - - - -/* - * Extract signing time from CMS message. 
- */ - -static char * -extract_signingTime(CMS_ContentInfo *cms, char *buffer, size_t buflen) -{ - STACK_OF(CMS_SignerInfo) *sis = NULL; - CMS_SignerInfo *si = NULL; - X509_ATTRIBUTE *xa = NULL; - ASN1_TYPE *so = NULL; - int i = -1; - - if (cms == NULL || - buffer == NULL || - buflen < sizeof("20010401123456Z") || - (sis = CMS_get0_SignerInfos(cms)) == NULL || - sk_CMS_SignerInfo_num(sis) != 1 || - (si = sk_CMS_SignerInfo_value(sis, 0)) < 0 || - (i = CMS_signed_get_attr_by_NID(si, NID_pkcs9_signingTime, -1)) < 0 || - (xa = CMS_signed_get_attr(si, i)) == NULL || - xa->single || - sk_ASN1_TYPE_num(xa->value.set) != 1 || - (so = sk_ASN1_TYPE_value(xa->value.set, 0)) == NULL) - return NULL; - - assert(buflen > 2); - buffer[buflen - 1] = '\0'; - - switch (so->type) { - case V_ASN1_UTCTIME: - strcpy(buffer, (so->value.utctime->data[0] >= '5') ? "19" : "20"); - return strncpy(buffer + 2, (const char *) so->value.utctime->data, buflen - 3); - case V_ASN1_GENERALIZEDTIME: - return strncpy(buffer, (const char *) so->value.generalizedtime->data, buflen - 1); - default: - return NULL; - } -} - - - -/* - * Expand the bitstring form of an address into a raw byte array. - * At the moment this is coded for simplicity, not speed. - */ -static void addr_expand(unsigned char *addr, - const ASN1_BIT_STRING *bs, - const int length) -{ - assert(bs->length >= 0 && bs->length <= length); - if (bs->length > 0) { - memcpy(addr, bs->data, bs->length); - if ((bs->flags & 7) != 0) { - unsigned char mask = 0xFF >> (8 - (bs->flags & 7)); - addr[bs->length - 1] &= ~mask; - } - } - memset(addr + bs->length, 0, length - bs->length); -} - -/* - * Extract the prefix length from a bitstring. - */ -#define addr_prefixlen(bs) ((int) ((bs)->length * 8 - ((bs)->flags & 7))) - -/* - * Read ROA (CMS object) in DER format. - * - * NB: When invoked this way, CMS_verify() does -not- verify, it just decodes the ASN.1. 
- * - * Well, OK, this function has evolved to doing a lot more than just - * reading the object. Refactor or at least rename, someday. - */ -static ROA *read_roa(const char *filename, - const int print_cms, - const int print_roa, - const int print_signerinfo, - const int print_brief, - const int print_signingtime) -{ - unsigned char addr[ADDR_RAW_BUF_LEN]; - CMS_ContentInfo *cms = NULL; - const ASN1_OBJECT *oid = NULL; - char *asID = NULL; - BIGNUM *bn = NULL; - ROA *r = NULL; - char buf[512]; - BIO *b = NULL; - int i, j, k, n; - - if ((b = BIO_new_file(filename, "r")) == NULL || - (cms = d2i_CMS_bio(b, NULL)) == NULL) - goto done; - BIO_free(b); - b = NULL; - - if (print_signerinfo) { - STACK_OF(CMS_SignerInfo) *signerInfos = CMS_get0_SignerInfos(cms); - STACK_OF(X509) *certs = CMS_get1_certs(cms); - STACK_OF(X509_CRL) *crls = CMS_get1_crls(cms); - printf("Certificates: %d\n", certs ? sk_X509_num(certs) : 0); - printf("CRLs: %d\n", crls ? sk_X509_CRL_num(crls) : 0); - for (i = 0; i < sk_CMS_SignerInfo_num(signerInfos); i++) { - CMS_SignerInfo *si = sk_CMS_SignerInfo_value(signerInfos, i); - ASN1_OCTET_STRING *hash = NULL; - printf("SignerId[%d]: ", i); - if (CMS_SignerInfo_get0_signer_id(si, &hash, NULL, NULL) && hash != NULL) - for (j = 0; j < hash->length; j++) - printf("%02x%s", hash->data[j], j == hash->length - 1 ? "" : ":"); - else - printf("[Could not read SID]"); - if (certs) - for (j = 0; j < sk_X509_num(certs); j++) - if (!CMS_SignerInfo_cert_cmp(si, sk_X509_value(certs, j))) - printf(" [Matches certificate %d]", j); - if ((j = CMS_signed_get_attr_by_NID(si, NID_pkcs9_signingTime, -1)) >= 0) { - X509_ATTRIBUTE *xa = CMS_signed_get_attr(si, j); - if (xa && !xa->single && sk_ASN1_TYPE_num(xa->value.set) == 1) { - ASN1_TYPE *so = sk_ASN1_TYPE_value(xa->value.set, 0); - switch (so->type) { - case V_ASN1_UTCTIME: - printf(" [signingTime(U) %s%s]", - so->value.utctime->data[0] < '5' ? 
"20" : "19", - so->value.utctime->data); - break; - case V_ASN1_GENERALIZEDTIME: - printf(" [signingTime(G) %s]", - so->value.generalizedtime->data); - break; - } - } - } - printf("\n"); - } - sk_X509_pop_free(certs, X509_free); - sk_X509_CRL_pop_free(crls, X509_CRL_free); - } - - if ((b = BIO_new(BIO_s_mem())) == NULL || - CMS_verify(cms, NULL, NULL, NULL, b, CMS_NOCRL | CMS_NO_SIGNER_CERT_VERIFY | CMS_NO_ATTR_VERIFY | CMS_NO_CONTENT_VERIFY) <= 0 || - (r = ASN1_item_d2i_bio(ASN1_ITEM_rptr(ROA), b, NULL)) == NULL) - goto done; - BIO_free(b); - b = NULL; - - if (print_roa) { - - bn = ASN1_INTEGER_to_BN(r->asID, NULL); - asID = BN_bn2dec(bn); - - if (print_brief) { - - if (print_signingtime) { - char buffer[sizeof("20010401123456Z")], *b; - if (!extract_signingTime(cms, buffer, sizeof(buffer))) - goto done; - printf("%s ", buffer); - } - - fputs(asID, stdout); - - } else { - - if ((oid = CMS_get0_eContentType(cms)) == NULL) - goto done; - OBJ_obj2txt(buf, sizeof(buf), oid, 0); - printf("eContentType: %s\n", buf); - - if (r->version) - printf("version: %ld\n", ASN1_INTEGER_get(r->version)); - else - printf("version: 0 [Defaulted]\n"); - printf("asID: %s\n", asID); - } - - for (i = 0; i < sk_ROAIPAddressFamily_num(r->ipAddrBlocks); i++) { - - ROAIPAddressFamily *f = sk_ROAIPAddressFamily_value(r->ipAddrBlocks, i); - - unsigned afi = (f->addressFamily->data[0] << 8) | (f->addressFamily->data[1]); - - if (!print_brief) { - printf(" addressFamily: %x", afi); - if (f->addressFamily->length == 3) - printf("[%x]", f->addressFamily->data[2]); - printf("\n"); - } - - for (j = 0; j < sk_ROAIPAddress_num(f->addresses); j++) { - ROAIPAddress *a = sk_ROAIPAddress_value(f->addresses, j); - - if (print_brief) - printf(" "); - else - printf(" IPaddress: "); - - switch (afi) { - - case IANA_AFI_IPV4: - addr_expand(addr, a->IPAddress, 4); - printf("%d.%d.%d.%d", addr[0], addr[1], addr[2], addr[3]); - break; - - case IANA_AFI_IPV6: - addr_expand(addr, a->IPAddress, 16); - for (n = 16; n 
> 1 && addr[n-1] == 0x00 && addr[n-2] == 0x00; n -= 2) - ; - for (k = 0; k < n; k += 2) - printf("%x%s", (addr[k] << 8) | addr[k+1], (k < 14 ? ":" : "")); - if (k < 16) - printf(":"); - if (k == 0) - printf(":"); - break; - - default: - if (!print_brief) { - for (k = 0; k < a->IPAddress->length; k++) - printf("%s%02x", (k > 0 ? ":" : ""), a->IPAddress->data[k]); - printf("[%d]", (int) (a->IPAddress->flags & 7)); - } - break; - - } - - printf("/%u", addr_prefixlen(a->IPAddress)); - - if (a->maxLength) - printf("-%ld", ASN1_INTEGER_get(a->maxLength)); - - if (!print_brief) - printf("\n"); - } - } - if (print_brief) - printf("\n"); - } - - if (print_cms) { - if (print_roa) - printf("\n"); - fflush(stdout); - if ((b = BIO_new(BIO_s_fd())) == NULL) - goto done; - BIO_set_fd(b, 1, BIO_NOCLOSE); - CMS_ContentInfo_print_ctx(b, cms, 0, NULL); - BIO_free(b); - b = NULL; - } - - done: - if (ERR_peek_error()) - ERR_print_errors_fp(stderr); - BIO_free(b); - BN_free(bn); - if (asID) - OPENSSL_free(asID); - CMS_ContentInfo_free(cms); - return r; -} - - - -const static struct option longopts[] = { - { "brief", no_argument, NULL, 'b' }, - { "print-cms", no_argument, NULL, 'c' }, - { "help", no_argument, NULL, 'h' }, - { "signingtime", no_argument, NULL, 's' }, - { NULL } -}; - -static int usage (const char *jane, const int code) -{ - FILE *out = code ? stderr : stdout; - int i; - - fprintf(out, "usage: %s [options] ROA [ROA...]\n", jane); - fprintf(out, "options:\n"); - for (i = 0; longopts[i].name != NULL; i++) - fprintf(out, " -%c --%s\n", longopts[i].val, longopts[i].name); - - return code; -} - -/* - * Main program. 
- */ -int main (int argc, char *argv[]) -{ - int result = 0, print_brief = 0, print_signingtime = 0, print_cms = 0, c; - const char *jane = argv[0]; - ROA *r; - - OpenSSL_add_all_algorithms(); - ERR_load_crypto_strings(); - - while ((c = getopt_long(argc, argv, "bchs", longopts, NULL)) != -1) { - switch (c) { - case 'b': - print_brief = 1; - break; - case 'c': - print_cms = 1; - break; - case 's': - print_signingtime = 1; - break; - case 'h': - return usage(jane, 0); - default: - return usage(jane, 1); - } - } - - argc -= optind; - argv += optind; - - if (argc == 0) - return usage(jane, 1); - - while (argc-- > 0) { - r = read_roa(*argv++, print_cms, 1, !print_brief, print_brief, print_signingtime); - result |= r == NULL; - ROA_free(r); - } - return result; -} diff --git a/utils/print_roa/strip_roa.sh b/utils/print_roa/strip_roa.sh deleted file mode 100755 index e2dacf86..00000000 --- a/utils/print_roa/strip_roa.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/bin/sh - -# $Id$ -# -# Copyright (C) 2010 Internet Systems Consortium ("ISC") -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -# Strip boring parts of print_roa's output to make a (somewhat) terser -# description, one line per ROA. This is intended for use in -# comparing sets of ROAs using text comparision tools like "diff" or -# "comm". 
One could definitely do something prettier, but this -# suffices for basic tests. -# -# Use this as in a shell pipeline to postprocess print_roa's output. - -awk ' - /Certificate/ { - roa[++n] = ""; - } - /asID|addressFamily|IPaddress/ { - roa[n] = roa[n] " " $0; - } - END { - for (i in roa) - print roa[i]; - } -' | -tr -s \\011 \\040 | -sort -u diff --git a/utils/print_rpki_manifest/Makefile.in b/utils/print_rpki_manifest/Makefile.in deleted file mode 100644 index 22f1b16b..00000000 --- a/utils/print_rpki_manifest/Makefile.in +++ /dev/null @@ -1,52 +0,0 @@ -# $Id$ - -NAME = print_rpki_manifest - -BIN = ${NAME} -SRC = ${NAME}.c -OBJ = ${NAME}.o - -CFLAGS = @CFLAGS@ -LDFLAGS = @LDFLAGS@ -LIBS = @LIBS@ - -INSTALL = @INSTALL@ -m 555 - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -libdir = @libdir@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all: ${BIN} - -clean: - rm -rf ${BIN} ${OBJ} ${BIN}.dSYM - -${BIN}: ${SRC} - ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} - -MANIFEST_DIR = ${abs_top_builddir}/rpkid/tests/smoketest.dir/publication - -test: all - -date -u +'now: %Y%m%d%H%M%SZ' - if test -d ${MANIFEST_DIR}; then find ${MANIFEST_DIR} -type f -name '*.mnf' -print -exec ./${BIN} {} \; ; else :; fi - -install: all - if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi - ${INSTALL} ${BIN} ${DESTDIR}${bindir} - -deinstall uninstall: - rm -f ${DESTDIR}${bindir}/${BIN} - -distclean: clean - rm -f Makefile diff --git a/utils/print_rpki_manifest/print_rpki_manifest.c b/utils/print_rpki_manifest/print_rpki_manifest.c deleted file mode 100644 index f55f9916..00000000 --- a/utils/print_rpki_manifest/print_rpki_manifest.c +++ /dev/null @@ -1,235 +0,0 @@ -/* - * Copyright (C) 2014 Dragon Research Labs 
("DRL") - * Portions copyright (C) 2008 American Registry for Internet Numbers ("ARIN") - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notices and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ARIN DISCLAIM ALL - * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR - * ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL - * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA - * OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER - * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR - * PERFORMANCE OF THIS SOFTWARE. - */ - -/* $Id$ */ - -/* - * Decoder test for RPKI manifests. - * - * NB: This does -not- check the CMS signatures, just the encoding. - */ - -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include - -/* - * Read manifest (CMS object) in DER format. - * - * NB: When invoked this way, CMS_verify() does -not- verify, it just decodes the ASN.1. - * - * OK, this does more than just reading the CMS. Refactor or rename, someday. 
- */ - -static const Manifest *read_manifest(const char *filename, - const int print_cms, - const int print_manifest, - const int print_signerinfo) -{ - CMS_ContentInfo *cms = NULL; - const ASN1_OBJECT *oid = NULL; - const Manifest *m = NULL; - char *mftnum = NULL; - BIGNUM *bn = NULL; - char buf[512]; - BIO *b = NULL; - int i, j; - - if ((b = BIO_new_file(filename, "r")) == NULL || - (cms = d2i_CMS_bio(b, NULL)) == NULL) - goto done; - BIO_free(b); - b = NULL; - - if (print_signerinfo) { - STACK_OF(CMS_SignerInfo) *signerInfos = CMS_get0_SignerInfos(cms); - STACK_OF(X509) *certs = CMS_get1_certs(cms); - STACK_OF(X509_CRL) *crls = CMS_get1_crls(cms); - printf("Certificates: %d\n", certs ? sk_X509_num(certs) : 0); - printf("CRLs: %d\n", crls ? sk_X509_CRL_num(crls) : 0); - for (i = 0; i < sk_CMS_SignerInfo_num(signerInfos); i++) { - CMS_SignerInfo *si = sk_CMS_SignerInfo_value(signerInfos, i); - ASN1_OCTET_STRING *hash = NULL; - printf("SignerId[%d]: ", i); - if (CMS_SignerInfo_get0_signer_id(si, &hash, NULL, NULL) && hash != NULL) - for (j = 0; j < hash->length; j++) - printf("%02x%s", hash->data[j], j == hash->length - 1 ? "" : ":"); - else - printf("[Could not read SID]"); - if (certs) - for (j = 0; j < sk_X509_num(certs); j++) - if (!CMS_SignerInfo_cert_cmp(si, sk_X509_value(certs, j))) - printf(" [Matches certificate %d]", j); - if ((j = CMS_signed_get_attr_by_NID(si, NID_pkcs9_signingTime, -1)) >= 0) { - X509_ATTRIBUTE *xa = CMS_signed_get_attr(si, j); - if (xa && !xa->single && sk_ASN1_TYPE_num(xa->value.set) == 1) { - ASN1_TYPE *so = sk_ASN1_TYPE_value(xa->value.set, 0); - switch (so->type) { - case V_ASN1_UTCTIME: - printf(" [signingTime(U) %s%s]", - so->value.utctime->data[0] < '5' ? 
"20" : "19", - so->value.utctime->data); - break; - case V_ASN1_GENERALIZEDTIME: - printf(" [signingTime(G) %s]", - so->value.generalizedtime->data); - break; - } - } - } - printf("\n"); - } - sk_X509_pop_free(certs, X509_free); - sk_X509_CRL_pop_free(crls, X509_CRL_free); - } - - if ((b = BIO_new(BIO_s_mem())) == NULL || - CMS_verify(cms, NULL, NULL, NULL, b, CMS_NOCRL | CMS_NO_SIGNER_CERT_VERIFY | CMS_NO_ATTR_VERIFY | CMS_NO_CONTENT_VERIFY) <= 0 || - (m = ASN1_item_d2i_bio(ASN1_ITEM_rptr(Manifest), b, NULL)) == NULL) - goto done; - BIO_free(b); - b = NULL; - - if (print_manifest) { - - if ((oid = CMS_get0_eContentType(cms)) == NULL) - goto done; - OBJ_obj2txt(buf, sizeof(buf), oid, 0); - printf("eContentType: %s\n", buf); - - if (m->version) - printf("version: %ld\n", ASN1_INTEGER_get(m->version)); - else - printf("version: 0 [Defaulted]\n"); - - bn = ASN1_INTEGER_to_BN(m->manifestNumber, NULL); - mftnum = BN_bn2dec(bn); - printf("manifestNumber: %s\n", mftnum); - - printf("thisUpdate: %s\n", m->thisUpdate->data); - printf("nextUpdate: %s\n", m->nextUpdate->data); - OBJ_obj2txt(buf, sizeof(buf), m->fileHashAlg, 0); - printf("fileHashAlg: %s\n", buf); - - for (i = 0; i < sk_FileAndHash_num(m->fileList); i++) { - FileAndHash *fah = sk_FileAndHash_value(m->fileList, i); - printf("fileList[%3d]: ", i); - for (j = 0; j < fah->hash->length; j++) - printf("%02x%s", fah->hash->data[j], j == fah->hash->length - 1 ? 
" " : ":"); - printf(" %s\n", fah->file->data); - } - - if (X509_cmp_current_time(m->nextUpdate) < 0) - printf("MANIFEST IS STALE\n"); - } - - if (print_cms) { - if (print_manifest) - printf("\n"); - fflush(stdout); - if ((b = BIO_new(BIO_s_fd())) == NULL) - goto done; - BIO_set_fd(b, 1, BIO_NOCLOSE); - CMS_ContentInfo_print_ctx(b, cms, 0, NULL); - BIO_free(b); - b = NULL; - } - - done: - if (ERR_peek_error()) - ERR_print_errors_fp(stderr); - BIO_free(b); - BN_free(bn); - if (mftnum) - OPENSSL_free(mftnum); - CMS_ContentInfo_free(cms); - return m; -} - - - -const static struct option longopts[] = { - { "print-cms", no_argument, NULL, 'c' }, - { "help", no_argument, NULL, 'h' }, - { NULL } -}; - -static int usage (const char *jane, const int code) -{ - FILE *out = code ? stderr : stdout; - int i; - - fprintf(out, "usage: %s [options] manifest [manifest...]\n", jane); - fprintf(out, "options:\n"); - for (i = 0; longopts[i].name != NULL; i++) - fprintf(out, " -%c --%s\n", longopts[i].val, longopts[i].name); - - return code; -} - -/* - * Main program. 
- */ -int main (int argc, char *argv[]) -{ - int result = 0, print_cms = 0, c; - const char *jane = argv[0]; - - OpenSSL_add_all_algorithms(); - ERR_load_crypto_strings(); - - while ((c = getopt_long(argc, argv, "ch", longopts, NULL)) != -1) { - switch (c) { - case 'c': - print_cms = 1; - break; - case 'h': - return usage(jane, 0); - default: - return usage(jane, 1); - } - } - - argc -= optind; - argv += optind; - - if (argc == 0) - return usage(jane, 1); - - while (argc-- > 0) - result |= read_manifest(*argv++, print_cms, 1, 1) == NULL; - return result; -} diff --git a/utils/scan_roas/Makefile.in b/utils/scan_roas/Makefile.in deleted file mode 100644 index 7707969c..00000000 --- a/utils/scan_roas/Makefile.in +++ /dev/null @@ -1,52 +0,0 @@ -# $Id$ - -NAME = scan_roas - -BIN = ${NAME} -SRC = ${NAME}.c -OBJ = ${NAME}.o - -CFLAGS = @CFLAGS@ -LDFLAGS = @LDFLAGS@ -LIBS = @LIBS@ - -INSTALL = @INSTALL@ -m 555 - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -libdir = @libdir@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all: ${BIN} - -clean: - rm -rf ${BIN} ${OBJ} ${BIN}.dSYM - -${BIN}: ${SRC} - ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} - -ROA_DIR = ${abs_top_builddir}/rpkid/tests/smoketest.dir/publication - -test: all - -date -u +'now: %Y%m%d%H%M%SZ' - if test -d ${ROA_DIR}; then ./${BIN} ${ROA_DIR} ; else :; fi - -install: all - if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi - ${INSTALL} ${BIN} ${DESTDIR}${bindir} - -deinstall uninstall: - rm -f ${DESTDIR}${bindir}/${BIN} - -distclean: clean - rm -f Makefile diff --git a/utils/scan_roas/scan_roas.c b/utils/scan_roas/scan_roas.c deleted file mode 100644 index f32e3827..00000000 --- a/utils/scan_roas/scan_roas.c +++ /dev/null @@ -1,305 
+0,0 @@ -/* - * Copyright (C) 2014 Dragon Research Labs ("DRL") - * Portions copyright (C) 2011 Internet Systems Consortium ("ISC") - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notices and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ISC DISCLAIM ALL - * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR - * ISC BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL - * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA - * OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER - * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR - * PERFORMANCE OF THIS SOFTWARE. - */ - -/* $Id$ */ - -/* - * Decoder test for ROAs. - * - * NB: This does -not- check the CMS signatures, just the encoding. - */ - -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include - -/* - * How much buffer space do we need for a raw address? - */ -#define ADDR_RAW_BUF_LEN 16 - -/* - * How long can a filesystem path be? - */ -#define PATH_MAX 2048 - - - -/* - * Extract signing time from CMS message. 
- */ - -static char * -extract_signingTime(CMS_ContentInfo *cms, char *buffer, size_t buflen) -{ - STACK_OF(CMS_SignerInfo) *sis = NULL; - CMS_SignerInfo *si = NULL; - X509_ATTRIBUTE *xa = NULL; - ASN1_TYPE *so = NULL; - int i = -1; - - if (cms == NULL || - buffer == NULL || - buflen < sizeof("20010401123456Z") || - (sis = CMS_get0_SignerInfos(cms)) == NULL || - sk_CMS_SignerInfo_num(sis) != 1 || - (si = sk_CMS_SignerInfo_value(sis, 0)) < 0 || - (i = CMS_signed_get_attr_by_NID(si, NID_pkcs9_signingTime, -1)) < 0 || - (xa = CMS_signed_get_attr(si, i)) == NULL || - xa->single || - sk_ASN1_TYPE_num(xa->value.set) != 1 || - (so = sk_ASN1_TYPE_value(xa->value.set, 0)) == NULL) - return NULL; - - assert(buflen > 2); - buffer[buflen - 1] = '\0'; - - switch (so->type) { - case V_ASN1_UTCTIME: - strcpy(buffer, (so->value.utctime->data[0] >= '5') ? "19" : "20"); - return strncpy(buffer + 2, (const char *) so->value.utctime->data, buflen - 3); - case V_ASN1_GENERALIZEDTIME: - return strncpy(buffer, (const char *) so->value.generalizedtime->data, buflen - 1); - default: - return NULL; - } -} - - - -/* - * Expand the bitstring form of an address into a raw byte array. - * At the moment this is coded for simplicity, not speed. - */ -static void addr_expand(unsigned char *addr, - const ASN1_BIT_STRING *bs, - const int length) -{ - assert(bs->length >= 0 && bs->length <= length); - if (bs->length > 0) { - memcpy(addr, bs->data, bs->length); - if ((bs->flags & 7) != 0) { - unsigned char mask = 0xFF >> (8 - (bs->flags & 7)); - addr[bs->length - 1] &= ~mask; - } - } - memset(addr + bs->length, 0, length - bs->length); -} - -/* - * Extract the prefix length from a bitstring. - */ -#define addr_prefixlen(bs) ((int) ((bs)->length * 8 - ((bs)->flags & 7))) - -/* - * Read ROA (CMS object) in DER format. - * - * NB: When invoked this way, CMS_verify() does -not- verify, it just decodes the ASN.1. 
- */ -static int read_roa(const char *filename) -{ - char buffer[sizeof("20010401123456Z")], *b; - unsigned char addr[ADDR_RAW_BUF_LEN]; - CMS_ContentInfo *cms = NULL; - const ASN1_OBJECT *oid = NULL; - ROA *r = NULL; - char buf[512]; - BIO *bio; - int i, j, k, n, ok; - - if ((bio = BIO_new_file(filename, "r")) == NULL || - (cms = d2i_CMS_bio(bio, NULL)) == NULL) - goto done; - BIO_free(bio); - - if ((bio = BIO_new(BIO_s_mem())) == NULL || - CMS_verify(cms, NULL, NULL, NULL, bio, CMS_NOCRL | CMS_NO_SIGNER_CERT_VERIFY | CMS_NO_ATTR_VERIFY | CMS_NO_CONTENT_VERIFY) <= 0 || - (r = ASN1_item_d2i_bio(ASN1_ITEM_rptr(ROA), bio, NULL)) == NULL) - goto done; - - if (!extract_signingTime(cms, buffer, sizeof(buffer))) - goto done; - printf("%s ", buffer); - - printf("%ld", ASN1_INTEGER_get(r->asID)); - - for (i = 0; i < sk_ROAIPAddressFamily_num(r->ipAddrBlocks); i++) { - - ROAIPAddressFamily *f = sk_ROAIPAddressFamily_value(r->ipAddrBlocks, i); - - unsigned afi = (f->addressFamily->data[0] << 8) | (f->addressFamily->data[1]); - - for (j = 0; j < sk_ROAIPAddress_num(f->addresses); j++) { - ROAIPAddress *a = sk_ROAIPAddress_value(f->addresses, j); - - printf(" "); - - switch (afi) { - - case IANA_AFI_IPV4: - addr_expand(addr, a->IPAddress, 4); - printf("%d.%d.%d.%d", addr[0], addr[1], addr[2], addr[3]); - break; - - case IANA_AFI_IPV6: - addr_expand(addr, a->IPAddress, 16); - for (n = 16; n > 1 && addr[n-1] == 0x00 && addr[n-2] == 0x00; n -= 2) - ; - for (k = 0; k < n; k += 2) - printf("%x%s", (addr[k] << 8) | addr[k+1], (k < 14 ? 
":" : "")); - if (k < 16) - printf(":"); - if (k == 0) - printf(":"); - break; - - default: - break; - } - - printf("/%u", addr_prefixlen(a->IPAddress)); - - if (a->maxLength) - printf("-%ld", ASN1_INTEGER_get(a->maxLength)); - } - } - printf("\n"); - - done: - ok = r != NULL; - - if (ERR_peek_error()) - ERR_print_errors_fp(stderr); - BIO_free(bio); - CMS_ContentInfo_free(cms); - ROA_free(r); - - return ok; -} - - - -/** - * Check str for a trailing suffix. - */ -static int endswith(const char *str, const char *suffix) -{ - size_t len_str, len_suffix; - assert(str != NULL && suffix != NULL); - len_str = strlen(str); - len_suffix = strlen(suffix); - return len_str >= len_suffix && !strcmp(str + len_str - len_suffix, suffix); -} - - - -/** - * Walk directory tree, looking for ROAs. - */ -static int walk(const char *name) -{ - int need_slash, ok = 1; - char path[PATH_MAX]; - struct dirent *d; - size_t len; - DIR *dir; - - assert(name); - len = strlen(name); - - assert(len > 0 && len < sizeof(path)); - need_slash = name[len - 1] != '/'; - - if ((dir = opendir(name)) == NULL) - return 0; - - while ((d = readdir(dir)) != NULL) { - if (!strcmp(d->d_name, ".") || - !strcmp(d->d_name, "..")) - continue; - if (len + strlen(d->d_name) + need_slash >= sizeof(path)) { - ok = 0; - goto done; - } - strcpy(path, name); - if (need_slash) - strcat(path, "/"); - strcat(path, d->d_name); - switch (d->d_type) { - case DT_DIR: - ok &= walk(path); - continue; - default: - if (endswith(path, ".roa")) - ok &= read_roa(path); - continue; - } - } - - done: - closedir(dir); - return ok; -} - - - -static void usage (const char *jane, const int code) -{ - fprintf(code ? stderr : stdout, "usage: %s authtree [authtree...]\n", jane); - exit(code); -} - -/* - * Main program. 
- */ -int main (int argc, char *argv[]) -{ - int i, ok = 1; - - if (argc == 2 && (!strcmp(argv[1], "-h") || !strcmp(argv[1], "--help"))) - usage(argv[0], 0); - - if (argc < 2) - usage(argv[0], 1); - - OpenSSL_add_all_algorithms(); - ERR_load_crypto_strings(); - - for (i = 1; i < argc; i++) - ok &= walk(argv[i]); - - return !ok; -} diff --git a/utils/scan_routercerts/Makefile.in b/utils/scan_routercerts/Makefile.in deleted file mode 100644 index 715d1325..00000000 --- a/utils/scan_routercerts/Makefile.in +++ /dev/null @@ -1,41 +0,0 @@ -# $Id$ - -NAME = scan_routercerts - -BIN = ${NAME} - -INSTALL = @INSTALL@ -m 555 - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -libdir = @libdir@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all clean: - @true - -ROUTERCERT_DIR = ${abs_top_builddir}/rpkid/tests/smoketest.dir/publication - -test: all - -date -u +'now: %Y%m%d%H%M%SZ' - if test -d ${ROUTERCERT_DIR}; then ./${BIN} ; else :; fi - -install: all - if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi - ${INSTALL} ${BIN} ${DESTDIR}${bindir} - -deinstall uninstall: - rm -f ${DESTDIR}${bindir}/${BIN} - -distclean: clean - rm -f Makefile diff --git a/utils/scan_routercerts/scan_routercerts b/utils/scan_routercerts/scan_routercerts deleted file mode 100755 index 342fa272..00000000 --- a/utils/scan_routercerts/scan_routercerts +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Scan rcynic validated output looking for router certificates, print -out stuff that the rpki-rtr code cares about. -""" - -# This program represents a weird temporary state, mostly to avoid -# diving into a recursive yak shaving exercise. -# -# Under the old scheme, anything used by the RP code should be either -# C code or pure Python code using just the standard libraries. This -# has gotten silly, but we haven't yet refactored the current packaged -# builds from two packages into three (adding a -libs package). -# -# So, by rights, this program should be a C monstrosity written using -# the OpenSSL C API. I started coding it that way, but it was just -# too painful for something we're probably going to rewrite as a few -# lines of Python once we refactor, but by the same token I didn't -# want to delay router certificate support until the refactoring. -# -# So this program anticipates the new scheme of things, but makes one -# concession to current reality: if it has a problem importing the -# RPKI-specific libraries, it just quietly exits as if everything were -# fine and there simply are no router certificates to report. This -# isn't the right answer in the long run, but will suffice to avoid -# further bald yaks. 
- -import os -import sys -import base64 - -try: - import rpki.POW - import rpki.oids -except ImportError: - sys.exit(0) - -rcynic_dir = sys.argv[1] - -for root, dirs, files in os.walk(rcynic_dir): - for fn in files: - if not fn.endswith(".cer"): - continue - x = rpki.POW.X509.derReadFile(os.path.join(root, fn)) - - if rpki.oids.id_kp_bgpsec_router not in (x.getEKU() or ()): - continue - - sys.stdout.write(base64.urlsafe_b64encode(x.getSKI()).rstrip("=")) - for min_asn, max_asn in x.getRFC3779()[0]: - for asn in xrange(min_asn, max_asn + 1): - sys.stdout.write(" %s" % asn) - sys.stdout.write(" %s\n" % base64.b64encode(x.getPublicKey().derWritePublic())) diff --git a/utils/uri/Makefile.in b/utils/uri/Makefile.in deleted file mode 100644 index fc545060..00000000 --- a/utils/uri/Makefile.in +++ /dev/null @@ -1,31 +0,0 @@ -# $Id$ - -NAME = uri - -BIN = ${NAME} -SRC = ${NAME}.c -OBJ = ${NAME}.o - -CFLAGS = @CFLAGS@ -LDFLAGS = @LDFLAGS@ -LIBS = @LIBS@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all: ${BIN} - -clean: - rm -rf ${BIN} ${OBJ} ${BIN}.dSYM - -${BIN}: ${SRC} - ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} - -test: - @true - -install deinstall uninstall: - @true - -distclean: clean - rm -f Makefile diff --git a/utils/uri/dot.awk b/utils/uri/dot.awk deleted file mode 100644 index ca1b490b..00000000 --- a/utils/uri/dot.awk +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/awk -f -# $Id$ -# -# This doesn't really work right yet, and even if it did, the graph -# it would generate would be hopelessly large. 
- -BEGIN { - cmd = "find /var/rcynic/data/unauthenticated -type f -name '*.cer' -print0 | xargs -0 ./uri -d"; - while ((cmd | getline) == 1) { - if ($1 == "File") { - sub("/var/rcynic/data/unauthenticated/", "rsync://"); - u = $2; - uri[u] = ++n; - continue; - } - if ($1 == "SIA:") { - sia[u] = $2; - continue; - } - if ($1 == "AIA:") { - aia[u] = $2; - continue; - } - } - print "digraph rpki {"; - for (u in uri) { - printf "n%06d\t[ label=\"%s\" ];\n", uri[u], u; - if (sia[u]) - printf "n%06d -> n%06d\t [ color=blue ];\n", uri[u], uri[sia[u]]; - if (aia[u]) - printf "n%06d -> n%06d\t [ color=green ];\n", uri[u], uri[aia[u]]; - } - print "}"; -} diff --git a/utils/uri/table.awk b/utils/uri/table.awk deleted file mode 100644 index d8627f67..00000000 --- a/utils/uri/table.awk +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/awk -f -# $Id$ -# -# Reformat uri.c's output in a way that's more useful -# for some kinds of scripting. Perhaps this functionality should be -# part of uri.c itself, but for now this script will do. 
- -BEGIN { - cmd = "find /var/rcynic/data/unauthenticated -type f -name '*.cer' -print0 | xargs -0 ./uri -d"; - while ((cmd | getline) == 1) { - if ($1 == "File") { - if (f) - print f, u, a, s, c; - a = s = c = "-"; - f = $2; - sub("/var/rcynic/data/unauthenticated/","rsync://"); - u = $2; - continue; - } - if ($1 == "SIA:") { - s = $2; - continue; - } - if ($1 == "AIA:") { - a = $2; - continue; - } - if ($1 == "CRL:") { - c = $2; - continue; - } - } - if (f != "-") - print f, u, a, s, c; -} diff --git a/utils/uri/uri.c b/utils/uri/uri.c deleted file mode 100644 index 6353e8e5..00000000 --- a/utils/uri/uri.c +++ /dev/null @@ -1,248 +0,0 @@ -/* - * Copyright (C) 2014 Dragon Research Labs ("DRL") - * Portions copyright (C) 2006--2008 American Registry for Internet Numbers ("ARIN") - * - * Permission to use, copy, modify, and distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notices and this permission notice appear in all copies. - * - * THE SOFTWARE IS PROVIDED "AS IS" AND DRL AND ARIN DISCLAIM ALL - * WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL DRL OR - * ARIN BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL - * DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA - * OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER - * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR - * PERFORMANCE OF THIS SOFTWARE. - */ - -/* $Id$ */ - -/* - * Extract and test URIs from certificates. This is a unit test of - * rcynic code, a utility, or both, depending on how it turns out. - * - * NB: OpenSSL insures that IA5 strings are null-terminated, so it's safe - * for us to ignore the length count. 
- */ - -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include - -static const unsigned char id_ad_caIssuers[] = {0x2b, 0x6, 0x1, 0x5, 0x5, 0x7, 0x30, 0x2}; /* 1.3.6.1.5.5.7.48.2 */ -static const unsigned char id_ad_caRepository[] = {0x2b, 0x6, 0x1, 0x5, 0x5, 0x7, 0x30, 0x5}; /* 1.3.6.1.5.5.7.48.5 */ -static const unsigned char id_ad_signedObjectRepository[] = {0x2b, 0x6, 0x1, 0x5, 0x5, 0x7, 0x30, 0x9}; /* 1.3.6.1.5.5.7.48.9 */ -static const unsigned char id_ad_rpkiManifest[] = {0x2b, 0x6, 0x1, 0x5, 0x5, 0x7, 0x30, 0xa}; /* 1.3.6.1.5.5.7.48.10 */ -static const unsigned char id_ad_signedObject[] = {0x2b, 0x6, 0x1, 0x5, 0x5, 0x7, 0x30, 0xb}; /* 1.3.6.1.5.5.7.48.11 */ - -static X509 *read_cert(const char *filename, int format, int verbose) -{ - BIO *b = BIO_new_file(filename, "r"); - STACK_OF(X509) *certs = NULL; - CMS_ContentInfo *cms = NULL; - X509 *x = NULL; - - if (b == NULL) - return NULL; - - switch (format) { - case 'p': - x = PEM_read_bio_X509(b, NULL, NULL, NULL); - break; - case 'd': - x = d2i_X509_bio(b, NULL); - break; - } - - if (x == NULL) { - BIO_reset(b); - switch (format) { - case 'p': - cms = PEM_read_bio_CMS(b, NULL, NULL, NULL); - break; - case 'd': - cms = d2i_CMS_bio(b, NULL); - break; - } - if (cms != NULL && (certs = CMS_get1_certs(cms)) != NULL) - x = sk_X509_shift(certs); - } - - if (x != NULL && verbose) { - X509_print_fp(stdout, x); - printf("\n"); - } - - sk_X509_pop_free(certs, X509_free); - CMS_ContentInfo_free(cms); - BIO_free(b); - return x; -} - -enum decode_errors { - decode_ok, - decode_no_extension, - decode_not_exactly_one_DistributionPointName, - decode_has_reasons, - decode_has_CRLissuer, - decode_no_distributionPoint, - decode_not_GeneralName, - decode_not_URI, -}; - -static enum decode_errors decode_crldp(X509 *x, int verbose, int spaces) -{ - enum decode_errors err = decode_ok; - STACK_OF(DIST_POINT) *ds = X509_get_ext_d2i(x, NID_crl_distribution_points, NULL, NULL); - 
DIST_POINT *d; - GENERAL_NAME *n; - int i; - - if (!ds) { - err = decode_no_extension; - } else if (sk_DIST_POINT_num(ds) != 1) { - err = decode_not_exactly_one_DistributionPointName; - } else if ((d = sk_DIST_POINT_value(ds, 0))->reasons) { - err = decode_has_reasons; - } else if (d->CRLissuer) { - err = decode_has_CRLissuer; - } else if (!d->distpoint) { - err = decode_no_distributionPoint; - } else if (d->distpoint->type != 0) { - err = decode_not_GeneralName; - } else { - for (i = 0; i < sk_GENERAL_NAME_num(d->distpoint->name.fullname); i++) { - n = sk_GENERAL_NAME_value(d->distpoint->name.fullname, i); - if (n->type != GEN_URI) { - err = decode_not_GeneralName; - break; - } - printf(" CRLDP: %s%s", n->d.uniformResourceIdentifier->data, spaces ? "" : "\n"); - } - } - - sk_DIST_POINT_pop_free(ds, DIST_POINT_free); - return err; -} - -#define decode_xia(_x_, _v_, _s_, _tag_, _nid_, _oid_) \ - _decode_xia(_x_, _v_, _s_, _tag_, _nid_, _oid_, sizeof(_oid_)) - -static enum decode_errors _decode_xia(X509 *x, - int verbose, - int spaces, - char *tag, - int nid, - const unsigned char *oid, - int oidlen) -{ - enum decode_errors err = decode_ok; - AUTHORITY_INFO_ACCESS *as = X509_get_ext_d2i(x, nid, NULL, NULL); - ACCESS_DESCRIPTION *a; - int i; - - if (!as) { - err = decode_no_extension; - } else { - for (i = 0; i < sk_ACCESS_DESCRIPTION_num(as); i++) { - a = sk_ACCESS_DESCRIPTION_value(as, i); - if (a->location->type != GEN_URI) { - err = decode_not_URI; - break; - } - if (a->method->length == oidlen && !memcmp(a->method->data, oid, oidlen)) - printf(" %s: %s%s", tag, a->location->d.uniformResourceIdentifier->data, spaces ? 
"" : "\n"); - } - } - - sk_ACCESS_DESCRIPTION_pop_free(as, ACCESS_DESCRIPTION_free); - return err; -} - - - -const static struct option longopts[] = { - { "der", no_argument, NULL, 'd' }, - { "help", no_argument, NULL, 'h' }, - { "pem", no_argument, NULL, 'p' }, - { "spaces", no_argument, NULL, 's' }, - { "verbose", no_argument, NULL, 'v' }, - { NULL } -}; - -static int usage (const char *jane, const int code) -{ - FILE *out = code ? stderr : stdout; - int i; - - fprintf(out, "usage: %s [-p | -d] cert [cert...]\n", jane); - fprintf(out, "options:\n"); - for (i = 0; longopts[i].name != NULL; i++) - fprintf(out, " -%c --%s\n", longopts[i].val, longopts[i].name); - - return code; -} - -int main(int argc, char *argv[]) -{ - int c, format = 'd', spaces = 0, verbose = 0; - const char *jane = argv[0]; - X509 *x; - - OpenSSL_add_all_algorithms(); - ERR_load_crypto_strings(); - - while ((c = getopt(argc, argv, "pdsv")) > 0) { - switch (c) { - case 'v': - verbose = 1; - break; - case 'p': - case 'd': - format = c; - break; - case 's': - spaces = 1; - break; - case 'h': - return usage(jane, 0); - default: - return usage(jane, 1); - } - } - - argc -= optind; - argv += optind; - - if (argc == 0) - return usage(jane, 1); - - while (argc-- > 0) { - printf(spaces ? 
"%s" : "File: %s\n", *argv); - if ((x = read_cert(*argv++, format, verbose)) == NULL) { - printf("Couldn't read certificate, skipping\n"); - continue; - } - decode_xia(x, verbose, spaces, "AIA:caIssuers", NID_info_access, id_ad_caIssuers); - decode_xia(x, verbose, spaces, "SIA:caRepository", NID_sinfo_access, id_ad_caRepository); - decode_xia(x, verbose, spaces, "SIA:signedObjectRepository", NID_sinfo_access, id_ad_signedObjectRepository); - decode_xia(x, verbose, spaces, "SIA:rpkiManifest", NID_sinfo_access, id_ad_rpkiManifest); - decode_xia(x, verbose, spaces, "SIA:signedObject", NID_sinfo_access, id_ad_signedObject); - decode_crldp(x, verbose, spaces); - if (spaces) - putchar('\n'); - X509_free(x); - } - - return 0; -} -- cgit v1.2.3 From cc30a7ed3c710c6082df2f2079e3012b17161352 Mon Sep 17 00:00:00 2001 From: Rob Austein Date: Sun, 6 Apr 2014 23:55:37 +0000 Subject: Start whacking Makefiles svn path=/branches/tk685/; revision=5758 --- Makefile.in | 86 +++++++++++++++++++++++++++++-- buildtools/make-version.py | 4 +- ca/Makefile.in | 87 ++------------------------------ ca/rpki | 1 + configure | 73 ++++++++++++--------------- configure.ac | 67 +++++++++++------------- potpourri/rpki | 2 +- rp/Makefile.in | 9 ++++ rp/rcynic/Makefile.in | 37 +------------- rp/rcynic/rcynic-cron | 30 +++++------ rp/rcynic/rcynic-html | 2 +- rp/rtr-origin/Makefile.in | 18 ++----- rp/rtr-origin/rtr-origin | 4 +- rp/utils/Makefile.in | 61 ++++++++++++++++++++-- rp/utils/find_roa/Makefile.in | 56 -------------------- rp/utils/hashdir/Makefile.in | 55 -------------------- rp/utils/print_roa/Makefile.in | 52 ------------------- rp/utils/print_rpki_manifest/Makefile.in | 52 ------------------- rp/utils/scan_roas/Makefile.in | 52 ------------------- rp/utils/scan_routercerts/Makefile.in | 41 --------------- rp/utils/uri/Makefile.in | 31 ------------ schemas/Makefile.in | 66 ++++++++++++++++++++++++ setup.py | 38 +++++++------- 23 files changed, 323 insertions(+), 601 deletions(-) create mode 
120000 ca/rpki create mode 100644 rp/Makefile.in delete mode 100644 rp/utils/find_roa/Makefile.in delete mode 100644 rp/utils/hashdir/Makefile.in delete mode 100644 rp/utils/print_roa/Makefile.in delete mode 100644 rp/utils/print_rpki_manifest/Makefile.in delete mode 100644 rp/utils/scan_roas/Makefile.in delete mode 100644 rp/utils/scan_routercerts/Makefile.in delete mode 100644 rp/utils/uri/Makefile.in create mode 100644 schemas/Makefile.in diff --git a/Makefile.in b/Makefile.in index 1247ff0d..1394a6cb 100644 --- a/Makefile.in +++ b/Makefile.in @@ -1,16 +1,38 @@ # $Id$ -PYTHON = @PYTHON@ +CFLAGS = @CFLAGS@ +LDFLAGS = @LDFLAGS@ @POW_LDFLAGS@ +LIBS = @LIBS@ -abs_top_builddir = @abs_top_builddir@ +PYTHON = @PYTHON@ +INSTALL = @INSTALL@ -m 555 -SUBDIRS = @TOP_LEVEL_SUBDIRS@ +prefix = @prefix@ +exec_prefix = @exec_prefix@ +datarootdir = @datarootdir@ +datadir = @datadir@ +localstatedir = @localstatedir@ +sharedstatedir = @sharedstatedir@ +sysconfdir = @sysconfdir@ +bindir = @bindir@ +sbindir = @sbindir@ +libexecdir = @libexecdir@ +sysconfdir = @sysconfdir@ + +abs_builddir = @abs_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +abs_top_builddir= @abs_top_builddir@ +srcdir = @srcdir@ + +SUBDIRS = @TOP_LEVEL_SUBDIRS@ + +SETUP_PY_INSTALL_LAYOUT = @SETUP_PY_INSTALL_LAYOUT@ default: all test:: all -all:: VERSION +all:: VERSION rpki/autoconf.py setup_autoconf.py all install clean test distclean deinstall uninstall:: @for i in ${SUBDIRS}; do echo "Making $@ in $$i"; (cd $$i && ${MAKE} $@); done @@ -30,3 +52,59 @@ VERSION: .FORCE ${PYTHON} buildtools/make-version.py .FORCE: + +rpki/autoconf.py: Makefile + @echo 'Generating $@'; \ + (echo '# Automatically generated. 
DO NOT EDIT.'; \ + echo ; \ + echo 'bindir = "${bindir}"'; \ + echo 'datarootdir = "${datarootdir}"'; \ + echo 'localstatedir = "${localstatedir}"'; \ + echo 'sbindir = "${sbindir}"'; \ + echo 'sharedstatedir = "${sharedstatedir}"'; \ + echo 'sysconfdir = "${sysconfdir}"'; \ + echo 'libexecdir = "${libexecdir}"'; \ + echo ; \ + echo 'WSGI_DAEMON_PROCESS = "${WSGI_DAEMON_PROCESS}"'; \ + echo 'WSGI_PROCESS_GROUP = "${WSGI_PROCESS_GROUP}"'; \ + echo 'RCYNIC_HTML_DIR = "${RCYNIC_HTML_DIR}"'; \ + echo 'APACHE_VERSION = "${APACHE_VERSION}"'; \ + echo 'WSGI_PYTHON_EGG_CACHE_DIR = "${WSGI_PYTHON_EGG_CACHE_DIR}"'; \ + echo 'WSGI_PYTHON_EGG_CACHE_USER = "${WSGI_PYTHON_EGG_CACHE_USER}"'; \ + ) > $@ + +clean:: + rm -f rpki/autoconf.py + +setup_autoconf.py: rpki/autoconf.py + @echo 'Generating $@'; \ + (cat rpki/autoconf.py; \ + echo ; \ + echo 'CFLAGS = """${CFLAGS}"""'; \ + echo 'LDFLAGS = """${LDFLAGS}"""'; \ + echo 'LIBS = """${LIBS}"""'; \ + ) > $@ + +clean:: + rm -f setup_autoconf.py setup_autoconf.pyc + +SETUP_PY_ROOT = `${PYTHON} -c 'import sys; print "--root " + sys.argv[1] if sys.argv[1] else ""' '${DESTDIR}'` + +POW_SO = rpki/POW/_POW.so + +all:: setup_autoconf.py ${POW_SO} build/stamp + +.FORCE: + +${POW_SO}: .FORCE setup_autoconf.py + ${PYTHON} setup.py build_ext --inplace + +build/stamp: .FORCE setup_autoconf.py + ${PYTHON} setup.py build + touch $@ + +clean:: + rm -rf ${POW_SO} build dist + +clean:: + find . 
-type f -name '*.py[co]' -delete diff --git a/buildtools/make-version.py b/buildtools/make-version.py index af513b5d..a73a89ab 100644 --- a/buildtools/make-version.py +++ b/buildtools/make-version.py @@ -63,7 +63,7 @@ if v == unknown: sys.stderr.write("Warning: Could not determine software version\n") if old is None or v != old: + with open("rpki/version.py", "w") as f: + f.write("VERSION = \"%s\"\n" % v) with open("VERSION", "w") as f: f.write(v + "\n") - with open("rpkid/rpki/version.py", "w") as f: - f.write("VERSION = \"%s\"\n" % v) diff --git a/ca/Makefile.in b/ca/Makefile.in index d36a3163..0764edc1 100644 --- a/ca/Makefile.in +++ b/ca/Makefile.in @@ -33,58 +33,12 @@ APACHE_VERSION = @APACHE_VERSION@ WSGI_PYTHON_EGG_CACHE_DIR = @WSGI_PYTHON_EGG_CACHE_DIR@ WSGI_PYTHON_EGG_CACHE_USER = @WSGI_PYTHON_EGG_CACHE_USER@ -RPKID_INSTALL_TARGETS = @RPKID_INSTALL_TARGETS@ - -SETUP_PY_INSTALL_LAYOUT = @SETUP_PY_INSTALL_LAYOUT@ - -SETUP_PY_ROOT = `${PYTHON} -c 'import sys; print "--root " + sys.argv[1] if sys.argv[1] else ""' '${DESTDIR}'` - -POW_SO = rpki/POW/_POW.so - -all:: rpki/autoconf.py setup_autoconf.py rpki/relaxng.py myrpki.rng rpki/sql_schemas.py ${POW_SO} build/stamp - -.FORCE: - -${POW_SO}: .FORCE setup_autoconf.py - ${PYTHON} setup.py build_ext --inplace - -build/stamp: .FORCE setup_autoconf.py - ${PYTHON} setup.py build - touch $@ +CA_INSTALL_TARGETS = @CA_INSTALL_TARGETS@ clean:: - rm -rf ${POW_SO} build dist - -RNGS = left-right-schema.rng up-down-schema.rng publication-schema.rng myrpki.rng router-certificate-schema.rng - -rpki/relaxng.py: ${abs_top_srcdir}/buildtools/make-relaxng.py ${RNGS} - ${PYTHON} ${abs_top_srcdir}/buildtools/make-relaxng.py ${RNGS} >$@.tmp - mv $@.tmp $@ - -left-right-schema.rng: left-right-schema.rnc - ${TRANG} left-right-schema.rnc left-right-schema.rng - -up-down-schema.rng: up-down-schema.rnc - ${TRANG} up-down-schema.rnc up-down-schema.rng - -publication-schema.rng: publication-schema.rnc - ${TRANG} publication-schema.rnc 
publication-schema.rng - -myrpki.rng: myrpki.rnc - ${TRANG} myrpki.rnc myrpki.rng - -router-certificate-schema.rng: router-certificate-schema.rnc - ${TRANG} router-certificate-schema.rnc router-certificate-schema.rng - -rpki/sql_schemas.py: ${abs_top_srcdir}/buildtools/make-sql-schemas.py rpkid.sql pubd.sql - ${PYTHON} ${abs_top_srcdir}/buildtools/make-sql-schemas.py >$@.tmp - mv $@.tmp $@ - -clean:: - find . -type f -name '*.py[co]' -delete cd tests; $(MAKE) $@ -install:: ${RPKID_INSTALL_TARGETS} +install:: ${CA_INSTALL_TARGETS} install-always:: all ${PYTHON} setup.py install ${SETUP_PY_ROOT} ${SETUP_PY_INSTALL_LAYOUT} --record installed @@ -171,7 +125,7 @@ distclean:: clean docclean all:: examples/rpki.conf -examples/rpki.conf: rpki/autoconf.py rpki-confgen rpki-confgen.xml +examples/rpki.conf: ${abs_top_srcdir}/rpki/autoconf.py rpki-confgen rpki-confgen.xml ${PYTHON} rpki-confgen \ --read-xml rpki-confgen.xml \ --autoconf \ @@ -185,41 +139,6 @@ examples/rpki.conf: rpki/autoconf.py rpki-confgen rpki-confgen.xml clean:: rm -f examples/rpki.conf -rpki/autoconf.py: Makefile - @echo 'Generating $@'; \ - (echo '# Automatically generated. 
DO NOT EDIT.'; \ - echo ; \ - echo 'bindir = "${bindir}"'; \ - echo 'datarootdir = "${datarootdir}"'; \ - echo 'localstatedir = "${localstatedir}"'; \ - echo 'sbindir = "${sbindir}"'; \ - echo 'sharedstatedir = "${sharedstatedir}"'; \ - echo 'sysconfdir = "${sysconfdir}"'; \ - echo 'libexecdir = "${libexecdir}"'; \ - echo ; \ - echo 'WSGI_DAEMON_PROCESS = "${WSGI_DAEMON_PROCESS}"'; \ - echo 'WSGI_PROCESS_GROUP = "${WSGI_PROCESS_GROUP}"'; \ - echo 'RCYNIC_HTML_DIR = "${RCYNIC_HTML_DIR}"'; \ - echo 'APACHE_VERSION = "${APACHE_VERSION}"'; \ - echo 'WSGI_PYTHON_EGG_CACHE_DIR = "${WSGI_PYTHON_EGG_CACHE_DIR}"'; \ - echo 'WSGI_PYTHON_EGG_CACHE_USER = "${WSGI_PYTHON_EGG_CACHE_USER}"'; \ - ) > $@ - -clean:: - rm -f rpki/autoconf.py - -setup_autoconf.py: rpki/autoconf.py - @echo 'Generating $@'; \ - (cat rpki/autoconf.py; \ - echo ; \ - echo 'CFLAGS = """${CFLAGS}"""'; \ - echo 'LDFLAGS = """${LDFLAGS}"""'; \ - echo 'LIBS = """${LIBS}"""'; \ - ) > $@ - -clean:: - rm -f setup_autoconf.py setup_autoconf.pyc - install-postconf: \ install-user install-egg-cache install-conf install-apache install-mysql install-django install-bpki install-cron diff --git a/ca/rpki b/ca/rpki new file mode 120000 index 00000000..8d289d0b --- /dev/null +++ b/ca/rpki @@ -0,0 +1 @@ +../rpki \ No newline at end of file diff --git a/configure b/configure index 4a512935..711ec03d 100755 --- a/configure +++ b/configure @@ -582,7 +582,7 @@ PACKAGE_STRING='rpkitools 1.0' PACKAGE_BUGREPORT='' PACKAGE_URL='' -ac_unique_file="rcynic/rcynic.c" +ac_unique_file="rp/rcynic/rcynic.c" # Factoring default headers for most tests. 
ac_includes_default="\ #include @@ -629,7 +629,7 @@ TOP_LEVEL_SUBDIRS WSGI_PYTHON_EGG_CACHE_USER WSGI_PYTHON_EGG_CACHE_DIR SETUP_PY_INSTALL_LAYOUT -RPKID_INSTALL_TARGETS +CA_INSTALL_TARGETS RTR_ORIGIN_INSTALL_TARGETS RCYNIC_INSTALL_TARGETS RCYNIC_HTML_DIR @@ -714,7 +714,7 @@ PATH_SEPARATOR SHELL' ac_subst_files='RCYNIC_MAKE_RULES RTR_ORIGIN_MAKE_RULES -RPKID_MAKE_RULES' +CA_MAKE_RULES' ac_user_opts=' enable_option_checking with_system_openssl @@ -4571,28 +4571,28 @@ $as_echo "$enable_target_installation" >&6; } # rcynic jail setup is complicated enough that it's simplest to have # different rule sets for different platforms. Icky, but.... # rpki-rtr isn't as complicated, but has similar issues, same hack. -# rpkid isn't as complicated either, but same hack. +# ca isn't as complicated either, but same hack. case $host_os in darwin*) - RCYNIC_MAKE_RULES='rcynic/rules.darwin.mk' - RTR_ORIGIN_MAKE_RULES='rtr-origin/rules.darwin.mk' - RPKID_MAKE_RULES='rpkid/rules.darwin.mk' + RCYNIC_MAKE_RULES='rp/rcynic/rules.darwin.mk' + RTR_ORIGIN_MAKE_RULES='rp/rtr-origin/rules.darwin.mk' + CA_MAKE_RULES='ca/rules.darwin.mk' ;; freebsd*) - RCYNIC_MAKE_RULES='rcynic/rules.freebsd.mk' - RTR_ORIGIN_MAKE_RULES='rtr-origin/rules.freebsd.mk' - RPKID_MAKE_RULES='rpkid/rules.freebsd.mk' + RCYNIC_MAKE_RULES='rp/rcynic/rules.freebsd.mk' + RTR_ORIGIN_MAKE_RULES='rp/rtr-origin/rules.freebsd.mk' + CA_MAKE_RULES='ca/rules.freebsd.mk' ;; linux*) - RCYNIC_MAKE_RULES='rcynic/rules.linux.mk' - RTR_ORIGIN_MAKE_RULES='rtr-origin/rules.linux.mk' - RPKID_MAKE_RULES='rpkid/rules.linux.mk' + RCYNIC_MAKE_RULES='rp/rcynic/rules.linux.mk' + RTR_ORIGIN_MAKE_RULES='rp/rtr-origin/rules.linux.mk' + CA_MAKE_RULES='ca/rules.linux.mk' ;; *) - RCYNIC_MAKE_RULES='rcynic/rules.unknown.mk' - RTR_ORIGIN_MAKE_RULES='rtr-origin/rules.unknown.mk' - RPKID_MAKE_RULES='rpkid/rules.unknown.mk' + RCYNIC_MAKE_RULES='rp/rcynic/rules.unknown.mk' + RTR_ORIGIN_MAKE_RULES='rp/rtr-origin/rules.unknown.mk' + 
CA_MAKE_RULES='ca/rules.unknown.mk' ;; esac @@ -4629,7 +4629,7 @@ esac RCYNIC_INSTALL_TARGETS='install-always' RTR_ORIGIN_INSTALL_TARGETS='install-always' -RPKID_INSTALL_TARGETS='install-always' +CA_INSTALL_TARGETS='install-always' if test $use_rcynic_jail = yes then @@ -4640,7 +4640,7 @@ if test $enable_target_installation = yes then RCYNIC_INSTALL_TARGETS="$RCYNIC_INSTALL_TARGETS install-postconf" RTR_ORIGIN_INSTALL_TARGETS="$RTR_ORIGIN_INSTALL_TARGETS install-postconf" - RPKID_INSTALL_TARGETS="$RPKID_INSTALL_TARGETS install-postconf" + CA_INSTALL_TARGETS="$CA_INSTALL_TARGETS install-postconf" fi @@ -4992,20 +4992,14 @@ fi TOP_LEVEL_SUBDIRS="h" test $build_openssl = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS openssl" -test $build_rp_tools = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS rcynic utils rtr-origin" -test $build_ca_tools = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS rpkid" +test $build_rp_tools = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS rp" +test $build_ca_tools = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS ca" -ac_config_files="$ac_config_files Makefile h/Makefile" +ac_config_files="$ac_config_files Makefile h/Makefile schemas/Makefile" -if test "X$RCYNIC_STATIC_RSYNC" != "X" -then - ac_config_files="$ac_config_files rcynic/static-rsync/Makefile" - -fi - # OpenSSL has its own build system that bears no relationship to # anything but itself, and our use of it is a bit weird, so this is a # BFMI (Brute Force and Massive Ignorance) job. 
@@ -5062,13 +5056,13 @@ fi if test $build_rp_tools = yes then - ac_config_files="$ac_config_files rcynic/Makefile utils/Makefile utils/find_roa/Makefile utils/hashdir/Makefile utils/print_rpki_manifest/Makefile utils/print_roa/Makefile utils/scan_roas/Makefile utils/scan_routercerts/Makefile utils/uri/Makefile rtr-origin/Makefile" + ac_config_files="$ac_config_files rp/Makefile rp/rcynic/Makefile rp/rcynic/static-rsync/Makefile rp/utils/Makefile rp/rtr-origin/Makefile" fi if test $build_ca_tools = yes then - ac_config_files="$ac_config_files rpkid/Makefile rpkid/tests/Makefile" + ac_config_files="$ac_config_files ca/Makefile ca/tests/Makefile" fi @@ -5809,21 +5803,16 @@ do case $ac_config_target in "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; "h/Makefile") CONFIG_FILES="$CONFIG_FILES h/Makefile" ;; - "rcynic/static-rsync/Makefile") CONFIG_FILES="$CONFIG_FILES rcynic/static-rsync/Makefile" ;; + "schemas/Makefile") CONFIG_FILES="$CONFIG_FILES schemas/Makefile" ;; "openssl/Makefile") CONFIG_FILES="$CONFIG_FILES openssl/Makefile" ;; "openssl/tests/Makefile") CONFIG_FILES="$CONFIG_FILES openssl/tests/Makefile" ;; - "rcynic/Makefile") CONFIG_FILES="$CONFIG_FILES rcynic/Makefile" ;; - "utils/Makefile") CONFIG_FILES="$CONFIG_FILES utils/Makefile" ;; - "utils/find_roa/Makefile") CONFIG_FILES="$CONFIG_FILES utils/find_roa/Makefile" ;; - "utils/hashdir/Makefile") CONFIG_FILES="$CONFIG_FILES utils/hashdir/Makefile" ;; - "utils/print_rpki_manifest/Makefile") CONFIG_FILES="$CONFIG_FILES utils/print_rpki_manifest/Makefile" ;; - "utils/print_roa/Makefile") CONFIG_FILES="$CONFIG_FILES utils/print_roa/Makefile" ;; - "utils/scan_roas/Makefile") CONFIG_FILES="$CONFIG_FILES utils/scan_roas/Makefile" ;; - "utils/scan_routercerts/Makefile") CONFIG_FILES="$CONFIG_FILES utils/scan_routercerts/Makefile" ;; - "utils/uri/Makefile") CONFIG_FILES="$CONFIG_FILES utils/uri/Makefile" ;; - "rtr-origin/Makefile") CONFIG_FILES="$CONFIG_FILES rtr-origin/Makefile" ;; - "rpkid/Makefile") 
CONFIG_FILES="$CONFIG_FILES rpkid/Makefile" ;; - "rpkid/tests/Makefile") CONFIG_FILES="$CONFIG_FILES rpkid/tests/Makefile" ;; + "rp/Makefile") CONFIG_FILES="$CONFIG_FILES rp/Makefile" ;; + "rp/rcynic/Makefile") CONFIG_FILES="$CONFIG_FILES rp/rcynic/Makefile" ;; + "rp/rcynic/static-rsync/Makefile") CONFIG_FILES="$CONFIG_FILES rp/rcynic/static-rsync/Makefile" ;; + "rp/utils/Makefile") CONFIG_FILES="$CONFIG_FILES rp/utils/Makefile" ;; + "rp/rtr-origin/Makefile") CONFIG_FILES="$CONFIG_FILES rp/rtr-origin/Makefile" ;; + "ca/Makefile") CONFIG_FILES="$CONFIG_FILES ca/Makefile" ;; + "ca/tests/Makefile") CONFIG_FILES="$CONFIG_FILES ca/tests/Makefile" ;; *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; esac diff --git a/configure.ac b/configure.ac index 4f4aa244..65368e06 100644 --- a/configure.ac +++ b/configure.ac @@ -9,7 +9,7 @@ AC_INIT([rpkitools], [1.0]) dnl AC_REVISION([$Revision$]) -AC_CONFIG_SRCDIR([rcynic/rcynic.c]) +AC_CONFIG_SRCDIR([rp/rcynic/rcynic.c]) AC_CONFIG_AUX_DIR([buildtools]) # I'm not sure we should really be using the $host_* variables as we @@ -383,34 +383,34 @@ AC_MSG_RESULT([$enable_target_installation]) # rcynic jail setup is complicated enough that it's simplest to have # different rule sets for different platforms. Icky, but.... # rpki-rtr isn't as complicated, but has similar issues, same hack. -# rpkid isn't as complicated either, but same hack. +# ca isn't as complicated either, but same hack. 
case $host_os in darwin*) - RCYNIC_MAKE_RULES='rcynic/rules.darwin.mk' - RTR_ORIGIN_MAKE_RULES='rtr-origin/rules.darwin.mk' - RPKID_MAKE_RULES='rpkid/rules.darwin.mk' + RCYNIC_MAKE_RULES='rp/rcynic/rules.darwin.mk' + RTR_ORIGIN_MAKE_RULES='rp/rtr-origin/rules.darwin.mk' + CA_MAKE_RULES='ca/rules.darwin.mk' ;; freebsd*) - RCYNIC_MAKE_RULES='rcynic/rules.freebsd.mk' - RTR_ORIGIN_MAKE_RULES='rtr-origin/rules.freebsd.mk' - RPKID_MAKE_RULES='rpkid/rules.freebsd.mk' + RCYNIC_MAKE_RULES='rp/rcynic/rules.freebsd.mk' + RTR_ORIGIN_MAKE_RULES='rp/rtr-origin/rules.freebsd.mk' + CA_MAKE_RULES='ca/rules.freebsd.mk' ;; linux*) - RCYNIC_MAKE_RULES='rcynic/rules.linux.mk' - RTR_ORIGIN_MAKE_RULES='rtr-origin/rules.linux.mk' - RPKID_MAKE_RULES='rpkid/rules.linux.mk' + RCYNIC_MAKE_RULES='rp/rcynic/rules.linux.mk' + RTR_ORIGIN_MAKE_RULES='rp/rtr-origin/rules.linux.mk' + CA_MAKE_RULES='ca/rules.linux.mk' ;; *) - RCYNIC_MAKE_RULES='rcynic/rules.unknown.mk' - RTR_ORIGIN_MAKE_RULES='rtr-origin/rules.unknown.mk' - RPKID_MAKE_RULES='rpkid/rules.unknown.mk' + RCYNIC_MAKE_RULES='rp/rcynic/rules.unknown.mk' + RTR_ORIGIN_MAKE_RULES='rp/rtr-origin/rules.unknown.mk' + CA_MAKE_RULES='ca/rules.unknown.mk' ;; esac AC_SUBST_FILE(RCYNIC_MAKE_RULES) AC_SUBST_FILE(RTR_ORIGIN_MAKE_RULES) -AC_SUBST_FILE(RPKID_MAKE_RULES) +AC_SUBST_FILE(CA_MAKE_RULES) # Where to put HTML files is similarly platform dependent, we have to know. # rcynic-cron will skip generating HTML files if it has no place to put them. 
@@ -441,7 +441,7 @@ AC_SUBST(RCYNIC_HTML_DIR) RCYNIC_INSTALL_TARGETS='install-always' RTR_ORIGIN_INSTALL_TARGETS='install-always' -RPKID_INSTALL_TARGETS='install-always' +CA_INSTALL_TARGETS='install-always' if test $use_rcynic_jail = yes then @@ -452,12 +452,12 @@ if test $enable_target_installation = yes then RCYNIC_INSTALL_TARGETS="$RCYNIC_INSTALL_TARGETS install-postconf" RTR_ORIGIN_INSTALL_TARGETS="$RTR_ORIGIN_INSTALL_TARGETS install-postconf" - RPKID_INSTALL_TARGETS="$RPKID_INSTALL_TARGETS install-postconf" + CA_INSTALL_TARGETS="$CA_INSTALL_TARGETS install-postconf" fi AC_SUBST(RCYNIC_INSTALL_TARGETS) AC_SUBST(RTR_ORIGIN_INSTALL_TARGETS) -AC_SUBST(RPKID_INSTALL_TARGETS) +AC_SUBST(CA_INSTALL_TARGETS) # Now a bunch of checks to figure out what we can do with Python. If # we don't have Python at all, none of the rest of this matters. If @@ -754,18 +754,14 @@ fi TOP_LEVEL_SUBDIRS="h" test $build_openssl = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS openssl" -test $build_rp_tools = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS rcynic utils rtr-origin" -test $build_ca_tools = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS rpkid" +test $build_rp_tools = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS rp" +test $build_ca_tools = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS ca" AC_SUBST(TOP_LEVEL_SUBDIRS) AC_CONFIG_FILES([Makefile - h/Makefile]) - -if test "X$RCYNIC_STATIC_RSYNC" != "X" -then - AC_CONFIG_FILES([rcynic/static-rsync/Makefile]) -fi + h/Makefile + schemas/Makefile]) # OpenSSL has its own build system that bears no relationship to # anything but itself, and our use of it is a bit weird, so this is a @@ -818,22 +814,17 @@ fi if test $build_rp_tools = yes then - AC_CONFIG_FILES([rcynic/Makefile - utils/Makefile - utils/find_roa/Makefile - utils/hashdir/Makefile - utils/print_rpki_manifest/Makefile - utils/print_roa/Makefile - utils/scan_roas/Makefile - utils/scan_routercerts/Makefile - utils/uri/Makefile - rtr-origin/Makefile]) + AC_CONFIG_FILES([rp/Makefile + 
rp/rcynic/Makefile + rp/rcynic/static-rsync/Makefile + rp/utils/Makefile + rp/rtr-origin/Makefile]) fi if test $build_ca_tools = yes then - AC_CONFIG_FILES([rpkid/Makefile - rpkid/tests/Makefile]) + AC_CONFIG_FILES([ca/Makefile + ca/tests/Makefile]) fi if test $build_ca_tools = yes && diff --git a/potpourri/rpki b/potpourri/rpki index 168548eb..8d289d0b 120000 --- a/potpourri/rpki +++ b/potpourri/rpki @@ -1 +1 @@ -../rpkid/rpki \ No newline at end of file +../rpki \ No newline at end of file diff --git a/rp/Makefile.in b/rp/Makefile.in new file mode 100644 index 00000000..ceeef9f1 --- /dev/null +++ b/rp/Makefile.in @@ -0,0 +1,9 @@ +# $Id$ + +SUBDIRS = rcynic rtr-origin utils + +all clean test distclean install deinstall uninstall:: + @for i in ${SUBDIRS}; do echo "Making $@ in $$i"; (cd $$i && ${MAKE} $@); done + +distclean:: + rm -f Makefile diff --git a/rp/rcynic/Makefile.in b/rp/rcynic/Makefile.in index 06f7d9cd..972f1de6 100644 --- a/rp/rcynic/Makefile.in +++ b/rp/rcynic/Makefile.in @@ -65,11 +65,11 @@ RPKIRTR_USER = rpkirtr SCRIPTS = rcynic-text rcynic-html rcynic-svn validation_status rcynic-cron -all: ${BIN} ${SCRIPTS} ${RCYNIC_STATIC_RSYNC} +all: ${BIN} ${RCYNIC_STATIC_RSYNC} clean: if test -r static-rsync/Makefile; then cd static-rsync; ${MAKE} $@; fi - rm -f ${BIN} ${OBJS} ${SCRIPTS} + rm -f ${BIN} ${OBJS} ${OBJ}: ${SRC} ${GEN} @@ -80,39 +80,6 @@ ${GEN}: ${SRC} ${PYTHON} ${abs_top_srcdir}/buildtools/defstack.py ${SRC} >$@.tmp mv $@.tmp $@ -COMPILE_PYTHON = \ - AC_PYTHON_INTERPRETER='${PYTHON}' \ - AC_RRDTOOL_BINARY='${RRDTOOL}' \ - ${PYTHON} ${abs_top_srcdir}/buildtools/make-rcynic-script.py <$? 
>$@; \ - chmod 755 $@ - -COMPILE_PYTHON_CRON = \ - AC_PYTHON_INTERPRETER='${PYTHON}' \ - AC_RCYNIC_USER='${RCYNIC_USER}' \ - AC_RCYNIC_DIR='${RCYNIC_DIR}' \ - AC_bindir='${bindir}' \ - AC_sbindir='${sbindir}' \ - AC_sysconfdir='${sysconfdir}' \ - AC_libexecdir='${libexecdir}' \ - AC_RCYNIC_HTML_DIR='${RCYNIC_HTML_DIR}' \ - ${PYTHON} ${abs_top_srcdir}/buildtools/make-rcynic-script.py <$? >$@; \ - chmod 755 $@ - -rcynic-text: rcynic-text.py - ${COMPILE_PYTHON} - -rcynic-html: rcynic-html.py - ${COMPILE_PYTHON} - -rcynic-svn: rcynic-svn.py - ${COMPILE_PYTHON} - -validation_status: validation_status.py - ${COMPILE_PYTHON} - -rcynic-cron: rcynic-cron.py - ${COMPILE_PYTHON_CRON} - tags: TAGS TAGS: ${SRC} ${GEN} diff --git a/rp/rcynic/rcynic-cron b/rp/rcynic/rcynic-cron index fbe1ebeb..4da1d5cd 100755 --- a/rp/rcynic/rcynic-cron +++ b/rp/rcynic/rcynic-cron @@ -37,13 +37,13 @@ def run(*cmd, **kwargs): pid = os.fork() if pid == 0: if chroot_this: - os.chdir(ac_rcynic_dir) + os.chdir(rpki.autoconf.rcynic_dir) elif cwd is not None: os.chdir(cwd) if we_are_root: os.initgroups(pw.pw_name, pw.pw_gid) if chroot_this: - os.chroot(ac_rcynic_dir) + os.chroot(rpki.autoconf.rcynic_dir) if we_are_root: os.setgid(pw.pw_gid) os.setuid(pw.pw_uid) @@ -71,12 +71,12 @@ if args.chroot and not we_are_root: sys.exit("Only root can --chroot") try: - pw = pwd.getpwnam(ac_rcynic_user) + pw = pwd.getpwnam(rpki.autoconf.rcynic_user) except KeyError: - sys.exit("Could not find passwd entry for user %s" % ac_rcynic_user) + sys.exit("Could not find passwd entry for user %s" % rpki.autoconf.rcynic_user) try: - lock = os.open(os.path.join(ac_rcynic_dir, "data/lock"), os.O_RDONLY | os.O_CREAT | os.O_NONBLOCK, 0666) + lock = os.open(os.path.join(rpki.autoconf.rcynic_dir, "data/lock"), os.O_RDONLY | os.O_CREAT | os.O_NONBLOCK, 0666) fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB) if we_are_root: os.fchown(lock, pw.pw_uid, pw.pw_gid) @@ -84,23 +84,23 @@ except (IOError, OSError), e: if e.errno == 
errno.EAGAIN: sys.exit(0) # Another instance of this script is already running, exit silently else: - sys.exit("Error %r opening lock %r" % (e.strerror, os.path.join(ac_rcynic_dir, "data/lock"))) + sys.exit("Error %r opening lock %r" % (e.strerror, os.path.join(rpki.autoconf.rcynic_dir, "data/lock"))) if args.chroot: run("/bin/rcynic", "-c", "/etc/rcynic.conf", chroot_this = True) else: - run(os.path.join(ac_bindir, "rcynic"), "-c", os.path.join(ac_sysconfdir, "rcynic.conf")) + run(os.path.join(rpki.autoconf.bindir, "rcynic"), "-c", os.path.join(rpki.autoconf.sysconfdir, "rcynic.conf")) -run(os.path.join(ac_bindir, "rtr-origin"), +run(os.path.join(rpki.autoconf.bindir, "rtr-origin"), "--cronjob", - os.path.join(ac_rcynic_dir, "data/authenticated"), - cwd = os.path.join(ac_rcynic_dir, "rpki-rtr")) + os.path.join(rpki.autoconf.rcynic_dir, "data/authenticated"), + cwd = os.path.join(rpki.autoconf.rcynic_dir, "rpki-rtr")) -prog = os.path.join(ac_libexecdir, "rpkigui-rcynic") +prog = os.path.join(rpki.autoconf.libexecdir, "rpkigui-rcynic") if os.path.exists(prog): run(prog) -if ac_rcynic_html_dir and os.path.exists(os.path.dirname(ac_rcynic_html_dir)): - run(os.path.join(ac_bindir, "rcynic-html"), - os.path.join(ac_rcynic_dir, "data/rcynic.xml"), - ac_rcynic_html_dir) +if rpki.autoconf.rcynic_html_dir and os.path.exists(os.path.dirname(rpki.autoconf.rcynic_html_dir)): + run(os.path.join(rpki.autoconf.bindir, "rcynic-html"), + os.path.join(rpki.autoconf.rcynic_dir, "data/rcynic.xml"), + rpki.autoconf.rcynic_html_dir) diff --git a/rp/rcynic/rcynic-html b/rp/rcynic/rcynic-html index 58e65dde..6070cd13 100755 --- a/rp/rcynic/rcynic-html +++ b/rp/rcynic/rcynic-html @@ -41,7 +41,7 @@ def parse_options(): global args try: - default_rrdtool_binary = ac_rrdtool_binary + default_rrdtool_binary = rpki.autoconf.rrdtool_binary except NameError: default_rrdtool_binary = "rrdtool" diff --git a/rp/rtr-origin/Makefile.in b/rp/rtr-origin/Makefile.in index daa18009..e587305b 100644 --- 
a/rp/rtr-origin/Makefile.in +++ b/rp/rtr-origin/Makefile.in @@ -1,8 +1,6 @@ # $Id$ -BASE = rtr-origin -SRC = ${BASE}.py -BIN = ${BASE} +BIN = rtr-origin INSTALL = @INSTALL@ PYTHON = @PYTHON@ @@ -30,10 +28,8 @@ RPKI_RTR_PORT = 43779 SCAN_ROAS = ${bindir}/scan_roas -all: ${BIN} - -clean: - rm -f ${BIN} +all clean test:: + @true install: all ${RTR_ORIGIN_INSTALL_TARGETS} @@ -48,14 +44,6 @@ distclean: clean rm -rf current sockets *.ax *.ix.* rm -f Makefile -${BIN} : ${SRC} - AC_PYTHON_INTERPRETER='${PYTHON}' AC_SCAN_ROAS='${SCAN_ROAS}' \ - ${PYTHON} ${abs_top_srcdir}/buildtools/make-rcynic-script.py <${SRC} >$@ - chmod a+x $@ - -test: - @true - .FORCE: # Platform-specific rules below here. diff --git a/rp/rtr-origin/rtr-origin b/rp/rtr-origin/rtr-origin index f37d2ce0..e1e82ccf 100755 --- a/rp/rtr-origin/rtr-origin +++ b/rp/rtr-origin/rtr-origin @@ -2163,7 +2163,7 @@ def bgpdump_server_main(argv): # Figure out where the scan_roas utility program is today try: # Set from autoconf - scan_roas = ac_scan_roas + scan_roas = rpki.autoconf.scan_roas except NameError: # Source directory scan_roas = os.path.normpath(os.path.join(sys.path[0], "..", "utils", @@ -2175,7 +2175,7 @@ if not os.path.exists(scan_roas): # Same thing for scan_routercerts try: # Set from autoconf - scan_routercerts = ac_scan_routercerts + scan_routercerts = rpki.autoconf.scan_routercerts except NameError: # Source directory scan_routercerts = os.path.normpath(os.path.join(sys.path[0], "..", "utils", diff --git a/rp/utils/Makefile.in b/rp/utils/Makefile.in index c89fdff5..03c041a4 100644 --- a/rp/utils/Makefile.in +++ b/rp/utils/Makefile.in @@ -1,9 +1,62 @@ # $Id$ -SUBDIRS = uri print_rpki_manifest print_roa hashdir find_roa scan_roas scan_routercerts +CFLAGS = @CFLAGS@ +LDFLAGS = @LDFLAGS@ +LIBS = @LIBS@ -all clean test distclean install deinstall uninstall:: - @for i in ${SUBDIRS}; do echo "Making $@ in $$i"; (cd $$i && ${MAKE} $@); done +INSTALL = @INSTALL@ -m 555 -distclean:: +prefix = @prefix@ 
+exec_prefix = @exec_prefix@ +datarootdir = @datarootdir@ +datadir = @datadir@ +localstatedir = @localstatedir@ +sharedstatedir = @sharedstatedir@ +sysconfdir = @sysconfdir@ +bindir = @bindir@ +sbindir = @sbindir@ +libexecdir = @libexecdir@ +libdir = @libdir@ + +abs_top_srcdir = @abs_top_srcdir@ +abs_top_builddir = @abs_top_builddir@ + +BINS = find_roa hashdir print_rpki_manifest print_roa scan_roas uri + +SCRIPTS = scan_routercerts + +all: ${BINS} + +clean: + rm -rf ${BINS} *.o *.dSYM + +test: + @true + +install: all + if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi + ${INSTALL} ${BINS} ${SCRIPTS} ${DESTDIR}${bindir} + +deinstall uninstall: + for i in ${BINS} ${SCRIPTS}; do rm -f ${DESTDIR}${bindir}/$$i; done + +distclean: clean rm -f Makefile + +find_roa: find_roa.c + ${CC} ${CFLAGS} -o $@ find_roa.c ${LDFLAGS} ${LIBS} + +hashdir: hashdir.c + ${CC} ${CFLAGS} -o $@ hashdir.c ${LDFLAGS} ${LIBS} + +print_roa: print_roa.c + ${CC} ${CFLAGS} -o $@ print_roa.c ${LDFLAGS} ${LIBS} + +print_rpki_manifest: print_rpki_manifest.c + ${CC} ${CFLAGS} -o $@ print_rpki_manifest.c ${LDFLAGS} ${LIBS} + +scan_roas: scan_roas.c + ${CC} ${CFLAGS} -o $@ scan_roas.c ${LDFLAGS} ${LIBS} + +uri: uri.c + ${CC} ${CFLAGS} -o $@ uri.c ${LDFLAGS} ${LIBS} diff --git a/rp/utils/find_roa/Makefile.in b/rp/utils/find_roa/Makefile.in deleted file mode 100644 index 36c68e01..00000000 --- a/rp/utils/find_roa/Makefile.in +++ /dev/null @@ -1,56 +0,0 @@ -# $Id$ - -NAME = find_roa - -BIN = ${NAME} -SRC = ${NAME}.c -OBJ = ${NAME}.o - -CFLAGS = @CFLAGS@ -LDFLAGS = @LDFLAGS@ -LIBS = @LIBS@ - -INSTALL = @INSTALL@ -m 555 - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -libdir = @libdir@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - 
-all: ${BIN} - -clean: - rm -rf ${BIN} ${OBJ} ${BIN}.dSYM - -${BIN}: ${SRC} - ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} - - -ROA_DIR = ${abs_top_builddir}/rcynic/rcynic-data/authenticated - -TEST_ARGS = ${ROA_DIR} 10.3.0.44 10.2.0.6 10.0.0.0/24 - -test: ${BIN} -# if test -d ${ROA_DIR}; then ./${BIN} ${TEST_ARGS} ; else :; fi - if test -d ${ROA_DIR}; then sh ./test_roa.sh ${TEST_ARGS} ; else :; fi - -install: all - if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi - ${INSTALL} ${BIN} ${DESTDIR}${bindir} - -deinstall uninstall: - rm -f ${DESTDIR}${bindir}/${BIN} - -distclean: clean - rm -rf hashed-pem-dir - rm -f Makefile diff --git a/rp/utils/hashdir/Makefile.in b/rp/utils/hashdir/Makefile.in deleted file mode 100644 index c0cf448a..00000000 --- a/rp/utils/hashdir/Makefile.in +++ /dev/null @@ -1,55 +0,0 @@ -# $Id$ - -NAME = hashdir - -BIN = ${NAME} -SRC = ${NAME}.c -OBJ = ${NAME}.o - -CFLAGS = @CFLAGS@ -LDFLAGS = @LDFLAGS@ -LIBS = @LIBS@ - -INSTALL = @INSTALL@ -m 555 - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -libdir = @libdir@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all: ${BIN} - -clean:: - rm -rf ${BIN} ${OBJ} ${BIN}.dSYM - -${BIN}: ${SRC} - ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} - -INPUT = ${abs_top_builddir}/rcynic/rcynic-data/authenticated -OUTPUT = hashed-pem-dir - -test: ${BIN} - if test -d ${INPUT}; then rm -rf ${OUTPUT} && mkdir ${OUTPUT} && ./hashdir ${INPUT} ${OUTPUT}; else :; fi - -clean:: - rm -rf ${OUTPUT} - -install: all - if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi - ${INSTALL} ${BIN} ${DESTDIR}${bindir} - -deinstall uninstall: - rm -f ${DESTDIR}${bindir}/${BIN} - -distclean: clean - rm -f Makefile diff --git 
a/rp/utils/print_roa/Makefile.in b/rp/utils/print_roa/Makefile.in deleted file mode 100644 index 5999b351..00000000 --- a/rp/utils/print_roa/Makefile.in +++ /dev/null @@ -1,52 +0,0 @@ -# $Id$ - -NAME = print_roa - -BIN = ${NAME} -SRC = ${NAME}.c -OBJ = ${NAME}.o - -CFLAGS = @CFLAGS@ -LDFLAGS = @LDFLAGS@ -LIBS = @LIBS@ - -INSTALL = @INSTALL@ -m 555 - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -libdir = @libdir@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all: ${BIN} - -clean: - rm -rf ${BIN} ${OBJ} ${BIN}.dSYM - -${BIN}: ${SRC} - ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} - -ROA_DIR = ${abs_top_builddir}/rpkid/tests/smoketest.dir/publication - -test: all - -date -u +'now: %Y%m%d%H%M%SZ' - if test -d ${ROA_DIR}; then find ${ROA_DIR} -type f -name '*.roa' -print -exec ./${BIN} {} \; ; else :; fi - -install: all - if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi - ${INSTALL} ${BIN} ${DESTDIR}${bindir} - -deinstall uninstall: - rm -f ${DESTDIR}${bindir}/${BIN} - -distclean: clean - rm -f Makefile diff --git a/rp/utils/print_rpki_manifest/Makefile.in b/rp/utils/print_rpki_manifest/Makefile.in deleted file mode 100644 index 22f1b16b..00000000 --- a/rp/utils/print_rpki_manifest/Makefile.in +++ /dev/null @@ -1,52 +0,0 @@ -# $Id$ - -NAME = print_rpki_manifest - -BIN = ${NAME} -SRC = ${NAME}.c -OBJ = ${NAME}.o - -CFLAGS = @CFLAGS@ -LDFLAGS = @LDFLAGS@ -LIBS = @LIBS@ - -INSTALL = @INSTALL@ -m 555 - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -libdir = @libdir@ - -abs_top_srcdir 
= @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all: ${BIN} - -clean: - rm -rf ${BIN} ${OBJ} ${BIN}.dSYM - -${BIN}: ${SRC} - ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} - -MANIFEST_DIR = ${abs_top_builddir}/rpkid/tests/smoketest.dir/publication - -test: all - -date -u +'now: %Y%m%d%H%M%SZ' - if test -d ${MANIFEST_DIR}; then find ${MANIFEST_DIR} -type f -name '*.mnf' -print -exec ./${BIN} {} \; ; else :; fi - -install: all - if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi - ${INSTALL} ${BIN} ${DESTDIR}${bindir} - -deinstall uninstall: - rm -f ${DESTDIR}${bindir}/${BIN} - -distclean: clean - rm -f Makefile diff --git a/rp/utils/scan_roas/Makefile.in b/rp/utils/scan_roas/Makefile.in deleted file mode 100644 index 7707969c..00000000 --- a/rp/utils/scan_roas/Makefile.in +++ /dev/null @@ -1,52 +0,0 @@ -# $Id$ - -NAME = scan_roas - -BIN = ${NAME} -SRC = ${NAME}.c -OBJ = ${NAME}.o - -CFLAGS = @CFLAGS@ -LDFLAGS = @LDFLAGS@ -LIBS = @LIBS@ - -INSTALL = @INSTALL@ -m 555 - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -libdir = @libdir@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all: ${BIN} - -clean: - rm -rf ${BIN} ${OBJ} ${BIN}.dSYM - -${BIN}: ${SRC} - ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} - -ROA_DIR = ${abs_top_builddir}/rpkid/tests/smoketest.dir/publication - -test: all - -date -u +'now: %Y%m%d%H%M%SZ' - if test -d ${ROA_DIR}; then ./${BIN} ${ROA_DIR} ; else :; fi - -install: all - if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi - ${INSTALL} ${BIN} ${DESTDIR}${bindir} - -deinstall uninstall: - rm -f ${DESTDIR}${bindir}/${BIN} - -distclean: clean - rm -f Makefile diff --git a/rp/utils/scan_routercerts/Makefile.in 
b/rp/utils/scan_routercerts/Makefile.in deleted file mode 100644 index 715d1325..00000000 --- a/rp/utils/scan_routercerts/Makefile.in +++ /dev/null @@ -1,41 +0,0 @@ -# $Id$ - -NAME = scan_routercerts - -BIN = ${NAME} - -INSTALL = @INSTALL@ -m 555 - -prefix = @prefix@ -exec_prefix = @exec_prefix@ -datarootdir = @datarootdir@ -datadir = @datadir@ -localstatedir = @localstatedir@ -sharedstatedir = @sharedstatedir@ -sysconfdir = @sysconfdir@ -bindir = @bindir@ -sbindir = @sbindir@ -libexecdir = @libexecdir@ -libdir = @libdir@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all clean: - @true - -ROUTERCERT_DIR = ${abs_top_builddir}/rpkid/tests/smoketest.dir/publication - -test: all - -date -u +'now: %Y%m%d%H%M%SZ' - if test -d ${ROUTERCERT_DIR}; then ./${BIN} ; else :; fi - -install: all - if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi - ${INSTALL} ${BIN} ${DESTDIR}${bindir} - -deinstall uninstall: - rm -f ${DESTDIR}${bindir}/${BIN} - -distclean: clean - rm -f Makefile diff --git a/rp/utils/uri/Makefile.in b/rp/utils/uri/Makefile.in deleted file mode 100644 index fc545060..00000000 --- a/rp/utils/uri/Makefile.in +++ /dev/null @@ -1,31 +0,0 @@ -# $Id$ - -NAME = uri - -BIN = ${NAME} -SRC = ${NAME}.c -OBJ = ${NAME}.o - -CFLAGS = @CFLAGS@ -LDFLAGS = @LDFLAGS@ -LIBS = @LIBS@ - -abs_top_srcdir = @abs_top_srcdir@ -abs_top_builddir = @abs_top_builddir@ - -all: ${BIN} - -clean: - rm -rf ${BIN} ${OBJ} ${BIN}.dSYM - -${BIN}: ${SRC} - ${CC} ${CFLAGS} -o $@ ${SRC} ${LDFLAGS} ${LIBS} - -test: - @true - -install deinstall uninstall: - @true - -distclean: clean - rm -f Makefile diff --git a/schemas/Makefile.in b/schemas/Makefile.in new file mode 100644 index 00000000..3451d987 --- /dev/null +++ b/schemas/Makefile.in @@ -0,0 +1,66 @@ +# $Id$ + +PYTHON = @PYTHON@ +TRANG = @TRANG@ + +CFLAGS = @CFLAGS@ +LDFLAGS = @LDFLAGS@ @POW_LDFLAGS@ +LIBS = @LIBS@ + +INSTALL = @INSTALL@ -m 555 + +prefix = @prefix@ +exec_prefix = 
@exec_prefix@ +datarootdir = @datarootdir@ +datadir = @datadir@ +localstatedir = @localstatedir@ +sharedstatedir = @sharedstatedir@ +sysconfdir = @sysconfdir@ +bindir = @bindir@ +sbindir = @sbindir@ +libexecdir = @libexecdir@ +sysconfdir = @sysconfdir@ + +abs_builddir = @abs_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +abs_top_builddir= @abs_top_builddir@ +srcdir = @srcdir@ + +all:: ${abs_top_srcdir}/rpki/relaxng.py ${abs_top_srcdir}/rpki/sql_schemas.py + +install clean test distclean deinstall uninstall:: + @true + +RNGS = relaxng/left-right-schema.rng \ + relaxng/up-down-schema.rng \ + relaxng/publication-schema.rng \ + relaxng/myrpki.rng \ + relaxng/router-certificate-schema.rng + +SQLS = sql/rpkid.sql sql/pubd.sql + +${abs_top_srcdir}/rpki/relaxng.py: ${abs_top_srcdir}/buildtools/make-relaxng.py ${RNGS} + ${PYTHON} ${abs_top_srcdir}/buildtools/make-relaxng.py ${RNGS} >$@.tmp + mv $@.tmp $@ + +${abs_top_srcdir}/rpki/sql_schemas.py: ${abs_top_srcdir}/buildtools/make-sql-schemas.py ${SQLS} + ${PYTHON} ${abs_top_srcdir}/buildtools/make-sql-schemas.py >$@.tmp + mv $@.tmp $@ + +relaxng/left-right-schema.rng: relaxng/left-right-schema.rnc + ${TRANG} relaxng/left-right-schema.rnc relaxng/left-right-schema.rng + +relaxng/up-down-schema.rng: relaxng/up-down-schema.rnc + ${TRANG} relaxng/up-down-schema.rnc relaxng/up-down-schema.rng + +relaxng/publication-schema.rng: relaxng/publication-schema.rnc + ${TRANG} relaxng/publication-schema.rnc relaxng/publication-schema.rng + +relaxng/myrpki.rng: relaxng/myrpki.rnc + ${TRANG} relaxng/myrpki.rnc relaxng/myrpki.rng + +relaxng/router-certificate-schema.rng: relaxng/router-certificate-schema.rnc + ${TRANG} relaxng/router-certificate-schema.rnc relaxng/router-certificate-schema.rng + +dont-run-trang: + touch relaxng/*.rng diff --git a/setup.py b/setup.py index 653d2d31..f370e5aa 100644 --- a/setup.py +++ b/setup.py @@ -62,27 +62,27 @@ setup(name = "rpkitoolkit", "rpki.gui.cacheview" : ["templates/*/*.html"]}, scripts = 
[(autoconf.sbindir, - ["rpkic", - "rpki-confgen", - "rpki-start-servers", - "rpki-sql-backup", - "rpki-sql-setup", - "portal-gui/scripts/rpki-manage", - "portal-gui/scripts/rpkigui-query-routes", - "irbe_cli"]), + ["ca/rpkic", + "ca/rpki-confgen", + "ca/rpki-start-servers", + "ca/rpki-sql-backup", + "ca/rpki-sql-setup", + "ca/rpki-manage", + "ca/rpkigui-query-routes", + "ca/irbe_cli"]), (autoconf.libexecdir, - ["irdbd", - "pubd", - "rootd", - "rpkid", - "portal-gui/scripts/rpkigui-import-routes", - "portal-gui/scripts/rpkigui-check-expired", - "portal-gui/scripts/rpkigui-rcynic", - "portal-gui/scripts/rpkigui-apache-conf-gen"])], + ["ca/irdbd", + "ca/pubd", + "ca/rootd", + "ca/rpkid", + "ca/rpkigui-import-routes", + "ca/rpkigui-check-expired", + "ca/rpkigui-rcynic", + "ca/rpkigui-apache-conf-gen"])], data_files = [(autoconf.sysconfdir + "/rpki", - ["rpki-confgen.xml"]), + ["ca/rpki-confgen.xml"]), (autoconf.datarootdir + "/rpki/wsgi", - ["portal-gui/rpki.wsgi"]), + ["ca/rpki.wsgi"]), (autoconf.datarootdir + "/rpki/media/css", glob("rpki/gui/app/static/css/*")), (autoconf.datarootdir + "/rpki/media/js", @@ -90,4 +90,4 @@ setup(name = "rpkitoolkit", (autoconf.datarootdir + "/rpki/media/img", glob("rpki/gui/app/static/img/*")), (autoconf.datarootdir + "/rpki/upgrade-scripts", - glob("upgrade-scripts/*"))]) + glob("ca/upgrade-scripts/*"))]) -- cgit v1.2.3 From d8eb94e6f5d7f23d1c208339052587647f60416d Mon Sep 17 00:00:00 2001 From: Rob Austein Date: Mon, 7 Apr 2014 02:58:47 +0000 Subject: RP cleanup and test rules. 
svn path=/branches/tk685/; revision=5759 --- rp/rcynic/rcynic-cron | 2 ++ rp/rcynic/rcynic-html | 2 ++ rp/rcynic/rcynic-svn | 2 ++ rp/rcynic/rcynic-text | 2 ++ rp/rcynic/rpki-torrent.py | 4 +-- rp/rcynic/validation_status | 2 ++ rp/utils/Makefile.in | 36 ++++++++++++++++++----- rp/utils/scan_routercerts | 69 ++++++++++++++++++++++++++++++++++++++++++++ rp/utils/scan_routercerts.py | 69 -------------------------------------------- schemas/Makefile.in | 3 ++ 10 files changed, 112 insertions(+), 79 deletions(-) create mode 100755 rp/utils/scan_routercerts delete mode 100755 rp/utils/scan_routercerts.py diff --git a/rp/rcynic/rcynic-cron b/rp/rcynic/rcynic-cron index 4da1d5cd..73368e0d 100755 --- a/rp/rcynic/rcynic-cron +++ b/rp/rcynic/rcynic-cron @@ -1,3 +1,5 @@ +#!/usr/bin/env python +# # $Id$ # # Copyright (C) 2014 Dragon Research Labs ("DRL") diff --git a/rp/rcynic/rcynic-html b/rp/rcynic/rcynic-html index 6070cd13..a7de2291 100755 --- a/rp/rcynic/rcynic-html +++ b/rp/rcynic/rcynic-html @@ -1,3 +1,5 @@ +#!/usr/bin/env python +# # $Id$ # # Copyright (C) 2013--2014 Dragon Research Labs ("DRL") diff --git a/rp/rcynic/rcynic-svn b/rp/rcynic/rcynic-svn index fd0df500..c667ec4a 100755 --- a/rp/rcynic/rcynic-svn +++ b/rp/rcynic/rcynic-svn @@ -1,3 +1,5 @@ +#!/usr/bin/env python +# # $Id$ # # Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") diff --git a/rp/rcynic/rcynic-text b/rp/rcynic/rcynic-text index a8e56dac..c837e88b 100755 --- a/rp/rcynic/rcynic-text +++ b/rp/rcynic/rcynic-text @@ -1,3 +1,5 @@ +#!/usr/bin/env python +# # $Id$ # # Copyright (C) 2012 Internet Systems Consortium, Inc. 
("ISC") diff --git a/rp/rcynic/rpki-torrent.py b/rp/rcynic/rpki-torrent.py index 9b97f298..cc0798e7 100644 --- a/rp/rcynic/rpki-torrent.py +++ b/rp/rcynic/rpki-torrent.py @@ -1,5 +1,5 @@ -#!/usr/local/bin/python - +#!/usr/bin/env python +# # $Id$ # # Copyright (C) 2013--2014 Dragon Research Labs ("DRL") diff --git a/rp/rcynic/validation_status b/rp/rcynic/validation_status index 1f7a704d..1ff17e75 100755 --- a/rp/rcynic/validation_status +++ b/rp/rcynic/validation_status @@ -1,3 +1,5 @@ +#!/usr/bin/env python +# # $Id$ # # Copyright (C) 2012 Internet Systems Consortium, Inc. ("ISC") diff --git a/rp/utils/Makefile.in b/rp/utils/Makefile.in index 03c041a4..137230b4 100644 --- a/rp/utils/Makefile.in +++ b/rp/utils/Makefile.in @@ -25,22 +25,19 @@ BINS = find_roa hashdir print_rpki_manifest print_roa scan_roas uri SCRIPTS = scan_routercerts -all: ${BINS} +all:: ${BINS} -clean: +clean:: rm -rf ${BINS} *.o *.dSYM -test: - @true - -install: all +install:: all if test -d ${DESTDIR}${bindir} ; then :; else ${INSTALL} -d ${DESTDIR}${bindir}; fi ${INSTALL} ${BINS} ${SCRIPTS} ${DESTDIR}${bindir} -deinstall uninstall: +deinstall uninstall:: for i in ${BINS} ${SCRIPTS}; do rm -f ${DESTDIR}${bindir}/$$i; done -distclean: clean +distclean:: clean rm -f Makefile find_roa: find_roa.c @@ -60,3 +57,26 @@ scan_roas: scan_roas.c uri: uri.c ${CC} ${CFLAGS} -o $@ uri.c ${LDFLAGS} ${LIBS} + +# Tests + +RSYNC_AUTH_DIR = ${abs_top_builddir}/rp/rcynic/rcynic-data/authenticated +HASHDIR_OUTPUT = hashed-pem-dir +TARGET_PREFIXES = 10.3.0.44 10.2.0.6 10.0.0.0/24 + +test:: ${BINS} ${SCRIPTS} + if test -d ${RSYNC_AUTH_DIR}; \ + then \ + rm -rf ${HASHDIR_OUTPUT} ; \ + mkdir ${HASHDIR_OUTPUT} ; \ + ./hashdir ${RSYNC_AUTH_DIR} ${HASHDIR_OUTPUT}; \ + ./find_roa ${RSYNC_AUTH_DIR} ${TARGET_PREFIXES} ; \ + date -u +'now: %Y%m%d%H%M%SZ' || : ; \ + find ${RSYNC_AUTH_DIR} -type f -name '*.roa' -print -exec ./print_roa {} \; ; \ + find ${RSYNC_AUTH_DIR} -type f -name '*.mft' -print -exec ./print_rpki_manifest 
{} \; ; \ + ./scan_roas ${RSYNC_AUTH_DIR} ; \ + ./scan_routercerts ${RSYNC_AUTH_DIR} ; \ + fi + +clean:: + rm -rf ${HASHDIR_OUTPUT} diff --git a/rp/utils/scan_routercerts b/rp/utils/scan_routercerts new file mode 100755 index 00000000..342fa272 --- /dev/null +++ b/rp/utils/scan_routercerts @@ -0,0 +1,69 @@ +#!/usr/bin/env python +# $Id$ +# +# Copyright (C) 2014 Dragon Research Labs ("DRL") +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH +# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS. IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, +# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. + +""" +Scan rcynic validated output looking for router certificates, print +out stuff that the rpki-rtr code cares about. +""" + +# This program represents a weird temporary state, mostly to avoid +# diving into a recursive yak shaving exercise. +# +# Under the old scheme, anything used by the RP code should be either +# C code or pure Python code using just the standard libraries. This +# has gotten silly, but we haven't yet refactored the current packaged +# builds from two packages into three (adding a -libs package). +# +# So, by rights, this program should be a C monstrosity written using +# the OpenSSL C API. I started coding it that way, but it was just +# too painful for something we're probably going to rewrite as a few +# lines of Python once we refactor, but by the same token I didn't +# want to delay router certificate support until the refactoring. 
+# +# So this program anticipates the new scheme of things, but makes one +# concession to current reality: if it has a problem importing the +# RPKI-specific libraries, it just quietly exits as if everything were +# fine and there simply are no router certificates to report. This +# isn't the right answer in the long run, but will suffice to avoid +# further bald yaks. + +import os +import sys +import base64 + +try: + import rpki.POW + import rpki.oids +except ImportError: + sys.exit(0) + +rcynic_dir = sys.argv[1] + +for root, dirs, files in os.walk(rcynic_dir): + for fn in files: + if not fn.endswith(".cer"): + continue + x = rpki.POW.X509.derReadFile(os.path.join(root, fn)) + + if rpki.oids.id_kp_bgpsec_router not in (x.getEKU() or ()): + continue + + sys.stdout.write(base64.urlsafe_b64encode(x.getSKI()).rstrip("=")) + for min_asn, max_asn in x.getRFC3779()[0]: + for asn in xrange(min_asn, max_asn + 1): + sys.stdout.write(" %s" % asn) + sys.stdout.write(" %s\n" % base64.b64encode(x.getPublicKey().derWritePublic())) diff --git a/rp/utils/scan_routercerts.py b/rp/utils/scan_routercerts.py deleted file mode 100755 index 342fa272..00000000 --- a/rp/utils/scan_routercerts.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python -# $Id$ -# -# Copyright (C) 2014 Dragon Research Labs ("DRL") -# -# Permission to use, copy, modify, and/or distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND DRL DISCLAIMS ALL WARRANTIES WITH -# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS. 
IN NO EVENT SHALL DRL BE LIABLE FOR ANY SPECIAL, DIRECT, -# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. - -""" -Scan rcynic validated output looking for router certificates, print -out stuff that the rpki-rtr code cares about. -""" - -# This program represents a weird temporary state, mostly to avoid -# diving into a recursive yak shaving exercise. -# -# Under the old scheme, anything used by the RP code should be either -# C code or pure Python code using just the standard libraries. This -# has gotten silly, but we haven't yet refactored the current packaged -# builds from two packages into three (adding a -libs package). -# -# So, by rights, this program should be a C monstrosity written using -# the OpenSSL C API. I started coding it that way, but it was just -# too painful for something we're probably going to rewrite as a few -# lines of Python once we refactor, but by the same token I didn't -# want to delay router certificate support until the refactoring. -# -# So this program anticipates the new scheme of things, but makes one -# concession to current reality: if it has a problem importing the -# RPKI-specific libraries, it just quietly exits as if everything were -# fine and there simply are no router certificates to report. This -# isn't the right answer in the long run, but will suffice to avoid -# further bald yaks. 
- -import os -import sys -import base64 - -try: - import rpki.POW - import rpki.oids -except ImportError: - sys.exit(0) - -rcynic_dir = sys.argv[1] - -for root, dirs, files in os.walk(rcynic_dir): - for fn in files: - if not fn.endswith(".cer"): - continue - x = rpki.POW.X509.derReadFile(os.path.join(root, fn)) - - if rpki.oids.id_kp_bgpsec_router not in (x.getEKU() or ()): - continue - - sys.stdout.write(base64.urlsafe_b64encode(x.getSKI()).rstrip("=")) - for min_asn, max_asn in x.getRFC3779()[0]: - for asn in xrange(min_asn, max_asn + 1): - sys.stdout.write(" %s" % asn) - sys.stdout.write(" %s\n" % base64.b64encode(x.getPublicKey().derWritePublic())) diff --git a/schemas/Makefile.in b/schemas/Makefile.in index 3451d987..b7dae635 100644 --- a/schemas/Makefile.in +++ b/schemas/Makefile.in @@ -31,6 +31,9 @@ all:: ${abs_top_srcdir}/rpki/relaxng.py ${abs_top_srcdir}/rpki/sql_schemas.py install clean test distclean deinstall uninstall:: @true +distclean:: + rm -f Makefile + RNGS = relaxng/left-right-schema.rng \ relaxng/up-down-schema.rng \ relaxng/publication-schema.rng \ -- cgit v1.2.3 From 8a54b07cc85256a8fe9132c3d0422b31f2570a1c Mon Sep 17 00:00:00 2001 From: Rob Austein Date: Mon, 7 Apr 2014 03:18:50 +0000 Subject: Fix schema generation. 
svn path=/branches/tk685/; revision=5760 --- Makefile.in | 17 ++---------- configure | 2 +- configure.ac | 2 +- rpki/relaxng.py | 40 +++++++++++++-------------- rpki/sql_schemas.py | 4 +-- schemas/Makefile.in | 2 +- schemas/relaxng/left-right-schema.rng | 2 +- schemas/relaxng/myrpki.rng | 2 +- schemas/relaxng/publication-schema.rng | 2 +- schemas/relaxng/router-certificate-schema.rng | 2 +- schemas/relaxng/up-down-schema.rng | 2 +- 11 files changed, 33 insertions(+), 44 deletions(-) diff --git a/Makefile.in b/Makefile.in index 1394a6cb..71dab019 100644 --- a/Makefile.in +++ b/Makefile.in @@ -42,8 +42,9 @@ export: tar czf subvert-rpki.hactrn.net-$$(date +%Y.%m.%d).tar.gz subvert-rpki.hactrn.net rm -rf subvert-rpki.hactrn.net -distclean clean:: - rm -rf build autom4te.cache +clean distclean:: + rm -rf build autom4te.cache rpki/autoconf.py setup_autoconf.py setup_autoconf.pyc ${POW_SO} build dist + find . -type f -name '*.py[co]' -delete distclean:: rm -rf Makefile config.log config.status @@ -73,9 +74,6 @@ rpki/autoconf.py: Makefile echo 'WSGI_PYTHON_EGG_CACHE_USER = "${WSGI_PYTHON_EGG_CACHE_USER}"'; \ ) > $@ -clean:: - rm -f rpki/autoconf.py - setup_autoconf.py: rpki/autoconf.py @echo 'Generating $@'; \ (cat rpki/autoconf.py; \ @@ -85,9 +83,6 @@ setup_autoconf.py: rpki/autoconf.py echo 'LIBS = """${LIBS}"""'; \ ) > $@ -clean:: - rm -f setup_autoconf.py setup_autoconf.pyc - SETUP_PY_ROOT = `${PYTHON} -c 'import sys; print "--root " + sys.argv[1] if sys.argv[1] else ""' '${DESTDIR}'` POW_SO = rpki/POW/_POW.so @@ -102,9 +97,3 @@ ${POW_SO}: .FORCE setup_autoconf.py build/stamp: .FORCE setup_autoconf.py ${PYTHON} setup.py build touch $@ - -clean:: - rm -rf ${POW_SO} build dist - -clean:: - find . -type f -name '*.py[co]' -delete diff --git a/configure b/configure index 711ec03d..921affbf 100755 --- a/configure +++ b/configure @@ -4990,7 +4990,7 @@ fi # Figure out which parts of this package we have to build. 
- TOP_LEVEL_SUBDIRS="h" + TOP_LEVEL_SUBDIRS="h schemas" test $build_openssl = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS openssl" test $build_rp_tools = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS rp" test $build_ca_tools = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS ca" diff --git a/configure.ac b/configure.ac index 65368e06..ff25b982 100644 --- a/configure.ac +++ b/configure.ac @@ -752,7 +752,7 @@ fi # Figure out which parts of this package we have to build. - TOP_LEVEL_SUBDIRS="h" + TOP_LEVEL_SUBDIRS="h schemas" test $build_openssl = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS openssl" test $build_rp_tools = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS rp" test $build_ca_tools = yes && TOP_LEVEL_SUBDIRS="$TOP_LEVEL_SUBDIRS ca" diff --git a/rpki/relaxng.py b/rpki/relaxng.py index 0d8c0d64..917ed6ed 100644 --- a/rpki/relaxng.py +++ b/rpki/relaxng.py @@ -2,11 +2,11 @@ import lxml.etree -## @var left_right -## Parsed RelaxNG left_right schema -left_right = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' +## @var relaxng/left_right +## Parsed RelaxNG relaxng/left_right schema +relaxng/left_right = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' ''')) -## @var up_down -## Parsed RelaxNG up_down schema -up_down = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' +## @var relaxng/up_down +## Parsed RelaxNG relaxng/up_down schema +relaxng/up_down = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' ''')) -## @var publication -## Parsed RelaxNG publication schema -publication = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' +## @var relaxng/publication +## Parsed RelaxNG relaxng/publication schema +relaxng/publication = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' ''')) -## @var myrpki -## Parsed RelaxNG myrpki schema -myrpki = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' +## @var relaxng/myrpki +## Parsed RelaxNG relaxng/myrpki schema +relaxng/myrpki = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' ''')) -## @var router_certificate -## Parsed RelaxNG router_certificate schema 
-router_certificate = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' +## @var relaxng/router_certificate +## Parsed RelaxNG relaxng/router_certificate schema +relaxng/router_certificate = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' ''')) -## @var relaxng/up_down -## Parsed RelaxNG relaxng/up_down schema -relaxng/up_down = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' +## @var myrpki +## Parsed RelaxNG myrpki schema +myrpki = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' - - - + + + 2 + + + 512000 - [\-,0-9]* - + - 512000 - [\-,/.0-9]* + 255 + [\-_A-Za-z0-9]+ - + - 512000 - [\-,/:0-9a-fA-F]* + 255 + [\-_A-Za-z0-9/]+ - - - 1 - 1024 + + + 4096 - - - 27 - 1024 - + + - - - 1 - 1024 + + + 512000 + [\-,0-9]+ - + - 10 - 4096 + 512000 + [\-,0-9/.]+ - - - 4 + + 512000 + [\-,0-9/:a-fA-F]+ - - - - - 1 - - - - - - - - - - - - - - list - - - - - - list_response - - - - - - issue - - - - - - issue_response - - - - - - revoke - - - - - - revoke_response - - - - - - error_response - - - - - - - - - - + + + .*Z + - - - - - - - - - - - - - - - - + + + + + - - + + - - - 1024 - rsync://.+ - + + - - - + + + + + + + + + + + + + + + + + + + - - + + - - + + - - + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - + + + - - - + + + - - - + + + + + + + + - + + + + + + + + + + + + + + + + + + + + + + + - - - - - + + + + + + + + + + - - + + + + + none + + + offer + + + + referral + + + + + - - - - + + + + + - - + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - 9999 - + + + + + + + + + + + + + + + + + - - - - - - - 1024 - + + + + + + + + + confirmed + + + + + + + + + + + + + + + - - + + + + + + + + + + + + + + + + + + + + + + + ''')) -## @var relaxng/publication -## Parsed RelaxNG relaxng/publication schema -relaxng/publication = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' +## @var publication +## Parsed RelaxNG publication schema +publication = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' - - - - 
publish - - - - - - - - - - - - publish - - - - - - - - - - - withdraw - - - - - - - - - - - withdraw - - - - - - - - - - - - publish - - - - - - - - - - - - publish + + + + + - - - - - + + + + + + + + + + + - - + + - withdraw + create - + + + - - + + - withdraw + create - + - - - + + - publish + set - - + + + - - + + - publish + set - + - - + + - withdraw + get - + - - + + - withdraw + get - + + - - - + + - publish + list - - - - + + - publish + list - + + - - + + - withdraw + destroy - + - - + + - withdraw + destroy - + - - - + + + publish @@ -1890,8 +1808,8 @@ relaxng/publication = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' - - + + publish @@ -1901,8 +1819,8 @@ relaxng/publication = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' - - + + withdraw @@ -1912,421 +1830,222 @@ relaxng/publication = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' - - + + withdraw - - - - - - - - - 1024 - - - - - - - - - - - - - 512000 - - - - - - -''')) - -## @var relaxng/myrpki -## Parsed RelaxNG relaxng/myrpki schema -relaxng/myrpki = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' - - - - 2 - - - - 512000 - - - - - 255 - [\-_A-Za-z0-9]+ - - - - - 255 - [\-_A-Za-z0-9/]+ - - - - - 4096 - - - - - - - - 512000 - [\-,0-9]+ - - - - - 512000 - [\-,0-9/.]+ - - - - - 512000 - [\-,0-9/:a-fA-F]+ - - - - - .*Z - - - - - - - - - - - - - - - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + publish + - - - + + + + + + + + + publish + - - - + + + + + + + + withdraw + - - - + + + + + + + + withdraw + - - - + + - - - - - - + + + + + + publish - - + + + + + + + + + + + publish - - - + + + + - - - - - - + + + + + withdraw - + + + + - - - + + + + withdraw + - - - + - + - - - - - none + + + + + publish - - offer + + + + + + + + + + + publish - - - referral - - - - - + + + + + - - - - - + + + + withdraw + + + + + + + + + + + withdraw - - - + + + + + + + + + publish + 
- - - + - - - - - + + + + + + + + publish - - - - - - - - - + + - - - - - - - - - - - - + + + + + withdraw - - - - + + + + - - - - - - - - - confirmed - - - - - - - - - - - - + + + + + withdraw - - - - - - - + + + + + + + + 1024 + + + + - + - - - - - - - - - - + + - + + + 512000 + + - + ''')) -## @var relaxng/router_certificate -## Parsed RelaxNG relaxng/router_certificate schema -relaxng/router_certificate = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' +## @var router_certificate +## Parsed RelaxNG router_certificate schema +router_certificate = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' - - - 1 - - - + + + 1 + + + + 512000 + + + + + + + + 512000 + [0-9][\-,0-9]* + + + + + .*Z + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +''')) + +## @var up_down +## Parsed RelaxNG up_down schema +up_down = lxml.etree.RelaxNG(lxml.etree.fromstring(r''' + + + + 512000 + [\-,0-9]* - - + + + 512000 + [\-,/.0-9]* + - + 512000 - [0-9][\-,0-9]* + [\-,/:0-9a-fA-F]* - - - .*Z + + + 1 + 1024 - - - - - - - - - - - - - - + + + 27 + 1024 + - - - + + + 1 + 1024 + + + + + 10 + 4096 + + + + + 4 + 512000 + + + + - + + 1 + + + + + + + - - - - + + + list + + + + + + list_response + + + + + + issue + + + + + + issue_response + + + + + + revoke + + + + + + revoke_response + + + + + + error_response + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1024 + rsync://.+ + + + - - + + + + + + + + + + + + + + + + + + + + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 9999 + + + + + + + + + 1024 + + + +