#!/usr/bin/env python
# $Id$
"""
Reimplementation of rcynic in Python. Work in progress.
"""
import os
import sys
import time
import shutil
import errno
import logging
import argparse
import subprocess
import tornado.gen
import tornado.locks
import tornado.ioloop
import tornado.queues
import tornado.process
import rpki.POW
import rpki.sundial
from rpki.oids import id_kp_bgpsec_router
from lxml.etree import ElementTree, Element, SubElement, Comment
logger = logging.getLogger("rcynicng")
codes = rpki.POW.validation_status
class Generation(object):
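    """
    Which tree an object was read from: the freshly fetched "current"
    unauthenticated tree or the "backup" tree left over from the
    previous validation run.  Instances are created in main() and are
    also installed as class attributes (Generation.current,
    Generation.backup) for easy reference.
    """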
all = []
def __init__(self, name, tree):
self.name = name
self.tree = tree
self.all.append(self)
self.pos = len(self.all)
setattr(self.__class__, name, self)
def __hash__(self):
return hash(self.name)
def __cmp__(self, other):
return cmp(self.pos, 0 if other is None else other.pos)
def __str__(self):
return self.name
class Status(object):
"""
Validation status database, like validation_status_t in rcynic:tos.
    The rcynic:tos version of this data structure is stored as an AVL
    tree, because the OpenSSL STACK_OF() sort-and-bsearch turned out
    to be a very poor choice for the input data.  It remains to be seen
    whether we need to do something like that here too.
"""
db = dict()
def __init__(self, uri, generation):
assert generation is None or isinstance(generation, Generation)
self.uri = uri
self._generation = generation
self._timestamp = None
self.status = set()
def __str__(self):
return "{my.timestamp} {my.uri} {status} {my.generation}".format(
my = self, status = ",".join(str(s) for s in sorted(self.status)))
@property
def timestamp(self):
return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(self._timestamp))
@property
def generation(self):
return str(self._generation)
@classmethod
def get(cls, uri, generation):
try:
return cls.db[uri, generation].status
except KeyError:
return None
@classmethod
def update(cls, uri, generation):
try:
key = (uri, generation)
self = cls.db[key]
except KeyError:
self = cls.db[key] = cls(uri, generation)
self._timestamp = time.time()
return self.status
@classmethod
def add(cls, uri, generation, *codes):
status = cls.update(uri, generation)
for code in codes:
status.add(code)
@classmethod
def remove(cls, uri, generation, *codes):
key = (uri, generation)
if key in cls.db:
for code in codes:
cls.db[key].status.discard(code)
@classmethod
def test(cls, uri, generation, code):
key = (uri, generation)
return key in cls.db and code in cls.db[key].status
def install_object(obj):
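    """
    Install an accepted object into the new authenticated tree by
    hard-linking the file we already have on disk.
    """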
fn = uri_to_filename(obj.uri, new_authenticated)
dn = os.path.dirname(fn)
#logger.debug("Installing %r by linking %s to %s", obj, obj.fn, fn)
if not os.path.isdir(dn):
os.makedirs(dn)
os.link(obj.fn, fn)
def final_install():
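    """
    Rotate the authenticated symlink to point at the tree we just
    built, keep the previous tree reachable via a ".old" symlink, and
    remove any older timestamped trees.
    """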
cur_link = old_authenticated
new_link = cur_link + ".new"
old_link = cur_link + ".old"
dir_base = os.path.realpath(cur_link + ".")
new_real = os.path.realpath(new_authenticated)
old_real = os.path.realpath(old_authenticated)
if os.path.islink(old_link):
logger.debug("Unlinking %s", old_link)
os.unlink(old_link)
if os.path.isdir(old_real):
logger.debug("Symlinking %s to %s", os.path.basename(old_real), old_link)
os.symlink(os.path.basename(old_real), old_link)
logger.debug("Symlinking %s to %s", os.path.basename(new_real), cur_link)
os.symlink(os.path.basename(new_real), new_link)
os.rename(new_link, cur_link)
for path in os.listdir(os.path.dirname(dir_base)):
path = os.path.realpath(os.path.join(os.path.dirname(dir_base), path))
if path.startswith(dir_base) and path not in (new_real, old_real) and os.path.isdir(path):
logger.debug("Removing %s", path)
shutil.rmtree(path)
class X509StoreCTX(rpki.POW.X509StoreCTX):
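    """
    Custom X.509 store context whose verify callback maps OpenSSL
    verification errors into our validation status codes.
    """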
@classmethod
def subclass(cls, **kwargs):
return type(cls.__name__, (cls,), kwargs)
status = None
def verify_callback(self, ok):
err = self.getError()
if err in (codes.X509_V_OK.code, codes.X509_V_ERR_SUBJECT_ISSUER_MISMATCH.code):
return ok
elif err == codes.X509_V_ERR_CRL_HAS_EXPIRED.code:
return True
elif err == codes.X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT.code:
self.status.add(codes.TRUST_ANCHOR_NOT_SELF_SIGNED)
return ok
else:
self.status.add(codes.find(err))
return ok
class X509(rpki.POW.X509):
def __repr__(self):
try:
return "<X509 \"{}\" {} at 0x{:x}>".format(self.uri, self.generation, id(self))
except:
return "<X509 at 0x{:x}>".format(id(self))
@classmethod
def derReadURI(cls, uri, generation, cms = None):
fn = uri_to_filename(uri, generation.tree)
if not os.path.exists(fn):
Status.add(uri, generation, codes.OBJECT_NOT_FOUND)
return None
if cms is None:
with open(fn, "rb") as f:
der = f.read()
else:
# XXX awful kludge to work around current lack of subclass
# support in rpki.POW.CMS.certs().
der = cms.certs()[0].derWrite()
self = cls.derRead(der)
self.uri = uri
self.fn = fn
self.generation = generation
self.sha256 = sha256(der) if cms is None else None
self.bc = self.getBasicConstraints()
self.aki = self.getAKI()
self.ski = self.getSKI()
self.eku = self.getEKU()
self.aia = self.getAIA()
self.sia = self.getSIA()
self.crldp = self.getCRLDP()
self.is_ca = self.bc is not None and self.bc[0]
self.caDirectory, self.rpkiManifest, self.signedObjectRepository, self.rpkiNotify \
= self.sia or (None, None, None, None)
return self
@staticmethod
def count_uris(uris, scheme = "rsync://"):
count = 0
if uris is not None:
for uri in uris:
if uri.startswith(scheme):
count += 1
return count
def check(self, trusted, crl):
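        """
        Run RPKI profile checks and path validation on this certificate.
        trusted is the issuer chain (None when checking a trust anchor);
        crl is the CRL to check against (None when checking manifest EE
        certificates before a CRL has been picked).  Returns True if no
        "bad" status codes were recorded.
        """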
#logger.debug("Starting checks for %r", self)
status = Status.update(self.uri, self.generation)
is_ta = trusted is None
is_routercert = (self.eku is not None and id_kp_bgpsec_router in self.eku and
not self.is_ca and self.uri.endswith(".cer"))
if self.eku is not None and (self.is_ca or not self.uri.endswith(".cer")):
status.add(codes.INAPPROPRIATE_EKU_EXTENSION)
if is_ta and not self.is_ca:
status.add(codes.MALFORMED_TRUST_ANCHOR)
if is_ta and self.aia is not None:
status.add(codes.AIA_EXTENSION_FORBIDDEN)
if not is_ta and self.aia is None:
status.add(codes.AIA_EXTENSION_MISSING)
if is_routercert and self.sia is not None:
status.add(codes.SIA_EXTENSION_FORBIDDEN)
if not is_routercert and self.sia is None:
status.add(codes.SIA_EXTENSION_MISSING)
if is_ta and self.crldp is not None:
status.add(codes.CRLDP_EXTENSION_FORBIDDEN)
if not is_ta and self.crldp is None:
status.add(codes.CRLDP_EXTENSION_MISSING)
if not is_ta and self.aki is None:
status.add(codes.AKI_EXTENSION_MISSING)
elif not is_ta and self.aki != trusted[0].ski:
status.add(codes.AKI_EXTENSION_ISSUER_MISMATCH)
serial = self.getSerial()
if serial <= 0 or serial > 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF:
status.add(codes.BAD_CERTIFICATE_SERIAL_NUMBER)
if self.getVersion() != 2:
status.add(codes.WRONG_OBJECT_VERSION)
n_rsync_caIssuers = self.count_uris(self.aia)
n_rsync_caDirectory = self.count_uris(self.caDirectory)
n_rsync_rpkiManifest = self.count_uris(self.rpkiManifest)
n_rsync_signedObjectRepository = self.count_uris(self.signedObjectRepository)
if n_rsync_caIssuers > 1 or n_rsync_caDirectory > 1 or n_rsync_rpkiManifest > 1 or n_rsync_signedObjectRepository > 1:
status.add(codes.MULTIPLE_RSYNC_URIS_IN_EXTENSION)
if self.aia is not None and n_rsync_caIssuers == 0:
status.add(codes.MALFORMED_AIA_EXTENSION)
if self.is_ca:
ok = n_rsync_caDirectory != 0 and n_rsync_rpkiManifest != 0 and n_rsync_signedObjectRepository == 0
elif not is_routercert:
ok = n_rsync_caDirectory == 0 and n_rsync_rpkiManifest == 0 and n_rsync_signedObjectRepository != 0
else:
ok = self.sia is None
if not ok:
status.add(codes.MALFORMED_SIA_EXTENSION)
if not is_ta and self.count_uris(self.crldp) == 0:
status.add(codes.MALFORMED_CRLDP_EXTENSION)
self.checkRPKIConformance(status = status, eku = id_kp_bgpsec_router if is_routercert else None)
try:
self.verify(trusted = [self] if trusted is None else trusted, crl = crl, policy = "1.3.6.1.5.5.7.14.2",
context_class = X509StoreCTX.subclass(status = status))
except rpki.POW.ValidationError as e:
logger.debug("%r rejected: %s", self, e)
status.add(codes.OBJECT_REJECTED)
codes.normalize(status)
#logger.debug("Finished checks for %r", self)
return not any(s.kind == "bad" for s in status)
class CRL(rpki.POW.CRL):
def __repr__(self):
try:
return "<CRL \"{}\" {} at 0x{:x}>".format(self.uri, self.generation, id(self))
except:
return "<CRL at 0x{:x}>".format(id(self))
@classmethod
def derReadURI(cls, uri, generation):
fn = uri_to_filename(uri, generation.tree)
if not os.path.exists(fn):
Status.add(uri, generation, codes.OBJECT_NOT_FOUND)
return None
with open(fn, "rb") as f:
der = f.read()
self = cls.derRead(der)
self.uri = uri
self.fn = fn
self.generation = generation
self.sha256 = sha256(der)
self.aki = self.getAKI()
self.thisUpdate = self.getThisUpdate()
self.nextUpdate = self.getNextUpdate()
self.number = self.getCRLNumber()
return self
def check(self, issuer):
status = Status.update(self.uri, self.generation)
self.checkRPKIConformance(status = status, issuer = issuer)
try:
self.verify(issuer)
except rpki.POW.ValidationError as e:
logger.debug("%r rejected: %s", self, e)
status.add(codes.OBJECT_REJECTED)
codes.normalize(status)
if self.getVersion() != 1:
status.add(codes.WRONG_OBJECT_VERSION)
now = rpki.sundial.now()
if self.thisUpdate > now:
status.add(codes.CRL_NOT_YET_VALID)
if self.nextUpdate < now:
status.add(codes.STALE_CRL_OR_MANIFEST)
        if self.number is None:
            status.add(codes.CRL_NUMBER_EXTENSION_MISSING)
        elif self.number < 0:
            status.add(codes.CRL_NUMBER_IS_NEGATIVE)
        elif self.number > 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF:
            status.add(codes.CRL_NUMBER_OUT_OF_RANGE)
if self.getIssuer() != issuer.getSubject():
status.add(codes.CRL_ISSUER_NAME_MISMATCH)
if self.aki is None:
status.add(codes.AKI_EXTENSION_MISSING)
elif self.aki != issuer.ski:
status.add(codes.AKI_EXTENSION_ISSUER_MISMATCH)
return not any(s.kind == "bad" for s in status)
class Ghostbuster(rpki.POW.CMS):
def __repr__(self):
try:
return "<Ghostbuster \"{}\" {} at 0x{:x}>".format(self.uri, self.generation, id(self))
except:
return "<Ghostbuster at 0x{:x}>".format(id(self))
@classmethod
def derReadURI(cls, uri, generation):
fn = uri_to_filename(uri, generation.tree)
if not os.path.exists(fn):
Status.add(uri, generation, codes.OBJECT_NOT_FOUND)
return None
with open(fn, "rb") as f:
der = f.read()
self = cls.derRead(der)
self.uri = uri
self.fn = fn
self.generation = generation
self.sha256 = sha256(der)
self.ee = X509.derReadURI(uri, generation, self)
self.vcard = None
return self
def check(self, trusted, crl):
status = Status.update(self.uri, self.generation)
self.ee.check(trusted = trusted, crl = crl)
try:
self.vcard = self.verify()
except rpki.POW.ValidationError as e:
logger.debug("%r rejected: %s", self, e)
status.add(codes.OBJECT_REJECTED)
self.checkRPKIConformance(status)
codes.normalize(status)
return not any(s.kind == "bad" for s in status)
class Manifest(rpki.POW.Manifest):
def __repr__(self):
try:
return "<Manifest \"{}\" {} at 0x{:x}>".format(self.uri, self.generation, id(self))
except:
return "<Manifest at 0x{:x}>".format(id(self))
@classmethod
def derReadURI(cls, uri, generation):
fn = uri_to_filename(uri, generation.tree)
if not os.path.exists(fn):
Status.add(uri, generation, codes.OBJECT_NOT_FOUND)
return None
with open(fn, "rb") as f:
der = f.read()
self = cls.derRead(der)
self.uri = uri
self.fn = fn
self.ee = X509.derReadURI(uri, generation, self)
self.fah = None
self.generation = generation
self.thisUpdate = None
self.nextUpdate = None
self.number = None
return self
def check(self, trusted, crl):
status = Status.update(self.uri, self.generation)
self.ee.check(trusted = trusted, crl = crl)
try:
self.verify()
except rpki.POW.ValidationError as e:
logger.debug("%r rejected: %s", self, e)
status.add(codes.OBJECT_REJECTED)
self.checkRPKIConformance(status)
self.thisUpdate = self.getThisUpdate()
self.nextUpdate = self.getNextUpdate()
self.number = self.getManifestNumber()
self.fah = self.getFiles()
self.notBefore = self.ee.getNotBefore()
self.notAfter = self.ee.getNotAfter()
if self.thisUpdate < self.notBefore or self.nextUpdate > self.notAfter:
status.add(codes.MANIFEST_INTERVAL_OVERRUNS_CERT)
now = rpki.sundial.now()
if self.thisUpdate > now:
status.add(codes.MANIFEST_NOT_YET_VALID)
if self.nextUpdate < now:
status.add(codes.STALE_CRL_OR_MANIFEST)
codes.normalize(status)
return not any(s.kind == "bad" for s in status)
def find_crl_uris(self):
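        """
        Yield (URI, digest) pairs for the CRL entries on this manifest.
        """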
diruri = self.uri[:self.uri.rindex("/") + 1]
for fn, digest in self.fah:
if fn.endswith(".crl"):
yield diruri + fn, digest
class ROA(rpki.POW.ROA):
def __repr__(self):
try:
return "<ROA \"{}\" {} at 0x{:x}>".format(self.uri, self.generation, id(self))
except:
return "<ROA at 0x{:x}>".format(id(self))
@classmethod
def derReadURI(cls, uri, generation):
fn = uri_to_filename(uri, generation.tree)
if not os.path.exists(fn):
Status.add(uri, generation, codes.OBJECT_NOT_FOUND)
return None
with open(fn, "rb") as f:
der = f.read()
self = cls.derRead(der)
self.uri = uri
self.fn = fn
self.generation = generation
self.sha256 = sha256(der)
self.ee = X509.derReadURI(uri, generation, self)
self.asn = None
self.prefixes = None
return self
def check(self, trusted, crl):
status = Status.update(self.uri, self.generation)
self.ee.check(trusted = trusted, crl = crl)
try:
            self.verify()
except rpki.POW.ValidationError:
status.add(codes.OBJECT_REJECTED)
self.checkRPKIConformance(status)
self.asn = self.getASID()
self.prefixes = self.getPrefixes()
codes.normalize(status)
return not any(s.kind == "bad" for s in status)
class WalkFrame(object):
"""
Certificate tree walk stack frame. This is basically just a
preamble and a loop, broken out into several separate methods so
    that we can fork new tasks in the middle, then resume processing of
the current state machine (ie, this frame) when appropriate (eg,
after an rsync or RRDP fetch completes).
"""
fns2 = dict(cer = X509,
gbr = Ghostbuster,
roa = ROA)
def __init__(self, cer):
self.cer = cer
self.state = self.initial
def __repr__(self):
try:
return "<WalkFrame \"{}\" at 0x{:x}".format(self.cer.uri, id(self))
except:
return "<WalkFrame at 0x{:x}>".format(id(self))
@tornado.gen.coroutine
def __call__(self, wsk):
yield self.state(wsk)
@tornado.gen.coroutine
def initial(self, wsk):
self.diruri = first_rsync_uri(self.cer.caDirectory)
self.fetcher = Fetcher(self.diruri)
if not self.fetcher.needed():
self.state = self.ready
elif args.no_spawn_on_fetch:
self.state = self.fetch
else:
self.state = self.fetch
yield task_queue.put(wsk.clone())
wsk.pop()
@tornado.gen.coroutine
def fetch(self, wsk):
yield self.fetcher.fetch()
self.state = self.ready
@tornado.gen.coroutine
def ready(self, wsk):
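        # Pick the best manifest and CRL we can find (preferring the
        # current generation over the backup), install them, then set up
        # the iterator over the manifest's file list for loop().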
self.trusted = wsk.trusted()
#logger.debug("%r scanning products", self)
mft_uri = first_rsync_uri(self.cer.rpkiManifest)
crl_candidates = []
# NB: CRL checks on manifest EE certificates deferred until we've picked a CRL.
current_mft = Manifest.derReadURI(mft_uri, Generation.current)
if current_mft is not None and current_mft.check(trusted = self.trusted, crl = None):
crl_candidates.extend(current_mft.find_crl_uris())
else:
current_mft = None
backup_mft = Manifest.derReadURI(mft_uri, Generation.backup)
if backup_mft is not None and backup_mft.check(trusted = self.trusted, crl = None):
crl_candidates.extend(backup_mft.find_crl_uris())
else:
backup_mft = None
Status.remove(mft_uri, Generation.backup, codes.OBJECT_NOT_FOUND)
if current_mft is None and backup_mft is None:
wsk.pop()
return
self.crl = None
crls = {}
for uri, digest in crl_candidates:
for generation in (Generation.current, Generation.backup):
try:
crl = crls[uri, generation]
except KeyError:
crl = crls[uri, generation] = CRL.derReadURI(uri, generation)
if crl == self.crl:
continue
if crl is None and generation == Generation.backup:
Status.remove(uri, generation, codes.OBJECT_NOT_FOUND)
if crl is None:
continue
if crl.sha256 != digest:
#Status.add(uri, generation, codes.DIGEST_MISMATCH)
continue
if not crl.check(self.trusted[0]) or (self.crl is not None and crl.number < self.crl.number):
continue
if self.crl is None or crl.number > self.crl.number or crl.thisUpdate > self.crl.thisUpdate:
self.crl = crl
if self.crl is None:
wsk.pop()
return
install_object(self.crl)
Status.add(self.crl.uri, self.crl.generation, codes.OBJECT_ACCEPTED)
#logger.debug("Picked %s CRL %s", self.crl.generation, self.crl.uri)
if current_mft is not None and self.crl.isRevoked(current_mft.ee):
Status.add(current_mft.uri, current_mft.generation, codes.MANIFEST_EE_REVOKED)
current_mft = None
if backup_mft is not None and self.crl.isRevoked(backup_mft.ee):
Status.add(backup_mft.uri, backup_mft.generation, codes.MANIFEST_EE_REVOKED)
backup_mft = None
if current_mft is not None:
self.mft = current_mft
elif backup_mft is not None:
self.mft = backup_mft
else:
wsk.pop()
return
install_object(self.mft)
Status.add(mft_uri, self.mft.generation, codes.OBJECT_ACCEPTED)
self.stale_crl = Status.test(self.crl.uri, self.crl.generation, codes.STALE_CRL_OR_MANIFEST)
self.stale_mft = Status.test(self.mft.uri, self.mft.generation, codes.STALE_CRL_OR_MANIFEST)
# Use an explicit iterator so we can resume it; run loop in separate method, same reason.
self.mft_iterator = iter(self.mft.getFiles())
self.state = self.loop
@tornado.gen.coroutine
def loop(self, wsk):
#logger.debug("Processing %s", self.mft.uri)
for fn, digest in self.mft_iterator:
yield tornado.gen.moment
uri = self.diruri + fn
cls = self.fns2.get(uri[-3:])
# Need general URI validator here?
if uri == self.crl.uri:
continue
if uri[-4] != "." or cls is None:
Status.add(uri, None, codes.UNKNOWN_OBJECT_TYPE_SKIPPED)
continue
for generation in (Generation.current, Generation.backup):
obj = cls.derReadURI(uri, generation)
if obj is None and generation is Generation.current:
Status.add(uri, generation, codes.OBJECT_NOT_FOUND)
if obj is None:
continue
if self.stale_crl:
Status.add(uri, generation, codes.TAINTED_BY_STALE_CRL)
if self.stale_mft:
Status.add(uri, generation, codes.TAINTED_BY_STALE_MANIFEST)
ok = obj.check(trusted = self.trusted, crl = self.crl)
if obj.sha256 != digest:
Status.add(uri, generation, codes.DIGEST_MISMATCH)
ok = False
if ok:
install_object(obj)
Status.add(uri, generation, codes.OBJECT_ACCEPTED)
break
else:
Status.add(uri, generation, codes.OBJECT_REJECTED)
else:
continue
if ok and cls is X509 and obj.is_ca:
wsk.push(obj)
return
wsk.pop()
class WalkTask(object):
"""
    Task corresponding to one walk stack, roughly analogous to
STACK_OF(walk_ctx_t) in rcynic:tos.
"""
def __init__(self, wsk = None, cer = None):
self.wsk = [] if wsk is None else wsk
if cer is not None:
self.push(cer)
def __repr__(self):
try:
return "<WalkTask \"{}\" at 0x{:x}".format(self.wsk[-1].cer.uri, id(self))
except:
return "<WalkTask at 0x{:x}>".format(id(self))
@tornado.gen.coroutine
def __call__(self):
while self.wsk:
yield self.wsk[-1](wsk = self)
def push(self, cer):
self.wsk.append(WalkFrame(cer))
def pop(self):
return self.wsk.pop()
def clone(self):
return WalkTask(wsk = list(self.wsk))
def trusted(self):
stack = [w.cer for w in self.wsk]
stack.reverse()
return stack
def read_tals():
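    """
    Walk the TAL directory, yielding a (URI, public key) pair for each
    trust anchor locator we can parse.
    """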
for root, dirs, files in os.walk(args.tals):
for fn in files:
if fn.endswith(".tal"):
furi = "file://" + os.path.abspath(os.path.join(root, fn))
try:
with open(os.path.join(root, fn), "r") as f:
lines = f.readlines()
uri = lines.pop(0).strip()
b64 = "".join(lines[lines.index("\n"):])
key = rpki.POW.Asymmetric.derReadPublic(b64.decode("base64"))
if not uri.endswith(".cer"):
Status.add(furi, None, codes.MALFORMED_TAL_URI)
yield uri, key
except:
Status.add(furi, None, codes.UNREADABLE_TRUST_ANCHOR_LOCATOR)
def uri_to_filename(uri, base = None):
fn = uri[uri.index("://")+3:]
if base is not None:
fn = os.path.join(base, fn)
return fn
def first_uri(uris, scheme):
if uris is not None:
for uri in uris:
if uri.startswith(scheme):
return uri
return None
def first_rsync_uri(uris):
return first_uri(uris, "rsync://")
def sha256(bytes):
d = rpki.POW.Digest(rpki.POW.SHA256_DIGEST)
d.update(bytes)
return d.digest()
class Fetcher(object):
"""
Network transfer methods and history database.
At the moment this is rsync-only; eventually it will include
support for HTTPS and RRDP.
"""
# Internal protocol:
#
# - Instances which have just gotten to the query stage are not registered
#
# - Instances which are in progress are listed in .history and
# have a Condition object in .pending; instances which depend on
# this should wait for the condition, then return.
#
# - Instances which have completed are listed in .history and have
# .pending set to None.
_rsync_deadhosts = set()
_rsync_history = dict()
def __init__(self, uri):
self.uri = uri
self.pending = None
self.status = None
self.runtime = None
def _rsync_split_uri(self):
return tuple(self.uri.rstrip("/").split("/")[2:])
def _rsync_find(self, path):
for i in xrange(1, len(path)):
target = path[:i+1]
try:
return self._rsync_history[target]
except KeyError:
continue
return None
def needed(self):
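        """
        Do we need to fetch this URI at all?  Always False when
        --no-fetch was given, otherwise consult the per-scheme history.
        """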
if args.no_fetch:
return False
if self.uri.startswith("rsync://"):
return self._rsync_needed()
raise ValueError
def _rsync_needed(self):
path = self._rsync_split_uri()
if path[0] in self._rsync_deadhosts:
return False
entry = self._rsync_find(path)
return entry is None or entry.pending is not None
def fetch(self):
if self.uri.startswith("rsync://"):
return self._rsync_fetch()
raise ValueError
@tornado.gen.coroutine
def _rsync_fetch(self):
assert self.uri.startswith("rsync://") and (self.uri.endswith(".cer") or self.uri.endswith("/"))
if args.no_fetch:
return
path = self._rsync_split_uri()
dead = path[0] in self._rsync_deadhosts
other = self._rsync_find(path)
if not dead and other is not None and other.pending is not None:
yield other.pending.wait()
if dead or other is not None:
return
self.pending = tornado.locks.Condition()
self._rsync_history[path] = self
try:
cmd = ["rsync", "--update", "--times", "--copy-links", "--itemize-changes"]
if self.uri.endswith("/"):
cmd.append("--recursive")
cmd.append("--delete")
cmd.append(self.uri)
cmd.append(uri_to_filename(self.uri, args.unauthenticated))
dn = os.path.dirname(cmd[-1])
if not os.path.exists(dn):
os.makedirs(dn)
# We use the stdout close from rsync to detect when the subprocess has finished.
# There's a lovely tornado.process.Subprocess.wait_for_exit() method which does
# exactly what one would think we'd want -- but Unix signal handling still hasn't
# caught up to the software interrupt architecture ITS had forty years ago, so
# signals still cause random "system call interrupted" failures in other libraries.
# Nothing Tornado can do about this, so we avoid signals entirely and collect the
# process exit status directly from the operating system. In theory, the WNOHANG
# isn't necessary here, we use it anyway to be safe in case theory is wrong.
# If we need to add a timeout here to guard against rsync processes taking too long
# (which has happened in the past with, eg, LACNIC), see tornado.gen.with_timeout()
# (documented in the utility functions section of the tornado.gen page), which wraps
# any future in a timeout.
t0 = time.time()
rsync = tornado.process.Subprocess(cmd, stdout = tornado.process.Subprocess.STREAM, stderr = subprocess.STDOUT)
logger.debug("rsync[%s] started \"%s\"", rsync.pid, " ".join(cmd))
output = yield rsync.stdout.read_until_close()
pid, self.status = os.waitpid(rsync.pid, os.WNOHANG)
t1 = time.time()
self.runtime = t1 - t0
if (pid, self.status) == (0, 0):
logger.warn("rsync[%s] Couldn't get real exit status without blocking, sorry", rsync.pid)
for line in output.splitlines():
logger.debug("rsync[%s] %s", rsync.pid, line)
logger.debug("rsync[%s] finished after %s seconds with status 0x%x", rsync.pid, self.runtime, self.status)
# Should do something with rsync result and validation status database here.
finally:
pending = self.pending
self.pending = None
pending.notify_all()
class CheckTALTask(object):
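    """
    Task which processes one trust anchor locator: fetch the trust
    anchor certificate, check it against the TAL's public key, and, if
    it passes, queue a WalkTask for the tree beneath it.
    """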
def __init__(self, uri, key):
self.uri = uri
self.key = key
def __repr__(self):
return "<CheckTALTask: \"{}\">".format(self.uri)
@tornado.gen.coroutine
def __call__(self):
yield Fetcher(self.uri).fetch()
if self.check(Generation.current):
yield task_queue.put(WalkTask(cer = self.cer))
elif self.check(Generation.backup):
yield task_queue.put(WalkTask(cer = self.cer))
else:
Status.add(self.uri, None, codes.TRUST_ANCHOR_SKIPPED)
def check(self, generation):
self.cer = X509.derReadURI(self.uri, generation)
ok = False
if self.cer is None:
Status.add(self.uri, generation, codes.UNREADABLE_TRUST_ANCHOR)
elif self.key.derWritePublic() != self.cer.getPublicKey().derWritePublic():
Status.add(self.uri, generation, codes.TRUST_ANCHOR_KEY_MISMATCH)
else:
ok = self.cer.check(trusted = None, crl = None)
if ok:
install_object(self.cer)
Status.add(self.uri, generation, codes.OBJECT_ACCEPTED)
else:
Status.add(self.uri, generation, codes.OBJECT_REJECTED)
return ok
@tornado.gen.coroutine
def worker(meself):
#
# NB: This particular style of control loop REQUIRES an except
# clause, even if that except clause is just a pass statement.
#
while True:
task = yield task_queue.get()
try:
logger.debug("Worker %s starting %s, queue length %s", meself, task, task_queue.qsize())
yield task()
except:
logger.exception("Worker %s caught unhandled exception from %s", meself, task)
finally:
task_queue.task_done()
logger.debug("Worker %s finished %s, queue length %s", meself, task, task_queue.qsize())
def final_report():
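    """
    Write the validation status database out as an XML summary,
    intended to resemble rcynic's rcynic-summary output.
    """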
# Clean up a bit to avoid confusing the user unnecessarily.
for s in Status.db.itervalues():
if codes.OBJECT_ACCEPTED in s.status:
s.status.discard(codes.OBJECT_REJECTED)
        if s._generation is Generation.backup:
if Status.test(s.uri, Generation.current, codes.OBJECT_ACCEPTED):
s.status.discard(codes.OBJECT_REJECTED)
s.status.discard(codes.OBJECT_NOT_FOUND)
doc = Element("rcynic-summary") # rcynic-version = "", summary-version = "", reporting-hostname = ""
labels = SubElement(doc, "labels")
for code in codes.all():
SubElement(labels, code.name).text = code.text
for uri, generation in Status.db:
for sym in sorted(Status.db[uri, generation].status):
SubElement(doc, "validation_status",
timestamp = str(Status.db[uri, generation].timestamp),
status = str(sym),
generation = str(generation)
).text = uri
#
# Should generate <rsync_history/> elements here too, later
#
ElementTree(doc).write(file = args.xml_file, pretty_print = True)
@tornado.gen.coroutine
def launcher():
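    """
    Start the worker coroutines, seed the task queue with one
    CheckTALTask per trust anchor locator, then wait for the queue to
    drain.
    """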
for i in xrange(args.workers):
tornado.ioloop.IOLoop.current().spawn_callback(worker, i)
yield [task_queue.put(CheckTALTask(uri, key)) for uri, key in read_tals()]
yield task_queue.join()
class posint(int):
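    """
    Positive integer type for argparse.
    """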
def __init__(self, value):
if self <= 0:
raise ValueError
def main():
os.putenv("TZ", "UTC")
time.tzset()
parser = argparse.ArgumentParser(description = __doc__)
parser.add_argument("--authenticated", default = "rcynic-data/authenticated")
parser.add_argument("--unauthenticated", default = "rcynic-data/unauthenticated")
parser.add_argument("--xml-file", default = "rcynicng.xml", type = argparse.FileType("w"))
parser.add_argument("--tals", default = "sample-trust-anchors")
parser.add_argument("--workers", type = posint, default = 10)
parser.add_argument("--no-fetch", action = "store_true")
parser.add_argument("--no-spawn-on-fetch", action = "store_true")
global args
args = parser.parse_args()
global new_authenticated, old_authenticated
new_authenticated = args.authenticated.rstrip("/") + time.strftime(".%Y-%m-%dT%H:%M:%SZ")
old_authenticated = args.authenticated.rstrip("/")
Generation("current", args.unauthenticated)
Generation("backup", old_authenticated)
logging.basicConfig(level = logging.DEBUG, format = "%(asctime)s %(message)s", datefmt = "%Y-%m-%d %H:%M:%S")
global task_queue
task_queue = tornado.queues.Queue()
tornado.ioloop.IOLoop.current().run_sync(launcher)
final_report()
final_install()
if __name__ == "__main__":
main()