Compare commits
1086 Commits
development … development
.github/dependabot.yml (vendored, new file, +10)

```yaml
---
version: 2
updates:
  - package-ecosystem: npm
    directory: "/"
    schedule:
      interval: daily
      time: "13:00"
    open-pull-requests-limit: 10
    target-branch: "development/7.4"
```
.github/workflows/codeql.yaml (vendored, new file, +25)

```yaml
---
name: codeQL

on:
  push:
    branches: [development/*, stabilization/*, hotfix/*]
  pull_request:
    branches: [development/*, stabilization/*, hotfix/*]
  workflow_dispatch:

jobs:
  analyze:
    name: Static analysis with CodeQL
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: javascript, typescript

      - name: Build and analyze
        uses: github/codeql-action/analyze@v3
```
.github/workflows/dependency-review.yaml (vendored, new file, +16)

```yaml
---
name: dependency review

on:
  pull_request:
    branches: [development/*, stabilization/*, hotfix/*]

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: 'Checkout Repository'
        uses: actions/checkout@v4

      - name: 'Dependency Review'
        uses: actions/dependency-review-action@v4
```
.github/workflows/tests.yaml (vendored, new file, +76)

```yaml
---
name: tests

on:
  push:
    branches-ignore:
      - 'development/**'

jobs:
  test:
    runs-on: ubuntu-latest
    services:
      # Label used to access the service container
      redis:
        # Docker Hub image
        image: redis
        # Set health checks to wait until redis has started
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          # Maps port 6379 on service container to the host
          - 6379:6379
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - uses: actions/setup-node@v2
        with:
          node-version: '16'
          cache: 'yarn'
      - name: install dependencies
        run: yarn install --frozen-lockfile --prefer-offline
        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
      - name: lint yaml
        run: yarn --silent lint_yml
      - name: lint javascript
        run: yarn --silent lint -- --max-warnings 0
      - name: lint markdown
        run: yarn --silent lint_md
      - name: run unit tests
        run: yarn test
      - name: run functional tests
        run: yarn ft_test
      - name: run executables tests
        run: yarn install && yarn test
        working-directory: 'lib/executables/pensieveCreds/'

  compile:
    name: Compile and upload build artifacts
    needs: test
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install NodeJS
        uses: actions/setup-node@v4
        with:
          node-version: '16'
          cache: yarn
      - name: Install dependencies
        run: yarn install --frozen-lockfile --prefer-offline
        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
      - name: Compile
        run: yarn build
        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
      - name: Upload artifacts
        uses: scality/action-artifacts@v4
        with:
          url: https://artifacts.scality.net
          user: ${{ secrets.ARTIFACTS_USER }}
          password: ${{ secrets.ARTIFACTS_PASSWORD }}
          source: ./build
          method: upload
        if: success()
```
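The `services.redis` block above is what lets the unit and functional tests talk to a real Redis: the container's port 6379 is mapped onto the runner host, and the health check holds the job until `redis-cli ping` answers. A minimal sketch of what a test run under this workflow could do, assuming the `ioredis` client (which this diff does not add):

```javascript
// Minimal sketch, not from the repository: reach the Redis service
// container started by the workflow above. `ioredis` is an assumption.
const Redis = require('ioredis');

async function main() {
    // The workflow maps container port 6379 to the runner host,
    // so tests simply connect to localhost.
    const redis = new Redis({ host: '127.0.0.1', port: 6379 });
    const pong = await redis.ping(); // same probe the health check uses
    console.log('redis says:', pong); // "PONG"
    redis.disconnect();
}

main().catch(err => {
    console.error(err);
    process.exit(1);
});
```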
.gitignore (vendored, modified) @@ -1 +1,18 @@

```
# Logs
*.log

# Dependency directory
node_modules/
*/node_modules/

# Build executables
*-win.exe
*-linux
*-macos

# Coverage
coverage/
.nyc_output/

# TypeScript
build/
```
.npmignore (new file, empty)

README.md (modified)

````diff
@@ -1,8 +1,5 @@
 # Arsenal
 
-[![CircleCI][badgepub]](https://circleci.com/gh/scality/Arsenal)
-[![Scality CI][badgepriv]](http://ci.ironmann.io/gh/scality/Arsenal)
-
 Common utilities for the S3 project components
 
 Within this repository, you will be able to find the shared libraries for the
@@ -104,7 +101,7 @@ You can handle exit event on both master and workers by calling the
 'onExit' method and setting the callback. This allows release of resources
 or save state before exiting the process.
 
-#### Silencing a singnal
+#### Silencing a signal
 
 ```
 import { Clustering } from 'arsenal';
````
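The README hunk above documents the `onExit` callback on the `Clustering` helper; the code block it introduces is cut off in this capture. A hedged sketch of the pattern it describes, where the constructor arguments and the `start()` call are assumptions rather than the README's verbatim example:

```javascript
// Sketch only: onExit is what the README documents; the constructor
// signature and start() are assumptions, not quoted text.
import { Clustering } from 'arsenal';

const logger = console; // stand-in for a real logger instance

const cluster = new Clustering(4, logger); // e.g. 4 workers (assumed args)
cluster.start(current => {
    // master and worker processes both enter here
});
cluster.onExit(current => {
    // release resources or save state before the process exits
});
```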
babel.config.js (new file, +6)

```javascript
module.exports = {
    presets: [
        ['@babel/preset-env', { targets: { node: 'current' } }],
        '@babel/preset-typescript',
    ],
};
```

circle.yml (modified)

```diff
@@ -2,13 +2,15 @@
 general:
   branches:
     ignore:
-      - /^ultron\/.*/ # Ignore ultron/* branches
+      - /^ultron\/.*/  # Ignore ultron/* branches

 machine:
+  node:
+    version: 6.13.1
   services:
     - redis
   environment:
     CXX: g++-4.9
-  node:
-    version: 4.5.0

 dependencies:
   override:
@@ -23,3 +25,4 @@ test:
     - npm run --silent lint_md
     - npm run --silent test
     - npm run ft_test
+    - cd lib/executables/pensieveCreds && npm install && npm test
```
documentation/BucketInfoModelVersion.md (new file, +144)

# BucketInfo Model Version History

## Model Version 0/1

### Properties

```javascript
this._acl = aclInstance;
this._name = name;
this._owner = owner;
this._ownerDisplayName = ownerDisplayName;
this._creationDate = creationDate;
```

### Usage

No explicit references in the code, since the mdBucketModelVersion
property was not added until Model Version 2.

## Model Version 2

### Properties Added

```javascript
this._mdBucketModelVersion = mdBucketModelVersion || 0;
this._transient = transient || false;
this._deleted = deleted || false;
```

### Usage

Used to determine which splitter to use (< 2 means old splitter)

## Model Version 3

### Properties Added

```javascript
this._serverSideEncryption = serverSideEncryption || null;
```

### Usage

Used to store the bucket's server-side encryption info

## Model Version 4

### Properties Added

```javascript
this._locationConstraint = LocationConstraint || null;
```

### Usage

Used to store the location constraint of the bucket

## Model Version 5

### Properties Added

```javascript
this._websiteConfiguration = websiteConfiguration || null;
this._cors = cors || null;
```

### Usage

Used to store the bucket website configuration info
and to store CORS rules to apply to cross-domain requests

## Model Version 6

### Properties Added

```javascript
this._lifecycleConfiguration = lifecycleConfiguration || null;
```

### Usage

Used to store the bucket lifecycle configuration info

## Model Version 7

### Properties Added

```javascript
this._objectLockEnabled = objectLockEnabled || false;
this._objectLockConfiguration = objectLockConfiguration || null;
```

### Usage

Used to determine whether object lock capabilities are enabled on a bucket and
to store the object lock configuration of the bucket

## Model Version 8

### Properties Added

```javascript
this._notificationConfiguration = notificationConfiguration || null;
```

### Usage

Used to store the bucket notification configuration info

## Model Version 9

### Properties Added

```javascript
this._serverSideEncryption.configuredMasterKeyId = configuredMasterKeyId || undefined;
```

### Usage

Used to store the user's configured KMS key id

## Model Version 10

### Properties Added

```javascript
this._uid = uid || uuid();
```

### Usage

Used to set a unique identifier on a bucket

## Model Version 11

### Properties Added

```javascript
this._tags = tags || null;
```

### Usage

Used to store bucket tagging
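Model Version 2 is the pivot the history above keeps referring back to: `mdBucketModelVersion < 2` selects the old key splitter. An illustrative check of that rule, where the splitter constants and the helper name are hypothetical, not Arsenal's actual API:

```javascript
// Illustrative only: gate the splitter choice on the bucket's model
// version, per "< 2 means old splitter" above. The constants and the
// getSplitter() helper are hypothetical.
const OLD_SPLITTER = 'oldSplitter'; // placeholder value
const NEW_SPLITTER = 'newSplitter'; // placeholder value

function getSplitter(bucketInfo) {
    // assumed accessor for the _mdBucketModelVersion property
    return bucketInfo.getMdBucketModelVersion() < 2
        ? OLD_SPLITTER
        : NEW_SPLITTER;
}
```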
documentation/listingAlgos/delimiter.md (new file, +27)

# Delimiter

The Delimiter class handles raw listings from the database with an
optional delimiter, and fills in a curated listing with "Contents" and
"CommonPrefixes" as a result.

## Expected Behavior

- only lists keys belonging to the given **prefix** (if provided)

- groups listed keys that have a common prefix ending with a delimiter
  inside CommonPrefixes

- can take a **marker** or **continuationToken** to list from a specific key

- can take a **maxKeys** parameter to limit how many keys can be returned

## State Chart

- States with grey background are *Idle* states, which are waiting for
  a new listing key

- States with blue background are *Processing* states, which are
  actively processing a new listing key passed by the filter()
  function

![](pics/delimiterStateChart.svg)
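The state chart above is driven entirely through `filter()`: the database scan feeds one sorted entry at a time and reacts to the returned code. A sketch of that driving loop under assumed names (the `FILTER_*` codes and a `result()` accessor are assumptions, not the class's literal interface):

```javascript
// Sketch of the filter() contract described above; names are assumed.
const FILTER_ACCEPT = 1; // entry kept as Contents or a CommonPrefixes item
const FILTER_SKIP = 2;   // entry ignored; a real scan may jump ahead
const FILTER_END = 3;    // maxKeys reached, stop listing

function listKeys(sortedEntries, delimiter) {
    for (const { key, value } of sortedEntries) {
        if (delimiter.filter({ key, value }) === FILTER_END) {
            break; // the listing is complete
        }
    }
    // hypothetical accessor for { Contents, CommonPrefixes, IsTruncated }
    return delimiter.result();
}
```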
documentation/listingAlgos/delimiterMaster.md (new file, +45)

# DelimiterMaster

The DelimiterMaster class handles raw listings from the database of a
versioned or non-versioned bucket with an optional delimiter, and
fills in a curated listing with "Contents" and "CommonPrefixes" as a
result.

## Expected Behavior

- only lists latest versions of versioned buckets

- only lists keys belonging to the given **prefix** (if provided)

- does not list latest versions that are delete markers

- groups listed keys that have a common prefix ending with a delimiter
  inside CommonPrefixes

- can take a **marker** or **continuationToken** to list from a specific key

- can take a **maxKeys** parameter to limit how many keys can be returned

- reconciles internal PHD keys with the next version (those are
  created when a specific version that is the latest version is
  deleted)

- skips internal keys like replay keys

## State Chart

- States with grey background are *Idle* states, which are waiting for
  a new listing key

- States with blue background are *Processing* states, which are
  actively processing a new listing key passed by the filter()
  function

### Bucket Vformat=v0

![](pics/delimiterMasterV0StateChart.svg)

### Bucket Vformat=v1

For buckets in versioning key format **v1**, the algorithm used is the
one from [Delimiter](delimiter.md).
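Several transitions in the v0 chart that follows hinge on two predicates, `isVersionKey(key)` and `master(key)`. In the v0 key format a version key is the master key plus a separator and a version id, so both reduce to string operations; a sketch with the separator value assumed:

```javascript
// Hypothetical helpers matching the predicates used in the state chart;
// the real separator constant lives in Arsenal's versioning code.
const VID_SEP = '\0'; // assumed key/version-id separator

function isVersionKey(key) {
    return key.includes(VID_SEP);
}

function master(key) {
    // strip the version-id suffix to recover the master key
    const i = key.indexOf(VID_SEP);
    return i === -1 ? key : key.slice(0, i);
}
```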
DelimiterMaster v0 state chart, Graphviz source (new file, +45)

```dot
digraph {
    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
    edge [fontsize=14];
    rankdir=TB;

    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]

    node [fillcolor="lightgrey"];
    "NotSkippingPrefixNorVersions.Idle" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
    "SkippingVersions.Idle" [label="SkippingVersions",group="SkippingVersions"];
    "WaitVersionAfterPHD.Idle" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];

    node [fillcolor="lightblue"];
    "NotSkippingPrefixNorVersions.Processing" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
    "SkippingVersions.Processing" [label="SkippingVersions",group="SkippingVersions"];
    "WaitVersionAfterPHD.Processing" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];

    START -> "SkippingVersions.Idle" [label="[marker != undefined]"]
    START -> "NotSkippingPrefixNorVersions.Idle" [label="[marker == undefined]"]

    "NotSkippingPrefixNorVersions.Idle" -> "NotSkippingPrefixNorVersions.Processing" [label="filter(key, value)"]
    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
    "SkippingVersions.Idle" -> "SkippingVersions.Processing" [label="filter(key, value)"]
    "WaitVersionAfterPHD.Idle" -> "WaitVersionAfterPHD.Processing" [label="filter(key, value)"]

    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[Version.isDeleteMarker(value)]\n-> FILTER_ACCEPT"]
    "NotSkippingPrefixNorVersions.Processing" -> "WaitVersionAfterPHD.Idle" [label="[Version.isPHD(value)]\n-> FILTER_ACCEPT"]
    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(<ReplayPrefix>)]\n/ prefix <- <ReplayPrefix>\n-> FILTER_SKIP"]
    "NotSkippingPrefixNorVersions.Processing" -> END [label="[isListableKey(key, value) and\nKeys == maxKeys]\n-> FILTER_END"]
    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nhasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nnot hasDelimiter(key)]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]

    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
    "SkippingPrefix.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not key.startsWith(prefix)]"]

    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="[isVersionKey(key)]\n-> FILTER_SKIP"]
    "SkippingVersions.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key)]"]

    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[isVersionKey(key) and master(key) == PHDkey]\n/ key <- master(key)"]
    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key) or master(key) != PHDkey]"]
}
```
documentation/listingAlgos/pics/delimiterMasterV0StateChart.svg (new file, +216): Graphviz-generated SVG rendering of the DelimiterMaster v0 state chart whose source appears above (the capture cuts off partway through this file).
|
||||
<text text-anchor="middle" x="1758.37" y="-123.8" font-family="Times,serif" font-size="14.00">[isVersionKey(key) and master(key) == PHDkey]</text>
|
||||
<text text-anchor="middle" x="1758.37" y="-108.8" font-family="Times,serif" font-size="14.00">/ key <- master(key)</text>
|
||||
</g>
|
||||
<!-- WaitVersionAfterPHD.Processing->NotSkippingPrefixNorVersions.Processing -->
|
||||
<g id="edge18" class="edge">
|
||||
<title>WaitVersionAfterPHD.Processing->NotSkippingPrefixNorVersions.Processing</title>
|
||||
<path fill="none" stroke="black" d="M1546.51,-21.25C1677.94,-26.54 1888.29,-44.09 1937.37,-102 1947.71,-114.21 1946.85,-125.11 1937.37,-138 1841.62,-268.08 1749.48,-218.23 1590.37,-246 1471.26,-266.79 1143.92,-275.5 947.77,-278.94"/>
|
||||
<polygon fill="black" stroke="black" points="947.6,-275.44 937.66,-279.11 947.72,-282.44 947.6,-275.44"/>
|
||||
<text text-anchor="middle" x="2124.87" y="-116.3" font-family="Times,serif" font-size="14.00">[not isVersionKey(key) or master(key) != PHDkey]</text>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 18 KiB |
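The version-specific edges above lean on a few helpers that appear only in the labels. A minimal TypeScript sketch of what those guards assume; the '\0' version-key layout and the names isVersionKey, master, and PHDkey are taken from the edge labels and are assumptions here, not Arsenal's actual helpers:

// Assumed key layout: a version key is '<masterKey><VID_SEP><versionId>'.
const VID_SEP = '\0'; // assumed separator between master key and version id

// 'foo/bar\0v123' is a version key; 'foo/bar' is a master key.
const isVersionKey = (key: string): boolean => key.includes(VID_SEP);

// master('foo/bar\0v123') === 'foo/bar': strip the version suffix.
const master = (key: string): string => key.split(VID_SEP)[0];

// WaitVersionAfterPHD: after a placeholder (PHD) master key is seen, the
// next version of that same key is promoted to master and re-processed.
function keyAfterPHD(key: string, PHDkey: string): string {
    return isVersionKey(key) && master(key) === PHDkey
        ? master(key)  // edge action: / key <- master(key)
        : key;         // unrelated entry: re-processed unchanged
}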
35
documentation/listingAlgos/pics/delimiterStateChart.dot
Normal file
@@ -0,0 +1,35 @@
digraph {
    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
    edge [fontsize=14];
    rankdir=TB;

    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]

    node [fillcolor="lightgrey"];
    "NotSkipping.Idle" [label="NotSkipping",group="NotSkipping"];
    "NeverSkipping.Idle" [label="NeverSkipping",group="NeverSkipping"];
    "NotSkippingPrefix.Idle" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];

    node [fillcolor="lightblue"];
    "NeverSkipping.Processing" [label="NeverSkipping",group="NeverSkipping"];
    "NotSkippingPrefix.Processing" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];

    START -> "NotSkipping.Idle"
    "NotSkipping.Idle" -> "NeverSkipping.Idle" [label="[delimiter == undefined]"]
    "NotSkipping.Idle" -> "NotSkippingPrefix.Idle" [label="[delimiter == '/']"]

    "NeverSkipping.Idle" -> "NeverSkipping.Processing" [label="filter(key, value)"]
    "NotSkippingPrefix.Idle" -> "NotSkippingPrefix.Processing" [label="filter(key, value)"]
    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]

    "NeverSkipping.Processing" -> END [label="[nKeys == maxKeys]\n-> FILTER_END"]
    "NeverSkipping.Processing" -> "NeverSkipping.Idle" [label="[nKeys < maxKeys]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]
    "NotSkippingPrefix.Processing" -> END [label="[nKeys == maxKeys]\n-> FILTER_END"]
    "NotSkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[nKeys < maxKeys and hasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
    "NotSkippingPrefix.Processing" -> "NotSkippingPrefix.Idle" [label="[nKeys < maxKeys and not hasDelimiter(key)]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]
    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
    "SkippingPrefix.Processing" -> "NotSkippingPrefix.Processing" [label="[not key.startsWith(prefix)]"]
}
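Read as code, the chart above is a small filter state machine. Below is a minimal TypeScript sketch under the chart's own names (FILTER_ACCEPT/SKIP/END, hasDelimiter, prefixOf); incrementing nKeys on each accepted entry is an assumption implied by the guards, and this is an illustration, not the Delimiter class the repository ships:

const FILTER_ACCEPT = 1;
const FILTER_SKIP = 0;
const FILTER_END = -1;

type State =
    | { id: 'NeverSkipping' }
    | { id: 'NotSkippingPrefix' }
    | { id: 'SkippingPrefix'; prefix: string };

class DelimiterSketch {
    private state: State;
    private nKeys = 0;
    readonly Contents: { key: string; value: string }[] = [];
    readonly CommonPrefixes: string[] = [];

    constructor(private delimiter: string | undefined,
                private maxKeys: number) {
        // START -> NotSkipping, which immediately branches on the delimiter.
        this.state = delimiter === undefined
            ? { id: 'NeverSkipping' }
            : { id: 'NotSkippingPrefix' };
    }

    private hasDelimiter(key: string): boolean {
        return this.delimiter !== undefined && key.includes(this.delimiter);
    }

    private prefixOf(key: string): string {
        // Everything up to and including the first delimiter occurrence.
        const d = this.delimiter as string;
        return key.slice(0, key.indexOf(d) + d.length);
    }

    filter(key: string, value: string): number {
        const st = this.state;
        if (st.id === 'SkippingPrefix') {
            if (key.startsWith(st.prefix)) {
                return FILTER_SKIP; // stay in SkippingPrefix.Idle
            }
            // [not key.startsWith(prefix)]: fall through and re-process
            // this key in NotSkippingPrefix.Processing.
            this.state = { id: 'NotSkippingPrefix' };
        }
        if (this.nKeys === this.maxKeys) {
            return FILTER_END;
        }
        if (this.state.id === 'NotSkippingPrefix' && this.hasDelimiter(key)) {
            const prefix = this.prefixOf(key);
            this.CommonPrefixes.push(prefix);
            this.nKeys += 1; // assumed: accepted entries count toward maxKeys
            this.state = { id: 'SkippingPrefix', prefix };
            return FILTER_ACCEPT;
        }
        // NeverSkipping, or NotSkippingPrefix with no delimiter in the key.
        this.Contents.push({ key, value });
        this.nKeys += 1;
        return FILTER_ACCEPT;
    }
}

For example, with new DelimiterSketch('/', 1000), filtering 'photos/a.jpg' appends 'photos/' to CommonPrefixes and enters SkippingPrefix, so a subsequent 'photos/b.jpg' returns FILTER_SKIP.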
166
documentation/listingAlgos/pics/delimiterStateChart.svg
Normal file
@@ -0,0 +1,166 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
|
||||
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<!-- Generated by graphviz version 2.43.0 (0)
|
||||
-->
|
||||
<!-- Title: %3 Pages: 1 -->
|
||||
<svg width="975pt" height="533pt"
|
||||
viewBox="0.00 0.00 975.00 533.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 529)">
|
||||
<title>%3</title>
|
||||
<polygon fill="white" stroke="transparent" points="-4,4 -4,-529 971,-529 971,4 -4,4"/>
|
||||
<!-- START -->
|
||||
<g id="node1" class="node">
|
||||
<title>START</title>
|
||||
<ellipse fill="black" stroke="black" cx="283" cy="-518" rx="7" ry="7"/>
|
||||
</g>
|
||||
<!-- NotSkipping.Idle -->
|
||||
<g id="node3" class="node">
|
||||
<title>NotSkipping.Idle</title>
|
||||
<path fill="lightgrey" stroke="black" d="M379,-474C379,-474 187,-474 187,-474 181,-474 175,-468 175,-462 175,-462 175,-450 175,-450 175,-444 181,-438 187,-438 187,-438 379,-438 379,-438 385,-438 391,-444 391,-450 391,-450 391,-462 391,-462 391,-468 385,-474 379,-474"/>
|
||||
<text text-anchor="middle" x="283" y="-452.2" font-family="Times,serif" font-size="16.00">NotSkipping</text>
|
||||
</g>
|
||||
<!-- START->NotSkipping.Idle -->
|
||||
<g id="edge1" class="edge">
|
||||
<title>START->NotSkipping.Idle</title>
|
||||
<path fill="none" stroke="black" d="M283,-510.58C283,-504.23 283,-494.07 283,-484.3"/>
|
||||
<polygon fill="black" stroke="black" points="286.5,-484.05 283,-474.05 279.5,-484.05 286.5,-484.05"/>
|
||||
</g>
|
||||
<!-- END -->
|
||||
<g id="node2" class="node">
|
||||
<title>END</title>
|
||||
<ellipse fill="black" stroke="black" cx="196" cy="-120" rx="7" ry="7"/>
|
||||
<ellipse fill="none" stroke="black" cx="196" cy="-120" rx="11" ry="11"/>
|
||||
</g>
|
||||
<!-- NeverSkipping.Idle -->
|
||||
<g id="node4" class="node">
|
||||
<title>NeverSkipping.Idle</title>
|
||||
<path fill="lightgrey" stroke="black" d="M262,-387C262,-387 70,-387 70,-387 64,-387 58,-381 58,-375 58,-375 58,-363 58,-363 58,-357 64,-351 70,-351 70,-351 262,-351 262,-351 268,-351 274,-357 274,-363 274,-363 274,-375 274,-375 274,-381 268,-387 262,-387"/>
|
||||
<text text-anchor="middle" x="166" y="-365.2" font-family="Times,serif" font-size="16.00">NeverSkipping</text>
|
||||
</g>
|
||||
<!-- NotSkipping.Idle->NeverSkipping.Idle -->
|
||||
<g id="edge2" class="edge">
|
||||
<title>NotSkipping.Idle->NeverSkipping.Idle</title>
|
||||
<path fill="none" stroke="black" d="M216.5,-437.82C206.51,-433.18 196.91,-427.34 189,-420 182.25,-413.74 177.33,-405.11 173.81,-396.79"/>
|
||||
<polygon fill="black" stroke="black" points="177.05,-395.47 170.3,-387.31 170.49,-397.9 177.05,-395.47"/>
|
||||
<text text-anchor="middle" x="279.5" y="-408.8" font-family="Times,serif" font-size="14.00">[delimiter == undefined]</text>
|
||||
</g>
|
||||
<!-- NotSkippingPrefix.Idle -->
|
||||
<g id="node5" class="node">
|
||||
<title>NotSkippingPrefix.Idle</title>
|
||||
<path fill="lightgrey" stroke="black" d="M496,-387C496,-387 304,-387 304,-387 298,-387 292,-381 292,-375 292,-375 292,-363 292,-363 292,-357 298,-351 304,-351 304,-351 496,-351 496,-351 502,-351 508,-357 508,-363 508,-363 508,-375 508,-375 508,-381 502,-387 496,-387"/>
|
||||
<text text-anchor="middle" x="400" y="-365.2" font-family="Times,serif" font-size="16.00">NotSkippingPrefix</text>
|
||||
</g>
|
||||
<!-- NotSkipping.Idle->NotSkippingPrefix.Idle -->
|
||||
<g id="edge3" class="edge">
|
||||
<title>NotSkipping.Idle->NotSkippingPrefix.Idle</title>
|
||||
<path fill="none" stroke="black" d="M340.77,-437.93C351.2,-433.2 361.45,-427.29 370,-420 377.58,-413.53 383.76,-404.65 388.51,-396.16"/>
|
||||
<polygon fill="black" stroke="black" points="391.63,-397.74 393.08,-387.24 385.4,-394.54 391.63,-397.74"/>
|
||||
<text text-anchor="middle" x="442.5" y="-408.8" font-family="Times,serif" font-size="14.00">[delimiter == '/']</text>
|
||||
</g>
|
||||
<!-- NeverSkipping.Processing -->
|
||||
<g id="node7" class="node">
|
||||
<title>NeverSkipping.Processing</title>
|
||||
<path fill="lightblue" stroke="black" d="M204,-270C204,-270 12,-270 12,-270 6,-270 0,-264 0,-258 0,-258 0,-246 0,-246 0,-240 6,-234 12,-234 12,-234 204,-234 204,-234 210,-234 216,-240 216,-246 216,-246 216,-258 216,-258 216,-264 210,-270 204,-270"/>
|
||||
<text text-anchor="middle" x="108" y="-248.2" font-family="Times,serif" font-size="16.00">NeverSkipping</text>
|
||||
</g>
|
||||
<!-- NeverSkipping.Idle->NeverSkipping.Processing -->
|
||||
<g id="edge4" class="edge">
|
||||
<title>NeverSkipping.Idle->NeverSkipping.Processing</title>
|
||||
<path fill="none" stroke="black" d="M64.1,-350.93C47.33,-346.11 33.58,-340.17 28,-333 15.72,-317.21 17.05,-304.74 28,-288 30.93,-283.52 34.58,-279.6 38.69,-276.19"/>
|
||||
<polygon fill="black" stroke="black" points="40.97,-278.86 47.1,-270.22 36.92,-273.16 40.97,-278.86"/>
|
||||
<text text-anchor="middle" x="86" y="-306.8" font-family="Times,serif" font-size="14.00">filter(key, value)</text>
|
||||
</g>
|
||||
<!-- NotSkippingPrefix.Processing -->
|
||||
<g id="node8" class="node">
|
||||
<title>NotSkippingPrefix.Processing</title>
|
||||
<path fill="lightblue" stroke="black" d="M554,-270C554,-270 362,-270 362,-270 356,-270 350,-264 350,-258 350,-258 350,-246 350,-246 350,-240 356,-234 362,-234 362,-234 554,-234 554,-234 560,-234 566,-240 566,-246 566,-246 566,-258 566,-258 566,-264 560,-270 554,-270"/>
|
||||
<text text-anchor="middle" x="458" y="-248.2" font-family="Times,serif" font-size="16.00">NotSkippingPrefix</text>
|
||||
</g>
|
||||
<!-- NotSkippingPrefix.Idle->NotSkippingPrefix.Processing -->
|
||||
<g id="edge5" class="edge">
|
||||
<title>NotSkippingPrefix.Idle->NotSkippingPrefix.Processing</title>
|
||||
<path fill="none" stroke="black" d="M395.69,-350.84C392.38,-333.75 390.03,-307.33 401,-288 403.42,-283.74 406.58,-279.94 410.19,-276.55"/>
|
||||
<polygon fill="black" stroke="black" points="412.5,-279.18 418.1,-270.18 408.11,-273.73 412.5,-279.18"/>
|
||||
<text text-anchor="middle" x="459" y="-306.8" font-family="Times,serif" font-size="14.00">filter(key, value)</text>
|
||||
</g>
|
||||
<!-- SkippingPrefix.Idle -->
|
||||
<g id="node6" class="node">
|
||||
<title>SkippingPrefix.Idle</title>
|
||||
<path fill="lightgrey" stroke="black" d="M554,-138C554,-138 362,-138 362,-138 356,-138 350,-132 350,-126 350,-126 350,-114 350,-114 350,-108 356,-102 362,-102 362,-102 554,-102 554,-102 560,-102 566,-108 566,-114 566,-114 566,-126 566,-126 566,-132 560,-138 554,-138"/>
|
||||
<text text-anchor="middle" x="458" y="-116.2" font-family="Times,serif" font-size="16.00">SkippingPrefix</text>
|
||||
</g>
|
||||
<!-- SkippingPrefix.Processing -->
|
||||
<g id="node9" class="node">
|
||||
<title>SkippingPrefix.Processing</title>
|
||||
<path fill="lightblue" stroke="black" d="M691,-36C691,-36 499,-36 499,-36 493,-36 487,-30 487,-24 487,-24 487,-12 487,-12 487,-6 493,0 499,0 499,0 691,0 691,0 697,0 703,-6 703,-12 703,-12 703,-24 703,-24 703,-30 697,-36 691,-36"/>
|
||||
<text text-anchor="middle" x="595" y="-14.2" font-family="Times,serif" font-size="16.00">SkippingPrefix</text>
|
||||
</g>
|
||||
<!-- SkippingPrefix.Idle->SkippingPrefix.Processing -->
|
||||
<g id="edge6" class="edge">
|
||||
<title>SkippingPrefix.Idle->SkippingPrefix.Processing</title>
|
||||
<path fill="none" stroke="black" d="M452.35,-101.95C448.76,-87.65 446.54,-67.45 457,-54 461.44,-48.29 471.08,-43.36 483.3,-39.15"/>
|
||||
<polygon fill="black" stroke="black" points="484.61,-42.41 493.1,-36.07 482.51,-35.73 484.61,-42.41"/>
|
||||
<text text-anchor="middle" x="515" y="-65.3" font-family="Times,serif" font-size="14.00">filter(key, value)</text>
|
||||
</g>
|
||||
<!-- NeverSkipping.Processing->END -->
|
||||
<g id="edge7" class="edge">
|
||||
<title>NeverSkipping.Processing->END</title>
|
||||
<path fill="none" stroke="black" d="M102.91,-233.88C97.93,-213.45 93.18,-179.15 109,-156 123.79,-134.35 154.41,-126.09 175.08,-122.94"/>
|
||||
<polygon fill="black" stroke="black" points="175.62,-126.4 185.11,-121.69 174.76,-119.45 175.62,-126.4"/>
|
||||
<text text-anchor="middle" x="185" y="-189.8" font-family="Times,serif" font-size="14.00">[nKeys == maxKeys]</text>
|
||||
<text text-anchor="middle" x="185" y="-174.8" font-family="Times,serif" font-size="14.00">-> FILTER_END</text>
|
||||
</g>
|
||||
<!-- NeverSkipping.Processing->NeverSkipping.Idle -->
|
||||
<g id="edge8" class="edge">
|
||||
<title>NeverSkipping.Processing->NeverSkipping.Idle</title>
|
||||
<path fill="none" stroke="black" d="M129.49,-270.27C134.87,-275.48 140.18,-281.55 144,-288 153.56,-304.17 159.09,-324.63 162.21,-340.81"/>
|
||||
<polygon fill="black" stroke="black" points="158.78,-341.49 163.94,-350.74 165.68,-340.29 158.78,-341.49"/>
|
||||
<text text-anchor="middle" x="265.5" y="-321.8" font-family="Times,serif" font-size="14.00">[nKeys < maxKeys]</text>
|
||||
<text text-anchor="middle" x="265.5" y="-306.8" font-family="Times,serif" font-size="14.00">/ Contents.append(key, value)</text>
|
||||
<text text-anchor="middle" x="265.5" y="-291.8" font-family="Times,serif" font-size="14.00"> -> FILTER_ACCEPT</text>
|
||||
</g>
|
||||
<!-- NotSkippingPrefix.Processing->END -->
|
||||
<g id="edge9" class="edge">
|
||||
<title>NotSkippingPrefix.Processing->END</title>
|
||||
<path fill="none" stroke="black" d="M349.96,-237.93C333,-232.81 316.36,-225.74 302,-216 275.27,-197.87 285.01,-177.6 261,-156 247.64,-143.98 229.41,-134.62 215.65,-128.62"/>
|
||||
<polygon fill="black" stroke="black" points="216.74,-125.28 206.16,-124.7 214.07,-131.75 216.74,-125.28"/>
|
||||
<text text-anchor="middle" x="378" y="-189.8" font-family="Times,serif" font-size="14.00">[nKeys == maxKeys]</text>
|
||||
<text text-anchor="middle" x="378" y="-174.8" font-family="Times,serif" font-size="14.00"> -> FILTER_END</text>
|
||||
</g>
|
||||
<!-- NotSkippingPrefix.Processing->NotSkippingPrefix.Idle -->
|
||||
<g id="edge11" class="edge">
|
||||
<title>NotSkippingPrefix.Processing->NotSkippingPrefix.Idle</title>
|
||||
<path fill="none" stroke="black" d="M499.64,-270.11C506.59,-274.86 512.87,-280.76 517,-288 526.9,-305.38 528.94,-316.96 517,-333 513.56,-337.62 509.53,-341.66 505.07,-345.18"/>
|
||||
<polygon fill="black" stroke="black" points="502.89,-342.43 496.63,-350.98 506.85,-348.2 502.89,-342.43"/>
|
||||
<text text-anchor="middle" x="690.5" y="-321.8" font-family="Times,serif" font-size="14.00">[nKeys < maxKeys and not hasDelimiter(key)]</text>
|
||||
<text text-anchor="middle" x="690.5" y="-306.8" font-family="Times,serif" font-size="14.00">/ Contents.append(key, value)</text>
|
||||
<text text-anchor="middle" x="690.5" y="-291.8" font-family="Times,serif" font-size="14.00"> -> FILTER_ACCEPT</text>
|
||||
</g>
|
||||
<!-- NotSkippingPrefix.Processing->SkippingPrefix.Idle -->
|
||||
<g id="edge10" class="edge">
|
||||
<title>NotSkippingPrefix.Processing->SkippingPrefix.Idle</title>
|
||||
<path fill="none" stroke="black" d="M458,-233.74C458,-211.98 458,-174.32 458,-148.56"/>
|
||||
<polygon fill="black" stroke="black" points="461.5,-148.33 458,-138.33 454.5,-148.33 461.5,-148.33"/>
|
||||
<text text-anchor="middle" x="609.5" y="-204.8" font-family="Times,serif" font-size="14.00">[nKeys < maxKeys and hasDelimiter(key)]</text>
|
||||
<text text-anchor="middle" x="609.5" y="-189.8" font-family="Times,serif" font-size="14.00">/ prefix <- prefixOf(key)</text>
|
||||
<text text-anchor="middle" x="609.5" y="-174.8" font-family="Times,serif" font-size="14.00">/ CommonPrefixes.append(prefixOf(key))</text>
|
||||
<text text-anchor="middle" x="609.5" y="-159.8" font-family="Times,serif" font-size="14.00">-> FILTER_ACCEPT</text>
|
||||
</g>
|
||||
<!-- SkippingPrefix.Processing->SkippingPrefix.Idle -->
|
||||
<g id="edge12" class="edge">
|
||||
<title>SkippingPrefix.Processing->SkippingPrefix.Idle</title>
|
||||
<path fill="none" stroke="black" d="M593.49,-36.23C591.32,-50.84 586,-71.39 573,-84 567.75,-89.09 561.77,-93.45 555.38,-97.17"/>
|
||||
<polygon fill="black" stroke="black" points="553.66,-94.12 546.43,-101.87 556.91,-100.32 553.66,-94.12"/>
|
||||
<text text-anchor="middle" x="672" y="-72.8" font-family="Times,serif" font-size="14.00">[key.startsWith(prefix)]</text>
|
||||
<text text-anchor="middle" x="672" y="-57.8" font-family="Times,serif" font-size="14.00">-> FILTER_SKIP</text>
|
||||
</g>
|
||||
<!-- SkippingPrefix.Processing->NotSkippingPrefix.Processing -->
|
||||
<g id="edge13" class="edge">
|
||||
<title>SkippingPrefix.Processing->NotSkippingPrefix.Processing</title>
|
||||
<path fill="none" stroke="black" d="M703.16,-31.64C728.6,-36.87 750.75,-44.11 759,-54 778.46,-77.34 776.26,-200.01 762,-216 749.37,-230.17 656.13,-239.42 576.2,-244.84"/>
|
||||
<polygon fill="black" stroke="black" points="575.77,-241.36 566.03,-245.51 576.24,-248.34 575.77,-241.36"/>
|
||||
<text text-anchor="middle" x="870" y="-116.3" font-family="Times,serif" font-size="14.00">[not key.startsWith(prefix)]</text>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 12 KiB |
@@ -1,703 +0,0 @@
{
    "_comment": "------------------- Amazon errors ------------------",
    "AccessDenied": { "code": 403, "description": "Access Denied" },
    "AccessForbidden": { "code": 403, "description": "Access Forbidden" },
    "AccountProblem": { "code": 403, "description": "There is a problem with your AWS account that prevents the operation from completing successfully. Please use Contact Us." },
    "AmbiguousGrantByEmailAddress": { "code": 400, "description": "The email address you provided is associated with more than one account." },
    "BadDigest": { "code": 400, "description": "The Content-MD5 you specified did not match what we received." },
    "BucketAlreadyExists": { "code": 409, "description": "The requested bucket name is not available. The bucket namespace is shared by all users of the system. Please select a different name and try again." },
    "BucketAlreadyOwnedByYou": { "code": 409, "description": "Your previous request to create the named bucket succeeded and you already own it. You get this error in all AWS regions except US Standard, us-east-1. In the us-east-1 region, you will get 200 OK, but it is a no-op (if the bucket exists, S3 will not do anything)." },
    "BucketNotEmpty": { "code": 409, "description": "The bucket you tried to delete is not empty." },
    "CredentialsNotSupported": { "code": 400, "description": "This request does not support credentials." },
    "CrossLocationLoggingProhibited": { "code": 403, "description": "Cross-location logging not allowed. Buckets in one geographic location cannot log information to a bucket in another location." },
    "DeleteConflict": { "code": 409, "description": "The request was rejected because it attempted to delete a resource that has attached subordinate entities. The error message describes these entities." },
    "EntityTooSmall": { "code": 400, "description": "Your proposed upload is smaller than the minimum allowed object size." },
    "EntityTooLarge": { "code": 400, "description": "Your proposed upload exceeds the maximum allowed object size." },
    "ExpiredToken": { "code": 400, "description": "The provided token has expired." },
    "IllegalVersioningConfigurationException": { "code": 400, "description": "Indicates that the versioning configuration specified in the request is invalid." },
    "IncompleteBody": { "code": 400, "description": "You did not provide the number of bytes specified by the Content-Length HTTP header." },
    "IncorrectNumberOfFilesInPostRequest": { "code": 400, "description": "POST requires exactly one file upload per request." },
    "InlineDataTooLarge": { "code": 400, "description": "Inline data exceeds the maximum allowed size." },
    "InternalError": { "code": 500, "description": "We encountered an internal error. Please try again." },
    "InvalidAccessKeyId": { "code": 403, "description": "The AWS access key Id you provided does not exist in our records." },
    "InvalidAddressingHeader": { "code": 400, "description": "You must specify the Anonymous role." },
    "InvalidArgument": { "code": 400, "description": "Invalid Argument" },
    "InvalidBucketName": { "code": 400, "description": "The specified bucket is not valid." },
    "InvalidBucketState": { "code": 409, "description": "The request is not valid with the current state of the bucket." },
    "InvalidDigest": { "code": 400, "description": "The Content-MD5 you specified is not valid." },
    "InvalidEncryptionAlgorithmError": { "code": 400, "description": "The encryption request you specified is not valid. The valid value is AES256." },
    "InvalidLocationConstraint": { "code": 400, "description": "The specified location constraint is not valid." },
    "InvalidObjectState": { "code": 403, "description": "The operation is not valid for the current state of the object." },
    "InvalidPart": { "code": 400, "description": "One or more of the specified parts could not be found. The part might not have been uploaded, or the specified entity tag might not have matched the part's entity tag." },
    "InvalidPartOrder": { "code": 400, "description": "The list of parts was not in ascending order. The parts list must be specified in order by part number." },
    "InvalidPayer": { "code": 403, "description": "All access to this object has been disabled." },
    "InvalidPolicyDocument": { "code": 400, "description": "The content of the form does not meet the conditions specified in the policy document." },
    "InvalidRange": { "code": 416, "description": "The requested range cannot be satisfied." },
    "InvalidRedirectLocation": { "code": 400, "description": "The website redirect location must have a prefix of 'http://' or 'https://' or '/'." },
    "InvalidRequest": { "code": 400, "description": "SOAP requests must be made over an HTTPS connection." },
    "InvalidSecurity": { "code": 403, "description": "The provided security credentials are not valid." },
    "InvalidSOAPRequest": { "code": 400, "description": "The SOAP request body is invalid." },
    "InvalidStorageClass": { "code": 400, "description": "The storage class you specified is not valid." },
    "InvalidTargetBucketForLogging": { "code": 400, "description": "The target bucket for logging does not exist, is not owned by you, or does not have the appropriate grants for the log-delivery group." },
    "InvalidToken": { "code": 400, "description": "The provided token is malformed or otherwise invalid." },
    "InvalidURI": { "code": 400, "description": "Couldn't parse the specified URI." },
    "KeyTooLong": { "code": 400, "description": "Your key is too long." },
    "LimitExceeded": { "code": 409, "description": "The request was rejected because it attempted to create resources beyond the current AWS account limits. The error message describes the limit exceeded." },
    "MalformedACLError": { "code": 400, "description": "The XML you provided was not well-formed or did not validate against our published schema." },
    "MalformedPOSTRequest": { "code": 400, "description": "The body of your POST request is not well-formed multipart/form-data." },
    "MalformedXML": { "code": 400, "description": "The XML you provided was not well-formed or did not validate against our published schema." },
    "MaxMessageLengthExceeded": { "code": 400, "description": "Your request was too big." },
    "MaxPostPreDataLengthExceededError": { "code": 400, "description": "Your POST request fields preceding the upload file were too large." },
    "MetadataTooLarge": { "code": 400, "description": "Your metadata headers exceed the maximum allowed metadata size." },
    "MethodNotAllowed": { "code": 405, "description": "The specified method is not allowed against this resource." },
    "MissingAttachment": { "code": 400, "description": "A SOAP attachment was expected, but none were found." },
    "MissingContentLength": { "code": 411, "description": "You must provide the Content-Length HTTP header." },
    "MissingRequestBodyError": { "code": 400, "description": "Request body is empty." },
    "MissingSecurityElement": { "code": 400, "description": "The SOAP 1.1 request is missing a security element." },
    "MissingSecurityHeader": { "code": 400, "description": "Your request is missing a required header." },
    "NoLoggingStatusForKey": { "code": 400, "description": "There is no such thing as a logging status subresource for a key." },
    "NoSuchBucket": { "code": 404, "description": "The specified bucket does not exist." },
    "NoSuchCORSConfiguration": { "code": 404, "description": "The CORS configuration does not exist." },
    "NoSuchKey": { "code": 404, "description": "The specified key does not exist." },
    "NoSuchLifecycleConfiguration": { "code": 404, "description": "The lifecycle configuration does not exist." },
    "NoSuchWebsiteConfiguration": { "code": 404, "description": "The specified bucket does not have a website configuration." },
    "NoSuchUpload": { "code": 404, "description": "The specified multipart upload does not exist. The upload ID might be invalid, or the multipart upload might have been aborted or completed." },
    "NoSuchVersion": { "code": 404, "description": "Indicates that the version ID specified in the request does not match an existing version." },
    "NotImplemented": { "code": 501, "description": "A header you provided implies functionality that is not implemented." },
    "NotModified": { "code": 304, "description": "Not Modified." },
    "NotSignedUp": { "code": 403, "description": "Your account is not signed up for the S3 service. You must sign up before you can use S3." },
    "NoSuchBucketPolicy": { "code": 404, "description": "The specified bucket does not have a bucket policy." },
    "OperationAborted": { "code": 409, "description": "A conflicting conditional operation is currently in progress against this resource. Try again." },
    "PermanentRedirect": { "code": 301, "description": "The bucket you are attempting to access must be addressed using the specified endpoint. Send all future requests to this endpoint." },
    "PreconditionFailed": { "code": 412, "description": "At least one of the preconditions you specified did not hold." },
    "Redirect": { "code": 307, "description": "Temporary redirect." },
    "RestoreAlreadyInProgress": { "code": 409, "description": "Object restore is already in progress." },
    "RequestIsNotMultiPartContent": { "code": 400, "description": "Bucket POST must be of the enclosure-type multipart/form-data." },
    "RequestTimeout": { "code": 400, "description": "Your socket connection to the server was not read from or written to within the timeout period." },
    "RequestTimeTooSkewed": { "code": 403, "description": "The difference between the request time and the server's time is too large." },
    "RequestTorrentOfBucketError": { "code": 400, "description": "Requesting the torrent file of a bucket is not permitted." },
    "SignatureDoesNotMatch": { "code": 403, "description": "The request signature we calculated does not match the signature you provided." },
    "_comment" : {
        "note" : "This is an AWS S3 specific error. We are opting to use the more general 'ServiceUnavailable' error used throughout AWS (IAM/EC2) to have uniformity of error messages even though we are potentially compromising S3 compatibility.",
        "ServiceUnavailable": { "code": 503, "description": "Reduce your request rate." }
    },
    "ServiceUnavailable": { "code": 503, "description": "The request has failed due to a temporary failure of the server." },
    "SlowDown": { "code": 503, "description": "Reduce your request rate." },
    "TemporaryRedirect": { "code": 307, "description": "You are being redirected to the bucket while DNS updates." },
    "TokenRefreshRequired": { "code": 400, "description": "The provided token must be refreshed." },
    "TooManyBuckets": { "code": 400, "description": "You have attempted to create more buckets than allowed." },
    "TooManyParts": { "code": 400, "description": "You have attempted to upload more parts than allowed." },
    "UnexpectedContent": { "code": 400, "description": "This request does not support content." },
    "UnresolvableGrantByEmailAddress": { "code": 400, "description": "The email address you provided does not match any account on record." },
    "UserKeyMustBeSpecified": { "code": 400, "description": "The bucket POST must contain the specified field name. If it is specified, check the order of the fields." },
    "NoSuchEntity": { "code": 404, "description": "The request was rejected because it referenced an entity that does not exist. The error message describes the entity." },
    "WrongFormat": { "code": 400, "description": "Data entered by the user has a wrong format." },
    "Forbidden": { "code": 403, "description": "Authentication failed." },
    "EntityDoesNotExist": { "code": 404, "description": "Not found." },
    "EntityAlreadyExists": { "code": 409, "description": "The request was rejected because it attempted to create a resource that already exists." },
    "ServiceFailure": { "code": 500, "description": "Server error: the request processing has failed because of an unknown error, exception or failure." },
    "IncompleteSignature": { "code": 400, "description": "The request signature does not conform to AWS standards." },
    "InternalFailure": { "code": 500, "description": "The request processing has failed because of an unknown error, exception or failure." },
    "InvalidAction": { "code": 400, "description": "The action or operation requested is invalid. Verify that the action is typed correctly." },
    "InvalidClientTokenId": { "code": 403, "description": "The X.509 certificate or AWS access key ID provided does not exist in our records." },
    "InvalidParameterCombination": { "code": 400, "description": "Parameters that must not be used together were used together." },
    "InvalidParameterValue": { "code": 400, "description": "An invalid or out-of-range value was supplied for the input parameter." },
    "InvalidQueryParameter": { "code": 400, "description": "The AWS query string is malformed or does not adhere to AWS standards." },
    "MalformedQueryString": { "code": 404, "description": "The query string contains a syntax error." },
    "MissingAction": { "code": 400, "description": "The request is missing an action or a required parameter." },
    "MissingAuthenticationToken": { "code": 403, "description": "The request must contain either a valid (registered) AWS access key ID or X.509 certificate." },
    "MissingParameter": { "code": 400, "description": "A required parameter for the specified action is not supplied." },
    "OptInRequired": { "code": 403, "description": "The AWS access key ID needs a subscription for the service." },
    "RequestExpired": { "code": 400, "description": "The request reached the service more than 15 minutes after the date stamp on the request or more than 15 minutes after the request expiration date (such as for pre-signed URLs), or the date stamp on the request is more than 15 minutes in the future." },
    "Throttling": { "code": 400, "description": "The request was denied due to request throttling." },
    "AccountNotFound": { "code": 404, "description": "No account was found in Vault; please contact your system administrator." },
    "ValidationError": { "code": 400, "description": "The specified value is invalid." },
    "MalformedPolicyDocument": { "code": 400, "description": "Syntax errors in policy." },
    "InvalidInput": { "code": 400, "description": "The request was rejected because an invalid or out-of-range value was supplied for an input parameter." },
    "_comment": "-------------- Special non-AWS S3 errors --------------",
    "MPUinProgress": { "code": 409, "description": "The bucket you tried to delete has an ongoing multipart upload." },
    "_comment": "-------------- Internal project errors --------------",
    "_comment": "----------------------- Vault -----------------------",
    "_comment": "#### formatErrors ####",
    "BadName": { "description": "name not ok", "code": 5001 },
    "BadAccount": { "description": "account not ok", "code": 5002 },
    "BadGroup": { "description": "group not ok", "code": 5003 },
    "BadId": { "description": "id not ok", "code": 5004 },
    "BadAccountName": { "description": "accountName not ok", "code": 5005 },
    "BadNameFriendly": { "description": "nameFriendly not ok", "code": 5006 },
    "BadEmailAddress": { "description": "email address not ok", "code": 5007 },
    "BadPath": { "description": "path not ok", "code": 5008 },
    "BadArn": { "description": "arn not ok", "code": 5009 },
    "BadCreateDate": { "description": "createDate not ok", "code": 5010 },
    "BadLastUsedDate": { "description": "lastUsedDate not ok", "code": 5011 },
    "BadNotBefore": { "description": "notBefore not ok", "code": 5012 },
    "BadNotAfter": { "description": "notAfter not ok", "code": 5013 },
    "BadSaltedPwd": { "description": "salted password not ok", "code": 5014 },
    "ok": { "description": "No error", "code": 200 },
    "BadUser": { "description": "user not ok", "code": 5016 },
    "BadSaltedPasswd": { "description": "salted password not ok", "code": 5017 },
    "BadPasswdDate": { "description": "password date not ok", "code": 5018 },
    "BadCanonicalId": { "description": "canonicalId not ok", "code": 5019 },
    "BadAlias": { "description": "alias not ok", "code": 5020 },
    "_comment": "#### internalErrors ####",
    "DBPutFailed": { "description": "DB put failed", "code": 5021 },
    "_comment": "#### alreadyExistErrors ####",
    "AccountEmailAlreadyUsed": { "description": "another account already uses that email", "code": 5022 },
    "AccountNameAlreadyUsed": { "description": "another account already uses that name", "code": 5023 },
    "UserEmailAlreadyUsed": { "description": "another user already uses that email", "code": 5024 },
    "UserNameAlreadyUsed": { "description": "another user already uses that name", "code": 5025 },
    "_comment": "#### doesntExistErrors ####",
    "NoParentAccount": { "description": "parent account does not exist", "code": 5026 },
    "_comment": "#### authErrors ####",
    "BadStringToSign": { "description": "stringToSign not ok", "code": 5027 },
    "BadSignatureFromRequest": { "description": "signatureFromRequest not ok", "code": 5028 },
    "BadAlgorithm": { "description": "hashAlgorithm not ok", "code": 5029 },
    "SecretKeyDoesNotExist": { "description": "secret key does not exist", "code": 5030 },
    "InvalidRegion": { "description": "Region was not provided or is not recognized by the system", "code": 5031 },
    "ScopeDate": { "description": "scope date is missing, or format is invalid", "code": 5032 },
    "BadAccessKey": { "description": "access key not ok", "code": 5033 },
    "NoDict": { "description": "no dictionary of params provided for signature verification", "code": 5034 },
    "BadSecretKey": { "description": "secretKey not ok", "code": 5035 },
    "BadSecretKeyValue": { "description": "secretKey value not ok", "code": 5036 },
    "BadSecretKeyStatus": { "description": "secretKey status not ok", "code": 5037 },
    "_comment": "#### OidcpErrors ####",
    "BadUrl": { "description": "url not ok", "code": 5038 },
    "BadClientIdList": { "description": "client id list not ok", "code": 5039 },
    "BadThumbprintList": { "description": "thumbprint list not ok", "code": 5040 },
    "BadObject": { "description": "Object not ok", "code": 5041 },
    "_comment": "#### RoleErrors ####",
    "BadRole": { "description": "role not ok", "code": 5042 },
    "_comment": "#### SamlpErrors ####",
    "BadSamlp": { "description": "samlp not ok", "code": 5043 },
    "BadMetadataDocument": { "description": "metadata document not ok", "code": 5044 },
    "BadSessionIndex": { "description": "session index not ok", "code": 5045 },
    "Unauthorized": { "description": "not authenticated", "code": 401 },
    "_comment": "--------------------- MetaData ---------------------",
    "_comment": "#### formatErrors ####",
    "CacheUpdated": { "description": "The cache has been updated", "code": 500 },
    "DBNotFound": { "description": "This DB does not exist", "code": 404 },
    "DBAlreadyExists": { "description": "This DB already exists", "code": 409 },
    "ObjNotFound": { "description": "This object does not exist", "code": 404 },
    "PermissionDenied": { "description": "Permission denied", "code": 403 },
    "BadRequest": { "description": "BadRequest", "code": 400 },
    "RaftSessionNotLeader": { "description": "NotLeader", "code": 500 },
    "RaftSessionLeaderNotConnected": { "description": "RaftSessionLeaderNotConnected", "code": 400 },
    "NoLeaderForDB": { "description": "NoLeaderForDB", "code": 400 },
    "RouteNotFound": { "description": "RouteNotFound", "code": 404 },
    "NoMapsInConfig": { "description": "NoMapsInConfig", "code": 404 },
    "DBAPINotReady": { "description": "DBAPINotReady", "code": 500 },
    "NotEnoughMapsInConfig": { "description": "NotEnoughMapsInConfig", "code": 400 }
}
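The deleted JSON above maps each error name to a code (an HTTP status, or an internal 5xxx code for Vault) and a description. A minimal TypeScript sketch of how such a map can be hydrated into throwable objects; this mirrors the shape of the JSON only, and all names here (RawEntry, SketchError, buildErrors) are illustrative, not Arsenal's actual lib/errors implementation:

interface RawEntry {
    code: number;
    description: string;
}

// Error carrying the same fields as each JSON entry, plus a boolean flag
// named after the error so checks like `err.NoSuchKey` work (matching the
// optional flags declared on ArsenalError in index.d.ts below).
class SketchError extends Error {
    code: number;
    description: string;

    constructor(type: string, entry: RawEntry) {
        super(`${type}: ${entry.description}`);
        this.code = entry.code;
        this.description = entry.description;
        (this as any)[type] = true;
    }
}

function buildErrors(raw: Record<string, unknown>): Record<string, SketchError> {
    const out: Record<string, SketchError> = {};
    Object.keys(raw).forEach(name => {
        if (name === '_comment') {
            return; // skip the section-separator comments used in the JSON
        }
        out[name] = new SketchError(name, raw[name] as RawEntry);
    });
    return out;
}

// e.g. buildErrors(parsedJson).NoSuchKey.code === 404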
39
eve/main.yml
@@ -1,39 +0,0 @@
---
version: 0.2

branches:
    default:
        stage: pre-merge

stages:
    pre-merge:
        worker: &master-worker
            type: docker
            path: eve/workers/master
            volumes:
                - '/home/eve/workspace'
        steps:
            - Git:
                name: fetch source
                repourl: '%(prop:git_reference)s'
                shallow: True
                retryFetch: True
                haltOnFailure: True
            - ShellCommand:
                name: install dependencies
                command: npm install
            - ShellCommand:
                name: run lint yml
                command: npm run --silent lint_yml
            - ShellCommand:
                name: run lint
                command: npm run --silent lint -- --max-warnings 0
            - ShellCommand:
                name: run lint_md
                command: npm run --silent lint_md
            - ShellCommand:
                name: run test
                command: npm run --silent test
            - ShellCommand:
                name: run ft_test
                command: npm run ft_test
@@ -1,55 +0,0 @@
FROM ubuntu:trusty

#
# Install apt packages needed by the buildchain
#
ENV LANG C.UTF-8
COPY buildbot_worker_packages.list arsenal_packages.list /tmp/
RUN apt-get update -q && apt-get -qy install curl apt-transport-https \
    && apt-get install -qy software-properties-common python-software-properties \
    && curl --silent https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - \
    && echo "deb https://deb.nodesource.com/node_6.x trusty main" > /etc/apt/sources.list.d/nodesource.list \
    && add-apt-repository ppa:ubuntu-toolchain-r/test \
    && apt-get update -q \
    && cat /tmp/buildbot_worker_packages.list | xargs apt-get install -qy \
    && cat /tmp/arsenal_packages.list | xargs apt-get install -qy \
    && pip install pip==9.0.1 \
    && rm -rf /var/lib/apt/lists/* \
    && rm -f /tmp/*_packages.list

#
# Install useful Node.js dependencies
#

RUN npm install mocha -g

#
# Add user eve
#

RUN adduser -u 1042 --home /home/eve --disabled-password --gecos "" eve \
    && adduser eve sudo \
    && sed -ri 's/(%sudo.*)ALL$/\1NOPASSWD:ALL/' /etc/sudoers

#
# Run buildbot-worker on startup
#

ARG BUILDBOT_VERSION=0.9.12
RUN pip install yamllint
RUN pip install buildbot-worker==$BUILDBOT_VERSION

USER eve
ENV HOME /home/eve

#
# Set up the Node.js environment
#

ENV CXX=g++-4.9
ENV LANG C.UTF-8

WORKDIR /home/eve/workspace
CMD buildbot-worker create-worker . "$BUILDMASTER:$BUILDMASTER_PORT" "$WORKERNAME" "$WORKERPASS" \
    && sudo service redis-server start \
    && buildbot-worker start --nodaemon
@@ -1,3 +0,0 @@
nodejs
redis-server
g++-4.9
@@ -1,9 +0,0 @@
ca-certificates
git
libffi-dev
libssl-dev
python2.7
python2.7-dev
python-pip
software-properties-common
sudo
194
index.d.ts
vendored
@@ -1,194 +0,0 @@
import { Logger } from 'werelogs';

interface Ciphers {
    ciphers: string;
}

interface Dhparam {
    dhparam: string;
}

declare module "arsenal" {
    class ArsenalError extends Error {
        code: number;
        description: string;
        'AccessDenied'?: boolean;
        'AccountProblem'?: boolean;
        'AmbiguousGrantByEmailAddress'?: boolean;
        'BadDigest'?: boolean;
        'BucketAlreadyExists'?: boolean;
        'BucketAlreadyOwnedByYou'?: boolean;
        'BucketNotEmpty'?: boolean;
        'CredentialsNotSupported'?: boolean;
        'CrossLocationLoggingProhibited'?: boolean;
        'DeleteConflict'?: boolean;
        'EntityTooSmall'?: boolean;
        'EntityTooLarge'?: boolean;
        'ExpiredToken'?: boolean;
        'IllegalVersioningConfigurationException'?: boolean;
        'IncompleteBody'?: boolean;
        'IncorrectNumberOfFilesInPostRequest'?: boolean;
        'InlineDataTooLarge'?: boolean;
        'InternalError'?: boolean;
        'InvalidAccessKeyId'?: boolean;
        'InvalidAddressingHeader'?: boolean;
        'InvalidArgument'?: boolean;
        'InvalidBucketName'?: boolean;
        'InvalidBucketState'?: boolean;
        'InvalidDigest'?: boolean;
        'InvalidEncryptionAlgorithmError'?: boolean;
        'InvalidLocationConstraint'?: boolean;
        'InvalidObjectState'?: boolean;
        'InvalidPart'?: boolean;
        'InvalidPartOrder'?: boolean;
        'InvalidPayer'?: boolean;
        'InvalidPolicyDocument'?: boolean;
        'InvalidRange'?: boolean;
        'InvalidRequest'?: boolean;
        'InvalidSecurity'?: boolean;
        'InvalidSOAPRequest'?: boolean;
        'InvalidStorageClass'?: boolean;
        'InvalidTargetBucketForLogging'?: boolean;
        'InvalidToken'?: boolean;
        'InvalidURI'?: boolean;
        'KeyTooLong'?: boolean;
        'LimitExceeded'?: boolean;
        'MalformedACLError'?: boolean;
        'MalformedPOSTRequest'?: boolean;
        'MalformedXML'?: boolean;
        'MaxMessageLengthExceeded'?: boolean;
        'MaxPostPreDataLengthExceededError'?: boolean;
        'MetadataTooLarge'?: boolean;
        'MethodNotAllowed'?: boolean;
        'MissingAttachment'?: boolean;
        'MissingContentLength'?: boolean;
        'MissingRequestBodyError'?: boolean;
        'MissingSecurityElement'?: boolean;
        'MissingSecurityHeader'?: boolean;
        'NoLoggingStatusForKey'?: boolean;
        'NoSuchBucket'?: boolean;
        'NoSuchKey'?: boolean;
        'NoSuchLifecycleConfiguration'?: boolean;
        'NoSuchUpload'?: boolean;
        'NoSuchVersion'?: boolean;
        'NotImplemented'?: boolean;
        'NotModified'?: boolean;
        'NotSignedUp'?: boolean;
        'NoSuchBucketPolicy'?: boolean;
        'OperationAborted'?: boolean;
        'PermanentRedirect'?: boolean;
        'PreconditionFailed'?: boolean;
        'Redirect'?: boolean;
        'RestoreAlreadyInProgress'?: boolean;
        'RequestIsNotMultiPartContent'?: boolean;
        'RequestTimeout'?: boolean;
        'RequestTimeTooSkewed'?: boolean;
        'RequestTorrentOfBucketError'?: boolean;
        'SignatureDoesNotMatch'?: boolean;
        'ServiceUnavailable'?: boolean;
        'SlowDown'?: boolean;
        'TemporaryRedirect'?: boolean;
        'TokenRefreshRequired'?: boolean;
        'TooManyBuckets'?: boolean;
        'TooManyParts'?: boolean;
        'UnexpectedContent'?: boolean;
        'UnresolvableGrantByEmailAddress'?: boolean;
        'UserKeyMustBeSpecified'?: boolean;
        'NoSuchEntity'?: boolean;
        'WrongFormat'?: boolean;
        'Forbidden'?: boolean;
        'EntityDoesNotExist'?: boolean;
        'EntityAlreadyExists'?: boolean;
        'ServiceFailure'?: boolean;
        'IncompleteSignature'?: boolean;
        'InternalFailure'?: boolean;
        'InvalidAction'?: boolean;
        'InvalidClientTokenId'?: boolean;
        'InvalidParameterCombination'?: boolean;
        'InvalidParameterValue'?: boolean;
        'InvalidQueryParameter'?: boolean;
        'MalformedQueryString'?: boolean;
        'MissingAction'?: boolean;
        'MissingAuthenticationToken'?: boolean;
        'MissingParameter'?: boolean;
        'OptInRequired'?: boolean;
        'RequestExpired'?: boolean;
        'Throttling'?: boolean;
        'AccountNotFound'?: boolean;
        'ValidationError'?: boolean;
        'MalformedPolicyDocument'?: boolean;
        'InvalidInput'?: boolean;
        'MPUinProgress'?: boolean;
        'BadName'?: boolean;
        'BadAccount'?: boolean;
        'BadGroup'?: boolean;
        'BadId'?: boolean;
        'BadAccountName'?: boolean;
        'BadNameFriendly'?: boolean;
        'BadEmailAddress'?: boolean;
        'BadPath'?: boolean;
        'BadArn'?: boolean;
        'BadCreateDate'?: boolean;
        'BadLastUsedDate'?: boolean;
        'BadNotBefore'?: boolean;
        'BadNotAfter'?: boolean;
        'BadSaltedPwd'?: boolean;
        'ok'?: boolean;
        'BadUser'?: boolean;
        'BadSaltedPasswd'?: boolean;
        'BadPasswdDate'?: boolean;
        'BadCanonicalId'?: boolean;
        'BadAlias'?: boolean;
        'DBPutFailed'?: boolean;
        'AccountEmailAlreadyUsed'?: boolean;
        'AccountNameAlreadyUsed'?: boolean;
        'UserEmailAlreadyUsed'?: boolean;
        'UserNameAlreadyUsed'?: boolean;
        'NoParentAccount'?: boolean;
        'BadStringToSign'?: boolean;
        'BadSignatureFromRequest'?: boolean;
        'BadAlgorithm'?: boolean;
        'SecretKeyDoesNotExist'?: boolean;
        'InvalidRegion'?: boolean;
        'ScopeDate'?: boolean;
        'BadAccessKey'?: boolean;
        'NoDict'?: boolean;
        'BadSecretKey'?: boolean;
        'BadSecretKeyValue'?: boolean;
        'BadSecretKeyStatus'?: boolean;
        'BadUrl'?: boolean;
        'BadClientIdList'?: boolean;
        'BadThumbprintList'?: boolean;
        'BadObject'?: boolean;
        'BadRole'?: boolean;
        'BadSamlp'?: boolean;
        'BadMetadataDocument'?: boolean;
        'BadSessionIndex'?: boolean;
        'Unauthorized'?: boolean;
        'CacheUpdated'?: boolean;
        'DBNotFound'?: boolean;
        'DBAlreadyExists'?: boolean;
        'ObjNotFound'?: boolean;
        'PermissionDenied'?: boolean;
        'BadRequest'?: boolean;
        'RaftSessionNotLeader'?: boolean;
        'RaftSessionLeaderNotConnected'?: boolean;
        'NoLeaderForDB'?: boolean;
        'RouteNotFound'?: boolean;
        'NoMapsInConfig'?: boolean;
        'DBAPINotReady'?: boolean;
        'NotEnoughMapsInConfig'?: boolean;
    }

    export var errors: { [key: string]: ArsenalError };

    export class Clustering {
        constructor(size: number, logger: Logger, timeout?: number);
        start(cb: (cluster: Clustering) => void): Clustering;
    }

    namespace https {
        var ciphers: Ciphers;
        var dhparam: Dhparam;
    }
}
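A short sketch of how a consumer would use these typings; it assumes the 'arsenal' and 'werelogs' packages are installed and that the werelogs Logger takes a name string, so treat the details as illustrative:

import { errors, Clustering } from 'arsenal';
import { Logger } from 'werelogs';

const err = errors.NoSuchKey;
if (err.NoSuchKey) {
    // Boolean flag declared on ArsenalError above;
    // prints: 404 'The specified key does not exist.'
    console.log(err.code, err.description);
}

// Fork four workers with a 5s timeout; the callback semantics depend on
// the implementation behind the declaration.
new Clustering(4, new Logger('example'), 5000)
    .start(cluster => {
        console.log('cluster started');
    });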
44
index.js
@@ -1,44 +0,0 @@
module.exports = {
    auth: require('./lib/auth/auth'),
    constants: require('./lib/constants'),
    db: require('./lib/db'),
    errors: require('./lib/errors.js'),
    shuffle: require('./lib/shuffle'),
    stringHash: require('./lib/stringHash'),
    ipCheck: require('./lib/ipCheck'),
    https: {
        ciphers: require('./lib/https/ciphers.js'),
        dhparam: require('./lib/https/dh2048.js'),
    },
    algorithms: {
        list: {
            Basic: require('./lib/algos/list/basic').List,
            Delimiter: require('./lib/algos/list/delimiter').Delimiter,
            DelimiterVersions: require('./lib/algos/list/delimiterVersions')
                .DelimiterVersions,
            DelimiterMaster: require('./lib/algos/list/delimiterMaster')
                .DelimiterMaster,
            MPU: require('./lib/algos/list/MPU').MultipartUploads,
        },
    },
    policies: {
        evaluators: require('./lib/policyEvaluator/evaluator.js'),
        validateUserPolicy: require('./lib/policy/policyValidator')
            .validateUserPolicy,
        RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
    },
    Clustering: require('./lib/Clustering'),
    testing: {
        matrix: require('./lib/testing/matrix.js'),
    },
    versioning: {
        VersioningConstants: require('./lib/versioning/constants.js')
            .VersioningConstants,
        VersioningUtils: require('./lib/versioning/utils.js').VersioningUtils,
    },
    network: {
        http: {
            server: require('./lib/network/http/server'),
        },
    },
};
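This CommonJS entry point is deleted in favor of the typed index.ts added below. A hedged sketch of what that change means for consumers; the exact exported surface is limited to the names visible in this diff:

// Old entry point (deleted above), CommonJS style:
//     const { errors, algorithms } = require('arsenal');
// New entry point (index.ts below), ES module style:
import { errors, algorithms } from 'arsenal';

// Either way, the listing helpers come from the same place; Delimiter is
// presumably the class documented by delimiterStateChart.dot earlier.
const { Delimiter } = algorithms.list;
console.log(errors.NoSuchBucket.code); // 404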
159
index.ts
Normal file
@@ -0,0 +1,159 @@
|
||||
import * as evaluators from './lib/policyEvaluator/evaluator';
import evaluatePrincipal from './lib/policyEvaluator/principal';
import RequestContext from './lib/policyEvaluator/RequestContext';
import * as requestUtils from './lib/policyEvaluator/requestUtils';
import * as actionMaps from './lib/policyEvaluator/utils/actionMaps';
import { validateUserPolicy } from './lib/policy/policyValidator';
import * as userMetadata from './lib/s3middleware/userMetadata';
import convertToXml from './lib/s3middleware/convertToXml';
import escapeForXml from './lib/s3middleware/escapeForXml';
import * as objectLegalHold from './lib/s3middleware/objectLegalHold';
import * as tagging from './lib/s3middleware/tagging';
import { validateConditionalHeaders } from './lib/s3middleware/validateConditionalHeaders';
import MD5Sum from './lib/s3middleware/MD5Sum';
import NullStream from './lib/s3middleware/nullStream';
import * as objectUtils from './lib/s3middleware/objectUtils';
import * as mpuUtils from './lib/s3middleware/azureHelpers/mpuUtils';
import ResultsCollector from './lib/s3middleware/azureHelpers/ResultsCollector';
import SubStreamInterface from './lib/s3middleware/azureHelpers/SubStreamInterface';
import * as processMpuParts from './lib/s3middleware/processMpuParts';
import * as retention from './lib/s3middleware/objectRetention';
import * as lifecycleHelpers from './lib/s3middleware/lifecycleHelpers';

export { default as errors } from './lib/errors';
export { default as Clustering } from './lib/Clustering';
export * as ipCheck from './lib/ipCheck';
export * as auth from './lib/auth/auth';
export * as constants from './lib/constants';
export * as https from './lib/https';
export * as metrics from './lib/metrics';
export * as network from './lib/network';
export * as s3routes from './lib/s3routes';
export * as versioning from './lib/versioning';
export * as stream from './lib/stream';
export * as jsutil from './lib/jsutil';
export { default as stringHash } from './lib/stringHash';
export * as db from './lib/db';
export { default as shuffle } from './lib/shuffle';
export * as models from './lib/models';

export const algorithms = {
    list: {
        Basic: require('./lib/algos/list/basic').List,
        Delimiter: require('./lib/algos/list/delimiter').Delimiter,
        DelimiterVersions: require('./lib/algos/list/delimiterVersions').DelimiterVersions,
        DelimiterMaster: require('./lib/algos/list/delimiterMaster').DelimiterMaster,
        MPU: require('./lib/algos/list/MPU').MultipartUploads,
    },
    listTools: {
        DelimiterTools: require('./lib/algos/list/tools'),
    },
    cache: {
        LRUCache: require('./lib/algos/cache/LRUCache'),
    },
    stream: {
        MergeStream: require('./lib/algos/stream/MergeStream'),
    },
    SortedSet: require('./lib/algos/set/SortedSet'),
    Heap: require('./lib/algos/heap/Heap'),
};

export const policies = {
    evaluators,
    validateUserPolicy,
    evaluatePrincipal,
    RequestContext,
    requestUtils,
    actionMaps,
};

export const testing = {
    matrix: require('./lib/testing/matrix.js'),
};

export const s3middleware = {
    userMetadata,
    convertToXml,
    escapeForXml,
    objectLegalHold,
    tagging,
    validateConditionalHeaders,
    MD5Sum,
    NullStream,
    objectUtils,
    azureHelper: {
        mpuUtils,
        ResultsCollector,
        SubStreamInterface,
    },
    processMpuParts,
    retention,
    lifecycleHelpers,
};

export const storage = {
    metadata: {
        MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
        bucketclient: {
            BucketClientInterface:
                require('./lib/storage/metadata/bucketclient/' +
                    'BucketClientInterface'),
            LogConsumer:
                require('./lib/storage/metadata/bucketclient/LogConsumer'),
        },
        file: {
            BucketFileInterface:
                require('./lib/storage/metadata/file/BucketFileInterface'),
            MetadataFileServer:
                require('./lib/storage/metadata/file/MetadataFileServer'),
            MetadataFileClient:
                require('./lib/storage/metadata/file/MetadataFileClient'),
        },
        inMemory: {
            metastore:
                require('./lib/storage/metadata/in_memory/metastore'),
            metadata: require('./lib/storage/metadata/in_memory/metadata'),
            bucketUtilities:
                require('./lib/storage/metadata/in_memory/bucket_utilities'),
        },
        mongoclient: {
            MongoClientInterface:
                require('./lib/storage/metadata/mongoclient/' +
                    'MongoClientInterface'),
            LogConsumer:
                require('./lib/storage/metadata/mongoclient/LogConsumer'),
        },
        proxy: {
            Server: require('./lib/storage/metadata/proxy/Server'),
        },
    },
    data: {
        DataWrapper: require('./lib/storage/data/DataWrapper'),
        MultipleBackendGateway:
            require('./lib/storage/data/MultipleBackendGateway'),
        parseLC: require('./lib/storage/data/LocationConstraintParser'),
        file: {
            DataFileStore:
                require('./lib/storage/data/file/DataFileStore'),
            DataFileInterface:
                require('./lib/storage/data/file/DataFileInterface'),
        },
        external: {
            AwsClient: require('./lib/storage/data/external/AwsClient'),
            AzureClient: require('./lib/storage/data/external/AzureClient'),
            GcpClient: require('./lib/storage/data/external/GcpClient'),
            GCP: require('./lib/storage/data/external/GCP/GcpService'),
            GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
            GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
            PfsClient: require('./lib/storage/data/external/PfsClient'),
            backendUtils: require('./lib/storage/data/external/utils'),
        },
        inMemory: {
            datastore: require('./lib/storage/data/in_memory/datastore'),
        },
    },
    utils: require('./lib/storage/utils'),
};

export const pensieve = {
    credentialUtils: require('./lib/executables/pensieveCreds/utils'),
};
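For comparison with the deleted index.js above, a short consumption sketch (assuming the package compiles to CommonJS, so the old require() style keeps working alongside ES-module imports):

// Old CommonJS style still works against the compiled index:
const arsenal = require('arsenal');
const { Delimiter } = arsenal.algorithms.list;

// ES-module style against index.ts would be:
// import { errors, Clustering, algorithms } from 'arsenal';

console.log(typeof arsenal.errors); // 'object'
console.log(typeof Delimiter);      // 'function'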
@@ -1,18 +1,28 @@
'use strict'; // eslint-disable-line
import cluster, { Worker } from 'cluster';
import * as werelogs from 'werelogs';

const cluster = require('cluster');
export default class Clustering {
    _size: number;
    _shutdownTimeout: number;
    _logger: werelogs.Logger;
    _shutdown: boolean;
    _workers: (Worker | undefined)[];
    _workersTimeout: (NodeJS.Timeout | undefined)[];
    _workersStatus: (number | string | undefined)[];
    _status: number;
    _exitCb?: (clustering: Clustering, exitSignal?: string) => void;
    _index?: number;

class Clustering {
    /**
     * Constructor
     *
     * @param {number} size Cluster size
     * @param {Logger} logger Logger object
     * @param {number} [shutdownTimeout=5000] Change default shutdown timeout
     * @param size Cluster size
     * @param logger Logger object
     * @param [shutdownTimeout=5000] Change default shutdown timeout
     * releasing resources
     * @return {Clustering} itself
     * @return itself
     */
    constructor(size, logger, shutdownTimeout) {
    constructor(size: number, logger: werelogs.Logger, shutdownTimeout?: number) {
        this._size = size;
        if (size < 1) {
            throw new Error('Cluster size must be greater than or equal to 1');
@@ -32,7 +42,6 @@ class Clustering {
     * Method called after a stop() call
     *
     * @private
     * @return {undefined}
     */
    _afterStop() {
        // Assuming all workers shut down gracefully
@@ -41,10 +50,11 @@ class Clustering {
        for (let i = 0; i < size; ++i) {
            // If the process returned an error code or was killed by a
            // signal, set the status
            if (typeof this._workersStatus[i] === 'number') {
                this._status = this._workersStatus[i];
            const status = this._workersStatus[i];
            if (typeof status === 'number') {
                this._status = status;
                break;
            } else if (typeof this._workersStatus[i] === 'string') {
            } else if (typeof status === 'string') {
                this._status = 1;
                break;
            }
@@ -58,13 +68,17 @@ class Clustering {
    /**
     * Method called when a worker exited
     *
     * @param {Cluster.worker} worker - Current worker
     * @param {number} i - Worker index
     * @param {number} code - Exit code
     * @param {string} signal - Exit signal
     * @return {undefined}
     * @param worker - Current worker
     * @param i - Worker index
     * @param code - Exit code
     * @param signal - Exit signal
     */
    _workerExited(worker, i, code, signal) {
    _workerExited(
        worker: Worker,
        i: number,
        code: number,
        signal: string,
    ) {
        // If the worker:
        // - was killed by a signal
        // - returned an error code
@@ -91,8 +105,9 @@ class Clustering {
            this._workersStatus[i] = undefined;
        }
        this._workers[i] = undefined;
        if (this._workersTimeout[i]) {
            clearTimeout(this._workersTimeout[i]);
        const timeout = this._workersTimeout[i];
        if (timeout) {
            clearTimeout(timeout);
            this._workersTimeout[i] = undefined;
        }
        // If we don't trigger the stop method, the watchdog
@@ -110,29 +125,28 @@ class Clustering {
    /**
     * Method to start a worker
     *
     * @param {number} i Index of the starting worker
     * @return {undefined}
     * @param i Index of the starting worker
     */
    startWorker(i) {
        if (!cluster.isMaster) {
    startWorker(i: number) {
        if (!cluster.isPrimary) {
            return;
        }
        // Fork a new worker
        this._workers[i] = cluster.fork();
        // Listen for messages from the worker
        this._workers[i].on('message', msg => {
        this._workers[i]!.on('message', msg => {
            // If the worker is ready, send it its id
            if (msg === 'ready') {
                this._workers[i].send({ msg: 'setup', id: i });
                this._workers[i]!.send({ msg: 'setup', id: i });
            }
        });
        this._workers[i].on('exit', (code, signal) =>
            this._workerExited(this._workers[i], i, code, signal));
        this._workers[i]!.on('exit', (code, signal) =>
            this._workerExited(this._workers[i]!, i, code, signal));
        // Triggered when the worker has started
        this._workers[i].on('online', () => {
        this._workers[i]!.on('online', () => {
            this._logger.info('Worker started', {
                id: i,
                childPid: this._workers[i].process.pid,
                childPid: this._workers[i]!.process.pid,
            });
        });
    }
@@ -140,10 +154,10 @@ class Clustering {
    /**
     * Method to put a handler on cluster exit
     *
     * @param {function} cb - Callback(Clustering, [exitSignal])
     * @return {Clustering} Itself
     * @param cb - Callback(Clustering, [exitSignal])
     * @return Itself
     */
    onExit(cb) {
    onExit(cb: (clustering: Clustering, exitSignal?: string) => void) {
        this._exitCb = cb;
        return this;
    }
@@ -152,33 +166,33 @@ class Clustering {
     * Method to start the cluster (if master) or to start the callback
     * (worker)
     *
     * @param {function} cb - Callback to run the worker
     * @return {Clustering} itself
     * @param cb - Callback to run the worker
     * @return itself
     */
    start(cb) {
    start(cb: (clustering: Clustering) => void) {
        process.on('SIGINT', () => this.stop('SIGINT'));
        process.on('SIGHUP', () => this.stop('SIGHUP'));
        process.on('SIGQUIT', () => this.stop('SIGQUIT'));
        process.on('SIGTERM', () => this.stop('SIGTERM'));
        process.on('SIGPIPE', () => {});
        process.on('exit', (code, signal) => {
        process.on('exit', (code?: number, signal?: string) => {
            if (this._exitCb) {
                this._status = code || 0;
                return this._exitCb(this, signal);
            }
            return process.exit(code || 0);
        });
        process.on('uncaughtException', err => {
        process.on('uncaughtException', (err: Error) => {
            this._logger.fatal('caught error', {
                error: err.message,
                stack: err.stack.split('\n').map(str => str.trim()),
                stack: err.stack?.split('\n')?.map(str => str.trim()),
            });
            process.exit(1);
        });
        if (!cluster.isMaster) {
        if (!cluster.isPrimary) {
            // Waiting for the message from the master to
            // know the id of the slave cluster
            process.on('message', msg => {
            process.on('message', (msg: any) => {
                if (msg.msg === 'setup') {
                    this._index = msg.id;
                    cb(this);
@@ -186,7 +200,7 @@ class Clustering {
            });
            // Send a message to the master, to let it know
            // the worker has started
            process.send('ready');
            process.send?.('ready');
        } else {
            for (let i = 0; i < this._size; ++i) {
                this.startWorker(i);
@@ -198,7 +212,7 @@ class Clustering {
    /**
     * Method to get workers
     *
     * @return {Cluster.Worker[]} Workers
     * @return Workers
     */
    getWorkers() {
        return this._workers;
@@ -207,7 +221,7 @@ class Clustering {
    /**
     * Method to get the status of the cluster
     *
     * @return {number} Status code
     * @return Status code
     */
    getStatus() {
        return this._status;
@@ -216,7 +230,7 @@ class Clustering {
    /**
     * Method to return whether this is the master process
     *
     * @return {boolean} - True if master, false otherwise
     * @return - True if master, false otherwise
     */
    isMaster() {
        return this._index === undefined;
@@ -225,7 +239,7 @@ class Clustering {
    /**
     * Method to get the index of the worker
     *
     * @return {number|undefined} Worker index, undefined if it's master
     * @return Worker index, undefined if it's master
     */
    getIndex() {
        return this._index;
@@ -234,11 +248,10 @@ class Clustering {
    /**
     * Method to stop the cluster
     *
     * @param {string} signal - Set internally when processes are killed by signal
     * @return {undefined}
     * @param signal - Set internally when processes are killed by signal
     */
    stop(signal) {
        if (!cluster.isMaster) {
    stop(signal?: string) {
        if (!cluster.isPrimary) {
            if (this._exitCb) {
                return this._exitCb(this, signal);
            }
@@ -251,13 +264,17 @@ class Clustering {
            }
            this._workersTimeout[i] = setTimeout(() => {
                // Kill the worker if the sigterm was ignored or takes too long
                process.kill(worker.process.pid, 'SIGKILL');
                if (worker.process.pid) {
                    process.kill(worker.process.pid, 'SIGKILL');
                }
            }, this._shutdownTimeout);
            // Send sigterm to the process, allowing it to release resources
            // and save some state
            return process.kill(worker.process.pid, 'SIGTERM');
            if (worker.process.pid) {
                return process.kill(worker.process.pid, 'SIGTERM');
            } else {
                return true;
            }
        });
    }
}

module.exports = Clustering;
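A minimal start/stop sketch for the class above (the worker body and cluster size are illustrative; werelogs.Logger is instantiated as elsewhere in the codebase, and the default export is reached through .default when requiring the compiled module):

const werelogs = require('werelogs');
const Clustering = require('./lib/Clustering').default;

const logger = new werelogs.Logger('ClusteringDemo');
const clustering = new Clustering(4, logger); // 4 workers, default 5s shutdown timeout

clustering
    .onExit(c => logger.info('cluster exited', { status: c.getStatus() }))
    .start(c => {
        // runs in each worker once the master has sent its 'setup' message
        logger.info('worker ready', { index: c.getIndex() });
    });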
lib/algos/cache/LRUCache.js (new file, vendored, 167 lines)
@@ -0,0 +1,167 @@
const assert = require('assert');

/**
 * @class
 * @classdesc Implements a key-value in-memory cache with a capped
 * number of items and a Least Recently Used (LRU) strategy for
 * eviction.
 */
class LRUCache {
    /**
     * @constructor
     * @param {number} maxEntries - maximum number of entries kept in
     * the cache
     */
    constructor(maxEntries) {
        assert(maxEntries >= 1);
        this._maxEntries = maxEntries;
        this.clear();
    }

    /**
     * Add or update the value associated to a key in the cache,
     * making it the most recently accessed for eviction purpose.
     *
     * @param {string} key - key to add
     * @param {object} value - associated value (can be of any type)
     * @return {boolean} true if the cache contained an entry with
     * this key, false if it did not
     */
    add(key, value) {
        let entry = this._entryMap[key];
        if (entry) {
            entry.value = value;
            // make the entry the most recently used by re-pushing it
            // to the head of the LRU list
            this._lruRemoveEntry(entry);
            this._lruPushEntry(entry);
            return true;
        }
        if (this._entryCount === this._maxEntries) {
            // if the cache is already full, abide by the LRU strategy
            // and remove the least recently used entry from the cache
            // before pushing the new entry
            this._removeEntry(this._lruTail);
        }
        entry = { key, value };
        this._entryMap[key] = entry;
        this._entryCount += 1;
        this._lruPushEntry(entry);
        return false;
    }

    /**
     * Get the value associated to a key in the cache, making it the
     * most recently accessed for eviction purpose.
     *
     * @param {string} key - key of which to fetch the associated value
     * @return {object|undefined} - returns the associated value if
     * exists in the cache, or undefined if not found - either if the
     * key was never added or if it has been evicted from the cache.
     */
    get(key) {
        const entry = this._entryMap[key];
        if (entry) {
            // make the entry the most recently used by re-pushing it
            // to the head of the LRU list
            this._lruRemoveEntry(entry);
            this._lruPushEntry(entry);
            return entry.value;
        }
        return undefined;
    }

    /**
     * Remove an entry from the cache if exists
     *
     * @param {string} key - key to remove
     * @return {boolean} true if an entry has been removed, false if
     * there was no entry with this key in the cache - either if the
     * key was never added or if it has been evicted from the cache.
     */
    remove(key) {
        const entry = this._entryMap[key];
        if (entry) {
            this._removeEntry(entry);
            return true;
        }
        return false;
    }

    /**
     * Get the current number of cached entries
     *
     * @return {number} current number of cached entries
     */
    count() {
        return this._entryCount;
    }

    /**
     * Remove all entries from the cache
     *
     * @return {undefined}
     */
    clear() {
        this._entryMap = {};
        this._entryCount = 0;
        this._lruHead = null;
        this._lruTail = null;
    }

    /**
     * Push an entry to the front of the LRU list, making it the most
     * recently accessed
     *
     * @param {object} entry - entry to push
     * @return {undefined}
     */
    _lruPushEntry(entry) {
        /* eslint-disable no-param-reassign */
        entry._lruNext = this._lruHead;
        entry._lruPrev = null;
        if (this._lruHead) {
            this._lruHead._lruPrev = entry;
        }
        this._lruHead = entry;
        if (!this._lruTail) {
            this._lruTail = entry;
        }
        /* eslint-enable no-param-reassign */
    }

    /**
     * Remove an entry from the LRU list
     *
     * @param {object} entry - entry to remove
     * @return {undefined}
     */
    _lruRemoveEntry(entry) {
        /* eslint-disable no-param-reassign */
        if (entry._lruPrev) {
            entry._lruPrev._lruNext = entry._lruNext;
        } else {
            this._lruHead = entry._lruNext;
        }
        if (entry._lruNext) {
            entry._lruNext._lruPrev = entry._lruPrev;
        } else {
            this._lruTail = entry._lruPrev;
        }
        /* eslint-enable no-param-reassign */
    }

    /**
     * Helper function to remove an existing entry from the cache
     *
     * @param {object} entry - cache entry to remove
     * @return {undefined}
     */
    _removeEntry(entry) {
        this._lruRemoveEntry(entry);
        delete this._entryMap[entry.key];
        this._entryCount -= 1;
    }
}

module.exports = LRUCache;
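A short usage sketch of the cache above, tracing the eviction order:

const LRUCache = require('./lib/algos/cache/LRUCache');

const cache = new LRUCache(2); // capacity of two entries
cache.add('a', 1);
cache.add('b', 2);
cache.get('a');  // touching 'a' makes 'b' the least recently used
cache.add('c', 3); // cache is full -> evicts 'b'

console.log(cache.get('b'));                  // undefined (evicted)
console.log(cache.get('a'), cache.get('c'));  // 1 3
console.log(cache.count());                   // 2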
lib/algos/heap/Heap.ts (new file, 124 lines)
@@ -0,0 +1,124 @@
export enum HeapOrder {
    Min = -1,
    Max = 1,
}

export enum CompareResult {
    LT = -1,
    EQ = 0,
    GT = 1,
}

export type CompareFunction = (x: any, y: any) => CompareResult;

export class Heap {
    size: number;
    _maxSize: number;
    _order: HeapOrder;
    _heap: any[];
    _cmpFn: CompareFunction;

    constructor(size: number, order: HeapOrder, cmpFn: CompareFunction) {
        this.size = 0;
        this._maxSize = size;
        this._order = order;
        this._cmpFn = cmpFn;
        this._heap = new Array<any>(this._maxSize);
    }

    _parent(i: number): number {
        return Math.floor((i - 1) / 2);
    }

    _left(i: number): number {
        return Math.floor((2 * i) + 1);
    }

    _right(i: number): number {
        return Math.floor((2 * i) + 2);
    }

    _shouldSwap(childIdx: number, parentIdx: number): boolean {
        return this._cmpFn(this._heap[childIdx], this._heap[parentIdx]) as number === this._order as number;
    }

    _swap(i: number, j: number) {
        const tmp = this._heap[i];
        this._heap[i] = this._heap[j];
        this._heap[j] = tmp;
    }

    _heapify(i: number) {
        const l = this._left(i);
        const r = this._right(i);
        let c = i;

        if (l < this.size && this._shouldSwap(l, c)) {
            c = l;
        }

        if (r < this.size && this._shouldSwap(r, c)) {
            c = r;
        }

        if (c != i) {
            this._swap(c, i);
            this._heapify(c);
        }
    }

    add(item: any): any {
        if (this.size >= this._maxSize) {
            return new Error('Max heap size reached');
        }

        ++this.size;
        let c = this.size - 1;
        this._heap[c] = item;

        while (c > 0) {
            if (!this._shouldSwap(c, this._parent(c))) {
                return null;
            }

            this._swap(c, this._parent(c));
            c = this._parent(c);
        }

        return null;
    };

    remove(): any {
        if (this.size <= 0) {
            return null;
        }

        const ret = this._heap[0];
        this._heap[0] = this._heap[this.size - 1];
        this._heapify(0);
        --this.size;

        return ret;
    };

    peek(): any {
        if (this.size <= 0) {
            return null;
        }

        return this._heap[0];
    };
}

export class MinHeap extends Heap {
    constructor(size: number, cmpFn: CompareFunction) {
        super(size, HeapOrder.Min, cmpFn);
    }
}

export class MaxHeap extends Heap {
    constructor(size: number, cmpFn: CompareFunction) {
        super(size, HeapOrder.Max, cmpFn);
    }
}
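A usage sketch for the heap above: the compare function must return one of the CompareResult values, and a MinHeap pops the smallest element first (requiring the compiled module from plain JavaScript):

const { MinHeap, CompareResult } = require('./lib/algos/heap/Heap');

const cmp = (x, y) => {
    if (x < y) return CompareResult.LT;
    if (x > y) return CompareResult.GT;
    return CompareResult.EQ;
};

const heap = new MinHeap(8, cmp); // capacity 8
[5, 1, 4].forEach(n => heap.add(n));

console.log(heap.peek());   // 1
console.log(heap.remove()); // 1
console.log(heap.remove()); // 4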
lib/algos/list/Extension.js (new file, 124 lines)
@@ -0,0 +1,124 @@
'use strict'; // eslint-disable-line strict

const { FILTER_SKIP, SKIP_NONE } = require('./tools');

// Use a heuristic to amortize the cost of JSON
// serialization/deserialization only on the largest metadata, where the
// potential for size reduction is high, considering the bulk of the
// blob size is due to the "location" field containing a large number
// of MPU parts.
//
// Measured on some standard metadata:
// - 100 parts -> 9K blob
// - 2000 parts -> 170K blob
//
// Using a 10K threshold should lead to a worst case of about 10M to
// store a raw listing of 1000 entries; even with some growth
// multiplication factor due to some internal memory duplication, it
// should stay within reasonable memory limits.

const TRIM_METADATA_MIN_BLOB_SIZE = 10000;

/**
 * Base class of listing extensions.
 */
class Extension {
    /**
     * This takes a list of parameters and a logger as the inputs.
     * Derivatives should have their own format regarding parameters.
     *
     * @param {Object} parameters - listing parameter from applications
     * @param {RequestLogger} logger - the logger
     * @constructor
     */
    constructor(parameters, logger) {
        // inputs
        this.parameters = parameters;
        this.logger = logger;
        // listing results
        this.res = undefined;
        this.keys = 0;
    }

    /**
     * Filters out non-requested optional fields from the value. This function
     * shall be applied on any value that is to be returned as part of the
     * result of a listing extension.
     *
     * @param {String} value - The JSON value of a listing item
     *
     * @return {String} The value that may have been trimmed of some
     * heavy unused fields, or left untouched (depending on size
     * heuristics)
     */
    trimMetadata(value) {
        let ret = undefined;
        if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
            try {
                ret = JSON.parse(value);
                delete ret.location;
                ret = JSON.stringify(ret);
            } catch (e) {
                // Prefer returning unfiltered data rather than
                // stopping the service in case of parsing failure.
                // The risk of this approach is a potential
                // reproduction of MD-692, where too much memory is
                // used by repd.
                this.logger.warn(
                    'Could not parse Object Metadata while listing',
                    { err: e.toString() });
            }
        }
        return ret || value;
    }

    /**
     * Generates listing parameters that metadata can understand from the input
     * parameters. What metadata can understand: gt, gte, lt, lte, limit, keys,
     * values, reverse; we use the same set of parameters as levelup's.
     * Derivatives should have their own conversion of their original listing
     * parameters into metadata listing parameters.
     *
     * @return {object} - listing parameters for metadata
     */
    genMDParams() {
        return {};
    }

    /**
     * This function receives a data entry from metadata and decides if it will
     * include the entry in the listing result or not.
     *
     * @param {object} entry - a listing entry from metadata
     *                         expected format: { key, value }
     * @return {number} - result of filtering the entry:
     *                    > 0: entry is accepted and included in the result
     *                    = 0: entry is accepted but not included (skipping)
     *                    < 0: entry is not accepted, listing should finish
     */
    filter(entry) {
        return entry ? FILTER_SKIP : FILTER_SKIP;
    }

    /**
     * Provides the insight into why filter is skipping an entry. This could be
     * because it is skipping a range of delimited keys or a range of specific
     * versions when doing master version listing.
     *
     * @return {string} - the insight: a common prefix or a master key,
     * or SKIP_NONE if there is no insight
     */
    skipping() {
        return SKIP_NONE;
    }

    /**
     * Get the listing results. Format depends on derivatives' specific logic.
     * @return {Array} - The listed elements
     */
    result() {
        return this.res;
    }
}

module.exports.default = Extension;
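A sketch of the derivation contract described above: a trivial extension that accepts every entry up to a fixed count (the class name and limit parameter are illustrative, not part of the library):

const Extension = require('./lib/algos/list/Extension').default;
const { FILTER_ACCEPT, FILTER_END } = require('./lib/algos/list/tools');

class FirstN extends Extension {
    constructor(parameters, logger) {
        super(parameters, logger);
        this.res = [];
        this.limit = (parameters && parameters.limit) || 3;
    }

    filter(entry) {
        if (this.keys >= this.limit) {
            return FILTER_END; // stop the listing
        }
        // trimMetadata() drops the heavy 'location' field on large blobs
        this.res.push({ key: entry.key, value: this.trimMetadata(entry.value) });
        this.keys += 1;
        return FILTER_ACCEPT; // entry included in the result
    }
}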
@@ -1,7 +1,10 @@
'use strict'; // eslint-disable-line strict

const checkLimit = require('./tools').checkLimit;
const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
    FILTER_END, FILTER_ACCEPT } = require('./tools');
const DEFAULT_MAX_KEYS = 1000;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

function numberDefault(num, defaultNum) {
    const parsedNum = Number.parseInt(num, 10);
@@ -17,9 +20,12 @@ class MultipartUploads {
     * Init and check parameters
     * @param {Object} params - The parameters you sent to DBD
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     * @return {undefined}
     */
    constructor(params, logger) {
    constructor(params, logger, vFormat) {
        this.params = params;
        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        this.CommonPrefixes = [];
        this.Uploads = [];
        this.IsTruncated = false;
@@ -32,6 +38,44 @@ class MultipartUploads {
        this.delimiter = params.delimiter;
        this.splitter = params.splitter;
        this.logger = logger;

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
            },
        }[this.vFormat]);
    }

    genMDParamsV0() {
        const params = {};
        if (this.params.keyMarker) {
            params.gt = `overview${this.params.splitter}` +
                `${this.params.keyMarker}${this.params.splitter}`;
            if (this.params.uploadIdMarker) {
                params.gt += `${this.params.uploadIdMarker}`;
            }
            // advance so that lower bound does not include the supplied
            // markers
            params.gt = inc(params.gt);
        }
        if (this.params.prefix) {
            if (params.gt === undefined || this.params.prefix > params.gt) {
                delete params.gt;
                params.gte = this.params.prefix;
            }
            params.lt = inc(this.params.prefix);
        }
        return params;
    }

    genMDParamsV1() {
        const v0params = this.genMDParamsV0();
        return listingParamsMasterKeysV0ToV1(v0params);
    }

    /**
@@ -78,19 +122,27 @@ class MultipartUploads {
        }
    }

    getObjectKeyV0(obj) {
        return obj.key;
    }

    getObjectKeyV1(obj) {
        return obj.key.slice(DbPrefixes.Master.length);
    }

    /**
     * This function applies filter on each element
     * @param {String} obj - The key and value of the element
     * @return {Boolean} - True: Continue, False: Stop
     * @return {number} - > 0: Continue, < 0: Stop
     */
    filter(obj) {
        // Check first in case of maxkeys = 0
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 => IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return false;
            return FILTER_END;
        }
        const key = obj.key;
        const key = this.getObjectKey(obj);
        const value = obj.value;
        if (this.delimiter) {
            const mpuPrefixSlice = `overview${this.splitter}`.length;
@@ -107,7 +159,11 @@ class MultipartUploads {
        } else {
            this.addUpload(value);
        }
        return true;
        return FILTER_ACCEPT;
    }

    skipping() {
        return '';
    }

    /**
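To illustrate the V0 parameter generation added above (splitter and marker values are made up; inc() bumps the code point of the last character, so the lower bound excludes the supplied markers):

const { MultipartUploads } = require('./lib/algos/list/MPU');
const logger = new (require('werelogs').Logger)('MPUDemo');

const listing = new MultipartUploads({
    splitter: '..',
    keyMarker: 'photos/cat.jpg',
    uploadIdMarker: 'abcd',
}, logger);

console.log(listing.genMDParams());
// -> { gt: 'overview..photos/cat.jpg..abce' }
//    i.e. inc('overview..photos/cat.jpg..abcd'), last char bumped by one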
@@ -1,12 +1,14 @@
'use strict'; // eslint-disable-line strict

const checkLimit = require('./tools').checkLimit;
const Extension = require('./Extension').default;

const { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
const DEFAULT_MAX_KEYS = 10000;

/**
 * Class of an extension doing the simple listing
 */
class List {
class List extends Extension {
    /**
     * Constructor
     * Set the logger and the res
@@ -15,30 +17,100 @@ class List {
     * @return {undefined}
     */
    constructor(parameters, logger) {
        this.logger = logger;
        super(parameters, logger);
        this.res = [];
        if (parameters) {
            this.maxKeys = checkLimit(parameters.maxKeys, DEFAULT_MAX_KEYS);
            this.filterKey = parameters.filterKey;
            this.filterKeyStartsWith = parameters.filterKeyStartsWith;
        } else {
            this.maxKeys = DEFAULT_MAX_KEYS;
        }
        this.keys = 0;
    }

    genMDParams() {
        const params = this.parameters ? {
            gt: this.parameters.gt,
            gte: this.parameters.gte || this.parameters.start,
            lt: this.parameters.lt,
            lte: this.parameters.lte || this.parameters.end,
            keys: this.parameters.keys,
            values: this.parameters.values,
        } : {};
        Object.keys(params).forEach(key => {
            if (params[key] === null || params[key] === undefined) {
                delete params[key];
            }
        });
        return params;
    }

    /**
     * Filters on the customAttributes sub-object if present
     *
     * @param {String} value - The JSON value of a listing item
     *
     * @return {Boolean} Returns true if it matches, else false.
     */
    customFilter(value) {
        let _value;
        try {
            _value = JSON.parse(value);
        } catch (e) {
            // Prefer returning unfiltered data rather than
            // stopping the service in case of parsing failure.
            // The risk of this approach is a potential
            // reproduction of MD-692, where too much memory is
            // used by repd.
            this.logger.warn(
                'Could not parse Object Metadata while listing',
                { err: e.toString() });
            return false;
        }
        if (_value.customAttributes !== undefined) {
            for (const key of Object.keys(_value.customAttributes)) {
                if (this.filterKey !== undefined &&
                    key === this.filterKey) {
                    return true;
                }
                if (this.filterKeyStartsWith !== undefined &&
                    key.startsWith(this.filterKeyStartsWith)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Function applied on each element
     * Just add it to the array
     * @param {Object} elem - The data from the database
     * @return {Boolean} - True = continue the stream
     * @return {number} - > 0 : continue listing
     *                    < 0 : listing done
     */
    filter(elem) {
        // Check first in case of maxkeys <= 0
        if (this.keys >= this.maxKeys) {
            return false;
            return FILTER_END;
        }
        if ((this.filterKey !== undefined ||
            this.filterKeyStartsWith !== undefined) &&
            typeof elem === 'object' &&
            !this.customFilter(elem.value)) {
            return FILTER_SKIP;
        }
        if (typeof elem === 'object') {
            this.res.push({
                key: elem.key,
                value: this.trimMetadata(elem.value),
            });
        } else {
            this.res.push(elem);
        }
        this.res.push(elem);
        this.keys++;
        return true;
        return FILTER_ACCEPT;
    }

    /**
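A sketch of the custom-attribute filtering added above: only entries whose metadata carries a matching customAttributes key are accepted (the attribute names and keys are invented for the example):

const { List } = require('./lib/algos/list/basic');
const logger = new (require('werelogs').Logger)('ListDemo');

const listing = new List({ filterKeyStartsWith: 'x-custom-' }, logger);

const hit = { key: 'a',
    value: JSON.stringify({ customAttributes: { 'x-custom-tag': '1' } }) };
const miss = { key: 'b',
    value: JSON.stringify({ customAttributes: { other: '1' } }) };

listing.filter(hit);  // FILTER_ACCEPT -> pushed to listing.res
listing.filter(miss); // FILTER_SKIP   -> ignored

console.log(listing.result().length); // 1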
@@ -1,198 +0,0 @@
'use strict'; // eslint-disable-line strict

/**
 * Find the next delimiter in the path
 *
 * @param {string} key - path of the object
 * @param {string} delimiter - string to find
 * @param {number} index - index to start at
 * @return {number} delimiterIndex - returns -1 in case no delimiter is found
 */
function nextDelimiter(key, delimiter, index) {
    return key.indexOf(delimiter, index);
}

/**
 * Find the common prefix in the path
 *
 * @param {String} key - path of the object
 * @param {String} delimiter - separator
 * @param {Number} delimiterIndex - 'folder' index in the path
 * @return {String} - CommonPrefix
 */
function getCommonPrefix(key, delimiter, delimiterIndex) {
    return key.substring(0, delimiterIndex + delimiter.length);
}

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
class Delimiter {
    /**
     * Create a new Delimiter instance
     * @constructor
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.delimiter - delimiter per amazon format
     * @param {String} parameters.start - prefix per amazon format
     * @param {String} [parameters.gt] - NextMarker per amazon format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     */
    constructor(parameters) {
        this.CommonPrefixes = [];
        this.Contents = [];
        this.IsTruncated = false;
        this.NextMarker = parameters.gt;
        this.keys = 0;

        this.delimiter = parameters.delimiter;
        this.prefix = parameters.start;
        this.maxKeys = parameters.maxKeys || 1000;
        if (this.delimiter !== undefined &&
            this.NextMarker !== undefined &&
            this.NextMarker.startsWith(this.prefix || '')) {
            const nextDelimiterIndex =
                this.NextMarker.indexOf(this.delimiter,
                    this.prefix
                        ? this.prefix.length
                        : 0);
            this.NextMarker =
                this.NextMarker.slice(0, nextDelimiterIndex +
                    this.delimiter.length);
        }
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys() {
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 -> IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return true;
        }
        return false;
    }

    /**
     * Add a (key, value) tuple to the listing
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {String} key - The key to add
     * @param {String} value - The value of the key
     * @return {Boolean} - indicates if iteration should continue
     */
    addContents(key, value) {
        if (this._reachedMaxKeys()) {
            return false;
        }
        const tmp = JSON.parse(value);
        this.Contents.push({
            key,
            value: {
                Size: tmp['content-length'],
                ETag: tmp['content-md5'],
                LastModified: tmp['last-modified'],
                Owner: {
                    DisplayName: tmp['owner-display-name'],
                    ID: tmp['owner-id'],
                },
                StorageClass: tmp['x-amz-storage-class'],
                Initiated: tmp.initiated,
                Initiator: tmp.initiator,
                EventualStorageBucket: tmp.eventualStorageBucket,
                partLocations: tmp.partLocations,
                creationDate: tmp.creationDate,
            },
        });
        this.NextMarker = key;
        ++this.keys;
        return true;
    }

    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {Boolean} - indicates if iteration should continue
     */
    filter(obj) {
        const key = obj.key;
        const value = obj.value;
        if ((this.prefix && !key.startsWith(this.prefix))
            || (typeof this.NextMarker === 'string' &&
                key <= this.NextMarker)) {
            return true;
        }
        if (this.delimiter) {
            const baseIndex = this.prefix ? this.prefix.length : 0;
            const delimiterIndex = nextDelimiter(key,
                this.delimiter,
                baseIndex);
            if (delimiterIndex === -1) {
                return this.addContents(key, value);
            }
            return this.addCommonPrefix(key, delimiterIndex);
        }
        return this.addContents(key, value);
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} key - object name
     * @param {Number} index - after prefix starting point
     * @return {Boolean} - indicates if iteration should continue
     */
    addCommonPrefix(key, index) {
        const commonPrefix = getCommonPrefix(key, this.delimiter, index);
        if (this.CommonPrefixes.indexOf(commonPrefix) === -1
            && this.NextMarker !== commonPrefix) {
            if (this._reachedMaxKeys()) {
                return false;
            }
            this.CommonPrefixes.push(commonPrefix);
            this.NextMarker = commonPrefix;
            ++this.keys;
        }
        return true;
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done, doesn't show a NextMarker field if the output
     * isn't truncated
     * @return {Object} - following amazon format
     */
    result() {
        /* NextMarker is only provided when delimiter is used.
         * specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        return {
            CommonPrefixes: this.CommonPrefixes,
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
            NextMarker: (this.IsTruncated && this.delimiter)
                ? this.NextMarker
                : undefined,
            Delimiter: this.delimiter,
        };
    }
}

module.exports = { Delimiter };
lib/algos/list/delimiter.ts (new file, 356 lines)
@@ -0,0 +1,356 @@
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;
const { inc, listingParamsMasterKeysV0ToV1,
    FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

export interface FilterState {
    id: number,
};

export interface FilterReturnValue {
    FILTER_ACCEPT,
    FILTER_SKIP,
    FILTER_END,
};

export const enum DelimiterFilterStateId {
    NotSkipping = 1,
    SkippingPrefix = 2,
};

export interface DelimiterFilterState_NotSkipping extends FilterState {
    id: DelimiterFilterStateId.NotSkipping,
};

export interface DelimiterFilterState_SkippingPrefix extends FilterState {
    id: DelimiterFilterStateId.SkippingPrefix,
    prefix: string;
};

type KeyHandler = (key: string, value: string) => FilterReturnValue;

type ResultObject = {
    CommonPrefixes: string[];
    Contents: {
        key: string;
        value: string;
    }[];
    IsTruncated: boolean;
    Delimiter ?: string;
    NextMarker ?: string;
    NextContinuationToken ?: string;
};

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
export class Delimiter extends Extension {

    state: FilterState;
    keyHandlers: { [id: number]: KeyHandler };

    /**
     * Create a new Delimiter instance
     * @constructor
     * @param {Object} parameters - listing parameters
     * @param {String} [parameters.delimiter] - delimiter per amazon
     * format
     * @param {String} [parameters.prefix] - prefix per amazon
     * format
     * @param {String} [parameters.marker] - marker per amazon
     * format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     * @param {Boolean} [parameters.v2] - indicates whether v2
     * format
     * @param {String} [parameters.startAfter] - marker per amazon
     * format
     * @param {String} [parameters.continuationToken] - obfuscated amazon
     * token
     * @param {RequestLogger} logger - The logger of the
     * request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger);
        // original listing parameters
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.maxKeys = parameters.maxKeys || 1000;

        if (parameters.v2) {
            this.marker = parameters.continuationToken || parameters.startAfter;
        } else {
            this.marker = parameters.marker;
        }
        this.nextMarker = this.marker;

        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // results
        this.CommonPrefixes = [];
        this.Contents = [];
        this.IsTruncated = false;
        this.keyHandlers = {};

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);

        // if there is a delimiter, we may skip ranges by prefix,
        // hence using the NotSkippingPrefix flavor that checks the
        // subprefix up to the delimiter for the NotSkipping state
        if (this.delimiter) {
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefix.bind(this));
        } else {
            // listing without a delimiter never has to skip over any
            // prefix -> use NeverSkipping flavor for the NotSkipping
            // state
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NeverSkipping.bind(this));
        }
        this.setKeyHandler(
            DelimiterFilterStateId.SkippingPrefix,
            this.keyHandler_SkippingPrefix.bind(this));

        this.state = <DelimiterFilterState_NotSkipping> {
            id: DelimiterFilterStateId.NotSkipping,
        };
    }

    genMDParamsV0() {
        const params: { gt ?: string, gte ?: string, lt ?: string } = {};
        if (this.prefix) {
            params.gte = this.prefix;
            params.lt = inc(this.prefix);
        }
        if (this.marker && this.delimiter) {
            const commonPrefix = this.getCommonPrefix(this.marker);
            if (commonPrefix) {
                const afterPrefix = inc(commonPrefix);
                if (!params.gte || afterPrefix > params.gte) {
                    params.gte = afterPrefix;
                }
            }
        }
        if (this.marker && (!params.gte || this.marker >= params.gte)) {
            delete params.gte;
            params.gt = this.marker;
        }
        return params;
    }

    genMDParamsV1() {
        const params = this.genMDParamsV0();
        return listingParamsMasterKeysV0ToV1(params);
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 -> IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return true;
        }
        return false;
    }

    /**
     * Add a (key, value) tuple to the listing
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {String} key - The key to add
     * @param {String} value - The value of the key
     * @return {number} - indicates if iteration should continue
     */
    addContents(key: string, value: string): void {
        this.Contents.push({ key, value: this.trimMetadata(value) });
        ++this.keys;
        this.nextMarker = key;
    }

    getCommonPrefix(key: string): string | undefined {
        if (!this.delimiter) {
            return undefined;
        }
        const baseIndex = this.prefix ? this.prefix.length : 0;
        const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
        if (delimiterIndex === -1) {
            return undefined;
        }
        return key.substring(0, delimiterIndex + this.delimiter.length);
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} commonPrefix - common prefix to add
     * @param {String} key - full key starting with commonPrefix
     * @return {Boolean} - indicates if iteration should continue
     */
    addCommonPrefix(commonPrefix: string, key: string): void {
        // add the new prefix to the list
        this.CommonPrefixes.push(commonPrefix);
        ++this.keys;
        this.nextMarker = commonPrefix;
    }

    addCommonPrefixOrContents(key: string, value: string): string | undefined {
        // add the subprefix to the common prefixes if the key has the delimiter
        const commonPrefix = this.getCommonPrefix(key);
        if (commonPrefix) {
            this.addCommonPrefix(commonPrefix, key);
            return commonPrefix;
        }
        this.addContents(key, value);
        return undefined;
    }

    getObjectKeyV0(obj: { key: string }): string {
        return obj.key;
    }

    getObjectKeyV1(obj: { key: string }): string {
        return obj.key.slice(DbPrefixes.Master.length);
    }

    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj: { key: string, value: string }): FilterReturnValue {
        const key = this.getObjectKey(obj);
        const value = obj.value;

        return this.handleKey(key, value);
    }

    setState(state: FilterState): void {
        this.state = state;
    }

    setKeyHandler(stateId: number, keyHandler: KeyHandler): void {
        this.keyHandlers[stateId] = keyHandler;
    }

    handleKey(key: string, value: string): FilterReturnValue {
        return this.keyHandlers[this.state.id](key, value);
    }

    keyHandler_NeverSkipping(key: string, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.addContents(key, value);
        return FILTER_ACCEPT;
    }

    keyHandler_NotSkippingPrefix(key: string, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        const commonPrefix = this.addCommonPrefixOrContents(key, value);
        if (commonPrefix) {
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
        }
        return FILTER_ACCEPT;
    }

    keyHandler_SkippingPrefix(key: string, value: string): FilterReturnValue {
        const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
        if (key.startsWith(prefix)) {
            return FILTER_SKIP;
        }
        this.setState(<DelimiterFilterState_NotSkipping> {
            id: DelimiterFilterStateId.NotSkipping,
        });
        return this.handleKey(key, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterFilterStateId.SkippingPrefix:
            const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
            return prefix;

        default:
            return SKIP_NONE;
        }
    }

    skippingV0() {
        return this.skippingBase();
    }

    skippingV1() {
        const skipTo = this.skippingBase();
        if (skipTo === SKIP_NONE) {
            return SKIP_NONE;
        }
        return DbPrefixes.Master + skipTo;
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done, doesn't show a NextMarker field if the output
     * isn't truncated
     * @return {Object} - following amazon format
     */
    result(): ResultObject {
        /* NextMarker is only provided when delimiter is used.
         * specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        const result: ResultObject = {
            CommonPrefixes: this.CommonPrefixes,
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
            Delimiter: this.delimiter,
        };
        if (this.parameters.v2) {
            result.NextContinuationToken = this.IsTruncated
                ? this.nextMarker : undefined;
        } else {
            result.NextMarker = (this.IsTruncated && this.delimiter)
                ? this.nextMarker : undefined;
        }
        return result;
    }
}
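To see the prefix-skipping state machine above in action, a small sketch feeding three keys through filter() with '/' as the delimiter (key names invented; this assumes the compiled module is required from JavaScript):

const { Delimiter } = require('./lib/algos/list/delimiter');
const logger = new (require('werelogs').Logger)('DelimiterDemo');

const listing = new Delimiter({ delimiter: '/' }, logger);

listing.filter({ key: 'photos/cat.jpg', value: '{}' }); // FILTER_ACCEPT: adds 'photos/',
                                                        // transitions to SkippingPrefix
listing.filter({ key: 'photos/dog.jpg', value: '{}' }); // FILTER_SKIP: same prefix
listing.filter({ key: 'readme.txt', value: '{}' });     // FILTER_ACCEPT: back to NotSkipping,
                                                        // listed as Contents

console.log(listing.result());
// -> { CommonPrefixes: ['photos/'],
//      Contents: [{ key: 'readme.txt', value: '{}' }],
//      IsTruncated: false, Delimiter: '/', NextMarker: undefined }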
@@ -1,84 +0,0 @@
'use strict'; // eslint-disable-line strict

const Delimiter = require('./delimiter').Delimiter;
const VSUtils = require('../../versioning/utils').VersioningUtils;

/**
 * Extended delimiter class for versioning.
 */
class DelimiterMaster extends Delimiter {
    /**
     * Overriding the base function to extract the versionId of the entry.
     *
     * @param {string} key - the key of the entry
     * @param {object} value - the value of the entry
     * @return {undefined}
     */
    addContents(key, value) {
        this.Contents.push({
            key,
            value: {
                Size: value['content-length'],
                ETag: value['content-md5'],
                LastModified: value['last-modified'],
                // <versioning>
                VersionId: VSUtils.getts(value),
                // </versioning>
                Owner: {
                    DisplayName: value['owner-display-name'],
                    ID: value['owner-id'],
                },
                StorageClass: value['x-amz-storage-class'],
                Initiated: value.initiated,
                Initiator: value.initiator,
                EventualStorageBucket: value.eventualStorageBucket,
                partLocations: value.partLocations,
                creationDate: value.creationDate,
            },
        });
        this.NextMarker = key;
        ++this.keys;
    }

    /**
     * Overriding the filter function that formats the
     * listing results based on the listing algorithm.
     *
     * @param {object} obj - metadata entry in the form of { key, value }
     * @return {boolean} - continue filtering or return the formatted list
     */
    filter(obj) {
        // Check first in case of maxkeys <= 0
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 => IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return false;
        }
        // <versioning>
        const value = VSUtils.decodeVersion(obj.value);
        // ignore it if the master version is a delete marker
        if (VSUtils.isDeleteMarker(value)) {
            return true;
        }
        // use the original object name for delimitering to work correctly
        const key = VSUtils.getObjectNameFromMasterKey(obj.key);
        // </versioning>
        if (this.delimiter) {
            const commonPrefixIndex =
                key.indexOf(this.delimiter, this.searchStart);
            if (commonPrefixIndex === -1) {
                this.addContents(key, value);
            } else {
                this.addCommonPrefix(
                    key.substring(0, commonPrefixIndex + this.delimLen));
            }
        } else {
            this.addContents(key, value);
        }
        return true;
    }
}

module.exports = {
    DelimiterMaster,
};
lib/algos/list/delimiterMaster.ts (new file, 190 lines)
@@ -0,0 +1,190 @@
import {
    Delimiter,
    FilterState,
    FilterReturnValue,
    DelimiterFilterStateId,
    DelimiterFilterState_NotSkipping,
    DelimiterFilterState_SkippingPrefix,
} from './delimiter';
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, FILTER_END } = require('./tools');

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst;

const enum DelimiterMasterFilterStateId {
    SkippingVersionsV0 = 101,
    WaitVersionAfterPHDV0 = 102,
};

interface DelimiterMasterFilterState_SkippingVersionsV0 extends FilterState {
    id: DelimiterMasterFilterStateId.SkippingVersionsV0,
    masterKey: string,
};

interface DelimiterMasterFilterState_WaitVersionAfterPHDV0 extends FilterState {
    id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
    masterKey: string,
};

/**
 * Handle object listing with parameters. This extends the base class Delimiter
 * to return the raw master versions of existing objects.
 */
export class DelimiterMaster extends Delimiter {

    /**
     * Delimiter listing of master versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.delimiter - delimiter per amazon format
     * @param {String} parameters.prefix - prefix per amazon format
     * @param {String} parameters.marker - marker per amazon format
     * @param {Number} parameters.maxKeys - number of keys to list
     * @param {Boolean} parameters.v2 - indicates whether v2 format
     * @param {String} parameters.startAfter - marker per amazon v2 format
     * @param {String} parameters.continuationToken - obfuscated amazon token
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                skipping: this.skippingV1,
            },
        }[this.vFormat]);

        if (this.vFormat === BucketVersioningKeyFormat.v0) {
            // override Delimiter's implementation of NotSkipping for
            // DelimiterMaster logic (skipping versions and special
            // handling of delete markers and PHDs)
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefixNorVersionsV0.bind(this));

            // add extra state handlers specific to DelimiterMaster with v0 format
            this.setKeyHandler(
                DelimiterMasterFilterStateId.SkippingVersionsV0,
                this.keyHandler_SkippingVersionsV0.bind(this));

            this.setKeyHandler(
                DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
                this.keyHandler_WaitVersionAfterPHDV0.bind(this));

            if (this.marker) {
                // distinct initial state to include some special logic
                // before the first master key is found that does not have
                // to be checked afterwards
                this.state = <DelimiterMasterFilterState_SkippingVersionsV0> {
                    id: DelimiterMasterFilterStateId.SkippingVersionsV0,
                    masterKey: this.marker,
                };
            } else {
                this.state = <DelimiterFilterState_NotSkipping> {
                    id: DelimiterFilterStateId.NotSkipping,
                };
            }
        }
        // in v1, we can directly use Delimiter's implementation,
        // which is already set to the proper state
    }

    filter_onNewMasterKeyV0(key: string, value: string): FilterReturnValue {
        // if this master key is a delete marker, accept it without
        // adding the version to the contents
        if (Version.isDeleteMarker(value)) {
            // update the state to start skipping versions of the new master key
            this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
                id: DelimiterMasterFilterStateId.SkippingVersionsV0,
                masterKey: key,
            });
            return FILTER_ACCEPT;
        }
        if (Version.isPHD(value)) {
            // master version is a PHD version: wait for the first
            // following version that will be considered as the actual
            // master key
            this.setState(<DelimiterMasterFilterState_WaitVersionAfterPHDV0> {
                id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
                masterKey: key,
            });
            return FILTER_ACCEPT;
        }
        if (key.startsWith(DbPrefixes.Replay)) {
            // skip internal replay prefix entirely
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: DbPrefixes.Replay,
            });
            return FILTER_SKIP;
        }
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        const commonPrefix = this.addCommonPrefixOrContents(key, value);
        if (commonPrefix) {
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
            return FILTER_ACCEPT;
        }
        // update the state to start skipping versions of the new master key
        this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
            id: DelimiterMasterFilterStateId.SkippingVersionsV0,
            masterKey: key,
        });
        return FILTER_ACCEPT;
    }

    keyHandler_NotSkippingPrefixNorVersionsV0(key: string, value: string): FilterReturnValue {
        return this.filter_onNewMasterKeyV0(key, value);
    }

    keyHandler_SkippingVersionsV0(key: string, value: string): FilterReturnValue {
        /* In the SkippingVersionsV0 state, skip all version keys
         * (<key><versionIdSeparator><version>) */
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex !== -1) {
            return FILTER_SKIP;
        }
        return this.filter_onNewMasterKeyV0(key, value);
    }

    keyHandler_WaitVersionAfterPHDV0(key: string, value: string): FilterReturnValue {
        // After a PHD key is encountered, the next version key of the
        // same object, if it exists, is the new master key, hence
        // consider it as such and call 'onNewMasterKeyV0' (the test
        // 'masterKey == phdKey' is probably redundant when we already
        // know we have a versioned key, since all objects in v0 have
        // a master key, but it is kept as a safety check)
        const { masterKey: phdKey } = <DelimiterMasterFilterState_WaitVersionAfterPHDV0> this.state;
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex !== -1) {
            const masterKey = key.slice(0, versionIdIndex);
            if (masterKey === phdKey) {
                return this.filter_onNewMasterKeyV0(masterKey, value);
            }
        }
        return this.filter_onNewMasterKeyV0(key, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterMasterFilterStateId.SkippingVersionsV0: {
            const { masterKey } = <DelimiterMasterFilterState_SkippingVersionsV0> this.state;
            return masterKey + VID_SEP;
        }
        default:
            return super.skippingBase();
        }
    }
}
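As a sketch of how a caller might drive this class, assuming a hypothetical array of v0 metadata entries and a werelogs logger (all names below are illustrative, not from this diff):

// Hedged usage sketch: feed { key, value } entries through filter()
// until it signals the end, then collect the formatted result.
const { DelimiterMaster } = require('./delimiterMaster');
const { FILTER_END } = require('./tools');
const listing = new DelimiterMaster(
    { delimiter: '/', prefix: 'photos/', maxKeys: 1000 }, logger, 'v0');
for (const entry of entries) {
    if (listing.filter(entry) === FILTER_END) {
        break; // enough keys listed
    }
    // listing.skipping() may return a key or prefix the caller
    // can seek past to avoid feeding unwanted entries
}
const res = listing.result();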
@@ -1,106 +1,304 @@
'use strict'; // eslint-disable-line strict

const Delimiter = require('./delimiter').Delimiter;
const VSUtils = require('../../versioning/utils').VersioningUtils;
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
    require('./tools');

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

/**
 * Extended delimiter class for versioning.
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
class DelimiterVersions extends Delimiter {
    /**
     * Constructor of the extension
     * Init and check parameters
     * @param {Object} parameters - parameters sent to DBD
     * @param {RequestLogger} logger - werelogs request logger
     * @param {object} latestVersions - latest versions of some keys
     * @return {undefined}
     */
    constructor(parameters, logger, latestVersions) {
        super(parameters, logger);
        this.NextVersionMarker = undefined; // next version marker
        this.latestVersions = undefined; // final list of the latest versions
        this._latestVersions = latestVersions; // reserved for caching
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);
        // specific to version listing
        this.keyMarker = parameters.keyMarker;
        this.versionIdMarker = parameters.versionIdMarker;
        // internal state
        this.masterKey = undefined;
        this.masterVersionId = undefined;
        // listing results
        this.NextMarker = parameters.keyMarker;
        this.NextVersionIdMarker = undefined;
        this.inReplayPrefix = false;

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                filter: this.filterV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                filter: this.filterV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);
    }

    /**
     * Overriding the base function to not process the metadata entry here,
     * leaving the job of extracting the object's attributes to S3.
     *
     * @param {string} key - key of the entry
     * @param {string} value - value of the entry
     * @return {undefined}
     */
    addContents(key, value) {
        const components =
            VSUtils.getObjectNameAndVersionIdFromVersionKey(key);
        const objectName = components.objectName;
        const versionId = components.versionId;
        this.Contents.push({
            key: objectName,
            value,
        });
        this.NextMarker = objectName;
        this.NextVersionMarker = versionId;
        ++this.keys;
        // only include the latest versions of the keys in the resulting list
        // this is not actually used now, it's reserved for caching in future
        if (this._latestVersions) {
            this.latestVersions[objectName] = this._latestVersions[objectName];
    genMDParamsV0() {
        const params = {};
        if (this.parameters.prefix) {
            params.gte = this.parameters.prefix;
            params.lt = inc(this.parameters.prefix);
        }
    }

    /**
     * Overriding the filter function to only apply the delimiter logic,
     * without parsing the value.
     *
     * @param {object} obj - the metadata entry in the form of { key, value }
     * @return {boolean} - continue filtering or return the formatted list
     */
    filter(obj) {
        // Check first in case of maxKeys <= 0
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 => IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return false;
        }
        // <versioning>
        const key = VSUtils.getObjectNameFromVersionKey(obj.key);
        // </versioning>
        if (this.delimiter) {
            const commonPrefixIndex =
                key.indexOf(this.delimiter, this.searchStart);
            if (commonPrefixIndex === -1) {
                this.addContents(obj.key, obj.value);
            } else {
                this.addCommonPrefix(key.substring(0,
                    commonPrefixIndex + this.delimLen));
        if (this.parameters.keyMarker) {
            if (params.gte && params.gte > this.parameters.keyMarker) {
                return params;
            }
            delete params.gte;
            if (this.parameters.versionIdMarker) {
                // versionIdMarker should always come with keyMarker
                // but may not be the other way around
                params.gt = this.parameters.keyMarker
                    + VID_SEP
                    + this.parameters.versionIdMarker;
            } else {
                params.gt = inc(this.parameters.keyMarker + VID_SEP);
            }
        } else {
            this.addContents(obj.key, obj.value);
        }
        return true;
        return params;
    }

    genMDParamsV1() {
        // return an array of two listing params sets to ask for
        // synchronized listing of M and V ranges
        const params = [{}, {}];
        if (this.parameters.prefix) {
            params[0].gte = DbPrefixes.Master + this.parameters.prefix;
            params[0].lt = DbPrefixes.Master + inc(this.parameters.prefix);
            params[1].gte = DbPrefixes.Version + this.parameters.prefix;
            params[1].lt = DbPrefixes.Version + inc(this.parameters.prefix);
        } else {
            params[0].gte = DbPrefixes.Master;
            params[0].lt = inc(DbPrefixes.Master); // stop after the last master key
            params[1].gte = DbPrefixes.Version;
            params[1].lt = inc(DbPrefixes.Version); // stop after the last version key
        }
        if (this.parameters.keyMarker) {
            if (params[1].gte <= DbPrefixes.Version + this.parameters.keyMarker) {
                delete params[0].gte;
                delete params[1].gte;
                params[0].gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP);
                if (this.parameters.versionIdMarker) {
                    // versionIdMarker should always come with keyMarker
                    // but may not be the other way around
                    params[1].gt = DbPrefixes.Version
                        + this.parameters.keyMarker
                        + VID_SEP
                        + this.parameters.versionIdMarker;
                } else {
                    params[1].gt = DbPrefixes.Version
                        + inc(this.parameters.keyMarker + VID_SEP);
                }
            }
        }
        return params;
    }
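To make the two synchronized ranges concrete, here is a hypothetical output of genMDParamsV1(), assuming DbPrefixes.Master is '\x7fM' and DbPrefixes.Version is '\x7fV' (the prefix values are an assumption; they are not shown in this diff):

// genMDParamsV1() for { prefix: 'foo/' } would yield two range params:
[
    { gte: '\x7fMfoo/', lt: '\x7fMfoo0' }, // master keys range, inc('foo/') === 'foo0'
    { gte: '\x7fVfoo/', lt: '\x7fVfoo0' }, // version keys range
]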

    /**
     * This function formats the result to return
     * @return {Object} - The result.
     * Used to synchronize listing of M and V prefixes by object key
     *
     * @param {object} masterObj object listed from first range
     * returned by genMDParamsV1() (the master keys range)
     * @param {object} versionObj object listed from second range
     * returned by genMDParamsV1() (the version keys range)
     * @return {number} comparison result:
     *   * -1 if master key < version key
     *   * 1 if master key > version key
     */
    compareObjects(masterObj, versionObj) {
        const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
        const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
        return masterKey < versionKey ? -1 : 1;
    }

    /**
     * Add a (key, versionId, value) tuple to the listing.
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {object} obj - the entry to add to the listing result
     * @param {String} obj.key - The key to add
     * @param {String} obj.versionId - versionId
     * @param {String} obj.value - The value of the key
     * @return {Boolean} - indicates if iteration should continue
     */
    addContents(obj) {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.Contents.push({
            key: obj.key,
            value: this.trimMetadata(obj.value),
            versionId: obj.versionId,
        });
        this.NextMarker = obj.key;
        this.NextVersionIdMarker = obj.versionId;
        ++this.keys;
        return FILTER_ACCEPT;
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} key - object name
     * @param {Number} index - after prefix starting point
     * @return {Boolean} - indicates if iteration should continue
     */
    addCommonPrefix(key, index) {
        const commonPrefix = key.substring(0, index + this.delimiter.length);
        if (this.CommonPrefixes.indexOf(commonPrefix) === -1
                && this.NextMarker !== commonPrefix) {
            if (this._reachedMaxKeys()) {
                return FILTER_END;
            }
            this.CommonPrefixes.push(commonPrefix);
            this.NextMarker = commonPrefix;
            ++this.keys;
            return FILTER_ACCEPT;
        }
        return FILTER_SKIP;
    }

    /**
     * Filter to apply on each iteration if bucket is in v0
     * versioning key format, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filterV0(obj) {
        if (obj.key.startsWith(DbPrefixes.Replay)) {
            this.inReplayPrefix = true;
            return FILTER_SKIP;
        }
        this.inReplayPrefix = false;

        if (Version.isPHD(obj.value)) {
            // return accept to avoid skipping the next values in range
            return FILTER_ACCEPT;
        }
        return this.filterCommon(obj.key, obj.value);
    }

    /**
     * Filter to apply on each iteration if bucket is in v1
     * versioning key format, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filterV1(obj) {
        if (Version.isPHD(obj.value)) {
            // return accept to avoid skipping the next values in range
            return FILTER_ACCEPT;
        }
        // this function receives both M and V keys, but their prefix
        // length is the same so we can remove their prefix without
        // looking at the type of key
        return this.filterCommon(obj.key.slice(DbPrefixes.Master.length),
            obj.value);
    }

    filterCommon(key, value) {
        if (this.prefix && !key.startsWith(this.prefix)) {
            return FILTER_SKIP;
        }
        let nonversionedKey;
        let versionId = undefined;
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex < 0) {
            nonversionedKey = key;
            this.masterKey = key;
            this.masterVersionId =
                Version.from(value).getVersionId() || 'null';
            versionId = this.masterVersionId;
        } else {
            nonversionedKey = key.slice(0, versionIdIndex);
            versionId = key.slice(versionIdIndex + 1);
            // skip a version key if it is the master version
            if (this.masterKey === nonversionedKey && this.masterVersionId === versionId) {
                return FILTER_SKIP;
            }
            this.masterKey = undefined;
            this.masterVersionId = undefined;
        }
        if (this.delimiter) {
            const baseIndex = this.prefix ? this.prefix.length : 0;
            const delimiterIndex = nonversionedKey.indexOf(this.delimiter, baseIndex);
            if (delimiterIndex >= 0) {
                return this.addCommonPrefix(nonversionedKey, delimiterIndex);
            }
        }
        return this.addContents({ key: nonversionedKey, value, versionId });
    }

    skippingV0() {
        if (this.inReplayPrefix) {
            return DbPrefixes.Replay;
        }
        if (this.NextMarker) {
            const index = this.NextMarker.lastIndexOf(this.delimiter);
            if (index === this.NextMarker.length - 1) {
                return this.NextMarker;
            }
        }
        return SKIP_NONE;
    }

    skippingV1() {
        const skipV0 = this.skippingV0();
        if (skipV0 === SKIP_NONE) {
            return SKIP_NONE;
        }
        // skip to the same object key in both M and V range listings
        return [DbPrefixes.Master + skipV0,
                DbPrefixes.Version + skipV0];
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done; no NextMarker field is shown if the output
     * is not truncated
     * @return {Object} - following amazon format
     */
    result() {
        // Unset NextMarker when not truncated
        /* NextMarker is only provided when delimiter is used,
         * as specified in the v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        return {
            CommonPrefixes: this.CommonPrefixes,
            Contents: this.Contents,
            // <versioning>
            LatestVersions: this.latestVersions,
            // </versioning>
            Versions: this.Contents,
            IsTruncated: this.IsTruncated,
            NextMarker: this.IsTruncated ? this.NextMarker : undefined,
            NextVersionMarker: this.IsTruncated ?
                this.NextVersionMarker : undefined,
            NextKeyMarker: this.IsTruncated ? this.NextMarker : undefined,
            NextVersionIdMarker: this.IsTruncated ?
                this.NextVersionIdMarker : undefined,
            Delimiter: this.delimiter,
        };
    }
}

module.exports = {
    DelimiterVersions,
};
module.exports = { DelimiterVersions };
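For reference, a hypothetical shape of the object the new result() returns when a version listing is truncated (the values are illustrative):

const sample = {
    CommonPrefixes: ['foo/'],
    Contents: [{ key: 'bar', versionId: '98765', value: '{...}' }],
    Versions: [{ key: 'bar', versionId: '98765', value: '{...}' }], // same array as Contents
    IsTruncated: true,
    NextKeyMarker: 'bar',
    NextVersionIdMarker: '98765',
    Delimiter: '/',
};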
lib/algos/list/exportAlgos.js (new file, 9 lines)
@@ -0,0 +1,9 @@
module.exports = {
    Basic: require('./basic').List,
    Delimiter: require('./delimiter').Delimiter,
    DelimiterVersions: require('./delimiterVersions')
        .DelimiterVersions,
    DelimiterMaster: require('./delimiterMaster')
        .DelimiterMaster,
    MPU: require('./MPU').MultipartUploads,
};
lib/algos/list/skip.js (new file, 95 lines)
@@ -0,0 +1,95 @@
const assert = require('assert');

const { FILTER_END, FILTER_SKIP, SKIP_NONE } = require('./tools');

const MAX_STREAK_LENGTH = 100;

/**
 * Handle the filtering and the skip mechanism of a listing result.
 */
class Skip {
    /**
     * @param {Object} params - skip parameters
     * @param {Object} params.extension - delimiter extension used (required)
     * @param {String} params.gte - current range gte (greater than or
     * equal) used by the client code
     */
    constructor(params) {
        assert(params.extension);

        this.extension = params.extension;
        this.gteParams = params.gte;

        this.listingEndCb = null;
        this.skipRangeCb = null;

        /* Used to count consecutive FILTER_SKIP returned by the extension
         * filter method. Once this counter reaches MAX_STREAK_LENGTH, the
         * filter function tries to skip unwanted values by defining a new
         * range. */
        this.streakLength = 0;
    }

    setListingEndCb(cb) {
        this.listingEndCb = cb;
    }

    setSkipRangeCb(cb) {
        this.skipRangeCb = cb;
    }

    /**
     * Filter an entry.
     * @param {Object} entry - entry to filter.
     * @return {undefined}
     *
     * This function calls the listing end or the skip range callbacks if
     * needed.
     */
    filter(entry) {
        assert(this.listingEndCb);
        assert(this.skipRangeCb);

        const filteringResult = this.extension.filter(entry);
        const skippingRange = this.extension.skipping();

        if (filteringResult === FILTER_END) {
            this.listingEndCb();
        } else if (filteringResult === FILTER_SKIP
                   && skippingRange !== SKIP_NONE) {
            if (++this.streakLength >= MAX_STREAK_LENGTH) {
                let newRange;
                if (Array.isArray(skippingRange)) {
                    newRange = [];
                    for (let i = 0; i < skippingRange.length; ++i) {
                        newRange.push(this._inc(skippingRange[i]));
                    }
                } else {
                    newRange = this._inc(skippingRange);
                }
                /* Avoid looping over the same range again and again. */
                if (newRange === this.gteParams) {
                    this.streakLength = 1;
                } else {
                    this.skipRangeCb(newRange);
                }
            }
        } else {
            this.streakLength = 0;
        }
    }

    _inc(str) {
        if (!str) {
            return str;
        }
        const lastCharValue = str.charCodeAt(str.length - 1);
        const lastCharNewValue = String.fromCharCode(lastCharValue + 1);

        return `${str.slice(0, str.length - 1)}${lastCharNewValue}`;
    }
}

module.exports = Skip;
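A minimal wiring sketch for this class; the metadata stream and the listing extension below are hypothetical stand-ins for the caller's own objects:

const Skip = require('./skip');
const skip = new Skip({ extension: listingExtension, gte: params.gte });
skip.setListingEndCb(() => mdStream.destroy());
skip.setSkipRangeCb(newRange => {
    // re-open the metadata stream past the skipped range
    mdStream.destroy();
    mdStream = openMetadataStream({ gte: newRange });
});
mdStream.on('data', entry => skip.filter(entry));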
@@ -1,3 +1,11 @@
const { DbPrefixes } = require('../../versioning/constants').VersioningConstants;

// constants for extensions
const SKIP_NONE = undefined; // to be in line with the values of NextMarker
const FILTER_ACCEPT = 1;
const FILTER_SKIP = 0;
const FILTER_END = -1;

/**
 * This function checks if a number is valid
 * To be valid a number needs to be an integer lower than the limit
@@ -13,6 +21,50 @@ function checkLimit(number, limit) {
    return valid ? parsed : limit;
}

/**
 * Increment the charCode of the last character of a valid string.
 *
 * @param {string} str - the input string
 * @return {string} - the incremented string
 * or the input if it is not valid
 */
function inc(str) {
    return str ? (str.slice(0, str.length - 1) +
        String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str;
}
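Since inc() only bumps the last character code, it is handy for building exclusive upper bounds on key ranges; for example:

inc('foo');  // => 'fop', every key prefixed with 'foo' sorts below 'fop'
inc('foo/'); // => 'foo0', '/' (0x2f) + 1 is '0' (0x30)
inc('');     // => '', falsy input is returned unchanged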

/**
 * Transform listing parameters for v0 versioning key format to make
 * them compatible with the v1 format
 *
 * @param {object} v0params - listing parameters for v0 format
 * @return {object} - listing parameters for v1 format
 */
function listingParamsMasterKeysV0ToV1(v0params) {
    const v1params = Object.assign({}, v0params);
    if (v0params.gt !== undefined) {
        v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;
    } else if (v0params.gte !== undefined) {
        v1params.gte = `${DbPrefixes.Master}${v0params.gte}`;
    } else {
        v1params.gte = DbPrefixes.Master;
    }
    if (v0params.lt !== undefined) {
        v1params.lt = `${DbPrefixes.Master}${v0params.lt}`;
    } else if (v0params.lte !== undefined) {
        v1params.lte = `${DbPrefixes.Master}${v0params.lte}`;
    } else {
        v1params.lt = inc(DbPrefixes.Master); // stop after the last master key
    }
    return v1params;
}
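A quick example of the transformation, assuming DbPrefixes.Master is '\x7fM' (the prefix value itself is an assumption here):

listingParamsMasterKeysV0ToV1({ gte: 'foo/', lt: 'foo0' });
// => { gte: '\x7fMfoo/', lt: '\x7fMfoo0' }
listingParamsMasterKeysV0ToV1({});
// => { gte: '\x7fM', lt: '\x7fN' }, i.e. the whole master keys range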

module.exports = {
    checkLimit,
    inc,
    listingParamsMasterKeysV0ToV1,
    SKIP_NONE,
    FILTER_END,
    FILTER_SKIP,
    FILTER_ACCEPT,
};
lib/algos/set/ArrayUtils.js (new file, 87 lines)
@@ -0,0 +1,87 @@
function indexOf(arr, value) {
    if (!arr.length) {
        return -1;
    }
    let lo = 0;
    let hi = arr.length - 1;

    while (hi - lo > 1) {
        const i = lo + ((hi - lo) >> 1);
        if (arr[i] > value) {
            hi = i;
        } else {
            lo = i;
        }
    }
    if (arr[lo] === value) {
        return lo;
    }
    if (arr[hi] === value) {
        return hi;
    }
    return -1;
}

function indexAtOrBelow(arr, value) {
    let i;
    let lo;
    let hi;

    if (!arr.length || arr[0] > value) {
        return -1;
    }
    if (arr[arr.length - 1] <= value) {
        return arr.length - 1;
    }

    lo = 0;
    hi = arr.length - 1;

    while (hi - lo > 1) {
        i = lo + ((hi - lo) >> 1);
        if (arr[i] > value) {
            hi = i;
        } else {
            lo = i;
        }
    }

    return lo;
}

/*
 * perform symmetric diff in O(m + n)
 */
function symDiff(k1, k2, v1, v2, cb) {
    let i = 0;
    let j = 0;
    const n = k1.length;
    const m = k2.length;

    while (i < n && j < m) {
        if (k1[i] < k2[j]) {
            cb(v1[i]);
            i++;
        } else if (k2[j] < k1[i]) {
            cb(v2[j]);
            j++;
        } else {
            i++;
            j++;
        }
    }
    while (i < n) {
        cb(v1[i]);
        i++;
    }
    while (j < m) {
        cb(v2[j]);
        j++;
    }
}

module.exports = {
    indexOf,
    indexAtOrBelow,
    symDiff,
};
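Because both key arrays must be sorted, symDiff() can emit the values whose keys appear in exactly one input in a single pass; for example:

const { symDiff } = require('./ArrayUtils');
symDiff([1, 2, 4], [2, 3], ['a', 'b', 'c'], ['x', 'y'], v => console.log(v));
// prints 'a', 'y', 'c': keys 1 and 4 are only in the first set, 3 only in the second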
lib/algos/set/SortedSet.js (new file, 51 lines)
@@ -0,0 +1,51 @@
const ArrayUtils = require('./ArrayUtils');

class SortedSet {
    constructor(obj) {
        if (obj) {
            this.keys = obj.keys;
            this.values = obj.values;
        } else {
            this.clear();
        }
    }

    clear() {
        this.keys = [];
        this.values = [];
    }

    get size() {
        return this.keys.length;
    }

    set(key, value) {
        const index = ArrayUtils.indexAtOrBelow(this.keys, key);
        if (this.keys[index] === key) {
            this.values[index] = value;
            return;
        }
        this.keys.splice(index + 1, 0, key);
        this.values.splice(index + 1, 0, value);
    }

    isSet(key) {
        const index = ArrayUtils.indexOf(this.keys, key);
        return index >= 0;
    }

    get(key) {
        const index = ArrayUtils.indexOf(this.keys, key);
        return index >= 0 ? this.values[index] : undefined;
    }

    del(key) {
        const index = ArrayUtils.indexOf(this.keys, key);
        if (index >= 0) {
            this.keys.splice(index, 1);
            this.values.splice(index, 1);
        }
    }
}

module.exports = SortedSet;
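Usage is straightforward, since keys stay sorted on insertion and lookups are binary searches:

const SortedSet = require('./SortedSet');
const set = new SortedSet();
set.set('b', 2);
set.set('a', 1);
set.set('b', 20); // overwrites the existing 'b' entry
set.get('a');     // => 1
set.isSet('c');   // => false
set.size;         // => 2, keys are ['a', 'b']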
lib/algos/stream/MergeStream.js (new file, 106 lines)
@@ -0,0 +1,106 @@
const stream = require('stream');

class MergeStream extends stream.Readable {
    constructor(stream1, stream2, compare) {
        super({ objectMode: true });

        this._compare = compare;
        this._streams = [stream1, stream2];

        // peekItems elements represent the latest item consumed from
        // the respective input stream but not yet pushed. It can also
        // be one of the following special values:
        // - undefined: stream hasn't started emitting items
        // - null: EOF reached and no more item to peek
        this._peekItems = [undefined, undefined];
        this._streamEof = [false, false];
        this._streamToResume = null;

        stream1.on('data', item => this._onItem(stream1, item, 0, 1));
        stream1.once('end', () => this._onEnd(stream1, 0, 1));
        stream1.once('error', err => this._onError(stream1, err, 0, 1));

        stream2.on('data', item => this._onItem(stream2, item, 1, 0));
        stream2.once('end', () => this._onEnd(stream2, 1, 0));
        stream2.once('error', err => this._onError(stream2, err, 1, 0));
    }

    _read() {
        if (this._streamToResume) {
            this._streamToResume.resume();
            this._streamToResume = null;
        }
    }

    _destroy(err, callback) {
        for (let i = 0; i < 2; ++i) {
            if (!this._streamEof[i]) {
                this._streams[i].destroy();
            }
        }
        callback();
    }

    _onItem(myStream, myItem, myIndex, otherIndex) {
        this._peekItems[myIndex] = myItem;
        const otherItem = this._peekItems[otherIndex];
        if (otherItem === undefined) {
            // wait for the other stream to wake up
            return myStream.pause();
        }
        if (otherItem === null || this._compare(myItem, otherItem) <= 0) {
            if (!this.push(myItem)) {
                myStream.pause();
                this._streamToResume = myStream;
            }
            return undefined;
        }
        const otherStream = this._streams[otherIndex];
        const otherMore = this.push(otherItem);
        if (this._streamEof[otherIndex]) {
            this._peekItems[otherIndex] = null;
            return this.push(myItem);
        }
        myStream.pause();
        if (otherMore) {
            return otherStream.resume();
        }
        this._streamToResume = otherStream;
        return undefined;
    }

    _onEnd(myStream, myIndex, otherIndex) {
        this._streamEof[myIndex] = true;
        if (this._peekItems[myIndex] === undefined) {
            this._peekItems[myIndex] = null;
        }
        const myItem = this._peekItems[myIndex];
        const otherItem = this._peekItems[otherIndex];
        if (otherItem === undefined) {
            // wait for the other stream to wake up
            return undefined;
        }
        if (otherItem === null) {
            return this.push(null);
        }
        if (myItem === null || this._compare(myItem, otherItem) <= 0) {
            this.push(otherItem);
            this._peekItems[myIndex] = null;
        }
        if (this._streamEof[otherIndex]) {
            return this.push(null);
        }
        const otherStream = this._streams[otherIndex];
        return otherStream.resume();
    }

    _onError(myStream, err, myIndex, otherIndex) {
        myStream.destroy();
        if (this._streams[otherIndex]) {
            this._streams[otherIndex].destroy();
        }
        this.emit('error', err);
    }
}

module.exports = MergeStream;
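A small sketch of merging two sorted object streams, for instance the M and V ranges produced by genMDParamsV1(); the input streams and comparator below are hypothetical:

const stream = require('stream');
const MergeStream = require('./MergeStream');
const s1 = stream.Readable.from([{ key: 'a' }, { key: 'c' }]);
const s2 = stream.Readable.from([{ key: 'b' }, { key: 'd' }]);
const merged = new MergeStream(s1, s2,
    (o1, o2) => (o1.key < o2.key ? -1 : 1));
merged.on('data', obj => console.log(obj.key)); // a, b, c, d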
@@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict

const constants = require('../constants');
import * as constants from '../constants';

/**
 * Class containing requester's information received from Vault
@@ -8,9 +6,15 @@ const constants = require('../constants');
 * shortid, email, accountDisplayName and IAMdisplayName (if applicable)
 * @return {AuthInfo} an AuthInfo instance
 */
export default class AuthInfo {
    arn: string;
    canonicalID: string;
    shortid: string;
    email: string;
    accountDisplayName: string;
    IAMdisplayName: string;

class AuthInfo {
    constructor(objectFromVault) {
    constructor(objectFromVault: any) {
        // amazon resource name for IAM user (if applicable)
        this.arn = objectFromVault.arn;
        // account canonicalID
@@ -49,6 +53,12 @@ class AuthInfo {
    isRequesterPublicUser() {
        return this.canonicalID === constants.publicId;
    }
    isRequesterAServiceAccount() {
        return this.canonicalID.startsWith(
            `${constants.zenkoServiceAccount}/`);
    }
    isRequesterThisServiceAccount(serviceName: string) {
        const computedCanonicalID = `${constants.zenkoServiceAccount}/${serviceName}`;
        return this.canonicalID === computedCanonicalID;
    }
}

module.exports = AuthInfo;
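A hypothetical instantiation, with illustrative values standing in for a real Vault response body:

const authInfo = new AuthInfo({
    arn: 'arn:aws:iam::123456789012:user/alice', // illustrative
    canonicalID: 'abcdef0123456789',
    accountDisplayName: 'alice-account',
});
authInfo.isRequesterPublicUser();                 // => false
authInfo.isRequesterThisServiceAccount('clueso'); // => false unless the canonicalID matches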
lib/auth/Vault.ts (new file, 387 lines)
@@ -0,0 +1,387 @@
import { Logger } from 'werelogs';
import errors from '../errors';
import AuthInfo from './AuthInfo';

/** vaultSignatureCb parses message from Vault and instantiates
 * @param err - error from vault
 * @param authInfo - info from vault
 * @param log - log for request
 * @param callback - callback to authCheck functions
 * @param [streamingV4Params] - present if v4 signature;
 * items used to calculate signature on chunks if streaming auth
 */
function vaultSignatureCb(
    err: Error | null,
    authInfo: { message: { body: any } },
    log: Logger,
    callback: (err: Error | null, data?: any, results?: any, params?: any) => void,
    streamingV4Params?: any
) {
    // vaultclient API guarantees that it returns:
    // - either `err`, an Error object with `code` and `message` properties set
    // - or `err == null` and `info` is an object with `message.code` and
    //   `message.message` properties set.
    if (err) {
        log.debug('received error message from auth provider',
            { errorMessage: err });
        return callback(err);
    }
    log.debug('received info from Vault', { authInfo });
    const info = authInfo.message.body;
    const userInfo = new AuthInfo(info.userInfo);
    const authorizationResults = info.authorizationResults;
    const auditLog: { accountDisplayName: string, IAMdisplayName?: string } =
        { accountDisplayName: userInfo.getAccountDisplayName() };
    const iamDisplayName = userInfo.getIAMdisplayName();
    if (iamDisplayName) {
        auditLog.IAMdisplayName = iamDisplayName;
    }
    // @ts-ignore
    log.addDefaultFields(auditLog);
    return callback(null, userInfo, authorizationResults, streamingV4Params);
}

export type AuthV4RequestParams = {
    version: 4;
    log: Logger;
    data: {
        accessKey: string;
        signatureFromRequest: string;
        region: string;
        stringToSign: string;
        scopeDate: string;
        authType: 'query' | 'header';
        signatureVersion: string;
        signatureAge?: number;
        timestamp: number;
        credentialScope: string;
        securityToken: string;
        algo: string;
        log: Logger;
    };
};

/**
 * Class that provides common authentication methods against different
 * authentication backends.
 * @class Vault
 */
export default class Vault {
    client: any;
    implName: string;

    /**
     * @constructor
     * @param {object} client - authentication backend or vault client
     * @param {string} implName - implementation name for auth backend
     */
    constructor(client: any, implName: string) {
        this.client = client;
        this.implName = implName;
    }

    /**
     * authenticateV2Request
     *
     * @param params - the authentication parameters as returned by
     * auth.extractParams
     * @param params.version - shall equal 2
     * @param params.data.accessKey - the user's accessKey
     * @param params.data.signatureFromRequest - the signature read
     * from the request
     * @param params.data.stringToSign - the stringToSign
     * @param params.data.algo - the hashing algorithm used for the
     * signature
     * @param params.data.authType - the type of authentication (query
     * or header)
     * @param params.data.signatureVersion - the version of the
     * signature (AWS or AWS4)
     * @param [params.data.signatureAge] - the age of the signature in
     * ms
     * @param params.data.log - the logger object
     * @param {RequestContext []} requestContexts - an array of RequestContext
     * instances which contain information for policy authorization check
     * @param callback - callback with either error or user info
     */
    authenticateV2Request(
        params: {
            version: 2;
            log: Logger;
            data: {
                securityToken: string;
                accessKey: string;
                signatureFromRequest: string;
                stringToSign: string;
                algo: string;
                authType: 'query' | 'header';
                signatureVersion: string;
                signatureAge?: number;
                log: Logger;
            };
        },
        requestContexts: any[],
        callback: (err: Error | null, data?: any) => void
    ) {
        params.log.debug('authenticating V2 request');
        let serializedRCsArr: any;
        if (requestContexts) {
            serializedRCsArr = requestContexts.map(rc => rc.serialize());
        }
        this.client.verifySignatureV2(
            params.data.stringToSign,
            params.data.signatureFromRequest,
            params.data.accessKey,
            {
                algo: params.data.algo,
                // @ts-ignore
                reqUid: params.log.getSerializedUids(),
                logger: params.log,
                securityToken: params.data.securityToken,
                requestContext: serializedRCsArr,
            },
            (err: Error | null, userInfo?: any) => vaultSignatureCb(err, userInfo,
                params.log, callback),
        );
    }

    /** authenticateV4Request
     * @param params - the authentication parameters as returned by
     * auth.extractParams
     * @param params.version - shall equal 4
     * @param params.data.log - the logger object
     * @param params.data.accessKey - the user's accessKey
     * @param params.data.signatureFromRequest - the signature read
     * from the request
     * @param params.data.region - the AWS region
     * @param params.data.stringToSign - the stringToSign
     * @param params.data.scopeDate - the timespan to allow the request
     * @param params.data.authType - the type of authentication (query
     * or header)
     * @param params.data.signatureVersion - the version of the
     * signature (AWS or AWS4)
     * @param params.data.signatureAge - the age of the signature in ms
     * @param params.data.timestamp - signature timestamp
     * @param params.credentialScope - credentialScope for signature
     * @param {RequestContext [] | null} requestContexts - an array of
     * RequestContext instances which contain information for policy
     * authorization check, or null when authenticating a chunk in
     * streaming v4 auth
     * @param callback - callback with either error or user info
     */
    authenticateV4Request(
        params: AuthV4RequestParams,
        requestContexts: any[] | null,
        callback: (err: Error | null, data?: any) => void
    ) {
        params.log.debug('authenticating V4 request');
        let serializedRCs: any;
        if (requestContexts) {
            serializedRCs = requestContexts.map(rc => rc.serialize());
        }
        const streamingV4Params = {
            accessKey: params.data.accessKey,
            signatureFromRequest: params.data.signatureFromRequest,
            region: params.data.region,
            scopeDate: params.data.scopeDate,
            timestamp: params.data.timestamp,
            credentialScope: params.data.credentialScope };
        this.client.verifySignatureV4(
            params.data.stringToSign,
            params.data.signatureFromRequest,
            params.data.accessKey,
            params.data.region,
            params.data.scopeDate,
            {
                // @ts-ignore
                reqUid: params.log.getSerializedUids(),
                logger: params.log,
                securityToken: params.data.securityToken,
                requestContext: serializedRCs,
            },
            (err: Error | null, userInfo?: any) => vaultSignatureCb(err, userInfo,
                params.log, callback, streamingV4Params),
        );
    }

    /** getCanonicalIds -- call Vault to get canonicalIDs based on email
     * addresses
     * @param emailAddresses - list of emailAddresses
     * @param log - log object
     * @param callback - callback with either error or an array
     * of objects with each object containing the canonicalID and emailAddress
     * of an account as properties
     */
    getCanonicalIds(
        emailAddresses: string[],
        log: Logger,
        callback: (
            err: Error | null,
            data?: { canonicalID: string; email: string }[]
        ) => void
    ) {
        log.trace('getting canonicalIDs from Vault based on emailAddresses',
            { emailAddresses });
        this.client.getCanonicalIds(emailAddresses,
            // @ts-ignore
            { reqUid: log.getSerializedUids() },
            (err: Error | null, info?: any) => {
                if (err) {
                    log.debug('received error message from auth provider',
                        { errorMessage: err });
                    return callback(err);
                }
                const infoFromVault = info.message.body;
                log.trace('info received from vault', { infoFromVault });
                const foundIds: { canonicalID: string; email: string }[] = [];
                for (let i = 0; i < Object.keys(infoFromVault).length; i++) {
                    const key = Object.keys(infoFromVault)[i];
                    if (infoFromVault[key] === 'WrongFormat'
                        || infoFromVault[key] === 'NotFound') {
                        return callback(errors.UnresolvableGrantByEmailAddress);
                    }
                    foundIds.push({
                        email: key,
                        canonicalID: infoFromVault[key],
                    });
                }
                return callback(null, foundIds);
            });
    }

    /** getEmailAddresses -- call Vault to get email addresses based on
     * canonicalIDs
     * @param canonicalIDs - list of canonicalIDs
     * @param log - log object
     * @param callback - callback with either error or an object
     * with canonicalID keys and email address values
     */
    getEmailAddresses(
        canonicalIDs: string[],
        log: Logger,
        callback: (err: Error | null, data?: { [key: string]: any }) => void
    ) {
        log.trace('getting emailAddresses from Vault based on canonicalIDs',
            { canonicalIDs });
        this.client.getEmailAddresses(canonicalIDs,
            // @ts-ignore
            { reqUid: log.getSerializedUids() },
            (err: Error | null, info?: any) => {
                if (err) {
                    log.debug('received error message from vault',
                        { errorMessage: err });
                    return callback(err);
                }
                const infoFromVault = info.message.body;
                log.trace('info received from vault', { infoFromVault });
                const result = {};
                /* If the email address was not found in Vault, do not
                   send the canonicalID back to the API */
                Object.keys(infoFromVault).forEach(key => {
                    if (infoFromVault[key] !== 'NotFound' &&
                        infoFromVault[key] !== 'WrongFormat') {
                        result[key] = infoFromVault[key];
                    }
                });
                return callback(null, result);
            });
    }

    /** getAccountIds -- call Vault to get accountIds based on
     * canonicalIDs
     * @param canonicalIDs - list of canonicalIDs
     * @param log - log object
     * @param callback - callback with either error or an object
     * with canonicalID keys and accountId values
     */
    getAccountIds(
        canonicalIDs: string[],
        log: Logger,
        callback: (err: Error | null, data?: { [key: string]: string }) => void
    ) {
        log.trace('getting accountIds from Vault based on canonicalIDs',
            { canonicalIDs });
        this.client.getAccountIds(canonicalIDs,
            // @ts-expect-error
            { reqUid: log.getSerializedUids() },
            (err: Error | null, info?: any) => {
                if (err) {
                    log.debug('received error message from vault',
                        { errorMessage: err });
                    return callback(err);
                }
                const infoFromVault = info.message.body;
                log.trace('info received from vault', { infoFromVault });
                const result = {};
                /* If the accountId was not found in Vault, do not
                   send the canonicalID back to the API */
                Object.keys(infoFromVault).forEach(key => {
                    if (infoFromVault[key] !== 'NotFound' &&
                        infoFromVault[key] !== 'WrongFormat') {
                        result[key] = infoFromVault[key];
                    }
                });
                return callback(null, result);
            });
    }

    /** checkPolicies -- call Vault to evaluate policies
     * @param {object} requestContextParams - parameters needed to construct
     * requestContext in Vault
     * @param {object} requestContextParams.constantParams - params that have
     * the same value for each requestContext to be constructed in Vault
     * @param {object} requestContextParams.parameterize - params that have
     * arrays as values since a requestContext needs to be constructed with
     * each option in Vault
     * @param {string} userArn - arn of requesting user
     * @param {object} log - log object
     * @param {function} callback - callback with either error or an array
     * of authorization results
     */
    checkPolicies(
        requestContextParams: any[],
        userArn: string,
        log: Logger,
        callback: (err: Error | null, data?: any[]) => void
    ) {
        log.trace('sending request context params to vault to evaluate ' +
            'policies');
        this.client.checkPolicies(requestContextParams, userArn, {
            // @ts-ignore
            reqUid: log.getSerializedUids(),
        }, (err: Error | null, info?: any) => {
            if (err) {
                log.debug('received error message from auth provider',
                    { error: err });
                return callback(err);
            }
            const result = info.message.body;
            return callback(null, result);
        });
    }

    checkHealth(log: Logger, callback: (err: Error | null, data?: any) => void) {
        if (!this.client.healthcheck) {
            const defResp = {};
            defResp[this.implName] = { code: 200, message: 'OK' };
            return callback(null, defResp);
        }
        // @ts-ignore
        return this.client.healthcheck(log.getSerializedUids(), (err: Error | null, obj?: any) => {
            const respBody = {};
            if (err) {
                log.debug(`error from ${this.implName}`, { error: err });
                respBody[this.implName] = {
                    error: err,
                };
                // error returned as null so async parallel doesn't return
                // before all backends are checked
                return callback(null, respBody);
            }
            respBody[this.implName] = {
                code: 200,
                message: 'OK',
                body: obj,
            };
            return callback(null, respBody);
        });
    }
}
@@ -1,16 +1,21 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
import * as crypto from 'crypto';
|
||||
import { Logger } from 'werelogs';
|
||||
import errors from '../errors';
|
||||
import * as queryString from 'querystring';
|
||||
import AuthInfo from './AuthInfo';
|
||||
import * as v2 from './v2/authV2';
|
||||
import * as v4 from './v4/authV4';
|
||||
import * as constants from '../constants';
|
||||
import constructStringToSignV2 from './v2/constructStringToSign';
|
||||
import constructStringToSignV4 from './v4/constructStringToSign';
|
||||
import { convertUTCtoISO8601 } from './v4/timeUtils';
|
||||
import * as vaultUtilities from './in_memory/vaultUtilities';
|
||||
import * as backend from './in_memory/Backend';
|
||||
import validateAuthConfig from './in_memory/validateAuthConfig';
|
||||
import AuthLoader from './in_memory/AuthLoader';
|
||||
import Vault from './Vault';
|
||||
|
||||
const errors = require('../errors');
|
||||
const queryString = require('querystring');
|
||||
const AuthInfo = require('./AuthInfo');
|
||||
const v2 = require('./v2/authV2');
|
||||
const v4 = require('./v4/authV4');
|
||||
const constants = require('../constants');
|
||||
const constructStringToSignV4 = require('./v4/constructStringToSign');
|
||||
const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601;
|
||||
const crypto = require('crypto');
|
||||
const vaultUtilities = require('./in_memory/vaultUtilities');
|
||||
let vault = null;
|
||||
let vault: Vault | null = null;
|
||||
const auth = {};
|
||||
const checkFunctions = {
|
||||
v2: {
|
||||
@@ -27,7 +32,7 @@ const checkFunctions = {
|
||||
// 'All Users Group' so use this group as the canonicalID for the publicUser
|
||||
const publicUserInfo = new AuthInfo({ canonicalID: constants.publicId });
|
||||
|
||||
function setAuthHandler(handler) {
|
||||
function setAuthHandler(handler: Vault) {
|
||||
vault = handler;
|
||||
return auth;
|
||||
}
|
||||
@@ -35,25 +40,30 @@ function setAuthHandler(handler) {
|
||||
/**
|
||||
* This function will check validity of request parameters to authenticate
|
||||
*
|
||||
* @param {Http.Request} request - Http request object
|
||||
* @param {object} log - Logger object
|
||||
* @param {string} awsService - Aws service related
|
||||
* @param {object} data - Parameters from queryString parsing or body of
|
||||
* @param request - Http request object
|
||||
* @param log - Logger object
|
||||
* @param awsService - Aws service related
|
||||
* @param data - Parameters from queryString parsing or body of
|
||||
* POST request
|
||||
*
|
||||
* @return {object} ret
|
||||
* @return {object} ret.err - arsenal.errors object if any error was found
|
||||
* @return {object} ret.params - auth parameters to use later on for signature
|
||||
* @return ret
|
||||
* @return ret.err - arsenal.errors object if any error was found
|
||||
* @return ret.params - auth parameters to use later on for signature
|
||||
* computation and check
|
||||
* @return {object} ret.params.version - the auth scheme version
|
||||
* @return ret.params.version - the auth scheme version
|
||||
* (undefined, 2, 4)
|
||||
* @return {object} ret.params.data - the auth scheme's specific data
|
||||
* @return ret.params.data - the auth scheme's specific data
|
||||
*/
|
||||
function extractParams(request, log, awsService, data) {
|
||||
function extractParams(
|
||||
request: any,
|
||||
log: Logger,
|
||||
awsService: string,
|
||||
data: { [key: string]: string }
|
||||
) {
|
||||
log.trace('entered', { method: 'Arsenal.auth.server.extractParams' });
|
||||
const authHeader = request.headers.authorization;
|
||||
let version = null;
|
||||
let method = null;
|
||||
let version: 'v2' |'v4' | null = null;
|
||||
let method: 'query' | 'headers' | null = null;
|
||||
|
||||
// Identify auth version and method to dispatch to the right check function
|
||||
if (authHeader) {
|
||||
@@ -65,7 +75,7 @@ function extractParams(request, log, awsService, data) {
|
||||
} else if (authHeader.startsWith('AWS4')) {
|
||||
version = 'v4';
|
||||
} else {
|
||||
log.trace('missing authorization security header',
|
||||
log.trace('invalid authorization security header',
|
||||
{ header: authHeader });
|
||||
return { err: errors.AccessDenied };
|
||||
}
|
||||
@@ -96,16 +106,21 @@ function extractParams(request, log, awsService, data) {
|
||||
/**
|
||||
* This function will check validity of request parameters to authenticate
|
||||
*
|
||||
* @param {Http.Request} request - Http request object
|
||||
* @param {object} log - Logger object
|
||||
* @param {function} cb - the callback
|
||||
* @param {string} awsService - Aws service related
|
||||
* @param request - Http request object
|
||||
* @param log - Logger object
|
||||
* @param cb - the callback
|
||||
* @param awsService - Aws service related
|
||||
* @param {RequestContext[] | null} requestContexts - array of RequestContext
|
||||
* or null if no requestContexts to be sent to Vault (for instance,
|
||||
* in multi-object delete request)
|
||||
* @return {undefined}
|
||||
*/
|
||||
function doAuth(request, log, cb, awsService, requestContexts) {
|
||||
function doAuth(
|
||||
request: any,
|
||||
log: Logger,
|
||||
cb: (err: Error | null, data?: any) => void,
|
||||
awsService: string,
|
||||
requestContexts: any[] | null
|
||||
) {
|
||||
const res = extractParams(request, log, awsService, request.query);
|
||||
if (res.err) {
|
||||
return cb(res.err);
|
||||
@@ -113,22 +128,31 @@ function doAuth(request, log, cb, awsService, requestContexts) {
|
||||
return cb(null, res.params);
|
||||
}
|
||||
if (requestContexts) {
|
||||
requestContexts.forEach(requestContext => {
|
||||
requestContext.setAuthType(res.params.data.authType);
|
||||
requestContext.setSignatureVersion(res.params
|
||||
.data.signatureVersion);
|
||||
requestContext.setSignatureAge(res.params.data.signatureAge);
|
||||
requestContexts.forEach((requestContext) => {
|
||||
const { params } = res
|
||||
if ('data' in params) {
|
||||
const { data } = params
|
||||
requestContext.setAuthType(data.authType);
|
||||
requestContext.setSignatureVersion(data.signatureVersion);
|
||||
requestContext.setSecurityToken(data.securityToken);
|
||||
if ('signatureAge' in data) {
|
||||
requestContext.setSignatureAge(data.signatureAge);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Corner cases managed, we're left with normal auth
|
||||
// TODO What's happening here?
|
||||
// @ts-ignore
|
||||
res.params.log = log;
|
||||
if (res.params.version === 2) {
|
||||
return vault.authenticateV2Request(res.params, requestContexts, cb);
|
||||
// @ts-ignore
|
||||
return vault!.authenticateV2Request(res.params, requestContexts, cb);
|
||||
}
|
||||
if (res.params.version === 4) {
|
||||
return vault.authenticateV4Request(res.params, requestContexts, cb,
|
||||
awsService);
|
||||
// @ts-ignore
|
||||
return vault!.authenticateV4Request(res.params, requestContexts, cb);
|
||||
}
|
||||
|
||||
log.error('authentication method not found', {
|
||||
@@ -137,20 +161,44 @@ function doAuth(request, log, cb, awsService, requestContexts) {
|
||||
return cb(errors.InternalError);
|
||||
}
|
||||
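For orientation, a minimal sketch of driving the exported server.doAuth helper; the import path, request shape, and handler name below are illustrative assumptions, not part of this changeset.

// Hypothetical caller sketch: assumes a Node http.IncomingMessage-like
// request object and a werelogs request logger.
import { server } from './lib/auth/auth'; // assumed import path

function authenticate(request: any, log: any) {
    // 's3' selects the S3 signing rules; null means no RequestContexts
    // are forwarded to Vault (as in a multi-object delete request).
    server.doAuth(request, log, (err, authInfo) => {
        if (err) {
            return log.error('authentication failed', { error: err });
        }
        return log.trace('authenticated', { authInfo });
    }, 's3', null);
}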
|
||||
|
||||
/**
|
||||
* This function will generate a version 4 content-md5 header
|
||||
* It looks at the request path to determine what kind of header encoding is required
|
||||
*
|
||||
* @param path - the request path
|
||||
* @param payload - the request payload to hash
|
||||
*/
|
||||
function generateContentMD5Header(
|
||||
path: string,
|
||||
payload: string,
|
||||
) {
|
||||
const encoding = path && path.startsWith('/_/backbeat/') ? 'hex' : 'base64';
|
||||
return crypto.createHash('md5').update(payload, 'binary').digest(encoding);
|
||||
}
|
||||
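A quick illustration of the two encodings generateContentMD5Header switches between; the payload value is made up.

// Same digest, two encodings; backbeat routes get hex, everything else base64.
import * as crypto from 'crypto';

const payload = '{"hello":"world"}';
crypto.createHash('md5').update(payload, 'binary').digest('hex');    // paths under /_/backbeat/
crypto.createHash('md5').update(payload, 'binary').digest('base64'); // all other paths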
/**
|
||||
* This function will generate a version 4 header
|
||||
*
|
||||
* @param {Http.Request} request - Http request object
|
||||
* @param {object} data - Parameters from queryString parsing or body of
|
||||
* @param request - Http request object
|
||||
* @param data - Parameters from queryString parsing or body of
|
||||
* POST request
|
||||
* @param {string} accessKey - the accessKey
|
||||
* @param {string} secretKeyValue - the secretKey
|
||||
* @param {string} awsService - Aws service related
|
||||
* @return {undefined}
|
||||
* @param accessKey - the accessKey
|
||||
* @param secretKeyValue - the secretKey
|
||||
 * @param awsService - related AWS service
|
||||
* @param [proxyPath] - path that gets proxied by reverse proxy
|
||||
* @param [sessionToken] - security token if the access/secret keys
|
||||
* are temporary credentials from STS
|
||||
* @param [payload] - body of the request if any
|
||||
*/
|
||||
function generateV4Headers(request, data, accessKey, secretKeyValue,
|
||||
awsService) {
|
||||
function generateV4Headers(
|
||||
request: any,
|
||||
data: { [key: string]: string },
|
||||
accessKey: string,
|
||||
secretKeyValue: string,
|
||||
awsService: string,
|
||||
proxyPath?: string,
|
||||
sessionToken?: string,
|
||||
payload?: string,
|
||||
) {
|
||||
Object.assign(request, { headers: {} });
|
||||
const amzDate = convertUTCtoISO8601(Date.now());
|
||||
// get date without time
|
||||
@@ -162,9 +210,9 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
|
||||
const timestamp = amzDate;
|
||||
const algorithm = 'AWS4-HMAC-SHA256';
|
||||
|
||||
let payload = '';
|
||||
payload = payload || '';
|
||||
if (request.method === 'POST') {
|
||||
payload = queryString.stringify(data, null, null, {
|
||||
payload = queryString.stringify(data, undefined, undefined, {
|
||||
encodeURIComponent,
|
||||
});
|
||||
}
|
||||
@@ -173,23 +221,30 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
|
||||
request.setHeader('host', request._headers.host);
|
||||
request.setHeader('x-amz-date', amzDate);
|
||||
request.setHeader('x-amz-content-sha256', payloadChecksum);
|
||||
request.setHeader('content-md5', generateContentMD5Header(request.path, payload));
|
||||
|
||||
if (sessionToken) {
|
||||
request.setHeader('x-amz-security-token', sessionToken);
|
||||
}
|
||||
|
||||
Object.assign(request.headers, request._headers);
|
||||
const signedHeaders = Object.keys(request._headers)
|
||||
.filter(headerName =>
|
||||
headerName.startsWith('x-amz-')
|
||||
|| headerName.startsWith('x-scal-')
|
||||
|| headerName === 'content-md5'
|
||||
|| headerName === 'host'
|
||||
).sort().join(';');
|
||||
const params = { request, signedHeaders, payloadChecksum,
|
||||
credentialScope, timestamp, query: data,
|
||||
awsService: service };
|
||||
credentialScope, timestamp, query: data,
|
||||
awsService: service, proxyPath };
|
||||
const stringToSign = constructStringToSignV4(params);
|
||||
const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
|
||||
region,
|
||||
scopeDate,
|
||||
service);
|
||||
const signature = crypto.createHmac('sha256', signingKey)
|
||||
.update(stringToSign, 'binary').digest('hex');
|
||||
.update(stringToSign as string, 'binary').digest('hex');
|
||||
const authorizationHeader = `${algorithm} Credential=${accessKey}` +
|
||||
`/${credentialScope}, SignedHeaders=${signedHeaders}, ` +
|
||||
`Signature=${signature}`;
|
||||
@@ -197,13 +252,11 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
|
||||
Object.assign(request, { headers: {} });
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
setHandler: setAuthHandler,
|
||||
server: {
|
||||
extractParams,
|
||||
doAuth,
|
||||
},
|
||||
client: {
|
||||
generateV4Headers,
|
||||
},
|
||||
};
|
||||
export const server = { extractParams, doAuth }
|
||||
export const client = { generateV4Headers, constructStringToSignV2 }
|
||||
export const inMemory = { backend, validateAuthConfig, AuthLoader }
|
||||
export {
|
||||
setAuthHandler as setHandler,
|
||||
AuthInfo,
|
||||
Vault
|
||||
}
|
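To round off the client side, a hedged sketch of signing an outgoing request with generateV4Headers; host, port, and credentials are placeholders, and the request must behave like a Node http.ClientRequest (setHeader, path, method).

import * as http from 'http';
import { client } from './lib/auth/auth'; // assumed import path

const req = http.request({ host: 'localhost', port: 8000, path: '/bucket', method: 'GET' });
// Computes and installs the AWS4-HMAC-SHA256 authorization header in place.
client.generateV4Headers(req, {}, 'accessKey1', 'verySecretKey1', 's3');
req.end();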
204 lib/auth/in_memory/AuthLoader.ts Normal file
@@ -0,0 +1,204 @@
|
||||
import * as fs from 'fs';
|
||||
import glob from 'simple-glob';
|
||||
import joi from 'joi';
|
||||
import werelogs from 'werelogs';
|
||||
import * as types from './types';
|
||||
import { Account, Accounts } from './types';
|
||||
import ARN from '../../models/ARN';
|
||||
|
||||
/** Load authentication information from files or pre-loaded account objects */
|
||||
export default class AuthLoader {
|
||||
#log: werelogs.Logger;
|
||||
#authData: Accounts;
|
||||
#isValid: 'waiting-for-validation' | 'valid' | 'invalid';
|
||||
|
||||
constructor(logApi: { Logger: typeof werelogs.Logger } = werelogs) {
|
||||
this.#log = new logApi.Logger('S3');
|
||||
this.#authData = { accounts: [] };
|
||||
this.#isValid = 'waiting-for-validation';
|
||||
}
|
||||
|
||||
/** Add one or more accounts to the authentication info */
|
||||
addAccounts(authData: Accounts, filePath?: string) {
|
||||
const isValid = this.#isAuthDataValid(authData, filePath);
|
||||
if (isValid) {
|
||||
this.#authData.accounts = [
|
||||
...this.#authData.accounts,
|
||||
...authData.accounts,
|
||||
];
|
||||
// defer validity checking when getting data to avoid
|
||||
            // logging the errors multiple times (we need to validate
|
||||
// all accounts at once to detect duplicate values)
|
||||
if (this.#isValid === 'valid') {
|
||||
this.#isValid = 'waiting-for-validation';
|
||||
}
|
||||
} else {
|
||||
this.#isValid = 'invalid';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add account information from a file. Use { legacy: false } as an option
|
||||
* to use the new, Promise-based version.
|
||||
*
|
||||
* @param filePath - file path containing JSON
|
||||
* authentication info (see {@link addAccounts()} for format)
|
||||
*/
|
||||
addFile(filePath: string, options: { legacy: false }): Promise<void>;
|
||||
/** @deprecated Please use Promise-version instead. */
|
||||
addFile(filePath: string, options?: { legacy: true }): void;
|
||||
addFile(filePath: string, options = { legacy: true }) {
|
||||
// On deprecation, remove the legacy part and keep the promises.
|
||||
const readFunc: any = options.legacy ? fs.readFileSync : fs.promises.readFile;
|
||||
const readResult = readFunc(filePath, 'utf8') as Promise<string> | string;
|
||||
const prom = Promise.resolve(readResult).then((data) => {
|
||||
const authData = JSON.parse(data);
|
||||
this.addAccounts(authData, filePath);
|
||||
});
|
||||
return options.legacy ? undefined : prom;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add account information from a filesystem path
|
||||
*
|
||||
* @param globPattern - filesystem glob pattern,
|
||||
* can be a single string or an array of glob patterns. Globs
|
||||
* can be simple file paths or can contain glob matching
|
||||
* characters, like '/a/b/*.json'. The matching files are
|
||||
* individually loaded as JSON and accounts are added. See
|
||||
* {@link addAccounts()} for JSON format.
|
||||
*/
|
||||
addFilesByGlob(globPattern: string | string[]) {
|
||||
// FIXME switch glob to async version
|
||||
const files = glob(globPattern);
|
||||
files.forEach((filePath) => this.addFile(filePath));
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform validation on authentication info previously
|
||||
* loaded. Note that it has to be done on the entire set after an
|
||||
* update to catch duplicate account IDs or access keys.
|
||||
*/
|
||||
validate() {
|
||||
if (this.#isValid === 'waiting-for-validation') {
|
||||
const isValid = this.#isAuthDataValid(this.#authData);
|
||||
this.#isValid = isValid ? 'valid' : 'invalid';
|
||||
}
|
||||
return this.#isValid === 'valid';
|
||||
}
|
||||
|
||||
/**
|
||||
* Get authentication info as a plain JS object containing all accounts
|
||||
* under the "accounts" attribute, with validation.
|
||||
*/
|
||||
get data() {
|
||||
return this.validate() ? this.#authData : null;
|
||||
}
|
||||
|
||||
/** backward-compat: ignore arn if starts with 'aws:' and log a warning */
|
||||
#isNotLegacyAWSARN(account: Account, filePath?: string) {
|
||||
if (account.arn.startsWith('aws:')) {
|
||||
const { name: accountName, arn: accountArn } = account;
|
||||
this.#log.error(
|
||||
'account must have a valid AWS ARN, legacy examples ' +
|
||||
"starting with 'aws:' are not supported anymore. " +
|
||||
'Please convert to a proper account entry (see ' +
|
||||
'examples at https://github.com/scality/S3/blob/' +
|
||||
'master/conf/authdata.json). Also note that support ' +
|
||||
'for account users has been dropped.',
|
||||
{ accountName, accountArn, filePath }
|
||||
);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
#isValidUsers(account: Account, filePath?: string) {
|
||||
if (account.users) {
|
||||
const { name: accountName, arn: accountArn } = account;
|
||||
this.#log.error(
|
||||
'support for account users has been dropped, consider ' +
|
||||
'turning users into account entries (see examples at ' +
|
||||
'https://github.com/scality/S3/blob/master/conf/' +
|
||||
'authdata.json)',
|
||||
{ accountName, accountArn, filePath }
|
||||
);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
#isValidARN(account: Account, filePath?: string) {
|
||||
const arnObj = ARN.createFromString(account.arn);
|
||||
const { name: accountName, arn: accountArn } = account;
|
||||
if (arnObj instanceof ARN) {
|
||||
if (!arnObj.isIAMAccount()) {
|
||||
this.#log.error('authentication config validation error', {
|
||||
reason: 'not an IAM account ARN',
|
||||
accountName,
|
||||
accountArn,
|
||||
filePath,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
this.#log.error('authentication config validation error', {
|
||||
reason: arnObj.error.description,
|
||||
accountName,
|
||||
accountArn,
|
||||
filePath,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
#isAuthDataValid(authData: any, filePath?: string) {
|
||||
const options = { abortEarly: true };
|
||||
const response = types.validators.accounts.validate(authData, options);
|
||||
if (response.error) {
|
||||
this.#dumpJoiErrors(response.error.details, filePath);
|
||||
return false;
|
||||
}
|
||||
const validAccounts = response.value.accounts.filter(
|
||||
(account: Account) =>
|
||||
this.#isNotLegacyAWSARN(account, filePath) &&
|
||||
this.#isValidUsers(account, filePath) &&
|
||||
this.#isValidARN(account, filePath)
|
||||
);
|
||||
const areSomeInvalidAccounts =
|
||||
validAccounts.length !== response.value.accounts.length;
|
||||
if (areSomeInvalidAccounts) {
|
||||
return false;
|
||||
}
|
||||
const keys = validAccounts.flatMap((account) => account.keys);
|
||||
const uniqueKeysValidator = types.validators.keys.unique('access');
|
||||
const areKeysUnique = uniqueKeysValidator.validate(keys);
|
||||
if (areKeysUnique.error) {
|
||||
this.#dumpJoiErrors(areKeysUnique.error.details, filePath);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
#dumpJoiErrors(errors: joi.ValidationErrorItem[], filePath?: string) {
|
||||
errors.forEach((err) => {
|
||||
const baseLogInfo = { item: err.path, filePath };
|
||||
const logInfo = () => {
|
||||
if (err.type === 'array.unique') {
|
||||
const reason = `duplicate value '${err.context?.path}'`;
|
||||
const dupValue = err.context?.value[err.context.path];
|
||||
return { ...baseLogInfo, reason, dupValue };
|
||||
} else {
|
||||
const reason = err.message;
|
||||
const context = err.context;
|
||||
return { ...baseLogInfo, reason, context };
|
||||
}
|
||||
};
|
||||
this.#log.error(
|
||||
'authentication config validation error',
|
||||
logInfo()
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
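A short usage sketch for the new loader; the config path is illustrative, and the Promise-based addFile overload shown here needs an async context.

import AuthLoader from './lib/auth/in_memory/AuthLoader'; // assumed path

async function loadAuth() {
    const loader = new AuthLoader();
    // { legacy: false } opts into the Promise-based readFile variant.
    await loader.addFile('./conf/authdata.json', { legacy: false });
    if (!loader.validate()) {
        throw new Error('invalid authentication config');
    }
    return loader.data; // validated Accounts object, or null when invalid
}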
194 lib/auth/in_memory/Backend.ts Normal file
@@ -0,0 +1,194 @@
|
||||
import * as crypto from 'crypto';
|
||||
import errors from '../../errors';
|
||||
import { calculateSigningKey, hashSignature } from './vaultUtilities';
|
||||
import Indexer from './Indexer';
|
||||
import { Accounts } from './types';
|
||||
|
||||
function _formatResponse(userInfoToSend: any) {
|
||||
return {
|
||||
message: {
|
||||
body: { userInfo: userInfoToSend },
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Class that provides a memory backend for verifying signatures and getting
|
||||
* emails and canonical ids associated with an account.
|
||||
*/
|
||||
class Backend {
|
||||
indexer: Indexer;
|
||||
service: string;
|
||||
|
||||
constructor(service: string, indexer: Indexer) {
|
||||
this.service = service;
|
||||
this.indexer = indexer;
|
||||
}
|
||||
|
||||
// CODEQUALITY-TODO-SYNC Should be synchronous
|
||||
verifySignatureV2(
|
||||
stringToSign: string,
|
||||
signatureFromRequest: string,
|
||||
accessKey: string,
|
||||
options: { algo: 'SHA256' | 'SHA1' },
|
||||
callback: (
|
||||
error: Error | null,
|
||||
data?: ReturnType<typeof _formatResponse>
|
||||
) => void
|
||||
) {
|
||||
const entity = this.indexer.getEntityByKey(accessKey);
|
||||
if (!entity) {
|
||||
return callback(errors.InvalidAccessKeyId);
|
||||
}
|
||||
const secretKey = this.indexer.getSecretKey(entity, accessKey);
|
||||
const reconstructedSig =
|
||||
hashSignature(stringToSign, secretKey, options.algo);
|
||||
if (signatureFromRequest !== reconstructedSig) {
|
||||
return callback(errors.SignatureDoesNotMatch);
|
||||
}
|
||||
const userInfoToSend = {
|
||||
accountDisplayName: this.indexer.getAcctDisplayName(entity),
|
||||
canonicalID: entity.canonicalID,
|
||||
arn: entity.arn,
|
||||
// TODO Why?
|
||||
// @ts-ignore
|
||||
IAMdisplayName: entity.IAMdisplayName,
|
||||
};
|
||||
const vaultReturnObject = _formatResponse(userInfoToSend);
|
||||
return callback(null, vaultReturnObject);
|
||||
}
|
||||
|
||||
// TODO Options not used. Why ?
|
||||
// CODEQUALITY-TODO-SYNC Should be synchronous
|
||||
verifySignatureV4(
|
||||
stringToSign: string,
|
||||
signatureFromRequest: string,
|
||||
accessKey: string,
|
||||
region: string,
|
||||
scopeDate: string,
|
||||
_options: { algo: 'SHA256' | 'SHA1' },
|
||||
callback: (
|
||||
err: Error | null,
|
||||
data?: ReturnType<typeof _formatResponse>
|
||||
) => void
|
||||
) {
|
||||
const entity = this.indexer.getEntityByKey(accessKey);
|
||||
if (!entity) {
|
||||
return callback(errors.InvalidAccessKeyId);
|
||||
}
|
||||
const secretKey = this.indexer.getSecretKey(entity, accessKey);
|
||||
const signingKey = calculateSigningKey(secretKey, region, scopeDate);
|
||||
const reconstructedSig = crypto.createHmac('sha256', signingKey)
|
||||
.update(stringToSign, 'binary').digest('hex');
|
||||
if (signatureFromRequest !== reconstructedSig) {
|
||||
return callback(errors.SignatureDoesNotMatch);
|
||||
}
|
||||
const userInfoToSend = {
|
||||
accountDisplayName: this.indexer.getAcctDisplayName(entity),
|
||||
canonicalID: entity.canonicalID,
|
||||
arn: entity.arn,
|
||||
// TODO Why?
|
||||
// @ts-ignore
|
||||
IAMdisplayName: entity.IAMdisplayName,
|
||||
};
|
||||
const vaultReturnObject = _formatResponse(userInfoToSend);
|
||||
return callback(null, vaultReturnObject);
|
||||
}
|
||||
|
||||
// TODO log not used. Why ?
|
||||
// CODEQUALITY-TODO-SYNC Should be synchronous
|
||||
getCanonicalIds(
|
||||
emails: string[],
|
||||
_log: any,
|
||||
cb: (err: null, data: { message: { body: any } }) => void
|
||||
) {
|
||||
const results = {};
|
||||
emails.forEach(email => {
|
||||
const lowercasedEmail = email.toLowerCase();
|
||||
const entity = this.indexer.getEntityByEmail(lowercasedEmail);
|
||||
if (!entity) {
|
||||
results[email] = 'NotFound';
|
||||
} else {
|
||||
results[email] =
|
||||
entity.canonicalID;
|
||||
}
|
||||
});
|
||||
const vaultReturnObject = {
|
||||
message: {
|
||||
body: results,
|
||||
},
|
||||
};
|
||||
return cb(null, vaultReturnObject);
|
||||
}
|
||||
|
||||
// TODO options not used. Why ?
|
||||
// CODEQUALITY-TODO-SYNC Should be synchronous
|
||||
getEmailAddresses(
|
||||
canonicalIDs: string[],
|
||||
_options: any,
|
||||
cb: (err: null, data: { message: { body: any } }) => void
|
||||
) {
|
||||
const results = {};
|
||||
canonicalIDs.forEach(canonicalId => {
|
||||
const foundEntity = this.indexer.getEntityByCanId(canonicalId);
|
||||
if (!foundEntity || !foundEntity.email) {
|
||||
results[canonicalId] = 'NotFound';
|
||||
} else {
|
||||
results[canonicalId] = foundEntity.email;
|
||||
}
|
||||
});
|
||||
const vaultReturnObject = {
|
||||
message: {
|
||||
body: results,
|
||||
},
|
||||
};
|
||||
return cb(null, vaultReturnObject);
|
||||
}
|
||||
|
||||
// TODO options not used. Why ?
|
||||
// CODEQUALITY-TODO-SYNC Should be synchronous
|
||||
/**
|
||||
* Gets accountIds for a list of accounts based on
|
||||
* the canonical IDs associated with the account
|
||||
* @param canonicalIDs - list of canonicalIDs
|
||||
* @param _options - to send log id to vault
|
||||
* @param cb - callback to calling function
|
||||
     * @returns The description below is inaccurate but kept for the record:
|
||||
* callback with either error or
|
||||
* an object from Vault containing account canonicalID
|
||||
* as each object key and an accountId as the value (or "NotFound")
|
||||
*/
|
||||
getAccountIds(
|
||||
canonicalIDs: string[],
|
||||
_options: any,
|
||||
cb: (err: null, data: { message: { body: any } }) => void
|
||||
) {
|
||||
const results = {};
|
||||
canonicalIDs.forEach(canonicalID => {
|
||||
const foundEntity = this.indexer.getEntityByCanId(canonicalID);
|
||||
if (!foundEntity || !foundEntity.shortid) {
|
||||
results[canonicalID] = 'Not Found';
|
||||
} else {
|
||||
results[canonicalID] = foundEntity.shortid;
|
||||
}
|
||||
});
|
||||
const vaultReturnObject = {
|
||||
message: {
|
||||
body: results,
|
||||
},
|
||||
};
|
||||
return cb(null, vaultReturnObject);
|
||||
}
|
||||
}
|
||||
|
||||
class S3AuthBackend extends Backend {
|
||||
constructor(authdata: Accounts) {
|
||||
super('s3', new Indexer(authdata));
|
||||
}
|
||||
|
||||
refreshAuthData(authData: Accounts) {
|
||||
this.indexer = new Indexer(authData);
|
||||
}
|
||||
}
|
||||
|
||||
export { S3AuthBackend as s3 };
|
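For context, a sketch of exercising the in-memory backend; authData, stringToSign, and the request signature are assumed to exist elsewhere.

import { s3 as S3AuthBackend } from './lib/auth/in_memory/Backend'; // assumed path
import { Accounts } from './lib/auth/in_memory/types';

declare const authData: Accounts;           // loaded via AuthLoader elsewhere
declare const stringToSign: string;         // built per AWS v2 rules
declare const signatureFromRequest: string; // taken from the request

const backend = new S3AuthBackend(authData);
backend.verifySignatureV2(stringToSign, signatureFromRequest, 'accessKey1',
    { algo: 'SHA256' }, (err, res) => {
        // err is InvalidAccessKeyId or SignatureDoesNotMatch on failure;
        // on success, res.message.body.userInfo carries canonicalID and arn.
    });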
93 lib/auth/in_memory/Indexer.ts Normal file
@@ -0,0 +1,93 @@
|
||||
import { Accounts, Account, Entity } from './types';
|
||||
|
||||
/**
|
||||
* Class that provides an internal indexing over the simple data provided by
|
||||
* the authentication configuration file for the memory backend. This allows
|
||||
* accessing the different authentication entities through various types of
|
||||
* keys.
|
||||
*/
|
||||
export default class Indexer {
|
||||
accountsBy: {
|
||||
canId: { [id: string]: Entity | undefined },
|
||||
accessKey: { [id: string]: Entity | undefined },
|
||||
email: { [id: string]: Entity | undefined },
|
||||
}
|
||||
|
||||
constructor(authdata?: Accounts) {
|
||||
this.accountsBy = {
|
||||
canId: {},
|
||||
accessKey: {},
|
||||
email: {},
|
||||
};
|
||||
|
||||
/*
|
||||
* This may happen if the application is configured to use another
|
||||
* authentication backend than in-memory.
|
||||
     * As such, we handle the missing data here instead of failing downstream.
|
||||
*/
|
||||
if (!authdata) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.#build(authdata);
|
||||
}
|
||||
|
||||
#indexAccount(account: Account) {
|
||||
const accountData: Entity = {
|
||||
arn: account.arn,
|
||||
canonicalID: account.canonicalID,
|
||||
shortid: account.shortid,
|
||||
accountDisplayName: account.name,
|
||||
email: account.email.toLowerCase(),
|
||||
keys: [],
|
||||
};
|
||||
this.accountsBy.canId[accountData.canonicalID] = accountData;
|
||||
this.accountsBy.email[accountData.email] = accountData;
|
||||
if (account.keys !== undefined) {
|
||||
account.keys.forEach(key => {
|
||||
accountData.keys.push(key);
|
||||
this.accountsBy.accessKey[key.access] = accountData;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#build(authdata: Accounts) {
|
||||
authdata.accounts.forEach(account => {
|
||||
this.#indexAccount(account);
|
||||
});
|
||||
}
|
||||
|
||||
/** This method returns the account associated to a canonical ID. */
|
||||
getEntityByCanId(canId: string): Entity | undefined {
|
||||
return this.accountsBy.canId[canId];
|
||||
}
|
||||
|
||||
/**
|
||||
* This method returns the entity (either an account or a user) associated
|
||||
* to a canonical ID.
|
||||
* @param {string} key - The accessKey of the entity
|
||||
*/
|
||||
getEntityByKey(key: string): Entity | undefined {
|
||||
return this.accountsBy.accessKey[key];
|
||||
}
|
||||
|
||||
/**
|
||||
* This method returns the entity (either an account or a user) associated
|
||||
* to an email address.
|
||||
*/
|
||||
getEntityByEmail(email: string): Entity | undefined {
|
||||
const lowerCasedEmail = email.toLowerCase();
|
||||
return this.accountsBy.email[lowerCasedEmail];
|
||||
}
|
||||
|
||||
/** This method returns the secret key associated with the entity. */
|
||||
getSecretKey(entity: Entity, accessKey: string) {
|
||||
const keys = entity.keys.filter(kv => kv.access === accessKey);
|
||||
return keys[0].secret;
|
||||
}
|
||||
|
||||
/** This method returns the account display name associated with the entity. */
|
||||
getAcctDisplayName(entity: Entity) {
|
||||
return entity.accountDisplayName;
|
||||
}
|
||||
}
|
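A minimal sketch of the lookup flow the Indexer enables; the access key value is a placeholder.

import Indexer from './lib/auth/in_memory/Indexer'; // assumed path
import { Accounts } from './lib/auth/in_memory/types';

declare const authData: Accounts; // loaded elsewhere

const indexer = new Indexer(authData);
const entity = indexer.getEntityByKey('accessKey1');
if (entity) {
    // getSecretKey assumes the access key is present on the entity's keys.
    const secret = indexer.getSecretKey(entity, 'accessKey1');
}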
@@ -1,148 +0,0 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
const crypto = require('crypto');
|
||||
|
||||
const errors = require('../../errors');
|
||||
const accountsKeyedbyAccessKey =
|
||||
require('./vault.json').accountsKeyedbyAccessKey;
|
||||
const accountsKeyedbyCanID =
|
||||
require('./vault.json').accountsKeyedbyCanID;
|
||||
const accountsKeyedbyEmail =
|
||||
require('./vault.json').accountsKeyedbyEmail;
|
||||
const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
|
||||
const hashSignature = require('./vaultUtilities').hashSignature;
|
||||
|
||||
const backend = {
|
||||
/** verifySignatureV2
|
||||
* @param {string} stringToSign - string to sign built per AWS rules
|
||||
* @param {string} signatureFromRequest - signature sent with request
|
||||
* @param {string} accessKey - user's accessKey
|
||||
* @param {object} options - contains algorithm (SHA1 or SHA256)
|
||||
* @param {function} callback - callback with either error or user info
|
||||
* @return {function} calls callback
|
||||
*/
|
||||
verifySignatureV2: (stringToSign, signatureFromRequest,
|
||||
accessKey, options, callback) => {
|
||||
const account = accountsKeyedbyAccessKey[accessKey];
|
||||
if (!account) {
|
||||
return callback(errors.InvalidAccessKeyId);
|
||||
}
|
||||
const secretKey = account.secretKey;
|
||||
const reconstructedSig =
|
||||
hashSignature(stringToSign, secretKey, options.algo);
|
||||
if (signatureFromRequest !== reconstructedSig) {
|
||||
return callback(errors.SignatureDoesNotMatch);
|
||||
}
|
||||
const userInfoToSend = {
|
||||
accountDisplayName: account.displayName,
|
||||
canonicalID: account.canonicalID,
|
||||
arn: account.arn,
|
||||
IAMdisplayName: account.IAMdisplayName,
|
||||
};
|
||||
const vaultReturnObject = {
|
||||
message: {
|
||||
body: userInfoToSend,
|
||||
},
|
||||
};
|
||||
return callback(null, vaultReturnObject);
|
||||
},
|
||||
|
||||
|
||||
/** verifySignatureV4
|
||||
* @param {string} stringToSign - string to sign built per AWS rules
|
||||
* @param {string} signatureFromRequest - signature sent with request
|
||||
* @param {string} accessKey - user's accessKey
|
||||
* @param {string} region - region specified in request credential
|
||||
* @param {string} scopeDate - date specified in request credential
|
||||
* @param {object} options - options to send to Vault
|
||||
* (just contains reqUid for logging in Vault)
|
||||
* @param {function} callback - callback with either error or user info
|
||||
* @return {function} calls callback
|
||||
*/
|
||||
verifySignatureV4: (stringToSign, signatureFromRequest, accessKey,
|
||||
region, scopeDate, options, callback) => {
|
||||
const account = accountsKeyedbyAccessKey[accessKey];
|
||||
if (!account) {
|
||||
return callback(errors.InvalidAccessKeyId);
|
||||
}
|
||||
const secretKey = account.secretKey;
|
||||
const signingKey = calculateSigningKey(secretKey, region, scopeDate);
|
||||
const reconstructedSig = crypto.createHmac('sha256', signingKey)
|
||||
.update(stringToSign, 'binary').digest('hex');
|
||||
if (signatureFromRequest !== reconstructedSig) {
|
||||
return callback(errors.SignatureDoesNotMatch);
|
||||
}
|
||||
const userInfoToSend = {
|
||||
accountDisplayName: account.displayName,
|
||||
canonicalID: account.canonicalID,
|
||||
arn: account.arn,
|
||||
IAMdisplayName: account.IAMdisplayName,
|
||||
};
|
||||
const vaultReturnObject = {
|
||||
message: {
|
||||
body: userInfoToSend,
|
||||
},
|
||||
};
|
||||
return callback(null, vaultReturnObject);
|
||||
},
|
||||
|
||||
/**
|
||||
* Gets canonical ID's for a list of accounts
|
||||
* based on email associated with account
|
||||
* @param {array} emails - list of email addresses
|
||||
* @param {object} log - log object
|
||||
* @param {function} cb - callback to calling function
|
||||
* @returns {function} callback with either error or
|
||||
* object with email addresses as keys and canonical IDs
|
||||
* as values
|
||||
*/
|
||||
getCanonicalIds: (emails, log, cb) => {
|
||||
const results = {};
|
||||
emails.forEach(email => {
|
||||
const lowercasedEmail = email.toLowerCase();
|
||||
if (!accountsKeyedbyEmail[lowercasedEmail]) {
|
||||
results[email] = 'NotFound';
|
||||
} else {
|
||||
results[email] =
|
||||
accountsKeyedbyEmail[lowercasedEmail].canonicalID;
|
||||
}
|
||||
});
|
||||
const vaultReturnObject = {
|
||||
message: {
|
||||
body: results,
|
||||
},
|
||||
};
|
||||
return cb(null, vaultReturnObject);
|
||||
},
|
||||
|
||||
/**
|
||||
     * Gets email addresses (referred to as display names for getACLs)
|
||||
* for a list of accounts
|
||||
* based on canonical IDs associated with account
|
||||
* @param {array} canonicalIDs - list of canonicalIDs
|
||||
* @param {object} options - to send log id to vault
|
||||
* @param {function} cb - callback to calling function
|
||||
* @returns {function} callback with either error or
|
||||
* an object from Vault containing account canonicalID
|
||||
* as each object key and an email address as the value (or "NotFound")
|
||||
*/
|
||||
getEmailAddresses: (canonicalIDs, options, cb) => {
|
||||
const results = {};
|
||||
canonicalIDs.forEach(canonicalId => {
|
||||
const foundAccount = accountsKeyedbyCanID[canonicalId];
|
||||
if (!foundAccount || !foundAccount.email) {
|
||||
results[canonicalId] = 'NotFound';
|
||||
} else {
|
||||
results[canonicalId] = foundAccount.email;
|
||||
}
|
||||
});
|
||||
const vaultReturnObject = {
|
||||
message: {
|
||||
body: results,
|
||||
},
|
||||
};
|
||||
return cb(null, vaultReturnObject);
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = backend;
|
51 lib/auth/in_memory/types.ts Normal file
@@ -0,0 +1,51 @@
import joi from 'joi';

export type Callback<Data = any> = (err?: Error | null | undefined, data?: Data) => void;

export type Credentials = { access: string; secret: string };
export type Base = {
    arn: string;
    canonicalID: string;
    shortid: string;
    email: string;
    keys: Credentials[];
};
export type Account = Base & { name: string; users: any[] };
export type Accounts = { accounts: Account[] };
export type Entity = Base & { accountDisplayName: string };

const keys = ((): joi.ArraySchema => {
    const str = joi.string().required();
    const items = { access: str, secret: str };
    return joi.array().items(items).required();
})();

const account = (() => {
    return joi.object<Account>({
        name: joi.string().required(),
        email: joi.string().email().required(),
        arn: joi.string().required(),
        canonicalID: joi.string().required(),
        shortid: joi
            .string()
            .regex(/^[0-9]{12}$/)
            .required(),
        keys: keys,
        // backward-compat
        users: joi.array(),
    });
})();

const accounts = (() => {
    return joi.object<Accounts>({
        accounts: joi
            .array()
            .items(account)
            .required()
            .unique('arn')
            .unique('email')
            .unique('canonicalID'),
    });
})();

export const validators = { keys, account, accounts };
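A sketch of validating candidate data against the exported joi schemas; the sample object is illustrative.

import { validators } from './lib/auth/in_memory/types'; // assumed path

const candidate = { accounts: [] }; // would normally hold Account objects
const { error, value } = validators.accounts.validate(candidate);
// error is set on missing fields or duplicate arn/email/canonicalID;
// value is the (possibly coerced) Accounts object otherwise.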
16 lib/auth/in_memory/validateAuthConfig.ts Normal file
@@ -0,0 +1,16 @@
import { Logger } from 'werelogs';
import AuthLoader from './AuthLoader';
import { Accounts } from './types';

/**
 * @deprecated please use {@link AuthLoader} class instead
 * @return true on erroneous data, false on success
 */
export default function validateAuthConfig(
    authdata: Accounts,
    logApi?: { Logger: typeof Logger }
) {
    const authLoader = new AuthLoader(logApi);
    authLoader.addAccounts(authdata);
    return !authLoader.validate();
}
@@ -1,79 +0,0 @@
|
||||
{
|
||||
"accountsKeyedbyAccessKey": {
|
||||
"accessKey1": {
|
||||
"arn": "aws::iam:accessKey1:user/Bart",
|
||||
"IAMdisplayName": "Bart",
|
||||
"secretKey": "verySecretKey1",
|
||||
"canonicalID": "accessKey1canonicalID",
|
||||
"displayName": "accessKey1displayName"
|
||||
},
|
||||
"accessKey2": {
|
||||
"arn": "aws::iam:accessKey2:user/Lisa",
|
||||
"IAMdisplayName": "Lisa",
|
||||
"secretKey": "verySecretKey2",
|
||||
"canonicalID": "accessKey2canonicalID",
|
||||
"displayName": "accessKey2displayName"
|
||||
}
|
||||
},
|
||||
|
||||
"accountsKeyedbyEmail": {
|
||||
"sampleaccount1@sampling.com": {
|
||||
"arn": "aws::iam:123456789012:root",
|
||||
"createDate": "",
|
||||
"saltedPwd": "",
|
||||
"pwdlastUsed": "",
|
||||
"pwdCreated": "",
|
||||
"name": "",
|
||||
"shortid": "123456789012",
|
||||
"email": "sampleaccount1@sampling.com",
|
||||
"canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be",
|
||||
"secretKeyIdList": [],
|
||||
"aliasList": [],
|
||||
"oidcpdList": []
|
||||
},
|
||||
"sampleaccount2@sampling.com": {
|
||||
"arn": "aws::iam:321456789012:root",
|
||||
"createDate": "",
|
||||
"saltedPwd": "",
|
||||
"pwdlastUsed": "",
|
||||
"pwdCreated": "",
|
||||
"name": "",
|
||||
"shortid": "321456789012",
|
||||
"email": "sampleaccount2@sampling.com",
|
||||
"canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2bf",
|
||||
"secretKeyIdList": [],
|
||||
"aliasList": [],
|
||||
"oidcpdList": []
|
||||
}
|
||||
},
|
||||
"accountsKeyedbyCanID": {
|
||||
"79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be": {
|
||||
"arn": "aws::iam:123456789012:root",
|
||||
"createDate": "",
|
||||
"saltedPwd": "",
|
||||
"pwdlastUsed": "",
|
||||
"pwdCreated": "",
|
||||
"name": "",
|
||||
"shortid": "123456789012",
|
||||
"email": "sampleaccount1@sampling.com",
|
||||
"canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be",
|
||||
"secretKeyIdList": [],
|
||||
"aliasList": [],
|
||||
"oidcpdList": []
|
||||
},
|
||||
"79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2bf": {
|
||||
"arn": "aws::iam:321456789012:root",
|
||||
"createDate": "",
|
||||
"saltedPwd": "",
|
||||
"pwdlastUsed": "",
|
||||
"pwdCreated": "",
|
||||
"name": "",
|
||||
"shortid": "321456789012",
|
||||
"email": "sampleaccount2@sampling.com",
|
||||
"canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2bf",
|
||||
"secretKeyIdList": [],
|
||||
"aliasList": [],
|
||||
"oidcpdList": []
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,6 +1,4 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
const crypto = require('crypto');
|
||||
import * as crypto from 'crypto';
|
||||
|
||||
/** hashSignature for v2 Auth
|
||||
* @param {string} stringToSign - built string to sign per AWS rules
|
||||
@@ -8,11 +6,19 @@ const crypto = require('crypto');
|
||||
* @param {string} algorithm - either SHA256 or SHA1
|
||||
* @return {string} reconstructed signature
|
||||
*/
|
||||
function hashSignature(stringToSign, secretKey, algorithm) {
|
||||
export function hashSignature(
|
||||
stringToSign: string,
|
||||
secretKey: string,
|
||||
algorithm: 'SHA256' | 'SHA1'
|
||||
): string {
|
||||
const hmacObject = crypto.createHmac(algorithm, secretKey);
|
||||
return hmacObject.update(stringToSign, 'binary').digest('base64');
|
||||
}
|
||||
|
||||
const sha256Digest = (key: string | Buffer, data: string) => {
|
||||
return crypto.createHmac('sha256', key).update(data, 'binary').digest();
|
||||
};
|
||||
|
||||
/** calculateSigningKey for v4 Auth
|
||||
* @param {string} secretKey - requester's secretKey
|
||||
* @param {string} region - region included in request
|
||||
@@ -20,16 +26,15 @@ function hashSignature(stringToSign, secretKey, algorithm) {
|
||||
* @param {string} [service] - To specify another service than s3
|
||||
* @return {string} signingKey - signingKey to calculate signature
|
||||
*/
|
||||
function calculateSigningKey(secretKey, region, scopeDate, service) {
|
||||
const dateKey = crypto.createHmac('sha256', `AWS4${secretKey}`)
|
||||
.update(scopeDate, 'binary').digest();
|
||||
const dateRegionKey = crypto.createHmac('sha256', dateKey)
|
||||
.update(region, 'binary').digest();
|
||||
const dateRegionServiceKey = crypto.createHmac('sha256', dateRegionKey)
|
||||
.update(service || 's3', 'binary').digest();
|
||||
const signingKey = crypto.createHmac('sha256', dateRegionServiceKey)
|
||||
.update('aws4_request', 'binary').digest();
|
||||
export function calculateSigningKey(
|
||||
secretKey: string,
|
||||
region: string,
|
||||
scopeDate: string,
|
||||
service?: string
|
||||
): Buffer {
|
||||
const dateKey = sha256Digest(`AWS4${secretKey}`, scopeDate);
|
||||
const dateRegionKey = sha256Digest(dateKey, region);
|
||||
const dateRegionServiceKey = sha256Digest(dateRegionKey, service || 's3');
|
||||
const signingKey = sha256Digest(dateRegionServiceKey, 'aws4_request');
|
||||
return signingKey;
|
||||
}
|
||||
|
||||
module.exports = { hashSignature, calculateSigningKey };
|
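The refactor makes the standard SigV4 key-derivation chain explicit; spelled out, with illustrative inputs.

// kDate    = HMAC-SHA256('AWS4' + secretKey, scopeDate)
// kRegion  = HMAC-SHA256(kDate, region)
// kService = HMAC-SHA256(kRegion, service || 's3')
// kSigning = HMAC-SHA256(kService, 'aws4_request')
import { calculateSigningKey } from './lib/auth/in_memory/vaultUtilities'; // assumed path

const signingKey = calculateSigningKey('verySecretKey1', 'us-east-1', '20240101');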
@@ -1,7 +1,5 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
function algoCheck(signatureLength) {
|
||||
let algo;
|
||||
export default function algoCheck(signatureLength: number) {
|
||||
let algo: 'sha256' | 'sha1';
|
||||
// If the signature sent is 44 characters,
|
||||
// this means that sha256 was used:
|
||||
// 44 characters in base64
|
||||
@@ -13,7 +11,6 @@ function algoCheck(signatureLength) {
|
||||
if (signatureLength === SHA1LEN) {
|
||||
algo = 'sha1';
|
||||
}
|
||||
// @ts-ignore
|
||||
return algo;
|
||||
}
|
||||
|
||||
module.exports = algoCheck;
|
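The length heuristic above rests on base64 arithmetic; a short illustration, with the import path assumed.

// SHA-256 digests are 32 bytes -> 44 base64 chars; SHA-1 is 20 -> 28.
import algoCheck from './lib/auth/v2/algoCheck';

algoCheck(44); // 'sha256'
algoCheck(28); // 'sha1'
// Any other length falls through with algo unassigned, hence the
// @ts-ignore above the return.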
@@ -1,11 +0,0 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
const headerAuthCheck = require('./headerAuthCheck');
|
||||
const queryAuthCheck = require('./queryAuthCheck');
|
||||
|
||||
const authV2 = {
|
||||
header: headerAuthCheck,
|
||||
query: queryAuthCheck,
|
||||
};
|
||||
|
||||
module.exports = authV2;
|
2 lib/auth/v2/authV2.ts Normal file
@@ -0,0 +1,2 @@
export * as header from './headerAuthCheck';
export * as query from './queryAuthCheck';
@@ -1,9 +1,9 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
const errors = require('../../errors');
|
||||
import { Logger } from 'werelogs';
|
||||
import errors from '../../errors';
|
||||
|
||||
const epochTime = new Date('1970-01-01').getTime();
|
||||
|
||||
function checkRequestExpiry(timestamp, log) {
|
||||
export default function checkRequestExpiry(timestamp: number, log: Logger) {
|
||||
// If timestamp is before epochTime, the request is invalid and return
|
||||
// errors.AccessDenied
|
||||
if (timestamp < epochTime) {
|
||||
@@ -32,5 +32,3 @@ function checkRequestExpiry(timestamp, log) {
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
module.exports = checkRequestExpiry;
|
@@ -1,11 +1,14 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
import { Logger } from 'werelogs';
|
||||
import utf8 from 'utf8';
|
||||
import getCanonicalizedAmzHeaders from './getCanonicalizedAmzHeaders';
|
||||
import getCanonicalizedResource from './getCanonicalizedResource';
|
||||
|
||||
const utf8 = require('utf8');
|
||||
|
||||
const getCanonicalizedAmzHeaders = require('./getCanonicalizedAmzHeaders');
|
||||
const getCanonicalizedResource = require('./getCanonicalizedResource');
|
||||
|
||||
function constructStringToSign(request, data, log) {
|
||||
export default function constructStringToSign(
|
||||
request: any,
|
||||
data: { [key: string]: string },
|
||||
log: Logger,
|
||||
clientType?: any
|
||||
) {
|
||||
/*
|
||||
Build signature per AWS requirements:
|
||||
StringToSign = HTTP-Verb + '\n' +
|
||||
@@ -36,11 +39,9 @@ function constructStringToSign(request, data, log) {
|
||||
than here in stringToSign so we have replicated that.
|
||||
*/
|
||||
const date = query.Expires ? query.Expires : headers.date;
|
||||
const combinedQueryHeaders = Object.assign(headers, query);
|
||||
const combinedQueryHeaders = Object.assign({}, headers, query);
|
||||
stringToSign += (date ? `${date}\n` : '\n')
|
||||
+ getCanonicalizedAmzHeaders(combinedQueryHeaders)
|
||||
+ getCanonicalizedResource(request);
|
||||
+ getCanonicalizedAmzHeaders(combinedQueryHeaders, clientType)
|
||||
+ getCanonicalizedResource(request, clientType);
|
||||
return utf8.encode(stringToSign);
|
||||
}
|
||||
|
||||
module.exports = constructStringToSign;
|
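For reference, a hedged sketch of the v2 string this builds; the request and log objects, and the example values, are illustrative.

import constructStringToSign from './lib/auth/v2/constructStringToSign'; // assumed path

// Produces, roughly (values illustrative):
//   GET\n                     HTTP verb
//   \n                        content-md5 (empty here)
//   \n                        content-type (empty here)
//   1369353600\n              date, or the Expires query value for pre-signed URLs
//   x-amz-acl:public-read\n   canonicalized x-amz- (or x-goog-) headers
//   /bucket/key?acl           canonicalized resource
declare const request: any;
declare const log: any;
const stringToSign = constructStringToSign(request, request.query, log);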
@@ -1,13 +1,14 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
function getCanonicalizedAmzHeaders(headers) {
|
||||
export default function getCanonicalizedAmzHeaders(headers: Headers, clientType: string) {
|
||||
/*
|
||||
Iterate through headers and pull any headers that are x-amz headers.
|
||||
Need to include 'x-amz-date' here even though AWS docs
|
||||
    are ambiguous on this.
|
||||
*/
|
||||
const filterFn = clientType === 'GCP' ?
|
||||
(val: string) => val.substr(0, 7) === 'x-goog-' :
|
||||
(val: string) => val.substr(0, 6) === 'x-amz-';
|
||||
const amzHeaders = Object.keys(headers)
|
||||
.filter(val => val.substr(0, 6) === 'x-amz-')
|
||||
.filter(filterFn)
|
||||
.map(val => [val.trim(), headers[val].trim()]);
|
||||
/*
|
||||
AWS docs state that duplicate headers should be combined
|
||||
@@ -40,5 +41,3 @@ function getCanonicalizedAmzHeaders(headers) {
|
||||
`${headerStr}${current[0]}:${current[1]}\n`,
|
||||
'');
|
||||
}
|
||||
|
||||
module.exports = getCanonicalizedAmzHeaders;
|
@@ -1,8 +1,45 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
import * as url from 'url';
|
||||
|
||||
const url = require('url');
|
||||
const gcpSubresources = [
|
||||
'acl',
|
||||
'billing',
|
||||
'compose',
|
||||
'cors',
|
||||
'encryption',
|
||||
'lifecycle',
|
||||
'location',
|
||||
'logging',
|
||||
'storageClass',
|
||||
'tagging',
|
||||
'upload_id',
|
||||
'versioning',
|
||||
'versions',
|
||||
'websiteConfig',
|
||||
];
|
||||
|
||||
function getCanonicalizedResource(request) {
|
||||
const awsSubresources = [
|
||||
'acl',
|
||||
'cors',
|
||||
'delete',
|
||||
'lifecycle',
|
||||
'location',
|
||||
'logging',
|
||||
'notification',
|
||||
'partNumber',
|
||||
'policy',
|
||||
'requestPayment',
|
||||
'tagging',
|
||||
'torrent',
|
||||
'uploadId',
|
||||
'uploads',
|
||||
'versionId',
|
||||
'versioning',
|
||||
'replication',
|
||||
'versions',
|
||||
'website',
|
||||
];
|
||||
|
||||
export default function getCanonicalizedResource(request: any, clientType: string) {
|
||||
/*
|
||||
This variable is used to determine whether to insert
|
||||
a '?' or '&'. Once a query parameter is added to the resourceString,
|
||||
@@ -24,25 +61,8 @@ function getCanonicalizedResource(request) {
|
||||
*/
|
||||
|
||||
// Specified subresources:
|
||||
const subresources = [
|
||||
'acl',
|
||||
'cors',
|
||||
'delete',
|
||||
'lifecycle',
|
||||
'location',
|
||||
'logging',
|
||||
'notification',
|
||||
'partNumber',
|
||||
'policy',
|
||||
'requestPayment',
|
||||
'torrent',
|
||||
'uploadId',
|
||||
'uploads',
|
||||
'versionId',
|
||||
'versioning',
|
||||
'versions',
|
||||
'website',
|
||||
];
|
||||
const subresources =
|
||||
clientType === 'GCP' ? gcpSubresources : awsSubresources;
|
||||
|
||||
/*
|
||||
If the request includes parameters in the query string,
|
||||
@@ -95,5 +115,3 @@ function getCanonicalizedResource(request) {
|
||||
}
|
||||
return resourceString;
|
||||
}
|
||||
|
||||
module.exports = getCanonicalizedResource;
|
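A hedged illustration of what the subresource whitelist does to a query string; the request object is assumed.

import getCanonicalizedResource from './lib/auth/v2/getCanonicalizedResource'; // assumed path

// For GET /bucket?acl&foo=bar only whitelisted subresources survive,
// so the canonical resource is '/bucket?acl' and 'foo' is dropped.
// Passing clientType === 'GCP' swaps in the GCP subresource list
// (e.g. 'billing', 'compose') in place of AWS-only entries like 'uploads'.
declare const request: any;
const resource = getCanonicalizedResource(request, 'GCP');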
@@ -1,14 +1,20 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
import { Logger } from 'werelogs';
|
||||
import errors from '../../errors';
|
||||
import * as constants from '../../constants';
|
||||
import constructStringToSign from './constructStringToSign';
|
||||
import checkRequestExpiry from './checkRequestExpiry';
|
||||
import algoCheck from './algoCheck';
|
||||
|
||||
const errors = require('../../errors');
|
||||
const constructStringToSign = require('./constructStringToSign');
|
||||
const checkRequestExpiry = require('./checkRequestExpiry');
|
||||
const algoCheck = require('./algoCheck');
|
||||
|
||||
function check(request, log, data) {
|
||||
export function check(request: any, log: Logger, data: { [key: string]: string }) {
|
||||
log.trace('running header auth check');
|
||||
const headers = request.headers;
|
||||
|
||||
const token = headers['x-amz-security-token'];
|
||||
if (token && !constants.iamSecurityToken.pattern.test(token)) {
|
||||
log.debug('invalid security token', { token });
|
||||
return { err: errors.InvalidToken };
|
||||
}
|
||||
|
||||
// Check to make sure timestamp is within 15 minutes of current time
|
||||
let timestamp = headers['x-amz-date'] ?
|
||||
headers['x-amz-date'] : headers.date;
|
||||
@@ -25,6 +31,7 @@ function check(request, log, data) {
|
||||
if (err) {
|
||||
return { err };
|
||||
}
|
||||
|
||||
// Authorization Header should be
|
||||
// in the format of 'AWS AccessKey:Signature'
|
||||
const authInfo = headers.authorization;
|
||||
@@ -44,6 +51,7 @@ function check(request, log, data) {
|
||||
log.trace('invalid authorization header', { authInfo });
|
||||
return { err: errors.MissingSecurityHeader };
|
||||
}
|
||||
// @ts-ignore
|
||||
log.addDefaultFields({ accessKey });
|
||||
|
||||
const signatureFromRequest = authInfo.substring(semicolonIndex + 1).trim();
|
||||
@@ -67,9 +75,8 @@ function check(request, log, data) {
|
||||
authType: 'REST-HEADER',
|
||||
signatureVersion: 'AWS',
|
||||
signatureAge: Date.now() - timestamp,
|
||||
securityToken: token,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = { check };
|
@@ -1,16 +1,22 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
import { Logger } from 'werelogs';
|
||||
import errors from '../../errors';
|
||||
import * as constants from '../../constants';
|
||||
import algoCheck from './algoCheck';
|
||||
import constructStringToSign from './constructStringToSign';
|
||||
|
||||
const errors = require('../../errors');
|
||||
|
||||
const algoCheck = require('./algoCheck');
|
||||
const constructStringToSign = require('./constructStringToSign');
|
||||
|
||||
function check(request, log, data) {
|
||||
export function check(request: any, log: Logger, data: { [key: string]: string }) {
|
||||
log.trace('running query auth check');
|
||||
if (request.method === 'POST') {
|
||||
log.debug('query string auth not supported for post requests');
|
||||
return { err: errors.NotImplemented };
|
||||
}
|
||||
|
||||
const token = data.SecurityToken;
|
||||
if (token && !constants.iamSecurityToken.pattern.test(token)) {
|
||||
log.debug('invalid security token', { token });
|
||||
return { err: errors.InvalidToken };
|
||||
}
|
||||
|
||||
/*
|
||||
Check whether request has expired or if
|
||||
expires parameter is more than 604800000 milliseconds
|
||||
@@ -20,15 +26,20 @@ function check(request, log, data) {
|
||||
milliseconds to compare to Date.now()
|
||||
*/
|
||||
const expirationTime = parseInt(data.Expires, 10) * 1000;
|
||||
if (isNaN(expirationTime)) {
|
||||
if (Number.isNaN(expirationTime)) {
|
||||
log.debug('invalid expires parameter',
|
||||
{ expires: data.Expires });
|
||||
return { err: errors.MissingSecurityHeader };
|
||||
}
|
||||
|
||||
const currentTime = Date.now();
|
||||
// 604800000 ms (seven days).
|
||||
if (expirationTime > currentTime + 604800000) {
|
||||
|
||||
const preSignedURLExpiry = process.env.PRE_SIGN_URL_EXPIRY
|
||||
&& !Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY)
|
||||
? Number.parseInt(process.env.PRE_SIGN_URL_EXPIRY, 10)
|
||||
: constants.defaultPreSignedURLExpiry * 1000;
|
||||
|
||||
if (expirationTime > currentTime + preSignedURLExpiry) {
|
||||
log.debug('expires parameter too far in future',
|
||||
{ expires: request.query.Expires });
|
||||
return { err: errors.AccessDenied };
|
||||
@@ -39,6 +50,7 @@ function check(request, log, data) {
|
||||
return { err: errors.RequestTimeTooSkewed };
|
||||
}
|
||||
const accessKey = data.AWSAccessKeyId;
|
||||
// @ts-ignore
|
||||
log.addDefaultFields({ accessKey });
|
||||
|
||||
const signatureFromRequest = decodeURIComponent(data.Signature);
|
||||
@@ -65,9 +77,8 @@ function check(request, log, data) {
|
||||
algo,
|
||||
authType: 'REST-QUERY-STRING',
|
||||
signatureVersion: 'AWS',
|
||||
securityToken: token,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = { check };
|
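The expiry cap is now configurable; a sketch of the override, assuming the environment value is read as milliseconds, as the code above suggests.

// Before the server reads it, cap pre-signed URL lifetime at one hour.
// Unset, the cap falls back to constants.defaultPreSignedURLExpiry * 1000.
process.env.PRE_SIGN_URL_EXPIRY = String(60 * 60 * 1000);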
@@ -1,11 +0,0 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
const headerAuthCheck = require('./headerAuthCheck');
|
||||
const queryAuthCheck = require('./queryAuthCheck');
|
||||
|
||||
const authV4 = {
|
||||
header: headerAuthCheck,
|
||||
query: queryAuthCheck,
|
||||
};
|
||||
|
||||
module.exports = authV4;
|
2 lib/auth/v4/authV4.ts Normal file
@@ -0,0 +1,2 @@
export * as header from './headerAuthCheck';
export * as query from './queryAuthCheck';
@@ -1,59 +0,0 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
/*
|
||||
AWS's URI encoding rules:
|
||||
URI encode every byte. Uri-Encode() must enforce the following rules:
|
||||
|
||||
URI encode every byte except the unreserved characters:
|
||||
'A'-'Z', 'a'-'z', '0'-'9', '-', '.', '_', and '~'.
|
||||
The space character is a reserved character and must be
|
||||
encoded as "%20" (and not as "+").
|
||||
Each Uri-encoded byte is formed by a '%' and the two-digit
|
||||
hexadecimal value of the byte.
|
||||
Letters in the hexadecimal value must be uppercase, for example "%1A".
|
||||
Encode the forward slash character, '/',
|
||||
everywhere except in the object key name.
|
||||
For example, if the object key name is photos/Jan/sample.jpg,
|
||||
the forward slash in the key name is not encoded.
|
||||
See http://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-header-based-auth.html
|
||||
*/
|
||||
|
||||
// converts utf8 character to hex and pads "%" before every two hex digits
|
||||
function _toHexUTF8(char) {
|
||||
const hexRep = Buffer.from(char, 'utf8').toString('hex').toUpperCase();
|
||||
let res = '';
|
||||
hexRep.split('').forEach((v, n) => {
|
||||
// pad % before every 2 hex digits
|
||||
if (n % 2 === 0) {
|
||||
res += '%';
|
||||
}
|
||||
res += v;
|
||||
});
|
||||
return res;
|
||||
}
|
||||
|
||||
function awsURIencode(input, encodeSlash, noEncodeStar) {
|
||||
const encSlash = encodeSlash === undefined ? true : encodeSlash;
|
||||
let encoded = '';
|
||||
for (let i = 0; i < input.length; i++) {
|
||||
const ch = input.charAt(i);
|
||||
if ((ch >= 'A' && ch <= 'Z') ||
|
||||
(ch >= 'a' && ch <= 'z') ||
|
||||
(ch >= '0' && ch <= '9') ||
|
||||
ch === '_' || ch === '-' ||
|
||||
ch === '~' || ch === '.') {
|
||||
encoded = encoded.concat(ch);
|
||||
} else if (ch === ' ') {
|
||||
encoded = encoded.concat('%20');
|
||||
} else if (ch === '/') {
|
||||
encoded = encoded.concat(encSlash ? '%2F' : ch);
|
||||
} else if (ch === '*') {
|
||||
encoded = encoded.concat(noEncodeStar ? '*' : '%2A');
|
||||
} else {
|
||||
encoded = encoded.concat(_toHexUTF8(ch));
|
||||
}
|
||||
}
|
||||
return encoded;
|
||||
}
|
||||
|
||||
module.exports = awsURIencode;
|
78 lib/auth/v4/awsURIencode.ts Normal file
@@ -0,0 +1,78 @@
/*
AWS's URI encoding rules:
URI encode every byte. Uri-Encode() must enforce the following rules:

URI encode every byte except the unreserved characters:
'A'-'Z', 'a'-'z', '0'-'9', '-', '.', '_', and '~'.
The space character is a reserved character and must be
encoded as "%20" (and not as "+").
Each Uri-encoded byte is formed by a '%' and the two-digit
hexadecimal value of the byte.
Letters in the hexadecimal value must be uppercase, for example "%1A".
Encode the forward slash character, '/',
everywhere except in the object key name.
For example, if the object key name is photos/Jan/sample.jpg,
the forward slash in the key name is not encoded.
See http://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-header-based-auth.html
*/

// converts a utf8 character to hex and pads '%' before every two hex digits
function _toHexUTF8(char: string) {
    const hexRep = Buffer.from(char, 'utf8').toString('hex').toUpperCase();
    let res = '';
    hexRep.split('').forEach((v, n) => {
        // pad % before every 2 hex digits
        if (n % 2 === 0) {
            res += '%';
        }
        res += v;
    });
    return res;
}

export default function awsURIencode(
    input: string,
    encodeSlash?: boolean,
    noEncodeStar?: boolean
) {
    /**
     * Duplicate query params are not supported by AWS S3 APIs. These params
     * are parsed as Arrays by the Node.js HTTP parser, which breaks this method.
     */
    if (typeof input !== 'string') {
        return '';
    }

    // precompute slash and star replacements based on the flags
    const slash = encodeSlash === undefined || encodeSlash ? '%2F' : '/';
    const star = noEncodeStar !== undefined && noEncodeStar ? '*' : '%2A';
    const encoded: string[] = [];

    const charArray = Array.from(input);
    for (const ch of charArray) {
        switch (true) {
        case ch >= 'A' && ch <= 'Z':
        case ch >= 'a' && ch <= 'z':
        case ch >= '0' && ch <= '9':
        case ch === '-':
        case ch === '_':
        case ch === '~':
        case ch === '.':
            encoded.push(ch);
            break;
        case ch === '/':
            encoded.push(slash);
            break;
        case ch === '*':
            encoded.push(star);
            break;
        case ch === ' ':
            encoded.push('%20');
            break;
        default:
            encoded.push(_toHexUTF8(ch));
            break;
        }
    }
    return encoded.join('');
}
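A few hedged input/output pairs for the rewritten encoder, derived by reading the switch above; the import path is assumed.

import awsURIencode from './lib/auth/v4/awsURIencode'; // assumed path

awsURIencode('photos/Jan/sample.jpg');        // 'photos%2FJan%2Fsample.jpg'
awsURIencode('photos/Jan/sample.jpg', false); // 'photos/Jan/sample.jpg' (object keys keep '/')
awsURIencode('a b*', true, true);             // 'a%20b*' (star kept for aws-sdk-java)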
@@ -1,26 +1,38 @@
|
||||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
const crypto = require('crypto');
|
||||
|
||||
const createCanonicalRequest = require('./createCanonicalRequest');
|
||||
import * as crypto from 'crypto';
|
||||
import { Logger } from 'werelogs';
|
||||
import createCanonicalRequest from './createCanonicalRequest';
|
||||
|
||||
/**
|
||||
* constructStringToSign - creates V4 stringToSign
|
||||
* @param {object} params - params object
|
||||
* @returns {string} - stringToSign
|
||||
*/
|
||||
function constructStringToSign(params) {
|
||||
const request = params.request;
|
||||
const signedHeaders = params.signedHeaders;
|
||||
const payloadChecksum = params.payloadChecksum;
|
||||
const credentialScope = params.credentialScope;
|
||||
const timestamp = params.timestamp;
|
||||
const query = params.query;
|
||||
const log = params.log;
|
||||
export default function constructStringToSign(params: {
|
||||
request: any;
|
||||
signedHeaders: any;
|
||||
payloadChecksum: any;
|
||||
credentialScope: string;
|
||||
timestamp: string;
|
||||
query: { [key: string]: string };
|
||||
log?: Logger;
|
||||
proxyPath?: string;
|
||||
awsService: string;
|
||||
}): string | Error {
|
||||
const {
|
||||
request,
|
||||
signedHeaders,
|
||||
payloadChecksum,
|
||||
credentialScope,
|
||||
timestamp,
|
||||
query,
|
||||
log,
|
||||
proxyPath,
|
||||
} = params;
|
||||
const path = proxyPath || request.path;
|
||||
|
||||
const canonicalReqResult = createCanonicalRequest({
|
||||
pHttpVerb: request.method,
|
||||
pResource: request.path,
|
||||
pResource: path,
|
||||
pQuery: query,
|
||||
pHeaders: request.headers,
|
||||
pSignedHeaders: signedHeaders,
|
||||
@@ -28,6 +40,8 @@ function constructStringToSign(params) {
|
||||
service: params.awsService,
|
||||
});
|
||||
|
||||
// TODO Why that line?
|
||||
// @ts-ignore
|
||||
if (canonicalReqResult instanceof Error) {
|
||||
if (log) {
|
||||
log.error('error creating canonicalRequest');
|
||||
@@ -44,5 +58,3 @@ function constructStringToSign(params) {
|
||||
`${credentialScope}\n${canonicalHex}`;
|
||||
return stringToSign;
|
||||
}
|
||||
|
||||
module.exports = constructStringToSign;
|
@@ -1,27 +1,33 @@
 'use strict'; // eslint-disable-line strict

-const awsURIencode = require('./awsURIencode');
-const crypto = require('crypto');
-const queryString = require('querystring');
+import * as crypto from 'crypto';
+import * as queryString from 'querystring';
+import awsURIencode from './awsURIencode';

 /**
  * createCanonicalRequest - creates V4 canonical request
- * @param {object} params - contains pHttpVerb (request type),
+ * @param params - contains pHttpVerb (request type),
  * pResource (parsed from URL), pQuery (request query),
  * pHeaders (request headers), pSignedHeaders (signed headers from request),
  * payloadChecksum (from request)
- * @returns {string} - canonicalRequest
+ * @returns - canonicalRequest
  */
-function createCanonicalRequest(params) {
+export default function createCanonicalRequest(
+    params: {
+        pHttpVerb: string;
+        pResource: string;
+        pQuery: { [key: string]: string };
+        pHeaders: any;
+        pSignedHeaders: any;
+        service: string;
+        payloadChecksum: string;
+    }
+) {
     const pHttpVerb = params.pHttpVerb;
     const pResource = params.pResource;
     const pQuery = params.pQuery;
     const pHeaders = params.pHeaders;
     const pSignedHeaders = params.pSignedHeaders;
     const service = params.service;

     let payloadChecksum = params.payloadChecksum;

     if (!payloadChecksum) {
         if (pHttpVerb === 'GET') {
             payloadChecksum = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b' +
@@ -34,7 +40,7 @@ function createCanonicalRequest(params) {
     if (/aws-sdk-java\/[0-9.]+/.test(pHeaders['user-agent'])) {
         notEncodeStar = true;
     }
-    let payload = queryString.stringify(pQuery, null, null, {
+    let payload = queryString.stringify(pQuery, undefined, undefined, {
         encodeURIComponent: input => awsURIencode(input, true,
             notEncodeStar),
     });
@@ -48,7 +54,8 @@ function createCanonicalRequest(params) {

     // canonical query string
     let canonicalQueryStr = '';
-    if (pQuery && !((service === 'iam' || service === 'ring') &&
+    if (pQuery && !((service === 'iam' || service === 'ring' ||
+        service === 'sts') &&
         pHttpVerb === 'POST')) {
         const sortedQueryParams = Object.keys(pQuery).sort().map(key => {
             const encodedKey = awsURIencode(key);
@@ -60,11 +67,11 @@ function createCanonicalRequest(params) {

     // signed headers
     const signedHeadersList = pSignedHeaders.split(';');
-    signedHeadersList.sort((a, b) => a.localeCompare(b));
+    signedHeadersList.sort((a: any, b: any) => a.localeCompare(b));
     const signedHeaders = signedHeadersList.join(';');

     // canonical headers
-    const canonicalHeadersList = signedHeadersList.map(signedHeader => {
+    const canonicalHeadersList = signedHeadersList.map((signedHeader: any) => {
         if (pHeaders[signedHeader] !== undefined) {
             const trimmedHeader = pHeaders[signedHeader]
                 .trim().replace(/\s+/g, ' ');
@@ -86,5 +93,3 @@ function createCanonicalRequest(params) {
         `${signedHeaders}\n${payloadChecksum}`;
     return canonicalRequest;
 }
-
-module.exports = createCanonicalRequest;
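The canonical request built above is a newline-joined concatenation of verb, resource, canonical query string, canonical headers, signed-header list, and payload checksum. A rough shape on a hypothetical GET; the values are illustrative, and real values go through awsURIencode and the header canonicalization in the function above:

```ts
// Illustrative canonical request layout (not produced by the module itself).
const canonicalRequest = [
    'GET',                                         // pHttpVerb
    '/bucket/key',                                 // pResource
    'X-Amz-Expires=86400',                         // sorted, encoded query
    'host:s3.example.com\nx-amz-date:20160202T220410Z\n', // canonical headers
    'host;x-amz-date',                             // sorted signed headers
    'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', // sha256('')
].join('\n');
```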
@@ -1,27 +1,40 @@
 'use strict'; // eslint-disable-line strict

-const errors = require('../../../lib/errors');
-
-const constructStringToSign = require('./constructStringToSign');
-const checkTimeSkew = require('./timeUtils').checkTimeSkew;
-const convertUTCtoISO8601 = require('./timeUtils').convertUTCtoISO8601;
-const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs;
-const extractAuthItems = require('./validateInputs').extractAuthItems;
-const validateCredentials = require('./validateInputs').validateCredentials;
-const areSignedHeadersComplete =
-    require('./validateInputs').areSignedHeadersComplete;
+import { Logger } from 'werelogs';
+import errors from '../../../lib/errors';
+import * as constants from '../../constants';
+import constructStringToSign from './constructStringToSign';
+import {
+    checkTimeSkew,
+    convertUTCtoISO8601,
+    convertAmzTimeToMs,
+} from './timeUtils';
+import {
+    extractAuthItems,
+    validateCredentials,
+    areSignedHeadersComplete,
+} from './validateInputs';

 /**
  * V4 header auth check
- * @param {object} request - HTTP request object
- * @param {object} log - logging object
- * @param {object} data - Parameters from queryString parsing or body of
+ * @param request - HTTP request object
+ * @param log - logging object
+ * @param data - Parameters from queryString parsing or body of
  * POST request
- * @param {string} awsService - Aws service ('iam' or 's3')
- * @return {callback} calls callback
+ * @param awsService - Aws service ('iam' or 's3')
  */
-function check(request, log, data, awsService) {
+export function check(
+    request: any,
+    log: Logger,
+    data: { [key: string]: string },
+    awsService: string
+) {
     log.trace('running header auth check');

+    const token = request.headers['x-amz-security-token'];
+    if (token && !constants.iamSecurityToken.pattern.test(token)) {
+        log.debug('invalid security token', { token });
+        return { err: errors.InvalidToken };
+    }
+
     // authorization header
     const authHeader = request.headers.authorization;
     if (!authHeader) {
@@ -54,16 +67,16 @@ function check(request, log, data, awsService) {

     log.trace('authorization header from request', { authHeader });

-    const signatureFromRequest = authHeaderItems.signatureFromRequest;
-    const credentialsArr = authHeaderItems.credentialsArr;
-    const signedHeaders = authHeaderItems.signedHeaders;
+    const signatureFromRequest = authHeaderItems.signatureFromRequest!;
+    const credentialsArr = authHeaderItems.credentialsArr!;
+    const signedHeaders = authHeaderItems.signedHeaders!;

     if (!areSignedHeadersComplete(signedHeaders, request.headers)) {
         log.debug('signedHeaders are incomplete', { signedHeaders });
         return { err: errors.AccessDenied };
     }

-    let timestamp;
+    let timestamp: string | undefined;
     // check request timestamp
     const xAmzDate = request.headers['x-amz-date'];
     if (xAmzDate) {
@@ -90,7 +103,7 @@ function check(request, log, data, awsService) {
         log);
     if (validationResult instanceof Error) {
         log.debug('credentials in improper format', { credentialsArr,
             timestamp, validationResult });
         return { err: validationResult };
     }
     // credentialsArr is [accessKey, date, region, aws-service, aws4_request]
@@ -153,9 +166,8 @@ function check(request, log, data, awsService) {
             // chunk evaluation
             credentialScope,
             timestamp,
+            securityToken: token,
         },
     },
     };
 }
-
-module.exports = { check };
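For orientation, this is the shape of the V4 Authorization header that check() consumes; extractAuthItems (further down in this diff) splits it on ',' into the credential, signed-headers, and signature parts. Values here are illustrative:

```ts
// Illustrative header; the access key and signature are sample values.
const authorizationHeader =
    'AWS4-HMAC-SHA256 ' +
    'Credential=AKIAIOSFODNN7EXAMPLE/20160202/us-east-1/s3/aws4_request, ' +
    'SignedHeaders=host;x-amz-content-sha256;x-amz-date, ' +
    'Signature=fe5f80f77d5fa3beca038a248ff027d0445342fe2855ddc963176630326f1024';
```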
@@ -1,33 +1,37 @@
 'use strict'; // eslint-disable-line strict

-const errors = require('../../errors');
-
-const constructStringToSign = require('./constructStringToSign');
-const checkTimeSkew = require('./timeUtils').checkTimeSkew;
-const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs;
-const validateCredentials = require('./validateInputs').validateCredentials;
-const extractQueryParams = require('./validateInputs').extractQueryParams;
-const areSignedHeadersComplete =
-    require('./validateInputs').areSignedHeadersComplete;
+import { Logger } from 'werelogs';
+import * as constants from '../../constants';
+import errors from '../../errors';
+import constructStringToSign from './constructStringToSign';
+import { checkTimeSkew, convertAmzTimeToMs } from './timeUtils';
+import { validateCredentials, extractQueryParams } from './validateInputs';
+import { areSignedHeadersComplete } from './validateInputs';

 /**
  * V4 query auth check
- * @param {object} request - HTTP request object
- * @param {object} log - logging object
- * @param {object} data - Contains authentication params (GET or POST data)
- * @return {callback} calls callback
+ * @param request - HTTP request object
+ * @param log - logging object
+ * @param data - Contains authentication params (GET or POST data)
  */
-function check(request, log, data) {
+export function check(request: any, log: Logger, data: { [key: string]: string }) {
     const authParams = extractQueryParams(data, log);

     if (Object.keys(authParams).length !== 5) {
         return { err: errors.InvalidArgument };
     }
-    const signedHeaders = authParams.signedHeaders;
-    const signatureFromRequest = authParams.signatureFromRequest;
-    const timestamp = authParams.timestamp;
-    const expiry = authParams.expiry;
-    const credential = authParams.credential;
+
+    // Query params are not specified in AWS documentation as case-insensitive,
+    // so we use case-sensitive
+    const token = data['X-Amz-Security-Token'];
+    if (token && !constants.iamSecurityToken.pattern.test(token)) {
+        log.debug('invalid security token', { token });
+        return { err: errors.InvalidToken };
+    }
+
+    const signedHeaders = authParams.signedHeaders!;
+    const signatureFromRequest = authParams.signatureFromRequest!;
+    const timestamp = authParams.timestamp!;
+    const expiry = authParams.expiry!;
+    const credential = authParams.credential!;

     if (!areSignedHeadersComplete(signedHeaders, request.headers)) {
         log.debug('signedHeaders are incomplete', { signedHeaders });
@@ -38,7 +42,7 @@ function check(request, log, data) {
         log);
     if (validationResult instanceof Error) {
         log.debug('credentials in improper format', { credential,
             timestamp, validationResult });
         return { err: validationResult };
     }
     const accessKey = credential[0];
@@ -95,9 +99,8 @@ function check(request, log, data) {
             authType: 'REST-QUERY-STRING',
             signatureVersion: 'AWS4-HMAC-SHA256',
             signatureAge: Date.now() - convertAmzTimeToMs(timestamp),
+            securityToken: token,
         },
     },
     };
 }
-
-module.exports = { check };
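The query-string path expects the standard presigned-URL parameters; extractQueryParams returns exactly five fields (signedHeaders, signatureFromRequest, timestamp, expiry, credential), which is what the length check above guards. An illustrative presigned query object:

```ts
// Sample presigned-URL query params (values illustrative). The algorithm
// is validated but not returned; the other params feed the five extracted
// fields checked by Object.keys(authParams).length !== 5.
const presignedQuery: { [key: string]: string } = {
    'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
    'X-Amz-Credential': 'AKIAIOSFODNN7EXAMPLE/20160202/us-east-1/s3/aws4_request',
    'X-Amz-Date': '20160202T220410Z',
    'X-Amz-Expires': '86400',
    'X-Amz-SignedHeaders': 'host',
    'X-Amz-Signature': '<hex signature>',
};
```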
lib/auth/v4/streamingV4/V4Transform.ts (new file, 316 lines)
@@ -0,0 +1,316 @@
import { Transform } from 'stream';
import async from 'async';
import errors from '../../../errors';
import { Logger } from 'werelogs';
import Vault, { AuthV4RequestParams } from '../../Vault';
import { Callback } from '../../in_memory/types';

import constructChunkStringToSign from './constructChunkStringToSign';

export type TransformParams = {
    accessKey: string;
    signatureFromRequest: string;
    region: string;
    scopeDate: string;
    timestamp: string;
    credentialScope: string;
};

/**
 * This class is designed to handle the chunks sent in a streaming
 * v4 Auth request
 */
export default class V4Transform extends Transform {
    log: Logger;
    cb: Callback;
    accessKey: string;
    region: string;
    scopeDate: string;
    timestamp: string;
    credentialScope: string;
    lastSignature: string;
    currentSignature?: string;
    haveMetadata: boolean;
    seekingDataSize: number;
    currentData?: any;
    dataCursor: number;
    currentMetadata: any[];
    lastPieceDone: boolean;
    lastChunk: boolean;
    vault: Vault;

    /**
     * @constructor
     * @param streamingV4Params - info for chunk authentication
     * @param streamingV4Params.accessKey - requester's accessKey
     * @param streamingV4Params.signatureFromRequest - signature
     * sent with headers
     * @param streamingV4Params.region - region sent with auth header
     * @param streamingV4Params.scopeDate - date sent with auth header
     * @param streamingV4Params.timestamp - date parsed from headers
     * in ISO 8601 format: YYYYMMDDTHHMMSSZ
     * @param streamingV4Params.credentialScope - items from auth
     * header plus the string 'aws4_request' joined with '/':
     * timestamp/region/aws-service/aws4_request
     * @param vault - Vault instance passed from CloudServer
     * @param log - logger object
     * @param cb - callback to api
     */
    constructor(
        streamingV4Params: TransformParams,
        vault: Vault,
        log: Logger,
        cb: Callback,
    ) {
        const { accessKey, signatureFromRequest, region, scopeDate, timestamp,
            credentialScope } = streamingV4Params;
        super({});
        this.log = log;
        this.cb = cb;
        this.accessKey = accessKey;
        this.region = region;
        this.scopeDate = scopeDate;
        this.timestamp = timestamp;
        this.credentialScope = credentialScope;
        this.lastSignature = signatureFromRequest;
        this.currentSignature = undefined;
        this.haveMetadata = false;
        // keep this as -1 to start since a seekingDataSize of 0
        // means that chunk is just metadata (as is the case with the
        // last chunk)
        this.seekingDataSize = -1;
        this.currentData = undefined;
        this.dataCursor = 0;
        this.currentMetadata = [];
        this.lastPieceDone = false;
        this.lastChunk = false;
        this.vault = vault;
    }

    /**
     * This function will parse the metadata portion of the chunk
     * @param remainingChunk - chunk sent from _transform
     * @return response - if error, will return 'err' key with
     * arsenal error value.
     * if incomplete metadata, will return 'completeMetadata' key with
     * value false
     * if complete metadata received, will return 'completeMetadata' key with
     * value true and the key 'unparsedChunk' with the remaining chunk without
     * the parsed metadata piece
     */
    _parseMetadata(remainingChunk: Buffer) {
        let remainingPlusStoredMetadata = remainingChunk;
        // have metadata pieces so need to add to the front of
        // remainingChunk
        if (this.currentMetadata.length > 0) {
            this.currentMetadata.push(remainingChunk);
            remainingPlusStoredMetadata = Buffer.concat(this.currentMetadata);
            // zero out stored metadata
            this.currentMetadata.length = 0;
        }
        let lineBreakIndex = remainingPlusStoredMetadata.indexOf('\r\n');
        if (lineBreakIndex < 0) {
            this.currentMetadata.push(remainingPlusStoredMetadata);
            return { completeMetadata: false };
        }
        let fullMetadata = remainingPlusStoredMetadata.slice(0,
            lineBreakIndex);

        // handle extra line break on end of data chunk
        if (fullMetadata.length === 0) {
            const chunkWithoutLeadingLineBreak = remainingPlusStoredMetadata
                .slice(2);
            // find second line break
            lineBreakIndex = chunkWithoutLeadingLineBreak.indexOf('\r\n');
            if (lineBreakIndex < 0) {
                this.currentMetadata.push(chunkWithoutLeadingLineBreak);
                return { completeMetadata: false };
            }
            fullMetadata = chunkWithoutLeadingLineBreak.slice(0,
                lineBreakIndex);
        }

        const splitMeta = fullMetadata.toString().split(';');
        this.log.trace('parsed full metadata for chunk', { splitMeta });
        if (splitMeta.length !== 2) {
            this.log.trace('chunk body did not contain correct ' +
                'metadata format');
            return { err: errors.InvalidArgument };
        }
        // chunk-size is sent in hex
        const dataSize = Number.parseInt(splitMeta[0], 16);
        if (Number.isNaN(dataSize)) {
            this.log.trace('chunk body did not contain valid size');
            return { err: errors.InvalidArgument };
        }
        let chunkSig = splitMeta[1];
        if (!chunkSig || chunkSig.indexOf('chunk-signature=') < 0) {
            this.log.trace('chunk body did not contain correct sig format');
            return { err: errors.InvalidArgument };
        }
        chunkSig = chunkSig.replace('chunk-signature=', '');
        this.currentSignature = chunkSig;
        this.haveMetadata = true;
        if (dataSize === 0) {
            this.lastChunk = true;
            return {
                completeMetadata: true,
            };
        }
        // + 2 to get \r\n at end
        this.seekingDataSize = dataSize + 2;
        this.currentData = Buffer.alloc(dataSize);

        return {
            completeMetadata: true,
            // start slice at lineBreak plus 2 to remove line break at end of
            // metadata piece since length of '\r\n' is 2
            unparsedChunk: remainingPlusStoredMetadata
                .slice(lineBreakIndex + 2),
        };
    }

    /**
     * Build the stringToSign and authenticate the chunk
     * @param dataToSend - chunk sent from _transform or null
     * if last chunk without data
     * @param done - callback to _transform
     * @return executes callback with err if applicable
     */
    _authenticate(dataToSend: Buffer | null, done: Callback) {
        // use prior sig to construct new string to sign
        const stringToSign = constructChunkStringToSign(this.timestamp,
            this.credentialScope, this.lastSignature, dataToSend ?? undefined);
        this.log.trace('constructed chunk string to sign',
            { stringToSign });
        // once used prior sig to construct string to sign, reassign
        // lastSignature to current signature
        this.lastSignature = this.currentSignature!;
        const vaultParams: AuthV4RequestParams = {
            log: this.log,
            data: {
                accessKey: this.accessKey,
                signatureFromRequest: this.currentSignature!,
                region: this.region,
                scopeDate: this.scopeDate,
                stringToSign,
                // TODO FIXME This can not work
                // @ts-expect-error
                timestamp: this.timestamp,
                credentialScope: this.credentialScope,
            },
        };
        return this.vault.authenticateV4Request(vaultParams, null, err => {
            if (err) {
                this.log.trace('err from vault on streaming v4 auth',
                    { error: err, paramsSentToVault: vaultParams.data });
                return done(err);
            }
            return done();
        });
    }

    /**
     * This function will parse the chunk into metadata and data,
     * use the metadata to authenticate with vault and send the
     * data on to be stored if authentication passes
     *
     * @param chunk - chunk from request body
     * @param _encoding - Data encoding unused
     * @param callback - Callback(err, justDataChunk, encoding)
     * @return executes callback with err if applicable
     */
    _transform(chunk: Buffer, _encoding: string, callback: Callback) {
        // 'chunk' here is the node streaming chunk
        // transfer-encoding chunks should be of the format:
        // string(IntHexBase(chunk-size)) + ";chunk-signature=" +
        // signature + \r\n + chunk-data + \r\n
        // Last transfer-encoding chunk will have size 0 and no chunk-data.

        if (this.lastPieceDone) {
            const slice = chunk.slice(0, 10);
            this.log.trace('received chunk after end. ' +
                'See first 10 bytes of chunk',
                { chunk: slice.toString() });
            return callback();
        }
        let unparsedChunk = chunk;
        let chunkLeftToEvaluate = true;
        return async.whilst(
            // test function
            () => chunkLeftToEvaluate,
            // async function
            done => {
                if (!this.haveMetadata) {
                    this.log.trace('do not have metadata so calling ' +
                        '_parseMetadata');
                    // need to parse our metadata
                    const parsedMetadataResults =
                        this._parseMetadata(unparsedChunk);
                    if (parsedMetadataResults.err) {
                        return done(parsedMetadataResults.err);
                    }
                    // if do not have full metadata get next chunk
                    if (!parsedMetadataResults.completeMetadata) {
                        chunkLeftToEvaluate = false;
                        return done();
                    }
                    // have metadata so reset unparsedChunk to remaining
                    // without metadata piece
                    // TODO Is that okay?
                    // @ts-expect-error
                    unparsedChunk = parsedMetadataResults.unparsedChunk;
                }
                if (this.lastChunk) {
                    this.log.trace('authenticating final chunk with no data');
                    return this._authenticate(null, err => {
                        if (err) {
                            return done(err);
                        }
                        chunkLeftToEvaluate = false;
                        this.lastPieceDone = true;
                        return done();
                    });
                }
                if (unparsedChunk.length < this.seekingDataSize) {
                    // add chunk to currentData and get next chunk
                    unparsedChunk.copy(this.currentData, this.dataCursor);
                    this.dataCursor += unparsedChunk.length;
                    this.seekingDataSize -= unparsedChunk.length;
                    chunkLeftToEvaluate = false;
                    return done();
                }
                // parse just the next data piece without \r\n at the end
                // (therefore, minus 2)
                const nextDataPiece =
                    unparsedChunk.slice(0, this.seekingDataSize - 2);
                // add parsed data piece to other currentData pieces
                // so that this.currentData is the full data piece
                nextDataPiece.copy(this.currentData, this.dataCursor);
                return this._authenticate(this.currentData, err => {
                    if (err) {
                        return done(err);
                    }
                    unparsedChunk =
                        unparsedChunk.slice(this.seekingDataSize);
                    this.push(this.currentData);
                    this.haveMetadata = false;
                    this.seekingDataSize = -1;
                    this.currentData = undefined;
                    this.dataCursor = 0;
                    chunkLeftToEvaluate = unparsedChunk.length > 0;
                    return done();
                });
            },
            // final callback
            err => {
                if (err) {
                    return this.cb(err as any);
                }
                // get next chunk
                return callback();
            },
        );
    }
}
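To make the wire format in the _transform comment concrete, here is what one signed chunk followed by the terminating zero-size chunk looks like on the wire. The signature values are illustrative samples, not computed here:

```ts
// One aws-chunked streaming-v4 chunk: hex size, ';chunk-signature=', sig,
// CRLF, data, CRLF; then the final zero-size chunk with no data.
const body = Buffer.concat([
    Buffer.from('400;chunk-signature=' +
        'ad80c730a21e5b8d04586a2213dd63b9a0e99e0e2307b0ade35a65485a288648\r\n'),
    Buffer.alloc(0x400, 'a'), // 1024 bytes of chunk-data
    Buffer.from('\r\n'),
    Buffer.from('0;chunk-signature=' +
        'b6c6ea8a5354eaf15b3cb7646744f4275b71ea724fed81ceb9323e279d449df9\r\n\r\n'),
]);
```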
lib/auth/v4/streamingV4/constructChunkStringToSign.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
import * as crypto from 'crypto';
import * as constants from '../../../constants';

/**
 * Constructs stringToSign for chunk
 * @param timestamp - date parsed from headers
 * in ISO 8601 format: YYYYMMDDTHHMMSSZ
 * @param credentialScope - items from auth
 * header plus the string 'aws4_request' joined with '/':
 * timestamp/region/aws-service/aws4_request
 * @param lastSignature - signature from headers or prior chunk
 * @param justDataChunk - data portion of chunk
 * @returns stringToSign
 */
export default function constructChunkStringToSign(
    timestamp: string,
    credentialScope: string,
    lastSignature: string,
    justDataChunk?: Buffer | string,
) {
    let currentChunkHash: string;
    // for last chunk, there will be no data, so use emptyStringHash
    if (!justDataChunk) {
        currentChunkHash = constants.emptyStringHash;
    } else {
        const hash = crypto.createHash('sha256');
        const temp = justDataChunk instanceof Buffer
            ? hash.update(justDataChunk)
            : hash.update(justDataChunk, 'binary');
        currentChunkHash = temp.digest('hex');
    }
    return `AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
        `${credentialScope}\n${lastSignature}\n` +
        `${constants.emptyStringHash}\n${currentChunkHash}`;
}
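A hedged usage sketch of the function above, as V4Transform would call it for a first data chunk, seeding lastSignature with the signature from the request headers (sample arguments only):

```ts
import constructChunkStringToSign from './constructChunkStringToSign';

// Hypothetical call; the scope and seed signature are placeholders.
const stringToSign = constructChunkStringToSign(
    '20160202T220410Z',
    '20160202/us-east-1/s3/aws4_request',
    '<signature from Authorization header>',
    Buffer.alloc(1024, 'a'),
);
// => 'AWS4-HMAC-SHA256-PAYLOAD\n<timestamp>\n<scope>\n<lastSignature>\n' +
//    '<emptyStringHash>\n<sha256 of the data chunk>'
```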
@@ -1,12 +1,11 @@
-'use strict'; // eslint-disable-line strict
+import { Logger } from 'werelogs';

 /**
  * Convert timestamp to milliseconds since Unix Epoch
- * @param {string} timestamp of ISO8601Timestamp format without
+ * @param timestamp of ISO8601Timestamp format without
  * dashes or colons, e.g. 20160202T220410Z
- * @return {number} number of milliseconds since Unix Epoch
  */
-function convertAmzTimeToMs(timestamp) {
+export function convertAmzTimeToMs(timestamp: string) {
     const arr = timestamp.split('');
     // Convert to YYYY-MM-DDTHH:mm:ss.sssZ
     const ISO8601time = `${arr.slice(0, 4).join('')}-${arr[4]}${arr[5]}` +
@@ -15,13 +14,12 @@ function convertAmzTimeToMs(timestamp) {
     return Date.parse(ISO8601time);
 }

 /**
  * Convert UTC timestamp to ISO 8601 timestamp
- * @param {string} timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT
- * @return {string} ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ
+ * @param timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT
+ * @return ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ
  */
-function convertUTCtoISO8601(timestamp) {
+export function convertUTCtoISO8601(timestamp: string | number) {
     // convert to ISO string: YYYY-MM-DDTHH:mm:ss.sssZ.
     const converted = new Date(timestamp).toISOString();
     // Remove "-"s and "."s and milliseconds
@@ -30,13 +28,13 @@ function convertUTCtoISO8601(timestamp) {

 /**
  * Check whether timestamp predates request or is too old
- * @param {string} timestamp of ISO8601Timestamp format without
+ * @param timestamp of ISO8601Timestamp format without
  * dashes or colons, e.g. 20160202T220410Z
- * @param {number} expiry - number of seconds signature should be valid
- * @param {object} log - log for request
- * @return {boolean} true if there is a time problem
+ * @param expiry - number of seconds signature should be valid
+ * @param log - log for request
+ * @return true if there is a time problem
  */
-function checkTimeSkew(timestamp, expiry, log) {
+export function checkTimeSkew(timestamp: string, expiry: number, log: Logger) {
     const currentTime = Date.now();
     const fifteenMinutes = (15 * 60 * 1000);
     const parsedTimestamp = convertAmzTimeToMs(timestamp);
@@ -56,5 +54,3 @@ function checkTimeSkew(timestamp, expiry, log) {
     }
     return false;
 }
-
-module.exports = { convertAmzTimeToMs, convertUTCtoISO8601, checkTimeSkew };
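Quick illustration of the two conversions above, using the sample values from their own docstrings:

```ts
import { convertAmzTimeToMs, convertUTCtoISO8601 } from './timeUtils';

// Compact ISO 8601 ('20160202T220410Z') -> epoch milliseconds
// (Date.parse of the re-punctuated '2016-02-02T22:04:10...' form).
const ms = convertAmzTimeToMs('20160202T220410Z');

// RFC 1123 UTC date -> compact ISO 8601 without dashes, colons, or ms.
const iso = convertUTCtoISO8601('Fri, 10 Feb 2012 21:34:55 GMT');
// => '20120210T213455Z'
```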
@@ -1,17 +1,19 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../../lib/errors');
+import { Logger } from 'werelogs';
+import errors from '../../../lib/errors';

 /**
  * Validate Credentials
- * @param {array} credentials - contains accessKey, scopeDate,
+ * @param credentials - contains accessKey, scopeDate,
  * region, service, requestType
- * @param {string} timestamp - timestamp from request in
+ * @param timestamp - timestamp from request in
  * the format of ISO 8601: YYYYMMDDTHHMMSSZ
- * @param {object} log - logging object
- * @return {boolean} true if credentials are correct format, false if not
+ * @param log - logging object
  */
-function validateCredentials(credentials, timestamp, log) {
+export function validateCredentials(
+    credentials: [string, string, string, string, string],
+    timestamp: string,
+    log: Logger
+): Error | {} {
     if (!Array.isArray(credentials) || credentials.length !== 5) {
         log.warn('credentials in improper format', { credentials });
         return errors.InvalidArgument;
@@ -41,8 +43,9 @@ function validateCredentials(credentials, timestamp, log) {
         { scopeDate, timestampDate });
         return errors.RequestTimeTooSkewed;
     }
-    if (service !== 's3' && service !== 'iam' && service !== 'ring') {
-        log.warn('service in credentials is not one of s3/iam/ring', {
+    if (service !== 's3' && service !== 'iam' && service !== 'ring' &&
+        service !== 'sts') {
+        log.warn('service in credentials is not one of s3/iam/ring/sts', {
             service,
         });
         return errors.InvalidArgument;
@@ -57,12 +60,21 @@ function validateCredentials(credentials, timestamp, log) {

 /**
  * Extract and validate components from query object
- * @param {object} queryObj - query object from request
- * @param {object} log - logging object
- * @return {object} object containing extracted query params for authV4
+ * @param queryObj - query object from request
+ * @param log - logging object
+ * @return object containing extracted query params for authV4
  */
-function extractQueryParams(queryObj, log) {
-    const authParams = {};
+export function extractQueryParams(
+    queryObj: { [key: string]: string | undefined },
+    log: Logger
+) {
+    const authParams: {
+        signedHeaders?: string;
+        signatureFromRequest?: string;
+        timestamp?: string;
+        expiry?: number;
+        credential?: [string, string, string, string, string];
+    } = {};

     // Do not need the algorithm sent back
     if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') {
@@ -98,7 +110,7 @@ function extractQueryParams(queryObj, log) {
         return authParams;
     }

-    const expiry = Number.parseInt(queryObj['X-Amz-Expires'], 10);
+    const expiry = Number.parseInt(queryObj['X-Amz-Expires'] ?? 'nope', 10);
     const sevenDays = 604800;
     if (expiry && (expiry > 0 && expiry <= sevenDays)) {
         authParams.expiry = expiry;
@@ -109,6 +121,7 @@ function extractQueryParams(queryObj, log) {

     const credential = queryObj['X-Amz-Credential'];
     if (credential && credential.length > 28 && credential.indexOf('/') > -1) {
+        // @ts-ignore
         authParams.credential = credential.split('/');
     } else {
         log.warn('invalid credential param', { credential });
@@ -120,14 +133,17 @@ function extractQueryParams(queryObj, log) {

 /**
  * Extract and validate components from auth header
- * @param {string} authHeader - authorization header from request
- * @param {object} log - logging object
- * @return {object} object containing extracted auth header items for authV4
+ * @param authHeader - authorization header from request
+ * @param log - logging object
+ * @return object containing extracted auth header items for authV4
  */
-function extractAuthItems(authHeader, log) {
-    const authItems = {};
-    const authArray = authHeader
-        .replace('AWS4-HMAC-SHA256 ', '').split(',');
+export function extractAuthItems(authHeader: string, log: Logger) {
+    const authItems: {
+        credentialsArr?: [string, string, string, string, string];
+        signedHeaders?: string;
+        signatureFromRequest?: string;
+    } = {};
+    const authArray = authHeader.replace('AWS4-HMAC-SHA256 ', '').split(',');

     if (authArray.length < 3) {
         return authItems;
@@ -137,8 +153,12 @@ function extractAuthItems(authHeader, log) {
     const signedHeadersStr = authArray[1];
     const signatureStr = authArray[2];
     log.trace('credentials from request', { credentialStr });
-    if (credentialStr && credentialStr.trim().startsWith('Credential=')
-        && credentialStr.indexOf('/') > -1) {
+    if (
+        credentialStr &&
+        credentialStr.trim().startsWith('Credential=') &&
+        credentialStr.indexOf('/') > -1
+    ) {
+        // @ts-ignore
         authItems.credentialsArr = credentialStr
             .trim().replace('Credential=', '').split('/');
     } else {
@@ -165,11 +185,11 @@ function extractAuthItems(authHeader, log) {
 /**
  * Checks whether the signed headers include the host header
  * and all x-amz- and x-scal- headers in request
- * @param {string} signedHeaders - signed headers sent with request
- * @param {object} allHeaders - request.headers
- * @return {boolean} true if all x-amz-headers included and false if not
+ * @param signedHeaders - signed headers sent with request
+ * @param allHeaders - request.headers
+ * @return true if all x-amz-headers included and false if not
  */
-function areSignedHeadersComplete(signedHeaders, allHeaders) {
+export function areSignedHeadersComplete(signedHeaders: string, allHeaders: Headers) {
     const signedHeadersList = signedHeaders.split(';');
     if (signedHeadersList.indexOf('host') === -1) {
         return false;
@@ -184,6 +204,3 @@ function areSignedHeadersComplete(signedHeaders, allHeaders) {
     }
     return true;
 }
-
-module.exports = { validateCredentials, extractQueryParams,
-    areSignedHeadersComplete, extractAuthItems };
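The five-element credential tuple that validateCredentials expects comes straight from splitting the credential scope on '/'; this diff also widens the accepted service names to include 'sts'. An illustrative split (sample key):

```ts
// [accessKey, scopeDate, region, service, requestType]; the service
// element must now be one of 's3' | 'iam' | 'ring' | 'sts'.
const credentials = 'AKIAIOSFODNN7EXAMPLE/20160202/us-east-1/s3/aws4_request'
    .split('/') as [string, string, string, string, string];
```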
@@ -1,8 +0,0 @@
'use strict'; // eslint-disable-line strict

module.exports = {
    // PublicId is used as the canonicalID for a request that contains
    // no authentication information. Requestor can access
    // only public resources
    publicId: 'http://acs.amazonaws.com/groups/global/AllUsers',
};
lib/constants.ts (new file, 131 lines)
@@ -0,0 +1,131 @@
import * as crypto from 'crypto';

// The min value here is to manage further backward compat if we
// need it
const iamSecurityTokenSizeMin = 128;
const iamSecurityTokenSizeMax = 128;
// Security token is a hex string (no real format from Amazon)
const iamSecurityTokenPattern = new RegExp(
    `^[a-f0-9]{${iamSecurityTokenSizeMin},${iamSecurityTokenSizeMax}}$`,
);

// info about the iam security token
export const iamSecurityToken = {
    min: iamSecurityTokenSizeMin,
    max: iamSecurityTokenSizeMax,
    pattern: iamSecurityTokenPattern,
};
// PublicId is used as the canonicalID for a request that contains
// no authentication information. Requestor can access
// only public resources
export const publicId = 'http://acs.amazonaws.com/groups/global/AllUsers';
export const zenkoServiceAccount = 'http://acs.zenko.io/accounts/service';
export const metadataFileNamespace = '/MDFile';
export const dataFileURL = '/DataFile';
// AWS states max size for user-defined metadata
// (x-amz-meta- headers) is 2 KB:
// http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
// In testing, AWS seems to allow up to 88 more bytes,
// so we do the same.
export const maximumMetaHeadersSize = 2136;
export const emptyFileMd5 = 'd41d8cd98f00b204e9800998ecf8427e';
// Version 2 changes the format of the data location property
// Version 3 adds the dataStoreName attribute
export const mdModelVersion = 3;
/*
 * Splitter is used to build the object name for the overview of a
 * multipart upload and to build the object names for each part of a
 * multipart upload. These objects with large names are then stored in
 * metadata in a "shadow bucket" to a real bucket. The shadow bucket
 * contains all ongoing multipart uploads. We include in the object
 * name some of the info we might need to pull about an open multipart
 * upload or about an individual part with each piece of info separated
 * by the splitter. We can then extract each piece of info by splitting
 * the object name string with this splitter.
 * For instance, assuming a splitter of '...!*!',
 * the name of the upload overview would be:
 * overview...!*!objectKey...!*!uploadId
 * For instance, the name of a part would be:
 * uploadId...!*!partNumber
 *
 * The sequence of characters used in the splitter should not occur
 * elsewhere in the pieces of info to avoid splitting where not
 * intended.
 *
 * Splitter is also used in adding bucketnames to the
 * namespaceusersbucket. The object names added to the
 * namespaceusersbucket are of the form:
 * canonicalID...!*!bucketname
 */

export const splitter = '..|..';
export const usersBucket = 'users..bucket';
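To make the splitter scheme above concrete, here is how an MPU overview key would be composed and decomposed with the actual `'..|..'` splitter (objectKey and uploadId are hypothetical):

```ts
// Compose and split an MPU shadow-bucket overview key.
const overviewKey = ['overview', 'photos/cat.jpg', 'uploadId123'].join(splitter);
// => 'overview..|..photos/cat.jpg..|..uploadId123'
const [, objectKey, uploadId] = overviewKey.split(splitter);
```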
// MPU Bucket Prefix is used to create the name of the shadow
// bucket used for multipart uploads. There is one shadow mpu
// bucket per bucket and its name is the mpuBucketPrefix followed
// by the name of the final destination bucket for the object
// once the multipart upload is complete.
export const mpuBucketPrefix = 'mpuShadowBucket';
// since aws s3 does not allow capitalized buckets, these may be
// used for special internal purposes
export const permittedCapitalizedBuckets = {
    METADATA: true,
};
/* eslint-disable camelcase */
export const externalBackends = { aws_s3: true, azure: true, gcp: true, pfs: true }
export const hasCopyPartBackends = { aws_s3: true, gcp: true }
export const versioningNotImplBackends = { azure: true, gcp: true }
export const mpuMDStoredExternallyBackend = { aws_s3: true, gcp: true }
// AWS sets a minimum size limit for parts except for the last part.
// http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
export const minimumAllowedPartSize = 5242880;
// hex digest of sha256 hash of empty string:
export const emptyStringHash = crypto.createHash('sha256').update('', 'binary').digest('hex');
// Default expiration value of the S3 pre-signed URL duration
// 604800 seconds (seven days).
export const legacyLocations = ['sproxyd', 'legacy'];
export const defaultPreSignedURLExpiry = 7 * 24 * 60 * 60;
// Regex for ISO-8601 formatted date
export const shortIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z/;
export const longIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/;
export const supportedNotificationEvents = new Set([
    's3:ObjectCreated:*',
    's3:ObjectCreated:Put',
    's3:ObjectCreated:Copy',
    's3:ObjectCreated:CompleteMultipartUpload',
    's3:ObjectRemoved:*',
    's3:ObjectRemoved:Delete',
    's3:ObjectRemoved:DeleteMarkerCreated',
    's3:ObjectTagging:*',
    's3:ObjectTagging:Put',
    's3:ObjectTagging:Delete',
    's3:ObjectAcl:Put',
]);
export const notificationArnPrefix = 'arn:scality:bucketnotif';
// some of the available data backends (if called directly rather
// than through the multiple backend gateway) need a key provided
// as a string as first parameter of the get/delete methods.
export const clientsRequireStringKey = { sproxyd: true, cdmi: true };
// HTTP server keep-alive timeout is set to a higher value than
// client's free sockets timeout to avoid the risk of triggering
// ECONNRESET errors if the server closes the connection at the
// exact moment clients attempt to reuse an established connection
// for a new request.
//
// Note: the ability to close inactive connections on the client
// after httpClientFreeSocketsTimeout milliseconds requires the
// use of "agentkeepalive" module instead of the regular node.js
// http.Agent.
export const httpServerKeepAliveTimeout = 60000;
export const httpClientFreeSocketTimeout = 55000;
export const supportedLifecycleRules = [
    'expiration',
    'noncurrentVersionExpiration',
    'abortIncompleteMultipartUpload',
];
// Maximum number of buckets to cache (bucket metadata)
export const maxCachedBuckets = process.env.METADATA_MAX_CACHED_BUCKETS ?
    Number(process.env.METADATA_MAX_CACHED_BUCKETS) : 1000;

/** For policy resource arn check we allow empty account ID to not break compatibility */
export const policyArnAllowedEmptyAccountId = ['utapi', 'scuba'];
lib/db.js (deleted, 151 lines)
@@ -1,151 +0,0 @@
'use strict'; // eslint-disable-line strict

const writeOptions = { sync: true };

/**
 * Like Error, but with a property set to true.
 * TODO: this is copied from kineticlib, should consolidate with the
 * future errors module
 *
 * Example: instead of:
 *     const err = new Error("input is not a buffer");
 *     err.badTypeInput = true;
 *     throw err;
 * use:
 *     throw propError("badTypeInput", "input is not a buffer");
 *
 * @param {String} propName - the property name.
 * @param {String} message - the Error message.
 * @returns {Error} the Error object.
 */
function propError(propName, message) {
    const err = new Error(message);
    err[propName] = true;
    return err;
}

/**
 * Running transaction with multiple updates to be committed atomically
 */
class IndexTransaction {
    /**
     * Builds a new transaction
     *
     * @argument {Leveldb} db an open database to which the updates
     *                     will be applied
     *
     * @returns {IndexTransaction} a new empty transaction
     */
    constructor(db) {
        this.operations = [];
        this.db = db;
        this.closed = false;
    }

    /**
     * Adds a new operation to participate in this running transaction
     *
     * @argument {object} op an object with the following attributes:
     *                    {
     *                      type: 'put' or 'del',
     *                      key: the object key,
     *                      value: (optional for del) the value to store,
     *                    }
     *
     * @throws {Error} an error described by the following properties
     *                 - invalidTransactionVerb if op is not put or del
     *                 - pushOnCommittedTransaction if already committed
     *                 - missingKey if the key is missing from the op
     *                 - missingValue if putting without a value
     *
     * @returns {undefined}
     */
    push(op) {
        if (this.closed) {
            throw propError('pushOnCommittedTransaction',
                'can not add ops to already committed transaction');
        }

        if (op.type !== 'put' && op.type !== 'del') {
            throw propError('invalidTransactionVerb',
                `unknown action type: ${op.type}`);
        }

        if (op.key === undefined) {
            throw propError('missingKey', 'missing key');
        }

        if (op.type === 'put' && op.value === undefined) {
            throw propError('missingValue', 'missing value');
        }

        this.operations.push(op);
    }

    /**
     * Adds a new put operation to this running transaction
     *
     * @argument {string} key - the key of the object to put
     * @argument {string} value - the value to put
     *
     * @throws {Error} an error described by the following properties
     *                 - pushOnCommittedTransaction if already committed
     *                 - missingKey if the key is missing from the op
     *                 - missingValue if putting without a value
     *
     * @returns {undefined}
     *
     * @see push
     */
    put(key, value) {
        this.push({ type: 'put', key, value });
    }

    /**
     * Adds a new del operation to this running transaction
     *
     * @argument {string} key - the key of the object to delete
     *
     * @throws {Error} an error described by the following properties
     *                 - pushOnCommittedTransaction if already committed
     *                 - missingKey if the key is missing from the op
     *
     * @returns {undefined}
     *
     * @see push
     */
    del(key) {
        this.push({ type: 'del', key });
    }

    /**
     * Applies the queued updates in this transaction atomically.
     *
     * @argument {function} cb function to be called when the commit
     *              finishes, taking an optional error argument
     *
     * @returns {undefined}
     */
    commit(cb) {
        if (this.closed) {
            return cb(propError('alreadyCommitted',
                'transaction was already committed'));
        }

        if (this.operations.length === 0) {
            return cb(propError('emptyTransaction',
                'tried to commit an empty transaction'));
        }

        this.closed = true;

        // The array-of-operations variant of the `batch` method
        // allows passing options such as `sync: true` whereas the
        // chained form does not.
        return this.db.batch(this.operations, writeOptions, cb);
    }
}

module.exports = {
    IndexTransaction,
};
lib/db.ts (new file, 194 lines)
@@ -0,0 +1,194 @@
/**
 * Like Error, but with a property set to true.
 * TODO: this is copied from kineticlib, should consolidate with the
 * future errors module
 *
 * Example: instead of:
 *     const err = new Error("input is not a buffer");
 *     err.badTypeInput = true;
 *     throw err;
 * use:
 *     throw propError("badTypeInput", "input is not a buffer");
 *
 * @param propName - the property name.
 * @param message - the Error message.
 * @returns the Error object.
 */
function propError(propName: string, message: string): Error {
    const err = new Error(message);
    err[propName] = true;
    // @ts-ignore
    err.is = { [propName]: true };
    return err;
}

/**
 * Running transaction with multiple updates to be committed atomically
 */
export class IndexTransaction {
    operations: { type: 'put' | 'del'; key: string; value?: any }[];
    db: any;
    closed: boolean;
    conditions: { [key: string]: string }[];

    /**
     * Builds a new transaction
     *
     * @argument {Leveldb} db an open database to which the updates
     *                     will be applied
     *
     * @returns a new empty transaction
     */
    constructor(db: any) {
        this.operations = [];
        this.db = db;
        this.closed = false;
        this.conditions = [];
    }

    /**
     * Adds a new operation to participate in this running transaction
     *
     * @argument op an object with the following attributes:
     *              {
     *                type: 'put' or 'del',
     *                key: the object key,
     *                value: (optional for del) the value to store,
     *              }
     *
     * @throws an error described by the following properties
     *         - invalidTransactionVerb if op is not put or del
     *         - pushOnCommittedTransaction if already committed
     *         - missingKey if the key is missing from the op
     *         - missingValue if putting without a value
     */
    push(op: { type: 'put'; key: string; value: any }): void;
    push(op: { type: 'del'; key: string }): void;
    push(op: { type: 'put' | 'del'; key: string; value?: any }): void {
        if (this.closed) {
            throw propError(
                'pushOnCommittedTransaction',
                'can not add ops to already committed transaction'
            );
        }

        if (op.type !== 'put' && op.type !== 'del') {
            throw propError(
                'invalidTransactionVerb',
                `unknown action type: ${op.type}`
            );
        }

        if (op.key === undefined) {
            throw propError('missingKey', 'missing key');
        }

        if (op.type === 'put' && op.value === undefined) {
            throw propError('missingValue', 'missing value');
        }

        this.operations.push(op);
    }

    /**
     * Adds a new put operation to this running transaction
     *
     * @argument {string} key - the key of the object to put
     * @argument {string} value - the value to put
     *
     * @throws {Error} an error described by the following properties
     *                 - pushOnCommittedTransaction if already committed
     *                 - missingKey if the key is missing from the op
     *                 - missingValue if putting without a value
     * @see push
     */
    put(key: string, value: any) {
        this.push({ type: 'put', key, value });
    }

    /**
     * Adds a new del operation to this running transaction
     *
     * @argument key - the key of the object to delete
     *
     * @throws an error described by the following properties
     *         - pushOnCommittedTransaction if already committed
     *         - missingKey if the key is missing from the op
     *
     * @see push
     */
    del(key: string) {
        this.push({ type: 'del', key });
    }

    /**
     * Adds a condition for the transaction
     *
     * @argument condition an object with the following attributes:
     *              {
     *                <condition>: the object key
     *              }
     *           example: { notExists: 'key1' }
     *
     * @throws an error described by the following properties
     *         - pushOnCommittedTransaction if already committed
     *         - missingCondition if the condition is empty
     *
     */
    addCondition(condition: { [key: string]: string }) {
        if (this.closed) {
            throw propError(
                'pushOnCommittedTransaction',
                'can not add conditions to already committed transaction'
            );
        }
        if (condition === undefined || Object.keys(condition).length === 0) {
            throw propError(
                'missingCondition',
                'missing condition for conditional put'
            );
        }
        if (typeof condition.notExists !== 'string') {
            throw propError(
                'unsupportedConditionalOperation',
                'missing key or supported condition'
            );
        }
        this.conditions.push(condition);
    }

    /**
     * Applies the queued updates in this transaction atomically.
     *
     * @argument cb function to be called when the commit
     *              finishes, taking an optional error argument
     *
     */
    commit(cb: (error: Error | null, data?: any) => void) {
        if (this.closed) {
            return cb(
                propError(
                    'alreadyCommitted',
                    'transaction was already committed'
                )
            );
        }

        if (this.operations.length === 0) {
            return cb(
                propError(
                    'emptyTransaction',
                    'tried to commit an empty transaction'
                )
            );
        }

        this.closed = true;
        const options = { sync: true, conditions: this.conditions };

        // The array-of-operations variant of the `batch` method
        // allows passing options such as `sync: true` whereas the
        // chained form does not.
        return this.db.batch(this.operations, options, cb);
    }
}
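A hedged usage sketch of the TS IndexTransaction above; the `db` handle is assumed to be any level-style database exposing `batch(ops, options, cb)`, as the class expects:

```ts
import { IndexTransaction } from './lib/db';

declare const db: any; // assumed: an open LevelDB-like handle

const transaction = new IndexTransaction(db);
transaction.put('key1', 'value1');          // queued, not yet applied
transaction.del('key2');
transaction.addCondition({ notExists: 'key1' }); // conditional put support
transaction.commit(err => {
    if (err) {
        // propError sets both err.<flag> and err.is.<flag>, e.g.
        // err.is.alreadyCommitted or err.is.emptyTransaction
        return console.error('commit failed', err);
    }
    return console.log('committed atomically');
});
```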
lib/errorUtils.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
export interface ErrorLike {
    message: any;
    code: any;
    stack: any;
    name: any;
}

export function reshapeExceptionError(error: ErrorLike) {
    const { message, code, stack, name } = error;
    return { message, code, stack, name };
}
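Typical use of the helper above: turn a thrown exception into a plain object whose fields survive JSON logging (Error properties are otherwise non-enumerable). Illustrative only:

```ts
import { reshapeExceptionError, ErrorLike } from './lib/errorUtils';

try {
    JSON.parse('not json');
} catch (err) {
    // Plain { message, code, stack, name } object, safe to log as JSON.
    console.log(reshapeExceptionError(err as ErrorLike));
}
```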
@@ -1,35 +0,0 @@
'use strict'; // eslint-disable-line strict

class ArsenalError extends Error {
    constructor(type, code, desc) {
        super(type);
        this.code = code;
        this.description = desc;
        this[type] = true;
    }

    customizeDescription(description) {
        return new ArsenalError(this.message, this.code, description);
    }
}

/**
 * Generate an Errors instances object.
 *
 * @returns {Object.<string, ArsenalError>} - object mapping error names to
 * ArsenalError instances
 */
function errorsGen() {
    const errors = {};
    const errorsObj = require('../errors/arsenalErrors.json');

    Object.keys(errorsObj)
        .filter(index => index !== '_comment')
        .forEach(index => {
            errors[index] = new ArsenalError(index, errorsObj[index].code,
                errorsObj[index].description);
        });
    return errors;
}

module.exports = errorsGen();
lib/errors/arsenalErrors.ts (new file, 1044 lines)
File diff suppressed because it is too large

lib/errors/index.ts (new file, 150 lines)
@@ -0,0 +1,150 @@
import type { ServerResponse } from 'http';
import * as rawErrors from './arsenalErrors';

/** All possible errors names. */
export type Name = keyof typeof rawErrors
/** Object containing all errors names. It has the format { [Name]: "Name" } */
export type Names = { [Name_ in Name]: Name_ };
/** Mapping used to determine an error type. It has the format { [Name]: boolean } */
export type Is = { [_ in Name]: boolean };
/** Mapping of all possible Errors. It has the format { [Name]: Error } */
export type Errors = { [_ in Name]: ArsenalError };

// This object is reused constantly through createIs, we store it there
// to avoid recomputation.
const isBase = Object.fromEntries(
    Object.keys(rawErrors).map(key => [key, false])
) as Is;

// This allows to conditionally add the old behavior of errors to properly
// test migration.
// Activate CI tests with `ALLOW_UNSAFE_ERROR_COMPARISON=false yarn test`.
// Remove this mechanism in ARSN-176.
export const allowUnsafeErrComp = (
    process.env.ALLOW_UNSAFE_ERROR_COMPARISON ?? 'true') === 'true'

// This contains some metaprog. Be careful.
// Proxy can be found on MDN.
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy
// While it could seem better to avoid metaprog, this allows us to enforce
// type-checking properly while avoiding all errors that could happen at runtime.
// Even if some errors are made in JavaScript, like using err.is.NonExistingError,
// the Proxy will return false.
const createIs = (type: Name): Is => {
    const get = (is: Is, value: string | symbol) => is[value] ?? false;
    const final = Object.freeze({ ...isBase, [type]: true })
    return new Proxy(final, { get });
};

export class ArsenalError extends Error {
    /** HTTP status code. Example: 401, 403, 500, ... */
    #code: number;
    /** Text description of the error. */
    #description: string;
    /** Type of the error. */
    #type: Name;
    /** Object used to determine the error type.
     * Example: error.is.InternalError */
    #is: Is;

    private constructor(type: Name, code: number, description: string) {
        super(type);
        this.#code = code;
        this.#description = description;
        this.#type = type;
        this.#is = createIs(type);

        // This restores the old behavior of errors, to make sure they're now
        // backward-compatible. Fortunately it's handled by TS, but it cannot
        // be type-checked. This means we have to be extremely careful about
        // what we're doing when using errors.
        // Disables the feature when in CI tests but not in production.
        if (allowUnsafeErrComp) {
            this[type] = true;
        }
    }

    /** Output the error as a JSON string */
    toString() {
        const errorType = this.message;
        const errorMessage = this.#description;
        return JSON.stringify({ errorType, errorMessage });
    }

    flatten() {
        return {
            is_arsenal_error: true,
            code: this.#code,
            description: this.#description,
            type: this.#type,
            stack: this.stack
        }
    }

    static unflatten(flat_obj) {
        if (!flat_obj.is_arsenal_error) {
            return null;
        }

        const err = new ArsenalError(
            flat_obj.type,
            flat_obj.code,
            flat_obj.description
        )
        err.stack = flat_obj.stack
        return err;
    }

    /** Write the error in an HTTP response */
    writeResponse(res: ServerResponse) {
        res.writeHead(this.#code);
        const asStr = this.toString();
        res.end(asStr);
    }

    /** Clone the error with a new description. */
    customizeDescription(description: string): ArsenalError {
        const type = this.#type;
        const code = this.#code;
        return new ArsenalError(type, code, description);
    }

    /** Used to determine the error type. Example: error.is.InternalError */
    get is() {
        return this.#is;
    }

    /** HTTP status code. Example: 401, 403, 500, ... */
    get code() {
        return this.#code;
    }

    /** Text description of the error. */
    get description() {
        return this.#description;
    }

    /**
     * Type of the error, belonging to Name. is should be preferred to
     * type on a daily basis, but type remains accessible for future use. */
    get type() {
        return this.#type;
    }

    /** Generate all possible errors. An instance is created by default. */
    static errors() {
        const errors = {}
        Object.entries(rawErrors).forEach((value) => {
            const name = value[0] as Name;
            const error = value[1];
            const { code, description } = error;
            const get = () => new ArsenalError(name, code, description);
            Object.defineProperty(errors, name, { get });
        });
        return errors as Errors
    }
}

/** Mapping of all possible Errors.
 * Use them with errors[error].customizeDescription for any customization. */
export default ArsenalError.errors();
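A hedged sketch of how consumers would use the new errors module; the import path is illustrative, and note that each property access mints a fresh instance via the getter in `errors()`:

```ts
import errors, { ArsenalError } from './lib/errors';

const err = errors.NoSuchKey;          // fresh instance per access
console.log(err.is.NoSuchKey);         // true
console.log(err.is.InternalError);     // false (Proxy default for other names)

// Customizing keeps the type and HTTP code but swaps the description.
const custom = err.customizeDescription('No such key in this bucket.');

// flatten/unflatten round-trips the error across process boundaries.
const revived = ArsenalError.unflatten(custom.flatten());
console.log(revived?.description);     // 'No such key in this bucket.'
```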
lib/executables/pensieveCreds/README.md (new file, 20 lines)
@@ -0,0 +1,20 @@
# Get Pensieve Credentials Executable

## To make executable file from getPensieveCreds.js

`npm install -g pkg`
`pkg getPensieveCreds.js`

This will build macOS, Linux, and Windows executables.
If you just want Linux, for example:
`pkg getPensieveCreds.js --targets node6-linux-x64`

For further options, see https://github.com/zeit/pkg

## To run the executable file

Call the output executable file with an
argument that names the service you
are trying to get credentials for (e.g., clueso):

`./getPensieveCreds-linux serviceName`
45 lib/executables/pensieveCreds/getPensieveCreds.js Normal file
@@ -0,0 +1,45 @@
const async = require('async');
const MetadataFileClient =
    require('../../storage/metadata/file/MetadataFileClient');
const mdClient = new MetadataFileClient({
    host: 's3-metadata',
    port: '9993',
});
const { loadOverlayVersion, parseServiceCredentials } = require('./utils');

const serviceName = process.argv[2];
if (serviceName === undefined) {
    throw new Error('Missing service name (e.g., clueso)');
}
const tokenKey = 'auth/zenko/remote-management-token';

const mdDb = mdClient.openDB(error => {
    if (error) {
        throw error;
    }

    const db = mdDb.openSub('PENSIEVE');
    return async.waterfall([
        cb => db.get('configuration/overlay-version', {}, cb),
        (version, cb) => loadOverlayVersion(db, version, cb),
        (conf, cb) => db.get(tokenKey, {}, (err, instanceAuth) => {
            if (err) {
                return cb(err);
            }
            const creds = parseServiceCredentials(conf, instanceAuth,
                serviceName);
            return cb(null, creds);
        }),
    ], (err, creds) => {
        db.disconnect();
        if (err) {
            throw err;
        }
        if (!creds) {
            throw new Error('No credentials found');
        }
        process.stdout.write(`export AWS_ACCESS_KEY_ID="${creds.accessKey}"\n`);
        process.stdout
            .write(`export AWS_SECRET_ACCESS_KEY="${creds.secretKey}"`);
    });
});
14 lib/executables/pensieveCreds/package.json Normal file
@@ -0,0 +1,14 @@
{
    "name": "pensievecreds",
    "version": "1.0.0",
    "description": "Executable tool for Pensieve",
    "main": "getPensieveCreds.js",
    "scripts": {
        "test": "mocha --recursive --timeout 5500 tests/unit"
    },
    "dependencies": {
        "mocha": "2.5.3",
        "async": "^2.6.0",
        "node-forge": "^0.7.1"
    }
}
7 lib/executables/pensieveCreds/tests/resources.json Normal file
@@ -0,0 +1,7 @@
{
    "privateKey": "-----BEGIN RSA PRIVATE KEY-----\r\nMIIEowIBAAKCAQEAj13sSYE40lAX2qpBvfdGfcSVNtBf8i5FH+E8FAhORwwPu+2S\r\n3yBQbgwHq30WWxunGb1NmZL1wkVZ+vf12DtxqFRnMA08LfO4oO6oC4V8XfKeuHyJ\r\n1qlaKRINz6r9yDkTHtwWoBnlAINurlcNKgGD5p7D+G26Chbr/Oo0ZwHula9DxXy6\r\neH8/bJ5/BynyNyyWRPoAO+UkUdY5utkFCUq2dbBIhovMgjjikf5p2oWqnRKXc+JK\r\nBegr6lSHkkhyqNhTmd8+wA+8Cace4sy1ajY1t5V4wfRZea5vwl/HlyyKodvHdxng\r\nJgg6H61JMYPkplY6Gr9OryBKEAgq02zYoYTDfwIDAQABAoIBAAuDYGlavkRteCzw\r\nRU1LIVcSRWVcgIgDXTu9K8T0Ec0008Kkxomyn6LmxmroJbZ1VwsDH8s4eRH73ckA\r\nxrZxt6Pr+0lplq6eBvKtl8MtGhq1VDe+kJczjHEF6SQHOFAu/TEaPZrn2XMcGvRX\r\nO1BnRL9tepFlxm3u/06VRFYNWqqchM+tFyzLu2AuiuKd5+slSX7KZvVgdkY1ErKH\r\ngB75lPyhPb77C/6ptqUisVMSO4JhLhsD0+ekDVY982Sb7KkI+szdWSbtMx9Ek2Wo\r\ntXwJz7I8T7IbODy9aW9G+ydyhMDFmaEYIaDVFKJj5+fluNza3oQ5PtFNVE50GQJA\r\nsisGqfECgYEAwpkwt0KpSamSEH6qknNYPOwxgEuXWoFVzibko7is2tFPvY+YJowb\r\n68MqHIYhf7gHLq2dc5Jg1TTbGqLECjVxp4xLU4c95KBy1J9CPAcuH4xQLDXmeLzP\r\nJ2YgznRocbzAMCDAwafCr3uY9FM7oGDHAi5bE5W11xWx+9MlFExL3JkCgYEAvJp5\r\nf+JGN1W037bQe2QLYUWGszewZsvplnNOeytGQa57w4YdF42lPhMz6Kc/zdzKZpN9\r\njrshiIDhAD5NCno6dwqafBAW9WZl0sn7EnlLhD4Lwm8E9bRHnC9H82yFuqmNrzww\r\nzxBCQogJISwHiVz4EkU48B283ecBn0wT/fAa19cCgYEApKWsnEHgrhy1IxOpCoRh\r\nUhqdv2k1xDPN/8DUjtnAFtwmVcLa/zJopU/Zn4y1ZzSzjwECSTi+iWZRQ/YXXHPf\r\nl92SFjhFW92Niuy8w8FnevXjF6T7PYiy1SkJ9OR1QlZrXc04iiGBDazLu115A7ce\r\nanACS03OLw+CKgl6Q/RR83ECgYBCUngDVoimkMcIHHt3yJiP3ikeAKlRnMdJlsa0\r\nXWVZV4hCG3lDfRXsnEgWuimftNKf+6GdfYSvQdLdiQsCcjT5A4uLsQTByv5nf4uA\r\n1ZKOsFrmRrARzxGXhLDikvj7yP//7USkq+0BBGFhfuAvl7fMhPceyPZPehqB7/jf\r\nxX1LBQKBgAn5GgSXzzS0e06ZlP/VrKxreOHa5Z8wOmqqYQ0QTeczAbNNmuITdwwB\r\nNkbRqpVXRIfuj0BQBegAiix8om1W4it0cwz54IXBwQULxJR1StWxj3jo4QtpMQ+z\r\npVPdB1Ilb9zPV1YvDwRfdS1xsobzznAx56ecsXduZjs9mF61db8Q\r\n-----END RSA PRIVATE KEY-----\r\n",
    "publicKey": "-----BEGIN PUBLIC KEY-----\r\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAj13sSYE40lAX2qpBvfdG\r\nfcSVNtBf8i5FH+E8FAhORwwPu+2S3yBQbgwHq30WWxunGb1NmZL1wkVZ+vf12Dtx\r\nqFRnMA08LfO4oO6oC4V8XfKeuHyJ1qlaKRINz6r9yDkTHtwWoBnlAINurlcNKgGD\r\n5p7D+G26Chbr/Oo0ZwHula9DxXy6eH8/bJ5/BynyNyyWRPoAO+UkUdY5utkFCUq2\r\ndbBIhovMgjjikf5p2oWqnRKXc+JKBegr6lSHkkhyqNhTmd8+wA+8Cace4sy1ajY1\r\nt5V4wfRZea5vwl/HlyyKodvHdxngJgg6H61JMYPkplY6Gr9OryBKEAgq02zYoYTD\r\nfwIDAQAB\r\n-----END PUBLIC KEY-----\r\n",
    "accessKey": "QXP3VDG3SALNBX2QBJ1C",
    "secretKey": "K5FyqZo5uFKfw9QBtn95o6vuPuD0zH/1seIrqPKqGnz8AxALNSx6EeRq7G1I6JJpS1XN13EhnwGn2ipsml3Uf2fQ00YgEmImG8wzGVZm8fWotpVO4ilN4JGyQCah81rNX4wZ9xHqDD7qYR5MyIERxR/osoXfctOwY7GGUjRKJfLOguNUlpaovejg6mZfTvYAiDF+PTO1sKUYqHt1IfKQtsK3dov1EFMBB5pWM7sVfncq/CthKN5M+VHx9Y87qdoP3+7AW+RCBbSDOfQgxvqtS7PIAf10mDl8k2kEURLz+RqChu4O4S0UzbEmtja7wa7WYhYKv/tM/QeW7kyNJMmnPg==",
    "decryptedSecretKey": "n7PSZ3U6SgerF9PCNhXYsq3S3fRKVGdZTicGV8Ur"
}
39 lib/executables/pensieveCreds/tests/unit/utilsSpec.js Normal file
@@ -0,0 +1,39 @@
const assert = require('assert');
const { parseServiceCredentials, decryptSecret } =
    require('../../utils');
const { privateKey, accessKey, secretKey, decryptedSecretKey } =
    require('../resources.json');

describe('decryptSecret', () => {
    it('should decrypt a secret', () => {
        const instanceCredentials = {
            privateKey,
        };
        const result = decryptSecret(instanceCredentials, secretKey);
        assert.strictEqual(result, decryptedSecretKey);
    });
});

describe('parseServiceCredentials', () => {
    const conf = {
        users: [{ accessKey,
            accountType: 'service-clueso',
            secretKey,
            userName: 'Search Service Account' }],
    };
    const auth = JSON.stringify({ privateKey });

    it('should parse service credentials', () => {
        const result = parseServiceCredentials(conf, auth, 'clueso');
        const expectedResult = {
            accessKey,
            secretKey: decryptedSecretKey,
        };
        assert.deepStrictEqual(result, expectedResult);
    });

    it('should return undefined if no such service', () => {
        const result = parseServiceCredentials(conf, auth, undefined);
        assert.strictEqual(result, undefined);
    });
});
38 lib/executables/pensieveCreds/utils.js Normal file
@@ -0,0 +1,38 @@
const forge = require('node-forge');

function decryptSecret(instanceCredentials, secret) {
    const privateKey = forge.pki.privateKeyFromPem(
        instanceCredentials.privateKey);
    const encryptedSecretKey = forge.util.decode64(secret);
    return privateKey.decrypt(encryptedSecretKey, 'RSA-OAEP', {
        md: forge.md.sha256.create(),
    });
}

function loadOverlayVersion(db, version, cb) {
    db.get(`configuration/overlay/${version}`, {}, (err, val) => {
        if (err) {
            return cb(err);
        }
        return cb(null, JSON.parse(val));
    });
}

function parseServiceCredentials(conf, auth, serviceName) {
    const instanceAuth = JSON.parse(auth);
    const serviceAccount = (conf.users || []).find(
        u => u.accountType === `service-${serviceName}`);
    if (!serviceAccount) {
        return undefined;
    }
    return {
        accessKey: serviceAccount.accessKey,
        secretKey: decryptSecret(instanceAuth, serviceAccount.secretKey),
    };
}

module.exports = {
    decryptSecret,
    loadOverlayVersion,
    parseServiceCredentials,
};
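Only the decrypt direction ships here, so the encoding contract (base64 over RSA-OAEP with SHA-256) is easiest to see as a round trip. A minimal sketch, assuming node-forge's standard keypair/encrypt APIs and throwaway key material rather than the fixtures above:

```ts
import forge from 'node-forge';
const { decryptSecret } = require('./utils');

// Throwaway keypair; real deployments store the PEM pair in Pensieve.
const { publicKey, privateKey } = forge.pki.rsa.generateKeyPair({ bits: 2048 });

// Encrypt the way decryptSecret expects: RSA-OAEP with SHA-256, then base64.
const secret = 'super-secret-key';
const encrypted = forge.util.encode64(
    publicKey.encrypt(secret, 'RSA-OAEP', { md: forge.md.sha256.create() }));

const instanceCredentials = { privateKey: forge.pki.privateKeyToPem(privateKey) };
console.log(decryptSecret(instanceCredentials, encrypted) === secret); // true
```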
@@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-
-const ciphers = [
+export const ciphers = [
 'DHE-RSA-AES128-GCM-SHA256',
 'ECDHE-ECDSA-AES128-GCM-SHA256',
 'ECDHE-RSA-AES256-GCM-SHA384',
@@ -28,7 +26,3 @@ const ciphers = [
 '!EDH-RSA-DES-CBC3-SHA',
 '!KRB5-DES-CBC3-SHA',
 ].join(':');
-
-module.exports = {
-    ciphers,
-};
@@ -29,16 +29,11 @@ c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe
 bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg==
 -----END DH PARAMETERS-----
 */
-'use strict'; // eslint-disable-line strict
-
-const dhparam =
+export const dhparam =
 'MIIBCAKCAQEAh99T77KGNuiY9N6xrCJ3QNv4SFADTa3CD+1VMTAdRJLHUNpglB+i' +
 'AoTYiLDFZgtTCpx0ZZUD+JM3qiCZy0OK5/ZGlVD7sZmxjRtdpVK4qIPtwav8t0J7' +
 'c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe' +
 '23PLGZulKg8H3eRBxHamHkmyuEVDtoNhMIoJONsdXSpo5GgcD4EQMM8xb/qsnCxn' +
 '6QIGTBvcHskxtlTZOfUPk4XQ6Yb3tQi2TurzkQHLln4U7p/GZs+D+6D3SgSPqr6P' +
 'bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg==';
-
-module.exports = {
-    dhparam,
-};
2 lib/https/index.ts Normal file
@@ -0,0 +1,2 @@
export * as ciphers from './ciphers'
export * as dhparam from './dh2048'
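These two modules exist to feed Node's TLS options. A minimal sketch of how they would plug into an HTTPS server; the certificate paths are illustrative:

```ts
import * as fs from 'fs';
import * as https from 'https';
import { ciphers } from './ciphers';
import { dhparam } from './dh2048';

// Paths are illustrative; any PEM key/cert pair works.
const server = https.createServer({
    key: fs.readFileSync('server-key.pem'),
    cert: fs.readFileSync('server-cert.pem'),
    ciphers,  // the ':'-joined cipher preference list
    dhparam,  // pre-generated 2048-bit DH parameters for DHE ciphers
}, (req, res) => res.end('ok'));
server.listen(8443);
```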
@@ -1,83 +0,0 @@
'use strict'; // eslint-disable-line strict

const ipaddr = require('ipaddr.js');

/**
 * checkIPinRangeOrMatch checks whether a given ip address is in an ip address
 * range or matches the given ip address
 * @param {string} cidr - ip address range or ip address
 * @param {object} ip - parsed ip address
 * @return {boolean} true if in range, false if not
 */
function checkIPinRangeOrMatch(cidr, ip) {
    // If there is an exact match of the ip address, no need to check ranges
    if (ip.toString() === cidr) {
        return true;
    }
    let range;

    try {
        range = ipaddr.IPv4.parseCIDR(cidr);
    } catch (err) {
        try {
            // not ipv4 so try ipv6
            range = ipaddr.IPv6.parseCIDR(cidr);
        } catch (err) {
            // range is not valid ipv4 or ipv6
            return false;
        }
    }
    try {
        return ip.match(range);
    } catch (err) {
        return false;
    }
}

/**
 * Parse IP address into object representation
 * @param {string} ip - IPV4/IPV6/IPV4-mapped IPV6 address
 * @return {object} parsedIp - Object representation of parsed IP
 */
function parseIp(ip) {
    if (ipaddr.IPv4.isValid(ip)) {
        return ipaddr.parse(ip);
    }
    if (ipaddr.IPv6.isValid(ip)) {
        // also parses IPv6 mapped IPv4 addresses into IPv4 representation
        return ipaddr.process(ip);
    }
    // not a valid ip address according to the module, so return an empty
    // object which will obviously not match a range of ip addresses that
    // the parsedIp is being tested against
    return {};
}

/**
 * Checks if an IP address matches a given list of CIDR ranges
 * @param {string[]} cidrList - List of CIDR ranges
 * @param {string} ip - IP address
 * @return {boolean} - true if there is a match, false otherwise
 */
function ipMatchCidrList(cidrList, ip) {
    const parsedIp = parseIp(ip);
    return cidrList.some(item => {
        let cidr;
        // patch the cidr if range is not specified
        if (item.indexOf('/') === -1) {
            if (item.startsWith('127.')) {
                cidr = `${item}/8`;
            } else if (ipaddr.IPv4.isValid(item)) {
                cidr = `${item}/32`;
            }
        }
        return checkIPinRangeOrMatch(cidr || item, parsedIp);
    });
}

module.exports = {
    checkIPinRangeOrMatch,
    ipMatchCidrList,
    parseIp,
};
71 lib/ipCheck.ts Normal file
@@ -0,0 +1,71 @@
import ipaddr from 'ipaddr.js';

/**
 * checkIPinRangeOrMatch checks whether a given ip address is in an ip address
 * range or matches the given ip address
 * @param cidr - ip address range or ip address
 * @param ip - parsed ip address
 * @return true if in range, false if not
 */
export function checkIPinRangeOrMatch(
    cidr: string,
    ip: ipaddr.IPv4 | ipaddr.IPv6,
): boolean {
    // If there is an exact match of the ip address, no need to check ranges
    if (ip.toString() === cidr) {
        return true;
    }
    try {
        if (ip instanceof ipaddr.IPv6) {
            const range = ipaddr.IPv6.parseCIDR(cidr);
            return ip.match(range);
        } else {
            const range = ipaddr.IPv4.parseCIDR(cidr);
            return ip.match(range);
        }
    } catch (error) {
        return false;
    }
}

/**
 * Parse IP address into object representation
 * @param ip - IPV4/IPV6/IPV4-mapped IPV6 address
 * @return parsedIp - Object representation of parsed IP
 */
export function parseIp(ip: string): ipaddr.IPv4 | ipaddr.IPv6 | {} {
    if (ipaddr.IPv4.isValid(ip)) {
        return ipaddr.parse(ip);
    }
    if (ipaddr.IPv6.isValid(ip)) {
        // also parses IPv6 mapped IPv4 addresses into IPv4 representation
        return ipaddr.process(ip);
    }
    return {};
}

/**
 * Checks if an IP address matches a given list of CIDR ranges
 * @param cidrList - List of CIDR ranges
 * @param ip - IP address
 * @return - true if there is a match, false otherwise
 */
export function ipMatchCidrList(cidrList: string[], ip: string): boolean {
    const parsedIp = parseIp(ip);
    return cidrList.some((item) => {
        let cidr: string | undefined;
        // patch the cidr if range is not specified
        if (item.indexOf('/') === -1) {
            if (item.startsWith('127.')) {
                cidr = `${item}/8`;
            } else if (ipaddr.IPv4.isValid(item)) {
                cidr = `${item}/32`;
            }
        }
        return (
            (parsedIp instanceof ipaddr.IPv4 ||
                parsedIp instanceof ipaddr.IPv6) &&
            checkIPinRangeOrMatch(cidr || item, parsedIp)
        );
    });
}
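A quick illustration of the CIDR-patching behavior in `ipMatchCidrList` (values are illustrative):

```ts
import { ipMatchCidrList } from './ipCheck';

ipMatchCidrList(['192.168.1.0/24'], '192.168.1.42'); // true: plain range match
ipMatchCidrList(['10.0.0.1'], '10.0.0.1');           // true: bare IP patched to /32
ipMatchCidrList(['127.0.0.1'], '127.255.255.255');   // true: loopback patched to /8
ipMatchCidrList(['10.0.0.0/8'], 'not-an-ip');        // false: parseIp returns {}
```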
33 lib/jsutil.ts Normal file
@@ -0,0 +1,33 @@
import * as util from 'util';
const debug = util.debuglog('jsutil');

// JavaScript utility functions

/**
 * force <tt>func</tt> to be called only once, even if actually called
 * multiple times. The cached result of the first call is then
 * returned (if any).
 *
 * @note underscore.js provides this functionality but it's not worth
 * adding a new dependency for such a small use case.
 *
 * @param func function to call at most once
 * @return a callable wrapper mirroring <tt>func</tt> that only
 * calls <tt>func</tt> on the first invocation.
 */
export function once<T>(func: (...args: any[]) => T): (...args: any[]) => T {
    type State = { called: boolean; res: any };
    const state: State = { called: false, res: undefined };
    return function wrapper(...args: any[]) {
        if (!state.called) {
            state.called = true;
            state.res = func.apply(func, args);
        } else {
            const m1 = 'function already called:';
            const m2 = 'returning cached result:';
            debug(m1, func, m2, state.res);
        }
        return state.res;
    };
}
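A minimal usage sketch of `once`:

```ts
import { once } from './jsutil';

let calls = 0;
const initialize = once(() => {
    calls += 1;
    return { ready: true };
});

const a = initialize();
const b = initialize(); // no second call; cached result is returned

console.log(calls);   // 1
console.log(a === b); // true
```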
126 lib/metrics/RedisClient.ts Normal file
@@ -0,0 +1,126 @@
import Redis from 'ioredis';
import { Logger } from 'werelogs';

export type Config = { host: string; port: number; password: string };
export type Callback = (error: Error | null, value?: any) => void;

export default class RedisClient {
    _client: Redis.Redis;

    constructor(config: Config, logger: Logger) {
        this._client = new Redis(config);
        this._client.on('error', err =>
            logger.trace('error from redis', {
                error: err,
                method: 'RedisClient.constructor',
                redisHost: config.host,
                redisPort: config.port,
            })
        );
        return this;
    }

    /** increment value of a key by 1 and set a ttl */
    incrEx(key: string, expiry: number, cb: Callback) {
        const exp = expiry.toString();
        return this._client
            .multi([['incr', key], ['expire', key, exp]])
            .exec(cb);
    }

    /** increment value of a key by a given amount and set a ttl */
    incrbyEx(key: string, amount: number, expiry: number, cb: Callback) {
        const am = amount.toString();
        const exp = expiry.toString();
        return this._client
            .multi([['incrby', key, am], ['expire', key, exp]])
            .exec(cb);
    }

    /** execute a batch of commands */
    batch(cmds: string[][], cb: Callback) {
        return this._client.pipeline(cmds).exec(cb);
    }

    /**
     * Checks if a key exists
     * @param cb - callback
     * If cb response returns 0, key does not exist.
     * If cb response returns 1, key exists.
     */
    exists(key: string, cb: Callback) {
        return this._client.exists(key, cb);
    }

    /**
     * Add a value and its score to a sorted set. If no sorted set exists, this
     * will create a new one for the given key.
     * @param score - score used to order set
     */
    zadd(key: string, score: number, value: string, cb: Callback) {
        return this._client.zadd(key, score, value, cb);
    }

    /**
     * Get number of elements in a sorted set.
     * Note: using this on a key that does not exist will return 0.
     * Note: using this on an existing key that isn't a sorted set will
     * return an error WRONGTYPE.
     */
    zcard(key: string, cb: Callback) {
        return this._client.zcard(key, cb);
    }

    /**
     * Get the score for a given value in a sorted set.
     * Note: using this on a key that does not exist will return nil.
     * Note: using this on a value that does not exist in a valid sorted set
     * key will return nil.
     */
    zscore(key: string, value: string, cb: Callback) {
        return this._client.zscore(key, value, cb);
    }

    /**
     * Remove a value from a sorted set
     * @param value - value within sorted set. Can specify multiple values
     * within an array
     * @param cb - callback
     * The cb response returns the number of values removed
     */
    zrem(key: string, value: string | string[], cb: Callback) {
        return this._client.zrem(key, value, cb);
    }

    /**
     * Get specified range of elements in a sorted set
     * @param start - start index (inclusive)
     * @param end - end index (inclusive) (can use -1)
     */
    zrange(key: string, start: number, end: number, cb: Callback) {
        return this._client.zrange(key, start, end, cb);
    }

    /**
     * Get range of elements in a sorted set based off score
     * @param min - min score value (inclusive) (can use "-inf")
     * @param max - max score value (inclusive) (can use "+inf")
     */
    zrangebyscore(
        key: string,
        min: number | string,
        max: number | string,
        cb: Callback,
    ) {
        return this._client.zrangebyscore(key, min, max, cb);
    }

    clear(cb: Callback) {
        return this._client.flushdb(cb);
    }

    disconnect() {
        this._client.disconnect();
    }
}
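A minimal wiring sketch for the wrapper; host, port, and key names are illustrative:

```ts
import { Logger } from 'werelogs';
import RedisClient from './RedisClient';

// Illustrative configuration; values depend on the deployment.
const client = new RedisClient(
    { host: 'localhost', port: 6379, password: '' },
    new Logger('RedisExample'),
);

// Count an event under a key that expires after 30 seconds.
client.incrEx('example:counter', 30, (err, res) => {
    if (err) {
        throw err;
    }
    // res is the MULTI result: [[null, <new value>], [null, 1]]
    client.disconnect();
});
```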
163 lib/metrics/StatsClient.ts Normal file
@@ -0,0 +1,163 @@
import async from 'async';
import RedisClient from './RedisClient';
import { Logger } from 'werelogs';

export default class StatsClient {
    _redis: RedisClient;
    _interval: number;
    _expiry: number;

    /**
     * @constructor
     * @param redisClient - RedisClient instance
     * @param interval - sampling interval in seconds
     * @param expiry - sampling duration in seconds
     */
    constructor(redisClient: RedisClient, interval: number, expiry: number) {
        this._redis = redisClient;
        this._interval = interval;
        this._expiry = expiry;
        return this;
    }

    /** Utility function to use when callback is undefined */
    _noop() {}

    /**
     * normalize to the nearest interval
     * @param d - Date instance
     * @return timestamp - normalized to the nearest interval
     */
    _normalizeTimestamp(d: Date): number {
        const s = d.getSeconds();
        return d.setSeconds(s - s % this._interval, 0);
    }

    /**
     * set timestamp to the previous interval
     * @param d - Date instance
     * @return timestamp - set to the previous interval
     */
    _setPrevInterval(d: Date): number {
        return d.setSeconds(d.getSeconds() - this._interval);
    }

    /**
     * build redis key to get total number of occurrences on the server
     * @param name - key name identifier
     * @param d - Date instance
     * @return key - key for redis
     */
    _buildKey(name: string, d: Date): string {
        return `${name}:${this._normalizeTimestamp(d)}`;
    }

    /**
     * reduce the array of values to a single value
     * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
     * @param arr - batch command results
     * @return the total count
     */
    _getCount(arr: [any, string | null][]): number {
        return arr.reduce((prev, a) => {
            let num = parseInt(a[1] ?? '', 10);
            num = Number.isNaN(num) ? 0 : num;
            return prev + num;
        }, 0);
    }

    /**
     * report/record a new request received on the server
     * @param id - service identifier
     * @param incr - optional increment amount
     */
    reportNewRequest(
        id: string,
        incr?: number | ((error: Error | null, value?: any) => void),
        cb?: (error: Error | null, value?: any) => void,
    ) {
        if (!this._redis) {
            return undefined;
        }

        let callback: (error: Error | null, value?: any) => void;
        let amount: number;
        if (typeof incr === 'function') {
            // In case the optional `incr` is not passed, but `cb` is passed
            callback = incr;
            amount = 1;
        } else {
            callback = (cb && typeof cb === 'function') ? cb : this._noop;
            amount = (typeof incr === 'number') ? incr : 1;
        }

        const key = this._buildKey(`${id}:requests`, new Date());

        return this._redis.incrbyEx(key, amount, this._expiry, callback);
    }

    /**
     * report/record a request that ended up being a 500 on the server
     * @param id - service identifier
     */
    report500(id: string, cb?: (error: Error | null, value?: any) => void) {
        if (!this._redis) {
            return undefined;
        }
        const callback = cb || this._noop;
        const key = this._buildKey(`${id}:500s`, new Date());
        return this._redis.incrEx(key, this._expiry, callback);
    }

    /**
     * get stats for the last x seconds, x being the sampling duration
     * @param log - Werelogs request logger
     * @param id - service identifier
     */
    getStats(log: Logger, id: string, cb: (error: Error | null, value?: any) => void) {
        if (!this._redis) {
            return cb(null, {});
        }
        const d = new Date();
        const totalKeys = Math.floor(this._expiry / this._interval);
        const reqsKeys: ['get', string][] = [];
        const req500sKeys: ['get', string][] = [];
        for (let i = 0; i < totalKeys; i++) {
            reqsKeys.push(['get', this._buildKey(`${id}:requests`, d)]);
            req500sKeys.push(['get', this._buildKey(`${id}:500s`, d)]);
            this._setPrevInterval(d);
        }
        return async.parallel([
            next => this._redis.batch(reqsKeys, next),
            next => this._redis.batch(req500sKeys, next),
        ], (err, results) => {
            /**
             * Batch result is of the format
             * [ [null, '1'], [null, '2'], [null, '3'] ] where each
             * item is the result of each batch command.
             * For each item in the result, index 0 holds the error and
             * index 1 holds the result.
             */
            const statsRes = {
                'requests': 0,
                '500s': 0,
                'sampleDuration': this._expiry,
            };
            if (err) {
                log.error('error getting stats', {
                    error: err,
                    method: 'StatsClient.getStats',
                });
                /**
                 * Redis for stats is not a critical component, so any
                 * error here is ignored: returning an InternalError
                 * could be confused with the health of the service itself.
                 */
                return cb(null, statsRes);
            }
            statsRes.requests = this._getCount((results as any)[0]);
            statsRes['500s'] = this._getCount((results as any)[1]);
            return cb(null, statsRes);
        });
    }
}
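A minimal end-to-end sketch of the counters; interval/window sizes and the service id are illustrative:

```ts
import { Logger } from 'werelogs';
import RedisClient from './RedisClient';
import StatsClient from './StatsClient';

// 5-second sampling interval, 30-second sampling window (illustrative).
const redis = new RedisClient(
    { host: 'localhost', port: 6379, password: '' },
    new Logger('StatsExample'),
);
const stats = new StatsClient(redis, 5, 30);

stats.reportNewRequest('s3', 3); // count three requests in this interval
stats.report500('s3');           // count one server error

stats.getStats(new Logger('StatsExample'), 's3', (err, res) => {
    // res looks like { requests: 3, '500s': 1, sampleDuration: 30 }
    redis.disconnect();
});
```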
125 lib/metrics/StatsModel.ts Normal file
@@ -0,0 +1,125 @@
import StatsClient from './StatsClient';

/**
 * @class StatsModel
 *
 * @classdesc Extends StatsClient and overrides how timestamps are
 * normalized: by minutes rather than by seconds
 */
export default class StatsModel extends StatsClient {
    /**
     * normalize date timestamp to the nearest hour
     * @param d - Date instance
     * @return timestamp - normalized to the nearest hour
     */
    normalizeTimestampByHour(d: Date): number {
        return d.setMinutes(0, 0, 0);
    }

    /**
     * get the hour previous to the given date
     * @param d - Date instance
     * @return timestamp - one hour prior to the date passed
     */
    _getDatePreviousHour(d: Date): number {
        return d.setHours(d.getHours() - 1);
    }

    /**
     * normalize to the nearest interval
     * @param d - Date instance
     * @return timestamp - normalized to the nearest interval
     */
    _normalizeTimestamp(d: Date): number {
        const m = d.getMinutes();
        return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
    }

    /**
     * override the method to get the result as an array of integers separated
     * by each interval
     * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
     * @param arr - each index contains the result of each batch command
     * where index 0 signifies the error and index 1 contains the result
     * @return array of integers, ordered from most recent interval to
     * oldest interval
     */
    // @ts-ignore
    // TODO change name or conform to parent class method
    _getCount(arr: [any, string | null][]) {
        return arr.reduce<number[]>((store, i) => {
            let num = parseInt(i[1] ?? '', 10);
            num = Number.isNaN(num) ? 0 : num;
            store.push(num);
            return store;
        }, []);
    }

    /**
     * get list of sorted set key timestamps
     * @param epoch - epoch time
     * @return array of sorted set key timestamps
     */
    getSortedSetHours(epoch: number) {
        const timestamps: number[] = [];
        let date = this.normalizeTimestampByHour(new Date(epoch));
        while (timestamps.length < 24) {
            timestamps.push(date);
            date = this._getDatePreviousHour(new Date(date));
        }
        return timestamps;
    }

    /**
     * get the normalized hour timestamp for given epoch time
     * @param epoch - epoch time
     * @return normalized hour timestamp for given time
     */
    getSortedSetCurrentHour(epoch: number) {
        return this.normalizeTimestampByHour(new Date(epoch));
    }

    /**
     * helper method to add an element to a sorted set, applying a TTL if
     * the set is new
     * @param key - name of key
     * @param score - score used to order set
     * @param value - value to store
     * @param cb - callback
     */
    addToSortedSet(
        key: string,
        score: number,
        value: string,
        cb: (error: Error | null, value?: any) => void,
    ) {
        this._redis.exists(key, (err, resCode) => {
            if (err) {
                return cb(err);
            }
            if (resCode === 0) {
                // milliseconds in a day
                const msInADay = 24 * 60 * 60 * 1000;
                const nearestHour = this.normalizeTimestampByHour(new Date());
                // in seconds
                const ttl = Math.ceil(
                    (msInADay - (Date.now() - nearestHour)) / 1000);
                const cmds = [
                    ['zadd', key, score.toString(), value],
                    ['expire', key, ttl.toString()],
                ];
                return this._redis.batch(cmds, (err, res) => {
                    if (err) {
                        return cb(err);
                    }
                    const cmdErr = res.find((r: any) => r[0] !== null);
                    if (cmdErr) {
                        return cb(cmdErr);
                    }
                    const successResponse = res[0][1];
                    return cb(null, successResponse);
                });
            }
            return this._redis.zadd(key, score, value, cb);
        });
    }
}
35 lib/metrics/ZenkoMetrics.ts Normal file
@@ -0,0 +1,35 @@
import promClient from 'prom-client';

export default class ZenkoMetrics {
    static createCounter(params: promClient.CounterConfiguration<string>) {
        return new promClient.Counter(params);
    }

    static createGauge(params: promClient.GaugeConfiguration<string>) {
        return new promClient.Gauge(params);
    }

    static createHistogram(params: promClient.HistogramConfiguration<string>) {
        return new promClient.Histogram(params);
    }

    static createSummary(params: promClient.SummaryConfiguration<string>) {
        return new promClient.Summary(params);
    }

    static getMetric(name: string) {
        return promClient.register.getSingleMetric(name);
    }

    static async asPrometheus() {
        return promClient.register.metrics();
    }

    static asPrometheusContentType() {
        return promClient.register.contentType;
    }

    static collectDefaultMetrics() {
        return promClient.collectDefaultMetrics();
    }
}
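A minimal sketch of the factory methods in use; the metric name and label are illustrative:

```ts
import ZenkoMetrics from './ZenkoMetrics';

// Register a counter once at startup (name/labels illustrative).
const requestsTotal = ZenkoMetrics.createCounter({
    name: 'example_requests_total',
    help: 'Total number of requests handled',
    labelNames: ['method'],
});

requestsTotal.inc({ method: 'GET' });

// Later, produce the scrape endpoint body for Prometheus:
ZenkoMetrics.asPrometheus().then(body => process.stdout.write(body));
```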
4 lib/metrics/index.ts Normal file
@@ -0,0 +1,4 @@
export { default as StatsClient } from './StatsClient';
export { default as StatsModel } from './StatsModel';
export { default as RedisClient } from './RedisClient';
export { default as ZenkoMetrics } from './ZenkoMetrics';
116 lib/models/ARN.ts Normal file
@@ -0,0 +1,116 @@
import errors from '../errors';

const validServices = {
    aws: ['s3', 'iam', 'sts', 'ring'],
    scality: ['utapi', 'sso'],
};

export default class ARN {
    _partition: string;
    _service: string;
    _region: string | null;
    _accountId?: string | null;
    _resource: string;

    /**
     * Create an ARN object from its individual components
     *
     * @constructor
     * @param partition - ARN partition (e.g. 'aws')
     * @param service - service name in partition (e.g. 's3')
     * @param [region] - AWS region
     * @param [accountId] - AWS 12-digit account ID
     * @param resource - AWS resource path (e.g. 'foo/bar')
     */
    constructor(
        partition: string,
        service: string,
        region: string | undefined | null,
        accountId: string | undefined | null,
        resource: string,
    ) {
        this._partition = partition;
        this._service = service;
        this._region = region || null;
        this._accountId = accountId || null;
        this._resource = resource;
    }

    static createFromString(arnStr: string) {
        const [arn, partition, service, region, accountId,
            resourceType, resource] = arnStr.split(':');

        if (arn !== 'arn') {
            return { error: errors.InvalidArgument.customizeDescription(
                'bad ARN: must start with "arn:"') };
        }
        if (!partition) {
            return { error: errors.InvalidArgument.customizeDescription(
                'bad ARN: must include a partition name, like "aws" in ' +
                    '"arn:aws:..."') };
        }
        if (!service) {
            return { error: errors.InvalidArgument.customizeDescription(
                'bad ARN: must include a service name, like "s3" in ' +
                    '"arn:aws:s3:..."') };
        }
        if (validServices[partition] === undefined) {
            return { error: errors.InvalidArgument.customizeDescription(
                `bad ARN: unknown partition "${partition}", should be a ` +
                    'valid partition name like "aws" in "arn:aws:..."') };
        }
        if (!validServices[partition].includes(service)) {
            return { error: errors.InvalidArgument.customizeDescription(
                `bad ARN: unsupported ${partition} service "${service}"`) };
        }
        if (accountId && !/^([0-9]{12}|[*])$/.test(accountId)) {
            return { error: errors.InvalidArgument.customizeDescription(
                `bad ARN: bad account ID "${accountId}": ` +
                    'must be a 12-digit number or "*"') };
        }
        const fullResource = (resource !== undefined ?
            `${resourceType}:${resource}` : resourceType);
        return new ARN(partition, service, region, accountId, fullResource);
    }

    getPartition() {
        return this._partition;
    }
    getService() {
        return this._service;
    }
    getRegion() {
        return this._region;
    }
    getAccountId() {
        return this._accountId;
    }
    getResource() {
        return this._resource;
    }

    isIAMAccount() {
        return this.getService() === 'iam'
            && this.getAccountId() !== null
            && this.getAccountId() !== '*'
            && this.getResource() === 'root';
    }
    isIAMUser() {
        return this.getService() === 'iam'
            && this.getAccountId() !== null
            && this.getAccountId() !== '*'
            && this.getResource().startsWith('user/');
    }
    isIAMRole() {
        return this.getService() === 'iam'
            && this.getAccountId() !== null
            && this.getResource().startsWith('role');
    }

    toString() {
        return ['arn', this.getPartition(), this.getService(),
            this.getRegion(), this.getAccountId(), this.getResource()]
            .join(':');
    }
}
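A minimal parsing sketch; the ARN string is illustrative. Note that `createFromString` returns either an `ARN` or an `{ error }` object, so callers have to discriminate:

```ts
import ARN from './ARN';

const parsed = ARN.createFromString('arn:aws:iam::123456789012:user/search');
if (parsed instanceof ARN) {
    console.log(parsed.isIAMUser());  // true: iam service, 12-digit account, 'user/' resource
    console.log(parsed.toString());   // 'arn:aws:iam::123456789012:user/search'
} else {
    console.log(parsed.error.description); // validation failure reason
}
```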
271 lib/models/BackendInfo.ts Normal file
@@ -0,0 +1,271 @@
import { RequestLogger } from 'werelogs';

import { legacyLocations } from '../constants';
import escapeForXml from '../s3middleware/escapeForXml';

type CloudServerConfig = any;

export default class BackendInfo {
    _config: CloudServerConfig;
    _requestEndpoint: string;
    _objectLocationConstraint?: string;
    _bucketLocationConstraint?: string;
    _legacyLocationConstraint?: string;

    /**
     * Represents the info necessary to evaluate which data backend to use
     * on a data put call.
     * @constructor
     * @param config - CloudServer config containing list of locations
     * @param objectLocationConstraint - location constraint
     * for object based on user meta header
     * @param bucketLocationConstraint - location
     * constraint for bucket based on bucket metadata
     * @param requestEndpoint - endpoint to which request was made
     * @param legacyLocationConstraint - legacy location constraint
     */
    constructor(
        config: CloudServerConfig,
        objectLocationConstraint: string | undefined,
        bucketLocationConstraint: string | undefined,
        requestEndpoint: string,
        legacyLocationConstraint: string | undefined,
    ) {
        this._config = config;
        this._objectLocationConstraint = objectLocationConstraint;
        this._bucketLocationConstraint = bucketLocationConstraint;
        this._requestEndpoint = requestEndpoint;
        this._legacyLocationConstraint = legacyLocationConstraint;
        return this;
    }

    /**
     * validate proposed location constraint against config
     * @param config - CloudServer config
     * @param locationConstraint - value of user
     * metadata location constraint header or bucket location constraint
     * @param log - werelogs logger
     * @return - true if valid, false if not
     */
    static isValidLocationConstraint(
        config: CloudServerConfig,
        locationConstraint: string | undefined,
        log: RequestLogger,
    ) {
        if (!locationConstraint || !(locationConstraint in config.locationConstraints)) {
            log.trace('proposed locationConstraint is invalid',
                { locationConstraint });
            return false;
        }
        return true;
    }

    /**
     * validate that request endpoint is listed in the restEndpoint config
     * @param config - CloudServer config
     * @param requestEndpoint - request endpoint
     * @param log - werelogs logger
     * @return true if present, false if not
     */
    static isRequestEndpointPresent(
        config: CloudServerConfig,
        requestEndpoint: string,
        log: RequestLogger,
    ) {
        if (!(requestEndpoint in config.restEndpoints)) {
            log.trace('requestEndpoint does not match config restEndpoints',
                { requestEndpoint });
            return false;
        }
        return true;
    }

    /**
     * validate that the locationConstraint for the request endpoint matches
     * one of the config locationConstraints
     * @param config - CloudServer config
     * @param requestEndpoint - request endpoint
     * @param log - werelogs logger
     * @return - true if matches, false if not
     */
    static isRequestEndpointValueValid(
        config: CloudServerConfig,
        requestEndpoint: string,
        log: RequestLogger,
    ) {
        const restEndpoint = config.restEndpoints[requestEndpoint];
        if (!(restEndpoint in config.locationConstraints)) {
            log.trace('the default locationConstraint for request' +
                'Endpoint does not match any config locationConstraint',
                { requestEndpoint });
            return false;
        }
        return true;
    }

    /**
     * validate that s3 server is running with a file or memory backend
     * @param config - CloudServer config
     * @param log - werelogs logger
     * @return - true if running with file/mem backend, false if not
     */
    static isMemOrFileBackend(config: CloudServerConfig, log: RequestLogger) {
        if (config.backends.data === 'mem' || config.backends.data === 'file') {
            log.trace('use data backend for the location', {
                dataBackend: config.backends.data,
                method: 'isMemOrFileBackend',
            });
            return true;
        }
        return false;
    }

    /**
     * validate requestEndpoint against config or mem/file data backend
     * - if there is no match for the request endpoint in the config
     * restEndpoints and data backend is set to mem or file we will use this
     * data backend for the location.
     * - if locationConstraint for request endpoint does not match
     * any config locationConstraint, we will return an error
     * @param config - CloudServer config
     * @param requestEndpoint - request endpoint
     * @param log - werelogs logger
     * @return - true if valid, false if not
     */
    static isValidRequestEndpointOrBackend(
        config: CloudServerConfig,
        requestEndpoint: string,
        log: RequestLogger,
    ) {
        if (!BackendInfo.isRequestEndpointPresent(config, requestEndpoint,
            log)) {
            return BackendInfo.isMemOrFileBackend(config, log);
        }
        return BackendInfo.isRequestEndpointValueValid(config, requestEndpoint,
            log);
    }

    /**
     * validate controlling BackendInfo parameter
     * @param config - CloudServer config
     * @param objectLocationConstraint - value of user
     * metadata location constraint header
     * @param bucketLocationConstraint - location
     * constraint from bucket metadata
     * @param requestEndpoint - endpoint of request
     * @param log - werelogs logger
     * @return - location constraint validity
     */
    static controllingBackendParam(
        config: CloudServerConfig,
        objectLocationConstraint: string | undefined,
        bucketLocationConstraint: string | null,
        requestEndpoint: string,
        log: RequestLogger,
    ) {
        if (objectLocationConstraint) {
            if (BackendInfo.isValidLocationConstraint(config,
                objectLocationConstraint, log)) {
                log.trace('objectLocationConstraint is valid');
                return { isValid: true };
            }
            log.trace('objectLocationConstraint is invalid');
            return { isValid: false, description: 'Object Location Error - ' +
                `Your object location "${escapeForXml(objectLocationConstraint)}"` +
                ' is not in your location config - Please update.' };
        }
        if (bucketLocationConstraint) {
            if (BackendInfo.isValidLocationConstraint(config,
                bucketLocationConstraint, log)) {
                log.trace('bucketLocationConstraint is valid');
                return { isValid: true };
            }
            log.trace('bucketLocationConstraint is invalid');
            return { isValid: false, description: 'Bucket Location Error - ' +
                `Your bucket location "${escapeForXml(bucketLocationConstraint)}"` +
                ' is not in your location config - Please update.' };
        }
        const legacyLocationConstraint =
            BackendInfo.getLegacyLocationConstraint(config);
        if (legacyLocationConstraint) {
            log.trace('legacy location is valid');
            return { isValid: true, legacyLocationConstraint };
        }
        if (!BackendInfo.isValidRequestEndpointOrBackend(config,
            requestEndpoint, log)) {
            return { isValid: false, description: 'Endpoint Location Error - ' +
                `Your endpoint "${requestEndpoint}" is not in restEndpoints ` +
                'in your config OR the default location constraint for request ' +
                `endpoint "${escapeForXml(requestEndpoint)}" does not ` +
                'match any config locationConstraint - Please update.' };
        }
        if (BackendInfo.isRequestEndpointPresent(config, requestEndpoint,
            log)) {
            return { isValid: true };
        }
        return { isValid: true, defaultedToDataBackend: true };
    }

    /**
     * Return legacyLocationConstraint
     * @param config CloudServer config
     * @return legacyLocationConstraint
     */
    static getLegacyLocationConstraint(config: CloudServerConfig) {
        return legacyLocations.find(ll => config.locationConstraints[ll]);
    }

    /**
     * Return objectLocationConstraint
     * @return objectLocationConstraint
     */
    getObjectLocationConstraint() {
        return this._objectLocationConstraint;
    }

    /**
     * Return bucketLocationConstraint
     * @return bucketLocationConstraint
     */
    getBucketLocationConstraint() {
        return this._bucketLocationConstraint;
    }

    /**
     * Return requestEndpoint
     * @return requestEndpoint
     */
    getRequestEndpoint() {
        return this._requestEndpoint;
    }

    /**
     * Return locationConstraint that should be used with put request
     * Order of priority is:
     * (1) objectLocationConstraint,
     * (2) bucketLocationConstraint,
     * (3) legacyLocationConstraint,
     * (4) default locationConstraint for requestEndpoint if requestEndpoint
     *     is listed in restEndpoints in config.json
     * (5) default data backend
     * @return locationConstraint
     */
    getControllingLocationConstraint(): string {
        const objectLC = this.getObjectLocationConstraint();
        const bucketLC = this.getBucketLocationConstraint();
        const reqEndpoint = this.getRequestEndpoint();
        if (objectLC) {
            return objectLC;
        }
        if (bucketLC) {
            return bucketLC;
        }
        if (this._legacyLocationConstraint) {
            return this._legacyLocationConstraint;
        }
        if (this._config.restEndpoints[reqEndpoint]) {
            return this._config.restEndpoints[reqEndpoint];
        }
        return this._config.backends.data;
    }
}
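A minimal sketch of the priority order in practice. The config fragment and stub logger below are assumptions for illustration (`CloudServerConfig` is typed `any`, so only the fields this class reads are stubbed):

```ts
import BackendInfo from './BackendInfo';

// Illustrative config fragment; only .trace is used on the logger here.
const config = {
    backends: { data: 'file' },
    locationConstraints: { 'us-east-1': {} },
    restEndpoints: { localhost: 'us-east-1' },
};
const log = { trace: () => {} } as any;

// A header-provided object location constraint wins over everything else.
const check = BackendInfo.controllingBackendParam(
    config, 'us-east-1', null, 'localhost', log);
console.log(check); // { isValid: true }

// With no object/bucket/legacy constraint, fall back to the endpoint default.
const info = new BackendInfo(config, undefined, undefined, 'localhost', undefined);
console.log(info.getControllingLocationConstraint()); // 'us-east-1'
```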
Some files were not shown because too many files have changed in this diff.