Compare commits: bf/trycatc...developmen
2188 commits
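For reference, a listing like the one below can be reproduced locally with plain git. This is a minimal sketch: `<base>` and `<head>` are placeholders for the full branch names, which the compare view truncates above (`bf/trycatc`, `developmen`), and it assumes the view lists commits reachable from the target branch but not from the base.

```
# Count the commits on <head> that are not on <base>
# (should match the commit count shown in the header)
git rev-list --count <base>..<head>

# Print author name and abbreviated SHA-1 for each commit,
# mirroring the Author | SHA1 columns of the table below
git log --format='%an | %h' <base>..<head>
```

The Date column in the table is empty because the dates did not survive extraction; `git log` can recover them with the `%ad` format placeholder if needed.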
Author | SHA1 | Date |
---|---|---|
Vitaliy Filippov | 19855115ae | |
Vitaliy Filippov | 329d8ef32c | |
Vitaliy Filippov | f0ded4ea4f | |
Vitaliy Filippov | 3eea263384 | |
Vitaliy Filippov | c26d4f7d70 | |
Vitaliy Filippov | 63137e7a7b | |
Vitaliy Filippov | fdb23b1cd2 | |
Vitaliy Filippov | 4120eac127 | |
Maha Benzekri | d9bbd6cf3e | |
Maha Benzekri | 65e89d286d | |
Maha Benzekri | dcbc5ca98f | |
Maha Benzekri | 817bb836ec | |
Maha Benzekri | e3e4b2aea7 | |
Francois Ferrand | 9cd72221e8 | |
Francois Ferrand | bdcd4685ad | |
Francois Ferrand | b2b6c47ba7 | |
Jonathan Gramain | da173d53b4 | |
Jonathan Gramain | 7eb2701f21 | |
Jonathan Gramain | 6ec3c8e10d | |
Jonathan Gramain | 7aaf277db2 | |
Francois Ferrand | 67421f8c76 | |
Francois Ferrand | bf2260b1ae | |
Francois Ferrand | 11e0e1b489 | |
Anurag Mittal | f13ec2cf4c | |
Anurag Mittal | e369c7e6d2 | |
Anurag Mittal | c5c1db4568 | |
Anurag Mittal | 58f4d3cb3a | |
Anurag Mittal | b049f39e2a | |
williamlardier | 30eaaf15eb | |
williamlardier | 9d16fb0a34 | |
williamlardier | cdc612f379 | |
williamlardier | 61dd65b2c4 | |
bert-e | 2c0696322e | |
Maha Benzekri | c464a70b90 | |
Maha Benzekri | af07bb3df4 | |
Maha Benzekri | 1858654f34 | |
Maha Benzekri | 0475c8520a | |
Maha Benzekri | 31a4de5372 | |
Maha Benzekri | 0c53d13439 | |
Maha Benzekri | cad8b14df1 | |
Nicolas Humbert | fe29bacc79 | |
Nicolas Humbert | a86cff4631 | |
Kerkesni | f13a5d79ea | |
Maha Benzekri | ca8f570f15 | |
Maha Benzekri | a4bca10faf | |
Jonathan Gramain | c2ab4a2052 | |
Jonathan Gramain | fd0aa314eb | |
Jonathan Gramain | a643a3e6cc | |
Jonathan Gramain | e9d815cc9d | |
Jonathan Gramain | c86d24fc8f | |
Jonathan Gramain | 3b6d3838f5 | |
Jonathan Gramain | fcdfa889be | |
Mickael Bourgois | 5b8fcf0313 | |
Mickael Bourgois | bdfde26fe4 | |
Mickael Bourgois | e53613783a | |
Mickael Bourgois | 69dbbb143a | |
Mickael Bourgois | 403c4e5040 | |
Nicolas Humbert | a1dc2bd84d | |
Nicolas Humbert | 01409d690c | |
Nicolas Humbert | 9ee40f343b | |
bert-e | 77ed018b4f | |
bert-e | f77700236f | |
Nicolas Humbert | 43ff16b28a | |
bert-e | 05c628728d | |
Nicolas Humbert | 2a807dc4ef | |
Nicolas Humbert | 1f8b0a4032 | |
bert-e | 0dd7fe9875 | |
Mickael Bourgois | f7a6af8d9a | |
Mickael Bourgois | e6d0eff1a8 | |
Mickael Bourgois | 9d558351e7 | |
Mickael Bourgois | 68150da72e | |
Mickael Bourgois | 2b2c4bc50e | |
Mickael Bourgois | 3068086a97 | |
Mickael Bourgois | 0af7eb5530 | |
bert-e | 7e372b7bd5 | |
bert-e | a121810552 | |
bert-e | 9bf1bcc483 | |
Nicolas Humbert | 06402c6c94 | |
Nicolas Humbert | a6f3c82827 | |
Nicolas Humbert | f1891851b3 | |
bert-e | a1eed4fefb | |
Nicolas Humbert | 68204448a1 | |
Nicolas Humbert | 40e271f7e2 | |
bert-e | d8f7f18f5a | |
bert-e | 5f4d7afefb | |
bert-e | 2482fdfafc | |
bert-e | e151b3fff1 | |
Nicolas Humbert | b8bbdbbd81 | |
Nicolas Humbert | 46258bca74 | |
williamlardier | b6bc11881a | |
williamlardier | 648257612b | |
williamlardier | 7423fac674 | |
williamlardier | 9647043a02 | |
williamlardier | f9e1f91791 | |
williamlardier | 9c5bc2bfe0 | |
Jonathan Gramain | 1a0a981271 | |
bert-e | a45b2eb6a4 | |
bert-e | b00378d46d | |
Mickael Bourgois | 2c3bfb16ef | |
Jonathan Gramain | c72d8be223 | |
Jonathan Gramain | f63cb3c762 | |
bert-e | 15fd621c5c | |
bert-e | effbf63dd4 | |
bert-e | 285fe2f63b | |
bert-e | 1d8ebe6a9c | |
bert-e | 00555597e0 | |
bert-e | bddc2ccd01 | |
Jonathan Gramain | 7908654b51 | |
Jonathan Gramain | 0d7cf8d40a | |
Jonathan Gramain | c4c75e976c | |
Jonathan Gramain | 1266a14253 | |
williamlardier | 851c72bd0f | |
bert-e | 722b6ae699 | |
bert-e | 29925a15ad | |
williamlardier | 6b64f50450 | |
Jonathan Gramain | 8dc3ba7ca6 | |
bert-e | 3c2283b062 | |
Jonathan Gramain | a6a76acede | |
Jonathan Gramain | 6a116734a9 | |
Jonathan Gramain | 9325ea4996 | |
Jonathan Gramain | 33ba89f0cf | |
Jonathan Gramain | c67331d350 | |
Jonathan Gramain | 6d6f1860ef | |
Nicolas Humbert | cbe6a5e2d6 | |
Mickael Bourgois | be1557d972 | |
Mickael Bourgois | a03463061c | |
Mickael Bourgois | 8ad0ea73a7 | |
Mickael Bourgois | a94040d13b | |
Mickael Bourgois | f265ed6122 | |
Mickael Bourgois | 7301c706fd | |
Mickael Bourgois | bfc8dee559 | |
Frédéric Meinnel | 5a5ef7c572 | |
Frédéric Meinnel | 918c2c5473 | |
Frédéric Meinnel | 29f39ab480 | |
Frédéric Meinnel | b7ac7f4616 | |
Frédéric Meinnel | f8ce90f9c3 | |
Frédéric Meinnel | 5734d11cf1 | |
Frédéric Meinnel | 4da59769d2 | |
Frédéric Meinnel | 60573991ee | |
Jonathan Gramain | 6f58f9dd68 | |
Jonathan Gramain | 3b9c93be68 | |
Jonathan Gramain | 081af3e795 | |
bert-e | 042f541a45 | |
bert-e | 63bf2cb5b1 | |
bert-e | 39f42d9cb4 | |
Mickael Bourgois | 02f126f040 | |
bert-e | 1477a70e47 | |
Mickael Bourgois | 7233ec2635 | |
Mickael Bourgois | c4b44016bc | |
Mickael Bourgois | a78a84faa7 | |
Mickael Bourgois | c3ff6526a1 | |
Frédéric Meinnel | 59d47a3e21 | |
Frédéric Meinnel | 6b61347c29 | |
Mickael Bourgois | 4bf29524eb | |
Mickael Bourgois | 9aa001c4d1 | |
Frédéric Meinnel | aea4663ff2 | |
Frédéric Meinnel | 5012e9209c | |
Frédéric Meinnel | 1568ad59c6 | |
bert-e | c2f6b45116 | |
bert-e | a0322b131c | |
Mickael Bourgois | b5487e3c94 | |
bert-e | 993b9e6093 | |
bert-e | ddd6c87831 | |
Mickael Bourgois | f2974cbd07 | |
bert-e | 7440794d93 | |
Mickael Bourgois | 1efab676bc | |
Mickael Bourgois | a167e1d5fa | |
Mickael Bourgois | c7e153917a | |
bert-e | 087369b37d | |
bert-e | 2d2030dfe4 | |
bert-e | 45cc4aa79e | |
Will Toozs | da80e12dab | |
Will Toozs | a7cf94d0fe | |
Jonathan Gramain | 2a82095d03 | |
Jonathan Gramain | 44b3d25459 | |
Jonathan Gramain | f1d6e30fb6 | |
Jonathan Gramain | 9186643caa | |
Jonathan Gramain | 485a76ceb9 | |
Jonathan Gramain | 00109a2c44 | |
Jonathan Gramain | aed1247825 | |
Jonathan Gramain | 0507c04ce9 | |
Will Toozs | 62736abba4 | |
Will Toozs | 97118f09c4 | |
Will Toozs | 5a84a8c0ad | |
bert-e | 37234efd14 | |
Jonathan Gramain | 2799381ef2 | |
Jonathan Gramain | a3f13e5387 | |
Jonathan Gramain | f4e83086d6 | |
Jonathan Gramain | d08a267965 | |
Jonathan Gramain | 063a2fb8fb | |
Jonathan Gramain | 1bc3360daf | |
Jonathan Gramain | 206f14bdf5 | |
Maha Benzekri | 74ff1691a0 | |
Maha Benzekri | 5ffae72693 | |
Maha Benzekri | 477a574500 | |
bert-e | 2a4ea38301 | |
bert-e | df4c22154e | |
Maha Benzekri | 3642ac03b2 | |
Francois Ferrand | d800179f86 | |
Francois Ferrand | c1c45a4af9 | |
Francois Ferrand | da536ed037 | |
Nicolas Humbert | 06901104e8 | |
Nicolas Humbert | a99a6d9d97 | |
Nicolas Humbert | 06244059a8 | |
Nicolas Humbert | 079f631711 | |
Benoit A. | 863f45d256 | |
KillianG | 4b642cf8b4 | |
KillianG | 2537f8aa9a | |
Maha Benzekri | 7866a1d06f | |
Maha Benzekri | 29ef2ef265 | |
Maha Benzekri | 1509f1bdfe | |
Maha Benzekri | 13d349d211 | |
Maha Benzekri | 34a32c967d | |
Maha Benzekri | 90ab985271 | |
Maha Benzekri | fbf5562a11 | |
bert-e | d79ed1b9c8 | |
bert-e | c34ad0dc31 | |
Maha Benzekri | df5ff0f400 | |
Maha Benzekri | 777783171a | |
Will Toozs | 39988e52e2 | |
Will Toozs | 79c82a4c3d | |
williamlardier | 17b5bbc233 | |
williamlardier | 4aa8b5cc6e | |
williamlardier | 5deed6c2e1 | |
Nicolas Humbert | af34571771 | |
Nicolas Humbert | 79b83a9067 | |
Nicolas Humbert | 5fd675a316 | |
Nicolas Humbert | d84cc974d3 | |
Maha Benzekri | dcf0f902ff | |
Maha Benzekri | 0177fbe98f | |
Maha Benzekri | f49cea3914 | |
Maha Benzekri | 73c6f41fa3 | |
bert-e | 5b66f8d089 | |
bert-e | b61d178b18 | |
Maha Benzekri | 9ea39c6ed9 | |
Florent Monjalet | e51b06cfea | |
Florent Monjalet | f2bc701f8c | |
Nicolas Humbert | 4d6b03ba47 | |
Nicolas Humbert | f03f049683 | |
Nicolas Humbert | d7b51de024 | |
Nicolas Humbert | cf51adf1c7 | |
Nicolas Humbert | 8a7c1be2d1 | |
Nicolas Humbert | c049df0a97 | |
Nicolas Humbert | 2b2667e29a | |
Nicolas Humbert | 8eb4a29c36 | |
bert-e | 862317703e | |
Nicolas Humbert | e69a97f240 | |
Nicolas Humbert | 81e838000f | |
bert-e | 547ce816e0 | |
Nicolas Humbert | 8256d6debf | |
bert-e | 15d5e93a2d | |
Nicolas Humbert | 69c1698eb7 | |
bert-e | d11bcb56e9 | |
Nicolas Humbert | c2cd90925f | |
bert-e | 0ed35c3d86 | |
bert-e | b1723594eb | |
Nicolas Humbert | c0218821ff | |
Nicolas Humbert | 49e32758fb | |
Nicolas Humbert | e13d0f5ed8 | |
Nicolas Humbert | 0d5907956f | |
Nicolas Humbert | f0c5d60ce9 | |
Nicolas Humbert | 8c2f4cf357 | |
Nicolas Humbert | f3f1da9bb3 | |
Nicolas Humbert | 036b75842e | |
Nicolas Humbert | 7ac5774635 | |
Nicolas Humbert | f3b928fce0 | |
Nicolas Humbert | 7173a357d9 | |
Nicolas Humbert | 7c4f461196 | |
Nicolas Humbert | 0a4d6f862f | |
bert-e | 8716fee67d | |
bert-e | 2938bb0c88 | |
williamlardier | 05c93446ab | |
williamlardier | 8d758327dd | |
williamlardier | be63c09624 | |
Nicolas Humbert | 4615875462 | |
Rahul Padigela | bdb59a0e63 | |
bert-e | a89d1d8d75 | |
Rahul Padigela | 89e5f7dffe | |
williamlardier | 57e84980c8 | |
williamlardier | 51bfd41bea | |
Nicolas Humbert | 96cbaeb821 | |
Nicolas Humbert | cb01346d07 | |
Nicolas Humbert | 3f24336b83 | |
Nicolas Humbert | 1e66518a79 | |
bert-e | 15b68fa9fa | |
Nicolas Humbert | 51703a65f5 | |
bert-e | 09aaa2d5ee | |
Nicolas Humbert | ad39d90b6f | |
Jonathan Gramain | 20e9fe4adb | |
bert-e | e9c67f7f67 | |
Jonathan Gramain | af3fd17ec2 | |
bert-e | 536d474f57 | |
bert-e | 55e68cfa17 | |
bert-e | 67c98fd81b | |
williamlardier | 5cd70d7cf1 | |
KillianG | 25be9014c9 | |
KillianG | ed42f24580 | |
KillianG | ce076cb3df | |
KillianG | 4bc3de52ff | |
bert-e | beb5f69be3 | |
bert-e | 5f3540a0d5 | |
bert-e | 654d628d39 | |
gaspardmoindrot | e8a409e337 | |
Alexander Chan | 4093bf2b04 | |
Alexander Chan | d0bb6d5b0c | |
bert-e | 3f7229eebe | |
bert-e | 7eb9d52da5 | |
Nicolas Humbert | e216c9dd20 | |
williamlardier | 0c1afe535b | |
williamlardier | 73335ae6ec | |
Alexander Chan | 99c514e8f2 | |
Alexander Chan | cfd9fdcfc4 | |
Alexander Chan | d809dac5e3 | |
williamlardier | 53dac8d233 | |
williamlardier | 6d5ef07eee | |
williamlardier | 272166e406 | |
williamlardier | 3af05e672b | |
williamlardier | 8b0c90cb2f | |
Alexander Chan | dfc9b761e2 | |
Alexander Chan | 04f1eb7f04 | |
bert-e | c204b90847 | |
bert-e | 78d6e7fd72 | |
Alexander Chan | 7768fa8d35 | |
KillianG | 4d9a9adc48 | |
KillianG | c4804e52ee | |
KillianG | 671cf3a679 | |
Jonathan Gramain | 9a5e27f97b | |
Jonathan Gramain | d744a709d2 | |
Jonathan Gramain | a9d003c6f8 | |
Jonathan Gramain | 99e04bd6fa | |
Jonathan Gramain | d3bdddeba3 | |
bert-e | 3252f7de03 | |
Jonathan Gramain | c4cc5a2c3d | |
Jonathan Gramain | fedd0190cc | |
Jonathan Gramain | 56fd4ad734 | |
Jonathan Gramain | ebe6b65fcf | |
Nicolas Humbert | 7994bf7b96 | |
Nicolas Humbert | 4be0a06c4a | |
bert-e | da7dbdc51f | |
Will Toozs | 2103ef1237 | |
Will Toozs | dbc1c54246 | |
bert-e | 6c22f8404d | |
KillianG | 00e03f0592 | |
KillianG | d453758b7d | |
KillianG | a964dc99c3 | |
Jonathan Gramain | 3a4da1d7c0 | |
williamlardier | 5074e6c0a4 | |
williamlardier | bd05dd6918 | |
williamlardier | fbda12ce3c | |
Nicolas Humbert | b02934bb39 | |
Nicolas Humbert | c9a444969b | |
Nicolas Humbert | 5d018860ec | |
bert-e | 5838e02096 | |
Nicolas Humbert | ecd600ac4b | |
Naren | ab0324da05 | |
Naren | 2b353b33af | |
Naren | 5377b20ceb | |
Naren | 21b329b301 | |
Naren | bd76402586 | |
bert-e | fd57f47be1 | |
bert-e | 94edf8be70 | |
Naren | 1d104345fd | |
Jonathan Gramain | 58e47e5015 | |
Jonathan Gramain | 4d782ecec6 | |
Jonathan Gramain | 655a10ce52 | |
Jonathan Gramain | 0c7f0e607d | |
Jonathan Gramain | caa5d53e9b | |
Jonathan Gramain | 21da975187 | |
bert-e | e0df67a115 | |
Naren | 7e18ae77e0 | |
Naren | 4750118f85 | |
Naren | c273c8b823 | |
Jonathan Gramain | d3b50fafa8 | |
Naren | 47e68a9b60 | |
Naren | bd0a199ffa | |
Naren | 4b1f69bcbb | |
Naren | e3a6814e3f | |
Alexander Chan | bf4072151f | |
Alexander Chan | f33cd69e45 | |
Alexander Chan | acd13ff31b | |
Alexander Chan | bb3e5d078f | |
Jonathan Gramain | 22fa04b7e7 | |
Jonathan Gramain | 10a94a0a96 | |
bert-e | 4d71a834d5 | |
Alexander Chan | 054f61d6c1 | |
Alexander Chan | fa26a487f5 | |
Alexander Chan | c1dd2e4946 | |
Alexander Chan | a714103b82 | |
Jonathan Gramain | 66740f5aba | |
Jonathan Gramain | a3a83dd89c | |
williamlardier | 8db8109391 | |
Jonathan Gramain | d90af29019 | |
Jonathan Gramain | 9d8d98fcc9 | |
Jonathan Gramain | 01830d19a0 | |
Jonathan Gramain | 49cc018fa4 | |
Jonathan Gramain | dd87c869ca | |
Jonathan Gramain | df44cffb96 | |
Jonathan Gramain | 164053d1e8 | |
Jonathan Gramain | af741c50fb | |
williamlardier | 9c46703b89 | |
williamlardier | 47672d60ce | |
Jonathan Gramain | 6d41d103e8 | |
Jonathan Gramain | 34ccca9b07 | |
Jonathan Gramain | 6e5d8d14af | |
Jonathan Gramain | 890ac08dcd | |
Jonathan Gramain | 4cda9f6a6b | |
Jonathan Gramain | fbb62ef17c | |
Jonathan Gramain | 4949b7cc35 | |
Jonathan Gramain | 2b6fee4e84 | |
Jonathan Gramain | 8077186c3a | |
Jonathan Gramain | 1c07618b18 | |
Jonathan Gramain | 4d7eaee0cc | |
williamlardier | c460338163 | |
williamlardier | f17d52b602 | |
williamlardier | a6b234b7a8 | |
williamlardier | ff353bb4d6 | |
williamlardier | 0f9c9c2f18 | |
williamlardier | f6b2cf2c1a | |
Kerkesni | ecafbae36a | |
Kerkesni | d1cd7e8dba | |
Francois Ferrand | 3da6719200 | |
Francois Ferrand | c0dd54ef51 | |
Francois Ferrand | 7910792390 | |
Francois Ferrand | a4f4c51290 | |
Francois Ferrand | 66c4bc52b5 | |
Francois Ferrand | 81cd6652d6 | |
Francois Ferrand | 2a07f67244 | |
Francois Ferrand | 1a634015ee | |
williamlardier | 7a88a54918 | |
williamlardier | b25e620750 | |
williamlardier | 38ef89cc83 | |
williamlardier | 1a6c828bfc | |
williamlardier | 3d769c6960 | |
williamlardier | 8a27920a85 | |
williamlardier | 7642a22176 | |
Jonathan Gramain | 7b64896234 | |
Jonathan Gramain | 4f0a846814 | |
bert-e | 8f63687ef3 | |
Kerkesni | 26f45fa81a | |
Kerkesni | 76b59057f7 | |
Kerkesni | ae0da3d605 | |
bert-e | 7c1bd453ee | |
bert-e | 162d9ec46b | |
Kerkesni | ccd6462015 | |
Kerkesni | 665c77570c | |
Kerkesni | 27307b397c | |
Kerkesni | 414eada32b | |
Kerkesni | fdf0c6fe99 | |
Kerkesni | 8cc0be7da2 | |
bert-e | 65231633a7 | |
Kerkesni | 9a975723c1 | |
Kerkesni | ef024ddef3 | |
Kerkesni | b61138a348 | |
Kerkesni | d852eef08e | |
Kerkesni | fd63b857f3 | |
Alexander Chan | 92c567414a | |
Alexander Chan | ec55e39175 | |
Jonathan Gramain | c343820cae | |
Jonathan Gramain | 0f9da6a44e | |
Jonathan Gramain | 53a42f7411 | |
Jonathan Gramain | 9c2bed8034 | |
williamlardier | 8307a1513e | |
williamlardier | 706c2425fe | |
williamlardier | 8618d77de9 | |
williamlardier | 9d614a4ab3 | |
williamlardier | 7763685cb0 | |
Artem Bakalov | 8abe746222 | |
Artem Bakalov | 4c6712741b | |
bert-e | e74cca6795 | |
Artem Bakalov | 87b060f2ae | |
bert-e | 1427abecb7 | |
bert-e | 9dc357ab8d | |
bert-e | 4771ce3067 | |
Artem Bakalov | f62c3d22ed | |
williamlardier | 4e8a907d99 | |
williamlardier | a237e38c51 | |
williamlardier | 4388cb7790 | |
williamlardier | 095a2012cb | |
Killian Gardahaut | 6f42b3e64c | |
Killian Gardahaut | 264e0c1aad | |
Jonathan Gramain | 237872a5a3 | |
Jonathan Gramain | 0130355e1a | |
bert-e | 390fd97edf | |
Nicolas Humbert | 1c9e4eb93d | |
bert-e | af50ef47d7 | |
bert-e | a4f163f466 | |
Nicolas Humbert | 4d0cc9bc12 | |
bert-e | 657f969d05 | |
Jonathan Gramain | 4f2b1ca960 | |
bert-e | b43cf22b2c | |
Killian Gardahaut | 46c44ccaa6 | |
Killian Gardahaut | f45f65596b | |
bert-e | 90c63168c1 | |
bert-e | 10402ae78d | |
Jonathan Gramain | 5cd1df8601 | |
Jonathan Gramain | ee38856f29 | |
Jonathan Gramain | fe5f868f43 | |
Jonathan Gramain | dc229bb8aa | |
Killian Gardahaut | c0ee81eb7a | |
Killian Gardahaut | a6a48e812f | |
bert-e | 604a0170f1 | |
bert-e | 5a8372437b | |
Killian Gardahaut | 9d8f4793c9 | |
Killian Gardahaut | 69d33a3341 | |
Killian Gardahaut | c4ead93bd9 | |
Jonathan Gramain | 981c9c1a23 | |
Jonathan Gramain | 71de409ee9 | |
KillianG | 806f988334 | |
KillianG | 976a05c3e5 | |
KillianG | 46c24c5cc3 | |
Killian Gardahaut | c5004cb521 | |
KillianG | bc9cfb0b6d | |
KillianG | 4b6e342ff8 | |
Killian Gardahaut | d48d4d0c18 | |
Killian Gardahaut | 5a32c8eca0 | |
Kerkesni | 480f5a4427 | |
bert-e | 852ae9bd0f | |
Kerkesni | 6c132bca90 | |
Taylor McKinnon | 3d77540c47 | |
Taylor McKinnon | 3882ecf1a0 | |
Taylor McKinnon | 4f0506cf31 | |
Taylor McKinnon | acf38cc010 | |
Nicolas Humbert | d92a91f076 | |
Nicolas Humbert | 28779db602 | |
Alexander Chan | 8db16c5532 | |
Jordi Bertran de Balanda | 33439ec215 | |
Jordi Bertran de Balanda | 785b824b69 | |
bert-e | 9873c0f112 | |
Jordi Bertran de Balanda | 63212e2db3 | |
Nicolas Humbert | 725a492c2c | |
Nicolas Humbert | e446e3e132 | |
bert-e | 25c6b34a1e | |
Jordi Bertran de Balanda | 721d7ede93 | |
Jordi Bertran de Balanda | 3179d1c620 | |
Nicolas Humbert | fbbba32d69 | |
Jordi Bertran de Balanda | 56c1ba5c21 | |
Will Toozs | 73431094a3 | |
Will Toozs | aed1d8419b | |
Will Toozs | c3cb0aa514 | |
bert-e | 5919d20fa4 | |
Nicolas Humbert | 56665069c1 | |
Nicolas Humbert | 61fe54bd73 | |
Francois Ferrand | e227d9d5ca | |
Francois Ferrand | a206b5f95e | |
Francois Ferrand | 9b8f9f8afd | |
Francois Ferrand | cdcc44d272 | |
Francois Ferrand | 066be20a9d | |
Xin LI | 5acef6895f | |
Xin LI | 6e3386f693 | |
Xin LI | 2c630848ee | |
williamlardier | f7d360fe0b | |
williamlardier | 0a61b43252 | |
williamlardier | c014e630be | |
williamlardier | a747d5feda | |
KillianG | 765857071a | |
KillianG | 91b39da7e5 | |
williamlardier | 2cc6ebe9b4 | |
Xin LI | 5634e1bb1f | |
williamlardier | 7887d22d0d | |
williamlardier | 2f142aea7f | |
williamlardier | 26a046c9b2 | |
bert-e | ab23d59daf | |
williamlardier | b744385584 | |
bert-e | 6950df200a | |
williamlardier | d407cd702b | |
williamlardier | 3265d162a7 | |
bert-e | 67200d80ad | |
williamlardier | 20a071fba9 | |
bert-e | aa2992cd9f | |
bert-e | f897dee3c5 | |
williamlardier | 0e2071ed3b | |
williamlardier | ad579b2bd2 | |
Guillaume Hivert | 139da904a7 | |
Guillaume Hivert | e8851b40c0 | |
Guillaume Hivert | 536f36df4e | |
Naren | cd9456b510 | |
Alexander Chan | 15f07538d8 | |
Guillaume Hivert | e95d07af12 | |
Guillaume Hivert | 571128efb1 | |
Guillaume Hivert | f1478cbc66 | |
Guillaume Hivert | b21f7f3440 | |
Guillaume Hivert | ca2d23710f | |
Guillaume Hivert | 310fd30266 | |
Guillaume Hivert | 75c5c855d9 | |
Guillaume Hivert | 8743e9c3ac | |
bert-e | b2af7c0aea | |
Guillaume Hivert | 43d466e2fe | |
bert-e | 58c24376aa | |
Guillaume Hivert | efa8c8e611 | |
Guillaume Hivert | 62c13c1eed | |
Guillaume Hivert | ee81fa5829 | |
Guillaume Hivert | 820ad4f8af | |
Guillaume Hivert | 34eeecf6de | |
Guillaume Hivert | 050f5ed002 | |
Guillaume Hivert | 2fba338639 | |
Guillaume Hivert | 950ac8e19b | |
Guillaume Hivert | 61929bb91a | |
Guillaume Hivert | 9175148bd1 | |
Guillaume Hivert | 5f08ea9310 | |
Guillaume Hivert | 707bf795a9 | |
Guillaume Hivert | fcf64798dc | |
Guillaume Hivert | 9b607be633 | |
Guillaume Hivert | 01a8992cec | |
Guillaume Hivert | 301541223d | |
Guillaume Hivert | 4f58a4b2f3 | |
Guillaume Hivert | 6f3babd223 | |
bert-e | d7df1df2b6 | |
Artem Bakalov | 3f26b432b7 | |
bert-e | f59b1b5e07 | |
bert-e | b684bdbaa9 | |
Guillaume Hivert | a3418603d0 | |
Guillaume Hivert | 947ccd90d9 | |
Guillaume Hivert | 23113616d9 | |
Guillaume Hivert | f460ffdb21 | |
Guillaume Hivert | dfa49c79c5 | |
Guillaume Hivert | ba94dc7e86 | |
Guillaume Hivert | e582882883 | |
Guillaume Hivert | dd61c1abbe | |
Guillaume Hivert | 5e8f4f2a30 | |
Guillaume Hivert | a15f8a56e3 | |
Guillaume Hivert | 43e82f7f33 | |
Guillaume Hivert | f54feec57f | |
bert-e | d7625ced17 | |
bert-e | bbe5f293f4 | |
Guillaume Hivert | a2c1989a5d | |
bert-e | 8ad1cceeb8 | |
bert-e | 24755c8472 | |
bert-e | bd970c65ea | |
bert-e | fb39a4095e | |
bert-e | 32dfba2f89 | |
Kerkesni | 43a8772529 | |
Guillaume Hivert | a2ca197bd8 | |
Guillaume Hivert | fc05956983 | |
Xin LI | 3ed46f2d16 | |
williamlardier | 5c936c94ee | |
Xin LI | f87101eef6 | |
Xin LI | 14f86282b6 | |
Xin LI | f9dba52d38 | |
Yutaka Oishi | 6714aed351 | |
williamlardier | 99f96dd377 | |
williamlardier | ae08d89d7d | |
williamlardier | c48e2948f0 | |
williamlardier | fc942febca | |
williamlardier | a4fe998c34 | |
williamlardier | 1460e94488 | |
williamlardier | dcc7117d88 | |
williamlardier | 99cee367aa | |
williamlardier | ad5a4c152d | |
bert-e | b608c043f5 | |
Guillaume Hivert | 8ec4a11a4b | |
bert-e | 079c09e1ec | |
Guillaume Hivert | c9ff3cd60e | |
bert-e | 75f07440ef | |
bert-e | 3a6bac1158 | |
Guillaume Hivert | a15d4cd130 | |
bert-e | f2d119326a | |
Guillaume Hivert | 45ba80ec23 | |
Guillaume Hivert | 2a019f3788 | |
bert-e | 5e22900c0f | |
Guillaume Hivert | 32cff324d8 | |
Guillaume Hivert | e62ed598e8 | |
Guillaume Hivert | cda5d7cfed | |
bert-e | a217ad58e8 | |
bert-e | e46b90cbad | |
bert-e | 10cf10daa4 | |
Guillaume Hivert | 6ec2f99a91 | |
bert-e | dfd8f20bf2 | |
bert-e | 435f9f7f3c | |
Guillaume Hivert | fc17ab4299 | |
Guillaume Hivert | 44f398b01f | |
Guillaume Hivert | dc32d78b0f | |
Guillaume Hivert | 9f1ea09ee6 | |
Guillaume Hivert | 073d752ad8 | |
Guillaume Hivert | 37c325f033 | |
bert-e | 3454e934f5 | |
Guillaume Hivert | 76bffb2a23 | |
Guillaume Hivert | bd498d414b | |
Guillaume Hivert | f98c65ffb4 | |
Guillaume Hivert | eae29c53dd | |
Guillaume Hivert | 8d17b69eb8 | |
Guillaume Hivert | 938d64f48e | |
Guillaume Hivert | 485ca38867 | |
Guillaume Hivert | 355c540510 | |
Jordi Bertran de Balanda | 399fdaaed0 | |
Jordi Bertran de Balanda | d97a218170 | |
Jordi Bertran de Balanda | 5084c8f971 | |
Jordi Bertran de Balanda | 82c3330321 | |
williamlardier | 3388de6fb6 | |
Guillaume Hivert | db70743439 | |
Alexander Chan | 86e9d4a356 | |
williamlardier | a0010efbdd | |
Nicolas Humbert | 8eb7efd58a | |
williamlardier | 25ae7e443b | |
williamlardier | 4afa1ed78d | |
williamlardier | 706dfddf5f | |
williamlardier | 4cce306a12 | |
williamlardier | f3bf6f2615 | |
williamlardier | bbe51b2e5e | |
williamlardier | 3cd06256d6 | |
Yutaka Oishi | 6e42216549 | |
williamlardier | e37712e94f | |
williamlardier | ac30d29509 | |
Xin LI | 1f235d569d | |
williamlardier | 320713a764 | |
williamlardier | 4594578919 | |
williamlardier | bc0cb0a8fe | |
williamlardier | 9e0cee849c | |
Artem Bakalov | fbf686feab | |
Guillaume Hivert | 4b795a245c | |
Guillaume Hivert | 983d59d565 | |
Guillaume Hivert | fd7f0a1a91 | |
bert-e | 459fd99316 | |
Guillaume Hivert | d6e4bca3ed | |
Guillaume Hivert | 235b2ac6d4 | |
bert-e | f49006a64e | |
bert-e | 8025ce08fe | |
Guillaume Hivert | 75811ba553 | |
Guillaume Hivert | 26de19b22b | |
Guillaume Hivert | 72bdd130f0 | |
Guillaume Hivert | 4131732b74 | |
Guillaume Hivert | 7cecbe27be | |
Guillaume Hivert | 3fab05071d | |
Guillaume Hivert | a98f2cede5 | |
Guillaume Hivert | 283a0863c2 | |
Guillaume Hivert | 18b089fc2d | |
Guillaume Hivert | 60139abb10 | |
Guillaume Hivert | 2cc1a9886f | |
Guillaume Hivert | 1c7122b7e4 | |
Guillaume Hivert | 4eba3ca6a0 | |
Guillaume Hivert | 670d57a9db | |
Guillaume Hivert | 8784113544 | |
bert-e | bffb00266f | |
bert-e | a6cd3a67e0 | |
dependabot[bot] | 18605a9546 | |
dependabot[bot] | 74d7fe5e68 | |
dependabot[bot] | e707cf4398 | |
bert-e | 47c34a4f5c | |
bert-e | 59f7e32037 | |
Jordi Bertran de Balanda | fb286c6403 | |
Jordi Bertran de Balanda | c9f279ac9b | |
williamlardier | 7f93695300 | |
bert-e | cecb5fc1b1 | |
bert-e | 75ba3733aa | |
dependabot[bot] | 7c6f5d34b8 | |
bert-e | 7e3190a600 | |
Jordi Bertran de Balanda | e9c4a5ce99 | |
Jordi Bertran de Balanda | 2622781a1d | |
Guillaume Hivert | f378a85799 | |
bert-e | 23ea19bcb3 | |
Guillaume Hivert | c6249cd2d5 | |
KillianG | d2c1400cb6 | |
Guillaume Hivert | 97019d3b44 | |
bert-e | 6da31dfd18 | |
Guillaume Hivert | 75b4e6328e | |
Guillaume Hivert | eb9f936e78 | |
Yutaka Oishi | ee1e65d778 | |
williamlardier | 3534927ccf | |
Jordi Bertran de Balanda | 0e3edb847e | |
Jordi Bertran de Balanda | d1930c08e8 | |
bert-e | a9f9fe99a5 | |
bert-e | 3dd0fbfc80 | |
Jordi Bertran de Balanda | a587f78242 | |
Guillaume Hivert | 2202ebac8a | |
Guillaume Hivert | 40e5100cd8 | |
Guillaume Hivert | 0851aa1406 | |
Guillaume Hivert | 5c16601657 | |
Guillaume Hivert | 3ff3330f1a | |
Guillaume Hivert | 5b02d20e4d | |
Guillaume Hivert | 867da9a3d0 | |
Guillaume Hivert | c9f6d35fa4 | |
Guillaume Hivert | c79a5c2ee3 | |
Guillaume Hivert | a400beb8b9 | |
Guillaume Hivert | 8ce0b07e63 | |
Guillaume Hivert | a0876d3df5 | |
Guillaume Hivert | e829fa3d3f | |
Guillaume Hivert | da25890556 | |
Guillaume Hivert | 8df0f5863a | |
Guillaume Hivert | 2d66248303 | |
Guillaume Hivert | 8221852eef | |
Guillaume Hivert | d50e1bfd6d | |
Guillaume Hivert | 5f453789d4 | |
Guillaume Hivert | 7658481128 | |
Guillaume Hivert | 593bb31ac3 | |
Guillaume Hivert | f5e89c9660 | |
Guillaume Hivert | 62db2267fc | |
Guillaume Hivert | f6544f7a2e | |
bert-e | 3ce4effafb | |
Kerkesni | 5ec6acc061 | |
bert-e | 6c7a1316ae | |
bert-e | b1897708e5 | |
bert-e | 019907e2ab | |
bert-e | 73729c7bdb | |
Guillaume Hivert | d6635097c7 | |
Kerkesni | 3f5e553d8a | |
bert-e | efea69ff70 | |
bert-e | 187ba67cc8 | |
Guillaume Hivert | 8a2b62815b | |
bert-e | 0dbbb80bea | |
bert-e | c808873996 | |
Guillaume Hivert | 2eecda3079 | |
bert-e | 011606e146 | |
Guillaume Hivert | a3378c3df5 | |
Guillaume Hivert | 8271b3ba21 | |
Guillaume Hivert | a1b980b95b | |
Guillaume Hivert | e063eeeced | |
Guillaume Hivert | a5051cffba | |
Guillaume Hivert | 24deac9f92 | |
Guillaume Hivert | 3621c7bc77 | |
Guillaume Hivert | 57c2d4fcd8 | |
bert-e | 4c47264a78 | |
bert-e | 835ffe79c6 | |
bert-e | f69087814e | |
Ronnie Smith | cd432fa920 | |
Ronnie Smith | 1ac27e8125 | |
Ronnie Smith | af0ab673d7 | |
Ronnie Smith | deb88ae03b | |
Ronnie Smith | 334edbc17b | |
Ronnie Smith | a2777d929e | |
bert-e | 271b28e59b | |
Guillaume Hivert | 03c7b6ea3e | |
Guillaume Hivert | 872034073e | |
Guillaume Hivert | 3d39b61a46 | |
Guillaume Hivert | c55c790a5d | |
Jordi Bertran de Balanda | ccbc1ed10c | |
bert-e | 7f641d2755 | |
bert-e | 348c80060e | |
bert-e | df91750c5a | |
bert-e | b81d24c3ef | |
bert-e | 1f2caf6a01 | |
Ronnie Smith | 1333195dcd | |
bert-e | f822c7bad9 | |
bert-e | b3ce76d7d8 | |
bert-e | c03c67d9fb | |
Ronnie Smith | 0f72b7c188 | |
Artem Bakalov | 18887d10b3 | |
Artem Bakalov | 07fd3451ab | |
Ronnie Smith | 223897bbff | |
Ronnie Smith | 473e241d5c | |
bert-e | e4d888c07b | |
bert-e | dece118ba9 | |
bert-e | ffe53ab72e | |
Will Toozs | a077cc199f | |
bert-e | b0cb6d9c0f | |
Nicolas Humbert | c13cff150f | |
Alexander Chan | e0da963226 | |
bert-e | 209f3bae44 | |
bert-e | e446f20223 | |
Guillaume Hivert | e311f0d83d | |
Guillaume Hivert | dab763884a | |
Guillaume Hivert | 4f22e526ee | |
Guillaume Hivert | dd0ca967c4 | |
Guillaume Hivert | 3951bb289c | |
Guillaume Hivert | 7b0bb25358 | |
Guillaume Hivert | b97de6505c | |
Guillaume Hivert | a5ad298c3b | |
Guillaume Hivert | 57ab049565 | |
bert-e | 6919af95f2 | |
bert-e | 6a5f0964ff | |
Guillaume Hivert | b94c13a115 | |
Guillaume Hivert | 666da6b1aa | |
Guillaume Hivert | 7192d4bc93 | |
Guillaume Hivert | 66043e5cd0 | |
Guillaume Hivert | bb2951be2c | |
Guillaume Hivert | 0d68de5ec4 | |
Guillaume Hivert | f4e43f2cc7 | |
Guillaume Hivert | b829b7662e | |
Will Toozs | e4be1d8d35 | |
bert-e | 1523f6baa6 | |
Guillaume Hivert | 941d3ba73d | |
bert-e | 9556d5cd61 | |
bert-e | c517e4531a | |
Guillaume Hivert | 1fc6c2db86 | |
Guillaume Hivert | c5949b547d | |
Kerkesni | 7bcb81985a | |
bert-e | 68ac02ad54 | |
Guillaume Hivert | 0d479c82c5 | |
Guillaume Hivert | f958ed3204 | |
Guillaume Hivert | 3fdd6b8e80 | |
Guillaume Hivert | 4193511d1b | |
Guillaume Hivert | 3bf00b14b8 | |
Guillaume Hivert | 7d4c22594f | |
Guillaume Hivert | 6f588c00d7 | |
Guillaume Hivert | 441630d57e | |
bert-e | 7d80db5d7f | |
Guillaume Hivert | 3946a01871 | |
bert-e | 34ef6d0434 | |
Jordi Bertran de Balanda | 6f36a85353 | |
bert-e | 0ce6a79961 | |
Guillaume Hivert | 5d4ed36096 | |
Kerkesni | 7477b881ed | |
Guillaume Hivert | 3874d16f42 | |
Guillaume Hivert | 282dc7afb3 | |
Guillaume Hivert | 617ec1f500 | |
Guillaume Hivert | 37157118af | |
Guillaume Hivert | 33bea4adb3 | |
Guillaume Hivert | a0b62a9948 | |
Guillaume Hivert | c7c2c7ffaa | |
Guillaume Hivert | 362b82326e | |
Guillaume Hivert | 38d462c833 | |
Guillaume Hivert | 7b73e34f9f | |
Guillaume Hivert | d88ad57032 | |
Guillaume Hivert | 800f79f125 | |
Guillaume Hivert | 522dfbc0db | |
Guillaume Hivert | 918ad4c7c2 | |
Guillaume Hivert | 2c8e611a15 | |
Guillaume Hivert | 0158fb0967 | |
Guillaume Hivert | fac5605a18 | |
Guillaume Hivert | fd33b9271b | |
bert-e | 72057b1efc | |
bert-e | 529840fa37 | |
bert-e | 0e7c47a7e9 | |
Guillaume Hivert | 0a5f7c4ea9 | |
bert-e | 0e4ac99d9d | |
KillianG | 218d21b819 | |
KillianG | 0b51a6a3f0 | |
bert-e | 9333323301 | |
bert-e | 67639f64d4 | |
bert-e | e5929b9f91 | |
bert-e | 8998544c06 | |
bert-e | 36fd21a3cd | |
KillianG | df33583aea | |
KillianG | 050d649db5 | |
bert-e | de81f65306 | |
Killian Gardahaut | 48fe6779bb | |
Killian Gardahaut | 6acc199eca | |
Killian Gardahaut | 6eff4565dd | |
KillianG | 8cc333e7f7 | |
KillianG | cbcaa97abb | |
bert-e | 5eaf67ac93 | |
KillianG | d18971bc6e | |
bert-e | 193a399ae2 | |
KillianG | f5bce507a5 | |
bert-e | 4de18e5b26 | |
Jordi Bertran de Balanda | c7e2743bf9 | |
Jordi Bertran de Balanda | a8029d8779 | |
Jordi Bertran de Balanda | ee49ec7d72 | |
bert-e | d639f4cffe | |
bert-e | 07e8d44406 | |
Guillaume Hivert | b2ec34c8f2 | |
KillianG | fb31f93829 | |
Guillaume Hivert | ab823b2797 | |
Guillaume Hivert | e7502c9ffd | |
Guillaume Hivert | 9de879ecc2 | |
Guillaume Hivert | 68ca9a6e94 | |
bert-e | 6c6ee31f34 | |
bert-e | 310834c237 | |
Kerkesni | 64351cf20d | |
KillianG | 118f6dc787 | |
Ronnie Smith | b58b4d0773 | |
Ronnie Smith | 3faf2433c7 | |
Ronnie Smith | 9a0915d40e | |
Ronnie Smith | 36d3a67a68 | |
Ronnie Smith | d3d2529719 | |
Ronnie Smith | 23b9cf6e21 | |
Ronnie Smith | 66910fb1a4 | |
Xin LI | 3d156a58dd | |
Xin LI | 7737ec4904 | |
Ronnie Smith | 24c82170d8 | |
Ronnie Smith | e26073ed6d | |
Ronnie Smith | 0088a2849f | |
Ronnie Smith | e902eb61db | |
Ronnie Smith | 4cfd78c955 | |
Ronnie Smith | cfee038a34 | |
Ronnie Smith | 06c2a0d90d | |
Ronnie Smith | 1e241bd79c | |
Ronnie Smith | 0d526df512 | |
Kerkesni | d18f4d10bd | |
Kerkesni | e0bc4383cd | |
bert-e | de17f221bf | |
Kerkesni | d46301b498 | |
Kerkesni | 0bb2a44912 | |
Guillaume Hivert | 2c1fb773fd | |
Guillaume Hivert | 961b5abe41 | |
Guillaume Hivert | d0527d1ac1 | |
Guillaume Hivert | 08cb0a8c1c | |
Guillaume Hivert | de0678d5bf | |
Guillaume Hivert | f619c0d33f | |
Guillaume Hivert | 7fea1d58a8 | |
Guillaume Hivert | db25abeb99 | |
Guillaume Hivert | e90e37c42f | |
Guillaume Hivert | 38bb284694 | |
Guillaume Hivert | a123b3d781 | |
Guillaume Hivert | 9b583b0541 | |
Xin.LI | 3528c24276 | |
Xin LI | 6d8294d0c0 | |
Xin LI | 23bfc17a26 | |
bert-e | 0f6a1f2982 | |
Nicolas Humbert | bff13f1190 | |
bert-e | c857e743c8 | |
bert-e | 27e06c51cc | |
Nicolas Humbert | 7d254a0556 | |
Kerkesni | 5f8edd35e9 | |
Kerkesni | 3c4359b696 | |
Kerkesni | 8ecf1d9808 | |
Kerkesni | 74e4934654 | |
Kerkesni | eac87fc9de | |
Kerkesni | e2be4d895d | |
bert-e | c0f7ebbaa9 | |
Kerkesni | 60fcedc251 | |
Kerkesni | 10ef395501 | |
Kerkesni | d1c8e67901 | |
Kerkesni | 266aabef37 | |
Kerkesni | b63c909808 | |
Kerkesni | 02ee339214 | |
Kerkesni | 5ca7f86350 | |
Kerkesni | 50a4fd8dc1 | |
bert-e | 5de0c2a7da | |
Kerkesni | b942516dca | |
Kerkesni | 54181af522 | |
Kerkesni | 21af204956 | |
Kerkesni | 68a27be345 | |
Kerkesni | 06350ffe15 | |
Taylor McKinnon | 5da4cd88ff | |
bert-e | 6bb68ee0e3 | |
Taylor McKinnon | 9a4bae40e6 | |
bert-e | 54e9635cab | |
Vianney Rancurel | b8f803338b | |
Guillaume Hivert | 4a1215adb5 | |
Guillaume Hivert | fc8d7532c6 | |
Guillaume Hivert | 1818bfe6c8 | |
Guillaume Hivert | 5cd929ea8a | |
Guillaume Hivert | 1138ce43af | |
Guillaume Hivert | 8b4e9cc0aa | |
Guillaume Hivert | ff6ea2a6d5 | |
Guillaume Hivert | 3b3600db92 | |
bert-e | 51c5247d01 | |
Vianney Rancurel | 7813a312b5 | |
Vianney Rancurel | 7b451242b6 | |
Vianney Rancurel | 5f8c92a0a2 | |
Thomas Carmet | 35a4552c0f | |
Vianney Rancurel | 0dbdff3a00 | |
bert-e | 80b91d724d | |
bert-e | 29bab6f1f1 | |
bert-e | 40843d4bed | |
Taylor McKinnon | ab8cad95d7 | |
bert-e | b3fd77d08f | |
Vianney Rancurel | 44f37bd156 | |
Taylor McKinnon | b855de50eb | |
Taylor McKinnon | ed6bc63e75 | |
Taylor McKinnon | 00602beadd | |
Taylor McKinnon | 6861ac477a | |
Rached Ben Mustapha | c95f84e887 | |
Rached Ben Mustapha | 4303cd8f5b | |
Rached Ben Mustapha | 0c73c952fa | |
Nicolas Humbert | 3c9ab1bb99 | |
Nicolas Humbert | 3c30adaf85 | |
Nicolas Humbert | 2f40ff3883 | |
Nicolas Humbert | 90d6556229 | |
bert-e | 98edeae3f2 | |
bert-e | d813842f89 | |
bert-e | f7802650ee | |
bert-e | 4f15e4f267 | |
bert-e | f28783e616 | |
Nicolas Humbert | d0684396b6 | |
Xin LI | 68c5b42e6f | |
Xin LI | 6933bb8422 | |
Xin LI | 7e180fcad8 | |
Naren | 41d482cf7d | |
Nicolas Humbert | 1e334924f9 | |
Naren | 49239cc76e | |
bert-e | 4dc39e37b2 | |
Naren | 9b9a8660d9 | |
williamlardier | 8d17fcac0f | |
williamlardier | 1c3fcc5a65 | |
Ronnie Smith | f5b0f1e082 | |
Ronnie Smith | 10f0a934b0 | |
Ronnie Smith | 8c3f304d9b | |
williamlardier | 708aab707d | |
williamlardier | 3a1cbdeedb | |
bert-e | 38705d1962 | |
bert-e | faf5701248 | |
Ronnie Smith | efb3629eb0 | |
Ronnie Smith | 4cbb5a5dd6 | |
bert-e | e8084d4ab9 | |
bert-e | 22eca9b61c | |
Ronnie Smith | 6733d30439 | |
Naren | 59a679831b | |
Naren | 8b1846647b | |
bert-e | 26da124e27 | |
bert-e | d5dad4734f | |
bert-e | 47b121c17b | |
bert-e | e7869d832e | |
bert-e | a1e14fccb1 | |
Ronnie Smith | c605c1e1a2 | |
bert-e | 994bd0a6be | |
Naren | f0981e2c57 | |
Ronnie Smith | 1e2a6c387e | |
Ronnie Smith | 1348fc820f | |
Ronnie Smith | 79a363786f | |
bert-e | 86e3c02126 | |
bert-e | 0c17c748fe | |
bert-e | 030f47a88a | |
bert-e | 8f6731aa6a | |
bert-e | 9c185007a2 | |
Artem Bakalov | ea2f8ebd01 | |
Artem Bakalov | b640bbb45e | |
Taylor McKinnon | d9fcf275ce | |
Taylor McKinnon | d7a4bef3b3 | |
Taylor McKinnon | fc7711cca2 | |
Ronnie Smith | 66b03695c3 | |
Rahul Padigela | 3575e651e3 | |
Rahul Padigela | fa19a34306 | |
Ronnie Smith | 79699324d9 | |
Ronnie Smith | 3919808d14 | |
Xin LI | 3ab7ef4e8d | |
Xin LI | e531d3eae1 | |
Nicolas Humbert | 9ebcc9690e | |
Nicolas Humbert | 95759509cb | |
Dimitri Bourreau | b1dea67eef | |
Dimitri Bourreau | c3196181c1 | |
Dimitri Bourreau | c24ad4f887 | |
Dimitri Bourreau | ad1c623c80 | |
Dimitri Bourreau | 9d81cad0aa | |
Dimitri Bourreau | 5f72738b7f | |
Dimitri Bourreau | 70278f86ab | |
Dimitri Bourreau | 083dd7454a | |
williamlardier | 6cdae52d57 | |
williamlardier | 995cb59db4 | |
Alexander Chan | 385e34b472 | |
Alexander Chan | 8aa0f9d030 | |
Jonathan Gramain | f102c5ec8c | |
Jonathan Gramain | 3b0ea3d7a1 | |
Jonathan Gramain | 5ce057a498 | |
Jonathan Gramain | 8c3f88e233 | |
bert-e | e912617f02 | |
williamlardier | 3abde0bc74 | |
bert-e | cf49c7d8bf | |
Alexander Chan | e6e49a70c9 | |
Rached Ben Mustapha | 77f971957b | |
Ronnie Smith | ed1d6c12c2 | |
williamlardier | 27f17f9535 | |
williamlardier | 4658651593 | |
Jonathan Gramain | 7af6a73b3b | |
Jonathan Gramain | 8c2db870c7 | |
bert-e | 8728ff5c80 | |
Ronnie Smith | 7c16652e57 | |
Jonathan Gramain | 04581abbf6 | |
Jonathan Gramain | abfbe90a57 | |
bert-e | 67e5cc770d | |
bert-e | 5a9d667936 | |
Jonathan Gramain | b1c9474159 | |
Ilke | 8e8d771a64 | |
Rahul Padigela | 29dd069a5f | |
Rahul Padigela | f1793bfe51 | |
Rahul Padigela | 07a110ff86 | |
Rahul Padigela | c696f9a38b | |
Rahul Padigela | f941132c8a | |
Rahul Padigela | b42f1d3943 | |
bert-e | c0825231e9 | |
Rahul Padigela | 2246a9fbdc | |
Rahul Padigela | 86270d8495 | |
Naren | c27b359fba | |
Alexandre Lavigne | bb8bdbc6ea | |
Nicolas Humbert | 413f0c9433 | |
Nicolas Humbert | ab3fa2f13d | |
Naren | bfbda5d38b | |
Naren | 2e6b1791bb | |
Naren | 1f8cfecf43 | |
Alexandre Lavigne | 6a250feea9 | |
Thomas Carmet | 0a33d4b74e | |
Thomas Carmet | 9a544b9890 | |
Thomas Carmet | e52330b935 | |
Thomas Carmet | 4b08dd5263 | |
Ronnie Smith | a2b6846e2e | |
Ronnie Smith | 3fdfc7196b | |
Ronnie Smith | f602fb9601 | |
Thomas Carmet | c237a25448 | |
Thomas Carmet | ce7bba1f8d | |
Thomas Carmet | 5aaec6a4e6 | |
Thomas Carmet | 46338119b6 | |
Thomas Carmet | 36f6ca47e9 | |
Thomas Carmet | 11278e7334 | |
bert-e | c0fe2efbc2 | |
bert-e | cd50d46162 | |
Jonathan Gramain | 016107500f | |
Jonathan Gramain | b0633d8a13 | |
Jonathan Gramain | 04ebaa8d8f | |
Jonathan Gramain | c495ecacb0 | |
bert-e | b27caf5814 | |
bert-e | 3f702c29cd | |
anurag4DSB | 8603ca5b99 | |
bert-e | f5f6cb5692 | |
bert-e | 7b4e65eaf1 | |
anurag4DSB | f101a0f3a0 | |
bert-e | 87ba4a7b4a | |
bert-e | e0b95fe931 | |
bert-e | 9ff605f875 | |
naren-scality | db7d8b0b45 | |
Thomas Carmet | 4e160db87d | |
bert-e | 46d3a1e53c | |
Thomas Carmet | ef6197250c | |
bert-e | dc698f4d5c | |
Jonathan Gramain | 9aa8710a57 | |
bert-e | 8c7907f753 | |
Ronnie Smith | 735c6f2fb5 | |
bert-e | 395a881d92 | |
bert-e | 942c6c2a1e | |
Ronnie Smith | 836c65e91e | |
bert-e | 3d6306d2a3 | |
bert-e | 4a6b69247b | |
bert-e | 681740fbe7 | |
Gregoire Doumergue | 66a48f44da | |
Gregoire Doumergue | fa3ec78e25 | |
Alexander Chan | d381ec14d8 | |
Alexander Chan | 112cee9118 | |
bert-e | 0bdcd866bc | |
Jonathan Gramain | 6fdfbcb223 | |
Jonathan Gramain | c41f1ca4b3 | |
Jonathan Gramain | 856a1634d4 | |
Jonathan Gramain | 888273bb2f | |
Jonathan Gramain | 2921864aac | |
bert-e | 4665f3da5c | |
Jonathan Gramain | 0df0d952d2 | |
Jonathan Gramain | 1978405fb9 | |
Jonathan Gramain | d019076854 | |
bert-e | 54eb3ede5f | |
Gregoire Doumergue | 8f4453862d | |
bert-e | be4dea481d | |
Gregoire Doumergue | 3e1d8c8ed7 | |
Rached Ben Mustapha | d15e2d5df6 | |
Rached Ben Mustapha | a41d4db1c4 | |
Rached Ben Mustapha | 00d9c9af0c | |
Taylor McKinnon | 93503cf505 | |
bert-e | 0f63de2f05 | |
Rahul Padigela | 7aafd05b74 | |
bert-e | 16a5e6a550 | |
bert-e | 5540afa194 | |
Rached Ben Mustapha | 864d2e8a28 | |
Rached Ben Mustapha | 6b9e7fc11f | |
Nicolas Humbert | 058455061d | |
vrancurel | 15703aafca | |
vrancurel | d1e4c8dbb9 | |
bert-e | db000bc5e1 | |
bert-e | e87198f7ba | |
vrancurel | 06c35c15a5 | |
vrancurel | a7bfedfa2b | |
bert-e | 2794fe0636 | |
bert-e | 68c8189f53 | |
bert-e | 041731e6eb | |
Jonathan Gramain | 6347358cc2 | |
Nicolas Humbert | d51361ce06 | |
Nicolas Humbert | 453fd8b722 | |
Nicolas Humbert | 739f0a709c | |
bert-e | ffbe46edfb | |
bert-e | ea6e0c464b | |
bert-e | 2621aa7e53 | |
bert-e | 4948e3a75e | |
bert-e | b4aeab77b9 | |
Ronnie Smith | 3ed07317e5 | |
philipyoo | 13f8d796b4 | |
Bennett Buchanan | 9bdc330e9b | |
bert-e | e1a3b05330 | |
bert-e | bcb6836a23 | |
bert-e | 0151504158 | |
Taylor McKinnon | cd15540cb9 | |
bert-e | 048e8b02bc | |
Ilke | fe264673e1 | |
bert-e | 1d899efec8 | |
bert-e | e022fc9b99 | |
bert-e | 0487a18623 | |
Taylor McKinnon | 4cb8f715e9 | |
Taylor McKinnon | 5e1fe450f6 | |
bert-e | 580dda4d48 | |
bert-e | 8a1987ba69 | |
Taylor McKinnon | a4ccb94978 | |
bert-e | a17054e3a4 | |
bert-e | fa47c5045b | |
Ronnie Smith | 3098fcf1e1 | |
bert-e | a8df2b7b96 | |
bert-e | cd9949cb11 | |
Ronnie Smith | 41b3babc69 | |
Taylor McKinnon | d572fc953b | |
Taylor McKinnon | 990987bb6a | |
Alexander Chan | 2a78d4f413 | |
Alexander Chan | d2c7165214 | |
bert-e | 1999a586fd | |
Taylor McKinnon | faab2347f9 | |
bert-e | a1c0dd2472 | |
bert-e | 9a2b01c92e | |
bert-e | 403d9b5a08 | |
bert-e | a22032f9a5 | |
Taylor McKinnon | 71c1c01b35 | |
bert-e | dd38e32797 | |
naren-scality | 941b644e9e | |
bert-e | 274bf80720 | |
bert-e | 7a92327da2 | |
Jonathan Gramain | ecaf9f843a | |
Jonathan Gramain | 3506fd9f4e | |
Ronnie Smith | 25bd1f6111 | |
bert-e | bf4c40dfb8 | |
Ronnie Smith | d533bc4e0f | |
Jonathan Gramain | 2d41b034aa | |
Jonathan Gramain | 4aa5071a0d | |
Jonathan Gramain | c6976e996e | |
Ronnie Smith | 1584c4acb1 | |
Rached Ben Mustapha | bb8ec629bf | |
dependabot[bot] | f1345ec2ed | |
Rached Ben Mustapha | 4bbaa83b87 | |
bert-e | 58697f7915 | |
vrancurel | 147946747c | |
Ronnie Smith | bf4a6fe01b | |
alexandre merle | c703ba66e7 | |
alexandre merle | 20c77f9f85 | |
alexandre merle | edb27cc9a8 | |
alexandre merle | 79e0dfa38f | |
alexandre merle | e1118803e6 | |
bert-e | 1230e72c49 | |
bert-e | 6eacd79f07 | |
alexandre merle | f17006b91e | |
bert-e | 372df634c4 | |
alexandre merle | 65966f5ddf | |
bert-e | 2b96888eb7 | |
bert-e | f6223d1472 | |
alexandre merle | b3080e9ac6 | |
bert-e | a0909885f1 | |
bert-e | 7d58ca38ce | |
alexandre merle | 9484366844 | |
alexandre merle | 5d100645aa | |
alexandre merle | b8bef65f00 | |
alexandre merle | 7358bd10f8 | |
bert-e | 356edf8478 | |
bert-e | 26a00babb4 | |
bert-e | 1cfb869631 | |
Dora Korpar | 03521ac8ce | |
bert-e | 0403ca65fc | |
bert-e | f2bf36a2eb | |
Ilke | 38f851e30e | |
Rahul Padigela | 269e005198 | |
bert-e | c84d41c06f | |
bert-e | 10627f51d1 | |
Rahul Padigela | 1ee4a610fc | |
bert-e | aa5f714081 | |
Dora Korpar | 38cc5d65d1 | |
Dora Korpar | 8dfe60a1a7 | |
Dora Korpar | c08a6f69e0 | |
Jonathan Gramain | d27c0577ee | |
Jonathan Gramain | ff539645ea | |
Jonathan Gramain | e5c3bb188a | |
Jonathan Gramain | 2461b5c2f7 | |
Jonathan Gramain | 747307cac2 | |
Jonathan Gramain | 5942d9d70c | |
bert-e | 8ed84786fc | |
Jonathan Gramain | ed446c569c | |
Jonathan Gramain | 918a1d7c89 | |
Jonathan Gramain | 15140cd6bb | |
bert-e | 1e40e76bb2 | |
bert-e | af92067069 | |
bert-e | f4058dd6ef | |
bert-e | 2ec26f23b0 | |
Jonathan Gramain | 0d328d18d1 | |
bert-e | 04f7692bad | |
Anurag Mittal | edbb4770bf | |
bert-e | 32752ac504 | |
Dora Korpar | 096407487b | |
vrancurel | 549f187893 | |
bert-e | 93cd582e3a | |
Dora Korpar | 2d28231e97 | |
vrancurel | 2582108f97 | |
bert-e | b25867f9c2 | |
bert-e | 7b60166d08 | |
Anurag Mittal | 2e1f689344 | |
bert-e | 8887a67261 | |
Dora Korpar | 236c72d2df | |
Ronnie Smith | 437ecc57f9 | |
bert-e | 759f0ef949 | |
Dora Korpar | aa9c9e54ff | |
bert-e | 0014aa3467 | |
Dora Korpar | 775f380a6c | |
Dora Korpar | 1727f4bd3f | |
Dora Korpar | d71c8eac86 | |
Dora Korpar | 645902ac42 | |
bert-e | 7eb6304956 | |
Dora Korpar | 3d219c208d | |
bert-e | ce98e9d104 | |
bert-e | fb08fa36fc | |
bert-e | 36d932bbce | |
Dora Korpar | 694553c752 | |
bert-e | 7f2c40cf6d | |
Dora Korpar | 6fff00d088 | |
bert-e | 6a78af0f39 | |
bert-e | f73dc3dd68 | |
bert-e | 18aa07f49e | |
bert-e | 459839cb8a | |
bert-e | 5c7664e5d2 | |
Jonathan Gramain | 8ec0611d08 | |
Jonathan Gramain | 6baca6f1e2 | |
bert-e | 718c8ba461 | |
bert-e | 899415dce9 | |
bert-e | 3dac99da94 | |
Jonathan Gramain | e6180b769a | |
Jonathan Gramain | 35f43b880e | |
bert-e | 78d62636c3 | |
bert-e | f295bcafa5 | |
bert-e | 580e25a9e8 | |
Ilke | e6622dfdce | |
Ilke | 91bb3ea291 | |
Dora Korpar | 9b8f813d02 | |
Dora Korpar | 0f70366774 | |
bert-e | 478904116f | |
bert-e | 9048f31618 | |
Dora Korpar | b5853078c6 | |
dependabot[bot] | ffc632034d | |
bert-e | fb8cf65091 | |
bert-e | fa8f705452 | |
bert-e | e12e0a3a5c | |
bert-e | efdffd6b99 | |
Jonathan Gramain | 7792f7c603 | |
bert-e | 31f92ebcef | |
Jonathan Gramain | 438001cf60 | |
bert-e | 32fc05e04b | |
Jonathan Gramain | 9ded1d2051 | |
bert-e | 668d90b7d0 | |
bert-e | 9f90e1ea26 | |
bert-e | 86ed244d7a | |
dependabot[bot] | 310599249d | |
bert-e | c1cfc59a0e | |
bert-e | f8888b9338 | |
bert-e | f9dafb1f6b | |
bert-e | 1073bac469 | |
bert-e | f956b02387 | |
bert-e | e8e9e00f11 | |
bert-e | 89b950a7e8 | |
Jonathan Gramain | 86bca2502e | |
bert-e | de50c62825 | |
Jonathan Gramain | 6fb57f3271 | |
dependabot[bot] | 2943a1ebe8 | |
dependabot[bot] | 88c133b90a | |
bert-e | 3aa49eed1d | |
bert-e | d6bf1ab748 | |
bert-e | 5a50da6d90 | |
bert-e | e93af8ad45 | |
Jonathan Gramain | a9c3b2218f | |
Jonathan Gramain | f459498e18 | |
bert-e | 248ea9cea5 | |
bert-e | 1a00552657 | |
dependabot[bot] | 64390da174 | |
bert-e | 55323aa7a2 | |
bert-e | a20e875908 | |
bert-e | b95b8b6cd3 | |
bert-e | 1e377c8801 | |
bert-e | c5055d4e72 | |
bert-e | a3a83f5ec8 | |
bert-e | 51d3312de8 | |
bert-e | 5e39c4c2c8 | |
bert-e | 60fe8f09cc | |
bert-e | 7b4a295d8a | |
Ilke | 6383d14d49 | |
bert-e | 1e47b00568 | |
bert-e | c0aee417f9 | |
Ilke | 55b6ceadab | |
Ilke | 321bb400d3 | |
Jonathan Gramain | 0e4035d45b | |
Jonathan Gramain | a18285ced8 | |
bert-e | ea3c09957d | |
Jonathan Gramain | 53a49c3747 | |
Rahul Padigela | dc4e1829fc | |
bert-e | eab66494cf | |
bert-e | 01e9b7c80e | |
dependabot[bot] | 60751e1363 | |
bert-e | ff4afb6c0f | |
bert-e | 76498cf31c | |
dependabot[bot] | 58b44556f6 | |
bert-e | ef87129383 | |
bert-e | 003b4cfd27 | |
dependabot[bot] | 2aa4a9b5aa | |
dependabot[bot] | 59cc006882 | |
bert-e | 3b438e03cd | |
bert-e | db793c6e07 | |
bert-e | b4763b541e | |
Rahul Padigela | 82b6017180 | |
bert-e | f2787ec013 | |
bert-e | 8cd5b714c0 | |
bert-e | 6f5614e461 | |
bert-e | 560ccef3ec | |
bert-e | 4617d66cb8 | |
Dora Korpar | 3f4ed31153 | |
Rahul Padigela | b2c054e7c7 | |
Dora Korpar | 9716781cbe | |
Jonathan Gramain | fc23f68d0f | |
bert-e | a61c1914d6 | |
Jonathan Gramain | 3d064b9003 | |
bert-e | 2a4da20c0a | |
naren-scality | 1f5d33f006 | |
bert-e | 14c4696482 | |
bert-e | 6c53c023b8 | |
bert-e | 65065dd4e3 | |
bert-e | 275226278f | |
bert-e | 3f82448a67 | |
bert-e | b4b5712df7 | |
bert-e | 6530f0ace4 | |
Anurag Mittal | 2b23c0d559 | |
bert-e | 750c021c37 | |
Dora Korpar | 16c4464864 | |
Dora Korpar | 41c2ebcd61 | |
bert-e | ee4d94c0fb | |
Dora Korpar | 48eeb1bc72 | |
bert-e | 98f1d219a9 | |
Dora Korpar | b77199b085 | |
Dora Korpar | fb363030c0 | |
Dora Korpar | 7aeb32e223 | |
Dora Korpar | 9b82caf129 | |
bert-e | 5bdee7eb8a | |
Jonathan Gramain | 709d1e3884 | |
Ilke | 9c12ff241e | |
bert-e | b8fd646097 | |
bert-e | 2125465761 | |
bert-e | a9d6e05c6e | |
Ilke | b98c4b6dfd | |
Ilke | dc412e8953 | |
Ilke | d06989a149 | |
Jonathan Gramain | 5f66ee992a | |
bert-e | 36b68be051 | |
bert-e | 0d49eff7e4 | |
bert-e | badaa8599b | |
bert-e | 3f19a00b32 | |
Ilke | 5d78367d1c | |
bert-e | ea8166cf7a | |
bert-e | cd9bdcfa61 | |
bert-e | b30da5ca67 | |
bert-e | c06f735e82 | |
bert-e | d699f78f91 | |
bert-e | b8c4ae4203 | |
Dora Korpar | 0cf9a9cdd5 | |
bert-e | d201e572fd | |
bert-e | 53cc766032 | |
bert-e | 400dc24281 | |
bert-e | a82f9a2b70 | |
bert-e | f59cea6b34 | |
bert-e | d0367eb6d0 | |
bert-e | 9cac91c413 | |
bert-e | f19feb949d | |
Jonathan Gramain | d66d9245b9 | |
Jonathan Gramain | fb89b4e683 | |
Jonathan Gramain | 1bda8559bc | |
Jonathan Gramain | 19dc603fe3 | |
Jonathan Gramain | bbef1964d7 | |
bert-e | 6c62091622 | |
Jonathan Gramain | cf4d90877f | |
Jonathan Gramain | bf43c8498d | |
bert-e | 43cd5f59b0 | |
Dora Korpar | ef4a2dc077 | |
bert-e | dd7390ade6 | |
bert-e | 5dff968096 | |
Dora Korpar | a3739cc836 | |
bert-e | 97682f56bf | |
bert-e | 2676b8384b | |
bert-e | ce4ca533e2 | |
Jonathan Gramain | 7a8437c30e | |
bert-e | 4544239269 | |
bert-e | 26bff09887 | |
Jonathan Gramain | 4c3b4d1012 | |
Pepijn Van Eeckhoudt | f6165146ec | |
Dora Korpar | cc5b5e1971 | |
Ilke | 9f580444f3 | |
Ilke | 93fe6fa94d | |
Ilke | f988270a0c | |
Jonathan Gramain | d9ff2c2060 | |
bert-e | 2b9ac57230 | |
bert-e | 336e42a9e0 | |
Jonathan Gramain | bbfc32e67e | |
bert-e | e553342616 | |
bert-e | fc0123ea5e | |
bert-e | 4d54b49c03 | |
Ronnie Smith | 5f6dda1aa1 | |
Ilke | 8a9dbc4de7 | |
Jonathan Gramain | 81d05b6ea8 | |
Ilke | 65e92ebd92 | |
bert-e | 44b8de565f | |
Ilke | d350f3db82 | |
vrancurel | 3ed66c50f6 | |
bert-e | 90e1cff9f9 | |
Jonathan Gramain | 9f323b32ea | |
bert-e | c848d1f13d | |
bert-e | dee53c8ad8 | |
bert-e | eeb3ba970c | |
bert-e | 9680071e1a | |
bert-e | c322c3b887 | |
Anurag Mittal | 2c892835cb | |
bert-e | 6dd3aa92a4 | |
bert-e | 04b063da70 | |
Dora Korpar | 3d0c3bea2e | |
bert-e | a9618bc0bb | |
bert-e | 0d4efa67eb | |
bert-e | b6042035c0 | |
bert-e | 3068ce38a0 | |
Anurag Mittal | e1e2a4964a | |
bert-e | d2fafe8ef3 | |
bert-e | 030a3f33f1 | |
Taylor McKinnon | ed1cc0f1bf | |
bert-e | fb18cba367 | |
Taylor McKinnon | 80d231a3fa | |
bert-e | bab9d5dc24 | |
bert-e | 2940500db6 | |
Dora Korpar | 0008b7989f | |
Alexander Chan | e531e5e711 | |
bert-e | f54d356669 | |
Jonathan Gramain | c1bb2ac058 | |
Jonathan Gramain | d76eeeea89 | |
bert-e | 7aedc5f1f7 | |
Jonathan Gramain | d03f2d9ed8 | |
Alexander Chan | ad58f66981 | |
bert-e | 85b5599ce2 | |
Dora Korpar | 3121d29140 | |
Jonathan Gramain | a75db3122f | |
bert-e | d994e2ae60 | |
Rached Ben Mustapha | c443793968 | |
Rached Ben Mustapha | 517a034291 | |
Rached Ben Mustapha | cc6671f37c | |
Rached Ben Mustapha | 87bb3126a3 | |
bert-e | b99577eaeb | |
bert-e | cedd08686a | |
naren-scality | eb9559cb18 | |
bert-e | 635d2fe6d9 | |
bert-e | 7f63022caa | |
naren-scality | a7b6fc8fb8 | |
Jianqin Wang | 9557e36438 | |
bert-e | 2bb0e171d8 | |
Dora Korpar | 61d779083f | |
bert-e | 68f5d3c9f2 | |
Dora Korpar | b0e56d64cd | |
vrancurel | 71caf08c19 | |
Guillaume Gimenez | 38403b84aa | |
Jianqin Wang | 21610dd88d | |
bbuchanan9 | 7566d1f0a9 | |
bbuchanan9 | 28415a5c9b | |
Taylor McKinnon | 506a9ad37d | |
bert-e | 1c6e56e8ef | |
bbuchanan9 | 9d02f86cf5 | |
bert-e | 5c4547a3a9 | |
Dora Korpar | 12ad2d9423 | |
bbuchanan9 | 5de85713ef | |
Rahul Padigela | 68defde532 | |
Dora Korpar | 9e5d4ae95b | |
Dora Korpar | 633ce2c069 | |
Dora Korpar | 32c895b21a | |
Dora Korpar | 08ddc07d1c | |
Dora Korpar | 006f77dd28 | |
Katherine Laue | bc6c9c8c36 | |
bert-e | 3dc9b958f7 | |
bert-e | c789d38df0 | |
Katherine Laue | f8bf038b81 | |
vrancurel | 4b5c0ff923 | |
vrancurel | 62536f66df | |
bert-e | 9032b89e6f | |
Dora Korpar | 3b705a9434 | |
vrancurel | 9014761c70 | |
bert-e | 8d9864264d | |
bert-e | 6c7de4124d | |
Katherine Laue | ae626b22ce | |
Rahul Padigela | 839182292c | |
Rahul Padigela | a197b2b6a4 | |
bert-e | 59803d7b67 | |
Rahul Padigela | 1d4bb01e1e | |
Katherine Laue | 0e2a79cad3 | |
bert-e | adf6cfc8e4 | |
bert-e | 40aa7d836f | |
bert-e | 98737a69ba | |
Rahul Padigela | ce08806aea | |
bert-e | 4fa15fce2a | |
bert-e | 279f08c870 | |
Dora Korpar | 94653a14c4 | |
anurag4dsb | 05a8475f1c | |
anurag4dsb | 8c664d9076 | |
bert-e | 0f53c78ccd | |
anurag4dsb | 470f38f7f9 | |
Jianqin Wang | 77172f33f8 | |
Guillaume Gimenez | 0a0fe7f1da | |
Salim | 6d7437a776 | |
bert-e | 1a6174dadf | |
vrancurel | c57cde88bb | |
Rahul Padigela | 6e97c01edd | |
Rahul Padigela | dd6fde61bb | |
bert-e | b03f5b80ac | |
Rahul Padigela | 9f2e74ec69 | |
Rahul Padigela | 9894b88e5f | |
Rahul Padigela | 54f6a7aa42 | |
Benoit A | 3e8c43e05b | |
Nicolas Humbert | 633efcbc50 | |
Alexander Chan | d99b430ac4 | |
philipyoo | 8f71d4ff03 | |
Rahul Padigela | d0f77cee75 | |
bert-e | 4419db7b23 | |
Rahul Padigela | 3672df0fc4 | |
Dora Korpar | 9b223bea87 | |
Guillaume Gimenez | b7dfc3a9c0 | |
Dora Korpar | 787f66458f | |
Dora Korpar | 618b179d5c | |
bert-e | e6ddad1193 | |
bert-e | 6575be0050 | |
bert-e | 933dc1da17 | |
Rahul Padigela | 30ccf9a398 | |
Jianqin Wang | 1f7263c320 | |
Jianqin Wang | 9da1a8e1f7 | |
Jianqin Wang | 14f8690a9a | |
Jianqin Wang | 700cb4eb48 | |
bert-e | ae8dd1bb0e | |
Jianqin Wang | bfb4a3034a | |
philipyoo | 7dd4dca7e5 | |
bert-e | a5d248000e | |
Taylor McKinnon | dae12b245b | |
bert-e | c0129eb0d7 | |
philipyoo | bd0d6c1942 | |
Jonathan Gramain | ed2d393e98 | |
bert-e | 886110138a | |
Jonathan Gramain | 397eecb370 | |
bert-e | 3623b992da | |
Jonathan Gramain | 78b64bebed | |
Dora Korpar | e857bb5f5a | |
Benoit A | 9c1dab1055 | |
bert-e | e18850911e | |
Jonathan Gramain | 2ff9cf866d | |
bbuchanan9 | cc6ed165dd | |
Dora Korpar | a6b5c21e5d | |
bbuchanan9 | 64426b1450 | |
bert-e | 160fe96b18 | |
Taylor McKinnon | 59290513e3 | |
Rahul Padigela | 6b9be35d8e | |
bbuchanan9 | dffcbefe9b | |
bbuchanan9 | c470cfb5b1 | |
philipyoo | abcff1b04e | |
bbuchanan9 | 6791d1b561 | |
bert-e | a8e0a30918 | |
philipyoo | 487fe8bf35 | |
bert-e | b7c84ef7d3 | |
bert-e | b55295818f | |
Guillaume Gimenez | c6e06cc235 | |
philipyoo | 0213bcfd25 | |
bert-e | 32b0946679 | |
JianqinWang | bef886d8ad | |
philipyoo | d44c2f123e | |
bert-e | f199d52c54 | |
bert-e | c30250539f | |
bert-e | b9c419dde7 | |
bert-e | 57c971ef0f | |
bert-e | 0eaae2bb2a | |
bert-e | 5cf3948ba2 | |
bert-e | 226088c8fb | |
bert-e | d8320da1bb | |
bert-e | 436cb5109a | |
Rahul Padigela | 7a60ad9c21 | |
Rahul Padigela | bca10414bc | |
bert-e | 8f0cab8d91 | |
bert-e | d5d6243c01 | |
Rahul Padigela | 53d0ad38b8 | |
Jonathan Gramain | 40c234bb5f | |
bert-e | 26e2b5e425 | |
bert-e | df5a61cb8d | |
bert-e | b01a390c46 | |
Guillaume Gimenez | 87103f83e1 | |
Guillaume Gimenez | 7fb16cbca6 | |
Guillaume Gimenez | 2a8a5dcb94 | |
Guillaume Gimenez | ff5d62f7de | |
bert-e | 9ba5d64cd2 | |
bert-e | f4d4c9b76e | |
bert-e | 97035596e1 | |
bert-e | 2c149ea9b1 | |
philipyoo | 735ad74bda | |
bert-e | 1636c87556 | |
bert-e | 8e2d6d42a8 | |
bert-e | f11d6e223d | |
bert-e | 8c19dcdc7c | |
Jonathan Gramain | 63d4e3c3f5 | |
philipyoo | ebe2d1f24d | |
bert-e | 6a1bc69336 | |
bert-e | 0144158a37 | |
Guillaume Gimenez | cae763669b | |
bert-e | aea19c9cc2 | |
bert-e | daaeb5637a | |
Guillaume Gimenez | b3598c5d0e | |
Dora Korpar | c479933448 | |
JianqinWang | f804aa9657 | |
Jonathan Gramain | ad35b9ec78 | |
Jonathan Gramain | 9fe0ba5c8c | |
bert-e | 2fe1e4da3c | |
bert-e | ac365eef18 | |
bert-e | 6a4784417f | |
bert-e | 0ed8c750c9 | |
bert-e | 0d33e5a69f | |
Guillaume Gimenez | f7aa22f9a6 | |
bert-e | ac470f4233 | |
bert-e | 23d406dc81 | |
Jonathan Gramain | 15b0d05493 | |
David Pineau | 9f544b2409 | |
David Pineau | fcdbff62cc | |
JianqinWang | f11ccbfefa | |
bert-e | c8c0527f65 | |
JianqinWang | d81d309420 | |
Dora Korpar | c657b4b469 | |
Dora Korpar | 65c99ff86d | |
Jonathan Gramain | 645433ed0c | |
JianqinWang | f9bb82ce43 | |
bert-e | ab4500b842 | |
bert-e | 40a802b715 | |
anurag4dsb | 6edf027459 | |
Giacomo Guiulfo | 84bf7bd511 | |
Giacomo Guiulfo | b5fa54ec11 | |
Bennett Buchanan | 58e9f26ae0 | |
Giacomo Guiulfo | d6fdd153aa | |
Giacomo Guiulfo | 1e05f0f54e | |
Giacomo Guiulfo | 9c66b7ceba | |
bert-e | 0555d0b41a | |
Guillaume Gimenez | 39f2a53beb | |
Bennett Buchanan | 0a75792ca6 | |
bert-e | 5225fc231d | |
Guillaume Gimenez | 30c3ce1e2b | |
Taylor McKinnon | aa157c6d13 | |
Bennett Buchanan | 699890d2d7 | |
Jonathan Gramain | ea1a7d4d87 | |
bert-e | a9297e707a | |
Bennett Buchanan | 75dccc528d | |
bert-e | 5d7cf78eda | |
Giacomo Guiulfo | 0a364fe379 | |
Rahul Padigela | 345031f2bd | |
greenkeeper[bot] | 0bc1fe1a71 | |
greenkeeper[bot] | f23e457b83 | |
greenkeeper[bot] | 09aca2dcf4 | |
greenkeeper[bot] | d304334e92 | |
greenkeeper[bot] | 7955b97810 | |
Rahul Padigela | d14cef843b | |
Dora Korpar | f2b39fb3d7 | |
Dora Korpar | 9a009746be | |
Dora Korpar | 90476ea9fd | |
Dora Korpar | b28a9fcec9 | |
Jeremy Desanlis | 3e08bad2da | |
philipyoo | 13b156b226 | |
JianqinWang | 07f655c2f8 | |
JianqinWang | f496cec8bf | |
bert-e | 7f5413699d | |
Jonathan Gramain | d620fef517 | |
Jonathan Gramain | 8ac3cf5548 | |
Giacomo Guiulfo | ebd9a74666 | |
bert-e | a1f9bef60e | |
philipyoo | 899107913c | |
Jonathan Gramain | 18dfc6b4fa | |
Bennett Buchanan | 89873b4c02 | |
Rahul Padigela | 9fe16c64fa | |
Rahul Padigela | 879823c428 | |
vrancurel | 3dee6e2d0b | |
vrancurel | 3545eb4d62 | |
Dora Korpar | 0a85eeb8b7 | |
Dora Korpar | 83759870f2 | |
Dora Korpar | 0604c9daff | |
Alexander Chan | 0d4bf3c17f | |
Alexander Chan | 0117b39dcf | |
Bennett Buchanan | 549ca1f683 | |
bert-e | e4a66343fb | |
philipyoo | a89fdde6fd | |
philipyoo | 872a2d88e5 | |
philipyoo | 0c9c462634 | |
philipyoo | a3973ac7d3 | |
bert-e | d1a8693fe5 | |
Jeremy Desanlis | 5687a48599 | |
Nicolas Humbert | 9dca871e1b | |
philipyoo | 7088812c80 | |
philipyoo | 9f742d4921 | |
bert-e | 2c31728905 | |
Bennett Buchanan | 125ccbbfa9 | |
bert-e | 40c8b37b30 | |
bert-e | 879075e4ec | |
philipyoo | 79ed68ce9f | |
Jeremy Desanlis | 7290208a20 | |
bert-e | cbfacb5ec0 | |
Jeremy Desanlis | eb2aef6064 | |
philipyoo | 06dfdd9612 | |
philipyoo | bf95506495 | |
Alexander Chan | db743f8269 | |
Alexander Chan | a2311bb69c | |
Alexander Chan | c8f323237f | |
Rahul Padigela | 5cf55fcb68 | |
Rahul Padigela | de94a0e62e | |
Rahul Padigela | 2b13994795 | |
Rahul Padigela | 769a461178 | |
Rahul Padigela | c11fc1d9d8 | |
bert-e | b8ad86a1f1 | |
Giacomo Guiulfo | 12c4df722b | |
bert-e | f566e32322 | |
philipyoo | 6413c92fbc | |
bert-e | 29182cce05 | |
Jonathan Gramain | 9fb5b8b10d | |
vrancurel | 5631a892c6 | |
Rahul Padigela | dfcdea46fc | |
Rahul Padigela | be02e59bfe | |
Rahul Padigela | fdbeed1c4e | |
bert-e | 91fbc3fd23 | |
philipyoo | 241338bcfa | |
Rached Ben Mustapha | 6db80e9411 | |
bert-e | d701352635 | |
Alexander Chan | b291ccc03f | |
Bennett Buchanan | 0426f44dee | |
Rahul Padigela | 1b9242788a | |
Bennett Buchanan | 1a2ea2f353 | |
Bennett Buchanan | c36280a6e8 | |
bert-e | c749725410 | |
Alexander Chan | 3d06ec6230 | |
Jonathan Gramain | 159ebb4283 | |
Alexander Chan | e17333b19e | |
philipyoo | b3b22292c4 | |
bert-e | 68d27ed5bf | |
bert-e | 1e79964253 | |
philipyoo | 5f76343c2e | |
Alexander Chan | d907c9942d | |
Alexander Chan | c63b0713c0 | |
Alexander Chan | 6a9a88800a | |
Dora Korpar | 5834f15397 | |
bert-e | b50f6c4678 | |
bert-e | edeab02107 | |
David Pineau | c64cccdf55 | |
David Pineau | 6736508364 | |
David Pineau | c6292fcfe1 | |
David Pineau | 9ceab4b158 | |
vrancurel | af2b3a4bc3 | |
philipyoo | 1e9ad08830 | |
David Pineau | 9e66fda610 | |
David Pineau | 059dc71235 | |
David Pineau | 41c272d7b1 | |
David Pineau | 14d7fead87 | |
Rahul Padigela | 888e154f0e | |
JianqinWang | dea1df2ee6 | |
Nicolas Humbert | 8448f909e4 | |
bert-e | 2b16e84733 | |
philipyoo | a1a6f65364 | |
bert-e | 7cf0c97d8e | |
Taylor McKinnon | 10e7b976d5 | |
vrancurel | e80ea95ad8 | |
Jeremy Desanlis | 7075318dd2 | |
bert-e | 38f68fba1a | |
vrancurel | 16f9a6f5f6 | |
bert-e | c48e4b89bd | |
Bennett Buchanan | 2a8169e936 | |
Alexander Chan | 1af67fffc7 | |
Guillaume Gimenez | e9ac11b1fe | |
bert-e | 30dcd6ef86 | |
Alexander Chan | 2ce9db4e01 | |
philipyoo | 9e234e2b41 | |
philipyoo | 83a831f512 | |
Guillaume Gimenez | 32c2a6fe99 | |
Rahul Padigela | 063361377c | |
Rahul Padigela | ea7f28c82d | |
Rahul Padigela | a9e760b32e | |
Rahul Padigela | 3b16a307b8 | |
Rahul Padigela | f8dfa378a1 | |
Jonathan Gramain | e16eadb474 | |
Rahul Padigela | 5bf7fef53c | |
philipyoo | 659aee2fc2 | |
Rahul Padigela | bde52ab89b | |
Jonathan Gramain | 0ddb4da8a9 | |
Rached Ben Mustapha | 56e280236b | |
Rached Ben Mustapha | f904f04401 | |
Rahul Padigela | db45fee9e8 | |
JianqinWang | ecc431c715 | |
JianqinWang | 6f694ae7f4 | |
Rahul Padigela | e7862d3922 | |
Jonathan Gramain | de7ebf70d7 | |
Rahul Padigela | 1425f03c1e | |
Alexander Chan | ad527911a2 | |
Rahul Padigela | 6c528688ee | |
Nicolas Humbert | e53aa2efd2 | |
Rahul Padigela | 873bc9b647 | |
Nicolas Humbert | 160b960607 | |
Rahul Padigela | 843bd1fe13 | |
Alexander Chan | 93a2a79699 | |
Rahul Padigela | ef32d5e94d | |
Alexander Chan | 45d9c3d999 | |
Rahul Padigela | a2ce46d8d0 | |
anurag4DSB | 0c0bffa2c3 | |
ironman-machine | d966c0bda9 | |
Rahul Padigela | cb86a857cc | |
Alexander Chan | 55c9441bd7 | |
David Pineau | cae55a65c8 | |
philipyoo | 114cbf5571 | |
Alexander Chan | f2bab3b3d6 | |
philipyoo | 3276d235bb | |
philipyoo | ee2aed10f3 | |
Rahul Padigela | 19bee770ea | |
Rahul Padigela | e0c5d03436 | |
Rahul Padigela | c8a7148645 | |
Rahul Padigela | 8ca5dce4fe | |
Bennett Buchanan | 599fb5709b | |
Rahul Padigela | 1161d5f75d | |
Rahul Padigela | 26b6c5d1d9 | |
Bennett Buchanan | 8fd50cd20e | |
Rahul Padigela | 1f6b5bf2bd | |
Rached Ben Mustapha | a7813daea9 | |
Rahul Padigela | 5d4eb84425 | |
Alexander Chan | 9511fff479 | |
Rahul Padigela | d70f64a6d0 | |
Alexander Chan | ee66dc811c | |
Rahul Padigela | 2710471726 | |
Dora Korpar | 9aee9f6cf0 | |
Rahul Padigela | a168fab266 | |
Dora Korpar | 92da4c90e5 | |
Rahul Padigela | a95d5ea15d | |
Salim | aad05faa12 | |
Rahul Padigela | ab230ebfe7 | |
Salim | b3103e1307 | |
Salim | f3b0091210 | |
Rahul Padigela | f633b91072 | |
Alexander Chan | 87807462dc | |
Rahul Padigela | d7f114d504 | |
Rached Ben Mustapha | 5ef168e654 | |
Rahul Padigela | 82b4055c6c | |
Rached Ben Mustapha | 91ccccfe85 | |
Rached Ben Mustapha | 696999874b | |
Rached Ben Mustapha | d2bed3bf9a | |
Rahul Padigela | ad42baa5ff | |
Rached Ben Mustapha | 6ac92b2ad2 | |
Rahul Padigela | 13dbf48867 | |
Rached Ben Mustapha | e79ad68e96 | |
Rahul Padigela | a4a5fe0db0 | |
Bennett Buchanan | f838fcc31f | |
VR | eb9dd23b14 | |
JianqinWang | edbf7ab650 | |
Rahul Padigela | e068950903 | |
Rahul Padigela | 1ceb7b264c | |
vrancurel | 5a29aaa10c | |
Rahul Padigela | 7587f7ba25 | |
Rahul Padigela | 795b145594 | |
Jeremy Desanlis | 58f027a693 | |
Rahul Padigela | e09348d658 | |
Alexander Chan | bddb90c6a1 | |
Rahul Padigela | 94efaaccc2 | |
Rahul Padigela | 463a8ebe15 | |
alexandre-merle | d9bf780296 | |
Alexandre Merle | ab701e1f33 | |
Alexandre Merle | 0c588da450 | |
philipyoo | f17ce17857 | |
Rahul Padigela | 200df1f50f | |
Rahul Padigela | 3a5250e2e9 | |
ironman-machine | 48cb7b3b05 | |
Nicolas Humbert | 84c4c147a2 | |
Rahul Padigela | 958e818655 | |
philipyoo | 91dd219c47 | |
Alexander Chan | 5f3d478edb | |
philipyoo | d311ca61bc | |
Rahul Padigela | 04d56cfdff | |
Rahul Padigela | 73dd529c29 | |
philipyoo | a9aa40c168 | |
ironman-machine | 62289d388b | |
Dora Korpar | 832fbb024e | |
ironman-machine | 449bf1a4f5 | |
Dora Korpar | 8cd4601f55 | |
ironman-machine | 189194a4e7 | |
JianqinWang | a9a6b2433d | |
JianqinWang | fa19fc8859 | |
JianqinWang | a269619698 | |
Rahul Padigela | da1da43597 | |
Rahul Padigela | caac4e4e7e | |
Rahul Padigela | 67250133dc | |
JianqinWang | d3f3be03ae | |
ironman-machine | 94e15a8030 | |
Dora Korpar | 417e316076 | |
ironman-machine | 1a9f1afd2c | |
JianqinWang | 9a5afdbc5c | |
JianqinWang | 83cf54512b | |
ironman-machine | 7e3ad64456 | |
Nicolas Humbert | eba0cb6116 | |
Rahul Padigela | eb56ed6192 | |
Rahul Padigela | 47ed80113f | |
ironman-machine | 9d5d63a58a | |
ironman-machine | 6e929c64bd | |
Flavien Lebarbe | 0af6abf565 | |
ironman-machine | 3280b683ad | |
Alexandre Merle | 2c83a05fd0 | |
David Pineau | e3318ad7d5 | |
Alexandre Merle | 4becaac072 | |
Alexandre Merle | 1cb22484ed | |
David Pineau | 6ff44ece1f | |
Alexandre Merle | a72af2b7d1 | |
Lauren Spiegel | fd23e82ab9 | |
Lauren Spiegel | d7cf5e8ccf | |
flavien-scality | d0f4f95f0d | |
Alexandre Merle | 0e606b1061 | |
ironman-machine | 44ead88d83 | |
David Pineau | d6522c1a2d | |
ironman-machine | 5e3b5b9eb0 | |
Thibault Riviere | 9d832ba2e8 | |
Thomas Carmet | 5b2ce43348 | |
ThibaultRiviere | 9fb1cc901c | |
vrancurel | d8e1497940 | |
Thomas Carmet | 98b866cdd8 | |
David Pineau | 647b4b992c | |
Thomas Carmet | 381664e8aa | |
alexandre-merle | b6c051df89 | |
ironman-machine | 506bef141b | |
Alexandre Merle | b3e9cbf7ff | |
ironman-machine | 76a036c73d | |
Rahul Padigela | ba593850b9 | |
Alexandre Merle | d5202aec91 | |
alexandre-merle | face851f94 | |
Alexandre Merle | e5fe7075dd | |
alexandre-merle | f323bc9b53 | |
Alexandre Merle | 4684ec1fc0 | |
Alexandre Merle | a5bc2c1650 | |
ThibaultRiviere | 4193394340 | |
Thibault Riviere | 0f1b0dad01 | |
ironman-machine | 393d6edc07 | |
vrancurel | 70638eaf7a | |
David Pineau | 71e5a5776e | |
Anne Harper | fb1df3ec46 | |
Anne Harper | 58c0578451 | |
Lauren Spiegel | 9d0156dfdf | |
Lauren Spiegel | 8d8028b83f | |
Lauren Spiegel | b99fe2cd8d | |
Lauren Spiegel | cc26f288be | |
David Pineau | c20a594061 | |
Thibault Riviere | 0ff9d77eec | |
ironman-machine | d0c8aeb398 | |
Dora Korpar | 6354123f0f | |
Bennett Buchanan | b4d04ce1f5 | |
Dora Korpar | 0df78fe030 | |
ironman-machine | 84b2673814 | |
Dora Korpar | d28269cbf5 | |
Lauren Spiegel | 90c85c7dc7 | |
Rahul Padigela | 0a137be794 | |
ironman-machine | 58a29072e6 | |
ironman-machine | 251bd0fa42 | |
Thibault Riviere | ed99e5b903 | |
ironman-machine | 048f9bf54c | |
Lauren Spiegel | 0a2b66ec34 | |
Lauren Spiegel | 1f5e71ba3b | |
Lauren Spiegel | 43cb132638 | |
Rahul Padigela | e95bf801c5 | |
Rahul Padigela | 0e780fae7e | |
Rahul Padigela | 79e60c5bcb | |
Bennett Buchanan | 19e8bd11bd | |
ironman-machine | c4ea3bf9a4 | |
Lauren Spiegel | 526dcf4148 | |
Rahul Padigela | 0e5547197a | |
Bennett Buchanan | 0eed5840bf | |
Bennett Buchanan | 0a06ec4cba | |
Rahul Padigela | 280c447a6f | |
Bennett Buchanan | 2c1bf72cc6 | |
Jonathan Gramain | 69922222b4 | |
Rahul Padigela | 77c9ed6c5d | |
Rahul Padigela | 36e0d84f56 | |
ThibaultRiviere | f6706ca7db | |
ironman-machine | a31d38e1b5 | |
Alexander Chan | 8d3f247b5d | |
Rahul Padigela | 4e51246b43 | |
Electra Chong | 4c282c42f9 | |
ironman-machine | 7158882b63 | |
philipyoo | 7ee8391833 | |
ironman-machine | a8ef4d5a22 | |
Guillaume Gimenez | 56fdb5c511 | |
Electra Chong | 2a490f8a70 | |
Nicolas Humbert | 0fde855c37 | |
ironman-machine | 4434e8a9ee | |
jeremyds | bdac98700a | |
Electra Chong | 6f7d964dda | |
Jeremy Desanlis | 377539f977 | |
Nicolas Humbert | ad498fdb77 | |
ironman-machine | c25c0884dc | |
Nicolas Humbert | 607df9840b | |
Rahul Padigela | 78cbf36d7d | |
ironman-machine | 91fd9086d6 | |
Alexander Chan | 42125aa7be | |
ThibaultRiviere | 1ac024fca0 | |
ThibaultRiviere | 2bbac71fad | |
Thibault Riviere | adad816b3a | |
Rahul Padigela | fd51a4bb90 | |
Nicolas Humbert | 64edb99a3e | |
ironman-machine | 2eee4fb6fe | |
Electra Chong | 71db93185f | |
ironman-machine | 1270412d4b | |
Jonathan Gramain | a10c674f68 | |
ironman-machine | 44800cf175 | |
Jonathan Gramain | 51a4146876 | |
Jonathan Gramain | 3c54bd740f | |
Rahul Padigela | 563bbfcb8b | |
Bennett Buchanan | 4942fab225 | |
Rahul Padigela | 91a828805b | |
Rahul Padigela | eb9b60c0ef | |
alexandremerle | 66acfbbab4 | |
Electra Chong | efe8ed76ba | |
Rahul Padigela | dad0d456d3 | |
Bennett Buchanan | 639374522d | |
Rahul Padigela | 03f82ea891 | |
Bennett Buchanan | 5dc752c6a9 | |
ironman-machine | f5ad8b5428 | |
Lauren Spiegel | 575c59bf2c | |
Rahul Padigela | 28c2492e50 | |
Lauren Spiegel | 1312b4d2e9 | |
ironman-machine | deb3bf3981 | |
Lauren Spiegel | 09d6c7e5ae | |
ironman-machine | a8cc170fdb | |
Lauren Spiegel | c7eb4c8e26 | |
Lauren Spiegel | 15958fdfef | |
Lauren Spiegel | c22ae0c6bb | |
@@ -1 +1,6 @@
-{ "extends": "scality" }
+{
+    "extends": "scality",
+    "parserOptions": {
+        "ecmaVersion": 2020
+    }
+}

@@ -0,0 +1,10 @@
---
version: 2
updates:
  - package-ecosystem: npm
    directory: "/"
    schedule:
      interval: daily
      time: "13:00"
    open-pull-requests-limit: 10
    target-branch: "development/7.4"

@@ -0,0 +1,25 @@
---
name: codeQL

on:
  push:
    branches: [development/*, stabilization/*, hotfix/*]
  pull_request:
    branches: [development/*, stabilization/*, hotfix/*]
  workflow_dispatch:

jobs:
  analyze:
    name: Static analysis with CodeQL
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: javascript, typescript

      - name: Build and analyze
        uses: github/codeql-action/analyze@v3

@@ -0,0 +1,16 @@
---
name: dependency review

on:
  pull_request:
    branches: [development/*, stabilization/*, hotfix/*]

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: 'Checkout Repository'
        uses: actions/checkout@v4

      - name: 'Dependency Review'
        uses: actions/dependency-review-action@v4

@@ -0,0 +1,82 @@
---
name: tests

on:
  push:
    branches-ignore:
      - 'development/**'

jobs:
  test:
    runs-on: ubuntu-latest
    services:
      # Label used to access the service container
      redis:
        # Docker Hub image
        image: redis
        # Set health checks to wait until redis has started
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          # Maps port 6379 on service container to the host
          - 6379:6379
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: '16'
          cache: 'yarn'
      - name: install dependencies
        run: yarn install --frozen-lockfile --prefer-offline --network-concurrency 1
        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
      - name: lint yaml
        run: yarn --silent lint_yml
      - name: lint javascript
        run: yarn --silent lint --max-warnings 0
      - name: lint markdown
        run: yarn --silent lint_md
      - name: add hostname
        run: |
          sudo sh -c "echo '127.0.0.1 testrequestbucket.localhost' >> /etc/hosts"
      - name: test and coverage
        run: yarn --silent coverage
      - name: run functional tests
        run: yarn ft_test
      - uses: codecov/codecov-action@v4
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: run executables tests
        run: yarn install && yarn test
        working-directory: 'lib/executables/pensieveCreds/'

  compile:
    name: Compile and upload build artifacts
    needs: test
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install NodeJS
        uses: actions/setup-node@v4
        with:
          node-version: '16'
          cache: yarn
      - name: Install dependencies
        run: yarn install --frozen-lockfile --prefer-offline
        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
      - name: Compile
        run: yarn build
        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
      - name: Upload artifacts
        uses: scality/action-artifacts@v4
        with:
          url: https://artifacts.scality.net
          user: ${{ secrets.ARTIFACTS_USER }}
          password: ${{ secrets.ARTIFACTS_PASSWORD }}
          source: ./build
          method: upload
        if: success()

@@ -3,3 +3,16 @@
 
 # Dependency directory
 node_modules/
+*/node_modules/
+
+# Build executables
+*-win.exe
+*-linux
+*-macos
+
+# Coverage
+coverage/
+.nyc_output/
+
+# TypeScript
+build/

@@ -0,0 +1,12 @@
{
    "$schema": "https://swc.rs/schema.json",
    "jsc": {
        "parser": {
            "syntax": "typescript"
        },
        "target": "es2017"
    },
    "module": {
        "type": "commonjs"
    }
}

@@ -1,7 +1,6 @@
 # Arsenal
 
-[![CircleCI][badgepub]](https://circleci.com/gh/scality/Arsenal)
-[![Scality CI][badgepriv]](http://ci.ironmann.io/gh/scality/Arsenal)
+[![codecov](https://codecov.io/gh/scality/Arsenal/branch/development/8.1/graph/badge.svg?token=X0esXhJSwb)](https://codecov.io/gh/scality/Arsenal)
 
 Common utilities for the S3 project components
 

@@ -0,0 +1,6 @@
module.exports = {
    presets: [
        ['@babel/preset-env', { targets: { node: 'current' } }],
        '@babel/preset-typescript',
    ],
};

@@ -6,14 +6,16 @@ general:
 
 machine:
   node:
-    version: 6.9.5
+    version: 6.13.1
   services:
     - redis
   environment:
     CXX: g++-4.9
 
 dependencies:
-  pre:
+  override:
+    - rm -rf node_modules
+    - npm install
     - sudo pip install yamllint
 
 test:
@@ -23,3 +25,4 @@ test:
     - npm run --silent lint_md
     - npm run --silent test
     - npm run ft_test
+    - cd lib/executables/pensieveCreds && npm install && npm test

@@ -0,0 +1,260 @@
# BucketInfo Model Version History

## Model Version 0/1

### Properties

```javascript
this._acl = aclInstance;
this._name = name;
this._owner = owner;
this._ownerDisplayName = ownerDisplayName;
this._creationDate = creationDate;
```

### Usage

No explicit references in the code, since the mdBucketModelVersion
property was not added until Model Version 2.

## Model Version 2

### Properties Added

```javascript
this._mdBucketModelVersion = mdBucketModelVersion || 0;
this._transient = transient || false;
this._deleted = deleted || false;
```

### Usage

Used to determine which splitter to use (a version < 2 means the old
splitter), as sketched below.
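
A minimal sketch of that check, assuming a `getMdBucketModelVersion()`
getter and placeholder splitter constants; none of these names are
confirmed by this document.

```javascript
// Hypothetical splitter selection driven by mdBucketModelVersion.
// OLD_SPLITTER / NEW_SPLITTER values are illustrative placeholders.
const OLD_SPLITTER = '::';
const NEW_SPLITTER = '..|..';

function getSplitter(bucketMD) {
    // Model versions below 2 predate the new splitter format.
    return bucketMD.getMdBucketModelVersion() < 2 ? OLD_SPLITTER : NEW_SPLITTER;
}
```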

## Model Version 3

### Properties Added

```javascript
this._serverSideEncryption = serverSideEncryption || null;
```

### Usage

Used to store the bucket's server-side encryption info.

## Model Version 4

### Properties Added

```javascript
this._locationConstraint = LocationConstraint || null;
```

### Usage

Used to store the location constraint of the bucket.

## Model Version 5

### Properties Added

```javascript
this._websiteConfiguration = websiteConfiguration || null;
this._cors = cors || null;
```

### Usage

Used to store the bucket website configuration info
and to store CORS rules to apply to cross-domain requests.

## Model Version 6

### Properties Added

```javascript
this._lifecycleConfiguration = lifecycleConfiguration || null;
```

### Usage

Used to store the bucket lifecycle configuration info.

## Model Version 7

### Properties Added

```javascript
this._uid = uid || uuid();
```

### Usage

Used to set a unique identifier on a bucket.

## Model Version 8

### Properties Added

```javascript
this._readLocationConstraint = readLocationConstraint || null;
```

### Usage

Used to store the default read location of the bucket.

## Model Version 9

### Properties Added

```javascript
this._isNFS = isNFS || null;
```

### Usage

Used to determine whether the bucket may be accessed through NFS.

## Model Version 10

### Properties Added

```javascript
this._ingestion = ingestionConfig || null;
```

### Usage

Used to store the ingestion status of a bucket.

## Model Version 11

### Properties Added

```javascript
this._azureInfo = azureInfo || null;
```

### Usage

Used to store Azure storage account specific information.

## Model Version 12

### Properties Added

```javascript
this._objectLockEnabled = objectLockEnabled || false;
this._objectLockConfiguration = objectLockConfiguration || null;
```

### Usage

Used to determine whether object lock capabilities are enabled on a bucket
and to store the object lock configuration of the bucket.

## Model Version 13

### Properties Added

```javascript
this._notificationConfiguration = notificationConfiguration || null;
```

### Usage

Used to store the bucket notification configuration info.

## Model Version 14

### Properties Added

```javascript
this._serverSideEncryption.configuredMasterKeyId = configuredMasterKeyId || undefined;
```

### Usage

Used to store the user's configured KMS key ID.

## Model Version 15

### Properties Added

```javascript
this._tags = tags || null;
```

The Tag Set of a bucket is an array of objects with Key and Value:

```javascript
[
    {
        Key: 'something',
        Value: 'some_data'
    }
]
```

### Usage

Used to store bucket tagging.

## Model Version 16

### Properties Added

```javascript
this._capabilities = capabilities || undefined;
```

For capacity-enabled buckets, contains the following data:

```javascript
{
    _capabilities: {
        VeeamSOSApi?: {
            SystemInfo?: {
                ProtocolVersion: String,
                ModelName: String,
                ProtocolCapabilities: {
                    CapacityInfo: Boolean,
                    UploadSessions: Boolean,
                    IAMSTS: Boolean,
                },
                APIEndpoints: {
                    IAMEndpoint: String,
                    STSEndpoint: String,
                },
                SystemRecommendations?: {
                    S3ConcurrentTaskLimit: Number,
                    S3MultiObjectDelete: Number,
                    StorageCurrentTasksLimit: Number,
                    KbBlockSize: Number,
                },
                LastModified?: String,
            },
            CapacityInfo?: {
                Capacity: Number,
                Available: Number,
                Used: Number,
                LastModified?: String,
            },
        },
    },
}
```

### Usage

Used to store bucket capabilities, such as the Veeam SOS API information
shown above; a sketch of reading it follows.
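
A minimal sketch of reading the capacity fields above; the
`getCapabilities()` getter is an assumption for illustration, not a
documented API.

```javascript
// Hypothetical read of the Veeam SOS API capacity info from bucket metadata.
function getCapacityInfo(bucketMD) {
    const capabilities = bucketMD.getCapabilities() || {}; // assumed getter
    const info = capabilities.VeeamSOSApi
        && capabilities.VeeamSOSApi.CapacityInfo;
    if (!info) {
        return null; // bucket is not capacity-enabled
    }
    const { Capacity, Available, Used } = info;
    return { Capacity, Available, Used };
}
```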

## Model Version 17

### Properties Added

```javascript
this._quotaMax = quotaMax || 0;
```

### Usage

Used to store bucket quota.

@@ -0,0 +1,27 @@
# Delimiter

The Delimiter class handles raw listings from the database with an
optional delimiter, and fills in a curated listing with "Contents" and
"CommonPrefixes" as a result.

## Expected Behavior

- only lists keys belonging to the given **prefix** (if provided)

- groups listed keys that have a common prefix ending with a delimiter
  inside CommonPrefixes

- can take a **marker** or **continuationToken** to list from a specific key

- can take a **maxKeys** parameter to limit how many keys can be
  returned (a driver sketch follows this list)
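
A minimal driver sketch under stated assumptions: the constructor
options mirror the parameters above, while `FILTER_END` and `result()`
are assumed names for the state chart's end code and the curated output.

```javascript
// Hypothetical loop feeding raw database keys to a Delimiter listing.
const listing = new Delimiter({
    prefix: 'photos/2021/',
    delimiter: '/',
    maxKeys: 1000,
});
for (const { key, value } of rawDatabaseListing) {
    if (listing.filter({ key, value }) === FILTER_END) {
        break; // maxKeys reached: stop feeding keys
    }
}
// Curated output: { Contents: [...], CommonPrefixes: [...] }
const { Contents, CommonPrefixes } = listing.result();
```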

## State Chart

- States with grey background are *Idle* states, which are waiting for
  a new listing key

- States with blue background are *Processing* states, which are
  actively processing a new listing key passed by the filter()
  function

![Delimiter State Chart](./pics/delimiterStateChart.svg)

@@ -0,0 +1,45 @@
# DelimiterMaster

The DelimiterMaster class handles raw listings from the database of a
versioned or non-versioned bucket with an optional delimiter, and
fills in a curated listing with "Contents" and "CommonPrefixes" as a
result.

## Expected Behavior

- only lists latest versions of versioned buckets

- only lists keys belonging to the given **prefix** (if provided)

- does not list latest versions that are delete markers

- groups listed keys that have a common prefix ending with a delimiter
  inside CommonPrefixes

- can take a **marker** or **continuationToken** to list from a specific key

- can take a **maxKeys** parameter to limit how many keys can be returned

- reconciles internal PHD keys with the next version (those are
  created when a specific version that is the latest version is
  deleted)

- skips internal keys like replay keys

## State Chart

- States with grey background are *Idle* states, which are waiting for
  a new listing key

- States with blue background are *Processing* states, which are
  actively processing a new listing key passed by the filter()
  function

### Bucket Vformat=v0

![DelimiterMaster State Chart for v0 format](./pics/delimiterMasterV0StateChart.svg)
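
The v0 chart relies on the helpers `master(key)` and
`isVersionKey(key)`. A minimal sketch, assuming the v0 layout where a
version key is the master key followed by a separator and the version
id (the `\0` separator is an assumption here):

```javascript
// Illustrative helpers matching the transitions in the v0 state chart.
const VID_SEP = '\0'; // assumed key/versionId separator in v0 format

function isVersionKey(key) {
    return key.includes(VID_SEP);
}

function master(key) {
    // Strip the version suffix to recover the master key.
    return key.split(VID_SEP)[0];
}
```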

### Bucket Vformat=v1

For buckets in versioning key format **v1**, the algorithm used is the
one from [Delimiter](delimiter.md).

@@ -0,0 +1,33 @@
# DelimiterVersions

The DelimiterVersions class handles raw listings from the database of a
versioned or non-versioned bucket with an optional delimiter, and
fills in a curated listing with "Versions" and "CommonPrefixes" as a
result.

## Expected Behavior

- lists individual distinct versions of versioned buckets

- only lists keys belonging to the given **prefix** (if provided)

- groups listed keys that have a common prefix ending with a delimiter
  inside CommonPrefixes

- can take a **keyMarker** and optionally a **versionIdMarker** to
  list from a specific key or version

- can take a **maxKeys** parameter to limit how many keys can be returned

- skips internal keys like replay keys (a pagination sketch follows
  this list)
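
A minimal pagination sketch using the markers above; the
`NextKeyMarker`/`NextVersionIdMarker` result fields are assumptions,
named after their S3 ListObjectVersions counterparts, and
`feedRawListing`/`handlePage` are hypothetical helpers.

```javascript
// Hypothetical loop listing every version under a prefix, page by page.
let keyMarker;
let versionIdMarker;
do {
    const listing = new DelimiterVersions({
        prefix: 'photos/',
        maxKeys: 1000,
        keyMarker,
        versionIdMarker,
    });
    feedRawListing(listing); // assumed: calls listing.filter() per raw key
    const page = listing.result();
    handlePage(page.Versions, page.CommonPrefixes); // assumed consumer
    keyMarker = page.NextKeyMarker;
    versionIdMarker = page.NextVersionIdMarker;
} while (keyMarker !== undefined);
```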

## State Chart

- States with grey background are *Idle* states, which are waiting for
  a new listing key

- States with blue background are *Processing* states, which are
  actively processing a new listing key passed by the filter()
  function

![DelimiterVersions State Chart](./pics/delimiterVersionsStateChart.svg)

@@ -0,0 +1,45 @@
digraph {
    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
    edge [fontsize=14];
    rankdir=TB;

    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]

    node [fillcolor="lightgrey"];
    "NotSkippingPrefixNorVersions.Idle" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
    "SkippingVersions.Idle" [label="SkippingVersions",group="SkippingVersions"];
    "WaitVersionAfterPHD.Idle" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];

    node [fillcolor="lightblue"];
    "NotSkippingPrefixNorVersions.Processing" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
    "SkippingVersions.Processing" [label="SkippingVersions",group="SkippingVersions"];
    "WaitVersionAfterPHD.Processing" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];

    START -> "SkippingVersions.Idle" [label="[marker != undefined]"]
    START -> "NotSkippingPrefixNorVersions.Idle" [label="[marker == undefined]"]

    "NotSkippingPrefixNorVersions.Idle" -> "NotSkippingPrefixNorVersions.Processing" [label="filter(key, value)"]
    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
    "SkippingVersions.Idle" -> "SkippingVersions.Processing" [label="filter(key, value)"]
    "WaitVersionAfterPHD.Idle" -> "WaitVersionAfterPHD.Processing" [label="filter(key, value)"]

    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[Version.isDeleteMarker(value)]\n-> FILTER_ACCEPT"]
    "NotSkippingPrefixNorVersions.Processing" -> "WaitVersionAfterPHD.Idle" [label="[Version.isPHD(value)]\n-> FILTER_ACCEPT"]
    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(<ReplayPrefix>)]\n/ prefix <- <ReplayPrefix>\n-> FILTER_SKIP"]
    "NotSkippingPrefixNorVersions.Processing" -> END [label="[isListableKey(key, value) and\nnKeys == maxKeys]\n-> FILTER_END"]
    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nhasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nnot hasDelimiter(key)]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]

    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
    "SkippingPrefix.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not key.startsWith(prefix)]"]

    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="[isVersionKey(key)]\n-> FILTER_SKIP"]
    "SkippingVersions.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key)]"]

    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[isVersionKey(key) and master(key) == PHDkey]\n/ key <- master(key)"]
    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key) or master(key) != PHDkey]"]
}

@@ -0,0 +1,216 @@
[Generated asset (18 KiB SVG, graphviz 2.43.0): render of the state
chart defined above (NotSkippingPrefixNorVersions / SkippingPrefix /
SkippingVersions / WaitVersionAfterPHD); SVG markup omitted.]

@@ -0,0 +1,35 @@
digraph {
    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
    edge [fontsize=14];
    rankdir=TB;

    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]

    node [fillcolor="lightgrey"];
    "NotSkipping.Idle" [label="NotSkipping",group="NotSkipping"];
    "NeverSkipping.Idle" [label="NeverSkipping",group="NeverSkipping"];
    "NotSkippingPrefix.Idle" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];

    node [fillcolor="lightblue"];
    "NeverSkipping.Processing" [label="NeverSkipping",group="NeverSkipping"];
    "NotSkippingPrefix.Processing" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];

    START -> "NotSkipping.Idle"
    "NotSkipping.Idle" -> "NeverSkipping.Idle" [label="[delimiter == undefined]"]
    "NotSkipping.Idle" -> "NotSkippingPrefix.Idle" [label="[delimiter == '/']"]

    "NeverSkipping.Idle" -> "NeverSkipping.Processing" [label="filter(key, value)"]
    "NotSkippingPrefix.Idle" -> "NotSkippingPrefix.Processing" [label="filter(key, value)"]
    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]

    "NeverSkipping.Processing" -> END [label="[nKeys == maxKeys]\n-> FILTER_END"]
    "NeverSkipping.Processing" -> "NeverSkipping.Idle" [label="[nKeys < maxKeys]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]
    "NotSkippingPrefix.Processing" -> END [label="[nKeys == maxKeys]\n-> FILTER_END"]
    "NotSkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[nKeys < maxKeys and hasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
    "NotSkippingPrefix.Processing" -> "NotSkippingPrefix.Idle" [label="[nKeys < maxKeys and not hasDelimiter(key)]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]
    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
    "SkippingPrefix.Processing" -> "NotSkippingPrefix.Processing" [label="[not key.startsWith(prefix)]"]
}
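
Read as code, the chart above corresponds to a filter of roughly this
shape; a sketch under stated assumptions (numeric FILTER_* codes,
accumulator names taken from the edge labels), not Arsenal's actual
implementation.

```javascript
// Rough transcription of the Delimiter state chart into a filter method.
const FILTER_END = -1; // assumed numeric codes
const FILTER_SKIP = 0;
const FILTER_ACCEPT = 1;

class DelimiterSketch {
    constructor({ delimiter, maxKeys = 1000 }) {
        this.delimiter = delimiter;
        this.maxKeys = maxKeys;
        this.nKeys = 0;
        this.prefix = undefined; // common prefix currently being skipped
        this.Contents = [];
        this.CommonPrefixes = [];
    }

    hasDelimiter(key) {
        return this.delimiter !== undefined && key.includes(this.delimiter);
    }

    prefixOf(key) {
        // Keep everything up to and including the first delimiter.
        return key.slice(0, key.indexOf(this.delimiter) + this.delimiter.length);
    }

    filter(key, value) {
        // SkippingPrefix: drop keys under the common prefix already reported.
        if (this.prefix !== undefined && key.startsWith(this.prefix)) {
            return FILTER_SKIP;
        }
        this.prefix = undefined;
        // NeverSkipping / NotSkippingPrefix: stop once maxKeys is reached.
        if (this.nKeys === this.maxKeys) {
            return FILTER_END;
        }
        if (this.hasDelimiter(key)) {
            this.prefix = this.prefixOf(key);
            this.CommonPrefixes.push(this.prefix);
        } else {
            this.Contents.push({ key, value });
        }
        this.nKeys += 1;
        return FILTER_ACCEPT;
    }
}
```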

@@ -0,0 +1,166 @@
[Generated asset (SVG, graphviz 2.43.0): render of the Delimiter state
chart defined above (NotSkipping / NeverSkipping / NotSkippingPrefix /
SkippingPrefix); SVG markup omitted.]
|
||||||
|
<path fill="lightblue" stroke="black" d="M691,-36C691,-36 499,-36 499,-36 493,-36 487,-30 487,-24 487,-24 487,-12 487,-12 487,-6 493,0 499,0 499,0 691,0 691,0 697,0 703,-6 703,-12 703,-12 703,-24 703,-24 703,-30 697,-36 691,-36"/>
|
||||||
|
<text text-anchor="middle" x="595" y="-14.2" font-family="Times,serif" font-size="16.00">SkippingPrefix</text>
|
||||||
|
</g>
|
||||||
|
<!-- SkippingPrefix.Idle->SkippingPrefix.Processing -->
|
||||||
|
<g id="edge6" class="edge">
|
||||||
|
<title>SkippingPrefix.Idle->SkippingPrefix.Processing</title>
|
||||||
|
<path fill="none" stroke="black" d="M452.35,-101.95C448.76,-87.65 446.54,-67.45 457,-54 461.44,-48.29 471.08,-43.36 483.3,-39.15"/>
|
||||||
|
<polygon fill="black" stroke="black" points="484.61,-42.41 493.1,-36.07 482.51,-35.73 484.61,-42.41"/>
|
||||||
|
<text text-anchor="middle" x="515" y="-65.3" font-family="Times,serif" font-size="14.00">filter(key, value)</text>
|
||||||
|
</g>
|
||||||
|
<!-- NeverSkipping.Processing->END -->
|
||||||
|
<g id="edge7" class="edge">
|
||||||
|
<title>NeverSkipping.Processing->END</title>
|
||||||
|
<path fill="none" stroke="black" d="M102.91,-233.88C97.93,-213.45 93.18,-179.15 109,-156 123.79,-134.35 154.41,-126.09 175.08,-122.94"/>
|
||||||
|
<polygon fill="black" stroke="black" points="175.62,-126.4 185.11,-121.69 174.76,-119.45 175.62,-126.4"/>
|
||||||
|
<text text-anchor="middle" x="185" y="-189.8" font-family="Times,serif" font-size="14.00">[nKeys == maxKeys]</text>
|
||||||
|
<text text-anchor="middle" x="185" y="-174.8" font-family="Times,serif" font-size="14.00">-> FILTER_END</text>
|
||||||
|
</g>
|
||||||
|
<!-- NeverSkipping.Processing->NeverSkipping.Idle -->
|
||||||
|
<g id="edge8" class="edge">
|
||||||
|
<title>NeverSkipping.Processing->NeverSkipping.Idle</title>
|
||||||
|
<path fill="none" stroke="black" d="M129.49,-270.27C134.87,-275.48 140.18,-281.55 144,-288 153.56,-304.17 159.09,-324.63 162.21,-340.81"/>
|
||||||
|
<polygon fill="black" stroke="black" points="158.78,-341.49 163.94,-350.74 165.68,-340.29 158.78,-341.49"/>
|
||||||
|
<text text-anchor="middle" x="265.5" y="-321.8" font-family="Times,serif" font-size="14.00">[nKeys < maxKeys]</text>
|
||||||
|
<text text-anchor="middle" x="265.5" y="-306.8" font-family="Times,serif" font-size="14.00">/ Contents.append(key, value)</text>
|
||||||
|
<text text-anchor="middle" x="265.5" y="-291.8" font-family="Times,serif" font-size="14.00"> -> FILTER_ACCEPT</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkippingPrefix.Processing->END -->
|
||||||
|
<g id="edge9" class="edge">
|
||||||
|
<title>NotSkippingPrefix.Processing->END</title>
|
||||||
|
<path fill="none" stroke="black" d="M349.96,-237.93C333,-232.81 316.36,-225.74 302,-216 275.27,-197.87 285.01,-177.6 261,-156 247.64,-143.98 229.41,-134.62 215.65,-128.62"/>
|
||||||
|
<polygon fill="black" stroke="black" points="216.74,-125.28 206.16,-124.7 214.07,-131.75 216.74,-125.28"/>
|
||||||
|
<text text-anchor="middle" x="378" y="-189.8" font-family="Times,serif" font-size="14.00">[nKeys == maxKeys]</text>
|
||||||
|
<text text-anchor="middle" x="378" y="-174.8" font-family="Times,serif" font-size="14.00"> -> FILTER_END</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkippingPrefix.Processing->NotSkippingPrefix.Idle -->
|
||||||
|
<g id="edge11" class="edge">
|
||||||
|
<title>NotSkippingPrefix.Processing->NotSkippingPrefix.Idle</title>
|
||||||
|
<path fill="none" stroke="black" d="M499.64,-270.11C506.59,-274.86 512.87,-280.76 517,-288 526.9,-305.38 528.94,-316.96 517,-333 513.56,-337.62 509.53,-341.66 505.07,-345.18"/>
|
||||||
|
<polygon fill="black" stroke="black" points="502.89,-342.43 496.63,-350.98 506.85,-348.2 502.89,-342.43"/>
|
||||||
|
<text text-anchor="middle" x="690.5" y="-321.8" font-family="Times,serif" font-size="14.00">[nKeys < maxKeys and not hasDelimiter(key)]</text>
|
||||||
|
<text text-anchor="middle" x="690.5" y="-306.8" font-family="Times,serif" font-size="14.00">/ Contents.append(key, value)</text>
|
||||||
|
<text text-anchor="middle" x="690.5" y="-291.8" font-family="Times,serif" font-size="14.00"> -> FILTER_ACCEPT</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkippingPrefix.Processing->SkippingPrefix.Idle -->
|
||||||
|
<g id="edge10" class="edge">
|
||||||
|
<title>NotSkippingPrefix.Processing->SkippingPrefix.Idle</title>
|
||||||
|
<path fill="none" stroke="black" d="M458,-233.74C458,-211.98 458,-174.32 458,-148.56"/>
|
||||||
|
<polygon fill="black" stroke="black" points="461.5,-148.33 458,-138.33 454.5,-148.33 461.5,-148.33"/>
|
||||||
|
<text text-anchor="middle" x="609.5" y="-204.8" font-family="Times,serif" font-size="14.00">[nKeys < maxKeys and hasDelimiter(key)]</text>
|
||||||
|
<text text-anchor="middle" x="609.5" y="-189.8" font-family="Times,serif" font-size="14.00">/ prefix <- prefixOf(key)</text>
|
||||||
|
<text text-anchor="middle" x="609.5" y="-174.8" font-family="Times,serif" font-size="14.00">/ CommonPrefixes.append(prefixOf(key))</text>
|
||||||
|
<text text-anchor="middle" x="609.5" y="-159.8" font-family="Times,serif" font-size="14.00">-> FILTER_ACCEPT</text>
|
||||||
|
</g>
|
||||||
|
<!-- SkippingPrefix.Processing->SkippingPrefix.Idle -->
|
||||||
|
<g id="edge12" class="edge">
|
||||||
|
<title>SkippingPrefix.Processing->SkippingPrefix.Idle</title>
|
||||||
|
<path fill="none" stroke="black" d="M593.49,-36.23C591.32,-50.84 586,-71.39 573,-84 567.75,-89.09 561.77,-93.45 555.38,-97.17"/>
|
||||||
|
<polygon fill="black" stroke="black" points="553.66,-94.12 546.43,-101.87 556.91,-100.32 553.66,-94.12"/>
|
||||||
|
<text text-anchor="middle" x="672" y="-72.8" font-family="Times,serif" font-size="14.00">[key.startsWith(prefix)]</text>
|
||||||
|
<text text-anchor="middle" x="672" y="-57.8" font-family="Times,serif" font-size="14.00">-> FILTER_SKIP</text>
|
||||||
|
</g>
|
||||||
|
<!-- SkippingPrefix.Processing->NotSkippingPrefix.Processing -->
|
||||||
|
<g id="edge13" class="edge">
|
||||||
|
<title>SkippingPrefix.Processing->NotSkippingPrefix.Processing</title>
|
||||||
|
<path fill="none" stroke="black" d="M703.16,-31.64C728.6,-36.87 750.75,-44.11 759,-54 778.46,-77.34 776.26,-200.01 762,-216 749.37,-230.17 656.13,-239.42 576.2,-244.84"/>
|
||||||
|
<polygon fill="black" stroke="black" points="575.77,-241.36 566.03,-245.51 576.24,-248.34 575.77,-241.36"/>
|
||||||
|
<text text-anchor="middle" x="870" y="-116.3" font-family="Times,serif" font-size="14.00">[not key.startsWith(prefix)]</text>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</svg>
|
After Width: | Height: | Size: 12 KiB |
|
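For reference, here is a minimal TypeScript sketch of the state machine in the diagram above. It is an illustration only, not Arsenal's actual implementation: the FILTER_* return codes mirror the edge labels, and prefixOf/hasDelimiter are the hypothetical helpers named in the diagram.

// Minimal sketch of the delimiter listing state machine (illustration only).
const FILTER_ACCEPT = 1;
const FILTER_SKIP = 0;
const FILTER_END = -1;

type State = 'NeverSkipping' | 'NotSkippingPrefix' | 'SkippingPrefix';

class DelimiterSketch {
    private state: State;
    private prefix = '';
    private nKeys = 0;
    readonly contents: { key: string; value: string }[] = [];
    readonly commonPrefixes: string[] = [];

    constructor(private delimiter: string | undefined,
                private maxKeys: number) {
        // START: [delimiter == undefined] vs [delimiter == '/']
        this.state = delimiter === undefined
            ? 'NeverSkipping' : 'NotSkippingPrefix';
    }

    // hypothetical helper from the diagram: key prefix up to the delimiter
    private prefixOf(key: string): string {
        return key.slice(0, key.indexOf(this.delimiter as string) + 1);
    }

    filter(key: string, value: string): number {
        if (this.state === 'SkippingPrefix') {
            if (key.startsWith(this.prefix)) {
                return FILTER_SKIP; // [key.startsWith(prefix)] -> FILTER_SKIP
            }
            this.state = 'NotSkippingPrefix'; // [not key.startsWith(prefix)]
        }
        if (this.nKeys >= this.maxKeys) {
            return FILTER_END; // [nKeys == maxKeys] -> FILTER_END
        }
        if (this.state === 'NotSkippingPrefix'
            && key.includes(this.delimiter as string)) {
            // hasDelimiter(key): emit a common prefix, then skip under it
            this.prefix = this.prefixOf(key);
            this.commonPrefixes.push(this.prefix);
            this.nKeys += 1;
            this.state = 'SkippingPrefix';
            return FILTER_ACCEPT;
        }
        this.contents.push({ key, value }); // / Contents.append(key, value)
        this.nKeys += 1;
        return FILTER_ACCEPT;
    }
}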
@ -0,0 +1,50 @@
digraph {
    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
    edge [fontsize=14];
    rankdir=TB;

    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]

    node [fillcolor="lightgrey"];
    "NotSkipping.Idle" [label="NotSkipping",group="NotSkipping",width=4];
    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
    "WaitForNullKey.Idle" [label="WaitForNullKey",group="WaitForNullKey"];
    "SkippingVersions.Idle" [label="SkippingVersions",group="SkippingVersions"];

    node [fillcolor="lightblue"];
    "NotSkipping.Processing" [label="NotSkipping",group="NotSkipping",width=4];
    "NotSkippingV0.Processing" [label="NotSkippingV0",group="NotSkipping",width=4];
    "NotSkippingV1.Processing" [label="NotSkippingV1",group="NotSkipping",width=4];
    "NotSkippingCommon.Processing" [label="NotSkippingCommon",group="NotSkipping",width=4];
    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
    "WaitForNullKey.Processing" [label="WaitForNullKey",group="WaitForNullKey"];
    "SkippingVersions.Processing" [label="SkippingVersions",group="SkippingVersions"];

    START -> "WaitForNullKey.Idle" [label="[versionIdMarker != undefined]"]
    START -> "NotSkipping.Idle" [label="[versionIdMarker == undefined]"]

    "NotSkipping.Idle" -> "NotSkipping.Processing" [label="filter(key, value)"]
    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
    "WaitForNullKey.Idle" -> "WaitForNullKey.Processing" [label="filter(key, value)"]
    "SkippingVersions.Idle" -> "SkippingVersions.Processing" [label="filter(key, value)"]

    "NotSkipping.Processing" -> "NotSkippingV0.Processing" [label="vFormat='v0'"]
    "NotSkipping.Processing" -> "NotSkippingV1.Processing" [label="vFormat='v1'"]

    "WaitForNullKey.Processing" -> "NotSkipping.Processing" [label="master(key) != keyMarker"]
    "WaitForNullKey.Processing" -> "SkippingVersions.Processing" [label="master(key) == keyMarker"]
    "NotSkippingV0.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(<ReplayPrefix>)]\n/ prefix <- <ReplayPrefix>\n-> FILTER_SKIP"]
    "NotSkippingV0.Processing" -> "NotSkipping.Idle" [label="[Version.isPHD(value)]\n-> FILTER_ACCEPT"]
    "NotSkippingV0.Processing" -> "NotSkippingCommon.Processing" [label="[not key.startsWith(<ReplayPrefix>)\nand not Version.isPHD(value)]"]
    "NotSkippingV1.Processing" -> "NotSkippingCommon.Processing" [label="[always]"]
    "NotSkippingCommon.Processing" -> END [label="[isListableKey(key, value) and\nnKeys == maxKeys]\n-> FILTER_END"]
    "NotSkippingCommon.Processing" -> "SkippingPrefix.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nhasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
    "NotSkippingCommon.Processing" -> "NotSkipping.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nnot hasDelimiter(key)]\n/ Contents.append(key, versionId, value)\n-> FILTER_ACCEPT"]

    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
    "SkippingPrefix.Processing" -> "NotSkipping.Processing" [label="[not key.startsWith(prefix)]"]
    "SkippingVersions.Processing" -> "NotSkipping.Processing" [label="master(key) !== keyMarker or \nversionId > versionIdMarker"]
    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="master(key) === keyMarker and \nversionId < versionIdMarker\n-> FILTER_SKIP"]
    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="master(key) === keyMarker and \nversionId == versionIdMarker\n-> FILTER_ACCEPT"]
}
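The version-specific additions over the basic delimiter machine are the WaitForNullKey and SkippingVersions states. Below is a minimal TypeScript sketch of the SkippingVersions.Processing decision, under the assumption (hypothetical here) that a versioned key is the master key followed by a '\0' separator and the version ID; master(key) in the diagram corresponds to the parsed master part.

// Sketch of the SkippingVersions.Processing branch (illustration only).
const VID_SEP = '\0'; // assumed versioned-key separator

function parseKey(key: string): { master: string; versionId: string } {
    const i = key.indexOf(VID_SEP);
    return i < 0
        ? { master: key, versionId: '' }
        : { master: key.slice(0, i), versionId: key.slice(i + 1) };
}

type Outcome = 'RESUME' | 'FILTER_SKIP' | 'FILTER_ACCEPT';

function skippingVersions(
    key: string, keyMarker: string, versionIdMarker: string): Outcome {
    const { master, versionId } = parseKey(key);
    if (master !== keyMarker || versionId > versionIdMarker) {
        // hand the key back to NotSkipping.Processing
        return 'RESUME';
    }
    if (versionId < versionIdMarker) {
        return 'FILTER_SKIP'; // still before the marker version
    }
    return 'FILTER_ACCEPT'; // versionId == versionIdMarker
}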
@ -0,0 +1,265 @@
[SVG image, 1522×922 pt, generated by graphviz 2.43.0: rendering of the versioned listing state machine defined by the digraph above, with the same states, transitions, and edge labels.]
After Width: | Height: | Size: 21 KiB
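The hunk below removes a legacy errors.json error map. As context, here is a minimal TypeScript sketch of how such a map is typically consumed; S3Error and buildErrors are hypothetical names, not the library's actual API: each entry becomes a reusable Error instance keyed by its name, and "_comment" entries are documentation separators with no error definition.

// Sketch: turn an errors.json-style map into named Error singletons.
interface ErrorDef { code: number; description: string }

class S3Error extends Error {
    constructor(readonly type: string,
                readonly code: number,
                description: string) {
        super(description);
        this.name = type;
    }
}

function buildErrors(defs: Record<string, unknown>): Record<string, S3Error> {
    const out: Record<string, S3Error> = {};
    for (const [type, def] of Object.entries(defs)) {
        if (type.startsWith('_comment')) {
            continue; // separator/annotation entries carry no error definition
        }
        const { code, description } = def as ErrorDef;
        out[type] = new S3Error(type, code, description);
    }
    return out;
}

// Usage (assuming Node's fs):
//   const errors = buildErrors(JSON.parse(fs.readFileSync('errors.json', 'utf8')));
//   if (err === errors.NoSuchKey) { /* respond 404 */ }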
@ -1,715 +0,0 @@
|
||||||
{
|
|
||||||
"_comment": "------------------- Amazon errors ------------------",
|
|
||||||
"AccessDenied": {
|
|
||||||
"code": 403,
|
|
||||||
"description": "Access Denied"
|
|
||||||
},
|
|
||||||
"AccessForbidden": {
|
|
||||||
"code": 403,
|
|
||||||
"description": "Access Forbidden"
|
|
||||||
},
|
|
||||||
"AccountProblem": {
|
|
||||||
"code": 403,
|
|
||||||
"description": "There is a problem with your AWS account that prevents the operation from completing successfully. Please use Contact Us."
|
|
||||||
},
|
|
||||||
"AmbiguousGrantByEmailAddress": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The email address you provided is associated with more than one account."
|
|
||||||
},
|
|
||||||
"BadDigest": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The Content-MD5 you specified did not match what we received."
|
|
||||||
},
|
|
||||||
"BucketAlreadyExists": {
|
|
||||||
"code": 409,
|
|
||||||
"description": "The requested bucket name is not available. The bucket namespace is shared by all users of the system. Please select a different name and try again."
|
|
||||||
},
|
|
||||||
"BucketAlreadyOwnedByYou": {
|
|
||||||
"code": 409,
|
|
||||||
"description": "Your previous request to create the named bucket succeeded and you already own it. You get this error in all AWS regions except US Standard, us-east-1. In us-east-1 region, you will get 200 OK, but it is no-op (if bucket exists S3 will not do anything)."
|
|
||||||
},
|
|
||||||
"BucketNotEmpty": {
|
|
||||||
"code": 409,
|
|
||||||
"description": "The bucket you tried to delete is not empty."
|
|
||||||
},
|
|
||||||
"CredentialsNotSupported": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "This request does not support credentials."
|
|
||||||
},
|
|
||||||
"CrossLocationLoggingProhibited": {
|
|
||||||
"code": 403,
|
|
||||||
"description": "Cross-location logging not allowed. Buckets in one geographic location cannot log information to a bucket in another location."
|
|
||||||
},
|
|
||||||
"DeleteConflict": {
|
|
||||||
"code": 409,
|
|
||||||
"description": "The request was rejected because it attempted to delete a resource that has attached subordinate entities. The error message describes these entities."
|
|
||||||
},
|
|
||||||
"EntityTooSmall": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Your proposed upload is smaller than the minimum allowed object size."
|
|
||||||
},
|
|
||||||
"EntityTooLarge": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Your proposed upload exceeds the maximum allowed object size."
|
|
||||||
},
|
|
||||||
"ExpiredToken": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The provided token has expired."
|
|
||||||
},
|
|
||||||
"IllegalVersioningConfigurationException": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Indicates that the versioning configuration specified in the request is invalid."
|
|
||||||
},
|
|
||||||
"IncompleteBody": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "You did not provide the number of bytes specified by the Content-Length HTTP header."
|
|
||||||
},
|
|
||||||
"IncorrectNumberOfFilesInPostRequest": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "POST requires exactly one file upload per request."
|
|
||||||
},
|
|
||||||
"InlineDataTooLarge": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Inline data exceeds the maximum allowed size."
|
|
||||||
},
|
|
||||||
"InternalError": {
|
|
||||||
"code": 500,
|
|
||||||
"description": "We encountered an internal error. Please try again."
|
|
||||||
},
|
|
||||||
"InvalidAccessKeyId": {
|
|
||||||
"code": 403,
|
|
||||||
"description": "The AWS access key Id you provided does not exist in our records."
|
|
||||||
},
|
|
||||||
"InvalidAddressingHeader": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "You must specify the Anonymous role."
|
|
||||||
},
|
|
||||||
"InvalidArgument": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Invalid Argument"
|
|
||||||
},
|
|
||||||
"InvalidBucketName": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The specified bucket is not valid."
|
|
||||||
},
|
|
||||||
"InvalidBucketState": {
|
|
||||||
"code": 409,
|
|
||||||
"description": "The request is not valid with the current state of the bucket."
|
|
||||||
},
|
|
||||||
"InvalidDigest": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The Content-MD5 you specified is not valid."
|
|
||||||
},
|
|
||||||
"InvalidEncryptionAlgorithmError": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The encryption request you specified is not valid. The valid value is AES256."
|
|
||||||
},
|
|
||||||
"InvalidLocationConstraint": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The specified location constraint is not valid."
|
|
||||||
},
|
|
||||||
"InvalidObjectState": {
|
|
||||||
"code": 403,
|
|
||||||
"description": "The operation is not valid for the current state of the object."
|
|
||||||
},
|
|
||||||
"InvalidPart": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "One or more of the specified parts could not be found. The part might not have been uploaded, or the specified entity tag might not have matched the part's entity tag."
|
|
||||||
},
|
|
||||||
"InvalidPartOrder": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The list of parts was not in ascending order.Parts list must specified in order by part number."
|
|
||||||
},
|
|
||||||
"InvalidPartNumber": {
|
|
||||||
"code": 416,
|
|
||||||
"description": "The requested partnumber is not satisfiable."
|
|
||||||
},
|
|
||||||
"InvalidPayer": {
|
|
||||||
"code": 403,
|
|
||||||
"description": "All access to this object has been disabled."
|
|
||||||
},
|
|
||||||
"InvalidPolicyDocument": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The content of the form does not meet the conditions specified in the policy document."
|
|
||||||
},
|
|
||||||
"InvalidRange": {
|
|
||||||
"code": 416,
|
|
||||||
"description": "The requested range cannot be satisfied."
|
|
||||||
},
|
|
||||||
"InvalidRedirectLocation": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The website redirect location must have a prefix of 'http://' or 'https://' or '/'."
|
|
||||||
},
|
|
||||||
"InvalidRequest": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "SOAP requests must be made over an HTTPS connection."
|
|
||||||
},
|
|
||||||
"InvalidSecurity": {
|
|
||||||
"code": 403,
|
|
||||||
"description": "The provided security credentials are not valid."
|
|
||||||
},
|
|
||||||
"InvalidSOAPRequest": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The SOAP request body is invalid."
|
|
||||||
},
|
|
||||||
"InvalidStorageClass": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The storage class you specified is not valid."
|
|
||||||
},
|
|
||||||
"InvalidTag": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The Tag you have provided is invalid"
|
|
||||||
},
|
|
||||||
"InvalidTargetBucketForLogging": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The target bucket for logging does not exist, is not owned by you, or does not have the appropriate grants for the log-delivery group."
|
|
||||||
},
|
|
||||||
"InvalidToken": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The provided token is malformed or otherwise invalid."
|
|
||||||
},
|
|
||||||
"InvalidURI": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Couldn't parse the specified URI."
|
|
||||||
},
|
|
||||||
"KeyTooLong": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Your key is too long."
|
|
||||||
},
|
|
||||||
"LimitExceeded": {
|
|
||||||
"code": 409,
|
|
||||||
"description": " The request was rejected because it attempted to create resources beyond the current AWS account limits. The error message describes the limit exceeded."
|
|
||||||
},
|
|
||||||
"MalformedACLError": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The XML you provided was not well-formed or did not validate against our published schema."
|
|
||||||
},
|
|
||||||
"MalformedPOSTRequest": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The body of your POST request is not well-formed multipart/form-data."
|
|
||||||
},
|
|
||||||
"MalformedXML": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The XML you provided was not well-formed or did not validate against our published schema."
|
|
||||||
},
|
|
||||||
"MaxMessageLengthExceeded": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Your request was too big."
|
|
||||||
},
|
|
||||||
"MaxPostPreDataLengthExceededError": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Your POST request fields preceding the upload file were too large."
|
|
||||||
},
|
|
||||||
"MetadataTooLarge": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Your metadata headers exceed the maximum allowed metadata size."
|
|
||||||
},
|
|
||||||
"MethodNotAllowed": {
|
|
||||||
"code": 405,
|
|
||||||
"description": "The specified method is not allowed against this resource."
|
|
||||||
},
|
|
||||||
"MissingAttachment": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "A SOAP attachment was expected, but none were found."
|
|
||||||
},
|
|
||||||
"MissingContentLength": {
|
|
||||||
"code": 411,
|
|
||||||
"description": "You must provide the Content-Length HTTP header."
|
|
||||||
},
|
|
||||||
"MissingRequestBodyError": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Request body is empty"
|
|
||||||
},
|
|
||||||
"MissingSecurityElement": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The SOAP 1.1 request is missing a security element."
|
|
||||||
},
|
|
||||||
"MissingSecurityHeader": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Your request is missing a required header."
|
|
||||||
},
|
|
||||||
"NoLoggingStatusForKey": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "There is no such thing as a logging status subresource for a key."
|
|
||||||
},
|
|
||||||
"NoSuchBucket": {
|
|
||||||
"code": 404,
|
|
||||||
"description": "The specified bucket does not exist."
|
|
||||||
},
|
|
||||||
"NoSuchCORSConfiguration": {
|
|
||||||
"code": 404,
|
|
||||||
"description": "The CORS configuration does not exist"
|
|
||||||
},
|
|
||||||
"NoSuchKey": {
|
|
||||||
"code": 404,
|
|
||||||
"description": "The specified key does not exist."
|
|
||||||
},
|
|
||||||
"NoSuchLifecycleConfiguration": {
|
|
||||||
"code": 404,
|
|
||||||
"description": "The lifecycle configuration does not exist."
|
|
||||||
},
|
|
||||||
"NoSuchWebsiteConfiguration": {
|
|
||||||
"code": 404,
|
|
||||||
"description": "The specified bucket does not have a website configuration"
|
|
||||||
},
|
|
||||||
"NoSuchUpload": {
|
|
||||||
"code": 404,
|
|
||||||
"description": "The specified multipart upload does not exist. The upload ID might be invalid, or the multipart upload might have been aborted or completed."
|
|
||||||
},
|
|
||||||
"NoSuchVersion": {
|
|
||||||
"code": 404,
|
|
||||||
"description": "Indicates that the version ID specified in the request does not match an existing version."
|
|
||||||
},
|
|
||||||
"ReplicationConfigurationNotFoundError": {
|
|
||||||
"code": 404,
|
|
||||||
"description": "The replication configuration was not found"
|
|
||||||
},
|
|
||||||
"NotImplemented": {
|
|
||||||
"code": 501,
|
|
||||||
"description": "A header you provided implies functionality that is not implemented."
|
|
||||||
},
|
|
||||||
"NotModified": {
|
|
||||||
"code": 304,
|
|
||||||
"description": "Not Modified."
|
|
||||||
},
|
|
||||||
"NotSignedUp": {
|
|
||||||
"code": 403,
|
|
||||||
"description": "Your account is not signed up for the S3 service. You must sign up before you can use S3. "
|
|
||||||
},
|
|
||||||
"NoSuchBucketPolicy": {
|
|
||||||
"code": 404,
|
|
||||||
"description": "The specified bucket does not have a bucket policy."
|
|
||||||
},
|
|
||||||
"OperationAborted": {
|
|
||||||
"code": 409,
|
|
||||||
"description": "A conflicting conditional operation is currently in progress against this resource. Try again."
|
|
||||||
},
|
|
||||||
"PermanentRedirect": {
|
|
||||||
"code": 301,
|
|
||||||
"description": "The bucket you are attempting to access must be addressed using the specified endpoint. Send all future requests to this endpoint."
|
|
||||||
},
|
|
||||||
"PreconditionFailed": {
|
|
||||||
"code": 412,
|
|
||||||
"description": "At least one of the preconditions you specified did not hold."
|
|
||||||
},
|
|
||||||
"Redirect": {
|
|
||||||
"code": 307,
|
|
||||||
"description": "Temporary redirect."
|
|
||||||
},
|
|
||||||
"RestoreAlreadyInProgress": {
|
|
||||||
"code": 409,
|
|
||||||
"description": "Object restore is already in progress."
|
|
||||||
},
|
|
||||||
"RequestIsNotMultiPartContent": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Bucket POST must be of the enclosure-type multipart/form-data."
|
|
||||||
},
|
|
||||||
"RequestTimeout": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Your socket connection to the server was not read from or written to within the timeout period."
|
|
||||||
},
|
|
||||||
"RequestTimeTooSkewed": {
|
|
||||||
"code": 403,
|
|
||||||
"description": "The difference between the request time and the server's time is too large."
|
|
||||||
},
|
|
||||||
"RequestTorrentOfBucketError": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "Requesting the torrent file of a bucket is not permitted."
|
|
||||||
},
|
|
||||||
"SignatureDoesNotMatch": {
|
|
||||||
"code": 403,
|
|
||||||
"description": "The request signature we calculated does not match the signature you provided."
|
|
||||||
},
|
|
||||||
"_comment" : {
|
|
||||||
"note" : "This is an AWS S3 specific error. We are opting to use the more general 'ServiceUnavailable' error used throughout AWS (IAM/EC2) to have uniformity of error messages even though we are potentially compromising S3 compatibility.",
|
|
||||||
"ServiceUnavailable": {
|
|
||||||
"code": 503,
|
|
||||||
"description": "Reduce your request rate."
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"ServiceUnavailable": {
|
|
||||||
"code": 503,
|
|
||||||
"description": "The request has failed due to a temporary failure of the server."
|
|
||||||
},
|
|
||||||
"SlowDown": {
|
|
||||||
"code": 503,
|
|
||||||
"description": "Reduce your request rate."
|
|
||||||
},
|
|
||||||
"TemporaryRedirect": {
|
|
||||||
"code": 307,
|
|
||||||
"description": "You are being redirected to the bucket while DNS updates."
|
|
||||||
},
|
|
||||||
"TokenRefreshRequired": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "The provided token must be refreshed."
|
|
||||||
},
|
|
||||||
"TooManyBuckets": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "You have attempted to create more buckets than allowed."
|
|
||||||
},
|
|
||||||
"TooManyParts": {
|
|
||||||
"code": 400,
|
|
||||||
"description": "You have attempted to upload more parts than allowed."
|
|
||||||
},
|
|
    "UnexpectedContent": {
        "code": 400,
        "description": "This request does not support content."
    },
    "UnresolvableGrantByEmailAddress": {
        "code": 400,
        "description": "The email address you provided does not match any account on record."
    },
    "UserKeyMustBeSpecified": {
        "code": 400,
        "description": "The bucket POST must contain the specified field name. If it is specified, check the order of the fields."
    },
    "NoSuchEntity": {
        "code": 404,
        "description": "The request was rejected because it referenced an entity that does not exist. The error message describes the entity."
    },
    "WrongFormat": {
        "code": 400,
        "description": "Data entered by the user has a wrong format."
    },
    "Forbidden": {
        "code": 403,
        "description": "Authentication failed."
    },
    "EntityDoesNotExist": {
        "code": 404,
        "description": "Not found."
    },
    "EntityAlreadyExists": {
        "code": 409,
        "description": "The request was rejected because it attempted to create a resource that already exists."
    },
    "ServiceFailure": {
        "code": 500,
        "description": "Server error: the request processing has failed because of an unknown error, exception or failure."
    },
    "IncompleteSignature": {
        "code": 400,
        "description": "The request signature does not conform to AWS standards."
    },
    "InternalFailure": {
        "code": 500,
        "description": "The request processing has failed because of an unknown error, exception or failure."
    },
    "InvalidAction": {
        "code": 400,
        "description": "The action or operation requested is invalid. Verify that the action is typed correctly."
    },
    "InvalidClientTokenId": {
        "code": 403,
        "description": "The X.509 certificate or AWS access key ID provided does not exist in our records."
    },
    "InvalidParameterCombination": {
        "code": 400,
        "description": "Parameters that must not be used together were used together."
    },
    "InvalidParameterValue": {
        "code": 400,
        "description": "An invalid or out-of-range value was supplied for the input parameter."
    },
    "InvalidQueryParameter": {
        "code": 400,
        "description": "The AWS query string is malformed or does not adhere to AWS standards."
    },
    "MalformedQueryString": {
        "code": 404,
        "description": "The query string contains a syntax error."
    },
    "MissingAction": {
        "code": 400,
        "description": "The request is missing an action or a required parameter."
    },
    "MissingAuthenticationToken": {
        "code": 403,
        "description": "The request must contain either a valid (registered) AWS access key ID or X.509 certificate."
    },
    "MissingParameter": {
        "code": 400,
        "description": "A required parameter for the specified action is not supplied."
    },
    "OptInRequired": {
        "code": 403,
        "description": "The AWS access key ID needs a subscription for the service."
    },
    "RequestExpired": {
        "code": 400,
        "description": "The request reached the service more than 15 minutes after the date stamp on the request or more than 15 minutes after the request expiration date (such as for pre-signed URLs), or the date stamp on the request is more than 15 minutes in the future."
    },
    "Throttling": {
        "code": 400,
        "description": "The request was denied due to request throttling."
    },
    "AccountNotFound": {
        "code": 404,
        "description": "No account was found in Vault, please contact your system administrator."
    },
    "ValidationError": {
        "code": 400,
        "description": "The specified value is invalid."
    },
    "MalformedPolicyDocument": {
        "code": 400,
        "description": "Syntax errors in policy."
    },
    "InvalidInput": {
        "code": 400,
        "description": "The request was rejected because an invalid or out-of-range value was supplied for an input parameter."
    },
"_comment": "-------------- Special non-AWS S3 errors --------------",
|
|
||||||
"MPUinProgress": {
|
|
||||||
"code": 409,
|
|
||||||
"description": "The bucket you tried to delete has an ongoing multipart upload."
|
|
||||||
},
|
|
||||||
"_comment": "-------------- Internal project errors --------------",
|
|
||||||
"_comment": "----------------------- Vault -----------------------",
|
|
||||||
"_comment": "#### formatErrors ####",
|
|
||||||
"BadName": {
|
|
||||||
"description": "name not ok",
|
|
||||||
"code": 5001
|
|
||||||
},
|
|
||||||
"BadAccount": {
|
|
||||||
"description": "account not ok",
|
|
||||||
"code": 5002
|
|
||||||
},
|
|
||||||
"BadGroup": {
|
|
||||||
"description": "group not ok",
|
|
||||||
"code": 5003
|
|
||||||
},
|
|
||||||
"BadId": {
|
|
||||||
"description": "id not ok",
|
|
||||||
"code": 5004
|
|
||||||
},
|
|
||||||
"BadAccountName": {
|
|
||||||
"description": "accountName not ok",
|
|
||||||
"code": 5005
|
|
||||||
},
|
|
||||||
"BadNameFriendly": {
|
|
||||||
"description": "nameFriendly not ok",
|
|
||||||
"code": 5006
|
|
||||||
},
|
|
||||||
"BadEmailAddress": {
|
|
||||||
"description": "email address not ok",
|
|
||||||
"code": 5007
|
|
||||||
},
|
|
||||||
"BadPath": {
|
|
||||||
"description": "path not ok",
|
|
||||||
"code": 5008
|
|
||||||
},
|
|
||||||
"BadArn": {
|
|
||||||
"description": "arn not ok",
|
|
||||||
"code": 5009
|
|
||||||
},
|
|
||||||
"BadCreateDate": {
|
|
||||||
"description": "createDate not ok",
|
|
||||||
"code": 5010
|
|
||||||
},
|
|
||||||
"BadLastUsedDate": {
|
|
||||||
"description": "lastUsedDate not ok",
|
|
||||||
"code": 5011
|
|
||||||
},
|
|
||||||
"BadNotBefore": {
|
|
||||||
"description": "notBefore not ok",
|
|
||||||
"code": 5012
|
|
||||||
},
|
|
||||||
"BadNotAfter": {
|
|
||||||
"description": "notAfter not ok",
|
|
||||||
"code": 5013
|
|
||||||
},
|
|
||||||
"BadSaltedPwd": {
|
|
||||||
"description": "salted password not ok",
|
|
||||||
"code": 5014
|
|
||||||
},
|
|
||||||
"ok": {
|
|
||||||
"description": "No error",
|
|
||||||
"code": 200
|
|
||||||
},
|
|
||||||
"BadUser": {
|
|
||||||
"description": "user not ok",
|
|
||||||
"code": 5016
|
|
||||||
},
|
|
||||||
"BadSaltedPasswd": {
|
|
||||||
"description": "salted password not ok",
|
|
||||||
"code": 5017
|
|
||||||
},
|
|
||||||
"BadPasswdDate": {
|
|
||||||
"description": "password date not ok",
|
|
||||||
"code": 5018
|
|
||||||
},
|
|
||||||
"BadCanonicalId": {
|
|
||||||
"description": "canonicalId not ok",
|
|
||||||
"code": 5019
|
|
||||||
},
|
|
||||||
"BadAlias": {
|
|
||||||
"description": "alias not ok",
|
|
||||||
"code": 5020
|
|
||||||
},
|
|
||||||
"_comment": "#### internalErrors ####",
|
|
||||||
"DBPutFailed": {
|
|
||||||
"description": "DB put failed",
|
|
||||||
"code": 5021
|
|
||||||
},
|
|
||||||
"_comment": "#### alreadyExistErrors ####",
|
|
||||||
"AccountEmailAlreadyUsed": {
|
|
||||||
"description": "an other account already uses that email",
|
|
||||||
"code": 5022
|
|
||||||
},
|
|
||||||
"AccountNameAlreadyUsed": {
|
|
||||||
"description": "an other account already uses that name",
|
|
||||||
"code": 5023
|
|
||||||
},
|
|
||||||
"UserEmailAlreadyUsed": {
|
|
||||||
"description": "an other user already uses that email",
|
|
||||||
"code": 5024
|
|
||||||
},
|
|
||||||
"UserNameAlreadyUsed": {
|
|
||||||
"description": "an other user already uses that name",
|
|
||||||
"code": 5025
|
|
||||||
},
|
|
||||||
"_comment": "#### doesntExistErrors ####",
|
|
||||||
"NoParentAccount": {
|
|
||||||
"description": "parent account does not exist",
|
|
||||||
"code": 5026
|
|
||||||
},
|
|
||||||
"_comment": "#### authErrors ####",
|
|
||||||
"BadStringToSign": {
|
|
||||||
"description": "stringToSign not ok'",
|
|
||||||
"code": 5027
|
|
||||||
},
|
|
||||||
"BadSignatureFromRequest": {
|
|
||||||
"description": "signatureFromRequest not ok",
|
|
||||||
"code": 5028
|
|
||||||
},
|
|
||||||
"BadAlgorithm": {
|
|
||||||
"description": "hashAlgorithm not ok",
|
|
||||||
"code": 5029
|
|
||||||
},
|
|
||||||
"SecretKeyDoesNotExist": {
|
|
||||||
"description": "secret key does not exist",
|
|
||||||
"code": 5030
|
|
||||||
},
|
|
||||||
"InvalidRegion": {
|
|
||||||
"description": "Region was not provided or is not recognized by the system",
|
|
||||||
"code": 5031
|
|
||||||
},
|
|
||||||
"ScopeDate": {
|
|
||||||
"description": "scope date is missing, or format is invalid",
|
|
||||||
"code": 5032
|
|
||||||
},
|
|
||||||
"BadAccessKey": {
|
|
||||||
"description": "access key not ok",
|
|
||||||
"code": 5033
|
|
||||||
},
|
|
||||||
"NoDict": {
|
|
||||||
"description": "no dictionary of params provided for signature verification",
|
|
||||||
"code": 5034
|
|
||||||
},
|
|
||||||
"BadSecretKey": {
|
|
||||||
"description": "secretKey not ok",
|
|
||||||
"code": 5035
|
|
||||||
},
|
|
||||||
"BadSecretKeyValue": {
|
|
||||||
"description": "secretKey value not ok",
|
|
||||||
"code": 5036
|
|
||||||
},
|
|
||||||
"BadSecretKeyStatus": {
|
|
||||||
"description": "secretKey status not ok",
|
|
||||||
"code": 5037
|
|
||||||
},
|
|
||||||
"_comment": "#### OidcpErrors ####",
|
|
||||||
"BadUrl": {
|
|
||||||
"description": "url not ok",
|
|
||||||
"code": 5038
|
|
||||||
},
|
|
||||||
"BadClientIdList": {
|
|
||||||
"description": "client id list not ok'",
|
|
||||||
"code": 5039
|
|
||||||
},
|
|
||||||
"BadThumbprintList": {
|
|
||||||
"description": "thumbprint list not ok'",
|
|
||||||
"code": 5040
|
|
||||||
},
|
|
||||||
"BadObject": {
|
|
||||||
"description": "Object not ok'",
|
|
||||||
"code": 5041
|
|
||||||
},
|
|
||||||
"_comment": "#### RoleErrors ####",
|
|
||||||
"BadRole": {
|
|
||||||
"description": "role not ok",
|
|
||||||
"code": 5042
|
|
||||||
},
|
|
||||||
"_comment": "#### SamlpErrors ####",
|
|
||||||
"BadSamlp": {
|
|
||||||
"description": "samlp not ok",
|
|
||||||
"code": 5043
|
|
||||||
},
|
|
||||||
"BadMetadataDocument": {
|
|
||||||
"description": "metadata document not ok",
|
|
||||||
"code": 5044
|
|
||||||
},
|
|
||||||
"BadSessionIndex": {
|
|
||||||
"description": "session index not ok",
|
|
||||||
"code": 5045
|
|
||||||
},
|
|
||||||
"Unauthorized": {
|
|
||||||
"description": "not authenticated",
|
|
||||||
"code": 401
|
|
||||||
},
|
|
||||||
"_comment": "--------------------- MetaData ---------------------",
|
|
||||||
"_comment": "#### formatErrors ####",
|
|
||||||
"CacheUpdated": {
|
|
||||||
"description": "The cache has been updated",
|
|
||||||
"code": 500
|
|
||||||
},
|
|
||||||
"DBNotFound": {
|
|
||||||
"description": "This DB does not exist",
|
|
||||||
"code": 404
|
|
||||||
},
|
|
||||||
"DBAlreadyExists": {
|
|
||||||
"description": "This DB already exist",
|
|
||||||
"code": 409
|
|
||||||
},
|
|
||||||
"ObjNotFound": {
|
|
||||||
"description": "This object does not exist",
|
|
||||||
"code": 404
|
|
||||||
},
|
|
||||||
"PermissionDenied": {
|
|
||||||
"description": "Permission denied",
|
|
||||||
"code": 403
|
|
||||||
},
|
|
||||||
"BadRequest": {
|
|
||||||
"description": "BadRequest",
|
|
||||||
"code": 400
|
|
||||||
},
|
|
||||||
"RaftSessionNotLeader": {
|
|
||||||
"description": "NotLeader",
|
|
||||||
"code": 500
|
|
||||||
},
|
|
||||||
"RaftSessionLeaderNotConnected": {
|
|
||||||
"description": "RaftSessionLeaderNotConnected",
|
|
||||||
"code": 400
|
|
||||||
},
|
|
||||||
"NoLeaderForDB": {
|
|
||||||
"description": "NoLeaderForDB",
|
|
||||||
"code": 400
|
|
||||||
},
|
|
||||||
"RouteNotFound": {
|
|
||||||
"description": "RouteNotFound",
|
|
||||||
"code": 404
|
|
||||||
},
|
|
||||||
"NoMapsInConfig": {
|
|
||||||
"description": "NoMapsInConfig",
|
|
||||||
"code": 404
|
|
||||||
},
|
|
||||||
"DBAPINotReady": {
|
|
||||||
"message": "DBAPINotReady",
|
|
||||||
"code": 500
|
|
||||||
},
|
|
||||||
"NotEnoughMapsInConfig:": {
|
|
||||||
"description": "NotEnoughMapsInConfig",
|
|
||||||
"code": 400
|
|
||||||
}
|
|
||||||
}
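Taken together, the entries above form a flat name -> { code, description } map (the repeated "_comment" keys are section separators; only the last one survives JSON parsing). A minimal sketch of turning such a map into throwable error values; the loader below is illustrative only and is not the project's actual errors module:

// Illustrative loader, assuming the map above is saved as errors.json
// (requires "resolveJsonModule" in tsconfig). Not the project's real module.
import * as errorDefs from './errors.json';

class APIError extends Error {
    constructor(
        public readonly type: string,
        public readonly code: number,
        description: string,
    ) {
        super(description);
    }
}

const errors: Record<string, APIError> = {};
for (const [name, def] of Object.entries(errorDefs)) {
    // skip the "_comment" separator entries
    if (name !== '_comment' && typeof def === 'object' && def !== null) {
        const { code, description } = def as { code: number, description: string };
        errors[name] = new APIError(name, code, description);
    }
}

console.log(errors.NoSuchEntity.code); // 404, per the definition above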
@@ -0,0 +1,28 @@
{
    "groups": {
        "default": {
            "packages": [
                "lib/executables/pensieveCreds/package.json",
                "package.json"
            ]
        }
    },
    "branchPrefix": "improvement/greenkeeper.io/",
    "commitMessages": {
        "initialBadge": "docs(readme): add Greenkeeper badge",
        "initialDependencies": "chore(package): update dependencies",
        "initialBranches": "chore(bert-e): whitelist greenkeeper branches",
        "dependencyUpdate": "fix(package): update ${dependency} to version ${version}",
        "devDependencyUpdate": "chore(package): update ${dependency} to version ${version}",
        "dependencyPin": "fix: pin ${dependency} to ${oldVersionResolved}",
        "devDependencyPin": "chore: pin ${dependency} to ${oldVersionResolved}",
        "closes": "\n\nCloses #${number}"
    },
    "ignore": [
        "ajv",
        "eslint",
        "eslint-plugin-react",
        "eslint-config-airbnb",
        "eslint-config-scality"
    ]
}
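The ${...} placeholders in commitMessages are filled in by the bot when it opens an update PR; roughly as below (the dependency name and version are made up for illustration):

// Hypothetical expansion of the 'dependencyUpdate' template above.
const template = 'fix(package): update ${dependency} to version ${version}';
const message = template
    .replace('${dependency}', 'some-dependency')
    .replace('${version}', '1.2.3');
// => 'fix(package): update some-dependency to version 1.2.3'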
index.js
@@ -1,110 +0,0 @@
module.exports = {
    auth: require('./lib/auth/auth'),
    constants: require('./lib/constants'),
    db: require('./lib/db'),
    errors: require('./lib/errors.js'),
    shuffle: require('./lib/shuffle'),
    stringHash: require('./lib/stringHash'),
    ipCheck: require('./lib/ipCheck'),
    jsutil: require('./lib/jsutil'),
    https: {
        ciphers: require('./lib/https/ciphers.js'),
        dhparam: require('./lib/https/dh2048.js'),
    },
    algorithms: {
        list: {
            Basic: require('./lib/algos/list/basic').List,
            Delimiter: require('./lib/algos/list/delimiter').Delimiter,
            DelimiterVersions: require('./lib/algos/list/delimiterVersions')
                .DelimiterVersions,
            DelimiterMaster: require('./lib/algos/list/delimiterMaster')
                .DelimiterMaster,
            MPU: require('./lib/algos/list/MPU').MultipartUploads,
        },
        listTools: {
            DelimiterTools: require('./lib/algos/list/tools'),
        },
    },
    policies: {
        evaluators: require('./lib/policyEvaluator/evaluator.js'),
        validateUserPolicy: require('./lib/policy/policyValidator')
            .validateUserPolicy,
        evaluatePrincipal: require('./lib/policyEvaluator/principal'),
        RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
    },
    Clustering: require('./lib/Clustering'),
    testing: {
        matrix: require('./lib/testing/matrix.js'),
    },
    versioning: {
        VersioningConstants: require('./lib/versioning/constants.js')
            .VersioningConstants,
        Version: require('./lib/versioning/Version.js').Version,
        VersionID: require('./lib/versioning/VersionID.js'),
    },
    network: {
        http: {
            server: require('./lib/network/http/server'),
        },
        rpc: require('./lib/network/rpc/rpc'),
        level: require('./lib/network/rpc/level-net'),
        rest: {
            RESTServer: require('./lib/network/rest/RESTServer'),
            RESTClient: require('./lib/network/rest/RESTClient'),
        },
        RoundRobin: require('./lib/network/RoundRobin'),
    },
    s3routes: {
        routes: require('./lib/s3routes/routes'),
        routesUtils: require('./lib/s3routes/routesUtils'),
    },
    s3middleware: {
        userMetadata: require('./lib/s3middleware/userMetadata'),
        convertToXml: require('./lib/s3middleware/convertToXml'),
        escapeForXml: require('./lib/s3middleware/escapeForXml'),
        tagging: require('./lib/s3middleware/tagging'),
        validateConditionalHeaders:
            require('./lib/s3middleware/validateConditionalHeaders')
                .validateConditionalHeaders,
        MD5Sum: require('./lib/s3middleware/MD5Sum'),
        objectUtils: require('./lib/s3middleware/objectUtils'),
        azureHelper: {
            mpuUtils:
                require('./lib/s3middleware/azureHelpers/mpuUtils'),
            ResultsCollector:
                require('./lib/s3middleware/azureHelpers/ResultsCollector'),
            SubStreamInterface:
                require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
        },
    },
    storage: {
        metadata: {
            MetadataFileServer:
                require('./lib/storage/metadata/file/MetadataFileServer'),
            MetadataFileClient:
                require('./lib/storage/metadata/file/MetadataFileClient'),
            LogConsumer:
                require('./lib/storage/metadata/bucketclient/LogConsumer'),
        },
        data: {
            file: {
                DataFileStore:
                    require('./lib/storage/data/file/DataFileStore'),
            },
        },
        utils: require('./lib/storage/utils'),
    },
    models: {
        BucketInfo: require('./lib/models/BucketInfo'),
        ObjectMD: require('./lib/models/ObjectMD'),
        ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
        ARN: require('./lib/models/ARN'),
        WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
        ReplicationConfiguration:
            require('./lib/models/ReplicationConfiguration'),
    },
    metrics: {
        StatsClient: require('./lib/metrics/StatsClient'),
        RedisClient: require('./lib/metrics/RedisClient'),
    },
};
@@ -0,0 +1,175 @@
import * as evaluators from './lib/policyEvaluator/evaluator';
import evaluatePrincipal from './lib/policyEvaluator/principal';
import RequestContext, {
    actionNeedQuotaCheck,
    actionNeedQuotaCheckCopy,
    actionWithDataDeletion } from './lib/policyEvaluator/RequestContext';
import * as requestUtils from './lib/policyEvaluator/requestUtils';
import * as actionMaps from './lib/policyEvaluator/utils/actionMaps';
import { validateUserPolicy } from './lib/policy/policyValidator';
import * as locationConstraints from './lib/patches/locationConstraints';
import * as userMetadata from './lib/s3middleware/userMetadata';
import convertToXml from './lib/s3middleware/convertToXml';
import escapeForXml from './lib/s3middleware/escapeForXml';
import * as objectLegalHold from './lib/s3middleware/objectLegalHold';
import * as tagging from './lib/s3middleware/tagging';
import { checkDateModifiedHeaders } from './lib/s3middleware/validateConditionalHeaders';
import { validateConditionalHeaders } from './lib/s3middleware/validateConditionalHeaders';
import MD5Sum from './lib/s3middleware/MD5Sum';
import NullStream from './lib/s3middleware/nullStream';
import * as objectUtils from './lib/s3middleware/objectUtils';
import * as mpuUtils from './lib/s3middleware/azureHelpers/mpuUtils';
import ResultsCollector from './lib/s3middleware/azureHelpers/ResultsCollector';
import SubStreamInterface from './lib/s3middleware/azureHelpers/SubStreamInterface';
import { prepareStream } from './lib/s3middleware/prepareStream';
import * as processMpuParts from './lib/s3middleware/processMpuParts';
import * as retention from './lib/s3middleware/objectRetention';
import * as objectRestore from './lib/s3middleware/objectRestore';
import * as lifecycleHelpers from './lib/s3middleware/lifecycleHelpers';

export { default as errors } from './lib/errors';
export { default as Clustering } from './lib/Clustering';
export * as ClusterRPC from './lib/clustering/ClusterRPC';
export * as ipCheck from './lib/ipCheck';
export * as auth from './lib/auth/auth';
export * as constants from './lib/constants';
export * as https from './lib/https';
export * as metrics from './lib/metrics';
export * as network from './lib/network';
export * as s3routes from './lib/s3routes';
export * as versioning from './lib/versioning';
export * as stream from './lib/stream';
export * as jsutil from './lib/jsutil';
export { default as stringHash } from './lib/stringHash';
export * as db from './lib/db';
export * as errorUtils from './lib/errorUtils';
export { default as shuffle } from './lib/shuffle';
export * as models from './lib/models';

export const algorithms = {
    list: require('./lib/algos/list/exportAlgos'),
    listTools: {
        DelimiterTools: require('./lib/algos/list/tools'),
        Skip: require('./lib/algos/list/skip'),
    },
    cache: {
        GapSet: require('./lib/algos/cache/GapSet'),
        GapCache: require('./lib/algos/cache/GapCache'),
        LRUCache: require('./lib/algos/cache/LRUCache'),
    },
    stream: {
        MergeStream: require('./lib/algos/stream/MergeStream'),
    },
    SortedSet: require('./lib/algos/set/SortedSet'),
    Heap: require('./lib/algos/heap/Heap'),
};

export const policies = {
    evaluators,
    validateUserPolicy,
    evaluatePrincipal,
    RequestContext,
    requestUtils,
    actionMaps,
    actionNeedQuotaCheck,
    actionWithDataDeletion,
    actionNeedQuotaCheckCopy,
};

export const testing = {
    matrix: require('./lib/testing/matrix.js'),
};

export const s3middleware = {
    userMetadata,
    convertToXml,
    escapeForXml,
    objectLegalHold,
    tagging,
    checkDateModifiedHeaders,
    validateConditionalHeaders,
    MD5Sum,
    NullStream,
    objectUtils,
    azureHelper: {
        mpuUtils,
        ResultsCollector,
        SubStreamInterface,
    },
    prepareStream,
    processMpuParts,
    retention,
    objectRestore,
    lifecycleHelpers,
};

export const storage = {
    metadata: {
        MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
        bucketclient: {
            BucketClientInterface:
                require('./lib/storage/metadata/bucketclient/' +
                    'BucketClientInterface'),
            LogConsumer:
                require('./lib/storage/metadata/bucketclient/LogConsumer'),
        },
        file: {
            BucketFileInterface:
                require('./lib/storage/metadata/file/BucketFileInterface'),
            MetadataFileServer:
                require('./lib/storage/metadata/file/MetadataFileServer'),
            MetadataFileClient:
                require('./lib/storage/metadata/file/MetadataFileClient'),
        },
        inMemory: {
            metastore:
                require('./lib/storage/metadata/in_memory/metastore'),
            metadata: require('./lib/storage/metadata/in_memory/metadata'),
            bucketUtilities:
                require('./lib/storage/metadata/in_memory/bucket_utilities'),
        },
        mongoclient: {
            MongoClientInterface:
                require('./lib/storage/metadata/mongoclient/' +
                    'MongoClientInterface'),
            LogConsumer:
                require('./lib/storage/metadata/mongoclient/LogConsumer'),
        },
        proxy: {
            Server: require('./lib/storage/metadata/proxy/Server'),
        },
    },
    data: {
        DataWrapper: require('./lib/storage/data/DataWrapper'),
        MultipleBackendGateway:
            require('./lib/storage/data/MultipleBackendGateway'),
        parseLC: require('./lib/storage/data/LocationConstraintParser'),
        file: {
            DataFileStore:
                require('./lib/storage/data/file/DataFileStore'),
            DataFileInterface:
                require('./lib/storage/data/file/DataFileInterface'),
        },
        external: {
            AwsClient: require('./lib/storage/data/external/AwsClient'),
            AzureClient: require('./lib/storage/data/external/AzureClient'),
            GcpClient: require('./lib/storage/data/external/GcpClient'),
            GCP: require('./lib/storage/data/external/GCP/GcpService'),
            GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
            GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
            PfsClient: require('./lib/storage/data/external/PfsClient'),
            backendUtils: require('./lib/storage/data/external/utils'),
        },
        inMemory: {
            datastore: require('./lib/storage/data/in_memory/datastore'),
        },
    },
    utils: require('./lib/storage/utils'),
};

export const pensieve = {
    credentialUtils: require('./lib/executables/pensieveCreds/utils'),
};

export const patches = {
    locationConstraints,
};
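Compared with the deleted index.js above, consumers now use named ES imports instead of one big exported object; a minimal sketch, assuming the package is consumed under its published name (written here as 'arsenal'):

// Sketch of a consumer; the export names are the ones declared in the
// new TypeScript entry point above.
import { errors, s3middleware, storage, policies } from 'arsenal';

const md5sum = new s3middleware.MD5Sum();       // stream helper re-exported above
const DataWrapper = storage.data.DataWrapper;   // grouped namespaces keep the dotted paths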
@@ -1,18 +1,28 @@
-'use strict'; // eslint-disable-line
+import cluster, { Worker } from 'cluster';
+import * as werelogs from 'werelogs';
 
-const cluster = require('cluster');
-
-class Clustering {
+export default class Clustering {
+    _size: number;
+    _shutdownTimeout: number;
+    _logger: werelogs.Logger;
+    _shutdown: boolean;
+    _workers: (Worker | undefined)[];
+    _workersTimeout: (NodeJS.Timeout | undefined)[];
+    _workersStatus: (number | string | undefined)[];
+    _status: number;
+    _exitCb?: (clustering: Clustering, exitSignal?: string) => void;
+    _index?: number;
+
     /**
      * Constructor
      *
-     * @param {number} size Cluster size
-     * @param {Logger} logger Logger object
-     * @param {number} [shutdownTimeout=5000] Change default shutdown timeout
+     * @param size Cluster size
+     * @param logger Logger object
+     * @param [shutdownTimeout=5000] Change default shutdown timeout
      * releasing resources
-     * @return {Clustering} itself
+     * @return itself
      */
-    constructor(size, logger, shutdownTimeout) {
+    constructor(size: number, logger: werelogs.Logger, shutdownTimeout?: number) {
         this._size = size;
         if (size < 1) {
             throw new Error('Cluster size must be greater than or equal to 1');
@@ -32,7 +42,6 @@ class Clustering {
      * Method called after a stop() call
      *
      * @private
-     * @return {undefined}
      */
     _afterStop() {
         // Assuming all workers shutdown gracefully
@@ -41,10 +50,11 @@ class Clustering {
         for (let i = 0; i < size; ++i) {
             // If the process returned an error code or was killed by a
             // signal, set the status
-            if (typeof this._workersStatus[i] === 'number') {
-                this._status = this._workersStatus[i];
+            const status = this._workersStatus[i];
+            if (typeof status === 'number') {
+                this._status = status;
                 break;
-            } else if (typeof this._workersStatus[i] === 'string') {
+            } else if (typeof status === 'string') {
                 this._status = 1;
                 break;
             }
@@ -58,13 +68,17 @@ class Clustering {
     /**
      * Method called when a worker exited
      *
-     * @param {Cluster.worker} worker - Current worker
-     * @param {number} i - Worker index
-     * @param {number} code - Exit code
-     * @param {string} signal - Exit signal
-     * @return {undefined}
+     * @param worker - Current worker
+     * @param i - Worker index
+     * @param code - Exit code
+     * @param signal - Exit signal
      */
-    _workerExited(worker, i, code, signal) {
+    _workerExited(
+        worker: Worker,
+        i: number,
+        code: number,
+        signal: string,
+    ) {
         // If the worker:
         // - was killed by a signal
         // - returned an error code
@@ -91,8 +105,9 @@ class Clustering {
             this._workersStatus[i] = undefined;
         }
         this._workers[i] = undefined;
-        if (this._workersTimeout[i]) {
-            clearTimeout(this._workersTimeout[i]);
+        const timeout = this._workersTimeout[i];
+        if (timeout) {
+            clearTimeout(timeout);
             this._workersTimeout[i] = undefined;
         }
         // If we don't trigger the stop method, the watchdog
@@ -110,29 +125,28 @@ class Clustering {
     /**
      * Method to start a worker
      *
-     * @param {number} i Index of the starting worker
-     * @return {undefined}
+     * @param i Index of the starting worker
      */
-    startWorker(i) {
-        if (!cluster.isMaster) {
+    startWorker(i: number) {
+        if (!cluster.isPrimary) {
             return;
         }
         // Fork a new worker
         this._workers[i] = cluster.fork();
         // Listen for message from the worker
-        this._workers[i].on('message', msg => {
+        this._workers[i]!.on('message', msg => {
             // If the worker is ready, send him his id
             if (msg === 'ready') {
-                this._workers[i].send({ msg: 'setup', id: i });
+                this._workers[i]!.send({ msg: 'setup', id: i });
             }
         });
-        this._workers[i].on('exit', (code, signal) =>
-            this._workerExited(this._workers[i], i, code, signal));
+        this._workers[i]!.on('exit', (code, signal) =>
+            this._workerExited(this._workers[i]!, i, code, signal));
         // Trigger when the worker was started
-        this._workers[i].on('online', () => {
+        this._workers[i]!.on('online', () => {
             this._logger.info('Worker started', {
                 id: i,
-                childPid: this._workers[i].process.pid,
+                childPid: this._workers[i]!.process.pid,
             });
         });
     }
@@ -140,10 +154,10 @@ class Clustering {
     /**
      * Method to put handler on cluster exit
      *
-     * @param {function} cb - Callback(Clustering, [exitSignal])
-     * @return {Clustering} Itself
+     * @param cb - Callback(Clustering, [exitSignal])
+     * @return Itself
      */
-    onExit(cb) {
+    onExit(cb: (clustering: Clustering, exitSignal?: string) => void) {
         this._exitCb = cb;
         return this;
     }
@@ -152,33 +166,33 @@ class Clustering {
      * Method to start the cluster (if master) or to start the callback
      * (worker)
      *
-     * @param {function} cb - Callback to run the worker
-     * @return {Clustering} itself
+     * @param cb - Callback to run the worker
+     * @return itself
      */
-    start(cb) {
+    start(cb: (clustering: Clustering) => void) {
         process.on('SIGINT', () => this.stop('SIGINT'));
         process.on('SIGHUP', () => this.stop('SIGHUP'));
         process.on('SIGQUIT', () => this.stop('SIGQUIT'));
         process.on('SIGTERM', () => this.stop('SIGTERM'));
         process.on('SIGPIPE', () => {});
-        process.on('exit', (code, signal) => {
+        process.on('exit', (code?: number, signal?: string) => {
             if (this._exitCb) {
                 this._status = code || 0;
                 return this._exitCb(this, signal);
             }
             return process.exit(code || 0);
         });
-        process.on('uncaughtException', err => {
+        process.on('uncaughtException', (err: Error) => {
             this._logger.fatal('caught error', {
                 error: err.message,
-                stack: err.stack.split('\n').map(str => str.trim()),
+                stack: err.stack?.split('\n')?.map(str => str.trim()),
             });
             process.exit(1);
         });
-        if (!cluster.isMaster) {
+        if (!cluster.isPrimary) {
             // Waiting for message from master to
             // know the id of the slave cluster
-            process.on('message', msg => {
+            process.on('message', (msg: any) => {
                 if (msg.msg === 'setup') {
                     this._index = msg.id;
                     cb(this);
@@ -186,7 +200,7 @@ class Clustering {
             });
             // Send message to the master, to let him know
             // the worker has started
-            process.send('ready');
+            process.send?.('ready');
         } else {
             for (let i = 0; i < this._size; ++i) {
                 this.startWorker(i);
@@ -198,7 +212,7 @@ class Clustering {
     /**
      * Method to get workers
      *
-     * @return {Cluster.Worker[]} Workers
+     * @return Workers
      */
     getWorkers() {
         return this._workers;
@@ -207,7 +221,7 @@ class Clustering {
     /**
      * Method to get the status of the cluster
      *
-     * @return {number} Status code
+     * @return Status code
      */
     getStatus() {
         return this._status;
@@ -216,7 +230,7 @@ class Clustering {
     /**
      * Method to return if it's the master process
      *
-     * @return {boolean} - True if master, false otherwise
+     * @return - True if master, false otherwise
     */
    isMaster() {
        return this._index === undefined;
@@ -225,7 +239,7 @@ class Clustering {
     /**
      * Method to get index of the worker
      *
-     * @return {number|undefined} Worker index, undefined if it's master
+     * @return Worker index, undefined if it's master
     */
    getIndex() {
        return this._index;
@@ -234,11 +248,10 @@ class Clustering {
     /**
      * Method to stop the cluster
      *
-     * @param {string} signal - Set internally when processes killed by signal
-     * @return {undefined}
+     * @param signal - Set internally when processes killed by signal
      */
-    stop(signal) {
-        if (!cluster.isMaster) {
+    stop(signal?: string) {
+        if (!cluster.isPrimary) {
             if (this._exitCb) {
                 return this._exitCb(this, signal);
             }
@@ -251,13 +264,17 @@ class Clustering {
             }
             this._workersTimeout[i] = setTimeout(() => {
                 // Kill the worker if the sigterm was ignored or takes too long
-                process.kill(worker.process.pid, 'SIGKILL');
+                if (worker.process.pid) {
+                    process.kill(worker.process.pid, 'SIGKILL');
+                }
             }, this._shutdownTimeout);
             // Send sigterm to the process, allowing to release resources
             // and save some states
-            return process.kill(worker.process.pid, 'SIGTERM');
+            if (worker.process.pid) {
+                return process.kill(worker.process.pid, 'SIGTERM');
+            } else {
+                return true;
+            }
         });
     }
 }
-
-module.exports = Clustering;
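For context, a short usage sketch of the class as migrated, based only on the API visible in this diff (the logger name and worker body are placeholders):

import * as werelogs from 'werelogs';
import Clustering from './lib/Clustering';

// Start a 4-worker cluster; the callback passed to start() runs in workers.
const clustering = new Clustering(4, new werelogs.Logger('demo'));
clustering
    .onExit(c => {
        if (c.isMaster()) {
            console.log('cluster exited with status', c.getStatus());
        }
    })
    .start(c => {
        // Worker code: getIndex() returns the worker's id (undefined in master).
        console.log('worker', c.getIndex(), 'started');
    });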
@@ -0,0 +1,363 @@
import { OrderedSet } from '@js-sdsl/ordered-set';
import {
    default as GapSet,
    GapSetEntry,
} from './GapSet';

// the API is similar but is not strictly a superset of GapSetInterface
// so we don't extend from it
export interface GapCacheInterface {
    exposureDelayMs: number;
    maxGapWeight: number;
    size: number;

    setGap: (firstKey: string, lastKey: string, weight: number) => void;
    removeOverlappingGaps: (overlappingKeys: string[]) => number;
    lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
    [Symbol.iterator]: () => Iterator<GapSetEntry>;
    toArray: () => GapSetEntry[];
};

class GapCacheUpdateSet {
    newGaps: GapSet;
    updatedKeys: OrderedSet<string>;

    constructor(maxGapWeight: number) {
        this.newGaps = new GapSet(maxGapWeight);
        this.updatedKeys = new OrderedSet();
    }

    addUpdateBatch(updatedKeys: OrderedSet<string>): void {
        this.updatedKeys.union(updatedKeys);
    }
};

/**
 * Cache of listing "gaps" i.e. ranges of keys that can be skipped
 * over during listing (because they only contain delete markers as
 * latest versions).
 *
 * Typically, a single GapCache instance would be attached to a raft session.
 *
 * The API usage is as follows:
 *
 * - Initialize a GapCache instance by calling start() (this starts an internal timer)
 *
 * - Insert a gap or update an existing one via setGap()
 *
 * - Lookup existing gaps via lookupGap()
 *
 * - Invalidate gaps that overlap a specific set of keys via removeOverlappingGaps()
 *
 * - Shut down a GapCache instance by calling stop() (this stops the internal timer)
 *
 * Gaps inserted via setGap() are not exposed immediately to lookupGap(), but only:
 *
 * - after a certain delay always larger than 'exposureDelayMs' and usually shorter
 * than twice this value (but might be slightly longer in rare cases)
 *
 * - and only if they haven't been invalidated by a recent call to removeOverlappingGaps()
 *
 * This ensures atomicity between gap creation and invalidation from updates under
 * the condition that a gap is created from first key to last key within the time defined
 * by 'exposureDelayMs'.
 *
 * The implementation is based on two extra temporary "update sets" on top of the main
 * exposed gap set, one called "staging" and the other "frozen", each containing a
 * temporary updated gap set and a list of updated keys to invalidate gaps with (coming
 * from calls to removeOverlappingGaps()). Every "exposureDelayMs" milliseconds, the frozen
 * gaps are invalidated by all key updates coming from either of the "staging" or "frozen"
 * update set, then merged into the exposed gaps set, after which the staging updates become
 * the frozen updates and won't receive any new gap until the next cycle.
 */
export default class GapCache implements GapCacheInterface {
    _exposureDelayMs: number;
    maxGaps: number;

    _stagingUpdates: GapCacheUpdateSet;
    _frozenUpdates: GapCacheUpdateSet;
    _exposedGaps: GapSet;
    _exposeFrozenInterval: NodeJS.Timeout | null;

    /**
     * @constructor
     *
     * @param {number} exposureDelayMs - minimum delay between
     * insertion of a gap via setGap() and its exposure via
     * lookupGap()
     * @param {number} maxGaps - maximum number of cached gaps, after
     * which no new gap can be added by setGap(). (Note: a future
     * improvement could replace this by an eviction strategy)
     * @param {number} maxGapWeight - maximum "weight" of individual
     * cached gaps, which is also the granularity for
     * invalidation. Individual gaps can be chained together,
     * which lookupGap() transparently consolidates in the response
     * into a single large gap.
     */
    constructor(exposureDelayMs: number, maxGaps: number, maxGapWeight: number) {
        this._exposureDelayMs = exposureDelayMs;
        this.maxGaps = maxGaps;

        this._stagingUpdates = new GapCacheUpdateSet(maxGapWeight);
        this._frozenUpdates = new GapCacheUpdateSet(maxGapWeight);
        this._exposedGaps = new GapSet(maxGapWeight);
        this._exposeFrozenInterval = null;
    }

    /**
     * Create a GapCache from an array of exposed gap entries (used in tests)
     *
     * @return {GapCache} - a new GapCache instance
     */
    static createFromArray(
        gaps: GapSetEntry[],
        exposureDelayMs: number,
        maxGaps: number,
        maxGapWeight: number
    ): GapCache {
        const gapCache = new GapCache(exposureDelayMs, maxGaps, maxGapWeight);
        gapCache._exposedGaps = GapSet.createFromArray(gaps, maxGapWeight);
        return gapCache;
    }

    /**
     * Internal helper to remove gaps in the staging and frozen sets
     * overlapping with previously updated keys, right before the
     * frozen gaps get exposed.
     *
     * @return {undefined}
     */
    _removeOverlappingGapsBeforeExpose(): void {
        for (const { updatedKeys } of [this._stagingUpdates, this._frozenUpdates]) {
            if (updatedKeys.size() === 0) {
                continue;
            }
            for (const { newGaps } of [this._stagingUpdates, this._frozenUpdates]) {
                if (newGaps.size === 0) {
                    continue;
                }
                newGaps.removeOverlappingGaps(updatedKeys);
            }
        }
    }

    /**
     * This function is the core mechanism that updates the exposed gaps in the
     * cache. It is called on a regular interval defined by 'exposureDelayMs'.
     *
     * It does the following in order:
     *
     * - remove gaps from the frozen set that overlap with any key present in a
     * batch passed to removeOverlappingGaps() since the last two triggers of
     * _exposeFrozen()
     *
     * - merge the remaining gaps from the frozen set to the exposed set, which
     * makes them visible from calls to lookupGap()
     *
     * - rotate by freezing the currently staging updates and initiating a new
     * staging updates set
     *
     * @return {undefined}
     */
    _exposeFrozen(): void {
        this._removeOverlappingGapsBeforeExpose();
        for (const gap of this._frozenUpdates.newGaps) {
            // Use a trivial strategy to keep the cache size within
            // limits: refuse to add new gaps when the size is above
            // the 'maxGaps' threshold. We solely rely on
            // removeOverlappingGaps() to make space for new gaps.
            if (this._exposedGaps.size < this.maxGaps) {
                this._exposedGaps.setGap(gap.firstKey, gap.lastKey, gap.weight);
            }
        }
        this._frozenUpdates = this._stagingUpdates;
        this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
    }

    /**
     * Start the internal GapCache timer
     *
     * @return {undefined}
     */
    start(): void {
        if (this._exposeFrozenInterval) {
            return;
        }
        this._exposeFrozenInterval = setInterval(
            () => this._exposeFrozen(),
            this._exposureDelayMs);
    }

    /**
     * Stop the internal GapCache timer
     *
     * @return {undefined}
     */
    stop(): void {
        if (this._exposeFrozenInterval) {
            clearInterval(this._exposeFrozenInterval);
            this._exposeFrozenInterval = null;
        }
    }

    /**
     * Record a gap between two keys, associated with a weight to
     * limit individual gap's spanning ranges in the cache, for a more
     * granular invalidation.
     *
     * The function handles splitting and merging existing gaps to
     * maintain an optimal weight of cache entries.
     *
     * NOTE 1: the caller must ensure that the full length of the gap
     * between 'firstKey' and 'lastKey' has been built from a listing
     * snapshot that is more recent than 'exposureDelayMs' milliseconds,
     * in order to guarantee that the exposed gap will be fully
     * covered (and potentially invalidated) from recent calls to
     * removeOverlappingGaps().
     *
     * NOTE 2: a usual pattern when building a large gap from multiple
     * calls to setGap() is to start the next gap from 'lastKey',
     * which will be passed as 'firstKey' in the next call, so that
     * gaps can be chained together and consolidated by lookupGap().
     *
     * @param {string} firstKey - first key of the gap
     * @param {string} lastKey - last key of the gap, must be greater
     * or equal than 'firstKey'
     * @param {number} weight - total weight between 'firstKey' and 'lastKey'
     * @return {undefined}
     */
    setGap(firstKey: string, lastKey: string, weight: number): void {
        this._stagingUpdates.newGaps.setGap(firstKey, lastKey, weight);
    }

    /**
     * Remove gaps that overlap with a given set of keys. Used to
     * invalidate gaps when keys are inserted or deleted.
     *
     * @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that
     * overlap with any of this set of keys
     * @return {number} - how many gaps were removed from the exposed
     * gaps only (overlapping gaps not yet exposed are also invalidated
     * but are not accounted for in the returned value)
     */
    removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
        let overlappingKeysSet;
        if (Array.isArray(overlappingKeys)) {
            overlappingKeysSet = new OrderedSet(overlappingKeys);
        } else {
            overlappingKeysSet = overlappingKeys;
        }
        this._stagingUpdates.addUpdateBatch(overlappingKeysSet);
        return this._exposedGaps.removeOverlappingGaps(overlappingKeysSet);
    }

    /**
     * Lookup the next exposed gap that overlaps with [minKey, maxKey]. Internally
     * chained gaps are coalesced in the response into a single contiguous large gap.
     *
     * @param {string} minKey - minimum key overlapping with the returned gap
     * @param {string} [maxKey] - maximum key overlapping with the returned gap
     * @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
     * was found, null otherwise, as a Promise
     */
    lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
        return this._exposedGaps.lookupGap(minKey, maxKey);
    }

    /**
     * Get the maximum weight setting for individual gaps.
     *
     * @return {number} - maximum weight of individual gaps
     */
    get maxGapWeight(): number {
        return this._exposedGaps.maxWeight;
    }

    /**
     * Set the maximum weight setting for individual gaps.
     *
     * @param {number} gapWeight - maximum weight of individual gaps
     */
    set maxGapWeight(gapWeight: number) {
        this._exposedGaps.maxWeight = gapWeight;
        // also update transient gap sets
        this._stagingUpdates.newGaps.maxWeight = gapWeight;
        this._frozenUpdates.newGaps.maxWeight = gapWeight;
    }

    /**
     * Get the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap().
     *
     * @return {number} - exposure delay in milliseconds
     */
    get exposureDelayMs(): number {
        return this._exposureDelayMs;
    }

    /**
     * Set the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap(). Setting this attribute automatically updates the
     * internal state to honor the new value.
     *
     * @param {number} exposureDelayMs - exposure delay in milliseconds
     */
    set exposureDelayMs(exposureDelayMs: number) {
        if (exposureDelayMs !== this._exposureDelayMs) {
            this._exposureDelayMs = exposureDelayMs;
            if (this._exposeFrozenInterval) {
                // invalidate all pending gap updates, as the new interval may not be
                // safe for them
                this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
                this._frozenUpdates = new GapCacheUpdateSet(this.maxGapWeight);

                // reinitialize the _exposeFrozenInterval timer with the updated delay
                this.stop();
                this.start();
            }
        }
    }

    /**
     * Get the number of exposed gaps
     *
     * @return {number} number of exposed gaps
     */
    get size(): number {
        return this._exposedGaps.size;
    }

    /**
     * Iterate over exposed gaps
     *
     * @return {Iterator<GapSetEntry>} an iterator over exposed gaps
     */
    [Symbol.iterator](): Iterator<GapSetEntry> {
        return this._exposedGaps[Symbol.iterator]();
    }

    /**
     * Get an array of all exposed gaps
     *
     * @return {GapSetEntry[]} array of exposed gaps
     */
    toArray(): GapSetEntry[] {
        return this._exposedGaps.toArray();
    }

    /**
     * Clear all exposed and staging gaps from the cache.
     *
     * Note: retains invalidating updates from removeOverlappingGaps()
     * for correctness of gaps inserted afterwards.
     *
     * @return {undefined}
     */
    clear(): void {
        this._stagingUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._frozenUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._exposedGaps = new GapSet(this.maxGapWeight);
    }
}
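A usage sketch following the lifecycle documented in the class comment (delays, limits, and key names below are arbitrary example values):

import GapCache from './lib/algos/cache/GapCache';

async function demo() {
    // expose gaps after ~100-200ms, cache at most 1000 gaps of weight <= 500
    const gapCache = new GapCache(100, 1000, 500);
    gapCache.start();

    // record a gap of 300 keys discovered during a listing
    gapCache.setGap('key-0001', 'key-0300', 300);

    // the gap only becomes visible after one to two exposure delays
    await new Promise(resolve => setTimeout(resolve, 250));
    const gap = await gapCache.lookupGap('key-0100');
    // gap is now { firstKey: 'key-0001', lastKey: 'key-0300', weight: 300 }

    // a write to any key inside the range invalidates the gap
    gapCache.removeOverlappingGaps(['key-0150']);

    gapCache.stop();
    return gap;
}
demo();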
|
|
@ -0,0 +1,366 @@
|
||||||
|
import assert from 'assert';
|
||||||
|
import { OrderedSet } from '@js-sdsl/ordered-set';
|
||||||
|
|
||||||
|
import errors from '../../errors';
|
||||||
|
|
||||||
|
export type GapSetEntry = {
|
||||||
|
firstKey: string,
|
||||||
|
lastKey: string,
|
||||||
|
weight: number,
|
||||||
|
};
|
||||||
|
|
||||||
|
export interface GapSetInterface {
|
||||||
|
maxWeight: number;
|
||||||
|
size: number;
|
||||||
|
|
||||||
|
setGap: (firstKey: string, lastKey: string, weight: number) => GapSetEntry;
|
||||||
|
removeOverlappingGaps: (overlappingKeys: string[]) => number;
|
||||||
|
lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
|
||||||
|
[Symbol.iterator]: () => Iterator<GapSetEntry>;
|
||||||
|
toArray: () => GapSetEntry[];
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Specialized data structure to support caching of listing "gaps",
|
||||||
|
* i.e. ranges of keys that can be skipped over during listing
|
||||||
|
* (because they only contain delete markers as latest versions)
|
||||||
|
*/
|
||||||
|
export default class GapSet implements GapSetInterface, Iterable<GapSetEntry> {
|
||||||
|
_gaps: OrderedSet<GapSetEntry>;
|
||||||
|
_maxWeight: number;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @constructor
|
||||||
|
|
||||||
|
* @param {number} maxWeight - weight threshold for each cached
|
||||||
|
* gap (unitless). Triggers splitting gaps when reached
|
||||||
|
*/
|
||||||
|
constructor(maxWeight: number) {
|
||||||
|
this._gaps = new OrderedSet(
|
||||||
|
[],
|
||||||
|
(left: GapSetEntry, right: GapSetEntry) => (
|
||||||
|
left.firstKey < right.firstKey ? -1 :
|
||||||
|
left.firstKey > right.firstKey ? 1 : 0
|
||||||
|
)
|
||||||
|
);
|
||||||
|
this._maxWeight = maxWeight;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a GapSet from an array of gap entries (used in tests)
|
||||||
|
*/
|
||||||
|
static createFromArray(gaps: GapSetEntry[], maxWeight: number): GapSet {
|
||||||
|
const gapSet = new GapSet(maxWeight);
|
||||||
|
for (const gap of gaps) {
|
||||||
|
gapSet._gaps.insert(gap);
|
||||||
|
}
|
||||||
|
return gapSet;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record a gap between two keys, associated with a weight to limit
|
||||||
|
* individual gap sizes in the cache.
|
||||||
|
*
|
||||||
|
* The function handles splitting and merging existing gaps to
|
||||||
|
* maintain an optimal weight of cache entries.
|
||||||
|
*
|
||||||
|
* @param {string} firstKey - first key of the gap
|
||||||
|
* @param {string} lastKey - last key of the gap, must be greater
|
||||||
|
* or equal than 'firstKey'
|
||||||
|
* @param {number} weight - total weight between 'firstKey' and 'lastKey'
|
||||||
|
* @return {GapSetEntry} - existing or new gap entry
|
||||||
|
*/
|
||||||
|
    setGap(firstKey: string, lastKey: string, weight: number): GapSetEntry {
        assert(lastKey >= firstKey);

        // Step 1/4: Find the closest left-overlapping gap, and either re-use it
        // or chain it with a new gap depending on the weights if it exists
        // (otherwise just create a new gap).
        const curGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey });
        let curGap;
        if (curGapIt.isAccessible()) {
            curGap = curGapIt.pointer;
            if (curGap.lastKey >= lastKey) {
                // return fully overlapping gap already cached
                return curGap;
            }
        }
        let remainingWeight = weight;
        if (!curGap // no previous gap
            || curGap.lastKey < firstKey // previous gap not overlapping
            || (curGap.lastKey === firstKey // previous gap overlapping by one key...
                && curGap.weight + weight > this._maxWeight) // ...but we can't extend it
        ) {
            // create a new gap indexed by 'firstKey'
            curGap = { firstKey, lastKey: firstKey, weight: 0 };
            this._gaps.insert(curGap);
        } else if (curGap.lastKey > firstKey && weight > this._maxWeight) {
            // previous gap is either fully or partially contained in the new gap
            // and cannot be extended: subtract its weight from the total (heuristic
            // in case the previous gap doesn't start at 'firstKey', which is the
            // uncommon case)
            remainingWeight -= curGap.weight;

            // there may be an existing chained gap starting with the previous gap's
            // 'lastKey': use it if it exists
            const chainedGapIt = this._gaps.find(<GapSetEntry>{ firstKey: curGap.lastKey });
            if (chainedGapIt.isAccessible()) {
                curGap = chainedGapIt.pointer;
            } else {
                // no existing chained gap: chain a new gap to the previous gap
                curGap = {
                    firstKey: curGap.lastKey,
                    lastKey: curGap.lastKey,
                    weight: 0,
                };
                this._gaps.insert(curGap);
            }
        }
        // Step 2/4: Cleanup existing gaps fully included in firstKey -> lastKey, and
        // aggregate their weights in curGap to define the minimum weight up to the
        // last merged gap.
        let nextGap;
        while (true) {
            const nextGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: curGap.firstKey });
            nextGap = nextGapIt.isAccessible() && nextGapIt.pointer;
            // stop the cleanup when there is no more gap, or when the next gap
            // is not fully included in curGap
            if (!nextGap || nextGap.lastKey > lastKey) {
                break;
            }
            this._gaps.eraseElementByIterator(nextGapIt);
            curGap.lastKey = nextGap.lastKey;
            curGap.weight += nextGap.weight;
        }

        // Step 3/4: Extend curGap to lastKey, adjusting the weight.
        // At this point, curGap's weight is the minimum weight of the finished gap;
        // save it for step 4.
        let minMergedWeight = curGap.weight;
        if (curGap.lastKey === firstKey && firstKey !== lastKey) {
            // extend the existing gap by the full amount 'firstKey -> lastKey'
            curGap.lastKey = lastKey;
            curGap.weight += remainingWeight;
        } else if (curGap.lastKey <= lastKey) {
            curGap.lastKey = lastKey;
            curGap.weight = remainingWeight;
        }

        // Step 4/4: Find the closest right-overlapping gap, and if it exists, either merge
        // it or chain it with curGap depending on the weights.
        if (nextGap && nextGap.firstKey <= lastKey) {
            // nextGap overlaps with the new gap: check if we can merge it
            minMergedWeight += nextGap.weight;
            let mergedWeight;
            if (lastKey === nextGap.firstKey) {
                // nextGap is chained with curGap: add the full weight of nextGap
                mergedWeight = curGap.weight + nextGap.weight;
            } else {
                // strict overlap: don't add nextGap's weight unless
                // it's larger than the sum of merged ranges (as it is
                // then included in `minMergedWeight`)
                mergedWeight = Math.max(curGap.weight, minMergedWeight);
            }
            if (mergedWeight <= this._maxWeight) {
                // merge nextGap into curGap
                curGap.lastKey = nextGap.lastKey;
                curGap.weight = mergedWeight;
                this._gaps.eraseElementByKey(nextGap);
            } else {
                // adjust the last key to chain with nextGap and subtract the next
                // gap's weight from curGap (heuristic)
                curGap.lastKey = nextGap.firstKey;
                curGap.weight = Math.max(mergedWeight - nextGap.weight, 0);
                curGap = nextGap;
            }
        }
        // return a copy of curGap
        return Object.assign({}, curGap);
    }
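
    // Illustrative usage sketch (not part of the original file), assuming a
    // GapSet whose _maxWeight is 100 (for instance via a createFromMaxWeight()
    // factory, which is not shown in this excerpt):
    //
    //     const gaps = GapSet.createFromMaxWeight(100);
    //     gaps.setGap('bar', 'baz', 10);
    //     // -> { firstKey: 'bar', lastKey: 'baz', weight: 10 }
    //     gaps.setGap('baz', 'qux', 20);
    //     // -> { firstKey: 'bar', lastKey: 'qux', weight: 30 }: the second
    //     //    gap is chained to the first and merged, since the combined
    //     //    weight stays within maxWeight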

    /**
     * Remove gaps that overlap with one or more keys in a given array or
     * OrderedSet. Used to invalidate gaps when keys are inserted or deleted.
     *
     * @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that overlap
     * with any of this set of keys
     * @return {number} - how many gaps were removed
     */
    removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
        // To optimize processing with a large number of keys and/or gaps, this function:
        //
        // 1. converts the overlappingKeys array to an OrderedSet (if not already one)
        // 2. queries both the gaps set and the overlapping keys set in a loop, which allows:
        //    - skipping ranges of overlapping keys at once when there is no new overlapping gap
        //    - skipping ranges of gaps at once when there is no overlapping key
        //
        // This way, it is efficient when the number of non-overlapping gaps is large
        // (which is the most common case in practice).

        let overlappingKeysSet;
        if (Array.isArray(overlappingKeys)) {
            overlappingKeysSet = new OrderedSet(overlappingKeys);
        } else {
            overlappingKeysSet = overlappingKeys;
        }
        const firstKeyIt = overlappingKeysSet.begin();
        let currentKey = firstKeyIt.isAccessible() && firstKeyIt.pointer;
        let nRemoved = 0;
        while (currentKey) {
            const closestGapIt = this._gaps.reverseUpperBound(<GapSetEntry>{ firstKey: currentKey });
            if (closestGapIt.isAccessible()) {
                const closestGap = closestGapIt.pointer;
                if (currentKey <= closestGap.lastKey) {
                    // currentKey overlaps closestGap: remove the gap
                    this._gaps.eraseElementByIterator(closestGapIt);
                    nRemoved += 1;
                }
            }
            const nextGapIt = this._gaps.lowerBound(<GapSetEntry>{ firstKey: currentKey });
            if (!nextGapIt.isAccessible()) {
                // no more gap: we're done
                return nRemoved;
            }
            const nextGap = nextGapIt.pointer;
            // advance to the last key potentially overlapping with nextGap
            let currentKeyIt = overlappingKeysSet.reverseLowerBound(nextGap.lastKey);
            if (currentKeyIt.isAccessible()) {
                currentKey = currentKeyIt.pointer;
                if (currentKey >= nextGap.firstKey) {
                    // currentKey overlaps nextGap: remove the gap
                    this._gaps.eraseElementByIterator(nextGapIt);
                    nRemoved += 1;
                }
            }
            // advance to the first key potentially overlapping with another gap
            currentKeyIt = overlappingKeysSet.lowerBound(nextGap.lastKey);
            currentKey = currentKeyIt.isAccessible() && currentKeyIt.pointer;
        }
        return nRemoved;
    }
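
    // Illustrative sketch (not part of the original file), continuing the
    // example above: invalidating cached gaps when keys are written:
    //
    //     gaps.setGap('k1', 'k9', 10);
    //     gaps.removeOverlappingGaps(['k5']);
    //     // -> 1: 'k5' falls inside [k1, k9], so that gap is removed
    //     gaps.removeOverlappingGaps(['zz']);
    //     // -> 0: no remaining gap overlaps 'zz'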

    /**
     * Internal helper to coalesce multiple chained gaps into a single gap.
     *
     * It is only used to construct lookupGap() return values and
     * doesn't modify the GapSet.
     *
     * NOTE: The function may take a noticeable amount of time and CPU
     * to execute if a large number of chained gaps have to be
     * coalesced, but it should never take more than a few seconds. In
     * most cases it should take less than a millisecond. It regularly
     * yields to the nodejs event loop to avoid blocking it during a
     * long execution.
     *
     * @param {GapSetEntry} firstGap - first gap of the chain to coalesce with
     * the next ones in the chain
     * @return {Promise<GapSetEntry>} - a new coalesced entry, as a Promise
     */
    _coalesceGapChain(firstGap: GapSetEntry): Promise<GapSetEntry> {
        return new Promise(resolve => {
            const coalescedGap: GapSetEntry = Object.assign({}, firstGap);
            const coalesceGapChainIteration = () => {
                // efficiency trade-off: 100 iterations of log(N) complexity lookups should
                // not block the event loop for too long
                for (let opCounter = 0; opCounter < 100; ++opCounter) {
                    const chainedGapIt = this._gaps.find(
                        <GapSetEntry>{ firstKey: coalescedGap.lastKey });
                    if (!chainedGapIt.isAccessible()) {
                        // chain is complete
                        return resolve(coalescedGap);
                    }
                    const chainedGap = chainedGapIt.pointer;
                    if (chainedGap.firstKey === chainedGap.lastKey) {
                        // found a single-key gap: chain is complete
                        return resolve(coalescedGap);
                    }
                    coalescedGap.lastKey = chainedGap.lastKey;
                    coalescedGap.weight += chainedGap.weight;
                }
                // yield to the event loop before continuing the process
                // of coalescing the gap chain
                return process.nextTick(coalesceGapChainIteration);
            };
            coalesceGapChainIteration();
        });
    }

    /**
     * Lookup the next gap that overlaps with [minKey, maxKey]. Internally chained
     * gaps are coalesced in the response into a single contiguous large gap.
     *
     * @param {string} minKey - minimum key overlapping with the returned gap
     * @param {string} [maxKey] - maximum key overlapping with the returned gap
     * @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
     * was found, null otherwise, as a Promise
     */
    async lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
        let firstGap: GapSetEntry | null = null;
        const minGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey: minKey });
        const minGap = minGapIt.isAccessible() && minGapIt.pointer;
        if (minGap && minGap.lastKey >= minKey) {
            firstGap = minGap;
        } else {
            const maxGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: minKey });
            const maxGap = maxGapIt.isAccessible() && maxGapIt.pointer;
            if (maxGap && (maxKey === undefined || maxGap.firstKey <= maxKey)) {
                firstGap = maxGap;
            }
        }
        if (!firstGap) {
            return null;
        }
        return this._coalesceGapChain(firstGap);
    }
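
    // Illustrative sketch (not part of the original file): a listing layer
    // can query cached gaps to skip over ranges of deleted keys:
    //
    //     const gap = await gaps.lookupGap('key-100', 'key-200');
    //     if (gap) {
    //         // continue the listing directly after gap.lastKey
    //     }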

    /**
     * Get the maximum weight setting for individual gaps.
     *
     * @return {number} - maximum weight of individual gaps
     */
    get maxWeight(): number {
        return this._maxWeight;
    }

    /**
     * Set the maximum weight setting for individual gaps.
     *
     * @param {number} gapWeight - maximum weight of individual gaps
     */
    set maxWeight(gapWeight: number) {
        this._maxWeight = gapWeight;
    }

    /**
     * Get the number of gaps stored in this set.
     *
     * @return {number} - number of gaps stored in this set
     */
    get size(): number {
        return this._gaps.size();
    }

    /**
     * Iterate over each gap of the set, ordered by first key
     *
     * @return {Iterator<GapSetEntry>} - an iterator over all gaps
     * Example:
     *     for (const gap of myGapSet) { ... }
     */
    [Symbol.iterator](): Iterator<GapSetEntry> {
        return this._gaps[Symbol.iterator]();
    }

    /**
     * Return an array containing all gaps, ordered by first key
     *
     * NOTE: there is a toArray() method in the OrderedSet implementation
     * but it does not scale well and overflows the stack quickly. This is
     * why we provide an implementation based on an iterator.
     *
     * @return {GapSetEntry[]} - an array containing all gaps
     */
    toArray(): GapSetEntry[] {
        return [...this];
    }
}

@@ -0,0 +1,167 @@
const assert = require('assert');

/**
 * @class
 * @classdesc Implements a key-value in-memory cache with a capped
 * number of items and a Least Recently Used (LRU) strategy for
 * eviction.
 */
class LRUCache {
    /**
     * @constructor
     * @param {number} maxEntries - maximum number of entries kept in
     * the cache
     */
    constructor(maxEntries) {
        assert(maxEntries >= 1);
        this._maxEntries = maxEntries;
        this.clear();
    }

    /**
     * Add or update the value associated to a key in the cache,
     * making it the most recently accessed for eviction purposes.
     *
     * @param {string} key - key to add
     * @param {object} value - associated value (can be of any type)
     * @return {boolean} true if the cache contained an entry with
     * this key, false if it did not
     */
    add(key, value) {
        let entry = this._entryMap[key];
        if (entry) {
            entry.value = value;
            // make the entry the most recently used by re-pushing it
            // to the head of the LRU list
            this._lruRemoveEntry(entry);
            this._lruPushEntry(entry);
            return true;
        }
        if (this._entryCount === this._maxEntries) {
            // if the cache is already full, abide by the LRU strategy
            // and remove the least recently used entry from the cache
            // before pushing the new entry
            this._removeEntry(this._lruTail);
        }
        entry = { key, value };
        this._entryMap[key] = entry;
        this._entryCount += 1;
        this._lruPushEntry(entry);
        return false;
    }

    /**
     * Get the value associated to a key in the cache, making it the
     * most recently accessed for eviction purposes.
     *
     * @param {string} key - key of which to fetch the associated value
     * @return {object|undefined} - the associated value if it exists
     * in the cache, or undefined if not found - either because the
     * key was never added or because it has been evicted from the cache.
     */
    get(key) {
        const entry = this._entryMap[key];
        if (entry) {
            // make the entry the most recently used by re-pushing it
            // to the head of the LRU list
            this._lruRemoveEntry(entry);
            this._lruPushEntry(entry);
            return entry.value;
        }
        return undefined;
    }

    /**
     * Remove an entry from the cache if it exists
     *
     * @param {string} key - key to remove
     * @return {boolean} true if an entry has been removed, false if
     * there was no entry with this key in the cache - either because the
     * key was never added or because it has been evicted from the cache.
     */
    remove(key) {
        const entry = this._entryMap[key];
        if (entry) {
            this._removeEntry(entry);
            return true;
        }
        return false;
    }

    /**
     * Get the current number of cached entries
     *
     * @return {number} current number of cached entries
     */
    count() {
        return this._entryCount;
    }

    /**
     * Remove all entries from the cache
     *
     * @return {undefined}
     */
    clear() {
        this._entryMap = {};
        this._entryCount = 0;
        this._lruHead = null;
        this._lruTail = null;
    }

    /**
     * Push an entry to the front of the LRU list, making it the most
     * recently accessed
     *
     * @param {object} entry - entry to push
     * @return {undefined}
     */
    _lruPushEntry(entry) {
        /* eslint-disable no-param-reassign */
        entry._lruNext = this._lruHead;
        entry._lruPrev = null;
        if (this._lruHead) {
            this._lruHead._lruPrev = entry;
        }
        this._lruHead = entry;
        if (!this._lruTail) {
            this._lruTail = entry;
        }
        /* eslint-enable no-param-reassign */
    }

    /**
     * Remove an entry from the LRU list
     *
     * @param {object} entry - entry to remove
     * @return {undefined}
     */
    _lruRemoveEntry(entry) {
        /* eslint-disable no-param-reassign */
        if (entry._lruPrev) {
            entry._lruPrev._lruNext = entry._lruNext;
        } else {
            this._lruHead = entry._lruNext;
        }
        if (entry._lruNext) {
            entry._lruNext._lruPrev = entry._lruPrev;
        } else {
            this._lruTail = entry._lruPrev;
        }
        /* eslint-enable no-param-reassign */
    }

    /**
     * Helper function to remove an existing entry from the cache
     *
     * @param {object} entry - cache entry to remove
     * @return {undefined}
     */
    _removeEntry(entry) {
        this._lruRemoveEntry(entry);
        delete this._entryMap[entry.key];
        this._entryCount -= 1;
    }
}

module.exports = LRUCache;
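
// Illustrative usage sketch (not part of the original file):
//
//     const LRUCache = require('./LRUCache');
//     const cache = new LRUCache(2);
//     cache.add('a', 1);  // -> false (new entry)
//     cache.add('b', 2);  // -> false
//     cache.get('a');     // -> 1, and 'a' becomes the most recently used
//     cache.add('c', 3);  // evicts 'b', the least recently used entry
//     cache.get('b');     // -> undefined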

@@ -0,0 +1,124 @@
export enum HeapOrder {
    Min = -1,
    Max = 1,
}

export enum CompareResult {
    LT = -1,
    EQ = 0,
    GT = 1,
}

export type CompareFunction = (x: any, y: any) => CompareResult;

export class Heap {
    size: number;
    _maxSize: number;
    _order: HeapOrder;
    _heap: any[];
    _cmpFn: CompareFunction;

    constructor(size: number, order: HeapOrder, cmpFn: CompareFunction) {
        this.size = 0;
        this._maxSize = size;
        this._order = order;
        this._cmpFn = cmpFn;
        this._heap = new Array<any>(this._maxSize);
    }

    _parent(i: number): number {
        return Math.floor((i - 1) / 2);
    }

    _left(i: number): number {
        return (2 * i) + 1;
    }

    _right(i: number): number {
        return (2 * i) + 2;
    }

    _shouldSwap(childIdx: number, parentIdx: number): boolean {
        return this._cmpFn(this._heap[childIdx], this._heap[parentIdx]) as number === this._order as number;
    }

    _swap(i: number, j: number) {
        const tmp = this._heap[i];
        this._heap[i] = this._heap[j];
        this._heap[j] = tmp;
    }

    _heapify(i: number) {
        const l = this._left(i);
        const r = this._right(i);
        let c = i;

        if (l < this.size && this._shouldSwap(l, c)) {
            c = l;
        }

        if (r < this.size && this._shouldSwap(r, c)) {
            c = r;
        }

        if (c !== i) {
            this._swap(c, i);
            this._heapify(c);
        }
    }

    add(item: any): any {
        if (this.size >= this._maxSize) {
            return new Error('Max heap size reached');
        }

        ++this.size;
        let c = this.size - 1;
        this._heap[c] = item;

        while (c > 0) {
            if (!this._shouldSwap(c, this._parent(c))) {
                return null;
            }

            this._swap(c, this._parent(c));
            c = this._parent(c);
        }

        return null;
    }

    remove(): any {
        if (this.size <= 0) {
            return null;
        }

        const ret = this._heap[0];
        this._heap[0] = this._heap[this.size - 1];
        this._heapify(0);
        --this.size;

        return ret;
    }

    peek(): any {
        if (this.size <= 0) {
            return null;
        }

        return this._heap[0];
    }
}

export class MinHeap extends Heap {
    constructor(size: number, cmpFn: CompareFunction) {
        super(size, HeapOrder.Min, cmpFn);
    }
}

export class MaxHeap extends Heap {
    constructor(size: number, cmpFn: CompareFunction) {
        super(size, HeapOrder.Max, cmpFn);
    }
}
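
// Illustrative usage sketch (not part of the original file); the module path
// is assumed:
//
//     import { MinHeap, CompareResult } from './Heap';
//     const cmp = (x: number, y: number) =>
//         x < y ? CompareResult.LT : (x > y ? CompareResult.GT : CompareResult.EQ);
//     const h = new MinHeap(10, cmp);
//     h.add(5);
//     h.add(2);
//     h.add(8);
//     h.peek();    // -> 2
//     h.remove();  // -> 2; the heap now holds 5 and 8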

@@ -1,6 +1,23 @@
 'use strict'; // eslint-disable-line strict

-const { FILTER_SKIP, SKIP_NONE } = require('./tools');
+const { FILTER_ACCEPT, SKIP_NONE } = require('./tools');
+
+// Use a heuristic to amortize the cost of JSON
+// serialization/deserialization only on the largest metadata, where the
+// potential for size reduction is high, considering the bulk of the
+// blob size is due to the "location" field containing a large number
+// of MPU parts.
+//
+// Measured on some standard metadata:
+// - 100 parts -> 9K blob
+// - 2000 parts -> 170K blob
+//
+// Using a 10K threshold should lead to a worst case of about 10M to
+// store a raw listing of 1000 entries; even with some growth
+// multiplication factor due to internal memory duplication, it
+// should stay within reasonable memory limits.
+
+const TRIM_METADATA_MIN_BLOB_SIZE = 10000;

 /**
  * Base class of listing extensions.
@@ -23,6 +40,38 @@ class Extension {
         this.keys = 0;
     }

+    /**
+     * Filters out non-requested optional fields from the value. This function
+     * shall be applied on any value that is to be returned as part of the
+     * result of a listing extension.
+     *
+     * @param {String} value - The JSON value of a listing item
+     *
+     * @return {String} The value that may have been trimmed of some
+     * heavy unused fields, or left untouched (depending on size
+     * heuristics)
+     */
+    trimMetadata(value) {
+        let ret = undefined;
+        if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
+            try {
+                ret = JSON.parse(value);
+                delete ret.location;
+                ret = JSON.stringify(ret);
+            } catch (e) {
+                // Prefer returning unfiltered data rather than
+                // stopping the service in case of parsing failure.
+                // The risk of this approach is a potential
+                // reproduction of MD-692, where too much memory is
+                // used by repd.
+                this.logger.warn(
+                    'Could not parse Object Metadata while listing',
+                    { err: e.toString() });
+            }
+        }
+        return ret || value;
+    }
+
     /**
      * Generates listing parameters that metadata can understand from the input
      * parameters. What metadata can understand: gt, gte, lt, lte, limit, keys,
@@ -43,21 +92,26 @@ class Extension {
      * @param {object} entry - a listing entry from metadata
      *                         expected format: { key, value }
      * @return {number} - result of filtering the entry:
-     *          > 0: entry is accepted and included in the result
-     *          = 0: entry is accepted but not included (skipping)
-     *          < 0: entry is not accepted, listing should finish
+     *          FILTER_ACCEPT: entry is accepted and may or may not be
+     *          included in the result
+     *          FILTER_SKIP: listing may skip directly (with "gte" param) to
+     *          the key returned by the skipping() method
+     *          FILTER_END: the results are complete, listing can be stopped
      */
-    filter(entry) {
-        return entry ? FILTER_SKIP : FILTER_SKIP;
+    filter(/* entry: { key, value } */) {
+        return FILTER_ACCEPT;
     }

     /**
-     * Provides the insight into why filter is skipping an entry. This could be
-     * because it is skipping a range of delimited keys or a range of specific
-     * version when doing master version listing.
+     * Provides the next key to which the listing task is allowed to skip.
+     * This can allow skipping over:
+     * - a key prefix ending with the delimiter
+     * - all remaining versions of an object when doing a current
+     *   versions listing in v0 format
+     * - a cached "gap" of deleted objects when doing a current
+     *   versions listing in v0 format
      *
-     * @return {string} - the insight: a common prefix or a master key,
-     * or SKIP_NONE if there is no insight
+     * @return {string} - the next key to which the listing task is allowed
+     * to skip, or SKIP_NONE if there is no skipping information
      */
     skipping() {
         return SKIP_NONE;
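
// Illustrative sketch (not part of the diff): how a listing loop might
// consume the filter()/skipping() contract defined above; `extension`,
// `entries` and `seekTo` are hypothetical:
//
//     for (const entry of entries) {
//         const res = extension.filter(entry);
//         if (res === FILTER_END) {
//             break;                         // results are complete
//         }
//         if (res === FILTER_SKIP) {
//             seekTo(extension.skipping());  // jump past a whole key range
//         }
//     }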

@@ -1,7 +1,10 @@
 'use strict'; // eslint-disable-line strict

-const { inc, checkLimit, FILTER_END, FILTER_ACCEPT } = require('./tools');
+const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
+    FILTER_END, FILTER_ACCEPT, SKIP_NONE } = require('./tools');
 const DEFAULT_MAX_KEYS = 1000;
+const VSConst = require('../../versioning/constants').VersioningConstants;
+const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

 function numberDefault(num, defaultNum) {
     const parsedNum = Number.parseInt(num, 10);
@@ -17,10 +20,12 @@ class MultipartUploads {
      * Init and check parameters
      * @param {Object} params - The parameters you sent to DBD
      * @param {RequestLogger} logger - The logger of the request
+     * @param {String} [vFormat] - versioning key format
      * @return {undefined}
      */
-    constructor(params, logger) {
+    constructor(params, logger, vFormat) {
         this.params = params;
+        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
         this.CommonPrefixes = [];
         this.Uploads = [];
         this.IsTruncated = false;
@@ -33,9 +38,20 @@ class MultipartUploads {
         this.delimiter = params.delimiter;
         this.splitter = params.splitter;
         this.logger = logger;
+
+        Object.assign(this, {
+            [BucketVersioningKeyFormat.v0]: {
+                genMDParams: this.genMDParamsV0,
+                getObjectKey: this.getObjectKeyV0,
+            },
+            [BucketVersioningKeyFormat.v1]: {
+                genMDParams: this.genMDParamsV1,
+                getObjectKey: this.getObjectKeyV1,
+            },
+        }[this.vFormat]);
     }

-    genMDParams() {
+    genMDParamsV0() {
         const params = {};
         if (this.params.keyMarker) {
             params.gt = `overview${this.params.splitter}` +
@@ -57,6 +73,11 @@ class MultipartUploads {
         return params;
     }

+    genMDParamsV1() {
+        const v0params = this.genMDParamsV0();
+        return listingParamsMasterKeysV0ToV1(v0params);
+    }
+
     /**
      * This function adds the elements to the Uploads
      * Set the NextKeyMarker to the current key
@@ -101,6 +122,14 @@ class MultipartUploads {
         }
     }

+    getObjectKeyV0(obj) {
+        return obj.key;
+    }
+
+    getObjectKeyV1(obj) {
+        return obj.key.slice(DbPrefixes.Master.length);
+    }
+
     /**
      * This function applies filter on each element
      * @param {String} obj - The key and value of the element
@@ -113,7 +142,7 @@ class MultipartUploads {
             this.IsTruncated = this.maxKeys > 0;
             return FILTER_END;
         }
-        const key = obj.key;
+        const key = this.getObjectKey(obj);
         const value = obj.value;
         if (this.delimiter) {
             const mpuPrefixSlice = `overview${this.splitter}`.length;
@@ -134,7 +163,7 @@ class MultipartUploads {
     }

     skipping() {
-        return '';
+        return SKIP_NONE;
     }

     /**
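
// Illustrative sketch (not part of the diff): the Object.assign dispatch above
// binds the version-format-specific methods once at construction time, so the
// hot path can call this.genMDParams() / this.getObjectKey() without checking
// vFormat on each entry. A minimal standalone equivalent, assuming the v1
// master-key prefix is the two-character string '\x7fM':
//
//     class Lister {
//         constructor(vFormat) {
//             Object.assign(this, {
//                 v0: { getObjectKey: obj => obj.key },
//                 v1: { getObjectKey: obj => obj.key.slice('\x7fM'.length) },
//             }[vFormat]);
//         }
//     }
//     new Lister('v1').getObjectKey({ key: '\x7fMfoo' });  // -> 'foo'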

@@ -21,6 +21,8 @@ class List extends Extension {
         this.res = [];
         if (parameters) {
             this.maxKeys = checkLimit(parameters.maxKeys, DEFAULT_MAX_KEYS);
+            this.filterKey = parameters.filterKey;
+            this.filterKeyStartsWith = parameters.filterKeyStartsWith;
         } else {
             this.maxKeys = DEFAULT_MAX_KEYS;
         }
@@ -28,14 +30,14 @@ class List extends Extension {
     }

     genMDParams() {
-        const params = {
+        const params = this.parameters ? {
             gt: this.parameters.gt,
             gte: this.parameters.gte || this.parameters.start,
             lt: this.parameters.lt,
             lte: this.parameters.lte || this.parameters.end,
             keys: this.parameters.keys,
             values: this.parameters.values,
-        };
+        } : {};
         Object.keys(params).forEach(key => {
             if (params[key] === null || params[key] === undefined) {
                 delete params[key];
@@ -44,6 +46,43 @@ class List extends Extension {
         return params;
     }

+    /**
+     * Filters on the customAttributes sub-object if present
+     *
+     * @param {String} value - The JSON value of a listing item
+     *
+     * @return {Boolean} Returns true if it matches, else false.
+     */
+    customFilter(value) {
+        let _value;
+        try {
+            _value = JSON.parse(value);
+        } catch (e) {
+            // Prefer returning unfiltered data rather than
+            // stopping the service in case of parsing failure.
+            // The risk of this approach is a potential
+            // reproduction of MD-692, where too much memory is
+            // used by repd.
+            this.logger.warn(
+                'Could not parse Object Metadata while listing',
+                { err: e.toString() });
+            return false;
+        }
+        if (_value.customAttributes !== undefined) {
+            for (const key of Object.keys(_value.customAttributes)) {
+                if (this.filterKey !== undefined &&
+                    key === this.filterKey) {
+                    return true;
+                }
+                if (this.filterKeyStartsWith !== undefined &&
+                    key.startsWith(this.filterKeyStartsWith)) {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
     /**
      * Function applied on each element
      * Just add it to the array
@@ -52,11 +91,24 @@ class List extends Extension {
      *          < 0 : listing done
      */
     filter(elem) {
-        // Check first in case of maxkeys <= 0
+        // Check if the result array is full
         if (this.keys >= this.maxKeys) {
             return FILTER_END;
         }
+        if ((this.filterKey !== undefined ||
+            this.filterKeyStartsWith !== undefined) &&
+            typeof elem === 'object' &&
+            !this.customFilter(elem.value)) {
+            return FILTER_ACCEPT;
+        }
+        if (typeof elem === 'object') {
+            this.res.push({
+                key: elem.key,
+                value: this.trimMetadata(elem.value),
+            });
+        } else {
             this.res.push(elem);
+        }
         this.keys++;
         return FILTER_ACCEPT;
     }
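
// Illustrative sketch (not part of the diff): with filterKeyStartsWith set,
// only entries whose parsed value carries a matching customAttributes key are
// pushed to the result; the constructor call and inputs are hypothetical:
//
//     const lister = new List({ filterKeyStartsWith: 'x-amz-meta-color' }, logger);
//     lister.filter({
//         key: 'obj1',
//         value: JSON.stringify({ customAttributes: { 'x-amz-meta-color-code': 'blue' } }),
//     });  // entry is added to lister.res
//     lister.filter({
//         key: 'obj2',
//         value: JSON.stringify({ customAttributes: { 'x-amz-meta-size': '10' } }),
//     });  // entry is not added, but FILTER_ACCEPT is still returned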

@@ -1,224 +0,0 @@
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');

/**
 * Find the next delimiter in the path
 *
 * @param {string} key - path of the object
 * @param {string} delimiter - string to find
 * @param {number} index - index to start at
 * @return {number} delimiterIndex - returns -1 in case no delimiter is found
 */
function nextDelimiter(key, delimiter, index) {
    return key.indexOf(delimiter, index);
}

/**
 * Find the common prefix in the path
 *
 * @param {String} key - path of the object
 * @param {String} delimiter - separator
 * @param {Number} delimiterIndex - 'folder' index in the path
 * @return {String} - CommonPrefix
 */
function getCommonPrefix(key, delimiter, delimiterIndex) {
    return key.substring(0, delimiterIndex + delimiter.length);
}

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
class Delimiter extends Extension {
    /**
     * Create a new Delimiter instance
     * @constructor
     * @param {Object} parameters - listing parameters
     * @param {String} [parameters.delimiter] - delimiter per amazon format
     * @param {String} [parameters.prefix] - prefix per amazon format
     * @param {String} [parameters.marker] - marker per amazon format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     * @param {Boolean} [parameters.alphabeticalOrder] - whether the result
     *                                                   is alphabetically
     *                                                   ordered or not
     */
    constructor(parameters) {
        super(parameters);
        // original listing parameters
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.marker = parameters.marker;
        this.maxKeys = parameters.maxKeys || 1000;
        this.alphabeticalOrder =
            typeof parameters.alphabeticalOrder !== 'undefined' ?
                parameters.alphabeticalOrder : true;

        // results
        this.CommonPrefixes = [];
        this.Contents = [];
        this.IsTruncated = false;
        this.NextMarker = parameters.marker;

        if (this.delimiter !== undefined &&
            this.NextMarker !== undefined &&
            this.NextMarker.startsWith(this.prefix || '')) {
            const nextDelimiterIndex =
                this.NextMarker.indexOf(this.delimiter,
                    this.prefix ? this.prefix.length : 0);
            this.NextMarker =
                this.NextMarker.slice(0, nextDelimiterIndex +
                    this.delimiter.length);
        }
    }

    genMDParams() {
        const params = {};
        if (this.prefix) {
            params.gte = this.prefix;
            params.lt = inc(this.prefix);
        }
        if (this.marker) {
            if (params.gte && params.gte > this.marker) {
                return params;
            }
            delete params.gte;
            params.gt = this.marker;
        }
        return params;
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys() {
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 -> IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return true;
        }
        return false;
    }

    /**
     * Add a (key, value) tuple to the listing
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {String} key - The key to add
     * @param {String} value - The value of the key
     * @return {number} - indicates if iteration should continue
     */
    addContents(key, value) {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.Contents.push({ key, value });
        this.NextMarker = key;
        ++this.keys;
        return FILTER_ACCEPT;
    }

    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj) {
        const key = obj.key;
        const value = obj.value;
        if ((this.prefix && !key.startsWith(this.prefix))
            || (this.alphabeticalOrder
                && typeof this.NextMarker === 'string'
                && key <= this.NextMarker)) {
            return FILTER_SKIP;
        }
        if (this.delimiter) {
            const baseIndex = this.prefix ? this.prefix.length : 0;
            const delimiterIndex = nextDelimiter(key,
                this.delimiter,
                baseIndex);
            if (delimiterIndex === -1) {
                return this.addContents(key, value);
            }
            return this.addCommonPrefix(key, delimiterIndex);
        }
        return this.addContents(key, value);
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} key - object name
     * @param {Number} index - after prefix starting point
     * @return {Boolean} - indicates if iteration should continue
     */
    addCommonPrefix(key, index) {
        const commonPrefix = getCommonPrefix(key, this.delimiter, index);
        if (this.CommonPrefixes.indexOf(commonPrefix) === -1
                && this.NextMarker !== commonPrefix) {
            if (this._reachedMaxKeys()) {
                return FILTER_END;
            }
            this.CommonPrefixes.push(commonPrefix);
            this.NextMarker = commonPrefix;
            ++this.keys;
            return FILTER_ACCEPT;
        }
        return FILTER_SKIP;
    }

    /**
     * If repd happens to want to skip listing, here is an idea.
     *
     * @return {string} - the present range (NextMarker) if repd believes
     *                    that it's enough and should move on
     */
    skipping() {
        return this.NextMarker;
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done; doesn't show a NextMarker field if the output
     * isn't truncated
     * @return {Object} - following amazon format
     */
    result() {
        /* NextMarker is only provided when delimiter is used.
         * specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        return {
            CommonPrefixes: this.CommonPrefixes,
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
            NextMarker: (this.IsTruncated && this.delimiter)
                ? this.NextMarker
                : undefined,
            Delimiter: this.delimiter,
        };
    }
}

module.exports = { Delimiter };

@@ -0,0 +1,356 @@
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;
const { inc, listingParamsMasterKeysV0ToV1,
    FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

export interface FilterState {
    id: number,
};

export interface FilterReturnValue {
    FILTER_ACCEPT,
    FILTER_SKIP,
    FILTER_END,
};

export const enum DelimiterFilterStateId {
    NotSkipping = 1,
    SkippingPrefix = 2,
};

export interface DelimiterFilterState_NotSkipping extends FilterState {
    id: DelimiterFilterStateId.NotSkipping,
};

export interface DelimiterFilterState_SkippingPrefix extends FilterState {
    id: DelimiterFilterStateId.SkippingPrefix,
    prefix: string;
};

type KeyHandler = (key: string, value: string) => FilterReturnValue;

export type ResultObject = {
    CommonPrefixes: string[];
    Contents: {
        key: string;
        value: string;
    }[];
    IsTruncated: boolean;
    Delimiter?: string;
    NextMarker?: string;
    NextContinuationToken?: string;
};

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
export class Delimiter extends Extension {

    state: FilterState;
    keyHandlers: { [id: number]: KeyHandler };

    /**
     * Create a new Delimiter instance
     * @constructor
     * @param {Object} parameters - listing parameters
     * @param {String} [parameters.delimiter] - delimiter per amazon format
     * @param {String} [parameters.prefix] - prefix per amazon format
     * @param {String} [parameters.marker] - marker per amazon format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     * @param {Boolean} [parameters.v2] - indicates whether the v2 listing
     *                                    format is used
     * @param {String} [parameters.startAfter] - marker per amazon format
     * @param {String} [parameters.continuationToken] - obfuscated amazon token
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger);
        // original listing parameters
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.maxKeys = parameters.maxKeys || 1000;

        if (parameters.v2) {
            this.marker = parameters.continuationToken || parameters.startAfter;
        } else {
            this.marker = parameters.marker;
        }
        this.nextMarker = this.marker;

        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // results
        this.CommonPrefixes = [];
        this.Contents = [];
        this.IsTruncated = false;
        this.keyHandlers = {};

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);

        // if there is a delimiter, we may skip ranges by prefix,
        // hence using the NotSkippingPrefix flavor that checks the
        // subprefix up to the delimiter for the NotSkipping state
        if (this.delimiter) {
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefix.bind(this));
        } else {
            // listing without a delimiter never has to skip over any
            // prefix -> use the NeverSkipping flavor for the
            // NotSkipping state
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NeverSkipping.bind(this));
        }
        this.setKeyHandler(
            DelimiterFilterStateId.SkippingPrefix,
            this.keyHandler_SkippingPrefix.bind(this));

        this.state = <DelimiterFilterState_NotSkipping> {
            id: DelimiterFilterStateId.NotSkipping,
        };
    }

    genMDParamsV0() {
        const params: { gt?: string, gte?: string, lt?: string } = {};
        if (this.prefix) {
            params.gte = this.prefix;
            params.lt = inc(this.prefix);
        }
        if (this.marker && this.delimiter) {
            const commonPrefix = this.getCommonPrefix(this.marker);
            if (commonPrefix) {
                const afterPrefix = inc(commonPrefix);
                if (!params.gte || afterPrefix > params.gte) {
                    params.gte = afterPrefix;
                }
            }
        }
        if (this.marker && (!params.gte || this.marker >= params.gte)) {
            delete params.gte;
            params.gt = this.marker;
        }
        return params;
    }

    genMDParamsV1() {
        const params = this.genMDParamsV0();
        return listingParamsMasterKeysV0ToV1(params);
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 -> IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return true;
        }
        return false;
    }

    /**
     * Add a (key, value) tuple to the listing
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {String} key - The key to add
     * @param {String} value - The value of the key
     * @return {number} - indicates if iteration should continue
     */
    addContents(key: string, value: string): void {
        this.Contents.push({ key, value: this.trimMetadata(value) });
        ++this.keys;
        this.nextMarker = key;
    }

    getCommonPrefix(key: string): string | undefined {
        if (!this.delimiter) {
            return undefined;
        }
        const baseIndex = this.prefix ? this.prefix.length : 0;
        const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
        if (delimiterIndex === -1) {
            return undefined;
        }
        return key.substring(0, delimiterIndex + this.delimiter.length);
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} commonPrefix - common prefix to add
     * @param {String} key - full key starting with commonPrefix
     * @return {Boolean} - indicates if iteration should continue
     */
    addCommonPrefix(commonPrefix: string, key: string): void {
        // add the new prefix to the list
        this.CommonPrefixes.push(commonPrefix);
        ++this.keys;
        this.nextMarker = commonPrefix;
    }

    addCommonPrefixOrContents(key: string, value: string): string | undefined {
        // add the subprefix to the common prefixes if the key has the delimiter
        const commonPrefix = this.getCommonPrefix(key);
        if (commonPrefix) {
            this.addCommonPrefix(commonPrefix, key);
            return commonPrefix;
        }
        this.addContents(key, value);
        return undefined;
    }

    getObjectKeyV0(obj: { key: string }): string {
        return obj.key;
    }

    getObjectKeyV1(obj: { key: string }): string {
        return obj.key.slice(DbPrefixes.Master.length);
    }

    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj: { key: string, value: string }): FilterReturnValue {
        const key = this.getObjectKey(obj);
        const value = obj.value;

        return this.handleKey(key, value);
    }

    setState(state: FilterState): void {
        this.state = state;
    }

    setKeyHandler(stateId: number, keyHandler: KeyHandler): void {
        this.keyHandlers[stateId] = keyHandler;
    }

    handleKey(key: string, value: string): FilterReturnValue {
        return this.keyHandlers[this.state.id](key, value);
    }

    keyHandler_NeverSkipping(key: string, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.addContents(key, value);
        return FILTER_ACCEPT;
    }

    keyHandler_NotSkippingPrefix(key: string, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        const commonPrefix = this.addCommonPrefixOrContents(key, value);
        if (commonPrefix) {
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
        }
        return FILTER_ACCEPT;
    }

    keyHandler_SkippingPrefix(key: string, value: string): FilterReturnValue {
        const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
        if (key.startsWith(prefix)) {
            return FILTER_SKIP;
        }
        this.setState(<DelimiterFilterState_NotSkipping> {
            id: DelimiterFilterStateId.NotSkipping,
        });
        return this.handleKey(key, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterFilterStateId.SkippingPrefix:
            const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
            return inc(prefix);

        default:
            return SKIP_NONE;
        }
    }

    skippingV0() {
        return this.skippingBase();
    }

    skippingV1() {
        const skipTo = this.skippingBase();
        if (skipTo === SKIP_NONE) {
            return SKIP_NONE;
        }
        return DbPrefixes.Master + skipTo;
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done; doesn't show a NextMarker field if the output
     * isn't truncated
     * @return {Object} - following amazon format
     */
    result(): ResultObject {
        /* NextMarker is only provided when delimiter is used.
         * specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        const result: ResultObject = {
            CommonPrefixes: this.CommonPrefixes,
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
            Delimiter: this.delimiter,
        };
        if (this.parameters.v2) {
            result.NextContinuationToken = this.IsTruncated
                ? this.nextMarker : undefined;
        } else {
            result.NextMarker = (this.IsTruncated && this.delimiter)
                ? this.nextMarker : undefined;
        }
        return result;
    }
}
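
// Illustrative sketch (not part of the file): the two-state machine above in
// action on a v0-format listing with delimiter '/'; keys and values are
// hypothetical:
//
//     const listing = new Delimiter({ delimiter: '/' }, logger, 'v0');
//     listing.filter({ key: 'photos/2021/a.jpg', value: '{}' });
//     // -> FILTER_ACCEPT: 'photos/' is added as a CommonPrefix and the
//     //    state transitions to SkippingPrefix
//     listing.filter({ key: 'photos/2021/b.jpg', value: '{}' });
//     // -> FILTER_SKIP: still under 'photos/'; skipping() returns
//     //    inc('photos/') so the caller may seek past the whole prefix
//     listing.filter({ key: 'readme.txt', value: '{}' });
//     // -> FILTER_ACCEPT: back to NotSkipping, added to Contents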
|
|
@@ -0,0 +1,127 @@
const { DelimiterMaster } = require('./delimiterMaster');
const { FILTER_ACCEPT, FILTER_END } = require('./tools');

type ResultObject = {
    Contents: {
        key: string;
        value: string;
    }[];
    IsTruncated: boolean;
    NextMarker ?: string;
};

/**
 * Handle object listing with parameters. This extends the base class DelimiterMaster
 * to return the master/current versions.
 */
class DelimiterCurrent extends DelimiterMaster {
    /**
     * Delimiter listing of current versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
     * @param {String} parameters.excludedDataStoreName - excluded datastore name
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        this.beforeDate = parameters.beforeDate;
        this.excludedDataStoreName = parameters.excludedDataStoreName;
        this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;
        this.scannedKeys = 0;
    }

    genMDParamsV0() {
        const params = super.genMDParamsV0();
        // The lastModified and dataStoreName parameters are used by metadata
        // backends that support built-in filtering, a feature currently
        // exclusive to MongoDB.
        if (this.beforeDate) {
            params.lastModified = {
                lt: this.beforeDate,
            };
        }

        if (this.excludedDataStoreName) {
            params.dataStoreName = {
                ne: this.excludedDataStoreName,
            };
        }

        return params;
    }

    /**
     * Parses the stringified entry's value.
     * @param s - stringified value
     * @return - undefined if parsing fails, otherwise the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
        } catch (e: any) {
            this.logger.warn(
                'Could not parse Object Metadata while listing',
                { err: e.toString() });
        }
        return p;
    }

    /**
     * Check whether the max keys count has been reached and set the
     * final state of the result if that is the case.
     *
     * Specialized implementation for DelimiterCurrent that also checks
     * the number of scanned keys.
     *
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return true;
        }
        return super._reachedMaxKeys();
    }

    addContents(key, value) {
        ++this.scannedKeys;
        const parsedValue = this._parse(value);
        // if parsing fails, skip the key.
        if (parsedValue) {
            const lastModified = parsedValue['last-modified'];
            const dataStoreName = parsedValue.dataStoreName;
            // Push the version only if it is older than the "beforeDate" and if
            // "excludedDataStoreName" is either not specified or differs from
            // the version's data store name.
            if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) &&
                (!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName)) {
                super.addContents(key, value);
            }
            // In the event of a timeout occurring before any content is added,
            // NextMarker is updated even if the object is not eligible.
            // This minimizes the amount of data that the client needs to re-process if the request times out.
            this.nextMarker = key;
        }
    }

    result(): object {
        const result: ResultObject = {
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
        };

        if (this.IsTruncated) {
            result.NextMarker = this.nextMarker;
        }

        return result;
    }
}
module.exports = { DelimiterCurrent };
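// A minimal usage sketch of DelimiterCurrent, assuming a caller that feeds
// raw metadata entries into the listing extension the way the metadata
// backends do; `entriesFromBackend` and the parameter values are
// hypothetical, only the class and FILTER_END come from the diff above.
//
// const { DelimiterCurrent } = require('./delimiterCurrent');
// const { FILTER_END } = require('./tools');
//
// const listing = new DelimiterCurrent({
//     beforeDate: '2024-01-01T00:00:00.000Z',
//     excludedDataStoreName: 'cold-location', // hypothetical name
//     maxScannedLifecycleListingEntries: 10000,
// }, logger);
// const params = listing.genMDParamsV0(); // range/filter params for the backend
// for (const entry of entriesFromBackend(params)) { // hypothetical iterator
//     if (listing.filter(entry) === FILTER_END) {
//         break; // max keys or max scanned entries reached
//     }
// }
// const { Contents, IsTruncated, NextMarker } = listing.result();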
@@ -1,94 +0,0 @@
'use strict'; // eslint-disable-line strict

const Delimiter = require('./delimiter').Delimiter;
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');

const VID_SEP = VSConst.VersionId.Separator;

/**
 * Handle object listing with parameters. This extends the base class Delimiter
 * to return the raw master versions of existing objects.
 */
class DelimiterMaster extends Delimiter {
    /**
     * Delimiter listing of master versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.delimiter - delimiter per amazon format
     * @param {String} parameters.prefix - prefix per amazon format
     * @param {String} parameters.marker - marker per amazon format
     * @param {Number} parameters.maxKeys - number of keys to list
     */
    constructor(parameters) {
        super(parameters);
        this.prvPHDKey = undefined;
    }

    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj) {
        let key = obj.key;
        const value = obj.value;
        if ((this.prefix && !key.startsWith(this.prefix))
            || (typeof this.NextMarker === 'string' &&
                key <= this.NextMarker)) {
            return FILTER_SKIP;
        }
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex >= 0) {
            // generally we do not accept a specific version,
            // we only do when the master version is a PHD version
            key = key.slice(0, versionIdIndex);
            if (key !== this.prvPHDKey) {
                return FILTER_ACCEPT; // trick repd to not increase its streak
            }
        }
        if (Version.isPHD(value)) {
            // master version is a PHD version: wait for the next version
            this.prvPHDKey = key;
            return FILTER_ACCEPT; // trick repd to not increase its streak
        }
        if (Version.isDeleteMarker(value)) {
            // version is a delete marker, ignore
            return FILTER_ACCEPT; // trick repd to not increase its streak
        }
        // non-PHD master version or a version whose master is a PHD version
        this.prvPHDKey = undefined;
        if (this.delimiter) {
            // check if the key has the delimiter
            const baseIndex = this.prefix ? this.prefix.length : 0;
            const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
            if (delimiterIndex >= 0) {
                // try to add the prefix to the list
                return this.addCommonPrefix(key, delimiterIndex);
            }
        }
        return this.addContents(key, value);
    }

    skipping() {
        if (this.NextMarker) {
            // next marker:
            // - foo/ : skipping foo/
            // - foo  : skipping foo.
            const index = this.NextMarker.lastIndexOf(this.delimiter);
            if (index === this.NextMarker.length - 1) {
                return this.NextMarker;
            }
            return this.NextMarker + VID_SEP;
        }
        return SKIP_NONE;
    }
}

module.exports = { DelimiterMaster };
@@ -0,0 +1,620 @@
import {
    Delimiter,
    FilterState,
    FilterReturnValue,
    DelimiterFilterStateId,
    DelimiterFilterState_NotSkipping,
    DelimiterFilterState_SkippingPrefix,
    ResultObject,
} from './delimiter';
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, FILTER_END, SKIP_NONE, inc } = require('./tools');

import { GapSetEntry } from '../cache/GapSet';
import { GapCacheInterface } from '../cache/GapCache';

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst;

export const enum DelimiterMasterFilterStateId {
    SkippingVersionsV0 = 101,
    WaitVersionAfterPHDV0 = 102,
    SkippingGapV0 = 103,
};

interface DelimiterMasterFilterState_SkippingVersionsV0 extends FilterState {
    id: DelimiterMasterFilterStateId.SkippingVersionsV0,
    masterKey: string,
};

interface DelimiterMasterFilterState_WaitVersionAfterPHDV0 extends FilterState {
    id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
    masterKey: string,
};

interface DelimiterMasterFilterState_SkippingGapV0 extends FilterState {
    id: DelimiterMasterFilterStateId.SkippingGapV0,
};

export const enum GapCachingState {
    NoGapCache = 0, // there is no gap cache
    UnknownGap = 1, // waiting for a cache lookup
    GapLookupInProgress = 2, // asynchronous gap lookup in progress
    GapCached = 3, // an upcoming or already skippable gap is cached
    NoMoreGap = 4, // the cache doesn't have any more gaps inside the listed range
};

type GapCachingInfo_NoGapCache = {
    state: GapCachingState.NoGapCache;
};

type GapCachingInfo_NoCachedGap = {
    state: GapCachingState.UnknownGap
        | GapCachingState.GapLookupInProgress;
    gapCache: GapCacheInterface;
};

type GapCachingInfo_GapCached = {
    state: GapCachingState.GapCached;
    gapCache: GapCacheInterface;
    gapCached: GapSetEntry;
};

type GapCachingInfo_NoMoreGap = {
    state: GapCachingState.NoMoreGap;
};

type GapCachingInfo = GapCachingInfo_NoGapCache
    | GapCachingInfo_NoCachedGap
    | GapCachingInfo_GapCached
    | GapCachingInfo_NoMoreGap;


export const enum GapBuildingState {
    Disabled = 0, // no gap cache or no gap building needed (e.g. in V1 versioning format)
    NotBuilding = 1, // not currently building a gap (i.e. not listing within a gap)
    Building = 2, // currently building a gap (i.e. listing within a gap)
    Expired = 3, // not allowed to build due to exposure delay timeout
};

type GapBuildingInfo_NothingToBuild = {
    state: GapBuildingState.Disabled | GapBuildingState.Expired;
};

type GapBuildingParams = {
    /**
     * minimum weight for a gap to be created in the cache
     */
    minGapWeight: number;
    /**
     * trigger a cache setGap() call every N skippable keys
     */
    triggerSaveGapWeight: number;
    /**
     * timestamp to assess whether we're still inside the validity period to
     * be allowed to build gaps
     */
    initTimestamp: number;
};

type GapBuildingInfo_NotBuilding = {
    state: GapBuildingState.NotBuilding;
    gapCache: GapCacheInterface;
    params: GapBuildingParams;
};

type GapBuildingInfo_Building = {
    state: GapBuildingState.Building;
    gapCache: GapCacheInterface;
    params: GapBuildingParams;
    /**
     * Gap currently being created
     */
    gap: GapSetEntry;
    /**
     * total current weight of the gap being created
     */
    gapWeight: number;
};

type GapBuildingInfo = GapBuildingInfo_NothingToBuild
    | GapBuildingInfo_NotBuilding
    | GapBuildingInfo_Building;

/**
 * Handle object listing with parameters. This extends the base class Delimiter
 * to return the raw master versions of existing objects.
 */
export class DelimiterMaster extends Delimiter {

    _gapCaching: GapCachingInfo;
    _gapBuilding: GapBuildingInfo;
    _refreshedBuildingParams: GapBuildingParams | null;

    /**
     * Delimiter listing of master versions.
     * @param {Object} parameters - listing parameters
     * @param {String} [parameters.delimiter] - delimiter per amazon format
     * @param {String} [parameters.prefix] - prefix per amazon format
     * @param {String} [parameters.marker] - marker per amazon format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     * @param {Boolean} [parameters.v2] - indicates whether v2 format
     * @param {String} [parameters.startAfter] - marker per amazon v2 format
     * @param {String} [parameters.continuationToken] - obfuscated amazon token
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat="v0"] - versioning key format
     */
    constructor(parameters, logger, vFormat?: string) {
        super(parameters, logger, vFormat);

        if (this.vFormat === BucketVersioningKeyFormat.v0) {
            // override Delimiter's implementation of NotSkipping for
            // DelimiterMaster logic (skipping versions and special
            // handling of delete markers and PHDs)
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefixNorVersionsV0.bind(this));

            // add extra state handlers specific to DelimiterMaster with v0 format
            this.setKeyHandler(
                DelimiterMasterFilterStateId.SkippingVersionsV0,
                this.keyHandler_SkippingVersionsV0.bind(this));

            this.setKeyHandler(
                DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
                this.keyHandler_WaitVersionAfterPHDV0.bind(this));

            this.setKeyHandler(
                DelimiterMasterFilterStateId.SkippingGapV0,
                this.keyHandler_SkippingGapV0.bind(this));

            if (this.marker) {
                // distinct initial state to include some special logic
                // before the first master key is found that does not have
                // to be checked afterwards
                this.state = <DelimiterMasterFilterState_SkippingVersionsV0> {
                    id: DelimiterMasterFilterStateId.SkippingVersionsV0,
                    masterKey: this.marker,
                };
            } else {
                this.state = <DelimiterFilterState_NotSkipping> {
                    id: DelimiterFilterStateId.NotSkipping,
                };
            }
        } else {
            // save the base implementation of the `NotSkipping` state in
            // Delimiter before overriding it with ours, to be able to call it from there
            this.keyHandler_NotSkipping_Delimiter = this.keyHandlers[DelimiterFilterStateId.NotSkipping];
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefixNorVersionsV1.bind(this));
        }
        // in v1, we can directly use Delimiter's implementation,
        // which is already set to the proper state

        // default initialization of the gap cache and building states, can be
        // set by refreshGapCache()
        this._gapCaching = {
            state: GapCachingState.NoGapCache,
        };
        this._gapBuilding = {
            state: GapBuildingState.Disabled,
        };
        this._refreshedBuildingParams = null;
    }
    /**
     * Get the validity period left before a refresh of the gap cache is needed
     * to continue building new gaps.
     *
     * @return {number|null} one of:
     * - the remaining time in milliseconds in which gaps can be added to the
     *   cache before a call to refreshGapCache() is required
     * - or 0 if there is no time left and a call to refreshGapCache() is required
     *   to resume caching gaps
     * - or null if refreshing the cache is never needed (because the gap cache
     *   is either not available or not used)
     */
    getGapBuildingValidityPeriodMs(): number | null {
        let gapBuilding;
        switch (this._gapBuilding.state) {
        case GapBuildingState.Disabled:
            return null;
        case GapBuildingState.Expired:
            return 0;
        case GapBuildingState.NotBuilding:
            gapBuilding = <GapBuildingInfo_NotBuilding> this._gapBuilding;
            break;
        case GapBuildingState.Building:
            gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
            break;
        }
        const { gapCache, params } = gapBuilding;
        const elapsedTime = Date.now() - params.initTimestamp;
        return Math.max(gapCache.exposureDelayMs - elapsedTime, 0);
    }
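    // A minimal sketch of how a caller might interpret the value returned
    // above; the surrounding control flow is hypothetical, only the method
    // itself comes from the diff.
    //
    // const msLeft = listing.getGapBuildingValidityPeriodMs();
    // if (msLeft === 0) {
    //     // exposure delay elapsed: restart the listing from the current
    //     // key and call refreshGapCache() to resume building gaps
    // } else if (msLeft !== null) {
    //     // still inside the validity period: gaps can keep being built
    // } // null means no gap cache is in use, so there is nothing to refresh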
    /**
     * Refresh the gap caching logic (gaps are series of current delete markers
     * in V0 bucket metadata format). It has two effects:
     *
     * - starts exposing existing and future gaps from the cache to efficiently
     *   skip over series of current delete markers that have been seen and cached
     *   earlier
     *
     * - enables building and caching new gaps (or extending existing ones), for a
     *   limited time period defined by the `gapCacheProxy.exposureDelayMs` value
     *   in milliseconds. To refresh the validity period and resume building and
     *   caching new gaps, one must restart a new listing from the database (starting
     *   at the current listing key, included), then call refreshGapCache() again.
     *
     * @param {GapCacheInterface} gapCacheProxy - API proxy to the gaps cache
     * (the proxy should handle prefixing object keys with the bucket name)
     * @param {number} [minGapWeight=100] - minimum weight of a gap for it to be
     * added to the cache
     * @param {number} [triggerSaveGapWeight] - cumulative weight to wait for
     * before saving the current building gap. Cannot be greater than
     * `gapCacheProxy.maxGapWeight` (the value is thresholded to `maxGapWeight`
     * otherwise). Defaults to `gapCacheProxy.maxGapWeight / 2`.
     * @return {undefined}
     */
    refreshGapCache(
        gapCacheProxy: GapCacheInterface,
        minGapWeight?: number,
        triggerSaveGapWeight?: number
    ): void {
        if (this.vFormat !== BucketVersioningKeyFormat.v0) {
            return;
        }
        if (this._gapCaching.state === GapCachingState.NoGapCache) {
            this._gapCaching = {
                state: GapCachingState.UnknownGap,
                gapCache: gapCacheProxy,
            };
        }
        const refreshedBuildingParams: GapBuildingParams = {
            minGapWeight: minGapWeight || 100,
            triggerSaveGapWeight: triggerSaveGapWeight
                || Math.trunc(gapCacheProxy.maxGapWeight / 2),
            initTimestamp: Date.now(),
        };
        if (this._gapBuilding.state === GapBuildingState.Building) {
            // refreshed params will be applied as soon as the current building gap is saved
            this._refreshedBuildingParams = refreshedBuildingParams;
        } else {
            this._gapBuilding = {
                state: GapBuildingState.NotBuilding,
                gapCache: gapCacheProxy,
                params: refreshedBuildingParams,
            };
        }
    }
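    // A minimal sketch of attaching a gap cache to a listing; `gapCacheProxy`
    // stands for any object implementing GapCacheInterface (exposureDelayMs,
    // maxGapWeight, lookupGap, setGap) and is hypothetical here, as is the
    // driving loop.
    //
    // const listing = new DelimiterMaster({ prefix: 'foo/' }, logger, 'v0');
    // listing.refreshGapCache(gapCacheProxy, 100 /* minGapWeight */);
    // // ...iterate database entries through listing.filter()...
    // // once getGapBuildingValidityPeriodMs() returns 0, restart the listing
    // // from the current key and call refreshGapCache() again to keep
    // // building gaps.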
    /**
     * Trigger a lookup of the closest upcoming or already skippable gap.
     *
     * @param {GapCachingInfo_NoCachedGap} gapCaching - current gap caching info
     * @param {string} fromKey - lookup a gap not before 'fromKey'
     * @return {undefined} - the lookup is asynchronous and its
     * response is handled inside this function
     */
    _triggerGapLookup(gapCaching: GapCachingInfo_NoCachedGap, fromKey: string): void {
        this._gapCaching = {
            state: GapCachingState.GapLookupInProgress,
            gapCache: gapCaching.gapCache,
        };
        const maxKey = this.prefix ? inc(this.prefix) : undefined;
        gapCaching.gapCache.lookupGap(fromKey, maxKey).then(_gap => {
            const gap = <GapSetEntry | null> _gap;
            if (gap) {
                this._gapCaching = {
                    state: GapCachingState.GapCached,
                    gapCache: gapCaching.gapCache,
                    gapCached: gap,
                };
            } else {
                this._gapCaching = {
                    state: GapCachingState.NoMoreGap,
                };
            }
        });
    }

    _checkGapOnMasterDeleteMarker(key: string): FilterReturnValue {
        switch (this._gapBuilding.state) {
        case GapBuildingState.Disabled:
        case GapBuildingState.Expired:
            break;
        case GapBuildingState.NotBuilding:
            this._createBuildingGap(key, 1);
            break;
        case GapBuildingState.Building:
            this._updateBuildingGap(key);
            break;
        }
        if (this._gapCaching.state === GapCachingState.GapCached) {
            const { gapCached } = this._gapCaching;
            if (key >= gapCached.firstKey) {
                if (key <= gapCached.lastKey) {
                    // we are inside the last looked up cached gap: transition to
                    // 'SkippingGapV0' state
                    this.setState(<DelimiterMasterFilterState_SkippingGapV0> {
                        id: DelimiterMasterFilterStateId.SkippingGapV0,
                    });
                    // cut the current gap before skipping, it will be merged or
                    // chained with the existing one (depending on its weight)
                    if (this._gapBuilding.state === GapBuildingState.Building) {
                        // subtract 1 from the weight because we are going to chain this gap,
                        // which has an overlap of one key.
                        this._gapBuilding.gap.weight -= 1;
                        this._cutBuildingGap();
                    }
                    return FILTER_SKIP;
                }
                // as we are past the cached gap, we will need another lookup
                this._gapCaching = {
                    state: GapCachingState.UnknownGap,
                    gapCache: this._gapCaching.gapCache,
                };
            }
        }
        if (this._gapCaching.state === GapCachingState.UnknownGap) {
            this._triggerGapLookup(this._gapCaching, key);
        }
        return FILTER_ACCEPT;
    }

    filter_onNewMasterKeyV0(key: string, value: string): FilterReturnValue {
        // if this master key is a delete marker, accept it without
        // adding the version to the contents
        if (Version.isDeleteMarker(value)) {
            // update the state to start skipping versions of the new master key
            this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
                id: DelimiterMasterFilterStateId.SkippingVersionsV0,
                masterKey: key,
            });
            return this._checkGapOnMasterDeleteMarker(key);
        }
        if (Version.isPHD(value)) {
            // master version is a PHD version: wait for the first
            // following version that will be considered as the actual
            // master key
            this.setState(<DelimiterMasterFilterState_WaitVersionAfterPHDV0> {
                id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
                masterKey: key,
            });
            return FILTER_ACCEPT;
        }
        // cut the current gap as soon as a non-deleted entry is seen
        this._cutBuildingGap();

        if (key.startsWith(DbPrefixes.Replay)) {
            // skip internal replay prefix entirely
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: DbPrefixes.Replay,
            });
            return FILTER_SKIP;
        }
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }

        const commonPrefix = this.addCommonPrefixOrContents(key, value);
        if (commonPrefix) {
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
            return FILTER_ACCEPT;
        }
        // update the state to start skipping versions of the new master key
        this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
            id: DelimiterMasterFilterStateId.SkippingVersionsV0,
            masterKey: key,
        });
        return FILTER_ACCEPT;
    }

    keyHandler_NotSkippingPrefixNorVersionsV0(key: string, value: string): FilterReturnValue {
        return this.filter_onNewMasterKeyV0(key, value);
    }

    filter_onNewMasterKeyV1(key: string, value: string): FilterReturnValue {
        // if this master key is a delete marker, accept it without
        // adding the version to the contents
        if (Version.isDeleteMarker(value)) {
            return FILTER_ACCEPT;
        }
        // use base Delimiter's implementation
        return this.keyHandler_NotSkipping_Delimiter(key, value);
    }

    keyHandler_NotSkippingPrefixNorVersionsV1(key: string, value: string): FilterReturnValue {
        return this.filter_onNewMasterKeyV1(key, value);
    }

    keyHandler_SkippingVersionsV0(key: string, value: string): FilterReturnValue {
        /* In the SkippingVersionsV0 state, skip all version keys
         * (<key><versionIdSeparator><version>) */
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex !== -1) {
            // version keys count in the building gap weight because they must
            // also be listed until skipped
            if (this._gapBuilding.state === GapBuildingState.Building) {
                this._updateBuildingGap(key);
            }
            return FILTER_SKIP;
        }
        return this.filter_onNewMasterKeyV0(key, value);
    }

    keyHandler_WaitVersionAfterPHDV0(key: string, value: string): FilterReturnValue {
        // After a PHD key is encountered, the next version key of the
        // same object, if it exists, is the new master key, hence
        // consider it as such and call 'onNewMasterKeyV0' (the test
        // 'masterKey === phdKey' is probably redundant when we already
        // know we have a versioned key, since all objects in v0 have
        // a master key, but it is kept out of caution)
        const { masterKey: phdKey } = <DelimiterMasterFilterState_WaitVersionAfterPHDV0> this.state;
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex !== -1) {
            const masterKey = key.slice(0, versionIdIndex);
            if (masterKey === phdKey) {
                return this.filter_onNewMasterKeyV0(masterKey, value);
            }
        }
        return this.filter_onNewMasterKeyV0(key, value);
    }

    keyHandler_SkippingGapV0(key: string, value: string): FilterReturnValue {
        const { gapCache, gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
        if (key <= gapCached.lastKey) {
            return FILTER_SKIP;
        }
        this._gapCaching = {
            state: GapCachingState.UnknownGap,
            gapCache,
        };
        this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
            id: DelimiterMasterFilterStateId.SkippingVersionsV0,
        });
        // Start a gap with weight=0 from the latest skippable key. This allows
        // the gap just skipped to be extended with a chained gap in case
        // other delete markers are seen after the existing gap is skipped.
        this._createBuildingGap(gapCached.lastKey, 0, gapCached.weight);

        return this.handleKey(key, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterMasterFilterStateId.SkippingVersionsV0:
            const { masterKey } = <DelimiterMasterFilterState_SkippingVersionsV0> this.state;
            return masterKey + inc(VID_SEP);

        case DelimiterMasterFilterStateId.SkippingGapV0:
            const { gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
            return gapCached.lastKey;

        default:
            return super.skippingBase();
        }
    }

    result(): ResultObject {
        this._cutBuildingGap();
        return super.result();
    }

    _checkRefreshedBuildingParams(params: GapBuildingParams): GapBuildingParams {
        if (this._refreshedBuildingParams) {
            const newParams = this._refreshedBuildingParams;
            this._refreshedBuildingParams = null;
            return newParams;
        }
        return params;
    }

    /**
     * Save the gap being built if allowed (i.e. still within the
     * allocated exposure time window).
     *
     * @return {boolean} - true if the gap was saved, false if we are
     * outside the allocated exposure time window.
     */
    _saveBuildingGap(): boolean {
        const { gapCache, params, gap, gapWeight } =
            <GapBuildingInfo_Building> this._gapBuilding;
        const totalElapsed = Date.now() - params.initTimestamp;
        if (totalElapsed >= gapCache.exposureDelayMs) {
            this._gapBuilding = {
                state: GapBuildingState.Expired,
            };
            this._refreshedBuildingParams = null;
            return false;
        }
        const { firstKey, lastKey, weight } = gap;
        gapCache.setGap(firstKey, lastKey, weight);
        this._gapBuilding = {
            state: GapBuildingState.Building,
            gapCache,
            params: this._checkRefreshedBuildingParams(params),
            gap: {
                firstKey: gap.lastKey,
                lastKey: gap.lastKey,
                weight: 0,
            },
            gapWeight,
        };
        return true;
    }

    /**
     * Create a new gap to be extended afterwards
     *
     * @param {string} newKey - gap's first key
     * @param {number} startWeight - initial weight of the building gap (usually 0 or 1)
     * @param {number} [cachedWeight] - if continuing a cached gap, weight of the existing
     * cached portion
     * @return {undefined}
     */
    _createBuildingGap(newKey: string, startWeight: number, cachedWeight?: number): void {
        if (this._gapBuilding.state === GapBuildingState.NotBuilding) {
            const { gapCache, params } = <GapBuildingInfo_NotBuilding> this._gapBuilding;
            this._gapBuilding = {
                state: GapBuildingState.Building,
                gapCache,
                params: this._checkRefreshedBuildingParams(params),
                gap: {
                    firstKey: newKey,
                    lastKey: newKey,
                    weight: startWeight,
                },
                gapWeight: (cachedWeight || 0) + startWeight,
            };
        }
    }

    _updateBuildingGap(newKey: string): void {
        const gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
        const { params, gap } = gapBuilding;
        gap.lastKey = newKey;
        gap.weight += 1;
        gapBuilding.gapWeight += 1;
        // the GapCache API requires updating a gap regularly because it can only split
        // it once per update, by the known last key. In practice the default behavior
        // is to trigger an update after a number of keys that is half the maximum weight.
        // It is also useful for other listings to benefit from the cache sooner.
        if (gapBuilding.gapWeight >= params.minGapWeight &&
            gap.weight >= params.triggerSaveGapWeight) {
            this._saveBuildingGap();
        }
    }

    _cutBuildingGap(): void {
        if (this._gapBuilding.state === GapBuildingState.Building) {
            let gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
            let { gapCache, params, gap, gapWeight } = gapBuilding;
            // only set gaps that are significant enough in weight and
            // with a non-empty extension
            if (gapWeight >= params.minGapWeight && gap.weight > 0) {
                // we're done if we were not allowed to save the gap
                if (!this._saveBuildingGap()) {
                    return;
                }
                // params may have been refreshed, reload them
                gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
                params = gapBuilding.params;
            }
            this._gapBuilding = {
                state: GapBuildingState.NotBuilding,
                gapCache,
                params,
            };
        }
    }
}
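// A minimal sketch of the intended gap life cycle in a v0 listing, under the
// assumption of a long run of current delete markers; the step numbers and
// values are illustrative, the method names come from the class above.
//
// 1. filter_onNewMasterKeyV0() sees a delete marker and calls
//    _checkGapOnMasterDeleteMarker(): with no cached gap, a building gap
//    starts at that key via _createBuildingGap(key, 1).
// 2. Each skipped version key and each further delete marker extends the
//    building gap through _updateBuildingGap(); once gapWeight reaches
//    minGapWeight and gap.weight reaches triggerSaveGapWeight, the chunk is
//    persisted with gapCache.setGap() and a new zero-weight chained gap
//    starts at the saved lastKey.
// 3. On a later listing, lookupGap() returns the cached entry, the state
//    machine enters SkippingGapV0, and skippingBase() lets the database
//    skip straight to gapCached.lastKey.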
@@ -0,0 +1,202 @@
const { DelimiterVersions } = require('./delimiterVersions');
const { FILTER_END, FILTER_SKIP } = require('./tools');

const TRIM_METADATA_MIN_BLOB_SIZE = 10000;

/**
 * Handle object listing with parameters. This extends the base class DelimiterVersions
 * to return the raw non-current versions of objects.
 */
class DelimiterNonCurrent extends DelimiterVersions {
    /**
     * Delimiter listing of non-current versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.keyMarker - key marker
     * @param {String} parameters.versionIdMarker - version id marker
     * @param {String} parameters.beforeDate - limit the response to keys with stale date older than beforeDate.
     * The "stale date" is the date when a version becomes non-current.
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {String} parameters.excludedDataStoreName - exclude versions whose dataStoreName matches this value
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        this.beforeDate = parameters.beforeDate;
        this.excludedDataStoreName = parameters.excludedDataStoreName;
        this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;

        // internal state
        this.prevKey = null;
        this.staleDate = null;

        this.scannedKeys = 0;
    }

    getLastModified(value) {
        let lastModified;
        try {
            const v = JSON.parse(value);
            lastModified = v['last-modified'];
        } catch (e) {
            this.logger.warn('could not parse Object Metadata while listing',
                {
                    method: 'getLastModified',
                    err: e.toString(),
                });
        }
        return lastModified;
    }

    // Overwrite keyHandler_SkippingVersions to include the last version from the previous listing.
    // The creation (last-modified) date of this version will be the stale date for the following version.
    // eslint-disable-next-line camelcase
    keyHandler_SkippingVersions(key, versionId, value) {
        if (key === this.keyMarker) {
            // since the nonversioned key equals the marker, there is
            // necessarily a versionId in this key
            const _versionId = versionId;
            if (_versionId < this.versionIdMarker) {
                // skip all versions until marker
                return FILTER_SKIP;
            }
        }
        this.setState({
            id: 1 /* NotSkipping */,
        });
        return this.handleKey(key, versionId, value);
    }

    filter(obj) {
        if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return FILTER_END;
        }
        ++this.scannedKeys;
        return super.filter(obj);
    }

    /**
     * NOTE: Versions of a given key are sorted from the latest to the oldest
     * thanks to the way version ids are generated.
     * DESCRIPTION: Skip the version if it represents the master key, but keep its last-modified date in memory,
     * which will be the stale date of the following version.
     * The following version is pushed only:
     * - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
     * - if "beforeDate" is not specified, or the "stale date" is older than it,
     * - if "excludedDataStoreName" is not specified, or the data store name differs from it.
     * The in-memory "stale date" is then updated with the version's last-modified date to be used for
     * the following version.
     * The process stops and returns the available results if either:
     * - no more metadata keys are left to be processed
     * - the listing reaches the maximum number of keys to be returned
     * - the internal timeout is reached
     * @param {String} key - The key to add
     * @param {String} versionId - The version id
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addVersion(key, versionId, value) {
        this.nextKeyMarker = key;
        this.nextVersionIdMarker = versionId;

        // Skip the version if it represents the current version, but keep its last-modified date,
        // which will be the stale date of the following version.
        const isCurrentVersion = key !== this.prevKey;
        if (isCurrentVersion) {
            this.staleDate = this.getLastModified(value);
            this.prevKey = key;
            return;
        }

        // The following version is pushed only:
        // - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
        // - if "beforeDate" is not specified, or the "stale date" is older than it,
        // - if "excludedDataStoreName" is not specified, or the data store name differs from it.
        let lastModified;
        if (this.staleDate && (!this.beforeDate || this.staleDate < this.beforeDate)) {
            const parsedValue = this._parse(value);
            // if parsing fails, skip the key.
            if (parsedValue) {
                const dataStoreName = parsedValue.dataStoreName;
                lastModified = parsedValue['last-modified'];
                if (!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName) {
                    const s = this._stringify(parsedValue, this.staleDate);
                    // check that _stringify succeeds to only push objects with a defined staleDate.
                    if (s) {
                        this.Versions.push({ key, value: s });
                        ++this.keys;
                    }
                }
            }
        }

        // The in-memory "stale date" is then updated with the version's last-modified date to be used for
        // the following version.
        this.staleDate = lastModified || this.getLastModified(value);

        return;
    }

    /**
     * Parses the stringified entry's value and removes the location property if it is too large.
     * @param {string} s - stringified value
     * @return {object} p - undefined if parsing fails, otherwise the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete p.location;
            }
        } catch (e) {
            this.logger.warn('Could not parse Object Metadata while listing', {
                method: 'DelimiterNonCurrent._parse',
                err: e.toString(),
            });
        }
        return p;
    }

    _stringify(parsedMD, staleDate) {
        const p = parsedMD;
        let s = undefined;
        p.staleDate = staleDate;
        try {
            s = JSON.stringify(p);
        } catch (e) {
            this.logger.warn('could not stringify Object Metadata while listing', {
                method: 'DelimiterNonCurrent._stringify',
                err: e.toString(),
            });
        }
        return s;
    }

    result() {
        const { Versions, IsTruncated, NextKeyMarker, NextVersionIdMarker } = super.result();

        const result = {
            Contents: Versions,
            IsTruncated,
        };

        if (NextKeyMarker) {
            result.NextKeyMarker = NextKeyMarker;
        }

        if (NextVersionIdMarker) {
            result.NextVersionIdMarker = NextVersionIdMarker;
        }

        return result;
    }
}
module.exports = { DelimiterNonCurrent };
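// A small worked example of the stale date logic above, with three versions
// of a key 'obj' sorted newest first; all dates are illustrative:
//
//   v3 (current, last-modified 2024-03-01)  -> skipped, staleDate := 2024-03-01
//   v2 (last-modified 2024-02-01)           -> candidate with staleDate
//                                              2024-03-01 (the date v2 became
//                                              non-current), then
//                                              staleDate := 2024-02-01
//   v1 (last-modified 2024-01-01)           -> candidate with staleDate 2024-02-01
//
// With beforeDate = 2024-02-15, only v1 is returned, since v2's stale date
// (2024-03-01) is not older than beforeDate.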
@@ -0,0 +1,204 @@
const DelimiterVersions = require('./delimiterVersions').DelimiterVersions;
const { FILTER_END } = require('./tools');
const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
/**
 * Handle object listing with parameters. This extends the base class DelimiterVersions
 * to return the orphan delete markers. Orphan delete markers are also
 * referred to as expired object delete markers.
 * They are delete markers with zero noncurrent versions.
 */
class DelimiterOrphanDeleteMarker extends DelimiterVersions {
    /**
     * Delimiter listing of orphan delete markers.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        const {
            marker,
            maxKeys,
            prefix,
            beforeDate,
            maxScannedLifecycleListingEntries,
        } = parameters;

        const versionParams = {
            // The orphan delete marker logic uses the term 'marker' instead of 'keyMarker',
            // as the latter could suggest the presence of a 'versionIdMarker'.
            keyMarker: marker,
            maxKeys,
            prefix,
        };
        super(versionParams, logger, vFormat);

        this.maxScannedLifecycleListingEntries = maxScannedLifecycleListingEntries;
        this.beforeDate = beforeDate;
        // this.prevKeyName is used as a marker for the next listing when the current one reaches its entry limit.
        // We cannot rely on this.keyName, as it contains the name of the current key.
        // In the event of a listing interruption due to reaching the maximum scanned entries,
        // relying on this.keyName would cause the next listing to skip the current key because S3 starts
        // listing after the marker.
        this.prevKeyName = null;
        this.keyName = null;
        this.value = null;
        this.scannedKeys = 0;
    }

    _reachedMaxKeys() {
        if (this.keys >= this.maxKeys) {
            return true;
        }
        return false;
    }

    _addOrphan() {
        const parsedValue = this._parse(this.value);
        // if parsing fails, skip the key.
        if (parsedValue) {
            const lastModified = parsedValue['last-modified'];
            const isDeleteMarker = parsedValue.isDeleteMarker;
            // We then check if the orphan version is a delete marker and if it is older than the "beforeDate"
            if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) && isDeleteMarker) {
                // Prefer returning untrimmed data rather than stopping the service in case of a stringify failure.
                const s = this._stringify(parsedValue) || this.value;
                this.Versions.push({ key: this.keyName, value: s });
                this.nextKeyMarker = this.keyName;
                ++this.keys;
            }
        }
    }

    /**
     * Parses the stringified entry's value and removes the location property if it is too large.
     * @param {string} s - stringified value
     * @return {object} p - undefined if parsing fails, otherwise the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete p.location;
            }
        } catch (e) {
            this.logger.warn('Could not parse Object Metadata while listing', {
                method: 'DelimiterOrphanDeleteMarker._parse',
                err: e.toString(),
            });
        }
        return p;
    }

    _stringify(value) {
        const p = value;
        let s = undefined;
        try {
            s = JSON.stringify(p);
        } catch (e) {
            this.logger.warn('could not stringify Object Metadata while listing',
                {
                    method: 'DelimiterOrphanDeleteMarker._stringify',
                    err: e.toString(),
                });
        }
        return s;
    }
    /**
     * The purpose of _isMaxScannedEntriesReached is to restrict the number of scanned entries,
     * thus controlling resource overhead (CPU...).
     * @return {boolean} isMaxScannedEntriesReached - true if the maximum limit on the number
     * of entries scanned has been reached, false otherwise.
     */
    _isMaxScannedEntriesReached() {
        return this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries;
    }

    filter(obj) {
        if (this._isMaxScannedEntriesReached()) {
            this.nextKeyMarker = this.prevKeyName;
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return FILTER_END;
        }
        ++this.scannedKeys;
        return super.filter(obj);
    }

    /**
     * NOTE: Versions of a given key are sorted from the latest to the oldest
     * thanks to the way version ids are generated.
     * DESCRIPTION: For a given key, the latest version is kept in memory since it is the current version.
     * If the following version references a new key, it means that the previous one was an orphan version.
     * We then check if the orphan version is a delete marker and if it is older than the "beforeDate".
     * The process stops and returns the available results if either:
     * - no more metadata keys are left to be processed
     * - the listing reaches the maximum number of keys to be returned
     * - the internal timeout is reached
     * NOTE: we cannot leverage MongoDB to list keys older than "beforeDate",
     * because we would then not be able to assess their orphanage.
     * @param {String} key - The object key.
     * @param {String} versionId - The object version id.
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addVersion(key, versionId, value) {
        // For a given key, the youngest version is kept in memory since it represents the current version.
        if (key !== this.keyName) {
            // If this.value is defined, it means that the <this.keyName, this.value> pair is "allowed" to be an orphan.
            if (this.value) {
                this._addOrphan();
            }
            this.prevKeyName = this.keyName;
            this.keyName = key;
            this.value = value;

            return;
        }

        // If the key is not the current version, we can skip it in the next listing
        // in the case where the current listing is interrupted due to reaching the maximum scanned entries.
        this.prevKeyName = key;
        this.keyName = key;
        this.value = null;

        return;
    }

    result() {
        // Only check for the remaining last orphan delete marker if the listing is not interrupted.
        // This will help avoid false positives.
        if (!this._isMaxScannedEntriesReached()) {
            // The following check makes sure the last orphan delete marker is not forgotten.
            if (this.keys < this.maxKeys) {
                if (this.value) {
                    this._addOrphan();
                }
            // The following makes sure that if maxKeys is reached, IsTruncated is set to true.
            // We moved the "isTruncated" logic out of _reachedMaxKeys to make sure we take into account the last entity
            // if the listing is truncated right before the last entity and the last entity is an orphan delete marker.
            } else {
                this.IsTruncated = this.maxKeys > 0;
            }
        }

        const result = {
            Contents: this.Versions,
            IsTruncated: this.IsTruncated,
        };

        if (this.IsTruncated) {
            result.NextMarker = this.nextKeyMarker;
        }

        return result;
    }
}

module.exports = { DelimiterOrphanDeleteMarker };
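// A small worked example of the orphan detection above, over a key stream
// sorted by key then version (newest first); keys and version ids are
// illustrative:
//
//   a (delete marker, v10)    -> kept in memory as the <keyName, value> pair
//   b (delete marker, v20)    -> key changed while 'a' had a single version:
//                                'a' is an orphan delete marker and is pushed
//   b (regular version, v21)  -> 'b' has a noncurrent version: not an orphan,
//                                this.value is reset to null
//   c (regular version, v30)  -> the 'b' pair was invalidated, nothing pushed
//
// result() finally flushes the last pending candidate ('c' here, which is
// not a delete marker, so nothing more is added).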
@ -1,165 +0,0 @@
|
||||||
'use strict'; // eslint-disable-line strict
|
|
||||||
|
|
||||||
const Delimiter = require('./delimiter').Delimiter;
|
|
||||||
const Version = require('../../versioning/Version').Version;
|
|
||||||
const VSConst = require('../../versioning/constants').VersioningConstants;
|
|
||||||
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
|
|
||||||
require('./tools');
|
|
||||||
|
|
||||||
const VID_SEP = VSConst.VersionId.Separator;
|
|
||||||
|
|
||||||
function formatVersionKey(key, versionId) {
|
|
||||||
return `${key}${VID_SEP}${versionId}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handle object listing with parameters
|
|
||||||
*
|
|
||||||
* @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
|
|
||||||
* @prop {String[]} Contents - 'files' to list
|
|
||||||
* @prop {Boolean} IsTruncated - truncated listing flag
|
|
||||||
* @prop {String|undefined} NextMarker - marker per amazon format
|
|
||||||
* @prop {Number} keys - count of listed keys
|
|
||||||
* @prop {String|undefined} delimiter - separator per amazon format
|
|
||||||
* @prop {String|undefined} prefix - prefix per amazon format
|
|
||||||
* @prop {Number} maxKeys - number of keys to list
|
|
||||||
*/
|
|
||||||
class DelimiterVersions extends Delimiter {
|
|
||||||
constructor(parameters) {
|
|
||||||
super(parameters);
|
|
||||||
// specific to version listing
|
|
||||||
this.keyMarker = parameters.keyMarker;
|
|
||||||
this.versionIdMarker = parameters.versionIdMarker;
|
|
||||||
// internal state
|
|
||||||
this.masterKey = undefined;
|
|
||||||
this.masterVersionId = undefined;
|
|
||||||
// listing results
|
|
||||||
this.NextMarker = parameters.keyMarker;
|
|
||||||
this.NextVersionIdMarker = undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
genMDParams() {
|
|
||||||
const params = {};
|
|
||||||
if (this.parameters.prefix) {
|
|
||||||
params.gte = this.parameters.prefix;
|
|
||||||
params.lt = inc(this.parameters.prefix);
|
|
||||||
}
|
|
||||||
if (this.parameters.keyMarker) {
|
|
||||||
if (params.gte && params.gte > this.parameters.keyMarker) {
|
|
||||||
return params;
|
|
||||||
}
|
|
||||||
delete params.gte;
|
|
||||||
if (this.parameters.versionIdMarker) {
|
|
||||||
// versionIdMarker should always come with keyMarker
|
|
||||||
// but may not be the other way around
|
|
||||||
params.gt = formatVersionKey(this.parameters.keyMarker,
|
|
||||||
this.parameters.versionIdMarker);
|
|
||||||
} else {
|
|
||||||
params.gt = inc(this.parameters.keyMarker + VID_SEP);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return params;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add a (key, versionId, value) tuple to the listing.
|
|
||||||
* Set the NextMarker to the current key
|
|
||||||
* Increment the keys counter
|
|
||||||
* @param {object} obj - the entry to add to the listing result
|
|
||||||
* @param {String} obj.key - The key to add
|
|
||||||
     * @param {String} obj.versionId - versionId
     * @param {String} obj.value - The value of the key
     * @return {Boolean} - indicates if iteration should continue
     */
    addContents(obj) {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.Contents.push(obj);
        this.NextMarker = obj.key;
        this.NextVersionIdMarker = obj.versionId;
        ++this.keys;
        return FILTER_ACCEPT;
    }

    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj) {
        if (Version.isPHD(obj.value)) {
            return FILTER_ACCEPT; // trick repd to not increase its streak
        }
        if (this.prefix && !obj.key.startsWith(this.prefix)) {
            return FILTER_SKIP;
        }
        let key = obj.key; // original key
        let versionId = undefined; // versionId
        const versionIdIndex = obj.key.indexOf(VID_SEP);
        if (versionIdIndex < 0) {
            this.masterKey = obj.key;
            this.masterVersionId =
                Version.from(obj.value).getVersionId() || 'null';
            versionId = this.masterVersionId;
        } else {
            // eslint-disable-next-line
            key = obj.key.slice(0, versionIdIndex);
            // eslint-disable-next-line
            versionId = obj.key.slice(versionIdIndex + 1);
            if (this.masterKey === key && this.masterVersionId === versionId) {
                return FILTER_ACCEPT; // trick repd to not increase its streak
            }
            this.masterKey = undefined;
            this.masterVersionId = undefined;
        }
        if (this.delimiter) {
            const baseIndex = this.prefix ? this.prefix.length : 0;
            const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
            if (delimiterIndex >= 0) {
                return this.addCommonPrefix(key, delimiterIndex);
            }
        }
        return this.addContents({ key, value: obj.value, versionId });
    }

    skipping() {
        if (this.NextMarker) {
            const index = this.NextMarker.lastIndexOf(this.delimiter);
            if (index === this.NextMarker.length - 1) {
                return this.NextMarker;
            }
        }
        return SKIP_NONE;
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done; it does not include a NextMarker field if the
     * output is not truncated
     * @return {Object} - following Amazon's format
     */
    result() {
        /* NextMarker is only provided when delimiter is used.
         * specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        return {
            CommonPrefixes: this.CommonPrefixes,
            Versions: this.Contents,
            IsTruncated: this.IsTruncated,
            NextKeyMarker: this.IsTruncated ? this.NextMarker : undefined,
            NextVersionIdMarker: this.IsTruncated ?
                this.NextVersionIdMarker : undefined,
            Delimiter: this.delimiter,
        };
    }
}

module.exports = { DelimiterVersions };
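As a usage sketch (not part of this changeset), a caller could drive this extension as follows, assuming `entries` is an ordered iterable of `{ key, value }` records read from the metadata backend:

    const listing = new DelimiterVersions({ delimiter: '/', maxKeys: 1000 }, logger);
    for (const entry of entries) {
        if (listing.filter(entry) === FILTER_END) {
            break; // maxKeys reached, stop reading from the backend
        }
        // a range-aware backend can also consult listing.skipping() here to
        // jump past a whole common prefix instead of reading every key
    }
    const { Versions, CommonPrefixes, IsTruncated } = listing.result();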
@ -0,0 +1,535 @@
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;

import {
    FilterState,
    FilterReturnValue,
} from './delimiter';

const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
    require('./tools');

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

export const enum DelimiterVersionsFilterStateId {
    NotSkipping = 1,
    SkippingPrefix = 2,
    SkippingVersions = 3,
};

export interface DelimiterVersionsFilterState_NotSkipping extends FilterState {
    id: DelimiterVersionsFilterStateId.NotSkipping,
};

export interface DelimiterVersionsFilterState_SkippingPrefix extends FilterState {
    id: DelimiterVersionsFilterStateId.SkippingPrefix,
    prefix: string;
};

export interface DelimiterVersionsFilterState_SkippingVersions extends FilterState {
    id: DelimiterVersionsFilterStateId.SkippingVersions,
    gt: string;
};

type KeyHandler = (key: string, versionId: string | undefined, value: string) => FilterReturnValue;

type ResultObject = {
    CommonPrefixes: string[],
    Versions: {
        key: string;
        value: string;
        versionId: string;
    }[];
    IsTruncated: boolean;
    Delimiter ?: string;
    NextKeyMarker ?: string;
    NextVersionIdMarker ?: string;
};

type GenMDParamsItem = {
    gt ?: string,
    gte ?: string,
    lt ?: string,
};
/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
export class DelimiterVersions extends Extension {

    state: FilterState;
    keyHandlers: { [id: number]: KeyHandler };

    constructor(parameters, logger, vFormat) {
        super(parameters, logger);
        // original listing parameters
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.maxKeys = parameters.maxKeys || 1000;
        // specific to version listing
        this.keyMarker = parameters.keyMarker;
        this.versionIdMarker = parameters.versionIdMarker;
        // internal state
        this.masterKey = undefined;
        this.masterVersionId = undefined;
        this.nullKey = null;
        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // listing results
        this.CommonPrefixes = [];
        this.Versions = [];
        this.IsTruncated = false;
        this.nextKeyMarker = parameters.keyMarker;
        this.nextVersionIdMarker = undefined;

        this.keyHandlers = {};

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);

        if (this.vFormat === BucketVersioningKeyFormat.v0) {
            this.setKeyHandler(
                DelimiterVersionsFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingV0.bind(this));
        } else {
            this.setKeyHandler(
                DelimiterVersionsFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingV1.bind(this));
        }
        this.setKeyHandler(
            DelimiterVersionsFilterStateId.SkippingPrefix,
            this.keyHandler_SkippingPrefix.bind(this));

        this.setKeyHandler(
            DelimiterVersionsFilterStateId.SkippingVersions,
            this.keyHandler_SkippingVersions.bind(this));

        if (this.versionIdMarker) {
            this.state = <DelimiterVersionsFilterState_SkippingVersions> {
                id: DelimiterVersionsFilterStateId.SkippingVersions,
                gt: `${this.keyMarker}${VID_SEP}${this.versionIdMarker}`,
            };
        } else {
            this.state = <DelimiterVersionsFilterState_NotSkipping> {
                id: DelimiterVersionsFilterStateId.NotSkipping,
            };
        }
    }
    genMDParamsV0() {
        const params: GenMDParamsItem = {};
        if (this.prefix) {
            params.gte = this.prefix;
            params.lt = inc(this.prefix);
        }
        if (this.keyMarker && this.delimiter) {
            const commonPrefix = this.getCommonPrefix(this.keyMarker);
            if (commonPrefix) {
                const afterPrefix = inc(commonPrefix);
                if (!params.gte || afterPrefix > params.gte) {
                    params.gte = afterPrefix;
                }
            }
        }
        if (this.keyMarker && (!params.gte || this.keyMarker >= params.gte)) {
            delete params.gte;
            if (this.versionIdMarker) {
                // start from the beginning of versions so we can
                // check if there's a null key and fetch it
                // (afterwards, we can skip the rest of versions until
                // we reach versionIdMarker)
                params.gte = `${this.keyMarker}${VID_SEP}`;
            } else {
                params.gt = `${this.keyMarker}${inc(VID_SEP)}`;
            }
        }
        return params;
    }

    genMDParamsV1() {
        // return an array of two listing params sets to ask for
        // synchronized listing of M and V ranges
        const v0Params: GenMDParamsItem = this.genMDParamsV0();
        const mParams: GenMDParamsItem = {};
        const vParams: GenMDParamsItem = {};
        if (v0Params.gt) {
            mParams.gt = `${DbPrefixes.Master}${v0Params.gt}`;
            vParams.gt = `${DbPrefixes.Version}${v0Params.gt}`;
        } else if (v0Params.gte) {
            mParams.gte = `${DbPrefixes.Master}${v0Params.gte}`;
            vParams.gte = `${DbPrefixes.Version}${v0Params.gte}`;
        } else {
            mParams.gte = DbPrefixes.Master;
            vParams.gte = DbPrefixes.Version;
        }
        if (v0Params.lt) {
            mParams.lt = `${DbPrefixes.Master}${v0Params.lt}`;
            vParams.lt = `${DbPrefixes.Version}${v0Params.lt}`;
        } else {
            mParams.lt = inc(DbPrefixes.Master);
            vParams.lt = inc(DbPrefixes.Version);
        }
        return [mParams, vParams];
    }
    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 -> IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return true;
        }
        return false;
    }

    /**
     * Used to synchronize listing of M and V prefixes by object key
     *
     * @param {object} masterObj - object listed from the first range
     * returned by genMDParamsV1() (the master keys range)
     * @param {object} versionObj - object listed from the second range
     * returned by genMDParamsV1() (the version keys range)
     * @return {number} comparison result:
     *   * -1 if master key < version key
     *   * 1 if master key > version key
     */
    compareObjects(masterObj, versionObj) {
        const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
        const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
        return masterKey < versionKey ? -1 : 1;
    }

    /**
     * Parse a listing key into its nonversioned key and version ID components
     *
     * @param {string} fullKey - full listing key
     * @return {object} obj
     * @return {string} obj.key - nonversioned part of key
     * @return {string} [obj.versionId] - version ID in the key
     */
    parseKey(fullKey: string): { key: string, versionId ?: string } {
        const versionIdIndex = fullKey.indexOf(VID_SEP);
        if (versionIdIndex === -1) {
            return { key: fullKey };
        }
        const nonversionedKey: string = fullKey.slice(0, versionIdIndex);
        const versionId: string = fullKey.slice(versionIdIndex + 1);
        return { key: nonversionedKey, versionId };
    }
    /**
     * Include a key in the listing output, in the Versions or CommonPrefix result
     *
     * @param {string} key - key (without version ID)
     * @param {string} versionId - version ID
     * @param {string} value - metadata value
     * @return {undefined}
     */
    addKey(key: string, versionId: string, value: string) {
        // add the subprefix to the common prefixes if the key has the delimiter
        const commonPrefix = this.getCommonPrefix(key);
        if (commonPrefix) {
            this.addCommonPrefix(commonPrefix);
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
                id: DelimiterVersionsFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
        } else {
            this.addVersion(key, versionId, value);
        }
    }

    /**
     * Add a (key, versionId, value) tuple to the listing.
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {String} key - The key to add
     * @param {String} versionId - versionId
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addVersion(key: string, versionId: string, value: string) {
        this.Versions.push({
            key,
            versionId,
            value: this.trimMetadata(value),
        });
        this.nextKeyMarker = key;
        this.nextVersionIdMarker = versionId;
        ++this.keys;
    }

    getCommonPrefix(key: string): string | undefined {
        if (!this.delimiter) {
            return undefined;
        }
        const baseIndex = this.prefix ? this.prefix.length : 0;
        const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
        if (delimiterIndex === -1) {
            return undefined;
        }
        return key.substring(0, delimiterIndex + this.delimiter.length);
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} commonPrefix - common prefix to add
     * @return {undefined}
     */
    addCommonPrefix(commonPrefix: string): void {
        // add the new prefix to the list
        this.CommonPrefixes.push(commonPrefix);
        ++this.keys;
        this.nextKeyMarker = commonPrefix;
        this.nextVersionIdMarker = undefined;
    }

    /**
     * Cache the current null key, to save it for outputting it later at
     * the correct position
     *
     * @param {String} key - nonversioned key of the null key
     * @param {String} versionId - real version ID of the null key
     * @param {String} value - value of the null key
     * @return {undefined}
     */
    cacheNullKey(key: string, versionId: string, value: string): void {
        this.nullKey = { key, versionId, value };
    }

    getObjectKeyV0(obj: { key: string }): string {
        return obj.key;
    }

    getObjectKeyV1(obj: { key: string }): string {
        return obj.key.slice(DbPrefixes.Master.length);
    }
    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj: { key: string, value: string }): FilterReturnValue {
        const key = this.getObjectKey(obj);
        const value = obj.value;

        const { key: nonversionedKey, versionId: keyVersionId } = this.parseKey(key);
        if (this.nullKey) {
            if (this.nullKey.key !== nonversionedKey
                || this.nullKey.versionId < <string> keyVersionId) {
                this.handleKey(
                    this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
                this.nullKey = null;
            }
        }
        if (keyVersionId === '') {
            // null key
            this.cacheNullKey(nonversionedKey, Version.from(value).getVersionId(), value);
            if (this.state.id === DelimiterVersionsFilterStateId.SkippingVersions) {
                return FILTER_SKIP;
            }
            return FILTER_ACCEPT;
        }
        return this.handleKey(nonversionedKey, keyVersionId, value);
    }
    setState(state: FilterState): void {
        this.state = state;
    }

    setKeyHandler(stateId: number, keyHandler: KeyHandler): void {
        this.keyHandlers[stateId] = keyHandler;
    }

    handleKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        return this.keyHandlers[this.state.id](key, versionId, value);
    }
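The three helpers above are the whole dispatch mechanism: setKeyHandler() registers one handler per state id and handleKey() looks up the handler for the current state. A minimal standalone rendition of the same lookup-table pattern, with illustrative names only:

    const FILTER_ACCEPT = 1;
    const FILTER_SKIP = 0;
    const StateId = { NotSkipping: 1, SkippingPrefix: 2 };
    // one handler per state id; switching state swaps the per-key behavior
    // without conditional chains in the hot path
    const keyHandlers = {
        [StateId.NotSkipping]: () => FILTER_ACCEPT,
        [StateId.SkippingPrefix]: () => FILTER_SKIP,
    };
    let state = { id: StateId.NotSkipping };
    const handleKey = key => keyHandlers[state.id](key);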
    keyHandler_NotSkippingV0(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        if (key.startsWith(DbPrefixes.Replay)) {
            // skip internal replay prefix entirely
            this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
                id: DelimiterVersionsFilterStateId.SkippingPrefix,
                prefix: DbPrefixes.Replay,
            });
            return FILTER_SKIP;
        }
        if (Version.isPHD(value)) {
            return FILTER_ACCEPT;
        }
        return this.filter_onNewKey(key, versionId, value);
    }

    keyHandler_NotSkippingV1(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        // NOTE: this check on PHD is only useful for Artesca, S3C
        // does not use PHDs in V1 format
        if (Version.isPHD(value)) {
            return FILTER_ACCEPT;
        }
        return this.filter_onNewKey(key, versionId, value);
    }

    filter_onNewKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        if (versionId === undefined) {
            this.masterKey = key;
            this.masterVersionId = Version.from(value).getVersionId() || 'null';
            this.addKey(this.masterKey, this.masterVersionId, value);
        } else {
            if (this.masterKey === key && this.masterVersionId === versionId) {
                // do not add a version key if it is the master version
                return FILTER_ACCEPT;
            }
            this.addKey(key, versionId, value);
        }
        return FILTER_ACCEPT;
    }
    keyHandler_SkippingPrefix(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
        if (key.startsWith(prefix)) {
            return FILTER_SKIP;
        }
        this.setState(<DelimiterVersionsFilterState_NotSkipping> {
            id: DelimiterVersionsFilterStateId.NotSkipping,
        });
        return this.handleKey(key, versionId, value);
    }

    keyHandler_SkippingVersions(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        if (key === this.keyMarker) {
            // since the nonversioned key equals the marker, there is
            // necessarily a versionId in this key
            const _versionId = <string> versionId;
            if (_versionId < this.versionIdMarker) {
                // skip all versions until marker
                return FILTER_SKIP;
            }
            if (_versionId === this.versionIdMarker) {
                // nothing left to skip, so return ACCEPT, but don't add this version
                return FILTER_ACCEPT;
            }
        }
        this.setState(<DelimiterVersionsFilterState_NotSkipping> {
            id: DelimiterVersionsFilterStateId.NotSkipping,
        });
        return this.handleKey(key, versionId, value);
    }
    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterVersionsFilterStateId.SkippingPrefix:
            {
                const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
                return inc(prefix);
            }
        case DelimiterVersionsFilterStateId.SkippingVersions:
            {
                const { gt } = <DelimiterVersionsFilterState_SkippingVersions> this.state;
                // the contract of skipping() is to return the first key
                // that can be skipped to, so adding a null byte to skip
                // over the existing versioned key set in 'gt'
                return `${gt}\0`;
            }
        default:
            return SKIP_NONE;
        }
    }

    skippingV0() {
        return this.skippingBase();
    }

    skippingV1() {
        const skipTo = this.skippingBase();
        if (skipTo === SKIP_NONE) {
            return SKIP_NONE;
        }
        // skip to the same object key in both M and V range listings
        return [
            `${DbPrefixes.Master}${skipTo}`,
            `${DbPrefixes.Version}${skipTo}`,
        ];
    }
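To make the two skipping variants concrete, here are the values they produce in each state; 'M' and 'V' below stand in for the real DbPrefixes.Master and DbPrefixes.Version values, which this diff does not show:

    // SkippingPrefix with prefix 'photos/':
    //   skippingV0() -> inc('photos/') === 'photos0'   // first key past the prefix
    //   skippingV1() -> ['Mphotos0', 'Vphotos0']       // same jump in both ranges
    // SkippingVersions with gt === 'doc\0vid':
    //   skippingV0() -> 'doc\0vid\0'                   // first key strictly after the marker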
    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done; it does not include a NextMarker field if the
     * output is not truncated
     * @return {Object} - following Amazon's format
     */
    result() {
        // Add the last null key if still in cache (when it is the
        // last version of the last key)
        //
        // NOTE: _reachedMaxKeys sets IsTruncated to true when it
        // returns true. Here we want this because either:
        //
        // - we did not reach the max keys yet so the result is not
        //   truncated, and there is still room for the null key in
        //   the results
        //
        // - OR we reached it already while having to process a new
        //   key (so the result is truncated even without the null key)
        //
        // - OR we are *just* below the limit but the null key to add
        //   does not fit, so we know the result is now truncated
        //   because there remains the null key to be output.
        //
        if (this.nullKey) {
            this.handleKey(this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
        }
        const result: ResultObject = {
            CommonPrefixes: this.CommonPrefixes,
            Versions: this.Versions,
            IsTruncated: this.IsTruncated,
        };
        if (this.delimiter) {
            result.Delimiter = this.delimiter;
        }
        if (this.IsTruncated) {
            result.NextKeyMarker = this.nextKeyMarker;
            if (this.nextVersionIdMarker) {
                result.NextVersionIdMarker = this.nextVersionIdMarker;
            }
        }
        return result;
    }
}

module.exports = { DelimiterVersions };
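A hedged sketch of resuming a paginated version listing with the class above; the key shapes in the comments follow parseKey(), with VID_SEP written as '\0':

    // v0 keys as seen by filter():
    //   'doc'        -> master key (no version id)
    //   'doc\0'      -> null key (empty version id part, cached by cacheNullKey)
    //   'doc\0<vid>' -> one specific version
    const listing = new DelimiterVersions({
        maxKeys: 1000,
        keyMarker: 'doc',
        versionIdMarker: 'vid1', // puts the state machine in SkippingVersions
    }, logger, BucketVersioningKeyFormat.v0);
    // ...feed entries through listing.filter(entry) as in the driver sketch above...
    const res = listing.result(); // NextKeyMarker/NextVersionIdMarker only if truncated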
@ -0,0 +1,12 @@
module.exports = {
    Basic: require('./basic').List,
    Delimiter: require('./delimiter').Delimiter,
    DelimiterVersions: require('./delimiterVersions')
        .DelimiterVersions,
    DelimiterMaster: require('./delimiterMaster')
        .DelimiterMaster,
    MPU: require('./MPU').MultipartUploads,
    DelimiterCurrent: require('./delimiterCurrent').DelimiterCurrent,
    DelimiterNonCurrent: require('./delimiterNonCurrent').DelimiterNonCurrent,
    DelimiterOrphanDeleteMarker: require('./delimiterOrphanDeleteMarker').DelimiterOrphanDeleteMarker,
};
@ -0,0 +1,85 @@
const assert = require('assert');

const { FILTER_END, FILTER_SKIP, SKIP_NONE } = require('./tools');

const MAX_STREAK_LENGTH = 100;

/**
 * Handle the filtering and the skip mechanism of a listing result.
 */
class Skip {
    /**
     * @param {Object} params - skip parameters
     * @param {Object} params.extension - delimiter extension used (required)
     * @param {String} params.gte - current range gte (greater than or
     *                              equal) used by the client code
     */
    constructor(params) {
        assert(params.extension);

        this.extension = params.extension;
        this.gteParams = params.gte;

        this.listingEndCb = null;
        this.skipRangeCb = null;

        /* Used to count consecutive FILTER_SKIP returned by the extension
         * filter method. Once this counter reaches MAX_STREAK_LENGTH, the
         * filter function tries to skip unwanted values by defining a new
         * range. */
        this.streakLength = 0;
    }

    setListingEndCb(cb) {
        this.listingEndCb = cb;
    }

    setSkipRangeCb(cb) {
        this.skipRangeCb = cb;
    }

    /**
     * Filter an entry.
     * @param {Object} entry - entry to filter.
     * @return {undefined}
     *
     * This function calls the listing end or the skip range callbacks if
     * needed.
     */
    filter(entry) {
        assert(this.listingEndCb);
        assert(this.skipRangeCb);

        const filteringResult = this.extension.filter(entry);
        const skipTo = this.extension.skipping();

        if (filteringResult === FILTER_END) {
            this.listingEndCb();
        } else if (filteringResult === FILTER_SKIP
                   && skipTo !== SKIP_NONE) {
            if (++this.streakLength >= MAX_STREAK_LENGTH) {
                let newRange;
                if (Array.isArray(skipTo)) {
                    newRange = [];
                    for (let i = 0; i < skipTo.length; ++i) {
                        newRange.push(skipTo[i]);
                    }
                } else {
                    newRange = skipTo;
                }
                /* Avoid looping on the same range again and again. */
                if (newRange === this.gteParams) {
                    this.streakLength = 1;
                } else {
                    this.skipRangeCb(newRange);
                }
            }
        } else {
            this.streakLength = 0;
        }
    }
}

module.exports = Skip;
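A minimal wiring sketch for Skip, assuming `extension` is one of the listing extensions above, `entryStream` is the backend read stream, and `restartScanAt` is a hypothetical helper that reopens the stream at a new range:

    const skipScan = new Skip({ extension, gte: params.gte });
    skipScan.setListingEndCb(() => entryStream.destroy());
    skipScan.setSkipRangeCb(newRange => {
        // tear down the current scan and restart it at the computed range
        entryStream.destroy();
        restartScanAt(newRange);
    });
    entryStream.on('data', entry => skipScan.filter(entry));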
@ -1,3 +1,5 @@
const { DbPrefixes } = require('../../versioning/constants').VersioningConstants;

// constants for extensions
const SKIP_NONE = undefined; // to be in line with the values of NextMarker
const FILTER_ACCEPT = 1;

@ -31,9 +33,36 @@ function inc(str) {
        String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str;
}

/**
 * Transform listing parameters for v0 versioning key format to make
 * them compatible with v1 format
 *
 * @param {object} v0params - listing parameters for v0 format
 * @return {object} - listing parameters for v1 format
 */
function listingParamsMasterKeysV0ToV1(v0params) {
    const v1params = Object.assign({}, v0params);
    if (v0params.gt !== undefined) {
        v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;
    } else if (v0params.gte !== undefined) {
        v1params.gte = `${DbPrefixes.Master}${v0params.gte}`;
    } else {
        v1params.gte = DbPrefixes.Master;
    }
    if (v0params.lt !== undefined) {
        v1params.lt = `${DbPrefixes.Master}${v0params.lt}`;
    } else if (v0params.lte !== undefined) {
        v1params.lte = `${DbPrefixes.Master}${v0params.lte}`;
    } else {
        v1params.lt = inc(DbPrefixes.Master); // stop after the last master key
    }
    return v1params;
}

module.exports = {
    checkLimit,
    inc,
    listingParamsMasterKeysV0ToV1,
    SKIP_NONE,
    FILTER_END,
    FILTER_SKIP,
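Example transformations, writing M for the DbPrefixes.Master value (not shown in this diff):

    listingParamsMasterKeysV0ToV1({ gte: 'foo', lt: 'fop' });
    // -> { gte: 'Mfoo', lt: 'Mfop' }   same window, restricted to master keys
    listingParamsMasterKeysV0ToV1({});
    // -> { gte: 'M', lt: inc('M') }    the entire master-key range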
@ -0,0 +1,87 @@
function indexOf(arr, value) {
    if (!arr.length) {
        return -1;
    }
    let lo = 0;
    let hi = arr.length - 1;

    while (hi - lo > 1) {
        const i = lo + ((hi - lo) >> 1);
        if (arr[i] > value) {
            hi = i;
        } else {
            lo = i;
        }
    }
    if (arr[lo] === value) {
        return lo;
    }
    if (arr[hi] === value) {
        return hi;
    }
    return -1;
}

function indexAtOrBelow(arr, value) {
    let i;
    let lo;
    let hi;

    if (!arr.length || arr[0] > value) {
        return -1;
    }
    if (arr[arr.length - 1] <= value) {
        return arr.length - 1;
    }

    lo = 0;
    hi = arr.length - 1;

    while (hi - lo > 1) {
        i = lo + ((hi - lo) >> 1);
        if (arr[i] > value) {
            hi = i;
        } else {
            lo = i;
        }
    }

    return lo;
}

/*
 * perform symmetric diff in O(m + n)
 */
function symDiff(k1, k2, v1, v2, cb) {
    let i = 0;
    let j = 0;
    const n = k1.length;
    const m = k2.length;

    while (i < n && j < m) {
        if (k1[i] < k2[j]) {
            cb(v1[i]);
            i++;
        } else if (k2[j] < k1[i]) {
            cb(v2[j]);
            j++;
        } else {
            i++;
            j++;
        }
    }
    while (i < n) {
        cb(v1[i]);
        i++;
    }
    while (j < m) {
        cb(v2[j]);
        j++;
    }
}

module.exports = {
    indexOf,
    indexAtOrBelow,
    symDiff,
};
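These helpers can be checked against small inputs; symDiff() walks two sorted key arrays in lockstep and emits the values whose keys appear in exactly one of them:

    const out = [];
    symDiff(['a', 'b', 'd'], ['b', 'c'],
            ['va', 'vb', 'vd'], ['wb', 'wc'],
            v => out.push(v));
    // out === ['va', 'wc', 'vd']

    indexAtOrBelow([10, 20, 30], 25); // === 1  (index of 20)
    indexAtOrBelow([10, 20, 30], 5);  // === -1 (value below the first key)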
@ -0,0 +1,51 @@
const ArrayUtils = require('./ArrayUtils');

class SortedSet {
    constructor(obj) {
        if (obj) {
            this.keys = obj.keys;
            this.values = obj.values;
        } else {
            this.clear();
        }
    }

    clear() {
        this.keys = [];
        this.values = [];
    }

    get size() {
        return this.keys.length;
    }

    set(key, value) {
        const index = ArrayUtils.indexAtOrBelow(this.keys, key);
        if (this.keys[index] === key) {
            this.values[index] = value;
            return;
        }
        this.keys.splice(index + 1, 0, key);
        this.values.splice(index + 1, 0, value);
    }

    isSet(key) {
        const index = ArrayUtils.indexOf(this.keys, key);
        return index >= 0;
    }

    get(key) {
        const index = ArrayUtils.indexOf(this.keys, key);
        return index >= 0 ? this.values[index] : undefined;
    }

    del(key) {
        const index = ArrayUtils.indexOf(this.keys, key);
        if (index >= 0) {
            this.keys.splice(index, 1);
            this.values.splice(index, 1);
        }
    }
}

module.exports = SortedSet;
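SortedSet keeps two parallel arrays sorted by key, so every operation is a binary search plus (for writes) a splice; a short usage example:

    const s = new SortedSet();
    s.set('b', 2);
    s.set('a', 1);
    s.set('b', 20);  // overwrites the existing entry for 'b'
    s.get('b');      // === 20
    s.isSet('c');    // === false
    s.del('a');      // s.keys === ['b'], s.values === [20]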
@ -0,0 +1,106 @@
const stream = require('stream');

class MergeStream extends stream.Readable {
    constructor(stream1, stream2, compare) {
        super({ objectMode: true });

        this._compare = compare;
        this._streams = [stream1, stream2];

        // peekItems elements represent the latest item consumed from
        // the respective input stream but not yet pushed. It can also
        // be one of the following special values:
        // - undefined: stream hasn't started emitting items
        // - null: EOF reached and no more item to peek
        this._peekItems = [undefined, undefined];
        this._streamEof = [false, false];
        this._streamToResume = null;

        stream1.on('data', item => this._onItem(stream1, item, 0, 1));
        stream1.once('end', () => this._onEnd(stream1, 0, 1));
        stream1.once('error', err => this._onError(stream1, err, 0, 1));

        stream2.on('data', item => this._onItem(stream2, item, 1, 0));
        stream2.once('end', () => this._onEnd(stream2, 1, 0));
        stream2.once('error', err => this._onError(stream2, err, 1, 0));
    }

    _read() {
        if (this._streamToResume) {
            this._streamToResume.resume();
            this._streamToResume = null;
        }
    }

    _destroy(err, callback) {
        for (let i = 0; i < 2; ++i) {
            if (!this._streamEof[i]) {
                this._streams[i].destroy();
            }
        }
        callback();
    }

    _onItem(myStream, myItem, myIndex, otherIndex) {
        this._peekItems[myIndex] = myItem;
        const otherItem = this._peekItems[otherIndex];
        if (otherItem === undefined) {
            // wait for the other stream to wake up
            return myStream.pause();
        }
        if (otherItem === null || this._compare(myItem, otherItem) <= 0) {
            if (!this.push(myItem)) {
                myStream.pause();
                this._streamToResume = myStream;
            }
            return undefined;
        }
        const otherStream = this._streams[otherIndex];
        const otherMore = this.push(otherItem);
        if (this._streamEof[otherIndex]) {
            this._peekItems[otherIndex] = null;
            return this.push(myItem);
        }
        myStream.pause();
        if (otherMore) {
            return otherStream.resume();
        }
        this._streamToResume = otherStream;
        return undefined;
    }

    _onEnd(myStream, myIndex, otherIndex) {
        this._streamEof[myIndex] = true;
        if (this._peekItems[myIndex] === undefined) {
            this._peekItems[myIndex] = null;
        }
        const myItem = this._peekItems[myIndex];
        const otherItem = this._peekItems[otherIndex];
        if (otherItem === undefined) {
            // wait for the other stream to wake up
            return undefined;
        }
        if (otherItem === null) {
            return this.push(null);
        }
        if (myItem === null || this._compare(myItem, otherItem) <= 0) {
            this.push(otherItem);
            this._peekItems[myIndex] = null;
        }
        if (this._streamEof[otherIndex]) {
            return this.push(null);
        }
        const otherStream = this._streams[otherIndex];
        return otherStream.resume();
    }

    _onError(myStream, err, myIndex, otherIndex) {
        myStream.destroy();
        if (this._streams[otherIndex]) {
            this._streams[otherIndex].destroy();
        }
        this.emit('error', err);
    }
}

module.exports = MergeStream;
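A usage sketch, assuming two object-mode streams that each emit entries already sorted by key (MergeStream only interleaves, it does not sort):

    const stream = require('stream');
    const s1 = stream.Readable.from([{ key: 'a' }, { key: 'c' }]);
    const s2 = stream.Readable.from([{ key: 'b' }, { key: 'd' }]);
    const merged = new MergeStream(s1, s2,
        (e1, e2) => (e1.key < e2.key ? -1 : (e1.key > e2.key ? 1 : 0)));
    merged.on('data', e => console.log(e.key)); // a, b, c, d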
@ -1,6 +1,4 @@
import * as constants from '../constants';

/**
 * Class containing requester's information received from Vault
@ -8,9 +6,15 @@
 * shortid, email, accountDisplayName and IAMdisplayName (if applicable)
 * @return {AuthInfo} an AuthInfo instance
 */
export default class AuthInfo {
    arn: string;
    canonicalID: string;
    shortid: string;
    email: string;
    accountDisplayName: string;
    IAMdisplayName: string;

    constructor(objectFromVault: any) {
        // amazon resource name for IAM user (if applicable)
        this.arn = objectFromVault.arn;
        // account canonicalID
@ -53,10 +57,8 @@ class AuthInfo {
        return this.canonicalID.startsWith(
            `${constants.zenkoServiceAccount}/`);
    }
    isRequesterThisServiceAccount(serviceName: string) {
        const computedCanonicalID = `${constants.zenkoServiceAccount}/${serviceName}`;
        return this.canonicalID === computedCanonicalID;
    }
}
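An illustrative check against the method above, assuming constants.zenkoServiceAccount is the canonical-ID prefix reserved for service accounts:

    const info = new AuthInfo({ canonicalID: `${constants.zenkoServiceAccount}/md` });
    info.isRequesterThisServiceAccount('md');       // true
    info.isRequesterThisServiceAccount('backbeat'); // false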
@ -1,16 +1,22 @@
|
||||||
const errors = require('../errors');
|
import { Logger } from 'werelogs';
|
||||||
const AuthInfo = require('./AuthInfo');
|
import errors from '../errors';
|
||||||
|
import AuthInfo from './AuthInfo';
|
||||||
|
|
||||||
/** vaultSignatureCb parses message from Vault and instantiates
|
/** vaultSignatureCb parses message from Vault and instantiates
|
||||||
* @param {object} err - error from vault
|
* @param err - error from vault
|
||||||
* @param {object} authInfo - info from vault
|
* @param authInfo - info from vault
|
||||||
* @param {object} log - log for request
|
* @param log - log for request
|
||||||
* @param {function} callback - callback to authCheck functions
|
* @param callback - callback to authCheck functions
|
||||||
* @param {object} [streamingV4Params] - present if v4 signature;
|
* @param [streamingV4Params] - present if v4 signature;
|
||||||
* items used to calculate signature on chunks if streaming auth
|
* items used to calculate signature on chunks if streaming auth
|
||||||
* @return {undefined}
|
|
||||||
*/
|
*/
|
||||||
function vaultSignatureCb(err, authInfo, log, callback, streamingV4Params) {
|
function vaultSignatureCb(
|
||||||
|
err: Error | null,
|
||||||
|
authInfo: { message: { body: any } },
|
||||||
|
log: Logger,
|
||||||
|
callback: (err: Error | null, data?: any, results?: any, params?: any, infos?: any) => void,
|
||||||
|
streamingV4Params?: any
|
||||||
|
) {
|
||||||
// vaultclient API guarantees that it returns:
|
// vaultclient API guarantees that it returns:
|
||||||
// - either `err`, an Error object with `code` and `message` properties set
|
// - either `err`, an Error object with `code` and `message` properties set
|
||||||
// - or `err == null` and `info` is an object with `message.code` and
|
// - or `err == null` and `info` is an object with `message.code` and
|
||||||
|
@ -24,52 +30,101 @@ function vaultSignatureCb(err, authInfo, log, callback, streamingV4Params) {
|
||||||
const info = authInfo.message.body;
|
const info = authInfo.message.body;
|
||||||
const userInfo = new AuthInfo(info.userInfo);
|
const userInfo = new AuthInfo(info.userInfo);
|
||||||
const authorizationResults = info.authorizationResults;
|
const authorizationResults = info.authorizationResults;
|
||||||
return callback(null, userInfo, authorizationResults, streamingV4Params);
|
const auditLog: { accountDisplayName: string, IAMdisplayName?: string } =
|
||||||
|
{ accountDisplayName: userInfo.getAccountDisplayName() };
|
||||||
|
const iamDisplayName = userInfo.getIAMdisplayName();
|
||||||
|
if (iamDisplayName) {
|
||||||
|
auditLog.IAMdisplayName = iamDisplayName;
|
||||||
|
}
|
||||||
|
// @ts-ignore
|
||||||
|
log.addDefaultFields(auditLog);
|
||||||
|
return callback(null, userInfo, authorizationResults, streamingV4Params, {
|
||||||
|
accountQuota: info.accountQuota || {},
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export type AuthV4RequestParams = {
|
||||||
|
version: 4;
|
||||||
|
log: Logger;
|
||||||
|
data: {
|
||||||
|
accessKey: string;
|
||||||
|
signatureFromRequest: string;
|
||||||
|
region: string;
|
||||||
|
stringToSign: string;
|
||||||
|
scopeDate: string;
|
||||||
|
authType: 'query' | 'header';
|
||||||
|
signatureVersion: string;
|
||||||
|
signatureAge?: number;
|
||||||
|
timestamp: number;
|
||||||
|
credentialScope: string;
|
||||||
|
securityToken: string;
|
||||||
|
algo: string;
|
||||||
|
log: Logger;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Class that provides common authentication methods against different
|
* Class that provides common authentication methods against different
|
||||||
* authentication backends.
|
* authentication backends.
|
||||||
* @class Vault
|
* @class Vault
|
||||||
*/
|
*/
|
||||||
class Vault {
|
export default class Vault {
|
||||||
|
client: any;
|
||||||
|
implName: string;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @constructor
|
* @constructor
|
||||||
* @param {object} client - authentication backend or vault client
|
* @param {object} client - authentication backend or vault client
|
||||||
* @param {string} implName - implementation name for auth backend
|
* @param {string} implName - implementation name for auth backend
|
||||||
*/
|
*/
|
||||||
constructor(client, implName) {
|
constructor(client: any, implName: string) {
|
||||||
this.client = client;
|
this.client = client;
|
||||||
this.implName = implName;
|
this.implName = implName;
|
||||||
}
|
}
|
||||||
/**
|
/**
|
||||||
* authenticateV2Request
|
* authenticateV2Request
|
||||||
*
|
*
|
||||||
* @param {string} params - the authentication parameters as returned by
|
* @param params - the authentication parameters as returned by
|
||||||
* auth.extractParams
|
* auth.extractParams
|
||||||
* @param {number} params.version - shall equal 2
|
* @param params.version - shall equal 2
|
||||||
* @param {string} params.data.accessKey - the user's accessKey
|
* @param params.data.accessKey - the user's accessKey
|
||||||
* @param {string} params.data.signatureFromRequest - the signature read
|
* @param params.data.signatureFromRequest - the signature read
|
||||||
* from the request
|
* from the request
|
||||||
* @param {string} params.data.stringToSign - the stringToSign
|
* @param params.data.stringToSign - the stringToSign
|
||||||
* @param {string} params.data.algo - the hashing algorithm used for the
|
* @param params.data.algo - the hashing algorithm used for the
|
||||||
* signature
|
* signature
|
||||||
* @param {string} params.data.authType - the type of authentication (query
|
* @param params.data.authType - the type of authentication (query
|
||||||
* or header)
|
* or header)
|
||||||
* @param {string} params.data.signatureVersion - the version of the
|
* @param params.data.signatureVersion - the version of the
|
||||||
* signature (AWS or AWS4)
|
* signature (AWS or AWS4)
|
||||||
* @param {number} [params.data.signatureAge] - the age of the signature in
|
* @param [params.data.signatureAge] - the age of the signature in
|
||||||
* ms
|
* ms
|
||||||
* @param {string} params.data.log - the logger object
|
* @param params.data.log - the logger object
|
||||||
* @param {RequestContext []} requestContexts - an array of RequestContext
|
* @param {RequestContext []} requestContexts - an array of RequestContext
|
||||||
* instances which contain information for policy authorization check
|
* instances which contain information for policy authorization check
|
||||||
* @param {function} callback - callback with either error or user info
|
* @param callback - callback with either error or user info
|
||||||
* @returns {undefined}
|
|
||||||
*/
|
*/
|
||||||
authenticateV2Request(params, requestContexts, callback) {
|
authenticateV2Request(
|
||||||
|
params: {
|
||||||
|
version: 2;
|
||||||
|
log: Logger;
|
||||||
|
data: {
|
||||||
|
securityToken: string;
|
||||||
|
accessKey: string;
|
||||||
|
signatureFromRequest: string;
|
||||||
|
stringToSign: string;
|
||||||
|
algo: string;
|
||||||
|
authType: 'query' | 'header';
|
||||||
|
signatureVersion: string;
|
||||||
|
signatureAge?: number;
|
||||||
|
log: Logger;
|
||||||
|
};
|
||||||
|
},
|
||||||
|
requestContexts: any[],
|
||||||
|
callback: (err: Error | null, data?: any) => void
|
||||||
|
) {
|
||||||
params.log.debug('authenticating V2 request');
|
params.log.debug('authenticating V2 request');
|
||||||
let serializedRCsArr;
|
let serializedRCsArr: any;
|
||||||
if (requestContexts) {
|
if (requestContexts) {
|
||||||
serializedRCsArr = requestContexts.map(rc => rc.serialize());
|
serializedRCsArr = requestContexts.map(rc => rc.serialize());
|
||||||
}
|
}
|
||||||
|
@ -79,44 +134,48 @@ class Vault {
|
||||||
params.data.accessKey,
|
params.data.accessKey,
|
||||||
{
|
{
|
||||||
algo: params.data.algo,
|
algo: params.data.algo,
|
||||||
|
// @ts-ignore
|
||||||
reqUid: params.log.getSerializedUids(),
|
reqUid: params.log.getSerializedUids(),
|
||||||
logger: params.log,
|
logger: params.log,
|
||||||
securityToken: params.data.securityToken,
|
securityToken: params.data.securityToken,
|
||||||
requestContext: serializedRCsArr,
|
requestContext: serializedRCsArr,
|
||||||
},
|
},
|
||||||
(err, userInfo) => vaultSignatureCb(err, userInfo,
|
(err: Error | null, userInfo?: any) => vaultSignatureCb(err, userInfo,
|
||||||
params.log, callback)
|
params.log, callback),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/** authenticateV4Request
|
/** authenticateV4Request
|
||||||
* @param {object} params - the authentication parameters as returned by
|
* @param params - the authentication parameters as returned by
|
||||||
* auth.extractParams
|
* auth.extractParams
|
||||||
* @param {number} params.version - shall equal 4
|
* @param params.version - shall equal 4
|
||||||
* @param {string} params.data.log - the logger object
|
* @param params.data.log - the logger object
|
||||||
* @param {string} params.data.accessKey - the user's accessKey
|
* @param params.data.accessKey - the user's accessKey
|
||||||
* @param {string} params.data.signatureFromRequest - the signature read
|
* @param params.data.signatureFromRequest - the signature read
|
||||||
* from the request
|
* from the request
|
||||||
* @param {string} params.data.region - the AWS region
|
* @param params.data.region - the AWS region
|
||||||
* @param {string} params.data.stringToSign - the stringToSign
|
* @param params.data.stringToSign - the stringToSign
|
||||||
* @param {string} params.data.scopeDate - the timespan to allow the request
|
* @param params.data.scopeDate - the timespan to allow the request
|
||||||
* @param {string} params.data.authType - the type of authentication (query
|
* @param params.data.authType - the type of authentication (query
|
||||||
* or header)
|
* or header)
|
||||||
* @param {string} params.data.signatureVersion - the version of the
|
* @param params.data.signatureVersion - the version of the
|
||||||
* signature (AWS or AWS4)
|
* signature (AWS or AWS4)
|
||||||
* @param {number} params.data.signatureAge - the age of the signature in ms
|
* @param params.data.signatureAge - the age of the signature in ms
|
||||||
* @param {number} params.data.timestamp - signaure timestamp
|
* @param params.data.timestamp - signaure timestamp
|
||||||
* @param {string} params.credentialScope - credentialScope for signature
|
* @param params.credentialScope - credentialScope for signature
|
||||||
* @param {RequestContext [] | null} requestContexts -
|
* @param {RequestContext [] | null} requestContexts -
|
||||||
* an array of RequestContext or null if authenticaiton of a chunk
|
* an array of RequestContext or null if authenticaiton of a chunk
|
||||||
* in streamingv4 auth
|
* in streamingv4 auth
|
||||||
* instances which contain information for policy authorization check
|
* instances which contain information for policy authorization check
|
||||||
* @param {function} callback - callback with either error or user info
|
* @param callback - callback with either error or user info
|
||||||
* @return {undefined}
|
|
||||||
*/
|
*/
|
||||||
authenticateV4Request(params, requestContexts, callback) {
|
authenticateV4Request(
|
||||||
|
params: AuthV4RequestParams,
|
||||||
|
requestContexts: any[] | null,
|
||||||
|
callback: (err: Error | null, data?: any) => void
|
||||||
|
) {
|
||||||
params.log.debug('authenticating V4 request');
|
params.log.debug('authenticating V4 request');
|
||||||
let serializedRCs;
|
let serializedRCs: any;
|
||||||
if (requestContexts) {
|
if (requestContexts) {
|
||||||
serializedRCs = requestContexts.map(rc => rc.serialize());
|
serializedRCs = requestContexts.map(rc => rc.serialize());
|
||||||
}
|
}
|
||||||
|
@ -134,31 +193,39 @@ class Vault {
|
||||||
params.data.region,
|
params.data.region,
|
||||||
params.data.scopeDate,
|
params.data.scopeDate,
|
||||||
{
|
{
|
||||||
|
// @ts-ignore
|
||||||
reqUid: params.log.getSerializedUids(),
|
reqUid: params.log.getSerializedUids(),
|
||||||
logger: params.log,
|
logger: params.log,
|
||||||
securityToken: params.data.securityToken,
|
securityToken: params.data.securityToken,
|
||||||
requestContext: serializedRCs,
|
requestContext: serializedRCs,
|
||||||
},
|
},
|
||||||
(err, userInfo) => vaultSignatureCb(err, userInfo,
|
(err: Error | null, userInfo?: any) => vaultSignatureCb(err, userInfo,
|
||||||
params.log, callback, streamingV4Params)
|
params.log, callback, streamingV4Params),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/** getCanonicalIds -- call Vault to get canonicalIDs based on email
|
/** getCanonicalIds -- call Vault to get canonicalIDs based on email
|
||||||
* addresses
|
* addresses
|
||||||
* @param {array} emailAddresses - list of emailAddresses
|
* @param emailAddresses - list of emailAddresses
|
||||||
* @param {object} log - log object
|
* @param log - log object
|
||||||
* @param {function} callback - callback with either error or an array
|
* @param callback - callback with either error or an array
|
||||||
* of objects with each object containing the canonicalID and emailAddress
|
* of objects with each object containing the canonicalID and emailAddress
|
||||||
* of an account as properties
|
* of an account as properties
|
||||||
* @return {undefined}
|
|
||||||
*/
|
*/
|
||||||
getCanonicalIds(emailAddresses, log, callback) {
|
getCanonicalIds(
|
||||||
|
emailAddresses: string[],
|
||||||
|
log: Logger,
|
||||||
|
callback: (
|
||||||
|
err: Error | null,
|
||||||
|
data?: { canonicalID: string; email: string }[]
|
||||||
|
) => void
|
||||||
|
) {
|
||||||
log.trace('getting canonicalIDs from Vault based on emailAddresses',
|
log.trace('getting canonicalIDs from Vault based on emailAddresses',
|
||||||
{ emailAddresses });
|
{ emailAddresses });
|
||||||
this.client.getCanonicalIds(emailAddresses,
|
this.client.getCanonicalIds(emailAddresses,
|
||||||
|
// @ts-ignore
|
||||||
{ reqUid: log.getSerializedUids() },
|
{ reqUid: log.getSerializedUids() },
|
||||||
(err, info) => {
|
(err: Error | null, info?: any) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
log.debug('received error message from auth provider',
|
log.debug('received error message from auth provider',
|
||||||
{ errorMessage: err });
|
{ errorMessage: err });
|
||||||
|
@ -166,17 +233,17 @@ class Vault {
|
||||||
}
|
}
|
||||||
const infoFromVault = info.message.body;
|
const infoFromVault = info.message.body;
|
||||||
log.trace('info received from vault', { infoFromVault });
|
log.trace('info received from vault', { infoFromVault });
|
||||||
const foundIds = [];
|
const foundIds: { canonicalID: string; email: string }[] = [];
|
||||||
for (let i = 0; i < Object.keys(infoFromVault).length; i++) {
|
for (let i = 0; i < Object.keys(infoFromVault).length; i++) {
|
||||||
const key = Object.keys(infoFromVault)[i];
|
const key = Object.keys(infoFromVault)[i];
|
||||||
if (infoFromVault[key] === 'WrongFormat'
|
if (infoFromVault[key] === 'WrongFormat'
|
||||||
|| infoFromVault[key] === 'NotFound') {
|
|| infoFromVault[key] === 'NotFound') {
|
||||||
return callback(errors.UnresolvableGrantByEmailAddress);
|
return callback(errors.UnresolvableGrantByEmailAddress);
|
||||||
}
|
}
|
||||||
const obj = {};
|
foundIds.push({
|
||||||
obj.email = key;
|
email: key,
|
||||||
obj.canonicalID = infoFromVault[key];
|
canonicalID: infoFromVault[key],
|
||||||
foundIds.push(obj);
|
})
|
||||||
}
|
}
|
||||||
return callback(null, foundIds);
|
return callback(null, foundIds);
|
||||||
});
|
});
|
||||||
|
@ -184,18 +251,22 @@ class Vault {
|
||||||
|
|
||||||
/** getEmailAddresses -- call Vault to get email addresses based on
|
/** getEmailAddresses -- call Vault to get email addresses based on
|
||||||
* canonicalIDs
|
* canonicalIDs
|
||||||
* @param {array} canonicalIDs - list of canonicalIDs
|
* @param canonicalIDs - list of canonicalIDs
|
||||||
* @param {object} log - log object
|
* @param log - log object
|
||||||
* @param {function} callback - callback with either error or an object
|
* @param callback - callback with either error or an object
|
||||||
* with canonicalID keys and email address values
|
* with canonicalID keys and email address values
|
||||||
* @return {undefined}
|
|
||||||
*/
|
*/
|
||||||
getEmailAddresses(canonicalIDs, log, callback) {
|
getEmailAddresses(
|
||||||
|
canonicalIDs: string[],
|
||||||
|
log: Logger,
|
||||||
|
callback: (err: Error | null, data?: { [key: string]: any }) => void
|
||||||
|
) {
|
||||||
log.trace('getting emailAddresses from Vault based on canonicalIDs',
|
log.trace('getting emailAddresses from Vault based on canonicalIDs',
|
||||||
{ canonicalIDs });
|
{ canonicalIDs });
|
||||||
this.client.getEmailAddresses(canonicalIDs,
|
this.client.getEmailAddresses(canonicalIDs,
|
||||||
|
// @ts-ignore
|
||||||
{ reqUid: log.getSerializedUids() },
|
{ reqUid: log.getSerializedUids() },
|
||||||
(err, info) => {
|
(err: Error | null, info?: any) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
log.debug('received error message from vault',
|
log.debug('received error message from vault',
|
                         { errorMessage: err });

@@ -216,6 +287,44 @@ class Vault {
             });
     }
 
+    /** getAccountIds -- call Vault to get accountIds based on
+     * canonicalIDs
+     * @param canonicalIDs - list of canonicalIDs
+     * @param log - log object
+     * @param callback - callback with either error or an object
+     * with canonicalID keys and accountId values
+     */
+    getAccountIds(
+        canonicalIDs: string[],
+        log: Logger,
+        callback: (err: Error | null, data?: { [key: string]: string }) => void
+    ) {
+        log.trace('getting accountIds from Vault based on canonicalIDs',
+            { canonicalIDs });
+        this.client.getAccountIds(canonicalIDs,
+            // @ts-expect-error
+            { reqUid: log.getSerializedUids() },
+            (err: Error | null, info?: any) => {
+                if (err) {
+                    log.debug('received error message from vault',
+                        { errorMessage: err });
+                    return callback(err);
+                }
+                const infoFromVault = info.message.body;
+                log.trace('info received from vault', { infoFromVault });
+                const result = {};
+                /* If the accountId was not found in Vault, do not
+                   send the canonicalID back to the API */
+                Object.keys(infoFromVault).forEach(key => {
+                    if (infoFromVault[key] !== 'NotFound' &&
+                        infoFromVault[key] !== 'WrongFormat') {
+                        result[key] = infoFromVault[key];
+                    }
+                });
+                return callback(null, result);
+            });
+    }
+
     /** checkPolicies -- call Vault to evaluate policies
      * @param {object} requestContextParams - parameters needed to construct
      * requestContext in Vault
@@ -228,14 +337,19 @@ class Vault {
      * @param {object} log - log object
      * @param {function} callback - callback with either error or an array
      * of authorization results
-     * @return {undefined}
      */
-    checkPolicies(requestContextParams, userArn, log, callback) {
+    checkPolicies(
+        requestContextParams: any[],
+        userArn: string,
+        log: Logger,
+        callback: (err: Error | null, data?: any[]) => void
+    ) {
         log.trace('sending request context params to vault to evaluate' +
         'policies');
         this.client.checkPolicies(requestContextParams, userArn, {
+            // @ts-ignore
             reqUid: log.getSerializedUids(),
-        }, (err, info) => {
+        }, (err: Error | null, info?: any) => {
             if (err) {
                 log.debug('received error message from auth provider',
                     { error: err });
@@ -246,13 +360,14 @@ class Vault {
             });
     }
 
-    checkHealth(log, callback) {
+    checkHealth(log: Logger, callback: (err: Error | null, data?: any) => void) {
         if (!this.client.healthcheck) {
             const defResp = {};
             defResp[this.implName] = { code: 200, message: 'OK' };
             return callback(null, defResp);
         }
-        return this.client.healthcheck(log.getSerializedUids(), (err, obj) => {
+        // @ts-ignore
+        return this.client.healthcheck(log.getSerializedUids(), (err: Error | null, obj?: any) => {
             const respBody = {};
             if (err) {
                 log.debug(`error from ${this.implName}`, { error: err });
@@ -271,6 +386,19 @@ class Vault {
             return callback(null, respBody);
         });
     }
-}
 
-module.exports = Vault;
+    report(log: Logger, callback: (err: Error | null, data?: any) => void) {
+        // call the report function of the client
+        if (!this.client.report) {
+            return callback(null, {});
+        }
+        // @ts-ignore
+        return this.client.report(log.getSerializedUids(), (err: Error | null, obj?: any) => {
+            if (err) {
+                log.debug(`error from ${this.implName}`, { error: err });
+                return callback(err);
+            }
+            return callback(null, obj);
+        });
+    }
+}
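For context, a minimal sketch of calling the newly typed wrapper. The low-level client instance and the logger wiring are illustrative assumptions, not part of this diff:

import { Logger } from 'werelogs';
import Vault from './Vault';

// hypothetical low-level client implementing the Vault wire protocol
declare const client: any;
const vault = new Vault(client, 'vault');
const log = new Logger('S3').newRequestLogger();

// resolve short account IDs for two canonical IDs; entries that come back
// as 'NotFound' or 'WrongFormat' are filtered out before the callback fires
vault.getAccountIds(['canonicalID1', 'canonicalID2'], log, (err, ids) => {
    if (err) {
        log.error('getAccountIds failed', { error: err });
        return;
    }
    log.info('accountIds resolved', { ids });
});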
223  lib/auth/auth.js

@@ -1,223 +0,0 @@
'use strict'; // eslint-disable-line strict

const crypto = require('crypto');
const errors = require('../errors');
const queryString = require('querystring');
const AuthInfo = require('./AuthInfo');
const v2 = require('./v2/authV2');
const v4 = require('./v4/authV4');
const constants = require('../constants');
const constructStringToSignV4 = require('./v4/constructStringToSign');
const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601;
const vaultUtilities = require('./in_memory/vaultUtilities');
const backend = require('./in_memory/Backend');
const validateAuthConfig = require('./in_memory/validateAuthConfig');
const AuthLoader = require('./in_memory/AuthLoader');
const Vault = require('./Vault');

let vault = null;
const auth = {};
const checkFunctions = {
    v2: {
        headers: v2.header.check,
        query: v2.query.check,
    },
    v4: {
        headers: v4.header.check,
        query: v4.query.check,
    },
};

// If no auth information is provided in request, then user is part of
// 'All Users Group' so use this group as the canonicalID for the publicUser
const publicUserInfo = new AuthInfo({ canonicalID: constants.publicId });

function setAuthHandler(handler) {
    vault = handler;
    return auth;
}

/**
 * This function will check validity of request parameters to authenticate
 *
 * @param {Http.Request} request - Http request object
 * @param {object} log - Logger object
 * @param {string} awsService - Aws service related
 * @param {object} data - Parameters from queryString parsing or body of
 *      POST request
 *
 * @return {object} ret
 * @return {object} ret.err - arsenal.errors object if any error was found
 * @return {object} ret.params - auth parameters to use later on for signature
 *                               computation and check
 * @return {object} ret.params.version - the auth scheme version
 *                                       (undefined, 2, 4)
 * @return {object} ret.params.data - the auth scheme's specific data
 */
function extractParams(request, log, awsService, data) {
    log.trace('entered', { method: 'Arsenal.auth.server.extractParams' });
    const authHeader = request.headers.authorization;
    let version = null;
    let method = null;

    // Identify auth version and method to dispatch to the right check function
    if (authHeader) {
        method = 'headers';
        // TODO: Check for security token header to handle temporary security
        // credentials
        if (authHeader.startsWith('AWS ')) {
            version = 'v2';
        } else if (authHeader.startsWith('AWS4')) {
            version = 'v4';
        } else {
            log.trace('invalid authorization security header',
                { header: authHeader });
            return { err: errors.AccessDenied };
        }
    } else if (data.Signature) {
        method = 'query';
        version = 'v2';
    } else if (data['X-Amz-Algorithm']) {
        method = 'query';
        version = 'v4';
    }

    // Here, either both values are set, or none is set
    if (version !== null && method !== null) {
        if (!checkFunctions[version] || !checkFunctions[version][method]) {
            log.trace('invalid auth version or method',
                { version, authMethod: method });
            return { err: errors.NotImplemented };
        }
        log.trace('identified auth method', { version, authMethod: method });
        return checkFunctions[version][method](request, log, data, awsService);
    }

    // no auth info identified
    log.debug('assuming public user');
    return { err: null, params: publicUserInfo };
}

/**
 * This function will check validity of request parameters to authenticate
 *
 * @param {Http.Request} request - Http request object
 * @param {object} log - Logger object
 * @param {function} cb - the callback
 * @param {string} awsService - Aws service related
 * @param {RequestContext[] | null} requestContexts - array of RequestContext
 * or null if no requestContexts to be sent to Vault (for instance,
 * in multi-object delete request)
 * @return {undefined}
 */
function doAuth(request, log, cb, awsService, requestContexts) {
    const res = extractParams(request, log, awsService, request.query);
    if (res.err) {
        return cb(res.err);
    } else if (res.params instanceof AuthInfo) {
        return cb(null, res.params);
    }
    if (requestContexts) {
        requestContexts.forEach(requestContext => {
            requestContext.setAuthType(res.params.data.authType);
            requestContext.setSignatureVersion(res.params
                .data.signatureVersion);
            requestContext.setSignatureAge(res.params.data.signatureAge);
            requestContext.setSecurityToken(res.params.data.securityToken);
        });
    }

    // Corner cases managed, we're left with normal auth
    res.params.log = log;
    if (res.params.version === 2) {
        return vault.authenticateV2Request(res.params, requestContexts, cb);
    }
    if (res.params.version === 4) {
        return vault.authenticateV4Request(res.params, requestContexts, cb,
            awsService);
    }

    log.error('authentication method not found', {
        method: 'Arsenal.auth.doAuth',
    });
    return cb(errors.InternalError);
}

/**
 * This function will generate a version 4 header
 *
 * @param {Http.Request} request - Http request object
 * @param {object} data - Parameters from queryString parsing or body of
 *      POST request
 * @param {string} accessKey - the accessKey
 * @param {string} secretKeyValue - the secretKey
 * @param {string} awsService - Aws service related
 * @param {string} [proxyPath] - path that gets proxied by reverse proxy
 * @return {undefined}
 */
function generateV4Headers(request, data, accessKey, secretKeyValue,
    awsService, proxyPath) {
    Object.assign(request, { headers: {} });
    const amzDate = convertUTCtoISO8601(Date.now());
    // get date without time
    const scopeDate = amzDate.slice(0, amzDate.indexOf('T'));
    const region = 'us-east-1';
    const service = awsService || 'iam';
    const credentialScope =
        `${scopeDate}/${region}/${service}/aws4_request`;
    const timestamp = amzDate;
    const algorithm = 'AWS4-HMAC-SHA256';

    let payload = '';
    if (request.method === 'POST') {
        payload = queryString.stringify(data, null, null, {
            encodeURIComponent,
        });
    }
    const payloadChecksum = crypto.createHash('sha256')
        .update(payload, 'binary').digest('hex');
    request.setHeader('host', request._headers.host);
    request.setHeader('x-amz-date', amzDate);
    request.setHeader('x-amz-content-sha256', payloadChecksum);
    Object.assign(request.headers, request._headers);
    const signedHeaders = Object.keys(request._headers)
        .filter(headerName =>
            headerName.startsWith('x-amz-')
            || headerName.startsWith('x-scal-')
            || headerName === 'host'
        ).sort().join(';');
    const params = { request, signedHeaders, payloadChecksum,
        credentialScope, timestamp, query: data,
        awsService: service, proxyPath };
    const stringToSign = constructStringToSignV4(params);
    const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
        region,
        scopeDate,
        service);
    const signature = crypto.createHmac('sha256', signingKey)
        .update(stringToSign, 'binary').digest('hex');
    const authorizationHeader = `${algorithm} Credential=${accessKey}` +
        `/${credentialScope}, SignedHeaders=${signedHeaders}, ` +
        `Signature=${signature}`;
    request.setHeader('authorization', authorizationHeader);
    Object.assign(request, { headers: {} });
}

module.exports = {
    setHandler: setAuthHandler,
    server: {
        extractParams,
        doAuth,
    },
    client: {
        generateV4Headers,
    },
    inMemory: {
        backend,
        validateAuthConfig,
        AuthLoader,
    },
    AuthInfo,
    Vault,
};
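The dispatch logic deleted here (and re-added in TypeScript below) reduces to a small decision table over the Authorization header prefix and query-string keys. A standalone sketch of just that detection, with illustrative header values:

// 'AWS <AccessKeyId>:<Signature>'       -> v2, headers
// 'AWS4-HMAC-SHA256 Credential=...'     -> v4, headers
// query string containing 'Signature'       -> v2, query
// query string containing 'X-Amz-Algorithm' -> v4, query
function detectAuthVersion(authHeader?: string, query: Record<string, string> = {}) {
    if (authHeader) {
        if (authHeader.startsWith('AWS ')) return { version: 'v2', method: 'headers' };
        if (authHeader.startsWith('AWS4')) return { version: 'v4', method: 'headers' };
        return null; // unknown scheme -> AccessDenied
    }
    if (query.Signature) return { version: 'v2', method: 'query' };
    if (query['X-Amz-Algorithm']) return { version: 'v4', method: 'query' };
    return undefined; // no auth info -> treated as the public user
}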
@@ -0,0 +1,265 @@
import * as crypto from 'crypto';
import { Logger } from 'werelogs';
import errors from '../errors';
import * as queryString from 'querystring';
import AuthInfo from './AuthInfo';
import * as v2 from './v2/authV2';
import * as v4 from './v4/authV4';
import * as constants from '../constants';
import constructStringToSignV2 from './v2/constructStringToSign';
import constructStringToSignV4 from './v4/constructStringToSign';
import { convertUTCtoISO8601 } from './v4/timeUtils';
import * as vaultUtilities from './backends/in_memory/vaultUtilities';
import * as inMemoryBackend from './backends/in_memory/Backend';
import baseBackend from './backends/base';
import chainBackend from './backends/ChainBackend';
import validateAuthConfig from './backends/in_memory/validateAuthConfig';
import AuthLoader from './backends/in_memory/AuthLoader';
import Vault from './Vault';

let vault: Vault | null = null;
const auth = {};
const checkFunctions = {
    v2: {
        headers: v2.header.check,
        query: v2.query.check,
    },
    v4: {
        headers: v4.header.check,
        query: v4.query.check,
    },
};

// If no auth information is provided in request, then user is part of
// 'All Users Group' so use this group as the canonicalID for the publicUser
const publicUserInfo = new AuthInfo({ canonicalID: constants.publicId });

function setAuthHandler(handler: Vault) {
    vault = handler;
    return auth;
}

/**
 * This function will check validity of request parameters to authenticate
 *
 * @param request - Http request object
 * @param log - Logger object
 * @param awsService - Aws service related
 * @param data - Parameters from queryString parsing or body of
 *      POST request
 *
 * @return ret
 * @return ret.err - arsenal.errors object if any error was found
 * @return ret.params - auth parameters to use later on for signature
 *                      computation and check
 * @return ret.params.version - the auth scheme version
 *                              (undefined, 2, 4)
 * @return ret.params.data - the auth scheme's specific data
 */
function extractParams(
    request: any,
    log: Logger,
    awsService: string,
    data: { [key: string]: string }
) {
    log.trace('entered', { method: 'Arsenal.auth.server.extractParams' });
    const authHeader = request.headers.authorization;
    let version: 'v2' | 'v4' | null = null;
    let method: 'query' | 'headers' | null = null;

    // Identify auth version and method to dispatch to the right check function
    if (authHeader) {
        method = 'headers';
        // TODO: Check for security token header to handle temporary security
        // credentials
        if (authHeader.startsWith('AWS ')) {
            version = 'v2';
        } else if (authHeader.startsWith('AWS4')) {
            version = 'v4';
        } else {
            log.trace('invalid authorization security header',
                { header: authHeader });
            return { err: errors.AccessDenied };
        }
    } else if (data.Signature) {
        method = 'query';
        version = 'v2';
    } else if (data['X-Amz-Algorithm']) {
        method = 'query';
        version = 'v4';
    }

    // Here, either both values are set, or none is set
    if (version !== null && method !== null) {
        if (!checkFunctions[version] || !checkFunctions[version][method]) {
            log.trace('invalid auth version or method',
                { version, authMethod: method });
            return { err: errors.NotImplemented };
        }
        log.trace('identified auth method', { version, authMethod: method });
        return checkFunctions[version][method](request, log, data, awsService);
    }

    // no auth info identified
    log.debug('assuming public user');
    return { err: null, params: publicUserInfo };
}

/**
 * This function will check validity of request parameters to authenticate
 *
 * @param request - Http request object
 * @param log - Logger object
 * @param cb - the callback
 * @param awsService - Aws service related
 * @param {RequestContext[] | null} requestContexts - array of RequestContext
 * or null if no requestContexts to be sent to Vault (for instance,
 * in multi-object delete request)
 */
function doAuth(
    request: any,
    log: Logger,
    cb: (err: Error | null, data?: any) => void,
    awsService: string,
    requestContexts: any[] | null
) {
    const res = extractParams(request, log, awsService, request.query);
    if (res.err) {
        return cb(res.err);
    } else if (res.params instanceof AuthInfo) {
        return cb(null, res.params);
    }
    if (requestContexts) {
        requestContexts.forEach((requestContext) => {
            const { params } = res;
            if ('data' in params) {
                const { data } = params;
                requestContext.setAuthType(data.authType);
                requestContext.setSignatureVersion(data.signatureVersion);
                requestContext.setSecurityToken(data.securityToken);
                if ('signatureAge' in data) {
                    requestContext.setSignatureAge(data.signatureAge);
                }
            }
        });
    }

    // Corner cases managed, we're left with normal auth
    // TODO What's happening here?
    // @ts-ignore
    res.params.log = log;
    if (res.params.version === 2) {
        // @ts-ignore
        return vault!.authenticateV2Request(res.params, requestContexts, cb);
    }
    if (res.params.version === 4) {
        // @ts-ignore
        return vault!.authenticateV4Request(res.params, requestContexts, cb);
    }

    log.error('authentication method not found', {
        method: 'Arsenal.auth.doAuth',
    });
    return cb(errors.InternalError);
}

/**
 * This function will generate a version 4 content-md5 header
 * It looks at the request path to determine what kind of header encoding is required
 *
 * @param path - the request path
 * @param payload - the request payload to hash
 */
function generateContentMD5Header(
    path: string,
    payload: string,
) {
    const encoding = path && path.startsWith('/_/backbeat/') ? 'hex' : 'base64';
    return crypto.createHash('md5').update(payload, 'binary').digest(encoding);
}

/**
 * This function will generate a version 4 header
 *
 * @param request - Http request object
 * @param data - Parameters from queryString parsing or body of
 *      POST request
 * @param accessKey - the accessKey
 * @param secretKeyValue - the secretKey
 * @param awsService - Aws service related
 * @param [proxyPath] - path that gets proxied by reverse proxy
 * @param [sessionToken] - security token if the access/secret keys
 *      are temporary credentials from STS
 * @param [payload] - body of the request if any
 */
function generateV4Headers(
    request: any,
    data: { [key: string]: string },
    accessKey: string,
    secretKeyValue: string,
    awsService: string,
    proxyPath?: string,
    sessionToken?: string,
    payload?: string,
) {
    Object.assign(request, { headers: {} });
    const amzDate = convertUTCtoISO8601(Date.now());
    // get date without time
    const scopeDate = amzDate.slice(0, amzDate.indexOf('T'));
    const region = 'us-east-1';
    const service = awsService || 'iam';
    const credentialScope =
        `${scopeDate}/${region}/${service}/aws4_request`;
    const timestamp = amzDate;
    const algorithm = 'AWS4-HMAC-SHA256';

    payload = payload || '';
    if (request.method === 'POST') {
        payload = queryString.stringify(data, undefined, undefined, {
            encodeURIComponent,
        });
    }
    const payloadChecksum = crypto.createHash('sha256')
        .update(payload, 'binary').digest('hex');
    request.setHeader('host', request._headers.host);
    request.setHeader('x-amz-date', amzDate);
    request.setHeader('x-amz-content-sha256', payloadChecksum);
    request.setHeader('content-md5', generateContentMD5Header(request.path, payload));

    if (sessionToken) {
        request.setHeader('x-amz-security-token', sessionToken);
    }

    Object.assign(request.headers, request._headers);
    const signedHeaders = Object.keys(request._headers)
        .filter(headerName =>
            headerName.startsWith('x-amz-')
            || headerName.startsWith('x-scal-')
            || headerName === 'content-md5'
            || headerName === 'host',
        ).sort().join(';');
    const params = { request, signedHeaders, payloadChecksum,
        credentialScope, timestamp, query: data,
        awsService: service, proxyPath };
    const stringToSign = constructStringToSignV4(params);
    const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
        region,
        scopeDate,
        service);
    const signature = crypto.createHmac('sha256', signingKey)
        .update(stringToSign as string, 'binary').digest('hex');
    const authorizationHeader = `${algorithm} Credential=${accessKey}` +
        `/${credentialScope}, SignedHeaders=${signedHeaders}, ` +
        `Signature=${signature}`;
    request.setHeader('authorization', authorizationHeader);
    Object.assign(request, { headers: {} });
}

export const server = { extractParams, doAuth }
export const client = { generateV4Headers, constructStringToSignV2 }
export const inMemory = { backend: inMemoryBackend, validateAuthConfig, AuthLoader }
export const backends = { baseBackend, chainBackend }
export {
    setAuthHandler as setHandler,
    AuthInfo,
    Vault
}
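A minimal sketch of signing an outgoing request with the converted helper. The endpoint and credentials are placeholders, the optional session token is only needed for temporary STS credentials, and (like the helper itself) this relies on Node populating the request's internal _headers once setHeader has been called:

import * as http from 'http';
import * as auth from './auth';

const req = http.request({ host: 'iam.example.com', method: 'GET', path: '/' });
// mutates the request in place: sets host, x-amz-date, x-amz-content-sha256,
// content-md5 and the final Authorization header
auth.client.generateV4Headers(req, {}, 'ACCESS_KEY', 'SECRET_KEY', 'iam');
req.end();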
@@ -0,0 +1,233 @@
import assert from 'assert';
import async from 'async';
import errors from '../../errors';
import BaseBackend from './base';

/**
 * Class that provides an authentication backend that will verify signatures
 * and retrieve emails and canonical ids associated with an account using a
 * given list of authentication backends and vault clients.
 *
 * @class ChainBackend
 */
export default class ChainBackend extends BaseBackend {
    _clients: any[];

    /**
     * @constructor
     * @param {string} service - service id
     * @param {object[]} clients - list of authentication backends or vault clients
     */
    constructor(service: string, clients: any[]) {
        super(service);

        assert(Array.isArray(clients) && clients.length > 0, 'invalid client list');
        assert(clients.every(client =>
            typeof client.verifySignatureV4 === 'function' &&
            typeof client.verifySignatureV2 === 'function' &&
            typeof client.getCanonicalIds === 'function' &&
            typeof client.getEmailAddresses === 'function' &&
            typeof client.checkPolicies === 'function' &&
            typeof client.healthcheck === 'function',
        ), 'invalid client: missing required auth backend methods');
        this._clients = clients;
    }

    /*
     * try task against each client for one to be successful
     */
    _tryEachClient(task: any, cb: any) {
        // @ts-ignore
        async.tryEach(this._clients.map(client => done => task(client, done)), cb);
    }

    /*
     * apply task to all clients
     */
    _forEachClient(task: any, cb: any) {
        async.map(this._clients, task, cb);
    }

    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any,
    ) {
        this._tryEachClient((client, done) => client.verifySignatureV2(
            stringToSign,
            signatureFromRequest,
            accessKey,
            options,
            done,
        ), callback);
    }

    verifySignatureV4(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        region: string,
        scopeDate: string,
        options: any,
        callback: any,
    ) {
        this._tryEachClient((client, done) => client.verifySignatureV4(
            stringToSign,
            signatureFromRequest,
            accessKey,
            region,
            scopeDate,
            options,
            done,
        ), callback);
    }

    static _mergeObjects(objectResponses: any) {
        return objectResponses.reduce(
            (retObj, resObj) => Object.assign(retObj, resObj.message.body),
            {});
    }

    getCanonicalIds(emailAddresses: string[], options: any, callback: any) {
        this._forEachClient(
            (client, done) => client.getCanonicalIds(emailAddresses, options, done),
            (err, res) => {
                if (err) {
                    return callback(err);
                }
                // TODO: atm naive merge, better handling of conflicting email results
                return callback(null, {
                    message: {
                        body: ChainBackend._mergeObjects(res),
                    },
                });
            });
    }

    getEmailAddresses(canonicalIDs: string[], options: any, callback: any) {
        this._forEachClient(
            (client, done) => client.getEmailAddresses(canonicalIDs, options, done),
            (err, res) => {
                if (err) {
                    return callback(err);
                }
                return callback(null, {
                    message: {
                        body: ChainBackend._mergeObjects(res),
                    },
                });
            });
    }

    /*
     * merge policy responses into a single message
     */
    static _mergePolicies(policyResponses: any) {
        const policyMap: any = {};

        policyResponses.forEach(resp => {
            if (!resp.message || !Array.isArray(resp.message.body)) {
                return;
            }

            const check = (policy) => {
                const key = (policy.arn || '') + (policy.versionId || '') + (policy.action || '');
                if (!policyMap[key] || !policyMap[key].isAllowed) {
                    policyMap[key] = policy;
                }
                // else is duplicate policy
            };

            resp.message.body.forEach(policy => {
                if (Array.isArray(policy)) {
                    policy.forEach(authResult => check(authResult));
                } else {
                    check(policy);
                }
            });
        });

        return Object.keys(policyMap).map(key => {
            const policyRes: any = { isAllowed: policyMap[key].isAllowed };
            if (policyMap[key].arn !== '') {
                policyRes.arn = policyMap[key].arn;
            }
            if (policyMap[key].versionId) {
                policyRes.versionId = policyMap[key].versionId;
            }
            if (policyMap[key].isImplicit !== undefined) {
                policyRes.isImplicit = policyMap[key].isImplicit;
            }
            if (policyMap[key].action) {
                policyRes.action = policyMap[key].action;
            }
            return policyRes;
        });
    }

    /*
        response format:
            { message: {
                body: [{}],
                code: number,
                message: string,
            } }
     */
    checkPolicies(requestContextParams: any, userArn: string, options: any, callback: any) {
        this._forEachClient((client, done) => client.checkPolicies(
            requestContextParams,
            userArn,
            options,
            done,
        ), (err, res) => {
            if (err) {
                return callback(err);
            }
            return callback(null, {
                message: {
                    body: ChainBackend._mergePolicies(res),
                },
            });
        });
    }

    healthcheck(reqUid: string, callback: any) {
        this._forEachClient((client, done) =>
            client.healthcheck(reqUid, (err, res) => done(null, {
                error: !!err ? err : null,
                status: res,
            }),
        ), (err, res) => {
            if (err) {
                return callback(err);
            }

            const isError = res.some(results => !!results.error);
            if (isError) {
                return callback(errors.InternalError, res);
            }
            return callback(null, res);
        });
    }

    report(reqUid: string, callback: any) {
        this._forEachClient((client, done) =>
            client.report(reqUid, done),
        (err, res) => {
            if (err) {
                return callback(err);
            }
            const mergedRes = res.reduce((acc, val) => {
                Object.keys(val).forEach(k => {
                    acc[k] = val[k];
                });
                return acc;
            }, {});

            return callback(null, mergedRes);
        });
    }
}
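To illustrate the intent of the chain, a hypothetical wiring that consults a static in-memory backend before falling back to a remote client; both members must expose the full method set asserted in the constructor, and the import paths are illustrative:

import ChainBackend from './ChainBackend';
import { s3 as S3AuthBackend } from './in_memory/Backend';

// remote client assumed to implement the same backend interface
declare const remoteVaultClient: any;
const memoryBackend = new S3AuthBackend({ accounts: [] });
const chain = new ChainBackend('s3', [memoryBackend, remoteVaultClient]);

// signature checks try each client in order until one succeeds;
// healthcheck fans out and reports one { error, status } per client
chain.healthcheck('req-uid-1234', (err, statuses) => {
    // err is errors.InternalError if any chained client reported an error
});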
@@ -0,0 +1,96 @@
import errors from '../../errors';

/**
 * Base backend class
 *
 * @class BaseBackend
 */
export default class BaseBackend {
    service: string;

    /**
     * @constructor
     * @param {string} service - service identifier for construction arn
     */
    constructor(service: string) {
        this.service = service;
    }

    /** verifySignatureV2
     * @param stringToSign - string to sign built per AWS rules
     * @param signatureFromRequest - signature sent with request
     * @param accessKey - account accessKey
     * @param options - contains algorithm (SHA1 or SHA256)
     * @param callback - callback with either error or user info
     * @return calls callback
     */
    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /** verifySignatureV4
     * @param stringToSign - string to sign built per AWS rules
     * @param signatureFromRequest - signature sent with request
     * @param accessKey - account accessKey
     * @param region - region specified in request credential
     * @param scopeDate - date specified in request credential
     * @param options - options to send to Vault
     * (just contains reqUid for logging in Vault)
     * @param callback - callback with either error or user info
     * @return calls callback
     */
    verifySignatureV4(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        region: string,
        scopeDate: string,
        options: any,
        callback: any
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets canonical ID's for a list of accounts
     * based on email associated with account
     * @param emails - list of email addresses
     * @param options - to send log id to vault
     * @param callback - callback to calling function
     * @returns callback with either error or
     * object with email addresses as keys and canonical IDs
     * as values
     */
    getCanonicalIds(emails: string[], options: any, callback: any) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets email addresses (referred to as display names for getACL's)
     * for a list of accounts based on canonical IDs associated with account
     * @param canonicalIDs - list of canonicalIDs
     * @param options - to send log id to vault
     * @param callback - callback to calling function
     * @returns callback with either error or
     * an object from Vault containing account canonicalID
     * as each object key and an email address as the value (or "NotFound")
     */
    getEmailAddresses(canonicalIDs: string[], options: any, callback: any) {
        return callback(errors.AuthMethodNotImplemented);
    }

    checkPolicies(requestContextParams: any, userArn: string, options: any, callback: any) {
        return callback(null, { message: { body: [] } });
    }

    healthcheck(reqUid: string, callback: any) {
        return callback(null, { code: 200, message: 'OK' });
    }
}
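Since every base method answers AuthMethodNotImplemented (or a benign default), a concrete backend only needs to override what it actually supports. A minimal, illustrative subclass; the key name and returned userInfo shape are placeholders:

import errors from '../../errors';
import BaseBackend from './base';

// a stub backend that accepts exactly one access key; every other
// operation keeps the base class behaviour
class StaticKeyBackend extends BaseBackend {
    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any
    ) {
        if (accessKey !== 'MY_ONLY_KEY') {
            return callback(errors.InvalidAccessKeyId);
        }
        // real implementations would recompute and compare the signature here
        return callback(null, { message: { body: { userInfo: {} } } });
    }
}

const backend = new StaticKeyBackend('s3');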
@@ -0,0 +1,204 @@
import * as fs from 'fs';
import glob from 'simple-glob';
import joi from 'joi';
import werelogs from 'werelogs';
import * as types from './types';
import { Account, Accounts } from './types';
import ARN from '../../../models/ARN';

/** Load authentication information from files or pre-loaded account objects */
export default class AuthLoader {
    #log: werelogs.Logger;
    #authData: Accounts;
    #isValid: 'waiting-for-validation' | 'valid' | 'invalid';

    constructor(logApi: { Logger: typeof werelogs.Logger } = werelogs) {
        this.#log = new logApi.Logger('S3');
        this.#authData = { accounts: [] };
        this.#isValid = 'waiting-for-validation';
    }

    /** Add one or more accounts to the authentication info */
    addAccounts(authData: Accounts, filePath?: string) {
        const isValid = this.#isAuthDataValid(authData, filePath);
        if (isValid) {
            this.#authData.accounts = [
                ...this.#authData.accounts,
                ...authData.accounts,
            ];
            // defer validity checking when getting data to avoid
            // logging multiple times the errors (we need to validate
            // all accounts at once to detect duplicate values)
            if (this.#isValid === 'valid') {
                this.#isValid = 'waiting-for-validation';
            }
        } else {
            this.#isValid = 'invalid';
        }
    }

    /**
     * Add account information from a file. Use { legacy: false } as an option
     * to use the new, Promise-based version.
     *
     * @param filePath - file path containing JSON
     * authentication info (see {@link addAccounts()} for format)
     */
    addFile(filePath: string, options: { legacy: false }): Promise<void>;
    /** @deprecated Please use Promise-version instead. */
    addFile(filePath: string, options?: { legacy: true }): void;
    addFile(filePath: string, options = { legacy: true }) {
        // On deprecation, remove the legacy part and keep the promises.
        const readFunc: any = options.legacy ? fs.readFileSync : fs.promises.readFile;
        const readResult = readFunc(filePath, 'utf8') as Promise<string> | string;
        const prom = Promise.resolve(readResult).then((data) => {
            const authData = JSON.parse(data);
            this.addAccounts(authData, filePath);
        });
        return options.legacy ? undefined : prom;
    }

    /**
     * Add account information from a filesystem path
     *
     * @param globPattern - filesystem glob pattern,
     * can be a single string or an array of glob patterns. Globs
     * can be simple file paths or can contain glob matching
     * characters, like '/a/b/*.json'. The matching files are
     * individually loaded as JSON and accounts are added. See
     * {@link addAccounts()} for JSON format.
     */
    addFilesByGlob(globPattern: string | string[]) {
        // FIXME switch glob to async version
        const files = glob(globPattern);
        files.forEach((filePath) => this.addFile(filePath));
    }

    /**
     * Perform validation on authentication info previously
     * loaded. Note that it has to be done on the entire set after an
     * update to catch duplicate account IDs or access keys.
     */
    validate() {
        if (this.#isValid === 'waiting-for-validation') {
            const isValid = this.#isAuthDataValid(this.#authData);
            this.#isValid = isValid ? 'valid' : 'invalid';
        }
        return this.#isValid === 'valid';
    }

    /**
     * Get authentication info as a plain JS object containing all accounts
     * under the "accounts" attribute, with validation.
     */
    get data() {
        return this.validate() ? this.#authData : null;
    }

    /** backward-compat: ignore arn if starts with 'aws:' and log a warning */
    #isNotLegacyAWSARN(account: Account, filePath?: string) {
        if (account.arn.startsWith('aws:')) {
            const { name: accountName, arn: accountArn } = account;
            this.#log.error(
                'account must have a valid AWS ARN, legacy examples ' +
                "starting with 'aws:' are not supported anymore. " +
                'Please convert to a proper account entry (see ' +
                'examples at https://github.com/scality/S3/blob/' +
                'master/conf/authdata.json). Also note that support ' +
                'for account users has been dropped.',
                { accountName, accountArn, filePath }
            );
            return false;
        }
        return true;
    }

    #isValidUsers(account: Account, filePath?: string) {
        if (account.users) {
            const { name: accountName, arn: accountArn } = account;
            this.#log.error(
                'support for account users has been dropped, consider ' +
                'turning users into account entries (see examples at ' +
                'https://github.com/scality/S3/blob/master/conf/' +
                'authdata.json)',
                { accountName, accountArn, filePath }
            );
            return false;
        }
        return true;
    }

    #isValidARN(account: Account, filePath?: string) {
        const arnObj = ARN.createFromString(account.arn);
        const { name: accountName, arn: accountArn } = account;
        if (arnObj instanceof ARN) {
            if (!arnObj.isIAMAccount()) {
                this.#log.error('authentication config validation error', {
                    reason: 'not an IAM account ARN',
                    accountName,
                    accountArn,
                    filePath,
                });
                return false;
            }
        } else {
            this.#log.error('authentication config validation error', {
                reason: arnObj.error.description,
                accountName,
                accountArn,
                filePath,
            });
            return false;
        }
        return true;
    }

    #isAuthDataValid(authData: any, filePath?: string) {
        const options = { abortEarly: true };
        const response = types.validators.accounts.validate(authData, options);
        if (response.error) {
            this.#dumpJoiErrors(response.error.details, filePath);
            return false;
        }
        const validAccounts = response.value.accounts.filter(
            (account: Account) =>
                this.#isNotLegacyAWSARN(account, filePath) &&
                this.#isValidUsers(account, filePath) &&
                this.#isValidARN(account, filePath)
        );
        const areSomeInvalidAccounts =
            validAccounts.length !== response.value.accounts.length;
        if (areSomeInvalidAccounts) {
            return false;
        }
        const keys = validAccounts.flatMap((account) => account.keys);
        const uniqueKeysValidator = types.validators.keys.unique('access');
        const areKeysUnique = uniqueKeysValidator.validate(keys);
        if (areKeysUnique.error) {
            this.#dumpJoiErrors(areKeysUnique.error.details, filePath);
            return false;
        }
        return true;
    }

    #dumpJoiErrors(errors: joi.ValidationErrorItem[], filePath?: string) {
        errors.forEach((err) => {
            const baseLogInfo = { item: err.path, filePath };
            const logInfo = () => {
                if (err.type === 'array.unique') {
                    const reason = `duplicate value '${err.context?.path}'`;
                    const dupValue = err.context?.value[err.context.path];
                    return { ...baseLogInfo, reason, dupValue };
                } else {
                    const reason = err.message;
                    const context = err.context;
                    return { ...baseLogInfo, reason, context };
                }
            };
            this.#log.error(
                'authentication config validation error',
                logInfo()
            );
        });
    }
}
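A sketch of feeding the loader one account in the authdata.json format enforced by the joi validators above (12-digit shortid; arn, email and canonicalID must be unique). The account values are illustrative placeholders:

import AuthLoader from './AuthLoader';

const loader = new AuthLoader();
loader.addAccounts({
    accounts: [{
        name: 'SampleAccount',
        email: 'sampleaccount1@sampling.com',
        arn: 'arn:aws:iam::123456789012:root',
        canonicalID: '79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be',
        shortid: '123456789012',
        keys: [{ access: 'accessKey1', secret: 'verySecretKey1' }],
    }],
} as any); // cast for the sketch: the legacy 'users' field is intentionally omitted

if (loader.validate()) {
    const authData = loader.data; // { accounts: [...] }, or null when invalid
}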
@@ -0,0 +1,194 @@
import crypto from 'crypto';
import { Logger } from 'werelogs';
import errors from '../../../errors';
import { calculateSigningKey, hashSignature } from './vaultUtilities';
import Indexer from './Indexer';
import BaseBackend from '../base';
import { Accounts } from './types';

function _formatResponse(userInfoToSend: any) {
    return {
        message: {
            body: { userInfo: userInfoToSend },
        },
    };
}

/**
 * Class that provides a memory backend for verifying signatures and getting
 * emails and canonical ids associated with an account.
 *
 * @class InMemoryBackend
 */
class InMemoryBackend extends BaseBackend {
    indexer: Indexer;
    formatResponse: any;

    /**
     * @constructor
     * @param service - service identifier for construction arn
     * @param indexer - indexer instance for retrieving account info
     * @param formatter - function which accepts user info to send
     * back and returns it in an object
     */
    constructor(service: string, indexer: Indexer, formatter: typeof _formatResponse) {
        super(service);
        this.indexer = indexer;
        this.formatResponse = formatter;
    }

    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any,
    ) {
        const entity = this.indexer.getEntityByKey(accessKey);
        if (!entity) {
            return callback(errors.InvalidAccessKeyId);
        }
        const secretKey = this.indexer.getSecretKey(entity, accessKey);
        const reconstructedSig =
            hashSignature(stringToSign, secretKey, options.algo);
        if (signatureFromRequest !== reconstructedSig) {
            return callback(errors.SignatureDoesNotMatch);
        }
        const userInfoToSend = {
            accountDisplayName: this.indexer.getAcctDisplayName(entity),
            canonicalID: entity.canonicalID,
            arn: entity.arn,
            // @ts-ignore
            IAMdisplayName: entity.IAMdisplayName,
        };
        const vaultReturnObject = this.formatResponse(userInfoToSend);
        return callback(null, vaultReturnObject);
    }

    verifySignatureV4(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        region: string,
        scopeDate: string,
        options: any,
        callback: any,
    ) {
        const entity = this.indexer.getEntityByKey(accessKey);
        if (!entity) {
            return callback(errors.InvalidAccessKeyId);
        }
        const secretKey = this.indexer.getSecretKey(entity, accessKey);
        const signingKey = calculateSigningKey(secretKey, region, scopeDate);
        const reconstructedSig = crypto.createHmac('sha256', signingKey)
            .update(stringToSign, 'binary').digest('hex');
        if (signatureFromRequest !== reconstructedSig) {
            return callback(errors.SignatureDoesNotMatch);
        }
        const userInfoToSend = {
            accountDisplayName: this.indexer.getAcctDisplayName(entity),
            canonicalID: entity.canonicalID,
            arn: entity.arn,
            // @ts-ignore
            IAMdisplayName: entity.IAMdisplayName,
        };
        const vaultReturnObject = this.formatResponse(userInfoToSend);
        return callback(null, vaultReturnObject);
    }

    getCanonicalIds(emails: string[], log: Logger, cb: any) {
        const results = {};
        emails.forEach(email => {
            const lowercasedEmail = email.toLowerCase();
            const entity = this.indexer.getEntityByEmail(lowercasedEmail);
            if (!entity) {
                results[email] = 'NotFound';
            } else {
                results[email] =
                    entity.canonicalID;
            }
        });
        const vaultReturnObject = {
            message: {
                body: results,
            },
        };
        return cb(null, vaultReturnObject);
    }

    getEmailAddresses(canonicalIDs: string[], options: any, cb: any) {
        const results = {};
        canonicalIDs.forEach(canonicalId => {
            const foundEntity = this.indexer.getEntityByCanId(canonicalId);
            if (!foundEntity || !foundEntity.email) {
                results[canonicalId] = 'NotFound';
            } else {
                results[canonicalId] = foundEntity.email;
            }
        });
        const vaultReturnObject = {
            message: {
                body: results,
            },
        };
        return cb(null, vaultReturnObject);
    }

    /**
     * Gets accountIds for a list of accounts based on
     * the canonical IDs associated with the account
     * @param canonicalIDs - list of canonicalIDs
     * @param options - to send log id to vault
     * @param cb - callback to calling function
     * @returns callback with either error or
     * an object from Vault containing account canonicalID
     * as each object key and an accountId as the value (or "Not Found")
     */
    getAccountIds(canonicalIDs: string[], options: any, cb: any) {
        const results = {};
        canonicalIDs.forEach(canonicalID => {
            const foundEntity = this.indexer.getEntityByCanId(canonicalID);
            if (!foundEntity || !foundEntity.shortid) {
                results[canonicalID] = 'Not Found';
            } else {
                results[canonicalID] = foundEntity.shortid;
            }
        });
        const vaultReturnObject = {
            message: {
                body: results,
            },
        };
        return cb(null, vaultReturnObject);
    }

    report(log: Logger, callback: any) {
        return callback(null, {});
    }
}

class S3AuthBackend extends InMemoryBackend {
    /**
     * @constructor
     * @param authdata - the authentication config file's data
     * @param authdata.accounts - array of account objects
     * @param authdata.accounts[].name - account name
     * @param authdata.accounts[].email - account email
     * @param authdata.accounts[].arn - IAM resource name
     * @param authdata.accounts[].canonicalID - account canonical ID
     * @param authdata.accounts[].shortid - short account ID
     * @param authdata.accounts[].keys - array of key objects
     * @param authdata.accounts[].keys[].access - access key
     * @param authdata.accounts[].keys[].secret - secret key
     */
    constructor(authdata?: Accounts) {
        super('s3', new Indexer(authdata), _formatResponse);
    }

    refreshAuthData(authData?: Accounts) {
        this.indexer = new Indexer(authData);
    }
}

export { S3AuthBackend as s3 }
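A sketch of verifying a v2 signature purely in memory, with no running Vault. The authdata content, string to sign and received signature are assumed to have been produced by the caller per AWS rules:

import { s3 as S3AuthBackend } from './Backend';

declare const authdata: any; // parsed authdata.json content
declare const stringToSign: string;
declare const signatureFromRequest: string;

const backend = new S3AuthBackend(authdata);
backend.verifySignatureV2(stringToSign, signatureFromRequest, 'accessKey1',
    { algo: 'SHA256' }, (err, res) => {
        // on success, res.message.body.userInfo carries the account's
        // arn, canonicalID and display name
    });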
@@ -0,0 +1,93 @@
import { Accounts, Account, Entity } from './types';

/**
 * Class that provides an internal indexing over the simple data provided by
 * the authentication configuration file for the memory backend. This allows
 * accessing the different authentication entities through various types of
 * keys.
 */
export default class Indexer {
    accountsBy: {
        canId: { [id: string]: Entity | undefined },
        accessKey: { [id: string]: Entity | undefined },
        email: { [id: string]: Entity | undefined },
    }

    constructor(authdata?: Accounts) {
        this.accountsBy = {
            canId: {},
            accessKey: {},
            email: {},
        };

        /*
         * This may happen if the application is configured to use another
         * authentication backend than in-memory.
         * As such, we're managing the error here to avoid screwing up there.
         */
        if (!authdata) {
            return;
        }

        this.#build(authdata);
    }

    #indexAccount(account: Account) {
        const accountData: Entity = {
            arn: account.arn,
            canonicalID: account.canonicalID,
            shortid: account.shortid,
            accountDisplayName: account.name,
            email: account.email.toLowerCase(),
            keys: [],
        };
        this.accountsBy.canId[accountData.canonicalID] = accountData;
        this.accountsBy.email[accountData.email] = accountData;
        if (account.keys !== undefined) {
            account.keys.forEach(key => {
                accountData.keys.push(key);
                this.accountsBy.accessKey[key.access] = accountData;
            });
        }
    }

    #build(authdata: Accounts) {
        authdata.accounts.forEach(account => {
            this.#indexAccount(account);
        });
    }

    /** This method returns the account associated to a canonical ID. */
    getEntityByCanId(canId: string): Entity | undefined {
        return this.accountsBy.canId[canId];
    }

    /**
     * This method returns the entity (either an account or a user) associated
     * to an access key.
     * @param {string} key - The accessKey of the entity
     */
    getEntityByKey(key: string): Entity | undefined {
        return this.accountsBy.accessKey[key];
    }

    /**
     * This method returns the entity (either an account or a user) associated
     * to an email address.
     */
    getEntityByEmail(email: string): Entity | undefined {
        const lowerCasedEmail = email.toLowerCase();
        return this.accountsBy.email[lowerCasedEmail];
    }

    /** This method returns the secret key associated with the entity. */
    getSecretKey(entity: Entity, accessKey: string) {
        const keys = entity.keys.filter(kv => kv.access === accessKey);
        return keys[0].secret;
    }

    /** This method returns the account display name associated with the entity. */
    getAcctDisplayName(entity: Entity) {
        return entity.accountDisplayName;
    }
}
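The indexer trades a little memory for constant-time lookups by canonical ID, access key, and lowercased email. A short usage sketch; the authdata document is assumed to be the parsed config shown earlier:

import Indexer from './Indexer';
import { Accounts } from './types';

declare const authdata: Accounts;
const indexer = new Indexer(authdata);

const entity = indexer.getEntityByKey('accessKey1');
if (entity) {
    // all three indexes point at the same Entity object
    const secret = indexer.getSecretKey(entity, 'accessKey1');
    const sameEntity = indexer.getEntityByCanId(entity.canonicalID);
}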
@@ -0,0 +1,51 @@
import joi from 'joi';

export type Callback<Data = any> = (err?: Error | null | undefined, data?: Data) => void;

export type Credentials = { access: string; secret: string };
export type Base = {
    arn: string;
    canonicalID: string;
    shortid: string;
    email: string;
    keys: Credentials[];
};
export type Account = Base & { name: string; users: any[] };
export type Accounts = { accounts: Account[] };
export type Entity = Base & { accountDisplayName: string };

const keys = ((): joi.ArraySchema => {
    const str = joi.string().required();
    const items = { access: str, secret: str };
    return joi.array().items(items).required();
})();

const account = (() => {
    return joi.object<Account>({
        name: joi.string().required(),
        email: joi.string().email().required(),
        arn: joi.string().required(),
        canonicalID: joi.string().required(),
        shortid: joi
            .string()
            .regex(/^[0-9]{12}$/)
            .required(),
        keys: keys,
        // backward-compat
        users: joi.array(),
    });
})();

const accounts = (() => {
    return joi.object<Accounts>({
        accounts: joi
            .array()
            .items(account)
            .required()
            .unique('arn')
            .unique('email')
            .unique('canonicalID'),
    });
})();

export const validators = { keys, account, accounts };
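The exported validators can be applied directly to a parsed authdata document; abortEarly mirrors how AuthLoader calls them above:

import { validators } from './types';

declare const parsed: any; // JSON.parse'd authdata document
const { error, value } = validators.accounts.validate(parsed, { abortEarly: true });
if (error) {
    // error.details lists the offending items, as dumped by AuthLoader
}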
@@ -0,0 +1,16 @@
import { Logger } from 'werelogs';
import AuthLoader from './AuthLoader';
import { Accounts } from './types';

/**
 * @deprecated please use {@link AuthLoader} class instead
 * @return true on erroneous data false on success
 */
export default function validateAuthConfig(
    authdata: Accounts,
    logApi?: { Logger: typeof Logger }
) {
    const authLoader = new AuthLoader(logApi);
    authLoader.addAccounts(authdata);
    return !authLoader.validate();
}
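Note the inverted convention this shim keeps for backward compatibility: it returns true when the data is erroneous. A minimal sketch:

import validateAuthConfig from './validateAuthConfig';
import { Accounts } from './types';

declare const authdata: Accounts;
if (validateAuthConfig(authdata)) {
    // true means validation FAILED
    throw new Error('invalid authentication configuration');
}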
@@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-
-const crypto = require('crypto');
+import * as crypto from 'crypto';
 
 /** hashSignature for v2 Auth
  * @param {string} stringToSign - built string to sign per AWS rules
@@ -8,11 +6,19 @@ const crypto = require('crypto');
  * @param {string} algorithm - either SHA256 or SHA1
  * @return {string} reconstructed signature
  */
-function hashSignature(stringToSign, secretKey, algorithm) {
+export function hashSignature(
+    stringToSign: string,
+    secretKey: string,
+    algorithm: 'SHA256' | 'SHA1'
+): string {
     const hmacObject = crypto.createHmac(algorithm, secretKey);
     return hmacObject.update(stringToSign, 'binary').digest('base64');
 }
 
+const sha256Digest = (key: string | Buffer, data: string) => {
+    return crypto.createHmac('sha256', key).update(data, 'binary').digest();
+};
+
 /** calculateSigningKey for v4 Auth
  * @param {string} secretKey - requester's secretKey
  * @param {string} region - region included in request
@@ -20,16 +26,15 @@ function hashSignature(stringToSign, secretKey, algorithm) {
  * @param {string} [service] - To specify another service than s3
  * @return {string} signingKey - signingKey to calculate signature
  */
-function calculateSigningKey(secretKey, region, scopeDate, service) {
-    const dateKey = crypto.createHmac('sha256', `AWS4${secretKey}`)
-        .update(scopeDate, 'binary').digest();
-    const dateRegionKey = crypto.createHmac('sha256', dateKey)
-        .update(region, 'binary').digest();
-    const dateRegionServiceKey = crypto.createHmac('sha256', dateRegionKey)
-        .update(service || 's3', 'binary').digest();
-    const signingKey = crypto.createHmac('sha256', dateRegionServiceKey)
-        .update('aws4_request', 'binary').digest();
+export function calculateSigningKey(
+    secretKey: string,
+    region: string,
+    scopeDate: string,
+    service?: string
+): Buffer {
+    const dateKey = sha256Digest(`AWS4${secretKey}`, scopeDate);
+    const dateRegionKey = sha256Digest(dateKey, region);
+    const dateRegionServiceKey = sha256Digest(dateRegionKey, service || 's3');
+    const signingKey = sha256Digest(dateRegionServiceKey, 'aws4_request');
     return signingKey;
 }
-
-module.exports = { hashSignature, calculateSigningKey };
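The rewritten `calculateSigningKey` is the standard AWS SigV4 key derivation: an HMAC-SHA256 chain over the scope date, region, service, and the literal `aws4_request`. A short sketch of how the two exports are consumed together (key material and strings-to-sign are hypothetical; the v4 flow mirrors the memory backend below):

```ts
import * as crypto from 'crypto';
import { hashSignature, calculateSigningKey } from './vaultUtilities';

// v2: HMAC over the string to sign, base64-encoded
const sigV2 = hashSignature('GET\n\n\n...', 'SECRET_ACCESS_KEY', 'SHA1');

// v4: derive the scoped signing key, then HMAC the string to sign, hex-encoded
const signingKey = calculateSigningKey('SECRET_ACCESS_KEY', 'us-east-1', '20240101');
const sigV4 = crypto.createHmac('sha256', signingKey)
    .update('AWS4-HMAC-SHA256\n...', 'binary').digest('hex');
```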
@@ -1,223 +0,0 @@
const fs = require('fs');
const glob = require('simple-glob');
const joi = require('joi');
const werelogs = require('werelogs');

const ARN = require('../../models/ARN');

/**
 * Load authentication information from files or pre-loaded account
 * objects
 *
 * @class AuthLoader
 */
class AuthLoader {
    constructor(logApi) {
        this._log = new (logApi || werelogs).Logger('S3');
        this._authData = { accounts: [] };
        // null: unknown validity, true/false: valid or invalid
        this._isValid = null;

        this._joiKeysValidator = joi.array()
            .items({
                access: joi.string().required(),
                secret: joi.string().required(),
            })
            .required();

        const accountsJoi = joi.array()
            .items({
                name: joi.string().required(),
                email: joi.string().email().required(),
                arn: joi.string().required(),
                canonicalID: joi.string().required(),
                shortid: joi.string().regex(/^[0-9]{12}$/).required(),
                keys: this._joiKeysValidator,
                // backward-compat
                users: joi.array(),
            })
            .required()
            .unique('arn')
            .unique('email')
            .unique('canonicalID');
        this._joiValidator = joi.object({ accounts: accountsJoi });
    }

    /**
     * add one or more accounts to the authentication info
     *
     * @param {object} authData - authentication data
     * @param {object[]} authData.accounts - array of account data
     * @param {string} authData.accounts[].name - account name
     * @param {string} authData.accounts[].email - email address
     * @param {string} authData.accounts[].arn - account ARN,
     * e.g. 'arn:aws:iam::123456789012:root'
     * @param {string} authData.accounts[].canonicalID - account
     * canonical ID
     * @param {string} authData.accounts[].shortid - account ID number,
     * e.g. '123456789012'
     * @param {object[]} authData.accounts[].keys - array of
     * access/secret keys
     * @param {string} authData.accounts[].keys[].access - access key
     * @param {string} authData.accounts[].keys[].secret - secret key
     * @param {string} [filePath] - optional file path info for
     * logging purpose
     * @return {undefined}
     */
    addAccounts(authData, filePath) {
        const isValid = this._validateData(authData, filePath);
        if (isValid) {
            this._authData.accounts =
                this._authData.accounts.concat(authData.accounts);
            // defer validity checking when getting data to avoid
            // logging multiple times the errors (we need to validate
            // all accounts at once to detect duplicate values)
            if (this._isValid) {
                this._isValid = null;
            }
        } else {
            this._isValid = false;
        }
    }

    /**
     * add account information from a file
     *
     * @param {string} filePath - file path containing JSON
     * authentication info (see {@link addAccounts()} for format)
     * @return {undefined}
     */
    addFile(filePath) {
        const authData = JSON.parse(fs.readFileSync(filePath));
        this.addAccounts(authData, filePath);
    }

    /**
     * add account information from a filesystem path
     *
     * @param {string|string[]} globPattern - filesystem glob pattern,
     * can be a single string or an array of glob patterns. Globs
     * can be simple file paths or can contain glob matching
     * characters, like '/a/b/*.json'. The matching files are
     * individually loaded as JSON and accounts are added. See
     * {@link addAccounts()} for JSON format.
     * @return {undefined}
     */
    addFilesByGlob(globPattern) {
        const files = glob(globPattern);
        files.forEach(filePath => this.addFile(filePath));
    }

    /**
     * perform validation on authentication info previously
     * loaded. Note that it has to be done on the entire set after an
     * update to catch duplicate account IDs or access keys.
     *
     * @return {boolean} true if authentication info is valid
     * false otherwise
     */
    validate() {
        if (this._isValid === null) {
            this._isValid = this._validateData(this._authData);
        }
        return this._isValid;
    }

    /**
     * get authentication info as a plain JS object containing all accounts
     * under the "accounts" attribute, with validation.
     *
     * @return {object|null} the validated authentication data
     * null if invalid
     */
    getData() {
        return this.validate() ? this._authData : null;
    }

    _validateData(authData, filePath) {
        const res = joi.validate(authData, this._joiValidator,
            { abortEarly: false });
        if (res.error) {
            this._dumpJoiErrors(res.error.details, filePath);
            return false;
        }
        let allKeys = [];
        let arnError = false;
        const validatedAuth = res.value;
        validatedAuth.accounts.forEach(account => {
            // backward-compat: ignore arn if starts with 'aws:' and log a
            // warning
            if (account.arn.startsWith('aws:')) {
                this._log.error(
                    'account must have a valid AWS ARN, legacy examples ' +
                    'starting with \'aws:\' are not supported anymore. ' +
                    'Please convert to a proper account entry (see ' +
                    'examples at https://github.com/scality/S3/blob/' +
                    'master/conf/authdata.json). Also note that support ' +
                    'for account users has been dropped.',
                    { accountName: account.name, accountArn: account.arn,
                        filePath });
                arnError = true;
                return;
            }
            if (account.users) {
                this._log.error(
                    'support for account users has been dropped, consider ' +
                    'turning users into account entries (see examples at ' +
                    'https://github.com/scality/S3/blob/master/conf/' +
                    'authdata.json)',
                    { accountName: account.name, accountArn: account.arn,
                        filePath });
                arnError = true;
                return;
            }
            const arnObj = ARN.createFromString(account.arn);
            if (arnObj.error) {
                this._log.error(
                    'authentication config validation error',
                    { reason: arnObj.error.description,
                        accountName: account.name, accountArn: account.arn,
                        filePath });
                arnError = true;
                return;
            }
            if (!arnObj.isIAMAccount()) {
                this._log.error(
                    'authentication config validation error',
                    { reason: 'not an IAM account ARN',
                        accountName: account.name, accountArn: account.arn,
                        filePath });
                arnError = true;
                return;
            }
            allKeys = allKeys.concat(account.keys);
        });
        if (arnError) {
            return false;
        }
        const uniqueKeysRes = joi.validate(
            allKeys, this._joiKeysValidator.unique('access'));
        if (uniqueKeysRes.error) {
            this._dumpJoiErrors(uniqueKeysRes.error.details, filePath);
            return false;
        }
        return true;
    }

    _dumpJoiErrors(errors, filePath) {
        errors.forEach(err => {
            const logInfo = { item: err.path, filePath };
            if (err.type === 'array.unique') {
                logInfo.reason = `duplicate value '${err.context.path}'`;
                logInfo.dupValue = err.context.value[err.context.path];
            } else {
                logInfo.reason = err.message;
                logInfo.context = err.context;
            }
            this._log.error('authentication config validation error',
                logInfo);
        });
    }
}

module.exports = AuthLoader;
@@ -1,189 +0,0 @@
'use strict'; // eslint-disable-line strict

const crypto = require('crypto');

const errors = require('../../errors');
const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
const hashSignature = require('./vaultUtilities').hashSignature;
const Indexer = require('./Indexer');

function _formatResponse(userInfoToSend) {
    return {
        message: {
            body: { userInfo: userInfoToSend },
        },
    };
}

/**
 * Class that provides a memory backend for verifying signatures and getting
 * emails and canonical ids associated with an account.
 *
 * @class Backend
 */
class Backend {
    /**
     * @constructor
     * @param {string} service - service identifier for construction arn
     * @param {Indexer} indexer - indexer instance for retrieving account info
     * @param {function} formatter - function which accepts user info to send
     * back and returns it in an object
     */
    constructor(service, indexer, formatter) {
        this.service = service;
        this.indexer = indexer;
        this.formatResponse = formatter;
    }

    /** verifySignatureV2
     * @param {string} stringToSign - string to sign built per AWS rules
     * @param {string} signatureFromRequest - signature sent with request
     * @param {string} accessKey - account accessKey
     * @param {object} options - contains algorithm (SHA1 or SHA256)
     * @param {function} callback - callback with either error or user info
     * @return {function} calls callback
     */
    verifySignatureV2(stringToSign, signatureFromRequest,
        accessKey, options, callback) {
        const entity = this.indexer.getEntityByKey(accessKey);
        if (!entity) {
            return callback(errors.InvalidAccessKeyId);
        }
        const secretKey = this.indexer.getSecretKey(entity, accessKey);
        const reconstructedSig =
            hashSignature(stringToSign, secretKey, options.algo);
        if (signatureFromRequest !== reconstructedSig) {
            return callback(errors.SignatureDoesNotMatch);
        }
        const userInfoToSend = {
            accountDisplayName: this.indexer.getAcctDisplayName(entity),
            canonicalID: entity.canonicalID,
            arn: entity.arn,
            IAMdisplayName: entity.IAMdisplayName,
        };
        const vaultReturnObject = this.formatResponse(userInfoToSend);
        return callback(null, vaultReturnObject);
    }

    /** verifySignatureV4
     * @param {string} stringToSign - string to sign built per AWS rules
     * @param {string} signatureFromRequest - signature sent with request
     * @param {string} accessKey - account accessKey
     * @param {string} region - region specified in request credential
     * @param {string} scopeDate - date specified in request credential
     * @param {object} options - options to send to Vault
     * (just contains reqUid for logging in Vault)
     * @param {function} callback - callback with either error or user info
     * @return {function} calls callback
     */
    verifySignatureV4(stringToSign, signatureFromRequest, accessKey,
        region, scopeDate, options, callback) {
        const entity = this.indexer.getEntityByKey(accessKey);
        if (!entity) {
            return callback(errors.InvalidAccessKeyId);
        }
        const secretKey = this.indexer.getSecretKey(entity, accessKey);
        const signingKey = calculateSigningKey(secretKey, region, scopeDate);
        const reconstructedSig = crypto.createHmac('sha256', signingKey)
            .update(stringToSign, 'binary').digest('hex');
        if (signatureFromRequest !== reconstructedSig) {
            return callback(errors.SignatureDoesNotMatch);
        }
        const userInfoToSend = {
            accountDisplayName: this.indexer.getAcctDisplayName(entity),
            canonicalID: entity.canonicalID,
            arn: entity.arn,
            IAMdisplayName: entity.IAMdisplayName,
        };
        const vaultReturnObject = this.formatResponse(userInfoToSend);
        return callback(null, vaultReturnObject);
    }

    /**
     * Gets canonical IDs for a list of accounts
     * based on email associated with account
     * @param {array} emails - list of email addresses
     * @param {object} log - log object
     * @param {function} cb - callback to calling function
     * @returns {function} callback with either error or
     * object with email addresses as keys and canonical IDs
     * as values
     */
    getCanonicalIds(emails, log, cb) {
        const results = {};
        emails.forEach(email => {
            const lowercasedEmail = email.toLowerCase();
            const entity = this.indexer.getEntityByEmail(lowercasedEmail);
            if (!entity) {
                results[email] = 'NotFound';
            } else {
                results[email] = entity.canonicalID;
            }
        });
        const vaultReturnObject = {
            message: {
                body: results,
            },
        };
        return cb(null, vaultReturnObject);
    }

    /**
     * Gets email addresses (referred to as display names for getACLs)
     * for a list of accounts based on canonical IDs associated with account
     * @param {array} canonicalIDs - list of canonicalIDs
     * @param {object} options - to send log id to vault
     * @param {function} cb - callback to calling function
     * @returns {function} callback with either error or
     * an object from Vault containing account canonicalID
     * as each object key and an email address as the value (or "NotFound")
     */
    getEmailAddresses(canonicalIDs, options, cb) {
        const results = {};
        canonicalIDs.forEach(canonicalId => {
            const foundEntity = this.indexer.getEntityByCanId(canonicalId);
            if (!foundEntity || !foundEntity.email) {
                results[canonicalId] = 'NotFound';
            } else {
                results[canonicalId] = foundEntity.email;
            }
        });
        const vaultReturnObject = {
            message: {
                body: results,
            },
        };
        return cb(null, vaultReturnObject);
    }
}

class S3AuthBackend extends Backend {
    /**
     * @constructor
     * @param {object} authdata - the authentication config file's data
     * @param {object[]} authdata.accounts - array of account objects
     * @param {string=} authdata.accounts[].name - account name
     * @param {string} authdata.accounts[].email - account email
     * @param {string} authdata.accounts[].arn - IAM resource name
     * @param {string} authdata.accounts[].canonicalID - account canonical ID
     * @param {string} authdata.accounts[].shortid - short account ID
     * @param {object[]=} authdata.accounts[].keys - array of key objects
     * @param {string} authdata.accounts[].keys[].access - access key
     * @param {string} authdata.accounts[].keys[].secret - secret key
     * @return {undefined}
     */
    constructor(authdata) {
        super('s3', new Indexer(authdata), _formatResponse);
    }

    refreshAuthData(authData) {
        this.indexer = new Indexer(authData);
    }
}

module.exports = {
    s3: S3AuthBackend,
};
@@ -1,145 +0,0 @@
/**
 * Class that provides an internal indexing over the simple data provided by
 * the authentication configuration file for the memory backend. This allows
 * accessing the different authentication entities through various types of
 * keys.
 *
 * @class Indexer
 */
class Indexer {
    /**
     * @constructor
     * @param {object} authdata - the authentication config file's data
     * @param {object[]} authdata.accounts - array of account objects
     * @param {string=} authdata.accounts[].name - account name
     * @param {string} authdata.accounts[].email - account email
     * @param {string} authdata.accounts[].arn - IAM resource name
     * @param {string} authdata.accounts[].canonicalID - account canonical ID
     * @param {string} authdata.accounts[].shortid - short account ID
     * @param {object[]=} authdata.accounts[].keys - array of key objects
     * @param {string} authdata.accounts[].keys[].access - access key
     * @param {string} authdata.accounts[].keys[].secret - secret key
     * @return {undefined}
     */
    constructor(authdata) {
        this.accountsBy = {
            canId: {},
            accessKey: {},
            email: {},
        };

        /*
         * This may happen if the application is configured to use another
         * authentication backend than in-memory.
         * As such, we're managing the error here to avoid screwing up there.
         */
        if (!authdata) {
            return;
        }

        this._build(authdata);
    }

    _indexAccount(account) {
        const accountData = {
            arn: account.arn,
            canonicalID: account.canonicalID,
            shortid: account.shortid,
            accountDisplayName: account.name,
            email: account.email.toLowerCase(),
            keys: [],
        };
        this.accountsBy.canId[accountData.canonicalID] = accountData;
        this.accountsBy.email[accountData.email] = accountData;
        if (account.keys !== undefined) {
            account.keys.forEach(key => {
                accountData.keys.push(key);
                this.accountsBy.accessKey[key.access] = accountData;
            });
        }
    }

    _build(authdata) {
        authdata.accounts.forEach(account => {
            this._indexAccount(account);
        });
    }

    /**
     * This method returns the account associated to a canonical ID.
     *
     * @param {string} canId - The canonicalId of the account
     * @return {Object} account - The account object
     * @return {Object} account.arn - The account's ARN
     * @return {Object} account.canonicalID - The account's canonical ID
     * @return {Object} account.shortid - The account's internal shortid
     * @return {Object} account.accountDisplayName - The account's display name
     * @return {Object} account.email - The account's lowercased email
     */
    getEntityByCanId(canId) {
        return this.accountsBy.canId[canId];
    }

    /**
     * This method returns the entity (either an account or a user) associated
     * to an access key.
     *
     * @param {string} key - The accessKey of the entity
     * @return {Object} entity - The entity object
     * @return {Object} entity.arn - The entity's ARN
     * @return {Object} entity.canonicalID - The canonical ID for the entity's
     * account
     * @return {Object} entity.shortid - The entity's internal shortid
     * @return {Object} entity.accountDisplayName - The entity's account
     * display name
     * @return {Object} entity.IAMDisplayName - The user's display name
     * (if the entity is a user)
     * @return {Object} entity.email - The entity's lowercased email
     */
    getEntityByKey(key) {
        return this.accountsBy.accessKey[key];
    }

    /**
     * This method returns the entity (either an account or a user) associated
     * to an email address.
     *
     * @param {string} email - The email address
     * @return {Object} entity - The entity object
     * @return {Object} entity.arn - The entity's ARN
     * @return {Object} entity.canonicalID - The canonical ID for the entity's
     * account
     * @return {Object} entity.shortid - The entity's internal shortid
     * @return {Object} entity.accountDisplayName - The entity's account
     * display name
     * @return {Object} entity.IAMDisplayName - The user's display name
     * (if the entity is a user)
     * @return {Object} entity.email - The entity's lowercased email
     */
    getEntityByEmail(email) {
        const lowerCasedEmail = email.toLowerCase();
        return this.accountsBy.email[lowerCasedEmail];
    }

    /**
     * This method returns the secret key associated with the entity.
     * @param {Object} entity - the entity object
     * @param {string} accessKey - access key
     * @returns {string} secret key
     */
    getSecretKey(entity, accessKey) {
        return entity.keys
            .filter(kv => kv.access === accessKey)[0].secret;
    }

    /**
     * This method returns the account display name associated with the entity.
     * @param {Object} entity - the entity object
     * @returns {string} account display name
     */
    getAcctDisplayName(entity) {
        return entity.accountDisplayName;
    }
}

module.exports = Indexer;
@@ -1,18 +0,0 @@
const AuthLoader = require('./AuthLoader');

/**
 * @deprecated please use {@link AuthLoader} class instead
 *
 * @param {object} authdata - the authentication config file's data
 * @param {werelogs.API} logApi - object providing a constructor function
 * for the Logger object
 * @return {boolean} true on erroneous data
 * false on success
 */
function validateAuthConfig(authdata, logApi) {
    const authLoader = new AuthLoader(logApi);
    authLoader.addAccounts(authdata);
    return !authLoader.validate();
}

module.exports = validateAuthConfig;
@@ -1,7 +1,5 @@
-'use strict'; // eslint-disable-line strict
-
-function algoCheck(signatureLength) {
-    let algo;
+export default function algoCheck(signatureLength: number) {
+    let algo: 'sha256' | 'sha1';
     // If the signature sent is 44 characters,
     // this means that sha256 was used:
     // 44 characters in base64
@@ -13,7 +11,6 @@ function algoCheck(signatureLength) {
     if (signatureLength === SHA1LEN) {
         algo = 'sha1';
     }
+    // @ts-ignore
     return algo;
 }
-
-module.exports = algoCheck;
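The base64 output length pins down the HMAC here: SHA-1 digests are 20 bytes (28 base64 characters) and SHA-256 digests are 32 bytes (44 base64 characters). Hence:

```ts
import algoCheck from './algoCheck';

algoCheck(44); // => 'sha256'
algoCheck(28); // => 'sha1'
algoCheck(10); // => undefined (hence the @ts-ignore on the return)
```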
@@ -1,11 +0,0 @@
'use strict'; // eslint-disable-line strict

const headerAuthCheck = require('./headerAuthCheck');
const queryAuthCheck = require('./queryAuthCheck');

const authV2 = {
    header: headerAuthCheck,
    query: queryAuthCheck,
};

module.exports = authV2;
@@ -0,0 +1,2 @@
export * as header from './headerAuthCheck';
export * as query from './queryAuthCheck';
@@ -1,9 +1,9 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../errors');
+import { Logger } from 'werelogs';
+import errors from '../../errors';
 
 const epochTime = new Date('1970-01-01').getTime();
 
-function checkRequestExpiry(timestamp, log) {
+export default function checkRequestExpiry(timestamp: number, log: Logger) {
     // If timestamp is before epochTime, the request is invalid and return
     // errors.AccessDenied
     if (timestamp < epochTime) {
@@ -32,5 +32,3 @@ function checkRequestExpiry(timestamp, log) {
 
     return undefined;
 }
-
-module.exports = checkRequestExpiry;
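A quick sanity sketch of the epoch guard (plain Node date arithmetic, nothing repo-specific):

```ts
const epochTime = new Date('1970-01-01').getTime(); // 0

// Any timestamp parsed to a pre-epoch (negative) value fails the check:
Date.parse('Wed, 31 Dec 1969 23:59:59 GMT') < epochTime; // true -> AccessDenied
```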
@@ -1,11 +1,14 @@
-'use strict'; // eslint-disable-line strict
-
-const utf8 = require('utf8');
-const getCanonicalizedAmzHeaders = require('./getCanonicalizedAmzHeaders');
-const getCanonicalizedResource = require('./getCanonicalizedResource');
+import { Logger } from 'werelogs';
+import utf8 from 'utf8';
+import getCanonicalizedAmzHeaders from './getCanonicalizedAmzHeaders';
+import getCanonicalizedResource from './getCanonicalizedResource';
 
-function constructStringToSign(request, data, log) {
+export default function constructStringToSign(
+    request: any,
+    data: { [key: string]: string },
+    log: Logger,
+    clientType?: any
+) {
     /*
     Build signature per AWS requirements:
     StringToSign = HTTP-Verb + '\n' +
@@ -38,9 +41,7 @@ function constructStringToSign(request, data, log) {
     const date = query.Expires ? query.Expires : headers.date;
     const combinedQueryHeaders = Object.assign({}, headers, query);
     stringToSign += (date ? `${date}\n` : '\n')
-        + getCanonicalizedAmzHeaders(combinedQueryHeaders)
-        + getCanonicalizedResource(request);
+        + getCanonicalizedAmzHeaders(combinedQueryHeaders, clientType)
+        + getCanonicalizedResource(request, clientType);
     return utf8.encode(stringToSign);
 }
-
-module.exports = constructStringToSign;
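For reference, the v2 StringToSign layout this function assembles, with the example values taken from the AWS signature v2 documentation:

```ts
const stringToSign =
    'GET\n' +                              // HTTP-Verb
    '\n' +                                 // Content-MD5 (empty here)
    '\n' +                                 // Content-Type (empty here)
    'Thu, 17 Nov 2005 18:49:58 GMT\n' +    // Date, or query Expires
    'x-amz-magic:abracadabra\n' +          // CanonicalizedAmzHeaders
    '/quotes/nelson';                      // CanonicalizedResource
```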
@@ -1,13 +1,14 @@
-'use strict'; // eslint-disable-line strict
-
-function getCanonicalizedAmzHeaders(headers) {
+export default function getCanonicalizedAmzHeaders(headers: Headers, clientType: string) {
     /*
     Iterate through headers and pull any headers that are x-amz headers.
     Need to include 'x-amz-date' here even though AWS docs
     ambiguous on this.
     */
+    const filterFn = clientType === 'GCP' ?
+        (val: string) => val.substr(0, 7) === 'x-goog-' :
+        (val: string) => val.substr(0, 6) === 'x-amz-';
     const amzHeaders = Object.keys(headers)
-        .filter(val => val.substr(0, 6) === 'x-amz-')
+        .filter(filterFn)
         .map(val => [val.trim(), headers[val].trim()]);
     /*
     AWS docs state that duplicate headers should be combined
@@ -40,5 +41,3 @@ function getCanonicalizedAmzHeaders(headers) {
         `${headerStr}${current[0]}:${current[1]}\n`,
     '');
 }
-
-module.exports = getCanonicalizedAmzHeaders;
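A quick sketch of the filter switch introduced above, with hypothetical header names:

```ts
const headerNames = ['x-amz-date', 'x-goog-acl', 'content-type'];

// default (AWS) prefix filter:
headerNames.filter(val => val.substr(0, 6) === 'x-amz-');  // ['x-amz-date']
// clientType === 'GCP' prefix filter:
headerNames.filter(val => val.substr(0, 7) === 'x-goog-'); // ['x-goog-acl']
```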
@@ -1,8 +1,45 @@
-'use strict'; // eslint-disable-line strict
+import * as url from 'url';
 
-const url = require('url');
+const gcpSubresources = [
+    'acl',
+    'billing',
+    'compose',
+    'cors',
+    'encryption',
+    'lifecycle',
+    'location',
+    'logging',
+    'storageClass',
+    'tagging',
+    'upload_id',
+    'versioning',
+    'versions',
+    'websiteConfig',
+];
 
-function getCanonicalizedResource(request) {
+const awsSubresources = [
+    'acl',
+    'cors',
+    'delete',
+    'lifecycle',
+    'location',
+    'logging',
+    'notification',
+    'partNumber',
+    'policy',
+    'requestPayment',
+    'tagging',
+    'torrent',
+    'uploadId',
+    'uploads',
+    'versionId',
+    'versioning',
+    'replication',
+    'versions',
+    'website',
+];
+
+export default function getCanonicalizedResource(request: any, clientType: string) {
     /*
     This variable is used to determine whether to insert
     a '?' or '&'. Once a query parameter is added to the resourceString,
@@ -24,27 +61,8 @@ function getCanonicalizedResource(request) {
     */
 
     // Specified subresources:
-    const subresources = [
-        'acl',
-        'cors',
-        'delete',
-        'lifecycle',
-        'location',
-        'logging',
-        'notification',
-        'partNumber',
-        'policy',
-        'requestPayment',
-        'tagging',
-        'torrent',
-        'uploadId',
-        'uploads',
-        'versionId',
-        'versioning',
-        'replication',
-        'versions',
-        'website',
-    ];
+    const subresources =
+        clientType === 'GCP' ? gcpSubresources : awsSubresources;
 
     /*
     If the request includes parameters in the query string,
@@ -97,5 +115,3 @@ function getCanonicalizedResource(request) {
     }
     return resourceString;
 }
-
-module.exports = getCanonicalizedResource;
@@ -1,12 +1,11 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../errors');
-const constants = require('../../constants');
-const constructStringToSign = require('./constructStringToSign');
-const checkRequestExpiry = require('./checkRequestExpiry');
-const algoCheck = require('./algoCheck');
+import { Logger } from 'werelogs';
+import errors from '../../errors';
+import * as constants from '../../constants';
+import constructStringToSign from './constructStringToSign';
+import checkRequestExpiry from './checkRequestExpiry';
+import algoCheck from './algoCheck';
 
-function check(request, log, data) {
+export function check(request: any, log: Logger, data: { [key: string]: string }) {
     log.trace('running header auth check');
     const headers = request.headers;
 
@@ -52,6 +51,7 @@ function check(request, log, data) {
         log.trace('invalid authorization header', { authInfo });
         return { err: errors.MissingSecurityHeader };
     }
+    // @ts-ignore
     log.addDefaultFields({ accessKey });
 
     const signatureFromRequest = authInfo.substring(semicolonIndex + 1).trim();
@@ -80,5 +80,3 @@ function check(request, log, data) {
         },
     };
 }
-
-module.exports = { check };
@@ -1,11 +1,10 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../errors');
-const constants = require('../../constants');
-const algoCheck = require('./algoCheck');
-const constructStringToSign = require('./constructStringToSign');
+import { Logger } from 'werelogs';
+import errors from '../../errors';
+import * as constants from '../../constants';
+import algoCheck from './algoCheck';
+import constructStringToSign from './constructStringToSign';
 
-function check(request, log, data) {
+export function check(request: any, log: Logger, data: { [key: string]: string }) {
     log.trace('running query auth check');
     if (request.method === 'POST') {
         log.debug('query string auth not supported for post requests');
@@ -20,8 +19,8 @@ function check(request, log, data) {
 
     /*
     Check whether request has expired or if
-    expires parameter is more than 100000000 milliseconds
-    (1 day and 4 hours) in the future.
+    expires parameter is more than 604800000 milliseconds
+    (7 days) in the future.
     Expires time is provided in seconds so need to
     multiply by 1000 to obtain
     milliseconds to compare to Date.now()
@@ -34,8 +33,13 @@ function check(request, log, data) {
     }
 
     const currentTime = Date.now();
-    // 100000000 ms (one day and 4 hours).
-    if (expirationTime > currentTime + 100000000) {
+
+    const preSignedURLExpiry = process.env.PRE_SIGN_URL_EXPIRY
+        && !Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY)
+        ? Number.parseInt(process.env.PRE_SIGN_URL_EXPIRY, 10)
+        : constants.defaultPreSignedURLExpiry * 1000;
+
+    if (expirationTime > currentTime + preSignedURLExpiry) {
         log.debug('expires parameter too far in future',
             { expires: request.query.Expires });
         return { err: errors.AccessDenied };
@@ -46,6 +50,7 @@ function check(request, log, data) {
         return { err: errors.RequestTimeTooSkewed };
     }
     const accessKey = data.AWSAccessKeyId;
+    // @ts-ignore
     log.addDefaultFields({ accessKey });
 
     const signatureFromRequest = decodeURIComponent(data.Signature);
@@ -77,5 +82,3 @@ function check(request, log, data) {
         },
     };
 }
-
-module.exports = { check };
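A sketch of the new expiry override, assuming `constants.defaultPreSignedURLExpiry` is expressed in seconds (the `* 1000` conversion suggests so) while the environment variable is taken as milliseconds:

```ts
// Cap pre-signed URL validity at 1 hour instead of the default:
process.env.PRE_SIGN_URL_EXPIRY = String(60 * 60 * 1000);

// The check above then becomes:
//   expirationTime > Date.now() + 3600000  ->  AccessDenied
```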
@@ -1,11 +0,0 @@
'use strict'; // eslint-disable-line strict

const headerAuthCheck = require('./headerAuthCheck');
const queryAuthCheck = require('./queryAuthCheck');

const authV4 = {
    header: headerAuthCheck,
    query: queryAuthCheck,
};

module.exports = authV4;
@@ -0,0 +1,2 @@
export * as header from './headerAuthCheck';
export * as query from './queryAuthCheck';
@@ -1,5 +1,3 @@
-'use strict'; // eslint-disable-line strict
-
 /*
 AWS's URI encoding rules:
 URI encode every byte. Uri-Encode() must enforce the following rules:
@@ -19,7 +17,7 @@ See http://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-header-based-auth.html
 */
 
 // converts utf8 character to hex and pads "%" before every two hex digits
-function _toHexUTF8(char) {
+function _toHexUTF8(char: string) {
     const hexRep = Buffer.from(char, 'utf8').toString('hex').toUpperCase();
     let res = '';
     hexRep.split('').forEach((v, n) => {
@@ -32,11 +30,23 @@ function _toHexUTF8(char) {
     return res;
 }
 
-function awsURIencode(input, encodeSlash) {
-    const encSlash = encodeSlash === undefined ? true : encodeSlash;
-    let encoded = '';
+export default function awsURIencode(
+    input: string,
+    encodeSlash?: boolean,
+    noEncodeStar?: boolean
+) {
+    /**
+     * Duplicate query params are not supported by AWS S3 APIs. These params
+     * are parsed as Arrays by Node.js HTTP parser which breaks this method
+     */
+    if (typeof input !== 'string') {
+        return '';
+    }
+    let encoded = '';
+    const slash = encodeSlash === undefined || encodeSlash ? '%2F' : '/';
+    const star = noEncodeStar !== undefined && noEncodeStar ? '*' : '%2A';
     for (let i = 0; i < input.length; i++) {
-        const ch = input.charAt(i);
+        let ch = input.charAt(i);
         if ((ch >= 'A' && ch <= 'Z') ||
             (ch >= 'a' && ch <= 'z') ||
             (ch >= '0' && ch <= '9') ||
@@ -46,12 +56,26 @@ function awsURIencode(input, encodeSlash) {
         } else if (ch === ' ') {
             encoded = encoded.concat('%20');
         } else if (ch === '/') {
-            encoded = encoded.concat(encSlash ? '%2F' : ch);
+            encoded = encoded.concat(slash);
+        } else if (ch === '*') {
+            encoded = encoded.concat(star);
         } else {
+            if (ch >= '\uD800' && ch <= '\uDBFF') {
+                // If this character is a high surrogate peek the next character
+                // and join it with this one if the next character is a low
+                // surrogate.
+                // Otherwise the encoded URI will contain the two surrogates as
+                // two distinct UTF-8 sequences which is not valid UTF-8.
+                if (i + 1 < input.length) {
+                    const ch2 = input.charAt(i + 1);
+                    if (ch2 >= '\uDC00' && ch2 <= '\uDFFF') {
+                        i++;
+                        ch += ch2;
+                    }
+                }
+            }
             encoded = encoded.concat(_toHexUTF8(ch));
         }
     }
     return encoded;
 }
-
-module.exports = awsURIencode;
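A sketch of the encoder's edge cases. The surrogate-pair join matters for characters outside the Basic Multilingual Plane, such as U+1F600 (😀), which UTF-16 stores as the pair `\uD83D\uDE00`:

```ts
import awsURIencode from './awsURIencode';

awsURIencode('a/b');             // => 'a%2Fb' (slashes encoded by default)
awsURIencode('a/b', false);      // => 'a/b'
awsURIencode('a*b', true, true); // => 'a*b' (java-sdk compatibility path)
awsURIencode('\u{1F600}');       // => '%F0%9F%98%80', one valid UTF-8
                                 // sequence rather than two encoded
                                 // lone surrogates
```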
@@ -1,17 +1,33 @@
-'use strict'; // eslint-disable-line strict
-
-const crypto = require('crypto');
-
-const createCanonicalRequest = require('./createCanonicalRequest');
+import * as crypto from 'crypto';
+import { Logger } from 'werelogs';
+import createCanonicalRequest from './createCanonicalRequest';
 
 /**
  * constructStringToSign - creates V4 stringToSign
  * @param {object} params - params object
  * @returns {string} - stringToSign
  */
-function constructStringToSign(params) {
-    const { request, signedHeaders, payloadChecksum, credentialScope, timestamp,
-        query, log, proxyPath } = params;
+export default function constructStringToSign(params: {
+    request: any;
+    signedHeaders: any;
+    payloadChecksum: any;
+    credentialScope: string;
+    timestamp: string;
+    query: { [key: string]: string };
+    log?: Logger;
+    proxyPath?: string;
+    awsService: string;
+}): string | Error {
+    const {
+        request,
+        signedHeaders,
+        payloadChecksum,
+        credentialScope,
+        timestamp,
+        query,
+        log,
+        proxyPath,
+    } = params;
     const path = proxyPath || request.path;
 
     const canonicalReqResult = createCanonicalRequest({
@@ -24,6 +40,8 @@ function constructStringToSign(params) {
         service: params.awsService,
     });
 
+    // TODO Why that line?
+    // @ts-ignore
     if (canonicalReqResult instanceof Error) {
         if (log) {
             log.error('error creating canonicalRequest');
@@ -40,5 +58,3 @@ function constructStringToSign(params) {
         `${credentialScope}\n${canonicalHex}`;
     return stringToSign;
 }
-
-module.exports = constructStringToSign;
@@ -1,34 +1,48 @@
-'use strict'; // eslint-disable-line strict
-
-const awsURIencode = require('./awsURIencode');
-const crypto = require('crypto');
-const queryString = require('querystring');
+import * as crypto from 'crypto';
+import * as queryString from 'querystring';
+import awsURIencode from './awsURIencode';
 
 /**
  * createCanonicalRequest - creates V4 canonical request
- * @param {object} params - contains pHttpVerb (request type),
+ * @param params - contains pHttpVerb (request type),
  * pResource (parsed from URL), pQuery (request query),
  * pHeaders (request headers), pSignedHeaders (signed headers from request),
  * payloadChecksum (from request)
- * @returns {string} - canonicalRequest
+ * @returns - canonicalRequest
  */
-function createCanonicalRequest(params) {
+export default function createCanonicalRequest(
+    params: {
+        pHttpVerb: string;
+        pResource: string;
+        pQuery: { [key: string]: string };
+        pHeaders: any;
+        pSignedHeaders: any;
+        service: string;
+        payloadChecksum: string;
+    }
+) {
     const pHttpVerb = params.pHttpVerb;
     const pResource = params.pResource;
     const pQuery = params.pQuery;
     const pHeaders = params.pHeaders;
     const pSignedHeaders = params.pSignedHeaders;
     const service = params.service;
 
     let payloadChecksum = params.payloadChecksum;
 
     if (!payloadChecksum) {
         if (pHttpVerb === 'GET') {
             payloadChecksum = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b' +
                 '934ca495991b7852b855';
         } else if (pHttpVerb === 'POST') {
-            let payload = queryString.stringify(pQuery, null, null, {
-                encodeURIComponent: awsURIencode,
-            });
+            let notEncodeStar = false;
+            // The java sdk does not encode the '*' parameter to compute the
+            // signature, if the user-agent is recognized, we need to keep
+            // the plain '*' as well.
+            if (/aws-sdk-java\/[0-9.]+/.test(pHeaders['user-agent'])) {
+                notEncodeStar = true;
+            }
+            let payload = queryString.stringify(pQuery, undefined, undefined, {
+                encodeURIComponent: input => awsURIencode(input, true,
+                    notEncodeStar),
+            });
             payload = payload.replace(/%20/g, '+');
             payloadChecksum = crypto.createHash('sha256')
@@ -53,11 +67,11 @@ function createCanonicalRequest(params) {
 
     // signed headers
     const signedHeadersList = pSignedHeaders.split(';');
-    signedHeadersList.sort((a, b) => a.localeCompare(b));
+    signedHeadersList.sort((a: any, b: any) => a.localeCompare(b));
     const signedHeaders = signedHeadersList.join(';');
 
     // canonical headers
-    const canonicalHeadersList = signedHeadersList.map(signedHeader => {
+    const canonicalHeadersList = signedHeadersList.map((signedHeader: any) => {
         if (pHeaders[signedHeader] !== undefined) {
             const trimmedHeader = pHeaders[signedHeader]
                 .trim().replace(/\s+/g, ' ');
@@ -79,5 +93,3 @@ function createCanonicalRequest(params) {
         `${signedHeaders}\n${payloadChecksum}`;
     return canonicalRequest;
 }
-
-module.exports = createCanonicalRequest;
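The hard-coded checksum used for GET requests above is simply the SHA-256 of an empty payload; a quick check with standard Node crypto confirms it:

```ts
import * as crypto from 'crypto';

crypto.createHash('sha256').update('').digest('hex');
// => 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
```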
@@ -1,27 +1,32 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../../lib/errors');
-const constants = require('../../constants');
-
-const constructStringToSign = require('./constructStringToSign');
-const checkTimeSkew = require('./timeUtils').checkTimeSkew;
-const convertUTCtoISO8601 = require('./timeUtils').convertUTCtoISO8601;
-const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs;
-const extractAuthItems = require('./validateInputs').extractAuthItems;
-const validateCredentials = require('./validateInputs').validateCredentials;
-const areSignedHeadersComplete =
-    require('./validateInputs').areSignedHeadersComplete;
+import { Logger } from 'werelogs';
+import errors from '../../../lib/errors';
+import * as constants from '../../constants';
+import constructStringToSign from './constructStringToSign';
+import {
+    checkTimeSkew,
+    convertUTCtoISO8601,
+    convertAmzTimeToMs,
+} from './timeUtils';
+import {
+    extractAuthItems,
+    validateCredentials,
+    areSignedHeadersComplete,
+} from './validateInputs';
 
 /**
  * V4 header auth check
- * @param {object} request - HTTP request object
- * @param {object} log - logging object
- * @param {object} data - Parameters from queryString parsing or body of
+ * @param request - HTTP request object
+ * @param log - logging object
+ * @param data - Parameters from queryString parsing or body of
  * POST request
- * @param {string} awsService - Aws service ('iam' or 's3')
- * @return {callback} calls callback
+ * @param awsService - Aws service ('iam' or 's3')
  */
-function check(request, log, data, awsService) {
+export function check(
+    request: any,
+    log: Logger,
+    data: { [key: string]: string },
+    awsService: string
+) {
     log.trace('running header auth check');
 
     const token = request.headers['x-amz-security-token'];
@@ -62,16 +67,16 @@ function check(request, log, data, awsService) {
 
     log.trace('authorization header from request', { authHeader });
 
-    const signatureFromRequest = authHeaderItems.signatureFromRequest;
-    const credentialsArr = authHeaderItems.credentialsArr;
-    const signedHeaders = authHeaderItems.signedHeaders;
+    const signatureFromRequest = authHeaderItems.signatureFromRequest!;
+    const credentialsArr = authHeaderItems.credentialsArr!;
+    const signedHeaders = authHeaderItems.signedHeaders!;
 
     if (!areSignedHeadersComplete(signedHeaders, request.headers)) {
         log.debug('signedHeaders are incomplete', { signedHeaders });
         return { err: errors.AccessDenied };
     }
 
-    let timestamp;
+    let timestamp: string | undefined;
     // check request timestamp
     const xAmzDate = request.headers['x-amz-date'];
     if (xAmzDate) {
@@ -127,6 +132,17 @@ function check(request, log, data, awsService) {
         return { err: errors.RequestTimeTooSkewed };
     }
 
+    let proxyPath: string | undefined;
+    if (request.headers.proxy_path) {
+        try {
+            proxyPath = decodeURIComponent(request.headers.proxy_path);
+        } catch (err) {
+            log.debug('invalid proxy_path header', { proxyPath, err });
+            return { err: errors.InvalidArgument.customizeDescription(
+                'invalid proxy_path header') };
+        }
+    }
+
     const stringToSign = constructStringToSign({
         log,
         request,
@@ -136,6 +152,7 @@ function check(request, log, data, awsService) {
         timestamp,
         payloadChecksum,
         awsService: service,
+        proxyPath,
     });
     log.trace('constructed stringToSign', { stringToSign });
     if (stringToSign instanceof Error) {
@@ -166,5 +183,3 @@ function check(request, log, data, awsService) {
         },
     };
 }
-
-module.exports = { check };
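The try/catch around `decodeURIComponent` is needed because malformed percent-encoding throws in plain Node semantics:

```ts
decodeURIComponent('%2Fproxy%2Fpath'); // => '/proxy/path'
decodeURIComponent('%E0%A4%A');        // throws URIError: URI malformed
```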
@@ -1,24 +1,18 @@
-'use strict'; // eslint-disable-line strict
-
-const constants = require('../../constants');
-const errors = require('../../errors');
-
-const constructStringToSign = require('./constructStringToSign');
-const checkTimeSkew = require('./timeUtils').checkTimeSkew;
-const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs;
-const validateCredentials = require('./validateInputs').validateCredentials;
-const extractQueryParams = require('./validateInputs').extractQueryParams;
-const areSignedHeadersComplete =
-    require('./validateInputs').areSignedHeadersComplete;
+import { Logger } from 'werelogs';
+
+import * as constants from '../../constants';
+import errors from '../../errors';
+import constructStringToSign from './constructStringToSign';
+import { checkTimeSkew, convertAmzTimeToMs } from './timeUtils';
+import { validateCredentials, extractQueryParams } from './validateInputs';
+import { areSignedHeadersComplete } from './validateInputs';
 
 /**
  * V4 query auth check
- * @param {object} request - HTTP request object
- * @param {object} log - logging object
- * @param {object} data - Contain authentification params (GET or POST data)
- * @return {callback} calls callback
+ * @param request - HTTP request object
+ * @param log - logging object
+ * @param data - Contain authentification params (GET or POST data)
  */
-function check(request, log, data) {
+export function check(request: any, log: Logger, data: { [key: string]: string }) {
     const authParams = extractQueryParams(data, log);
 
     if (Object.keys(authParams).length !== 5) {
@@ -33,11 +27,11 @@ function check(request, log, data) {
         return { err: errors.InvalidToken };
     }
 
-    const signedHeaders = authParams.signedHeaders;
-    const signatureFromRequest = authParams.signatureFromRequest;
-    const timestamp = authParams.timestamp;
-    const expiry = authParams.expiry;
-    const credential = authParams.credential;
+    const signedHeaders = authParams.signedHeaders!;
+    const signatureFromRequest = authParams.signatureFromRequest!;
+    const timestamp = authParams.timestamp!;
+    const expiry = authParams.expiry!;
+    const credential = authParams.credential!;
 
     if (!areSignedHeadersComplete(signedHeaders, request.headers)) {
         log.debug('signedHeaders are incomplete', { signedHeaders });
@@ -62,6 +56,17 @@ function check(request, log, data) {
         return { err: errors.RequestTimeTooSkewed };
     }
 
+    let proxyPath: string | undefined;
+    if (request.headers.proxy_path) {
+        try {
+            proxyPath = decodeURIComponent(request.headers.proxy_path);
+        } catch (err) {
+            log.debug('invalid proxy_path header', { proxyPath });
+            return { err: errors.InvalidArgument.customizeDescription(
+                'invalid proxy_path header') };
+        }
+    }
+
     // In query v4 auth, the canonical request needs
     // to include the query params OTHER THAN
     // the signature so create a
@@ -87,6 +92,7 @@ function check(request, log, data) {
         credentialScope:
             `${scopeDate}/${region}/${service}/${requestType}`,
         awsService: service,
+        proxyPath,
     });
     if (stringToSign instanceof Error) {
         return { err: stringToSign };
@@ -110,5 +116,3 @@ function check(request, log, data) {
         },
     };
 }
-
-module.exports = { check };
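For reference, the `Object.keys(authParams).length !== 5` guard above expects exactly the five keys that extractQueryParams() can set. A sketch with illustrative values (the key names are from the diff; the values are made up):

// The five params a valid presigned-URL request must yield; values are fake.
const authParams = {
    signedHeaders: 'host;x-amz-content-sha256;x-amz-date',
    signatureFromRequest: 'ab'.repeat(32),
    timestamp: '20160202T220410Z',
    expiry: 86400,
    credential: ['AKIDEXAMPLE', '20160202', 'us-east-1', 's3', 'aws4_request'],
};
console.log(Object.keys(authParams).length === 5); // true; any missing key fails the check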
@@ -0,0 +1,316 @@
+import { Transform } from 'stream';
+import async from 'async';
+import errors from '../../../errors';
+import { Logger } from 'werelogs';
+import Vault, { AuthV4RequestParams } from '../../Vault';
+import { Callback } from '../../backends/in_memory/types';
+
+import constructChunkStringToSign from './constructChunkStringToSign';
+
+export type TransformParams = {
+    accessKey: string;
+    signatureFromRequest: string;
+    region: string;
+    scopeDate: string;
+    timestamp: string;
+    credentialScope: string;
+};
+
+/**
+ * This class is designed to handle the chunks sent in a streaming
+ * v4 Auth request
+ */
+export default class V4Transform extends Transform {
+    log: Logger;
+    cb: Callback;
+    accessKey: string;
+    region: string;
+    scopeDate: string;
+    timestamp: string;
+    credentialScope: string;
+    lastSignature: string;
+    currentSignature?: string;
+    haveMetadata: boolean;
+    seekingDataSize: number;
+    currentData?: any;
+    dataCursor: number;
+    currentMetadata: any[];
+    lastPieceDone: boolean;
+    lastChunk: boolean;
+    vault: Vault;
+
+    /**
+     * @constructor
+     * @param streamingV4Params - info for chunk authentication
+     * @param streamingV4Params.accessKey - requester's accessKey
+     * @param streamingV4Params.signatureFromRequest - signature
+     * sent with headers
+     * @param streamingV4Params.region - region sent with auth header
+     * @param streamingV4Params.scopeDate - date sent with auth header
+     * @param streamingV4Params.timestamp - date parsed from headers
+     * in ISO 8601 format: YYYYMMDDTHHMMSSZ
+     * @param streamingV4Params.credentialScope - items from auth
+     * header plus the string 'aws4_request' joined with '/':
+     * timestamp/region/aws-service/aws4_request
+     * @param vault - Vault instance passed from CloudServer
+     * @param log - logger object
+     * @param cb - callback to api
+     */
+    constructor(
+        streamingV4Params: TransformParams,
+        vault: Vault,
+        log: Logger,
+        cb: Callback,
+    ) {
+        const { accessKey, signatureFromRequest, region, scopeDate, timestamp,
+            credentialScope } = streamingV4Params;
+        super({});
+        this.log = log;
+        this.cb = cb;
+        this.accessKey = accessKey;
+        this.region = region;
+        this.scopeDate = scopeDate;
+        this.timestamp = timestamp;
+        this.credentialScope = credentialScope;
+        this.lastSignature = signatureFromRequest;
+        this.currentSignature = undefined;
+        this.haveMetadata = false;
+        // keep this as -1 to start since a seekingDataSize of 0
+        // means that chunk is just metadata (as is the case with the
+        // last chunk)
+        this.seekingDataSize = -1;
+        this.currentData = undefined;
+        this.dataCursor = 0;
+        this.currentMetadata = [];
+        this.lastPieceDone = false;
+        this.lastChunk = false;
+        this.vault = vault;
+    }
+
+    /**
+     * This function will parse the metadata portion of the chunk
+     * @param remainingChunk - chunk sent from _transform
+     * @return response - if error, will return 'err' key with
+     * arsenal error value.
+     * if incomplete metadata, will return 'completeMetadata' key with
+     * value false
+     * if complete metadata received, will return 'completeMetadata' key with
+     * value true and the key 'unparsedChunk' with the remaining chunk without
+     * the parsed metadata piece
+     */
+    _parseMetadata(remainingChunk: Buffer) {
+        let remainingPlusStoredMetadata = remainingChunk;
+        // have metadata pieces so need to add to the front of
+        // remainingChunk
+        if (this.currentMetadata.length > 0) {
+            this.currentMetadata.push(remainingChunk);
+            remainingPlusStoredMetadata = Buffer.concat(this.currentMetadata);
+            // zero out stored metadata
+            this.currentMetadata.length = 0;
+        }
+        let lineBreakIndex = remainingPlusStoredMetadata.indexOf('\r\n');
+        if (lineBreakIndex < 0) {
+            this.currentMetadata.push(remainingPlusStoredMetadata);
+            return { completeMetadata: false };
+        }
+        let fullMetadata = remainingPlusStoredMetadata.slice(0,
+            lineBreakIndex);
+
+        // handle extra line break on end of data chunk
+        if (fullMetadata.length === 0) {
+            const chunkWithoutLeadingLineBreak = remainingPlusStoredMetadata
+                .slice(2);
+            // find second line break
+            lineBreakIndex = chunkWithoutLeadingLineBreak.indexOf('\r\n');
+            if (lineBreakIndex < 0) {
+                this.currentMetadata.push(chunkWithoutLeadingLineBreak);
+                return { completeMetadata: false };
+            }
+            fullMetadata = chunkWithoutLeadingLineBreak.slice(0,
+                lineBreakIndex);
+        }
+
+        const splitMeta = fullMetadata.toString().split(';');
+        this.log.trace('parsed full metadata for chunk', { splitMeta });
+        if (splitMeta.length !== 2) {
+            this.log.trace('chunk body did not contain correct ' +
+                'metadata format');
+            return { err: errors.InvalidArgument };
+        }
+        // chunk-size is sent in hex
+        const dataSize = Number.parseInt(splitMeta[0], 16);
+        if (Number.isNaN(dataSize)) {
+            this.log.trace('chunk body did not contain valid size');
+            return { err: errors.InvalidArgument };
+        }
+        let chunkSig = splitMeta[1];
+        if (!chunkSig || chunkSig.indexOf('chunk-signature=') < 0) {
+            this.log.trace('chunk body did not contain correct sig format');
+            return { err: errors.InvalidArgument };
+        }
+        chunkSig = chunkSig.replace('chunk-signature=', '');
+        this.currentSignature = chunkSig;
+        this.haveMetadata = true;
+        if (dataSize === 0) {
+            this.lastChunk = true;
+            return {
+                completeMetadata: true,
+            };
+        }
+        // + 2 to get \r\n at end
+        this.seekingDataSize = dataSize + 2;
+        this.currentData = Buffer.alloc(dataSize);
+
+        return {
+            completeMetadata: true,
+            // start slice at lineBreak plus 2 to remove line break at end of
+            // metadata piece since length of '\r\n' is 2
+            unparsedChunk: remainingPlusStoredMetadata
+                .slice(lineBreakIndex + 2),
+        };
+    }
+
+    /**
+     * Build the stringToSign and authenticate the chunk
+     * @param dataToSend - chunk sent from _transform or null
+     * if last chunk without data
+     * @param done - callback to _transform
+     * @return executes callback with err if applicable
+     */
+    _authenticate(dataToSend: Buffer | null, done: Callback) {
+        // use prior sig to construct new string to sign
+        const stringToSign = constructChunkStringToSign(this.timestamp,
+            this.credentialScope, this.lastSignature, dataToSend ?? undefined);
+        this.log.trace('constructed chunk string to sign',
+            { stringToSign });
+        // once used prior sig to construct string to sign, reassign
+        // lastSignature to current signature
+        this.lastSignature = this.currentSignature!;
+        const vaultParams: AuthV4RequestParams = {
+            log: this.log,
+            data: {
+                accessKey: this.accessKey,
+                signatureFromRequest: this.currentSignature!,
+                region: this.region,
+                scopeDate: this.scopeDate,
+                stringToSign,
+                // TODO FIXME This can not work
+                // @ts-expect-errors
+                timestamp: this.timestamp,
+                credentialScope: this.credentialScope,
+            },
+        };
+        return this.vault.authenticateV4Request(vaultParams, null, err => {
+            if (err) {
+                this.log.trace('err from vault on streaming v4 auth',
+                    { error: err, paramsSentToVault: vaultParams.data });
+                return done(err);
+            }
+            return done();
+        });
+    }
+
+
+    /**
+     * This function will parse the chunk into metadata and data,
+     * use the metadata to authenticate with vault and send the
+     * data on to be stored if authentication passes
+     *
+     * @param chunk - chunk from request body
+     * @param _encoding - Data encoding unused
+     * @param callback - Callback(err, justDataChunk, encoding)
+     * @return executes callback with err if applicable
+     */
+    _transform(chunk: Buffer, _encoding: string, callback: Callback) {
+        // 'chunk' here is the node streaming chunk
+        // transfer-encoding chunks should be of the format:
+        // string(IntHexBase(chunk-size)) + ";chunk-signature=" +
+        // signature + \r\n + chunk-data + \r\n
+        // Last transfer-encoding chunk will have size 0 and no chunk-data.
+
+        if (this.lastPieceDone) {
+            const slice = chunk.slice(0, 10);
+            this.log.trace('received chunk after end.' +
+                'See first 10 bytes of chunk',
+                { chunk: slice.toString() });
+            return callback();
+        }
+        let unparsedChunk = chunk;
+        let chunkLeftToEvaluate = true;
+        return async.whilst(
+            // test function
+            () => chunkLeftToEvaluate,
+            // async function
+            done => {
+                if (!this.haveMetadata) {
+                    this.log.trace('do not have metadata so calling ' +
+                        '_parseMetadata');
+                    // need to parse our metadata
+                    const parsedMetadataResults =
+                        this._parseMetadata(unparsedChunk);
+                    if (parsedMetadataResults.err) {
+                        return done(parsedMetadataResults.err);
+                    }
+                    // if do not have full metadata get next chunk
+                    if (!parsedMetadataResults.completeMetadata) {
+                        chunkLeftToEvaluate = false;
+                        return done();
+                    }
+                    // have metadata so reset unparsedChunk to remaining
+                    // without metadata piece
+                    // TODO Is that okay?
+                    // @ts-expect-errors
+                    unparsedChunk = parsedMetadataResults.unparsedChunk;
+                }
+                if (this.lastChunk) {
+                    this.log.trace('authenticating final chunk with no data');
+                    return this._authenticate(null, err => {
+                        if (err) {
+                            return done(err);
+                        }
+                        chunkLeftToEvaluate = false;
+                        this.lastPieceDone = true;
+                        return done();
+                    });
+                }
+                if (unparsedChunk.length < this.seekingDataSize) {
+                    // add chunk to currentData and get next chunk
+                    unparsedChunk.copy(this.currentData, this.dataCursor);
+                    this.dataCursor += unparsedChunk.length;
+                    this.seekingDataSize -= unparsedChunk.length;
+                    chunkLeftToEvaluate = false;
+                    return done();
+                }
+                // parse just the next data piece without \r\n at the end
+                // (therefore, minus 2)
+                const nextDataPiece =
+                    unparsedChunk.slice(0, this.seekingDataSize - 2);
+                // add parsed data piece to other currentData pieces
+                // so that this.currentData is the full data piece
+                nextDataPiece.copy(this.currentData, this.dataCursor);
+                return this._authenticate(this.currentData, err => {
+                    if (err) {
+                        return done(err);
+                    }
+                    unparsedChunk =
+                        unparsedChunk.slice(this.seekingDataSize);
+                    this.push(this.currentData);
+                    this.haveMetadata = false;
+                    this.seekingDataSize = -1;
+                    this.currentData = undefined;
+                    this.dataCursor = 0;
+                    chunkLeftToEvaluate = unparsedChunk.length > 0;
+                    return done();
+                });
+            },
+            // final callback
+            err => {
+                if (err) {
+                    return this.cb(err as any);
+                }
+                // get next chunk
+                return callback();
+            },
+        );
+    }
+}
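The comment in _transform() describes the aws-chunked wire format; a concrete (placeholder-signed) body makes the parsing steps above easier to follow:

// Illustrative aws-chunked body; the 64-hex-char signatures are placeholders,
// not real SigV4 values.
const sig = 'a'.repeat(64);
const body = Buffer.concat([
    Buffer.from(`5;chunk-signature=${sig}\r\n`),     // metadata: 0x5 = 5 data bytes follow
    Buffer.from('hello\r\n'),                        // data + trailing CRLF
    Buffer.from(`0;chunk-signature=${sig}\r\n\r\n`), // final, data-less chunk
]);
// _parseMetadata() reads up to the first CRLF, parses '5' (hex) and the
// signature, sets seekingDataSize = 5 + 2 (data plus CRLF), and hands the
// remainder back as unparsedChunk; the 0-size chunk flips lastChunk to true.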
@@ -0,0 +1,35 @@
+import * as crypto from 'crypto';
+import * as constants from '../../../constants';
+
+/**
+ * Constructs stringToSign for chunk
+ * @param timestamp - date parsed from headers
+ * in ISO 8601 format: YYYYMMDDTHHMMSSZ
+ * @param credentialScope - items from auth
+ * header plus the string 'aws4_request' joined with '/':
+ * timestamp/region/aws-service/aws4_request
+ * @param lastSignature - signature from headers or prior chunk
+ * @param justDataChunk - data portion of chunk
+ * @returns stringToSign
+ */
+export default function constructChunkStringToSign(
+    timestamp: string,
+    credentialScope: string,
+    lastSignature: string,
+    justDataChunk?: Buffer | string,
+) {
+    let currentChunkHash: string;
+    // for last chunk, there will be no data, so use emptyStringHash
+    if (!justDataChunk) {
+        currentChunkHash = constants.emptyStringHash;
+    } else {
+        const hash = crypto.createHash('sha256');
+        const temp = justDataChunk instanceof Buffer
+            ? hash.update(justDataChunk)
+            : hash.update(justDataChunk, 'binary');
+        currentChunkHash = temp.digest('hex');
+    }
+    return `AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
+        `${credentialScope}\n${lastSignature}\n` +
+        `${constants.emptyStringHash}\n${currentChunkHash}`;
+}
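Laid out on a concrete chunk, the string returned above has five newline-separated parts after the fixed algorithm tag. A small sketch (the scope and previous-signature values are placeholders; the final HMAC step is standard SigV4 streaming signing, which this diff does not show):

import * as crypto from 'crypto';

const emptyStringHash = crypto.createHash('sha256').update('').digest('hex');
const chunkHash = crypto.createHash('sha256').update('hello').digest('hex');
const stringToSign = 'AWS4-HMAC-SHA256-PAYLOAD\n' +
    '20160202T220410Z\n' +                   // timestamp
    '20160202/us-east-1/s3/aws4_request\n' + // credentialScope
    'f'.repeat(64) + '\n' +                  // lastSignature (placeholder)
    `${emptyStringHash}\n${chunkHash}`;      // sha256('') and sha256(chunk data)
// Per standard SigV4 streaming (an assumption, not shown in this diff), the
// chunk signature is then HMAC-SHA256(signingKey, stringToSign) in hex.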
@@ -1,12 +1,11 @@
-'use strict'; // eslint-disable-line strict
+import { Logger } from 'werelogs';
 
 /**
  * Convert timestamp to milliseconds since Unix Epoch
- * @param {string} timestamp of ISO8601Timestamp format without
+ * @param timestamp of ISO8601Timestamp format without
  * dashes or colons, e.g. 20160202T220410Z
- * @return {number} number of milliseconds since Unix Epoch
  */
-function convertAmzTimeToMs(timestamp) {
+export function convertAmzTimeToMs(timestamp: string) {
     const arr = timestamp.split('');
     // Convert to YYYY-MM-DDTHH:mm:ss.sssZ
     const ISO8601time = `${arr.slice(0, 4).join('')}-${arr[4]}${arr[5]}` +
@@ -15,13 +14,12 @@ function convertAmzTimeToMs(timestamp) {
     return Date.parse(ISO8601time);
 }
 
-
 /**
  * Convert UTC timestamp to ISO 8601 timestamp
- * @param {string} timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT
- * @return {string} ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ
+ * @param timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT
+ * @return ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ
  */
-function convertUTCtoISO8601(timestamp) {
+export function convertUTCtoISO8601(timestamp: string | number) {
     // convert to ISO string: YYYY-MM-DDTHH:mm:ss.sssZ.
     const converted = new Date(timestamp).toISOString();
     // Remove "-"s and "."s and milliseconds
@@ -30,13 +28,13 @@ function convertUTCtoISO8601(timestamp) {
 
 /**
  * Check whether timestamp predates request or is too old
- * @param {string} timestamp of ISO8601Timestamp format without
+ * @param timestamp of ISO8601Timestamp format without
  * dashes or colons, e.g. 20160202T220410Z
- * @param {number} expiry - number of seconds signature should be valid
- * @param {object} log - log for request
- * @return {boolean} true if there is a time problem
+ * @param expiry - number of seconds signature should be valid
+ * @param log - log for request
+ * @return true if there is a time problem
  */
-function checkTimeSkew(timestamp, expiry, log) {
+export function checkTimeSkew(timestamp: string, expiry: number, log: Logger) {
     const currentTime = Date.now();
     const fifteenMinutes = (15 * 60 * 1000);
     const parsedTimestamp = convertAmzTimeToMs(timestamp);
@@ -56,5 +54,3 @@ function checkTimeSkew(timestamp, expiry, log) {
     }
     return false;
 }
-
-module.exports = { convertAmzTimeToMs, convertUTCtoISO8601, checkTimeSkew };
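A quick worked example of the conversion above: '20160202T220410Z' is rewritten to the ISO form '2016-02-02T22:04:10Z' and handed to Date.parse. The import path is an assumption:

import { convertAmzTimeToMs } from './timeUtils'; // path assumed

const ms = convertAmzTimeToMs('20160202T220410Z');
console.log(ms);                         // 1454450650000
console.log(new Date(ms).toISOString()); // 2016-02-02T22:04:10.000Z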
@@ -1,17 +1,19 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../../lib/errors');
+import { Logger } from 'werelogs';
+import errors from '../../../lib/errors';
 
 /**
  * Validate Credentials
- * @param {array} credentials - contains accessKey, scopeDate,
+ * @param credentials - contains accessKey, scopeDate,
  * region, service, requestType
- * @param {string} timestamp - timestamp from request in
+ * @param timestamp - timestamp from request in
  * the format of ISO 8601: YYYYMMDDTHHMMSSZ
- * @param {object} log - logging object
- * @return {boolean} true if credentials are correct format, false if not
+ * @param log - logging object
  */
-function validateCredentials(credentials, timestamp, log) {
+export function validateCredentials(
+    credentials: [string, string, string, string, string],
+    timestamp: string,
+    log: Logger
+): Error | {} {
     if (!Array.isArray(credentials) || credentials.length !== 5) {
         log.warn('credentials in improper format', { credentials });
         return errors.InvalidArgument;
@@ -58,12 +60,21 @@ function validateCredentials(credentials, timestamp, log) {
 
 /**
  * Extract and validate components from query object
- * @param {object} queryObj - query object from request
- * @param {object} log - logging object
- * @return {object} object containing extracted query params for authV4
+ * @param queryObj - query object from request
+ * @param log - logging object
+ * @return object containing extracted query params for authV4
  */
-function extractQueryParams(queryObj, log) {
-    const authParams = {};
+export function extractQueryParams(
+    queryObj: { [key: string]: string | undefined },
+    log: Logger
+) {
+    const authParams: {
+        signedHeaders?: string;
+        signatureFromRequest?: string;
+        timestamp?: string;
+        expiry?: number;
+        credential?: [string, string, string, string, string];
+    } = {};
 
     // Do not need the algorithm sent back
     if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') {
@@ -99,8 +110,9 @@ function extractQueryParams(queryObj, log) {
         return authParams;
     }
 
-    const expiry = Number.parseInt(queryObj['X-Amz-Expires'], 10);
-    if (expiry && (expiry > 0 && expiry < 604801)) {
+    const expiry = Number.parseInt(queryObj['X-Amz-Expires'] ?? 'nope', 10);
+    const sevenDays = 604800;
+    if (expiry && (expiry > 0 && expiry <= sevenDays)) {
         authParams.expiry = expiry;
     } else {
         log.warn('invalid expiry', { expiry });
@@ -109,6 +121,7 @@ function extractQueryParams(queryObj, log) {
 
     const credential = queryObj['X-Amz-Credential'];
     if (credential && credential.length > 28 && credential.indexOf('/') > -1) {
+        // @ts-ignore
         authParams.credential = credential.split('/');
     } else {
         log.warn('invalid credential param', { credential });
@@ -120,14 +133,17 @@ function extractQueryParams(queryObj, log) {
 
 /**
  * Extract and validate components from auth header
- * @param {string} authHeader - authorization header from request
- * @param {object} log - logging object
- * @return {object} object containing extracted auth header items for authV4
+ * @param authHeader - authorization header from request
+ * @param log - logging object
+ * @return object containing extracted auth header items for authV4
  */
-function extractAuthItems(authHeader, log) {
-    const authItems = {};
-    const authArray = authHeader
-        .replace('AWS4-HMAC-SHA256 ', '').split(',');
+export function extractAuthItems(authHeader: string, log: Logger) {
+    const authItems: {
+        credentialsArr?: [string, string, string, string, string];
+        signedHeaders?: string;
+        signatureFromRequest?: string;
+    } = {};
+    const authArray = authHeader.replace('AWS4-HMAC-SHA256 ', '').split(',');
 
     if (authArray.length < 3) {
         return authItems;
@@ -137,8 +153,12 @@ function extractAuthItems(authHeader, log) {
     const signedHeadersStr = authArray[1];
     const signatureStr = authArray[2];
     log.trace('credentials from request', { credentialStr });
-    if (credentialStr && credentialStr.trim().startsWith('Credential=')
-        && credentialStr.indexOf('/') > -1) {
+    if (
+        credentialStr &&
+        credentialStr.trim().startsWith('Credential=') &&
+        credentialStr.indexOf('/') > -1
+    ) {
+        // @ts-ignore
         authItems.credentialsArr = credentialStr
             .trim().replace('Credential=', '').split('/');
     } else {
@@ -165,11 +185,11 @@ function extractAuthItems(authHeader, log) {
 /**
  * Checks whether the signed headers include the host header
  * and all x-amz- and x-scal- headers in request
- * @param {string} signedHeaders - signed headers sent with request
- * @param {object} allHeaders - request.headers
- * @return {boolean} true if all x-amz-headers included and false if not
+ * @param signedHeaders - signed headers sent with request
+ * @param allHeaders - request.headers
+ * @return true if all x-amz-headers included and false if not
  */
-function areSignedHeadersComplete(signedHeaders, allHeaders) {
+export function areSignedHeadersComplete(signedHeaders: string, allHeaders: Headers) {
     const signedHeadersList = signedHeaders.split(';');
     if (signedHeadersList.indexOf('host') === -1) {
         return false;
@@ -184,6 +204,3 @@ function areSignedHeadersComplete(signedHeaders, allHeaders) {
     }
     return true;
 }
-
-module.exports = { validateCredentials, extractQueryParams,
-    areSignedHeadersComplete, extractAuthItems };
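Two details above are worth pinning down. First, the new sevenDays constant makes the expiry bound self-documenting: 604800 = 7 × 24 × 3600 seconds, the maximum presigned-URL lifetime, with the comparison tightened from `< 604801` to `<= sevenDays` over the same accepted range. Second, the header shape that extractAuthItems() splits looks like this (the credential and signature values are placeholders):

const authHeader = 'AWS4-HMAC-SHA256 ' +
    'Credential=AKIDEXAMPLE/20160202/us-east-1/s3/aws4_request, ' +
    'SignedHeaders=host;x-amz-content-sha256;x-amz-date, ' +
    'Signature=' + 'b'.repeat(64);
const authArray = authHeader.replace('AWS4-HMAC-SHA256 ', '').split(',');
// authArray[0] -> 'Credential=AKIDEXAMPLE/20160202/us-east-1/s3/aws4_request'
// authArray[1] -> ' SignedHeaders=host;x-amz-content-sha256;x-amz-date'
// authArray[2] -> ' Signature=bbbb...' (64 hex chars)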
@@ -0,0 +1,569 @@
+import cluster, { Worker } from 'cluster';
+import * as werelogs from 'werelogs';
+
+import { default as errors } from '../../lib/errors';
+
+const rpcLogger = new werelogs.Logger('ClusterRPC');
+
+/**
+ * Remote procedure calls support between cluster workers.
+ *
+ * When using the cluster module, new processes are forked and are
+ * dispatched workloads, usually HTTP requests. The ClusterRPC module
+ * implements a RPC system to send commands to all cluster worker
+ * processes at once from any particular worker, and retrieve their
+ * individual command results, like a distributed map operation.
+ *
+ * The existing nodejs cluster IPC channel is setup from the primary
+ * to each worker, but not between workers, so there has to be a hop
+ * by the primary.
+ *
+ * How a command is treated:
+ *
+ * - a worker sends a command message to the primary
+ *
+ * - the primary then forwards that command to each existing worker
+ *   (including the requestor)
+ *
+ * - each worker then executes the command and returns a result or an
+ *   error
+ *
+ * - the primary gathers all workers results into an array
+ *
+ * - finally, the primary dispatches the results array to the original
+ *   requesting worker
+ *
+ *
+ * Limitations:
+ *
+ * - The command payload must be serializable, which means that:
+ *   - it should not contain circular references
+ *   - it should be of a reasonable size to be sent in a single RPC message
+ *
+ * - The "toWorkers" parameter of value "*" targets the set of workers
+ *   that are available at the time the command is dispatched. Any new
+ *   worker spawned after the command has been dispatched for
+ *   processing, but before the command completes, don't execute
+ *   the command and hence are not part of the results array.
+ *
+ *
+ * To set it up:
+ *
+ * - On the primary:
+ *   if (cluster.isPrimary) {
+ *       setupRPCPrimary();
+ *   }
+ *
+ * - On the workers:
+ *   if (!cluster.isPrimary) {
+ *       setupRPCWorker({
+ *           handler1: (payload: object, uids: string, callback: HandlerCallback) => void,
+ *           handler2: ...
+ *       });
+ *   }
+ *   Handler functions will be passed the command payload, request
+ *   serialized uids, and must call the callback when the worker is done
+ *   processing the command:
+ *       callback(error: Error | null | undefined, result?: any)
+ *
+ * When this setup is done, any worker can start sending commands by calling
+ * the async function sendWorkerCommand().
+ */
+
+// exported types
+
+export type ResultObject = {
+    error: Error | null;
+    result: any;
+};
+
+/**
+ * saved Promise for sendWorkerCommand
+ */
+export type CommandPromise = {
+    resolve: (results?: ResultObject[]) => void;
+    reject: (error: Error) => void;
+    timeout: NodeJS.Timeout | null;
+};
+export type HandlerCallback = (error: (Error & { code?: number }) | null | undefined, result?: any) => void;
+export type HandlerFunction = (payload: object, uids: string, callback: HandlerCallback) => void;
+export type HandlersMap = {
+    [index: string]: HandlerFunction;
+};
+export type PrimaryHandlerFunction = (worker: Worker, payload: object, uids: string, callback: HandlerCallback) => void;
+export type PrimaryHandlersMap = Record<string, PrimaryHandlerFunction>;
+
+// private types
+
+type RPCMessage<T extends string, P> = {
+    type: T;
+    uids: string;
+    payload: P;
+};
+
+type RPCCommandMessage = RPCMessage<'cluster-rpc:command', any> & {
+    toWorkers: string;
+    toHandler: string;
+};
+
+type MarshalledResultObject = {
+    error: string | null;
+    errorCode?: number;
+    result: any;
+};
+
+type RPCCommandResultMessage = RPCMessage<'cluster-rpc:commandResult', MarshalledResultObject>;
+
+type RPCCommandResultsMessage = RPCMessage<'cluster-rpc:commandResults', {
+    results: MarshalledResultObject[];
+}>;
+
+type RPCCommandErrorMessage = RPCMessage<'cluster-rpc:commandError', {
+    error: string;
+}>;
+
+interface RPCSetupOptions {
+    /**
+     * As werelogs is not a peerDependency, arsenal and a parent project
+     * might have their own separate versions duplicated in dependencies.
+     * The config are therefore not shared.
+     * Use this to propagate werelogs config to arsenal's ClusterRPC.
+     */
+    werelogsConfig?: Parameters<typeof werelogs.configure>[0];
+};
+
+/**
+ * In primary: store worker IDs that are waiting to be dispatched
+ * their command's results, as a mapping.
+ */
+const uidsToWorkerId: {
+    [index: string]: number;
+} = {};
+
+
+/**
+ * In primary: store worker responses for commands in progress as a
+ * mapping.
+ *
+ * Result objects are 'null' while the worker is still processing the
+ * command. When a worker finishes processing it stores the result as:
+ *     {
+ *         error: string | null,
+ *         result: any
+ *     }
+ */
+const uidsToCommandResults: {
+    [index: string]: {
+        [index: number]: MarshalledResultObject | null;
+    };
+} = {};
+
+/**
+ * In workers: store promise callbacks for commands waiting to be
+ * dispatched, as a mapping.
+ */
+const uidsToCommandPromise: {
+    [index: string]: CommandPromise;
+} = {};
+
+
+function _isRpcMessage(message) {
+    return (message !== null &&
+        typeof message === 'object' &&
+        typeof message.type === 'string' &&
+        message.type.startsWith('cluster-rpc:'));
+}
+
+/**
+ * Setup cluster RPC system on the primary
+ *
+ * @param {object} [handlers] - mapping of handler names to handler functions
+ *     handler function:
+ *     `handler({Worker} worker, {object} payload, {string} uids, {function} callback)`
+ *     handler callback must be called when worker is done with the command:
+ *     `callback({Error|null} error, {any} [result])`
+ * @return {undefined}
+ */
+export function setupRPCPrimary(handlers?: PrimaryHandlersMap, options?: RPCSetupOptions) {
+    if (options?.werelogsConfig) {
+        werelogs.configure(options.werelogsConfig);
+    }
+    cluster.on('message', (worker, message) => {
+        if (_isRpcMessage(message)) {
+            _handlePrimaryMessage(worker, message, handlers);
+        }
+    });
+}
+
+/**
+ * Setup RPCs on a cluster worker process
+ *
+ * @param {object} handlers - mapping of handler names to handler functions
+ *     handler function:
+ *     handler({object} payload, {string} uids, {function} callback)
+ *     handler callback must be called when worker is done with the command:
+ *     callback({Error|null} error, {any} [result])
+ * @return {undefined}
+ * }
+ */
+export function setupRPCWorker(handlers: HandlersMap, options?: RPCSetupOptions) {
+    if (!process.send) {
+        throw new Error('fatal: cannot setup cluster RPC: "process.send" is not available');
+    }
+    if (options?.werelogsConfig) {
+        werelogs.configure(options.werelogsConfig);
+    }
+    process.on('message', (message: RPCCommandMessage | RPCCommandResultsMessage) => {
+        if (_isRpcMessage(message)) {
+            _handleWorkerMessage(message, handlers);
+        }
+    });
+}
+
+/**
+ * Send a command for workers to execute in parallel, and wait for results
+ *
+ * @param {string} toWorkers - which workers should execute the command
+ *     Currently the supported values are:
+ *     - "*", meaning all workers will execute the command
+ *     - "PRIMARY", meaning primary process will execute the command
+ * @param {string} toHandler - name of handler that will execute the
+ * command in workers, as declared in setupRPCWorker() parameter object
+ * @param {string} uids - unique identifier of the command, must be
+ * unique across all commands in progress
+ * @param {object} payload - message payload, sent as-is to the handler
+ * @param {number} [timeoutMs=60000] - timeout the command with a
+ * "RequestTimeout" error after this number of milliseconds - set to 0
+ * to disable timeouts (the command may then hang forever)
+ * @returns {Promise}
+ */
+export async function sendWorkerCommand(
+    toWorkers: string,
+    toHandler: string,
+    uids: string,
+    payload: object,
+    timeoutMs: number = 60000
+) {
+    if (typeof uids !== 'string') {
+        rpcLogger.error('missing or invalid "uids" field', { uids });
+        throw errors.MissingParameter;
+    }
+    if (uidsToCommandPromise[uids] !== undefined) {
+        rpcLogger.error('a command is already in progress with same uids', { uids });
+        throw errors.OperationAborted;
+    }
+    rpcLogger.info('sending command', { toWorkers, toHandler, uids, payload });
+    return new Promise((resolve, reject) => {
+        let timeout: NodeJS.Timeout | null = null;
+        if (timeoutMs) {
+            timeout = setTimeout(() => {
+                delete uidsToCommandPromise[uids];
+                reject(errors.RequestTimeout);
+            }, timeoutMs);
+        }
+        uidsToCommandPromise[uids] = { resolve, reject, timeout };
+        const message: RPCCommandMessage = {
+            type: 'cluster-rpc:command',
+            toWorkers,
+            toHandler,
+            uids,
+            payload,
+        };
+        return process.send?.(message);
+    });
+}
+
+/**
+ * Get the number of commands in flight
+ * @returns {number}
+ */
+export function getPendingCommandsCount() {
+    return Object.keys(uidsToCommandPromise).length;
+}
+
+
+function _dispatchCommandResultsToWorker(
+    worker: Worker,
+    uids: string,
+    resultsArray: MarshalledResultObject[]
+): void {
+    const message: RPCCommandResultsMessage = {
+        type: 'cluster-rpc:commandResults',
+        uids,
+        payload: {
+            results: resultsArray,
+        },
+    };
+    worker.send(message);
+}
+
+function _dispatchCommandErrorToWorker(
+    worker: Worker,
+    uids: string,
+    error: Error,
+): void {
+    const message: RPCCommandErrorMessage = {
+        type: 'cluster-rpc:commandError',
+        uids,
+        payload: {
+            error: error.message,
+        },
+    };
+    worker.send(message);
+}
+
+function _sendPrimaryCommandResult(
+    worker: Worker,
+    uids: string,
+    error: (Error & { code?: number }) | null | undefined,
+    result?: any
+): void {
+    const message: RPCCommandResultsMessage = {
+        type: 'cluster-rpc:commandResults',
+        uids,
+        payload: {
+            results: [{ error: error?.message || null, errorCode: error?.code, result }],
+        },
+    };
+    worker.send?.(message);
+}
+
+function _handlePrimaryCommandMessage(
+    fromWorker: Worker,
+    logger: any,
+    message: RPCCommandMessage,
+    handlers?: PrimaryHandlersMap
+): void {
+    const { toWorkers, toHandler, uids, payload } = message;
+    if (toWorkers === '*') {
+        if (uidsToWorkerId[uids] !== undefined) {
+            logger.warn('new command already has a waiting worker with same uids', {
+                uids, workerId: uidsToWorkerId[uids],
+            });
+            return undefined;
+        }
+        const commandResults = {};
+        for (const workerId of Object.keys(cluster.workers || {})) {
+            commandResults[workerId] = null;
+        }
+        uidsToWorkerId[uids] = fromWorker?.id;
+        uidsToCommandResults[uids] = commandResults;
+
+        for (const [workerId, worker] of Object.entries(cluster.workers || {})) {
+            logger.debug('sending command message to worker', {
+                workerId, toHandler, payload,
+            });
+            if (worker) {
+                worker.send(message);
+            }
+        }
+    } else if (toWorkers === 'PRIMARY') {
+        const { toHandler, uids, payload } = message;
+        const cb: HandlerCallback = (err, result) => _sendPrimaryCommandResult(fromWorker, uids, err, result);
+
+        if (toHandler in (handlers || {})) {
+            return handlers![toHandler](fromWorker, payload, uids, cb);
+        }
+        logger.error('no such handler in "toHandler" field from worker command message', {
+            toHandler,
+        });
+        return cb(errors.NotImplemented);
+    } else {
+        logger.error('unsupported "toWorkers" field from worker command message', {
+            toWorkers,
+        });
+        if (fromWorker) {
+            _dispatchCommandErrorToWorker(fromWorker, uids, errors.NotImplemented);
+        }
+    }
+}
+
+function _handlePrimaryCommandResultMessage(
+    fromWorkerId: number,
+    logger: any,
+    message: RPCCommandResultMessage
+): void {
+    const { uids, payload } = message;
+    const commandResults = uidsToCommandResults[uids];
+    if (!commandResults) {
+        logger.warn('received command response message from worker for command not in flight', {
+            workerId: fromWorkerId,
+            uids,
+        });
+        return undefined;
+    }
+    if (commandResults[fromWorkerId] === undefined) {
+        logger.warn('received command response message with unexpected worker ID', {
+            workerId: fromWorkerId,
+            uids,
+        });
+        return undefined;
+    }
+    if (commandResults[fromWorkerId] !== null) {
+        logger.warn('ignoring duplicate command response from worker', {
+            workerId: fromWorkerId,
+            uids,
+        });
+        return undefined;
+    }
+    commandResults[fromWorkerId] = payload;
+    const commandResultsArray = Object.values(commandResults);
+    if (commandResultsArray.every(response => response !== null)) {
+        logger.debug('all workers responded to command', { uids });
+        const completeCommandResultsArray = <MarshalledResultObject[]> commandResultsArray;
+        const toWorkerId = uidsToWorkerId[uids];
+        const toWorker = cluster.workers?.[toWorkerId];
+
+        delete uidsToCommandResults[uids];
+        delete uidsToWorkerId[uids];
+
+        if (!toWorker) {
+            logger.warn('worker shut down while its command was executing', {
+                workerId: toWorkerId, uids,
+            });
+            return undefined;
+        }
+        // send back response to original worker
+        _dispatchCommandResultsToWorker(toWorker, uids, completeCommandResultsArray);
+    }
+}
+
+function _handlePrimaryMessage(
+    fromWorker: Worker,
+    message: RPCCommandMessage | RPCCommandResultMessage,
+    handlers?: PrimaryHandlersMap
+): void {
+    const { type: messageType, uids } = message;
+    const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
+    logger.debug('primary received message from worker', {
+        workerId: fromWorker?.id, rpcMessage: message,
+    });
+    if (messageType === 'cluster-rpc:command') {
+        return _handlePrimaryCommandMessage(fromWorker, logger, message, handlers);
+    }
+    if (messageType === 'cluster-rpc:commandResult') {
+        return _handlePrimaryCommandResultMessage(fromWorker?.id, logger, message);
+    }
+    logger.error('unsupported message type', {
+        workerId: fromWorker?.id, messageType, uids,
+    });
+    return undefined;
+}
+
+function _sendWorkerCommandResult(
+    uids: string,
+    error: Error | null | undefined,
+    result?: any
+): void {
+    const message: RPCCommandResultMessage = {
+        type: 'cluster-rpc:commandResult',
+        uids,
+        payload: {
+            error: error ? error.message : null,
+            result,
+        },
+    };
+    process.send?.(message);
+}
+
+function _handleWorkerCommandMessage(
+    logger: any,
+    message: RPCCommandMessage,
+    handlers: HandlersMap
+): void {
+    const { toHandler, uids, payload } = message;
+    const cb: HandlerCallback = (err, result) => _sendWorkerCommandResult(uids, err, result);
+
+    if (toHandler in handlers) {
+        return handlers[toHandler](payload, uids, cb);
+    }
+    logger.error('no such handler in "toHandler" field from worker command message', {
+        toHandler,
+    });
+    return cb(errors.NotImplemented);
+}
+
+function _handleWorkerCommandResultsMessage(
+    logger: any,
+    message: RPCCommandResultsMessage,
+): void {
+    const { uids, payload } = message;
+    const { results } = payload;
+    const commandPromise: CommandPromise = uidsToCommandPromise[uids];
+    if (commandPromise === undefined) {
+        logger.error('missing promise for command results', { uids, payload });
+        return undefined;
+    }
+    if (commandPromise.timeout) {
+        clearTimeout(commandPromise.timeout);
+    }
+    delete uidsToCommandPromise[uids];
+    const unmarshalledResults = results.map(workerResult => {
+        let workerError: Error | null = null;
+        if (workerResult.error) {
+            if (workerResult.error in errors) {
+                workerError = errors[workerResult.error];
+            } else {
+                workerError = new Error(workerResult.error);
+            }
+        }
+        if (workerError && workerResult.errorCode) {
+            (workerError as Error & { code: number }).code = workerResult.errorCode;
+        }
+        const unmarshalledResult: ResultObject = {
+            error: workerError,
+            result: workerResult.result,
+        };
+        return unmarshalledResult;
+    });
+    return commandPromise.resolve(unmarshalledResults);
+}
+
+function _handleWorkerCommandErrorMessage(
+    logger: any,
+    message: RPCCommandErrorMessage,
+): void {
+    const { uids, payload } = message;
+    const { error } = payload;
+    const commandPromise: CommandPromise = uidsToCommandPromise[uids];
+    if (commandPromise === undefined) {
+        logger.error('missing promise for command results', { uids, payload });
+        return undefined;
+    }
+    if (commandPromise.timeout) {
+        clearTimeout(commandPromise.timeout);
+    }
+    delete uidsToCommandPromise[uids];
+    let commandError: Error | null = null;
+    if (error in errors) {
+        commandError = errors[error];
+    } else {
+        commandError = new Error(error);
+    }
+    return commandPromise.reject(<Error> commandError);
+}
+
+function _handleWorkerMessage(
+    message: RPCCommandMessage | RPCCommandResultsMessage | RPCCommandErrorMessage,
+    handlers: HandlersMap
+): void {
+    const { type: messageType, uids } = message;
+    const workerId = cluster.worker?.id;
+    const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
+    logger.debug('worker received message from primary', {
+        workerId, rpcMessage: message,
+    });
+    if (messageType === 'cluster-rpc:command') {
+        return _handleWorkerCommandMessage(logger, message, handlers);
+    }
+    if (messageType === 'cluster-rpc:commandResults') {
+        return _handleWorkerCommandResultsMessage(logger, message);
+    }
+    if (messageType === 'cluster-rpc:commandError') {
+        return _handleWorkerCommandErrorMessage(logger, message);
+    }
+    logger.error('unsupported message type', {
+        workerId, messageType,
+    });
+    return undefined;
+}
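A hypothetical end-to-end use of the module above, following the setup its doc comment describes; the 'echo' handler name, worker count, and payload are illustrative, and the import path is an assumption:

import cluster from 'cluster';
import {
    setupRPCPrimary, setupRPCWorker, sendWorkerCommand,
} from './clusterRPC'; // path assumed

if (cluster.isPrimary) {
    setupRPCPrimary();
    for (let i = 0; i < 4; i += 1) {
        cluster.fork();
    }
} else {
    setupRPCWorker({
        // trivial handler: echo the payload back
        echo: (payload, uids, callback) => callback(null, payload),
    });
    // any worker may broadcast and collect one result per live worker
    sendWorkerCommand('*', 'echo', `uid-${process.pid}`, { ping: true })
        .then(results => {
            // results is ResultObject[]: [{ error: null, result: { ping: true } }, ... x4]
        });
}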
@@ -1,70 +0,0 @@
-'use strict'; // eslint-disable-line strict
-
-// The min value here is to manage further backward compat if we
-// need it
-const iamSecurityTokenSizeMin = 128;
-const iamSecurityTokenSizeMax = 128;
-// Security token is an hex string (no real format from amazon)
-const iamSecurityTokenPattern =
-    new RegExp(`^[a-f0-9]{${iamSecurityTokenSizeMin},` +
-        `${iamSecurityTokenSizeMax}}$`);
-
-module.exports = {
-    // info about the iam security token
-    iamSecurityToken: {
-        min: iamSecurityTokenSizeMin,
-        max: iamSecurityTokenSizeMax,
-        pattern: iamSecurityTokenPattern,
-    },
-    // PublicId is used as the canonicalID for a request that contains
-    // no authentication information. Requestor can access
-    // only public resources
-    publicId: 'http://acs.amazonaws.com/groups/global/AllUsers',
-    zenkoServiceAccount: 'http://acs.zenko.io/accounts/service',
-    metadataFileNamespace: '/MDFile',
-    dataFileURL: '/DataFile',
-    // AWS states max size for user-defined metadata
-    // (x-amz-meta- headers) is 2 KB:
-    // http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
-    // In testing, AWS seems to allow up to 88 more bytes,
-    // so we do the same.
-    maximumMetaHeadersSize: 2136,
-    emptyFileMd5: 'd41d8cd98f00b204e9800998ecf8427e',
-    // Version 2 changes the format of the data location property
-    // Version 3 adds the dataStoreName attribute
-    mdModelVersion: 3,
-    /*
-     * Splitter is used to build the object name for the overview of a
-     * multipart upload and to build the object names for each part of a
-     * multipart upload. These objects with large names are then stored in
-     * metadata in a "shadow bucket" to a real bucket. The shadow bucket
-     * contains all ongoing multipart uploads. We include in the object
-     * name some of the info we might need to pull about an open multipart
-     * upload or about an individual part with each piece of info separated
-     * by the splitter. We can then extract each piece of info by splitting
-     * the object name string with this splitter.
-     * For instance, assuming a splitter of '...!*!',
-     * the name of the upload overview would be:
-     *   overview...!*!objectKey...!*!uploadId
-     * For instance, the name of a part would be:
-     *   uploadId...!*!partNumber
-     *
-     * The sequence of characters used in the splitter should not occur
-     * elsewhere in the pieces of info to avoid splitting where not
-     * intended.
-     *
-     * Splitter is also used in adding bucketnames to the
-     * namespacerusersbucket. The object names added to the
-     * namespaceusersbucket are of the form:
-     * canonicalID...!*!bucketname
-     */
-
-    splitter: '..|..',
-    usersBucket: 'users..bucket',
-    // MPU Bucket Prefix is used to create the name of the shadow
-    // bucket used for multipart uploads. There is one shadow mpu
-    // bucket per bucket and its name is the mpuBucketPrefix followed
-    // by the name of the final destination bucket for the object
-    // once the multipart upload is complete.
-    mpuBucketPrefix: 'mpuShadowBucket',
-};
@ -0,0 +1,177 @@
|
||||||
|
import * as crypto from 'crypto';
|
||||||
|
|
||||||
|
// The min value here is to manage further backward compat if we
|
||||||
|
// need it
|
||||||
|
// Default value
|
||||||
|
export const vaultGeneratedIamSecurityTokenSizeMin = 128;
|
||||||
|
// Safe to assume that a typical token size is less than 8192 bytes
|
||||||
|
export const vaultGeneratedIamSecurityTokenSizeMax = 8192;
|
||||||
|
// Base-64
|
||||||
|
export const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/;
|
||||||
|
|
||||||
|
// info about the iam security token
|
||||||
|
export const iamSecurityToken = {
|
||||||
|
min: vaultGeneratedIamSecurityTokenSizeMin,
|
||||||
|
max: vaultGeneratedIamSecurityTokenSizeMax,
|
||||||
|
pattern: vaultGeneratedIamSecurityTokenPattern,
|
||||||
|
};
// PublicId is used as the canonicalID for a request that contains
// no authentication information. The requestor can then access
// only public resources
export const publicId = 'http://acs.amazonaws.com/groups/global/AllUsers';
export const zenkoServiceAccount = 'http://acs.zenko.io/accounts/service';
export const metadataFileNamespace = '/MDFile';
export const dataFileURL = '/DataFile';
export const passthroughFileURL = '/PassthroughFile';
// AWS states max size for user-defined metadata
// (x-amz-meta- headers) is 2 KB:
// http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
// In testing, AWS seems to allow up to 88 more bytes,
// so we do the same.
export const maximumMetaHeadersSize = 2136;
export const emptyFileMd5 = 'd41d8cd98f00b204e9800998ecf8427e';
// Version 2 changes the format of the data location property
// Version 3 adds the dataStoreName attribute
// Version 4 adds the Creation-Time and Content-Language attributes,
// and adds support for x-ms-meta-* headers in UserMetadata
// Version 5 adds the azureInfo structure
// Version 6 adds a "deleted" flag that is updated to true before
// the object gets deleted. This is done to keep object metadata in the
// oplog when deleting the object, as oplog deletion events don't contain
// any metadata of the object.
// Version 6 also adds the "isPHD" flag that is used to indicate that the
// master object is a placeholder and is not up to date.
export const mdModelVersion = 6;
/*
 * Splitter is used to build the object name for the overview of a
 * multipart upload and to build the object names for each part of a
 * multipart upload. These objects with large names are then stored in
 * metadata in a "shadow bucket" to a real bucket. The shadow bucket
 * contains all ongoing multipart uploads. We include in the object
 * name some of the info we might need to pull about an open multipart
 * upload or about an individual part, with each piece of info separated
 * by the splitter. We can then extract each piece of info by splitting
 * the object name string with this splitter.
 * For instance, assuming a splitter of '...!*!',
 * the name of the upload overview would be:
 * overview...!*!objectKey...!*!uploadId
 * For instance, the name of a part would be:
 * uploadId...!*!partNumber
 *
 * The sequence of characters used in the splitter should not occur
 * elsewhere in the pieces of info to avoid splitting where not
 * intended.
 *
 * Splitter is also used in adding bucket names to the
 * namespaceUsersBucket. The object names added to the
 * namespaceUsersBucket are of the form:
 * canonicalID...!*!bucketname
 */

export const splitter = '..|..';
export const usersBucket = 'users..bucket';
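To make the naming scheme concrete, here is an illustrative round-trip with the actual `'..|..'` splitter (the key and upload ID values are invented):

```ts
// Compose the shadow-bucket key for an MPU overview, then take it apart.
const overviewKey = ['overview', 'photos/cat.jpg', 'uploadId123'].join(splitter);
// => 'overview..|..photos/cat.jpg..|..uploadId123'
const [tag, objectKey, uploadId] = overviewKey.split(splitter);
// tag === 'overview', objectKey === 'photos/cat.jpg', uploadId === 'uploadId123'
```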
// MPU Bucket Prefix is used to create the name of the shadow
// bucket used for multipart uploads. There is one shadow mpu
// bucket per bucket and its name is the mpuBucketPrefix followed
// by the name of the final destination bucket for the object
// once the multipart upload is complete.
export const mpuBucketPrefix = 'mpuShadowBucket';
// Since AWS S3 does not allow capitalized buckets, these may be
// used for special internal purposes.
export const permittedCapitalizedBuckets = {
    METADATA: true,
};
// Setting a lower object key limit to account for:
// - Mongo key limit of 1012 bytes
// - Version ID in Mongo Key if versioned of 33
// - Max bucket name length if bucket match false of 63
// - Extra prefix slash for bucket prefix if bucket match of 1
export const objectKeyByteLimit = 915;
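The 915 figure is just the budget in the comment above worked out; a quick sanity check (illustrative only):

```ts
// 1012 (Mongo key limit) - 33 (version ID) - 63 (max bucket name) - 1 (slash)
console.assert(1012 - 33 - 63 - 1 === objectKeyByteLimit); // 915
```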
/* delimiter for location-constraint. The location constraint will be able
 * to include the ingestion flag
 */
export const zenkoSeparator = ':';
/* eslint-disable camelcase */
export const externalBackends = { aws_s3: true, azure: true, gcp: true, pfs: true };
export const replicationBackends = { aws_s3: true, azure: true, gcp: true };
// hex digest of sha256 hash of empty string:
export const emptyStringHash = crypto.createHash('sha256')
    .update('', 'binary').digest('hex');
export const mpuMDStoredExternallyBackend = { aws_s3: true, gcp: true };
// AWS sets a minimum size limit for parts except for the last part.
// http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
export const minimumAllowedPartSize = 5242880;
export const gcpMaximumAllowedPartCount = 1024;
// GCP Object Tagging Prefix
export const gcpTaggingPrefix = 'aws-tag-';
export const productName = 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko';
export const legacyLocations = ['sproxyd', 'legacy'];
// The default healthcheck call from nginx is every 2 seconds.
// For external backends, don't call unless at least 1 minute
// (60,000 milliseconds) has passed since the last call.
export const externalBackendHealthCheckInterval = 60000;
// Some of the available data backends (if called directly rather
// than through the multiple backend gateway) need a key provided
// as a string as the first parameter of the get/delete methods.
export const clientsRequireStringKey = { sproxyd: true, cdmi: true };
export const hasCopyPartBackends = { aws_s3: true, gcp: true };
export const versioningNotImplBackends = { azure: true, gcp: true };
// user metadata applied on zenko-created objects
export const zenkoIDHeader = 'x-amz-meta-zenko-instance-id';
// Default expiration of an S3 pre-signed URL:
// 604800 seconds (seven days).
export const defaultPreSignedURLExpiry = 7 * 24 * 60 * 60;
// Regexes for ISO-8601 formatted dates
export const shortIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z/;
export const longIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z/;
export const supportedNotificationEvents = new Set([
    's3:ObjectCreated:*',
    's3:ObjectCreated:Put',
    's3:ObjectCreated:Copy',
    's3:ObjectCreated:CompleteMultipartUpload',
    's3:ObjectRemoved:*',
    's3:ObjectRemoved:Delete',
    's3:ObjectRemoved:DeleteMarkerCreated',
    's3:Replication:OperationFailedReplication',
    's3:ObjectTagging:*',
    's3:ObjectTagging:Put',
    's3:ObjectTagging:Delete',
    's3:ObjectAcl:Put',
    's3:ObjectRestore:*',
    's3:ObjectRestore:Post',
    's3:ObjectRestore:Completed',
    's3:ObjectRestore:Delete',
    's3:LifecycleTransition',
    's3:LifecycleExpiration:*',
    's3:LifecycleExpiration:DeleteMarkerCreated',
    's3:LifecycleExpiration:Delete',
]);
export const notificationArnPrefix = 'arn:scality:bucketnotif';
// HTTP server keep-alive timeout is set to a higher value than the
// client's free-socket timeout, to avoid the risk of triggering
// ECONNRESET errors if the server closes the connection at the
// exact moment a client attempts to reuse an established connection
// for a new request.
//
// Note: the ability to close inactive connections on the client
// after httpClientFreeSocketTimeout milliseconds requires the
// use of the "agentkeepalive" module instead of the regular node.js
// http.Agent.
export const httpServerKeepAliveTimeout = 60000;
export const httpClientFreeSocketTimeout = 55000;
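A sketch of how the two timeouts are meant to pair up, assuming the agentkeepalive package (whose v4 option is `freeSocketTimeout`); the port and handler are placeholders:

```ts
import http from 'http';
import Agent from 'agentkeepalive';

// Server side: keep idle sockets open for 60s, i.e. longer than any
// client will hold one, so the server never closes a connection the
// client is about to reuse (the ECONNRESET scenario described above).
const server = http.createServer((req, res) => res.end('ok'));
server.keepAliveTimeout = httpServerKeepAliveTimeout; // 60000 ms
server.listen(8000);

// Client side: agentkeepalive retires free sockets after 55s, safely
// before the server-side timeout can fire.
const agent = new Agent({ freeSocketTimeout: httpClientFreeSocketTimeout });
http.get({ host: 'localhost', port: 8000, agent });
```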
export const supportedLifecycleRules = [
    'expiration',
    'noncurrentVersionExpiration',
    'abortIncompleteMultipartUpload',
    'transitions',
    'noncurrentVersionTransition',
];
// Maximum number of buckets to cache (bucket metadata)
export const maxCachedBuckets = process.env.METADATA_MAX_CACHED_BUCKETS ?
    Number(process.env.METADATA_MAX_CACHED_BUCKETS) : 1000;

export const validRestoreObjectTiers = new Set(['Expedited', 'Standard', 'Bulk']);
export const maxBatchingConcurrentOperations = 5;

/** For the policy resource ARN check, we allow an empty account ID
 * so as not to break compatibility */
export const policyArnAllowedEmptyAccountId = ['utapi', 'scuba'];
151 lib/db.js
@@ -1,151 +0,0 @@
'use strict'; // eslint-disable-line strict

const writeOptions = { sync: true };

/**
 * Like Error, but with a property set to true.
 * TODO: this is copied from kineticlib, should consolidate with the
 * future errors module
 *
 * Example: instead of:
 *     const err = new Error("input is not a buffer");
 *     err.badTypeInput = true;
 *     throw err;
 * use:
 *     throw propError("badTypeInput", "input is not a buffer");
 *
 * @param {String} propName - the property name.
 * @param {String} message - the Error message.
 * @returns {Error} the Error object.
 */
function propError(propName, message) {
    const err = new Error(message);
    err[propName] = true;
    return err;
}

/**
 * Running transaction with multiple updates to be committed atomically
 */
class IndexTransaction {
    /**
     * Builds a new transaction
     *
     * @argument {Leveldb} db an open database to which the updates
     *                     will be applied
     *
     * @returns {IndexTransaction} a new empty transaction
     */
    constructor(db) {
        this.operations = [];
        this.db = db;
        this.closed = false;
    }

    /**
     * Adds a new operation to participate in this running transaction
     *
     * @argument {object} op an object with the following attributes:
     *                    {
     *                      type: 'put' or 'del',
     *                      key: the object key,
     *                      value: (optional for del) the value to store,
     *                    }
     *
     * @throws {Error} an error described by the following properties
     *                 - invalidTransactionVerb if op is not put or del
     *                 - pushOnCommittedTransaction if already committed
     *                 - missingKey if the key is missing from the op
     *                 - missingValue if putting without a value
     *
     * @returns {undefined}
     */
    push(op) {
        if (this.closed) {
            throw propError('pushOnCommittedTransaction',
                'can not add ops to already committed transaction');
        }

        if (op.type !== 'put' && op.type !== 'del') {
            throw propError('invalidTransactionVerb',
                `unknown action type: ${op.type}`);
        }

        if (op.key === undefined) {
            throw propError('missingKey', 'missing key');
        }

        if (op.type === 'put' && op.value === undefined) {
            throw propError('missingValue', 'missing value');
        }

        this.operations.push(op);
    }

    /**
     * Adds a new put operation to this running transaction
     *
     * @argument {string} key - the key of the object to put
     * @argument {string} value - the value to put
     *
     * @throws {Error} an error described by the following properties
     *                 - pushOnCommittedTransaction if already committed
     *                 - missingKey if the key is missing from the op
     *                 - missingValue if putting without a value
     *
     * @returns {undefined}
     *
     * @see push
     */
    put(key, value) {
        this.push({ type: 'put', key, value });
    }

    /**
     * Adds a new del operation to this running transaction
     *
     * @argument {string} key - the key of the object to delete
     *
     * @throws {Error} an error described by the following properties
     *                 - pushOnCommittedTransaction if already committed
     *                 - missingKey if the key is missing from the op
     *
     * @returns {undefined}
     *
     * @see push
     */
    del(key) {
        this.push({ type: 'del', key });
    }

    /**
     * Applies the queued updates in this transaction atomically.
     *
     * @argument {function} cb function to be called when the commit
     *                      finishes, taking an optional error argument
     *
     * @returns {undefined}
     */
    commit(cb) {
        if (this.closed) {
            return cb(propError('alreadyCommitted',
                'transaction was already committed'));
        }

        if (this.operations.length === 0) {
            return cb(propError('emptyTransaction',
                'tried to commit an empty transaction'));
        }

        this.closed = true;

        // The array-of-operations variant of the `batch` method
        // allows passing options such as `sync: true` whereas the
        // chained form does not.
        return this.db.batch(this.operations, writeOptions, cb);
    }
}

module.exports = {
    IndexTransaction,
};

@@ -0,0 +1,194 @@
/**
 * Like Error, but with a property set to true.
 * TODO: this is copied from kineticlib, should consolidate with the
 * future errors module
 *
 * Example: instead of:
 *     const err = new Error("input is not a buffer");
 *     err.badTypeInput = true;
 *     throw err;
 * use:
 *     throw propError("badTypeInput", "input is not a buffer");
 *
 * @param propName - the property name.
 * @param message - the Error message.
 * @returns the Error object.
 */
function propError(propName: string, message: string): Error {
    const err = new Error(message);
    err[propName] = true;
    // @ts-ignore
    err.is = { [propName]: true };
    return err;
}

/**
 * Running transaction with multiple updates to be committed atomically
 */
export class IndexTransaction {
    operations: { type: 'put' | 'del'; key: string; value?: any }[];
    db: any;
    closed: boolean;
    conditions: { [key: string]: string }[];

    /**
     * Builds a new transaction
     *
     * @argument {Leveldb} db an open database to which the updates
     *                     will be applied
     *
     * @returns a new empty transaction
     */
    constructor(db: any) {
        this.operations = [];
        this.db = db;
        this.closed = false;
        this.conditions = [];
    }

    /**
     * Adds a new operation to participate in this running transaction
     *
     * @argument op an object with the following attributes:
     *           {
     *             type: 'put' or 'del',
     *             key: the object key,
     *             value: (optional for del) the value to store,
     *           }
     *
     * @throws an error described by the following properties
     *         - invalidTransactionVerb if op is not put or del
     *         - pushOnCommittedTransaction if already committed
     *         - missingKey if the key is missing from the op
     *         - missingValue if putting without a value
     */
    push(op: { type: 'put'; key: string; value: any }): void;
    push(op: { type: 'del'; key: string }): void;
    push(op: { type: 'put' | 'del'; key: string; value?: any }): void {
        if (this.closed) {
            throw propError(
                'pushOnCommittedTransaction',
                'can not add ops to already committed transaction'
            );
        }

        if (op.type !== 'put' && op.type !== 'del') {
            throw propError(
                'invalidTransactionVerb',
                `unknown action type: ${op.type}`
            );
        }

        if (op.key === undefined) {
            throw propError('missingKey', 'missing key');
        }

        if (op.type === 'put' && op.value === undefined) {
            throw propError('missingValue', 'missing value');
        }

        this.operations.push(op);
    }

    /**
     * Adds a new put operation to this running transaction
     *
     * @argument {string} key - the key of the object to put
     * @argument {string} value - the value to put
     *
     * @throws {Error} an error described by the following properties
     *                 - pushOnCommittedTransaction if already committed
     *                 - missingKey if the key is missing from the op
     *                 - missingValue if putting without a value
     * @see push
     */
    put(key: string, value: any) {
        this.push({ type: 'put', key, value });
    }

    /**
     * Adds a new del operation to this running transaction
     *
     * @argument key - the key of the object to delete
     *
     * @throws an error described by the following properties
     *         - pushOnCommittedTransaction if already committed
     *         - missingKey if the key is missing from the op
     *
     * @see push
     */
    del(key: string) {
        this.push({ type: 'del', key });
    }

    /**
     * Adds a condition for the transaction
     *
     * @argument condition an object with the following attributes:
     *           {
     *             <condition>: the object key
     *           }
     *           example: { notExists: 'key1' }
     *
     * @throws an error described by the following properties
     *         - pushOnCommittedTransaction if already committed
     *         - missingCondition if the condition is empty
     */
    addCondition(condition: { [key: string]: string }) {
        if (this.closed) {
            throw propError(
                'pushOnCommittedTransaction',
                'can not add conditions to already committed transaction'
            );
        }
        if (condition === undefined || Object.keys(condition).length === 0) {
            throw propError(
                'missingCondition',
                'missing condition for conditional put'
            );
        }
        if (typeof condition.notExists !== 'string' && typeof condition.exists !== 'string') {
            throw propError(
                'unsupportedConditionalOperation',
                'missing key or supported condition'
            );
        }
        this.conditions.push(condition);
    }

    /**
     * Applies the queued updates in this transaction atomically.
     *
     * @argument cb function to be called when the commit
     *           finishes, taking an optional error argument
     */
    commit(cb: (error: Error | null, data?: any) => void) {
        if (this.closed) {
            return cb(
                propError(
                    'alreadyCommitted',
                    'transaction was already committed'
                )
            );
        }

        if (this.operations.length === 0) {
            return cb(
                propError(
                    'emptyTransaction',
                    'tried to commit an empty transaction'
                )
            );
        }

        this.closed = true;
        const options = { sync: true, conditions: this.conditions };

        // The array-of-operations variant of the `batch` method
        // allows passing options such as `sync: true` whereas the
        // chained form does not.
        return this.db.batch(this.operations, options, cb);
    }
}
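A brief usage sketch, not part of the diff; `db` stands for any open LevelDB-style handle whose `batch` accepts the options object described in the comment above:

```ts
const txn = new IndexTransaction(db); // `db`: hypothetical open handle
txn.put('bucket/key1', JSON.stringify({ size: 42 }));
txn.del('bucket/stale-key');
txn.addCondition({ notExists: 'bucket/key1' }); // commit only if key1 is absent
txn.commit(err => {
    if (err) {
        console.error('commit failed:', err);
    }
});
```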

@@ -0,0 +1,11 @@
export interface ErrorLike {
    message: any;
    code: any;
    stack: any;
    name: any;
}

export function reshapeExceptionError(error: ErrorLike) {
    const { message, code, stack, name } = error;
    return { message, code, stack, name };
}
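Why this helper exists (my note, not from the diff): the fields of a native `Error` are non-enumerable, so they disappear under `JSON.stringify`; copying them into a plain object makes the error serializable, e.g. for logging:

```ts
const boom = new Error('boom');
JSON.stringify(boom);                                      // '{}' — fields lost
JSON.stringify(reshapeExceptionError(boom as ErrorLike));  // keeps message/stack/name
```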

@@ -1,35 +0,0 @@
'use strict'; // eslint-disable-line strict

class ArsenalError extends Error {
    constructor(type, code, desc) {
        super(type);
        this.code = code;
        this.description = desc;
        this[type] = true;
    }

    customizeDescription(description) {
        return new ArsenalError(this.message, this.code, description);
    }
}

/**
 * Generate an Errors instances object.
 *
 * @returns {Object.<string, ArsenalError>} - object keyed by error name,
 * holding ArsenalError instances
 */
function errorsGen() {
    const errors = {};
    const errorsObj = require('../errors/arsenalErrors.json');

    Object.keys(errorsObj)
        .filter(index => index !== '_comment')
        .forEach(index => {
            errors[index] = new ArsenalError(index, errorsObj[index].code,
                errorsObj[index].description);
        });
    return errors;
}

module.exports = errorsGen();
File diff suppressed because it is too large
@@ -0,0 +1,175 @@
import type { ServerResponse } from 'http';
import * as rawErrors from './arsenalErrors';

/** All possible error names. */
export type Name = keyof typeof rawErrors;
/** Object containing all error names. It has the format { [Name]: "Name" } */
export type Names = { [Name_ in Name]: Name_ };
/** Mapping used to determine an error type. It has the format { [Name]: boolean } */
export type Is = { [_ in Name]: boolean };
/** Mapping of all possible Errors. It has the format { [Name]: Error } */
export type Errors = { [_ in Name]: ArsenalError };

// This object is reused constantly through createIs, so we store it here
// to avoid recomputation.
const isBase = Object.fromEntries(
    Object.keys(rawErrors).map((key) => [key, false])
) as Is;

// This allows conditionally re-enabling the old behavior of errors, to
// properly test the migration.
// Activate CI tests with `ALLOW_UNSAFE_ERROR_COMPARISON=false yarn test`.
// Remove this mechanism in ARSN-176.
export const allowUnsafeErrComp = (
    process.env.ALLOW_UNSAFE_ERROR_COMPARISON ?? 'true') === 'true';

// This contains some metaprogramming. Be careful.
// Proxy is documented on MDN:
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy
// While it might seem better to avoid metaprogramming, this lets us enforce
// type-checking properly while avoiding errors that could happen at runtime.
// Even if mistakes are made from JavaScript, like using err.is.NonExistingError,
// the Proxy will return false.
const createIs = (type: Name): Is => {
    const get = (is: Is, value: string | symbol) => is[value] ?? false;
    const final = Object.freeze({ ...isBase, [type]: true });
    return new Proxy(final, { get });
};

export class ArsenalError extends Error {
    /** HTTP status code. Example: 401, 403, 500, ... */
    #code: number;
    /** Text description of the error. */
    #description: string;
    /** Type of the error. */
    #type: Name;
    /** Object used to determine the error type.
     * Example: error.is.InternalError */
    #is: Is;
    /** A map of error metadata (can be extra fields
     * that only show in debug mode) */
    #metadata: Map<string, Object[]>;

    private constructor(type: Name, code: number, description: string,
        metadata?: Map<string, Object[]>) {
        super(type);
        this.#code = code;
        this.#description = description;
        this.#type = type;
        this.#is = createIs(type);
        this.#metadata = metadata ?? new Map<string, Object[]>();

        // This restores the old behavior of errors, to keep them
        // backward-compatible. It is handled by TS but cannot be
        // type-checked, which means we have to be extremely careful
        // about what we're doing when using errors.
        // The feature is disabled in CI tests but not in production.
        if (allowUnsafeErrComp) {
            this[type] = true;
        }
    }

    /** Output the error as a JSON string */
    toString() {
        const errorType = this.message;
        const errorMessage = this.#description;
        return JSON.stringify({ errorType, errorMessage });
    }

    flatten() {
        return {
            is_arsenal_error: true,
            code: this.#code,
            description: this.#description,
            type: this.#type,
            stack: this.stack,
        };
    }

    static unflatten(flat_obj) {
        if (!flat_obj.is_arsenal_error) {
            return null;
        }

        const err = new ArsenalError(
            flat_obj.type,
            flat_obj.code,
            flat_obj.description
        );
        err.stack = flat_obj.stack;
        return err;
    }

    /** Write the error in an HTTP response */
    writeResponse(res: ServerResponse) {
        res.writeHead(this.#code);
        const asStr = this.toString();
        res.end(asStr);
    }

    /** Clone the error with a new description. */
    customizeDescription(description: string): ArsenalError {
        const type = this.#type;
        const code = this.#code;
        const metadata = new Map(this.#metadata);
        const err = new ArsenalError(type, code, description, metadata);
        err.stack = this.stack;
        return err;
    }

    /** Clone the error with a new metadata field */
    addMetadataEntry(key: string, value: Object[]): ArsenalError {
        const type = this.#type;
        const code = this.#code;
        const description = this.#description;
        const metadata = new Map(this.#metadata);
        metadata.set(key, value);
        const err = new ArsenalError(type, code, description, metadata);
        err.stack = this.stack;
        return err;
    }

    /** Used to determine the error type. Example: error.is.InternalError */
    get is() {
        return this.#is;
    }

    /** HTTP status code. Example: 401, 403, 500, ... */
    get code() {
        return this.#code;
    }

    /** Text description of the error. */
    get description() {
        return this.#description;
    }

    /**
     * Type of the error, belonging to Name. `is` should be preferred over
     * `type` for everyday checks, but `type` remains accessible for future use. */
    get type() {
        return this.#type;
    }

    /** A map of error metadata */
    get metadata() {
        return this.#metadata;
    }

    /** Generate all possible errors. An instance is created by default. */
    static errors() {
        const errors = {};
        Object.entries(rawErrors).forEach((value) => {
            const name = value[0] as Name;
            const error = value[1];
            const { code, description } = error;
            const get = () => new ArsenalError(name, code, description);
            Object.defineProperty(errors, name, { get });
        });
        return errors as Errors;
    }
}

/** Mapping of all possible Errors.
 * Use them with errors[error].customizeDescription for any customization. */
export default ArsenalError.errors();
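For orientation, a consumption sketch of the exported mapping (mine, not part of the diff); it assumes `NoSuchKey` is one of the names defined in `arsenalErrors`:

```ts
import errors from './errors';

const err = errors.NoSuchKey;   // each property access mints a fresh instance
err.is.NoSuchKey;               // true — the Proxy-backed type check
err.is.InternalError;           // false; even unknown names safely yield false
const custom = err.customizeDescription('photos/cat.jpg not found');
// `custom` keeps the same type and code; only the description changes.
```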

@@ -0,0 +1,20 @@
# Get Pensieve Credentials Executable

## To make an executable file from getPensieveCreds.js

`npm install -g pkg`
`pkg getPensieveCreds.js`

This will build macOS, Linux, and Windows binaries.
If you just want Linux, for example:
`pkg getPensieveCreds.js --targets node6-linux-x64`

For further options, see https://github.com/zeit/pkg

## To run the executable file

Call the output executable file with an argument that names the
service you are trying to get credentials for (e.g., clueso):

`./getPensieveCreds-linux serviceName`

@@ -0,0 +1,45 @@
const async = require('async');
const MetadataFileClient =
    require('../../storage/metadata/file/MetadataFileClient');
const mdClient = new MetadataFileClient({
    host: 's3-metadata',
    port: '9993',
});
const { loadOverlayVersion, parseServiceCredentials } = require('./utils');

const serviceName = process.argv[2];
if (serviceName === undefined) {
    throw new Error('Missing service name (e.g., clueso)');
}
const tokenKey = 'auth/zenko/remote-management-token';

const mdDb = mdClient.openDB(error => {
    if (error) {
        throw error;
    }

    const db = mdDb.openSub('PENSIEVE');
    return async.waterfall([
        cb => db.get('configuration/overlay-version', {}, cb),
        (version, cb) => loadOverlayVersion(db, version, cb),
        (conf, cb) => db.get(tokenKey, {}, (err, instanceAuth) => {
            if (err) {
                return cb(err);
            }
            const creds = parseServiceCredentials(conf, instanceAuth,
                serviceName);
            return cb(null, creds);
        }),
    ], (err, creds) => {
        db.disconnect();
        if (err) {
            throw err;
        }
        if (!creds) {
            throw new Error('No credentials found');
        }
        process.stdout.write(`export AWS_ACCESS_KEY_ID="${creds.accessKey}"\n`);
        process.stdout
            .write(`export AWS_SECRET_ACCESS_KEY="${creds.secretKey}"`);
    });
});

@@ -0,0 +1,14 @@
{
    "name": "pensievecreds",
    "version": "1.0.0",
    "description": "Executable tool for Pensieve",
    "main": "getPensieveCreds.js",
    "scripts": {
        "test": "mocha --recursive --timeout 5500 tests/unit"
    },
    "dependencies": {
        "mocha": "5.2.0",
        "async": "~2.6.1",
        "node-forge": "^0.7.1"
    }
}

@@ -0,0 +1,7 @@
{
"privateKey": "-----BEGIN RSA PRIVATE KEY-----\r\nMIIEowIBAAKCAQEAj13sSYE40lAX2qpBvfdGfcSVNtBf8i5FH+E8FAhORwwPu+2S\r\n3yBQbgwHq30WWxunGb1NmZL1wkVZ+vf12DtxqFRnMA08LfO4oO6oC4V8XfKeuHyJ\r\n1qlaKRINz6r9yDkTHtwWoBnlAINurlcNKgGD5p7D+G26Chbr/Oo0ZwHula9DxXy6\r\neH8/bJ5/BynyNyyWRPoAO+UkUdY5utkFCUq2dbBIhovMgjjikf5p2oWqnRKXc+JK\r\nBegr6lSHkkhyqNhTmd8+wA+8Cace4sy1ajY1t5V4wfRZea5vwl/HlyyKodvHdxng\r\nJgg6H61JMYPkplY6Gr9OryBKEAgq02zYoYTDfwIDAQABAoIBAAuDYGlavkRteCzw\r\nRU1LIVcSRWVcgIgDXTu9K8T0Ec0008Kkxomyn6LmxmroJbZ1VwsDH8s4eRH73ckA\r\nxrZxt6Pr+0lplq6eBvKtl8MtGhq1VDe+kJczjHEF6SQHOFAu/TEaPZrn2XMcGvRX\r\nO1BnRL9tepFlxm3u/06VRFYNWqqchM+tFyzLu2AuiuKd5+slSX7KZvVgdkY1ErKH\r\ngB75lPyhPb77C/6ptqUisVMSO4JhLhsD0+ekDVY982Sb7KkI+szdWSbtMx9Ek2Wo\r\ntXwJz7I8T7IbODy9aW9G+ydyhMDFmaEYIaDVFKJj5+fluNza3oQ5PtFNVE50GQJA\r\nsisGqfECgYEAwpkwt0KpSamSEH6qknNYPOwxgEuXWoFVzibko7is2tFPvY+YJowb\r\n68MqHIYhf7gHLq2dc5Jg1TTbGqLECjVxp4xLU4c95KBy1J9CPAcuH4xQLDXmeLzP\r\nJ2YgznRocbzAMCDAwafCr3uY9FM7oGDHAi5bE5W11xWx+9MlFExL3JkCgYEAvJp5\r\nf+JGN1W037bQe2QLYUWGszewZsvplnNOeytGQa57w4YdF42lPhMz6Kc/zdzKZpN9\r\njrshiIDhAD5NCno6dwqafBAW9WZl0sn7EnlLhD4Lwm8E9bRHnC9H82yFuqmNrzww\r\nzxBCQogJISwHiVz4EkU48B283ecBn0wT/fAa19cCgYEApKWsnEHgrhy1IxOpCoRh\r\nUhqdv2k1xDPN/8DUjtnAFtwmVcLa/zJopU/Zn4y1ZzSzjwECSTi+iWZRQ/YXXHPf\r\nl92SFjhFW92Niuy8w8FnevXjF6T7PYiy1SkJ9OR1QlZrXc04iiGBDazLu115A7ce\r\nanACS03OLw+CKgl6Q/RR83ECgYBCUngDVoimkMcIHHt3yJiP3ikeAKlRnMdJlsa0\r\nXWVZV4hCG3lDfRXsnEgWuimftNKf+6GdfYSvQdLdiQsCcjT5A4uLsQTByv5nf4uA\r\n1ZKOsFrmRrARzxGXhLDikvj7yP//7USkq+0BBGFhfuAvl7fMhPceyPZPehqB7/jf\r\nxX1LBQKBgAn5GgSXzzS0e06ZlP/VrKxreOHa5Z8wOmqqYQ0QTeczAbNNmuITdwwB\r\nNkbRqpVXRIfuj0BQBegAiix8om1W4it0cwz54IXBwQULxJR1StWxj3jo4QtpMQ+z\r\npVPdB1Ilb9zPV1YvDwRfdS1xsobzznAx56ecsXduZjs9mF61db8Q\r\n-----END RSA PRIVATE KEY-----\r\n",
"publicKey": "-----BEGIN PUBLIC KEY-----\r\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAj13sSYE40lAX2qpBvfdG\r\nfcSVNtBf8i5FH+E8FAhORwwPu+2S3yBQbgwHq30WWxunGb1NmZL1wkVZ+vf12Dtx\r\nqFRnMA08LfO4oO6oC4V8XfKeuHyJ1qlaKRINz6r9yDkTHtwWoBnlAINurlcNKgGD\r\n5p7D+G26Chbr/Oo0ZwHula9DxXy6eH8/bJ5/BynyNyyWRPoAO+UkUdY5utkFCUq2\r\ndbBIhovMgjjikf5p2oWqnRKXc+JKBegr6lSHkkhyqNhTmd8+wA+8Cace4sy1ajY1\r\nt5V4wfRZea5vwl/HlyyKodvHdxngJgg6H61JMYPkplY6Gr9OryBKEAgq02zYoYTD\r\nfwIDAQAB\r\n-----END PUBLIC KEY-----\r\n",
"accessKey": "QXP3VDG3SALNBX2QBJ1C",
"secretKey": "K5FyqZo5uFKfw9QBtn95o6vuPuD0zH/1seIrqPKqGnz8AxALNSx6EeRq7G1I6JJpS1XN13EhnwGn2ipsml3Uf2fQ00YgEmImG8wzGVZm8fWotpVO4ilN4JGyQCah81rNX4wZ9xHqDD7qYR5MyIERxR/osoXfctOwY7GGUjRKJfLOguNUlpaovejg6mZfTvYAiDF+PTO1sKUYqHt1IfKQtsK3dov1EFMBB5pWM7sVfncq/CthKN5M+VHx9Y87qdoP3+7AW+RCBbSDOfQgxvqtS7PIAf10mDl8k2kEURLz+RqChu4O4S0UzbEmtja7wa7WYhYKv/tM/QeW7kyNJMmnPg==",
"decryptedSecretKey": "n7PSZ3U6SgerF9PCNhXYsq3S3fRKVGdZTicGV8Ur"
}

@@ -0,0 +1,39 @@
const assert = require('assert');
const { parseServiceCredentials, decryptSecret } =
    require('../../utils');
const { privateKey, accessKey, secretKey, decryptedSecretKey }
    = require('../resources.json');

describe('decryptSecret', () => {
    it('should decrypt a secret', () => {
        const instanceCredentials = {
            privateKey,
        };
        const result = decryptSecret(instanceCredentials, secretKey);
        assert.strictEqual(result, decryptedSecretKey);
    });
});

describe('parseServiceCredentials', () => {
    const conf = {
        users: [{ accessKey,
            accountType: 'service-clueso',
            secretKey,
            userName: 'Search Service Account' }],
    };
    const auth = JSON.stringify({ privateKey });

    it('should parse service credentials', () => {
        const result = parseServiceCredentials(conf, auth, 'clueso');
        const expectedResult = {
            accessKey,
            secretKey: decryptedSecretKey,
        };
        assert.deepStrictEqual(result, expectedResult);
    });

    it('should return undefined if no such service', () => {
        const result = parseServiceCredentials(conf, auth, undefined);
        assert.strictEqual(result, undefined);
    });
});

@@ -0,0 +1,38 @@
const forge = require('node-forge');

function decryptSecret(instanceCredentials, secret) {
    const privateKey = forge.pki.privateKeyFromPem(
        instanceCredentials.privateKey);
    const encryptedSecretKey = forge.util.decode64(secret);
    return privateKey.decrypt(encryptedSecretKey, 'RSA-OAEP', {
        md: forge.md.sha256.create(),
    });
}

function loadOverlayVersion(db, version, cb) {
    db.get(`configuration/overlay/${version}`, {}, (err, val) => {
        if (err) {
            return cb(err);
        }
        return cb(null, JSON.parse(val));
    });
}

function parseServiceCredentials(conf, auth, serviceName) {
    const instanceAuth = JSON.parse(auth);
    const serviceAccount = (conf.users || []).find(
        u => u.accountType === `service-${serviceName}`);
    if (!serviceAccount) {
        return undefined;
    }
    return {
        accessKey: serviceAccount.accessKey,
        secretKey: decryptSecret(instanceAuth, serviceAccount.secretKey),
    };
}

module.exports = {
    decryptSecret,
    loadOverlayVersion,
    parseServiceCredentials,
};

@@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-
-const ciphers = [
+export const ciphers = [
 'DHE-RSA-AES128-GCM-SHA256',
 'ECDHE-ECDSA-AES128-GCM-SHA256',
 'ECDHE-RSA-AES256-GCM-SHA384',
@@ -28,7 +26,3 @@ const ciphers = [
 '!EDH-RSA-DES-CBC3-SHA',
 '!KRB5-DES-CBC3-SHA',
 ].join(':');
-
-module.exports = {
-    ciphers,
-};
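A quick sketch of where this cipher string plugs in (mine, not from the diff); Node's tls/https servers accept it via the `ciphers` option, and the module path and file names are placeholders:

```ts
import fs from 'fs';
import https from 'https';
import { ciphers } from './ciphers'; // module path assumed for illustration

// Restrict TLS negotiation to the vetted cipher list exported above.
const server = https.createServer({
    key: fs.readFileSync('key.pem'),   // placeholder credentials
    cert: fs.readFileSync('cert.pem'),
    ciphers,
}, (req, res) => res.end('ok'));
server.listen(8443);
```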
Some files were not shown because too many files have changed in this diff