Compare commits
1630 Commits
feature/AR
...
development
Author | SHA1 | Date |
---|---|---|
Vitaliy Filippov | 19855115ae | |
Vitaliy Filippov | 329d8ef32c | |
Vitaliy Filippov | f0ded4ea4f | |
Vitaliy Filippov | 3eea263384 | |
Vitaliy Filippov | c26d4f7d70 | |
Vitaliy Filippov | 63137e7a7b | |
Vitaliy Filippov | fdb23b1cd2 | |
Vitaliy Filippov | 4120eac127 | |
Maha Benzekri | d9bbd6cf3e | |
Maha Benzekri | 65e89d286d | |
Maha Benzekri | dcbc5ca98f | |
Maha Benzekri | 817bb836ec | |
Maha Benzekri | e3e4b2aea7 | |
Francois Ferrand | 9cd72221e8 | |
Francois Ferrand | bdcd4685ad | |
Francois Ferrand | b2b6c47ba7 | |
Jonathan Gramain | da173d53b4 | |
Jonathan Gramain | 7eb2701f21 | |
Jonathan Gramain | 6ec3c8e10d | |
Jonathan Gramain | 7aaf277db2 | |
Francois Ferrand | 67421f8c76 | |
Francois Ferrand | bf2260b1ae | |
Francois Ferrand | 11e0e1b489 | |
Anurag Mittal | f13ec2cf4c | |
Anurag Mittal | e369c7e6d2 | |
Anurag Mittal | c5c1db4568 | |
Anurag Mittal | 58f4d3cb3a | |
Anurag Mittal | b049f39e2a | |
williamlardier | 30eaaf15eb | |
williamlardier | 9d16fb0a34 | |
williamlardier | cdc612f379 | |
williamlardier | 61dd65b2c4 | |
bert-e | 2c0696322e | |
Maha Benzekri | c464a70b90 | |
Maha Benzekri | af07bb3df4 | |
Maha Benzekri | 1858654f34 | |
Maha Benzekri | 0475c8520a | |
Maha Benzekri | 31a4de5372 | |
Maha Benzekri | 0c53d13439 | |
Maha Benzekri | cad8b14df1 | |
Nicolas Humbert | fe29bacc79 | |
Nicolas Humbert | a86cff4631 | |
Kerkesni | f13a5d79ea | |
Maha Benzekri | ca8f570f15 | |
Maha Benzekri | a4bca10faf | |
Jonathan Gramain | c2ab4a2052 | |
Jonathan Gramain | fd0aa314eb | |
Jonathan Gramain | a643a3e6cc | |
Jonathan Gramain | e9d815cc9d | |
Jonathan Gramain | c86d24fc8f | |
Jonathan Gramain | 3b6d3838f5 | |
Jonathan Gramain | fcdfa889be | |
Mickael Bourgois | 5b8fcf0313 | |
Mickael Bourgois | bdfde26fe4 | |
Mickael Bourgois | e53613783a | |
Mickael Bourgois | 69dbbb143a | |
Mickael Bourgois | 403c4e5040 | |
Nicolas Humbert | a1dc2bd84d | |
Nicolas Humbert | 01409d690c | |
Nicolas Humbert | 9ee40f343b | |
bert-e | 77ed018b4f | |
bert-e | f77700236f | |
Nicolas Humbert | 43ff16b28a | |
bert-e | 05c628728d | |
Nicolas Humbert | 2a807dc4ef | |
Nicolas Humbert | 1f8b0a4032 | |
bert-e | 0dd7fe9875 | |
Mickael Bourgois | f7a6af8d9a | |
Mickael Bourgois | e6d0eff1a8 | |
Mickael Bourgois | 9d558351e7 | |
Mickael Bourgois | 68150da72e | |
Mickael Bourgois | 2b2c4bc50e | |
Mickael Bourgois | 3068086a97 | |
Mickael Bourgois | 0af7eb5530 | |
bert-e | 7e372b7bd5 | |
bert-e | a121810552 | |
bert-e | 9bf1bcc483 | |
Nicolas Humbert | 06402c6c94 | |
Nicolas Humbert | a6f3c82827 | |
Nicolas Humbert | f1891851b3 | |
bert-e | a1eed4fefb | |
Nicolas Humbert | 68204448a1 | |
Nicolas Humbert | 40e271f7e2 | |
bert-e | d8f7f18f5a | |
bert-e | 5f4d7afefb | |
bert-e | 2482fdfafc | |
bert-e | e151b3fff1 | |
Nicolas Humbert | b8bbdbbd81 | |
Nicolas Humbert | 46258bca74 | |
williamlardier | b6bc11881a | |
williamlardier | 648257612b | |
williamlardier | 7423fac674 | |
williamlardier | 9647043a02 | |
williamlardier | f9e1f91791 | |
williamlardier | 9c5bc2bfe0 | |
Jonathan Gramain | 1a0a981271 | |
bert-e | a45b2eb6a4 | |
bert-e | b00378d46d | |
Mickael Bourgois | 2c3bfb16ef | |
Jonathan Gramain | c72d8be223 | |
Jonathan Gramain | f63cb3c762 | |
bert-e | 15fd621c5c | |
bert-e | effbf63dd4 | |
bert-e | 285fe2f63b | |
bert-e | 1d8ebe6a9c | |
bert-e | 00555597e0 | |
bert-e | bddc2ccd01 | |
Jonathan Gramain | 7908654b51 | |
Jonathan Gramain | 0d7cf8d40a | |
Jonathan Gramain | c4c75e976c | |
Jonathan Gramain | 1266a14253 | |
williamlardier | 851c72bd0f | |
bert-e | 722b6ae699 | |
bert-e | 29925a15ad | |
williamlardier | 6b64f50450 | |
Jonathan Gramain | 8dc3ba7ca6 | |
bert-e | 3c2283b062 | |
Jonathan Gramain | a6a76acede | |
Jonathan Gramain | 6a116734a9 | |
Jonathan Gramain | 9325ea4996 | |
Jonathan Gramain | 33ba89f0cf | |
Jonathan Gramain | c67331d350 | |
Jonathan Gramain | 6d6f1860ef | |
Nicolas Humbert | cbe6a5e2d6 | |
Mickael Bourgois | be1557d972 | |
Mickael Bourgois | a03463061c | |
Mickael Bourgois | 8ad0ea73a7 | |
Mickael Bourgois | a94040d13b | |
Mickael Bourgois | f265ed6122 | |
Mickael Bourgois | 7301c706fd | |
Mickael Bourgois | bfc8dee559 | |
Frédéric Meinnel | 5a5ef7c572 | |
Frédéric Meinnel | 918c2c5473 | |
Frédéric Meinnel | 29f39ab480 | |
Frédéric Meinnel | b7ac7f4616 | |
Frédéric Meinnel | f8ce90f9c3 | |
Frédéric Meinnel | 5734d11cf1 | |
Frédéric Meinnel | 4da59769d2 | |
Frédéric Meinnel | 60573991ee | |
Jonathan Gramain | 6f58f9dd68 | |
Jonathan Gramain | 3b9c93be68 | |
Jonathan Gramain | 081af3e795 | |
bert-e | 042f541a45 | |
bert-e | 63bf2cb5b1 | |
bert-e | 39f42d9cb4 | |
Mickael Bourgois | 02f126f040 | |
bert-e | 1477a70e47 | |
Mickael Bourgois | 7233ec2635 | |
Mickael Bourgois | c4b44016bc | |
Mickael Bourgois | a78a84faa7 | |
Mickael Bourgois | c3ff6526a1 | |
Frédéric Meinnel | 59d47a3e21 | |
Frédéric Meinnel | 6b61347c29 | |
Mickael Bourgois | 4bf29524eb | |
Mickael Bourgois | 9aa001c4d1 | |
Frédéric Meinnel | aea4663ff2 | |
Frédéric Meinnel | 5012e9209c | |
Frédéric Meinnel | 1568ad59c6 | |
bert-e | c2f6b45116 | |
bert-e | a0322b131c | |
Mickael Bourgois | b5487e3c94 | |
bert-e | 993b9e6093 | |
bert-e | ddd6c87831 | |
Mickael Bourgois | f2974cbd07 | |
bert-e | 7440794d93 | |
Mickael Bourgois | 1efab676bc | |
Mickael Bourgois | a167e1d5fa | |
Mickael Bourgois | c7e153917a | |
bert-e | 087369b37d | |
bert-e | 2d2030dfe4 | |
bert-e | 45cc4aa79e | |
Will Toozs | da80e12dab | |
Will Toozs | a7cf94d0fe | |
Jonathan Gramain | 2a82095d03 | |
Jonathan Gramain | 44b3d25459 | |
Jonathan Gramain | f1d6e30fb6 | |
Jonathan Gramain | 9186643caa | |
Jonathan Gramain | 485a76ceb9 | |
Jonathan Gramain | 00109a2c44 | |
Jonathan Gramain | aed1247825 | |
Jonathan Gramain | 0507c04ce9 | |
Will Toozs | 62736abba4 | |
Will Toozs | 97118f09c4 | |
Will Toozs | 5a84a8c0ad | |
bert-e | 37234efd14 | |
Jonathan Gramain | 2799381ef2 | |
Jonathan Gramain | a3f13e5387 | |
Jonathan Gramain | f4e83086d6 | |
Jonathan Gramain | d08a267965 | |
Jonathan Gramain | 063a2fb8fb | |
Jonathan Gramain | 1bc3360daf | |
Jonathan Gramain | 206f14bdf5 | |
Maha Benzekri | 74ff1691a0 | |
Maha Benzekri | 5ffae72693 | |
Maha Benzekri | 477a574500 | |
bert-e | 2a4ea38301 | |
bert-e | df4c22154e | |
Maha Benzekri | 3642ac03b2 | |
Francois Ferrand | d800179f86 | |
Francois Ferrand | c1c45a4af9 | |
Francois Ferrand | da536ed037 | |
Nicolas Humbert | 06901104e8 | |
Nicolas Humbert | a99a6d9d97 | |
Nicolas Humbert | 06244059a8 | |
Nicolas Humbert | 079f631711 | |
Benoit A. | 863f45d256 | |
KillianG | 4b642cf8b4 | |
KillianG | 2537f8aa9a | |
Maha Benzekri | 7866a1d06f | |
Maha Benzekri | 29ef2ef265 | |
Maha Benzekri | 1509f1bdfe | |
Maha Benzekri | 13d349d211 | |
Maha Benzekri | 34a32c967d | |
Maha Benzekri | 90ab985271 | |
Maha Benzekri | fbf5562a11 | |
bert-e | d79ed1b9c8 | |
bert-e | c34ad0dc31 | |
Maha Benzekri | df5ff0f400 | |
Maha Benzekri | 777783171a | |
Will Toozs | 39988e52e2 | |
Will Toozs | 79c82a4c3d | |
williamlardier | 17b5bbc233 | |
williamlardier | 4aa8b5cc6e | |
williamlardier | 5deed6c2e1 | |
Nicolas Humbert | af34571771 | |
Nicolas Humbert | 79b83a9067 | |
Nicolas Humbert | 5fd675a316 | |
Nicolas Humbert | d84cc974d3 | |
Maha Benzekri | dcf0f902ff | |
Maha Benzekri | 0177fbe98f | |
Maha Benzekri | f49cea3914 | |
Maha Benzekri | 73c6f41fa3 | |
bert-e | 5b66f8d089 | |
bert-e | b61d178b18 | |
Maha Benzekri | 9ea39c6ed9 | |
Florent Monjalet | e51b06cfea | |
Florent Monjalet | f2bc701f8c | |
Nicolas Humbert | 4d6b03ba47 | |
Nicolas Humbert | f03f049683 | |
Nicolas Humbert | d7b51de024 | |
Nicolas Humbert | cf51adf1c7 | |
Nicolas Humbert | 8a7c1be2d1 | |
Nicolas Humbert | c049df0a97 | |
Nicolas Humbert | 2b2667e29a | |
Nicolas Humbert | 8eb4a29c36 | |
bert-e | 862317703e | |
Nicolas Humbert | e69a97f240 | |
Nicolas Humbert | 81e838000f | |
bert-e | 547ce816e0 | |
Nicolas Humbert | 8256d6debf | |
bert-e | 15d5e93a2d | |
Nicolas Humbert | 69c1698eb7 | |
bert-e | d11bcb56e9 | |
Nicolas Humbert | c2cd90925f | |
bert-e | 0ed35c3d86 | |
bert-e | b1723594eb | |
Nicolas Humbert | c0218821ff | |
Nicolas Humbert | 49e32758fb | |
Nicolas Humbert | e13d0f5ed8 | |
Nicolas Humbert | 0d5907956f | |
Nicolas Humbert | f0c5d60ce9 | |
Nicolas Humbert | 8c2f4cf357 | |
Nicolas Humbert | f3f1da9bb3 | |
Nicolas Humbert | 036b75842e | |
Nicolas Humbert | 7ac5774635 | |
Nicolas Humbert | f3b928fce0 | |
Nicolas Humbert | 7173a357d9 | |
Nicolas Humbert | 7c4f461196 | |
Nicolas Humbert | 0a4d6f862f | |
bert-e | 8716fee67d | |
bert-e | 2938bb0c88 | |
williamlardier | 05c93446ab | |
williamlardier | 8d758327dd | |
williamlardier | be63c09624 | |
Nicolas Humbert | 4615875462 | |
Rahul Padigela | bdb59a0e63 | |
bert-e | a89d1d8d75 | |
Rahul Padigela | 89e5f7dffe | |
williamlardier | 57e84980c8 | |
williamlardier | 51bfd41bea | |
Nicolas Humbert | 96cbaeb821 | |
Nicolas Humbert | cb01346d07 | |
Nicolas Humbert | 3f24336b83 | |
Nicolas Humbert | 1e66518a79 | |
bert-e | 15b68fa9fa | |
Nicolas Humbert | 51703a65f5 | |
bert-e | 09aaa2d5ee | |
Nicolas Humbert | ad39d90b6f | |
Jonathan Gramain | 20e9fe4adb | |
bert-e | e9c67f7f67 | |
Jonathan Gramain | af3fd17ec2 | |
bert-e | 536d474f57 | |
bert-e | 55e68cfa17 | |
bert-e | 67c98fd81b | |
williamlardier | 5cd70d7cf1 | |
KillianG | 25be9014c9 | |
KillianG | ed42f24580 | |
KillianG | ce076cb3df | |
KillianG | 4bc3de52ff | |
bert-e | beb5f69be3 | |
bert-e | 5f3540a0d5 | |
bert-e | 654d628d39 | |
gaspardmoindrot | e8a409e337 | |
Alexander Chan | 4093bf2b04 | |
Alexander Chan | d0bb6d5b0c | |
bert-e | 3f7229eebe | |
bert-e | 7eb9d52da5 | |
Nicolas Humbert | e216c9dd20 | |
williamlardier | 0c1afe535b | |
williamlardier | 73335ae6ec | |
Alexander Chan | 99c514e8f2 | |
Alexander Chan | cfd9fdcfc4 | |
Alexander Chan | d809dac5e3 | |
williamlardier | 53dac8d233 | |
williamlardier | 6d5ef07eee | |
williamlardier | 272166e406 | |
williamlardier | 3af05e672b | |
williamlardier | 8b0c90cb2f | |
Alexander Chan | dfc9b761e2 | |
Alexander Chan | 04f1eb7f04 | |
bert-e | c204b90847 | |
bert-e | 78d6e7fd72 | |
Alexander Chan | 7768fa8d35 | |
KillianG | 4d9a9adc48 | |
KillianG | c4804e52ee | |
KillianG | 671cf3a679 | |
Jonathan Gramain | 9a5e27f97b | |
Jonathan Gramain | d744a709d2 | |
Jonathan Gramain | a9d003c6f8 | |
Jonathan Gramain | 99e04bd6fa | |
Jonathan Gramain | d3bdddeba3 | |
bert-e | 3252f7de03 | |
Jonathan Gramain | c4cc5a2c3d | |
Jonathan Gramain | fedd0190cc | |
Jonathan Gramain | 56fd4ad734 | |
Jonathan Gramain | ebe6b65fcf | |
Nicolas Humbert | 7994bf7b96 | |
Nicolas Humbert | 4be0a06c4a | |
bert-e | da7dbdc51f | |
Will Toozs | 2103ef1237 | |
Will Toozs | dbc1c54246 | |
bert-e | 6c22f8404d | |
KillianG | 00e03f0592 | |
KillianG | d453758b7d | |
KillianG | a964dc99c3 | |
Jonathan Gramain | 3a4da1d7c0 | |
williamlardier | 5074e6c0a4 | |
williamlardier | bd05dd6918 | |
williamlardier | fbda12ce3c | |
Nicolas Humbert | b02934bb39 | |
Nicolas Humbert | c9a444969b | |
Nicolas Humbert | 5d018860ec | |
bert-e | 5838e02096 | |
Nicolas Humbert | ecd600ac4b | |
Naren | ab0324da05 | |
Naren | 2b353b33af | |
Naren | 5377b20ceb | |
Naren | 21b329b301 | |
Naren | bd76402586 | |
bert-e | fd57f47be1 | |
bert-e | 94edf8be70 | |
Naren | 1d104345fd | |
Jonathan Gramain | 58e47e5015 | |
Jonathan Gramain | 4d782ecec6 | |
Jonathan Gramain | 655a10ce52 | |
Jonathan Gramain | 0c7f0e607d | |
Jonathan Gramain | caa5d53e9b | |
Jonathan Gramain | 21da975187 | |
bert-e | e0df67a115 | |
Naren | 7e18ae77e0 | |
Naren | 4750118f85 | |
Naren | c273c8b823 | |
Jonathan Gramain | d3b50fafa8 | |
Naren | 47e68a9b60 | |
Naren | bd0a199ffa | |
Naren | 4b1f69bcbb | |
Naren | e3a6814e3f | |
Alexander Chan | bf4072151f | |
Alexander Chan | f33cd69e45 | |
Alexander Chan | acd13ff31b | |
Alexander Chan | bb3e5d078f | |
Jonathan Gramain | 22fa04b7e7 | |
Jonathan Gramain | 10a94a0a96 | |
bert-e | 4d71a834d5 | |
Alexander Chan | 054f61d6c1 | |
Alexander Chan | fa26a487f5 | |
Alexander Chan | c1dd2e4946 | |
Alexander Chan | a714103b82 | |
Jonathan Gramain | 66740f5aba | |
Jonathan Gramain | a3a83dd89c | |
williamlardier | 8db8109391 | |
Jonathan Gramain | d90af29019 | |
Jonathan Gramain | 9d8d98fcc9 | |
Jonathan Gramain | 01830d19a0 | |
Jonathan Gramain | 49cc018fa4 | |
Jonathan Gramain | dd87c869ca | |
Jonathan Gramain | df44cffb96 | |
Jonathan Gramain | 164053d1e8 | |
Jonathan Gramain | af741c50fb | |
williamlardier | 9c46703b89 | |
williamlardier | 47672d60ce | |
Jonathan Gramain | 6d41d103e8 | |
Jonathan Gramain | 34ccca9b07 | |
Jonathan Gramain | 6e5d8d14af | |
Jonathan Gramain | 890ac08dcd | |
Jonathan Gramain | 4cda9f6a6b | |
Jonathan Gramain | fbb62ef17c | |
Jonathan Gramain | 4949b7cc35 | |
Jonathan Gramain | 2b6fee4e84 | |
Jonathan Gramain | 8077186c3a | |
Jonathan Gramain | 1c07618b18 | |
Jonathan Gramain | 4d7eaee0cc | |
williamlardier | c460338163 | |
williamlardier | f17d52b602 | |
williamlardier | a6b234b7a8 | |
williamlardier | ff353bb4d6 | |
williamlardier | 0f9c9c2f18 | |
williamlardier | f6b2cf2c1a | |
Kerkesni | ecafbae36a | |
Kerkesni | d1cd7e8dba | |
Francois Ferrand | 3da6719200 | |
Francois Ferrand | c0dd54ef51 | |
Francois Ferrand | 7910792390 | |
Francois Ferrand | a4f4c51290 | |
Francois Ferrand | 66c4bc52b5 | |
Francois Ferrand | 81cd6652d6 | |
Francois Ferrand | 2a07f67244 | |
Francois Ferrand | 1a634015ee | |
williamlardier | 7a88a54918 | |
williamlardier | b25e620750 | |
williamlardier | 38ef89cc83 | |
williamlardier | 1a6c828bfc | |
williamlardier | 3d769c6960 | |
williamlardier | 8a27920a85 | |
williamlardier | 7642a22176 | |
Jonathan Gramain | 7b64896234 | |
Jonathan Gramain | 4f0a846814 | |
bert-e | 8f63687ef3 | |
Kerkesni | 26f45fa81a | |
Kerkesni | 76b59057f7 | |
Kerkesni | ae0da3d605 | |
bert-e | 7c1bd453ee | |
bert-e | 162d9ec46b | |
Kerkesni | ccd6462015 | |
Kerkesni | 665c77570c | |
Kerkesni | 27307b397c | |
Kerkesni | 414eada32b | |
Kerkesni | fdf0c6fe99 | |
Kerkesni | 8cc0be7da2 | |
bert-e | 65231633a7 | |
Kerkesni | 9a975723c1 | |
Kerkesni | ef024ddef3 | |
Kerkesni | b61138a348 | |
Kerkesni | d852eef08e | |
Kerkesni | fd63b857f3 | |
Alexander Chan | 92c567414a | |
Alexander Chan | ec55e39175 | |
Jonathan Gramain | c343820cae | |
Jonathan Gramain | 0f9da6a44e | |
Jonathan Gramain | 53a42f7411 | |
Jonathan Gramain | 9c2bed8034 | |
williamlardier | 8307a1513e | |
williamlardier | 706c2425fe | |
williamlardier | 8618d77de9 | |
williamlardier | 9d614a4ab3 | |
williamlardier | 7763685cb0 | |
Artem Bakalov | 8abe746222 | |
Artem Bakalov | 4c6712741b | |
bert-e | e74cca6795 | |
Artem Bakalov | 87b060f2ae | |
bert-e | 1427abecb7 | |
bert-e | 9dc357ab8d | |
bert-e | 4771ce3067 | |
Artem Bakalov | f62c3d22ed | |
williamlardier | 4e8a907d99 | |
williamlardier | a237e38c51 | |
williamlardier | 4388cb7790 | |
williamlardier | 095a2012cb | |
Killian Gardahaut | 6f42b3e64c | |
Killian Gardahaut | 264e0c1aad | |
Jonathan Gramain | 237872a5a3 | |
Jonathan Gramain | 0130355e1a | |
bert-e | 390fd97edf | |
Nicolas Humbert | 1c9e4eb93d | |
bert-e | af50ef47d7 | |
bert-e | a4f163f466 | |
Nicolas Humbert | 4d0cc9bc12 | |
bert-e | 657f969d05 | |
Jonathan Gramain | 4f2b1ca960 | |
bert-e | b43cf22b2c | |
Killian Gardahaut | 46c44ccaa6 | |
Killian Gardahaut | f45f65596b | |
bert-e | 90c63168c1 | |
bert-e | 10402ae78d | |
Jonathan Gramain | 5cd1df8601 | |
Jonathan Gramain | ee38856f29 | |
Jonathan Gramain | fe5f868f43 | |
Jonathan Gramain | dc229bb8aa | |
Killian Gardahaut | c0ee81eb7a | |
Killian Gardahaut | a6a48e812f | |
bert-e | 604a0170f1 | |
bert-e | 5a8372437b | |
Killian Gardahaut | 9d8f4793c9 | |
Killian Gardahaut | 69d33a3341 | |
Killian Gardahaut | c4ead93bd9 | |
Jonathan Gramain | 981c9c1a23 | |
Jonathan Gramain | 71de409ee9 | |
KillianG | 806f988334 | |
KillianG | 976a05c3e5 | |
KillianG | 46c24c5cc3 | |
Killian Gardahaut | c5004cb521 | |
KillianG | bc9cfb0b6d | |
KillianG | 4b6e342ff8 | |
Killian Gardahaut | d48d4d0c18 | |
Killian Gardahaut | 5a32c8eca0 | |
Kerkesni | 480f5a4427 | |
bert-e | 852ae9bd0f | |
Kerkesni | 6c132bca90 | |
Taylor McKinnon | 3d77540c47 | |
Taylor McKinnon | 3882ecf1a0 | |
Taylor McKinnon | 4f0506cf31 | |
Taylor McKinnon | acf38cc010 | |
Nicolas Humbert | d92a91f076 | |
Nicolas Humbert | 28779db602 | |
Alexander Chan | 8db16c5532 | |
Jordi Bertran de Balanda | 33439ec215 | |
Jordi Bertran de Balanda | 785b824b69 | |
bert-e | 9873c0f112 | |
Jordi Bertran de Balanda | 63212e2db3 | |
Nicolas Humbert | 725a492c2c | |
Nicolas Humbert | e446e3e132 | |
bert-e | 25c6b34a1e | |
Jordi Bertran de Balanda | 721d7ede93 | |
Jordi Bertran de Balanda | 3179d1c620 | |
Nicolas Humbert | fbbba32d69 | |
Jordi Bertran de Balanda | 56c1ba5c21 | |
Will Toozs | 73431094a3 | |
Will Toozs | aed1d8419b | |
Will Toozs | c3cb0aa514 | |
bert-e | 5919d20fa4 | |
Nicolas Humbert | 56665069c1 | |
Nicolas Humbert | 61fe54bd73 | |
Francois Ferrand | e227d9d5ca | |
Francois Ferrand | a206b5f95e | |
Francois Ferrand | 9b8f9f8afd | |
Francois Ferrand | cdcc44d272 | |
Francois Ferrand | 066be20a9d | |
Xin LI | 5acef6895f | |
Xin LI | 6e3386f693 | |
Xin LI | 2c630848ee | |
williamlardier | f7d360fe0b | |
williamlardier | 0a61b43252 | |
williamlardier | c014e630be | |
williamlardier | a747d5feda | |
KillianG | 765857071a | |
KillianG | 91b39da7e5 | |
williamlardier | 2cc6ebe9b4 | |
Xin LI | 5634e1bb1f | |
williamlardier | 7887d22d0d | |
williamlardier | 2f142aea7f | |
williamlardier | 26a046c9b2 | |
bert-e | ab23d59daf | |
williamlardier | b744385584 | |
bert-e | 6950df200a | |
williamlardier | d407cd702b | |
williamlardier | 3265d162a7 | |
bert-e | 67200d80ad | |
williamlardier | 20a071fba9 | |
bert-e | aa2992cd9f | |
bert-e | f897dee3c5 | |
williamlardier | 0e2071ed3b | |
williamlardier | ad579b2bd2 | |
Guillaume Hivert | 139da904a7 | |
Guillaume Hivert | e8851b40c0 | |
Guillaume Hivert | 536f36df4e | |
Naren | cd9456b510 | |
Alexander Chan | 15f07538d8 | |
Guillaume Hivert | e95d07af12 | |
Guillaume Hivert | 571128efb1 | |
Guillaume Hivert | f1478cbc66 | |
Guillaume Hivert | b21f7f3440 | |
Guillaume Hivert | ca2d23710f | |
Guillaume Hivert | 310fd30266 | |
Guillaume Hivert | 75c5c855d9 | |
Guillaume Hivert | 8743e9c3ac | |
bert-e | b2af7c0aea | |
Guillaume Hivert | 43d466e2fe | |
bert-e | 58c24376aa | |
Guillaume Hivert | efa8c8e611 | |
Guillaume Hivert | 62c13c1eed | |
Guillaume Hivert | ee81fa5829 | |
Guillaume Hivert | 820ad4f8af | |
Guillaume Hivert | 34eeecf6de | |
Guillaume Hivert | 050f5ed002 | |
Guillaume Hivert | 2fba338639 | |
Guillaume Hivert | 950ac8e19b | |
Guillaume Hivert | 61929bb91a | |
Guillaume Hivert | 9175148bd1 | |
Guillaume Hivert | 5f08ea9310 | |
Guillaume Hivert | 707bf795a9 | |
Guillaume Hivert | fcf64798dc | |
Guillaume Hivert | 9b607be633 | |
Guillaume Hivert | 01a8992cec | |
Guillaume Hivert | 301541223d | |
Guillaume Hivert | 4f58a4b2f3 | |
Guillaume Hivert | 6f3babd223 | |
bert-e | d7df1df2b6 | |
Artem Bakalov | 3f26b432b7 | |
bert-e | f59b1b5e07 | |
bert-e | b684bdbaa9 | |
Guillaume Hivert | a3418603d0 | |
Guillaume Hivert | 947ccd90d9 | |
Guillaume Hivert | 23113616d9 | |
Guillaume Hivert | f460ffdb21 | |
Guillaume Hivert | dfa49c79c5 | |
Guillaume Hivert | ba94dc7e86 | |
Guillaume Hivert | e582882883 | |
Guillaume Hivert | dd61c1abbe | |
Guillaume Hivert | 5e8f4f2a30 | |
Guillaume Hivert | a15f8a56e3 | |
Guillaume Hivert | 43e82f7f33 | |
Guillaume Hivert | f54feec57f | |
bert-e | d7625ced17 | |
bert-e | bbe5f293f4 | |
Guillaume Hivert | a2c1989a5d | |
bert-e | 8ad1cceeb8 | |
bert-e | 24755c8472 | |
bert-e | bd970c65ea | |
bert-e | fb39a4095e | |
bert-e | 32dfba2f89 | |
Kerkesni | 43a8772529 | |
Guillaume Hivert | a2ca197bd8 | |
Guillaume Hivert | fc05956983 | |
Xin LI | 3ed46f2d16 | |
williamlardier | 5c936c94ee | |
Xin LI | f87101eef6 | |
Xin LI | 14f86282b6 | |
Xin LI | f9dba52d38 | |
Yutaka Oishi | 6714aed351 | |
williamlardier | 99f96dd377 | |
williamlardier | ae08d89d7d | |
williamlardier | c48e2948f0 | |
williamlardier | fc942febca | |
williamlardier | a4fe998c34 | |
williamlardier | 1460e94488 | |
williamlardier | dcc7117d88 | |
williamlardier | 99cee367aa | |
williamlardier | ad5a4c152d | |
bert-e | b608c043f5 | |
Guillaume Hivert | 8ec4a11a4b | |
bert-e | 079c09e1ec | |
Guillaume Hivert | c9ff3cd60e | |
bert-e | 75f07440ef | |
bert-e | 3a6bac1158 | |
Guillaume Hivert | a15d4cd130 | |
bert-e | f2d119326a | |
Guillaume Hivert | 45ba80ec23 | |
Guillaume Hivert | 2a019f3788 | |
bert-e | 5e22900c0f | |
Guillaume Hivert | 32cff324d8 | |
Guillaume Hivert | e62ed598e8 | |
Guillaume Hivert | cda5d7cfed | |
bert-e | a217ad58e8 | |
bert-e | e46b90cbad | |
bert-e | 10cf10daa4 | |
Guillaume Hivert | 6ec2f99a91 | |
bert-e | dfd8f20bf2 | |
bert-e | 435f9f7f3c | |
Guillaume Hivert | fc17ab4299 | |
Guillaume Hivert | 44f398b01f | |
Guillaume Hivert | dc32d78b0f | |
Guillaume Hivert | 9f1ea09ee6 | |
Guillaume Hivert | 073d752ad8 | |
Guillaume Hivert | 37c325f033 | |
bert-e | 3454e934f5 | |
Guillaume Hivert | 76bffb2a23 | |
Guillaume Hivert | bd498d414b | |
Guillaume Hivert | f98c65ffb4 | |
Guillaume Hivert | eae29c53dd | |
Guillaume Hivert | 8d17b69eb8 | |
Guillaume Hivert | 938d64f48e | |
Guillaume Hivert | 485ca38867 | |
Guillaume Hivert | 355c540510 | |
Jordi Bertran de Balanda | 399fdaaed0 | |
Jordi Bertran de Balanda | d97a218170 | |
Jordi Bertran de Balanda | 5084c8f971 | |
Jordi Bertran de Balanda | 82c3330321 | |
williamlardier | 3388de6fb6 | |
Guillaume Hivert | db70743439 | |
Alexander Chan | 86e9d4a356 | |
williamlardier | a0010efbdd | |
Nicolas Humbert | 8eb7efd58a | |
williamlardier | 25ae7e443b | |
williamlardier | 4afa1ed78d | |
williamlardier | 706dfddf5f | |
williamlardier | 4cce306a12 | |
williamlardier | f3bf6f2615 | |
williamlardier | bbe51b2e5e | |
williamlardier | 3cd06256d6 | |
Yutaka Oishi | 6e42216549 | |
williamlardier | e37712e94f | |
williamlardier | ac30d29509 | |
Xin LI | 1f235d569d | |
williamlardier | 320713a764 | |
williamlardier | 4594578919 | |
williamlardier | bc0cb0a8fe | |
williamlardier | 9e0cee849c | |
Artem Bakalov | fbf686feab | |
Guillaume Hivert | 4b795a245c | |
Guillaume Hivert | 983d59d565 | |
Guillaume Hivert | fd7f0a1a91 | |
bert-e | 459fd99316 | |
Guillaume Hivert | d6e4bca3ed | |
Guillaume Hivert | 235b2ac6d4 | |
bert-e | f49006a64e | |
bert-e | 8025ce08fe | |
Guillaume Hivert | 75811ba553 | |
Guillaume Hivert | 26de19b22b | |
Guillaume Hivert | 72bdd130f0 | |
Guillaume Hivert | 4131732b74 | |
Guillaume Hivert | 7cecbe27be | |
Guillaume Hivert | 3fab05071d | |
Guillaume Hivert | a98f2cede5 | |
Guillaume Hivert | 283a0863c2 | |
Guillaume Hivert | 18b089fc2d | |
Guillaume Hivert | 60139abb10 | |
Guillaume Hivert | 2cc1a9886f | |
Guillaume Hivert | 1c7122b7e4 | |
Guillaume Hivert | 4eba3ca6a0 | |
Guillaume Hivert | 670d57a9db | |
Guillaume Hivert | 8784113544 | |
bert-e | bffb00266f | |
bert-e | a6cd3a67e0 | |
dependabot[bot] | 18605a9546 | |
dependabot[bot] | 74d7fe5e68 | |
dependabot[bot] | e707cf4398 | |
bert-e | 47c34a4f5c | |
bert-e | 59f7e32037 | |
Jordi Bertran de Balanda | fb286c6403 | |
Jordi Bertran de Balanda | c9f279ac9b | |
williamlardier | 7f93695300 | |
bert-e | cecb5fc1b1 | |
bert-e | 75ba3733aa | |
dependabot[bot] | 7c6f5d34b8 | |
bert-e | 7e3190a600 | |
Jordi Bertran de Balanda | e9c4a5ce99 | |
Jordi Bertran de Balanda | 2622781a1d | |
Guillaume Hivert | f378a85799 | |
bert-e | 23ea19bcb3 | |
Guillaume Hivert | c6249cd2d5 | |
KillianG | d2c1400cb6 | |
Guillaume Hivert | 97019d3b44 | |
bert-e | 6da31dfd18 | |
Guillaume Hivert | 75b4e6328e | |
Guillaume Hivert | eb9f936e78 | |
Yutaka Oishi | ee1e65d778 | |
williamlardier | 3534927ccf | |
Jordi Bertran de Balanda | 0e3edb847e | |
Jordi Bertran de Balanda | d1930c08e8 | |
bert-e | a9f9fe99a5 | |
bert-e | 3dd0fbfc80 | |
Jordi Bertran de Balanda | a587f78242 | |
Guillaume Hivert | 2202ebac8a | |
Guillaume Hivert | 40e5100cd8 | |
Guillaume Hivert | 0851aa1406 | |
Guillaume Hivert | 5c16601657 | |
Guillaume Hivert | 3ff3330f1a | |
Guillaume Hivert | 5b02d20e4d | |
Guillaume Hivert | 867da9a3d0 | |
Guillaume Hivert | c9f6d35fa4 | |
Guillaume Hivert | c79a5c2ee3 | |
Guillaume Hivert | a400beb8b9 | |
Guillaume Hivert | 8ce0b07e63 | |
Guillaume Hivert | a0876d3df5 | |
Guillaume Hivert | e829fa3d3f | |
Guillaume Hivert | da25890556 | |
Guillaume Hivert | 8df0f5863a | |
Guillaume Hivert | 2d66248303 | |
Guillaume Hivert | 8221852eef | |
Guillaume Hivert | d50e1bfd6d | |
Guillaume Hivert | 5f453789d4 | |
Guillaume Hivert | 7658481128 | |
Guillaume Hivert | 593bb31ac3 | |
Guillaume Hivert | f5e89c9660 | |
Guillaume Hivert | 62db2267fc | |
Guillaume Hivert | f6544f7a2e | |
bert-e | 3ce4effafb | |
Kerkesni | 5ec6acc061 | |
bert-e | 6c7a1316ae | |
bert-e | b1897708e5 | |
bert-e | 019907e2ab | |
bert-e | 73729c7bdb | |
Guillaume Hivert | d6635097c7 | |
Kerkesni | 3f5e553d8a | |
bert-e | efea69ff70 | |
bert-e | 187ba67cc8 | |
Guillaume Hivert | 8a2b62815b | |
bert-e | 0dbbb80bea | |
bert-e | c808873996 | |
Guillaume Hivert | 2eecda3079 | |
bert-e | 011606e146 | |
Guillaume Hivert | a3378c3df5 | |
Guillaume Hivert | 8271b3ba21 | |
Guillaume Hivert | a1b980b95b | |
Guillaume Hivert | e063eeeced | |
Guillaume Hivert | a5051cffba | |
Guillaume Hivert | 24deac9f92 | |
Guillaume Hivert | 3621c7bc77 | |
Guillaume Hivert | 57c2d4fcd8 | |
bert-e | 4c47264a78 | |
bert-e | 835ffe79c6 | |
bert-e | f69087814e | |
Ronnie Smith | cd432fa920 | |
Ronnie Smith | 1ac27e8125 | |
Ronnie Smith | af0ab673d7 | |
Ronnie Smith | deb88ae03b | |
Ronnie Smith | 334edbc17b | |
Ronnie Smith | a2777d929e | |
bert-e | 271b28e59b | |
Guillaume Hivert | 03c7b6ea3e | |
Guillaume Hivert | 872034073e | |
Guillaume Hivert | 3d39b61a46 | |
Guillaume Hivert | c55c790a5d | |
Jordi Bertran de Balanda | ccbc1ed10c | |
bert-e | 7f641d2755 | |
bert-e | 348c80060e | |
bert-e | df91750c5a | |
bert-e | b81d24c3ef | |
bert-e | 1f2caf6a01 | |
Ronnie Smith | 1333195dcd | |
bert-e | f822c7bad9 | |
bert-e | b3ce76d7d8 | |
bert-e | c03c67d9fb | |
Ronnie Smith | 0f72b7c188 | |
Artem Bakalov | 18887d10b3 | |
Artem Bakalov | 07fd3451ab | |
Ronnie Smith | 223897bbff | |
Ronnie Smith | 473e241d5c | |
bert-e | e4d888c07b | |
bert-e | dece118ba9 | |
bert-e | ffe53ab72e | |
Will Toozs | a077cc199f | |
bert-e | b0cb6d9c0f | |
Nicolas Humbert | c13cff150f | |
Alexander Chan | e0da963226 | |
bert-e | 209f3bae44 | |
bert-e | e446f20223 | |
Guillaume Hivert | e311f0d83d | |
Guillaume Hivert | dab763884a | |
Guillaume Hivert | 4f22e526ee | |
Guillaume Hivert | dd0ca967c4 | |
Guillaume Hivert | 3951bb289c | |
Guillaume Hivert | 7b0bb25358 | |
Guillaume Hivert | b97de6505c | |
Guillaume Hivert | a5ad298c3b | |
Guillaume Hivert | 57ab049565 | |
bert-e | 6919af95f2 | |
bert-e | 6a5f0964ff | |
Guillaume Hivert | b94c13a115 | |
Guillaume Hivert | 666da6b1aa | |
Guillaume Hivert | 7192d4bc93 | |
Guillaume Hivert | 66043e5cd0 | |
Guillaume Hivert | bb2951be2c | |
Guillaume Hivert | 0d68de5ec4 | |
Guillaume Hivert | f4e43f2cc7 | |
Guillaume Hivert | b829b7662e | |
Will Toozs | e4be1d8d35 | |
bert-e | 1523f6baa6 | |
Guillaume Hivert | 941d3ba73d | |
bert-e | c517e4531a | |
Guillaume Hivert | 1fc6c2db86 | |
Guillaume Hivert | c5949b547d | |
Kerkesni | 7bcb81985a | |
bert-e | 68ac02ad54 | |
Guillaume Hivert | 0d479c82c5 | |
Guillaume Hivert | f958ed3204 | |
Guillaume Hivert | 3fdd6b8e80 | |
Guillaume Hivert | 4193511d1b | |
Guillaume Hivert | 3bf00b14b8 | |
Guillaume Hivert | 7d4c22594f | |
Guillaume Hivert | 6f588c00d7 | |
Guillaume Hivert | 441630d57e | |
bert-e | 7d80db5d7f | |
Guillaume Hivert | 3946a01871 | |
bert-e | 34ef6d0434 | |
bert-e | 0ce6a79961 | |
Kerkesni | 7477b881ed | |
Guillaume Hivert | 3874d16f42 | |
Guillaume Hivert | 282dc7afb3 | |
Guillaume Hivert | 617ec1f500 | |
Guillaume Hivert | 37157118af | |
Guillaume Hivert | 33bea4adb3 | |
Guillaume Hivert | a0b62a9948 | |
Guillaume Hivert | c7c2c7ffaa | |
Guillaume Hivert | 362b82326e | |
Guillaume Hivert | 38d462c833 | |
Guillaume Hivert | 7b73e34f9f | |
Guillaume Hivert | d88ad57032 | |
Guillaume Hivert | 800f79f125 | |
Guillaume Hivert | 522dfbc0db | |
Guillaume Hivert | 918ad4c7c2 | |
Guillaume Hivert | 2c8e611a15 | |
Guillaume Hivert | 0158fb0967 | |
Guillaume Hivert | fac5605a18 | |
bert-e | 72057b1efc | |
bert-e | 529840fa37 | |
Guillaume Hivert | 0a5f7c4ea9 | |
bert-e | 0e4ac99d9d | |
KillianG | 218d21b819 | |
bert-e | 9333323301 | |
bert-e | e5929b9f91 | |
bert-e | 8998544c06 | |
KillianG | df33583aea | |
KillianG | 050d649db5 | |
bert-e | de81f65306 | |
bert-e | 5eaf67ac93 | |
bert-e | 193a399ae2 | |
bert-e | 4de18e5b26 | |
Jordi Bertran de Balanda | c7e2743bf9 | |
Jordi Bertran de Balanda | a8029d8779 | |
bert-e | d639f4cffe | |
Guillaume Hivert | b2ec34c8f2 | |
KillianG | fb31f93829 | |
bert-e | 6c6ee31f34 | |
Kerkesni | 64351cf20d | |
Ronnie Smith | b58b4d0773 | |
Ronnie Smith | 9a0915d40e | |
Ronnie Smith | 36d3a67a68 | |
Xin LI | 3d156a58dd | |
Xin LI | 7737ec4904 | |
Kerkesni | d18f4d10bd | |
Kerkesni | e0bc4383cd | |
bert-e | de17f221bf | |
Kerkesni | d46301b498 | |
Kerkesni | 0bb2a44912 | |
Guillaume Hivert | 2c1fb773fd | |
Xin.LI | 3528c24276 | |
Xin LI | 6d8294d0c0 | |
Xin LI | 23bfc17a26 | |
bert-e | 0f6a1f2982 | |
Nicolas Humbert | bff13f1190 | |
bert-e | c857e743c8 | |
Kerkesni | 5f8edd35e9 | |
Kerkesni | 3c4359b696 | |
Kerkesni | 8ecf1d9808 | |
Kerkesni | 74e4934654 | |
Kerkesni | eac87fc9de | |
Kerkesni | e2be4d895d | |
bert-e | c0f7ebbaa9 | |
Kerkesni | 60fcedc251 | |
Kerkesni | 10ef395501 | |
Kerkesni | d1c8e67901 | |
Kerkesni | 266aabef37 | |
Kerkesni | b63c909808 | |
Kerkesni | 02ee339214 | |
Kerkesni | 5ca7f86350 | |
Kerkesni | 50a4fd8dc1 | |
bert-e | 5de0c2a7da | |
Kerkesni | b942516dca | |
Kerkesni | 54181af522 | |
Kerkesni | 21af204956 | |
Kerkesni | 68a27be345 | |
Kerkesni | 06350ffe15 | |
Taylor McKinnon | 5da4cd88ff | |
bert-e | 6bb68ee0e3 | |
Taylor McKinnon | 9a4bae40e6 | |
bert-e | 54e9635cab | |
Vianney Rancurel | b8f803338b | |
Guillaume Hivert | 4a1215adb5 | |
Guillaume Hivert | fc8d7532c6 | |
Guillaume Hivert | 1818bfe6c8 | |
Guillaume Hivert | 5cd929ea8a | |
Guillaume Hivert | 1138ce43af | |
Guillaume Hivert | 8b4e9cc0aa | |
Guillaume Hivert | ff6ea2a6d5 | |
Guillaume Hivert | 3b3600db92 | |
bert-e | 51c5247d01 | |
Vianney Rancurel | 7813a312b5 | |
Thomas Carmet | 35a4552c0f | |
Vianney Rancurel | 0dbdff3a00 | |
bert-e | 80b91d724d | |
bert-e | 40843d4bed | |
bert-e | b3fd77d08f | |
Taylor McKinnon | ed6bc63e75 | |
Rached Ben Mustapha | c95f84e887 | |
Nicolas Humbert | 3c9ab1bb99 | |
Nicolas Humbert | 3c30adaf85 | |
bert-e | 98edeae3f2 | |
bert-e | 4f15e4f267 | |
Xin LI | 68c5b42e6f | |
Xin LI | 6933bb8422 | |
Xin LI | 7e180fcad8 | |
Naren | 41d482cf7d | |
Nicolas Humbert | 1e334924f9 | |
Naren | 49239cc76e | |
williamlardier | 8d17fcac0f | |
williamlardier | 1c3fcc5a65 | |
Ronnie Smith | f5b0f1e082 | |
williamlardier | 708aab707d | |
williamlardier | 3a1cbdeedb | |
bert-e | faf5701248 | |
Ronnie Smith | 4cbb5a5dd6 | |
bert-e | 22eca9b61c | |
Naren | 59a679831b | |
bert-e | 26da124e27 | |
bert-e | 47b121c17b | |
Ronnie Smith | c605c1e1a2 | |
bert-e | 994bd0a6be | |
Ronnie Smith | 1e2a6c387e | |
Ronnie Smith | 1348fc820f | |
Ronnie Smith | 79a363786f | |
bert-e | 86e3c02126 | |
bert-e | 8f6731aa6a | |
Artem Bakalov | ea2f8ebd01 | |
Artem Bakalov | b640bbb45e | |
Taylor McKinnon | d9fcf275ce | |
Ronnie Smith | 66b03695c3 | |
Rahul Padigela | 3575e651e3 | |
Rahul Padigela | fa19a34306 | |
Xin LI | 3ab7ef4e8d | |
Xin LI | e531d3eae1 | |
Nicolas Humbert | 9ebcc9690e | |
Nicolas Humbert | 95759509cb | |
williamlardier | 6cdae52d57 | |
williamlardier | 995cb59db4 | |
Alexander Chan | 385e34b472 | |
Jonathan Gramain | f102c5ec8c | |
bert-e | e912617f02 | |
williamlardier | 3abde0bc74 | |
bert-e | cf49c7d8bf | |
Alexander Chan | e6e49a70c9 | |
Rached Ben Mustapha | 77f971957b | |
Ronnie Smith | ed1d6c12c2 | |
williamlardier | 27f17f9535 | |
williamlardier | 4658651593 | |
Jonathan Gramain | 7af6a73b3b | |
bert-e | 8728ff5c80 | |
Ronnie Smith | 7c16652e57 | |
bert-e | 5a9d667936 | |
Rahul Padigela | 29dd069a5f | |
Rahul Padigela | f1793bfe51 | |
Rahul Padigela | b42f1d3943 | |
Naren | c27b359fba | |
Alexandre Lavigne | bb8bdbc6ea | |
Nicolas Humbert | 413f0c9433 | |
Nicolas Humbert | ab3fa2f13d | |
Naren | bfbda5d38b | |
Naren | 2e6b1791bb | |
Naren | 1f8cfecf43 | |
Alexandre Lavigne | 6a250feea9 | |
Thomas Carmet | 0a33d4b74e | |
Thomas Carmet | 9a544b9890 | |
Ronnie Smith | a2b6846e2e | |
Ronnie Smith | 3fdfc7196b | |
Ronnie Smith | f602fb9601 | |
Thomas Carmet | c237a25448 | |
Thomas Carmet | 5aaec6a4e6 | |
Thomas Carmet | 11278e7334 | |
bert-e | c0fe2efbc2 | |
Jonathan Gramain | b0633d8a13 | |
bert-e | b27caf5814 | |
bert-e | f5f6cb5692 | |
bert-e | 87ba4a7b4a | |
bert-e | 9ff605f875 | |
Thomas Carmet | 4e160db87d | |
bert-e | dc698f4d5c | |
bert-e | 8c7907f753 | |
bert-e | 395a881d92 | |
bert-e | 3d6306d2a3 | |
bert-e | 681740fbe7 | |
Alexander Chan | d381ec14d8 | |
bert-e | 0bdcd866bc | |
Jonathan Gramain | 856a1634d4 | |
Jonathan Gramain | 2921864aac | |
bert-e | 4665f3da5c | |
Jonathan Gramain | 0df0d952d2 | |
bert-e | 54eb3ede5f | |
bert-e | be4dea481d | |
Rached Ben Mustapha | d15e2d5df6 | |
Taylor McKinnon | 93503cf505 | |
bert-e | 0f63de2f05 | |
bert-e | 16a5e6a550 | |
Rached Ben Mustapha | 864d2e8a28 | |
vrancurel | 15703aafca | |
bert-e | db000bc5e1 | |
vrancurel | 06c35c15a5 | |
bert-e | 68c8189f53 | |
bert-e | 041731e6eb | |
Nicolas Humbert | d51361ce06 | |
Nicolas Humbert | 453fd8b722 | |
bert-e | 2621aa7e53 | |
bert-e | b4aeab77b9 | |
bert-e | e1a3b05330 | |
bert-e | 0151504158 | |
bert-e | 048e8b02bc | |
bert-e | 1d899efec8 | |
Taylor McKinnon | 4cb8f715e9 | |
bert-e | 580dda4d48 | |
bert-e | a17054e3a4 | |
bert-e | a8df2b7b96 | |
Taylor McKinnon | d572fc953b | |
Alexander Chan | 2a78d4f413 | |
Alexander Chan | d2c7165214 | |
bert-e | 1999a586fd | |
bert-e | a1c0dd2472 | |
bert-e | a22032f9a5 | |
bert-e | dd38e32797 | |
bert-e | 274bf80720 | |
Ronnie Smith | 25bd1f6111 | |
Jonathan Gramain | 2d41b034aa | |
Rached Ben Mustapha | bb8ec629bf | |
Rached Ben Mustapha | 4bbaa83b87 | |
bert-e | 58697f7915 | |
Ronnie Smith | bf4a6fe01b | |
alexandre merle | c703ba66e7 | |
alexandre merle | 20c77f9f85 | |
alexandre merle | edb27cc9a8 | |
alexandre merle | 79e0dfa38f | |
alexandre merle | e1118803e6 | |
bert-e | 1230e72c49 | |
bert-e | 372df634c4 | |
bert-e | 2b96888eb7 | |
bert-e | a0909885f1 | |
alexandre merle | 5d100645aa | |
bert-e | 356edf8478 | |
bert-e | 1cfb869631 | |
bert-e | 0403ca65fc | |
Rahul Padigela | 269e005198 | |
bert-e | 10627f51d1 | |
bert-e | aa5f714081 | |
Jonathan Gramain | d27c0577ee | |
Jonathan Gramain | ff539645ea | |
Jonathan Gramain | e5c3bb188a | |
Jonathan Gramain | 2461b5c2f7 | |
Jonathan Gramain | 747307cac2 | |
Jonathan Gramain | 5942d9d70c | |
bert-e | 8ed84786fc | |
bert-e | 1e40e76bb2 | |
bert-e | f4058dd6ef | |
bert-e | 04f7692bad | |
bert-e | 32752ac504 | |
vrancurel | 549f187893 | |
bert-e | 93cd582e3a | |
vrancurel | 2582108f97 | |
bert-e | b25867f9c2 | |
bert-e | 7b60166d08 | |
bert-e | 8887a67261 | |
Ronnie Smith | 437ecc57f9 | |
bert-e | 759f0ef949 | |
bert-e | 0014aa3467 | |
Dora Korpar | 1727f4bd3f | |
Dora Korpar | d71c8eac86 | |
bert-e | 7eb6304956 | |
bert-e | ce98e9d104 | |
bert-e | 36d932bbce | |
bert-e | 7f2c40cf6d | |
bert-e | 6a78af0f39 | |
bert-e | f73dc3dd68 | |
Jonathan Gramain | 8ec0611d08 | |
Jonathan Gramain | 6baca6f1e2 | |
bert-e | 78d62636c3 | |
Dora Korpar | 9b8f813d02 | |
Dora Korpar | 0f70366774 | |
bert-e | fb8cf65091 | |
Jonathan Gramain | 7792f7c603 | |
bert-e | 668d90b7d0 | |
bert-e | c1cfc59a0e | |
bert-e | f956b02387 | |
Jonathan Gramain | 86bca2502e | |
bert-e | 3aa49eed1d | |
Jonathan Gramain | a9c3b2218f | |
Jonathan Gramain | f459498e18 | |
bert-e | 55323aa7a2 | |
bert-e | a20e875908 | |
bert-e | a3a83f5ec8 | |
bert-e | 51d3312de8 | |
Ilke | 6383d14d49 | |
Jonathan Gramain | 0e4035d45b | |
Jonathan Gramain | a18285ced8 | |
Rahul Padigela | dc4e1829fc | |
bert-e | 3b438e03cd | |
bert-e | f2787ec013 | |
bert-e | 560ccef3ec | |
Dora Korpar | 3f4ed31153 | |
Jonathan Gramain | fc23f68d0f | |
bert-e | 2a4da20c0a | |
bert-e | 14c4696482 | |
bert-e | 275226278f | |
bert-e | b4b5712df7 | |
bert-e | 750c021c37 | |
bert-e | ee4d94c0fb | |
bert-e | 98f1d219a9 | |
Dora Korpar | fb363030c0 | |
Dora Korpar | 7aeb32e223 | |
bert-e | 5bdee7eb8a | |
bert-e | b8fd646097 | |
bert-e | a9d6e05c6e | |
Ilke | dc412e8953 | |
bert-e | 36b68be051 | |
bert-e | 3f19a00b32 | |
bert-e | ea8166cf7a | |
bert-e | c06f735e82 | |
bert-e | b8c4ae4203 | |
Dora Korpar | 0cf9a9cdd5 | |
bert-e | d201e572fd | |
bert-e | 400dc24281 | |
bert-e | f59cea6b34 | |
bert-e | f19feb949d | |
Jonathan Gramain | bbef1964d7 | |
bert-e | 43cd5f59b0 | |
bert-e | dd7390ade6 | |
Dora Korpar | a3739cc836 | |
bert-e | 97682f56bf | |
bert-e | ce4ca533e2 | |
bert-e | 26bff09887 | |
Pepijn Van Eeckhoudt | f6165146ec | |
Ilke | 9f580444f3 | |
Ilke | 93fe6fa94d | |
Jonathan Gramain | d9ff2c2060 | |
bert-e | e553342616 | |
Ilke | 8a9dbc4de7 | |
Jonathan Gramain | 81d05b6ea8 | |
bert-e | 44b8de565f | |
vrancurel | 3ed66c50f6 | |
bert-e | 90e1cff9f9 | |
Jonathan Gramain | 9f323b32ea | |
bert-e | dee53c8ad8 | |
bert-e | 9680071e1a | |
bert-e | 6dd3aa92a4 | |
bert-e | a9618bc0bb | |
bert-e | b6042035c0 | |
bert-e | d2fafe8ef3 | |
bert-e | fb18cba367 | |
bert-e | bab9d5dc24 | |
Alexander Chan | e531e5e711 | |
bert-e | f54d356669 | |
Jonathan Gramain | c1bb2ac058 | |
Jonathan Gramain | d76eeeea89 | |
Alexander Chan | ad58f66981 | |
bert-e | 85b5599ce2 | |
Dora Korpar | 3121d29140 | |
Jonathan Gramain | a75db3122f | |
bert-e | d994e2ae60 | |
Rached Ben Mustapha | c443793968 | |
Rached Ben Mustapha | 517a034291 | |
Rached Ben Mustapha | cc6671f37c | |
Rached Ben Mustapha | 87bb3126a3 | |
bert-e | cedd08686a | |
bert-e | 635d2fe6d9 | |
Jianqin Wang | 9557e36438 | |
bert-e | 2bb0e171d8 | |
bert-e | 68f5d3c9f2 | |
vrancurel | 71caf08c19 | |
Guillaume Gimenez | 38403b84aa | |
Jianqin Wang | 21610dd88d | |
bbuchanan9 | 7566d1f0a9 | |
bbuchanan9 | 28415a5c9b | |
Taylor McKinnon | 506a9ad37d | |
bert-e | 1c6e56e8ef | |
bbuchanan9 | 9d02f86cf5 | |
bert-e | 5c4547a3a9 | |
bbuchanan9 | 5de85713ef | |
Rahul Padigela | 68defde532 | |
Dora Korpar | 9e5d4ae95b | |
Dora Korpar | 633ce2c069 | |
Dora Korpar | 08ddc07d1c | |
Katherine Laue | bc6c9c8c36 | |
bert-e | 3dc9b958f7 | |
vrancurel | 4b5c0ff923 | |
vrancurel | 62536f66df | |
bert-e | 9032b89e6f | |
vrancurel | 9014761c70 | |
bert-e | 8d9864264d | |
Rahul Padigela | 839182292c | |
Rahul Padigela | a197b2b6a4 | |
bert-e | adf6cfc8e4 | |
bert-e | 40aa7d836f | |
bert-e | 4fa15fce2a | |
bert-e | 279f08c870 | |
anurag4dsb | 05a8475f1c | |
anurag4dsb | 8c664d9076 | |
Jianqin Wang | 77172f33f8 | |
Guillaume Gimenez | 0a0fe7f1da | |
Salim | 6d7437a776 | |
bert-e | 1a6174dadf | |
vrancurel | c57cde88bb | |
Rahul Padigela | 6e97c01edd | |
Rahul Padigela | dd6fde61bb | |
Benoit A | 3e8c43e05b | |
Nicolas Humbert | 633efcbc50 | |
Alexander Chan | d99b430ac4 | |
philipyoo | 8f71d4ff03 | |
Rahul Padigela | d0f77cee75 | |
bert-e | 4419db7b23 | |
Rahul Padigela | 3672df0fc4 | |
Dora Korpar | 9b223bea87 | |
Guillaume Gimenez | b7dfc3a9c0 | |
Dora Korpar | 787f66458f | |
Dora Korpar | 618b179d5c | |
bert-e | e6ddad1193 | |
bert-e | 6575be0050 | |
Jianqin Wang | 1f7263c320 | |
Jianqin Wang | 9da1a8e1f7 | |
Jianqin Wang | 14f8690a9a | |
Jianqin Wang | 700cb4eb48 | |
philipyoo | 7dd4dca7e5 | |
bert-e | a5d248000e | |
Taylor McKinnon | dae12b245b | |
bert-e | c0129eb0d7 | |
philipyoo | bd0d6c1942 | |
Jonathan Gramain | ed2d393e98 | |
bert-e | 886110138a | |
Jonathan Gramain | 397eecb370 | |
bert-e | 3623b992da | |
Jonathan Gramain | 78b64bebed | |
Dora Korpar | e857bb5f5a | |
Benoit A | 9c1dab1055 | |
bert-e | e18850911e | |
Jonathan Gramain | 2ff9cf866d | |
bbuchanan9 | cc6ed165dd | |
Dora Korpar | a6b5c21e5d | |
bbuchanan9 | 64426b1450 | |
bert-e | 160fe96b18 | |
Taylor McKinnon | 59290513e3 | |
Rahul Padigela | 6b9be35d8e | |
bbuchanan9 | dffcbefe9b | |
bbuchanan9 | c470cfb5b1 | |
philipyoo | abcff1b04e | |
bbuchanan9 | 6791d1b561 | |
bert-e | a8e0a30918 | |
philipyoo | 487fe8bf35 | |
bert-e | b7c84ef7d3 | |
bert-e | b55295818f | |
philipyoo | 0213bcfd25 | |
bert-e | 32b0946679 | |
JianqinWang | bef886d8ad | |
philipyoo | d44c2f123e | |
bert-e | f199d52c54 | |
bert-e | b9c419dde7 | |
bert-e | 5cf3948ba2 | |
bert-e | 226088c8fb | |
Rahul Padigela | bca10414bc | |
bert-e | 8f0cab8d91 | |
Jonathan Gramain | 40c234bb5f | |
bert-e | 26e2b5e425 | |
bert-e | df5a61cb8d | |
bert-e | b01a390c46 | |
Guillaume Gimenez | 87103f83e1 | |
bert-e | 9ba5d64cd2 | |
bert-e | f4d4c9b76e | |
bert-e | 2c149ea9b1 | |
philipyoo | 735ad74bda | |
bert-e | 1636c87556 | |
bert-e | 8e2d6d42a8 | |
bert-e | f11d6e223d | |
philipyoo | ebe2d1f24d | |
bert-e | 6a1bc69336 | |
bert-e | 0144158a37 | |
bert-e | aea19c9cc2 | |
bert-e | daaeb5637a | |
Dora Korpar | c479933448 | |
JianqinWang | f804aa9657 | |
Jonathan Gramain | ad35b9ec78 | |
Jonathan Gramain | 9fe0ba5c8c | |
bert-e | 2fe1e4da3c | |
bert-e | 6a4784417f | |
bert-e | 0ed8c750c9 | |
bert-e | 0d33e5a69f | |
bert-e | ac470f4233 | |
bert-e | 23d406dc81 | |
JianqinWang | f11ccbfefa | |
bert-e | c8c0527f65 | |
JianqinWang | d81d309420 | |
Dora Korpar | c657b4b469 | |
Dora Korpar | 65c99ff86d | |
Jonathan Gramain | 645433ed0c | |
JianqinWang | f9bb82ce43 | |
bert-e | ab4500b842 | |
bert-e | 40a802b715 | |
Giacomo Guiulfo | 84bf7bd511 | |
Giacomo Guiulfo | b5fa54ec11 | |
Bennett Buchanan | 58e9f26ae0 | |
Giacomo Guiulfo | d6fdd153aa | |
Giacomo Guiulfo | 1e05f0f54e | |
Giacomo Guiulfo | 9c66b7ceba | |
bert-e | 0555d0b41a | |
Guillaume Gimenez | 39f2a53beb | |
Bennett Buchanan | 0a75792ca6 | |
bert-e | 5225fc231d | |
Guillaume Gimenez | 30c3ce1e2b | |
Taylor McKinnon | aa157c6d13 | |
Bennett Buchanan | 699890d2d7 | |
Jonathan Gramain | ea1a7d4d87 | |
bert-e | a9297e707a | |
Bennett Buchanan | 75dccc528d | |
bert-e | 5d7cf78eda | |
Giacomo Guiulfo | 0a364fe379 | |
Rahul Padigela | 345031f2bd | |
greenkeeper[bot] | 0bc1fe1a71 | |
greenkeeper[bot] | f23e457b83 | |
greenkeeper[bot] | 09aca2dcf4 | |
greenkeeper[bot] | d304334e92 | |
greenkeeper[bot] | 7955b97810 | |
Rahul Padigela | d14cef843b | |
Dora Korpar | f2b39fb3d7 | |
Dora Korpar | 9a009746be | |
Jeremy Desanlis | 3e08bad2da | |
philipyoo | 13b156b226 | |
JianqinWang | 07f655c2f8 | |
JianqinWang | f496cec8bf | |
bert-e | 7f5413699d | |
Jonathan Gramain | d620fef517 | |
Jonathan Gramain | 8ac3cf5548 | |
Giacomo Guiulfo | ebd9a74666 | |
bert-e | a1f9bef60e | |
philipyoo | 899107913c | |
Jonathan Gramain | 18dfc6b4fa | |
Rahul Padigela | 9fe16c64fa | |
vrancurel | 3dee6e2d0b | |
vrancurel | 3545eb4d62 | |
Dora Korpar | 0a85eeb8b7 | |
Dora Korpar | 83759870f2 | |
Alexander Chan | 0d4bf3c17f | |
Alexander Chan | 0117b39dcf | |
Bennett Buchanan | 549ca1f683 | |
bert-e | e4a66343fb | |
philipyoo | a89fdde6fd | |
philipyoo | 872a2d88e5 | |
philipyoo | 0c9c462634 | |
philipyoo | a3973ac7d3 | |
bert-e | d1a8693fe5 | |
Jeremy Desanlis | 5687a48599 | |
Nicolas Humbert | 9dca871e1b | |
philipyoo | 7088812c80 | |
philipyoo | 9f742d4921 | |
bert-e | 2c31728905 | |
Bennett Buchanan | 125ccbbfa9 | |
bert-e | 40c8b37b30 | |
bert-e | 879075e4ec | |
philipyoo | 79ed68ce9f | |
bert-e | cbfacb5ec0 | |
philipyoo | 06dfdd9612 | |
philipyoo | bf95506495 | |
Alexander Chan | db743f8269 | |
Alexander Chan | a2311bb69c | |
Alexander Chan | c8f323237f | |
Rahul Padigela | 5cf55fcb68 | |
Rahul Padigela | de94a0e62e | |
Rahul Padigela | 2b13994795 | |
Rahul Padigela | 769a461178 | |
Rahul Padigela | c11fc1d9d8 | |
bert-e | b8ad86a1f1 | |
Giacomo Guiulfo | 12c4df722b | |
bert-e | f566e32322 | |
philipyoo | 6413c92fbc | |
bert-e | 29182cce05 | |
Jonathan Gramain | 9fb5b8b10d | |
vrancurel | 5631a892c6 | |
Rahul Padigela | dfcdea46fc | |
Rahul Padigela | be02e59bfe | |
Rahul Padigela | fdbeed1c4e | |
bert-e | 91fbc3fd23 | |
philipyoo | 241338bcfa | |
Rached Ben Mustapha | 6db80e9411 | |
bert-e | d701352635 | |
Alexander Chan | b291ccc03f | |
Bennett Buchanan | 0426f44dee | |
Rahul Padigela | 1b9242788a | |
Bennett Buchanan | 1a2ea2f353 | |
Bennett Buchanan | c36280a6e8 | |
bert-e | c749725410 | |
Alexander Chan | 3d06ec6230 | |
Jonathan Gramain | 159ebb4283 | |
Alexander Chan | e17333b19e | |
philipyoo | b3b22292c4 | |
bert-e | 68d27ed5bf | |
bert-e | 1e79964253 | |
philipyoo | 5f76343c2e | |
Alexander Chan | d907c9942d | |
Alexander Chan | c63b0713c0 | |
Alexander Chan | 6a9a88800a | |
Dora Korpar | 5834f15397 | |
bert-e | b50f6c4678 | |
bert-e | edeab02107 | |
David Pineau | c64cccdf55 | |
vrancurel | af2b3a4bc3 | |
philipyoo | 1e9ad08830 | |
David Pineau | 9e66fda610 | |
Rahul Padigela | 888e154f0e | |
Nicolas Humbert | 8448f909e4 | |
bert-e | 2b16e84733 | |
philipyoo | a1a6f65364 | |
bert-e | 7cf0c97d8e | |
Taylor McKinnon | 10e7b976d5 | |
vrancurel | e80ea95ad8 | |
Jeremy Desanlis | 7075318dd2 | |
bert-e | 38f68fba1a | |
vrancurel | 16f9a6f5f6 | |
bert-e | c48e4b89bd | |
Bennett Buchanan | 2a8169e936 | |
Alexander Chan | 1af67fffc7 | |
Guillaume Gimenez | e9ac11b1fe | |
bert-e | 30dcd6ef86 | |
Alexander Chan | 2ce9db4e01 | |
philipyoo | 9e234e2b41 | |
philipyoo | 83a831f512 | |
Guillaume Gimenez | 32c2a6fe99 | |
Rahul Padigela | 063361377c | |
Rahul Padigela | ea7f28c82d | |
Rahul Padigela | a9e760b32e | |
Rahul Padigela | 3b16a307b8 | |
Rahul Padigela | f8dfa378a1 | |
Jonathan Gramain | e16eadb474 | |
Rahul Padigela | 5bf7fef53c | |
philipyoo | 659aee2fc2 | |
Rahul Padigela | bde52ab89b | |
Jonathan Gramain | 0ddb4da8a9 | |
Rached Ben Mustapha | 56e280236b | |
Rached Ben Mustapha | f904f04401 | |
Rahul Padigela | db45fee9e8 | |
JianqinWang | ecc431c715 | |
JianqinWang | 6f694ae7f4 | |
Rahul Padigela | e7862d3922 | |
Jonathan Gramain | de7ebf70d7 | |
Rahul Padigela | 1425f03c1e | |
Alexander Chan | ad527911a2 | |
Rahul Padigela | 6c528688ee | |
Nicolas Humbert | e53aa2efd2 | |
Rahul Padigela | 873bc9b647 | |
Nicolas Humbert | 160b960607 | |
Rahul Padigela | 843bd1fe13 | |
Alexander Chan | 93a2a79699 | |
Rahul Padigela | ef32d5e94d | |
Alexander Chan | 45d9c3d999 | |
Rahul Padigela | a2ce46d8d0 | |
anurag4DSB | 0c0bffa2c3 | |
ironman-machine | d966c0bda9 | |
Rahul Padigela | cb86a857cc | |
Alexander Chan | 55c9441bd7 | |
David Pineau | cae55a65c8 | |
philipyoo | 114cbf5571 | |
Alexander Chan | f2bab3b3d6 | |
philipyoo | 3276d235bb | |
philipyoo | ee2aed10f3 | |
Rahul Padigela | 19bee770ea | |
Rahul Padigela | e0c5d03436 | |
Rahul Padigela | c8a7148645 | |
Rahul Padigela | 8ca5dce4fe | |
Bennett Buchanan | 599fb5709b | |
Rahul Padigela | 1161d5f75d | |
Rahul Padigela | 26b6c5d1d9 | |
Bennett Buchanan | 8fd50cd20e | |
Rahul Padigela | 1f6b5bf2bd | |
Rached Ben Mustapha | a7813daea9 | |
Rahul Padigela | 5d4eb84425 | |
Alexander Chan | 9511fff479 | |
Rahul Padigela | d70f64a6d0 | |
Alexander Chan | ee66dc811c | |
Rahul Padigela | 2710471726 | |
Dora Korpar | 9aee9f6cf0 | |
Rahul Padigela | a168fab266 | |
Dora Korpar | 92da4c90e5 | |
Rahul Padigela | a95d5ea15d | |
Salim | aad05faa12 | |
Rahul Padigela | ab230ebfe7 | |
Salim | b3103e1307 | |
Salim | f3b0091210 | |
Rahul Padigela | f633b91072 | |
Alexander Chan | 87807462dc | |
Rahul Padigela | d7f114d504 | |
Rached Ben Mustapha | 5ef168e654 | |
Rahul Padigela | 82b4055c6c | |
Rached Ben Mustapha | 91ccccfe85 | |
Rached Ben Mustapha | 696999874b | |
Rached Ben Mustapha | d2bed3bf9a | |
Rahul Padigela | ad42baa5ff | |
Rached Ben Mustapha | 6ac92b2ad2 | |
Rahul Padigela | 13dbf48867 | |
Rached Ben Mustapha | e79ad68e96 | |
Rahul Padigela | a4a5fe0db0 | |
Bennett Buchanan | f838fcc31f | |
VR | eb9dd23b14 | |
JianqinWang | edbf7ab650 | |
Rahul Padigela | e068950903 | |
Rahul Padigela | 1ceb7b264c | |
vrancurel | 5a29aaa10c | |
Rahul Padigela | 7587f7ba25 | |
Rahul Padigela | 795b145594 | |
Jeremy Desanlis | 58f027a693 | |
Rahul Padigela | e09348d658 | |
Alexander Chan | bddb90c6a1 | |
Rahul Padigela | 94efaaccc2 | |
Rahul Padigela | 463a8ebe15 | |
philipyoo | f17ce17857 | |
Rahul Padigela | 3a5250e2e9 | |
ironman-machine | 48cb7b3b05 | |
Nicolas Humbert | 84c4c147a2 | |
Rahul Padigela | 958e818655 | |
philipyoo | 91dd219c47 | |
Alexander Chan | 5f3d478edb | |
Rahul Padigela | 04d56cfdff | |
Rahul Padigela | 73dd529c29 | |
philipyoo | a9aa40c168 | |
ironman-machine | 189194a4e7 | |
JianqinWang | a9a6b2433d | |
JianqinWang | fa19fc8859 | |
JianqinWang | a269619698 | |
Rahul Padigela | da1da43597 | |
Rahul Padigela | caac4e4e7e | |
Rahul Padigela | 67250133dc | |
JianqinWang | d3f3be03ae | |
ironman-machine | 1a9f1afd2c | |
JianqinWang | 9a5afdbc5c | |
JianqinWang | 83cf54512b | |
ironman-machine | 7e3ad64456 | |
Nicolas Humbert | eba0cb6116 | |
Lauren Spiegel | fd23e82ab9 | |
Lauren Spiegel | d7cf5e8ccf | |
flavien-scality | d0f4f95f0d | |
Alexandre Merle | 0e606b1061 | |
ironman-machine | 44ead88d83 | |
vrancurel | d8e1497940 | |
ThibaultRiviere | 4193394340 | |
Thibault Riviere | 0f1b0dad01 | |
ironman-machine | 393d6edc07 | |
vrancurel | 70638eaf7a | |
Lauren Spiegel | 9d0156dfdf | |
Lauren Spiegel | 8d8028b83f | |
Lauren Spiegel | b99fe2cd8d | |
Lauren Spiegel | cc26f288be | |
@@ -1 +1,6 @@
-{ "extends": "scality" }
+{
+    "extends": "scality",
+    "parserOptions": {
+        "ecmaVersion": 2020
+    }
+}
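For reference, `ecmaVersion: 2020` lets the ESLint parser accept ES2020 syntax; a minimal snippet (illustrative, not from the repo) that only parses with this setting:

```javascript
// Optional chaining (?.) and nullish coalescing (??) are ES2020 features;
// with a lower "ecmaVersion", ESLint reports a parsing error on this line.
const config = { retries: 0 };
const retries = config?.retries ?? 3; // keeps 0, unlike `config.retries || 3`
console.log(retries); // 0
```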
@@ -0,0 +1,25 @@
+---
+name: codeQL
+
+on:
+  push:
+    branches: [development/*, stabilization/*, hotfix/*]
+  pull_request:
+    branches: [development/*, stabilization/*, hotfix/*]
+  workflow_dispatch:
+
+jobs:
+  analyze:
+    name: Static analysis with CodeQL
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Initialize CodeQL
+        uses: github/codeql-action/init@v3
+        with:
+          languages: javascript, typescript
+
+      - name: Build and analyze
+        uses: github/codeql-action/analyze@v3
@@ -0,0 +1,16 @@
+---
+name: dependency review
+
+on:
+  pull_request:
+    branches: [development/*, stabilization/*, hotfix/*]
+
+jobs:
+  dependency-review:
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Checkout Repository'
+        uses: actions/checkout@v4
+
+      - name: 'Dependency Review'
+        uses: actions/dependency-review-action@v4
@@ -25,24 +25,30 @@ jobs:
           - 6379:6379
     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
-      - uses: actions/setup-node@v2
+      - uses: actions/setup-node@v4
        with:
          node-version: '16'
          cache: 'yarn'
      - name: install dependencies
-       run: yarn install --frozen-lockfile --prefer-offline
+       run: yarn install --frozen-lockfile --prefer-offline --network-concurrency 1
        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
      - name: lint yaml
        run: yarn --silent lint_yml
      - name: lint javascript
-       run: yarn --silent lint -- --max-warnings 0
+       run: yarn --silent lint --max-warnings 0
      - name: lint markdown
        run: yarn --silent lint_md
-     - name: run unit tests
-       run: yarn test
+     - name: add hostname
+       run: |
+         sudo sh -c "echo '127.0.0.1 testrequestbucket.localhost' >> /etc/hosts"
+     - name: test and coverage
+       run: yarn --silent coverage
      - name: run functional tests
        run: yarn ft_test
+     - uses: codecov/codecov-action@v4
+       with:
+         token: ${{ secrets.CODECOV_TOKEN }}
      - name: run executables tests
        run: yarn install && yarn test
        working-directory: 'lib/executables/pensieveCreds/'
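The new `add hostname` step presumably exists because the functional tests resolve virtual-host-style bucket hostnames; a quick hypothetical check (not part of the suite) that the `/etc/hosts` entry took effect:

```javascript
// After `127.0.0.1 testrequestbucket.localhost` is appended to /etc/hosts,
// the bucket-style hostname should resolve to the loopback address.
const dns = require('dns');

dns.lookup('testrequestbucket.localhost', (err, address) => {
    if (err) throw err;
    console.log(address); // expected: 127.0.0.1
});
```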
@@ -53,9 +59,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
      - name: Install NodeJS
-       uses: actions/setup-node@v2
+       uses: actions/setup-node@v4
        with:
          node-version: '16'
          cache: yarn
@@ -66,7 +72,7 @@ jobs:
        run: yarn build
        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
      - name: Upload artifacts
-       uses: scality/action-artifacts@v2
+       uses: scality/action-artifacts@v4
        with:
          url: https://artifacts.scality.net
          user: ${{ secrets.ARTIFACTS_USER }}
@@ -0,0 +1,12 @@
+{
+    "$schema": "https://swc.rs/schema.json",
+    "jsc": {
+        "parser": {
+            "syntax": "typescript"
+        },
+        "target": "es2017"
+    },
+    "module": {
+        "type": "commonjs"
+    }
+}
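The new `.swcrc` configures SWC to parse TypeScript and emit ES2017 CommonJS; a minimal sketch (assuming `@swc/core` is installed, and an illustrative input string) of how the config gets picked up:

```javascript
// Compile a TypeScript snippet through SWC; with `swcrc: true` the
// options are read from the .swcrc file added above.
const swc = require('@swc/core');

swc.transform('const greet = (name: string): string => `hi ${name}`;', {
    filename: 'example.ts',
    swcrc: true,
}).then(output => console.log(output.code)); // ES2017 CommonJS output
```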
@@ -1,5 +1,7 @@
 # Arsenal
 
+[![codecov](https://codecov.io/gh/scality/Arsenal/branch/development/8.1/graph/badge.svg?token=X0esXhJSwb)](https://codecov.io/gh/scality/Arsenal)
+
 Common utilities for the S3 project components
 
 Within this repository, you will be able to find the shared libraries for the
@@ -85,6 +85,66 @@ Used to store the bucket lifecycle configuration info
 
 ### Properties Added
 
+```javascript
+this._uid = uid || uuid();
+```
+
+### Usage
+
+Used to set a unique identifier on a bucket
+
+## Model version 8
+
+### Properties Added
+
+```javascript
+this._readLocationConstraint = readLocationConstraint || null;
+```
+
+### Usage
+
+Used to store default read location of the bucket
+
+## Model version 9
+
+### Properties Added
+
+```javascript
+this._isNFS = isNFS || null;
+```
+
+### Usage
+
+Used to determine whether the bucket may be accessed through NFS
+
+## Model version 10
+
+### Properties Added
+
+```javascript
+this._ingestion = ingestionConfig || null;
+```
+
+### Usage
+
+Used to store the ingestion status of a bucket
+
+## Model version 11
+
+### Properties Added
+
+```javascript
+this._azureInfo = azureInfo || null;
+```
+
+### Usage
+
+Used to store Azure storage account specific information
+
+## Model version 12
+
+### Properties Added
+
 ```javascript
 this._objectLockEnabled = objectLockEnabled || false;
 this._objectLockConfiguration = objectLockConfiguration || null;
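Taken together, the inserted model versions 7 through 11 each document one defaulted property; a condensed sketch (illustrative only, not the actual BucketInfo constructor) of how they default when absent:

```javascript
// Each model version adds one property that falls back to a default.
const { v4: uuid } = require('uuid');

class BucketInfoSketch {
    constructor(opts = {}) {
        this._uid = opts.uid || uuid();                  // model version 7
        this._readLocationConstraint =
            opts.readLocationConstraint || null;         // model version 8
        this._isNFS = opts.isNFS || null;                // model version 9
        this._ingestion = opts.ingestionConfig || null;  // model version 10
        this._azureInfo = opts.azureInfo || null;        // model version 11
    }
}

console.log(new BucketInfoSketch()._isNFS); // null
```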
@@ -95,7 +155,7 @@ this._objectLockConfiguration = objectLockConfiguration || null;
 Used to determine whether object lock capabilities are enabled on a bucket and
 to store the object lock configuration of the bucket
 
-## Model version 8
+## Model version 13
 
 ### Properties Added
 
@@ -107,7 +167,7 @@ this._notificationConfiguration = notificationConfiguration || null;
 
 Used to store the bucket notification configuration info
 
-## Model version 9
+## Model version 14
 
 ### Properties Added
 
@@ -119,19 +179,7 @@ this._serverSideEncryption.configuredMasterKeyId = configuredMasterKeyId || unde
 
 Used to store the users configured KMS key id
 
-## Model version 10
+## Model version 15
 
-### Properties Added
-
-```javascript
-this._uid = uid || uuid();
-```
-
-### Usage
-
-Used to set a unique identifier on a bucket
-
-## Model version 11
-
 ### Properties Added
 
@@ -139,6 +187,74 @@ Used to set a unique identifier on a bucket

```javascript
this._tags = tags || null;
```

The Tag Set of a bucket is an array of objects with Key and Value:

```javascript
[
    {
        Key: 'something',
        Value: 'some_data'
    }
]
```

### Usage

Used to store bucket tagging
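As an illustration, a minimal sketch (assuming plain JavaScript values, not Arsenal's actual validation code) of checking that a value has this Tag Set shape:

```javascript
// Hypothetical check that a value matches the Tag Set shape shown above:
// an array of objects whose Key and Value are both strings.
function isValidTagSet(tags) {
    return Array.isArray(tags) && tags.every(tag =>
        tag !== null &&
        typeof tag === 'object' &&
        typeof tag.Key === 'string' &&
        typeof tag.Value === 'string');
}

console.log(isValidTagSet([{ Key: 'something', Value: 'some_data' }])); // true
console.log(isValidTagSet([{ Key: 'something' }])); // false
```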
## Model version 16

### Properties Added

```javascript
this._capabilities = capabilities || undefined;
```

For capacity-enabled buckets, contains the following data:

```javascript
{
    _capabilities: {
        VeeamSOSApi?: {
            SystemInfo?: {
                ProtocolVersion: String,
                ModelName: String,
                ProtocolCapabilities: {
                    CapacityInfo: Boolean,
                    UploadSessions: Boolean,
                    IAMSTS: Boolean,
                },
                APIEndpoints: {
                    IAMEndpoint: String,
                    STSEndpoint: String,
                },
                SystemRecommendations?: {
                    S3ConcurrentTaskLimit: Number,
                    S3MultiObjectDelete: Number,
                    StorageCurrentTasksLimit: Number,
                    KbBlockSize: Number,
                },
                LastModified?: String,
            },
            CapacityInfo?: {
                Capacity: Number,
                Available: Number,
                Used: Number,
                LastModified?: String,
            },
        },
    },
}
```
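Since every level of this structure is optional, reads should be defensive. A minimal sketch (hypothetical helper, not Arsenal code):

```javascript
// Hypothetical defensive read of the optional Veeam capacity information:
// any level of _capabilities may be absent, so use optional chaining.
function getVeeamCapacity(bucketMD) {
    return bucketMD._capabilities?.VeeamSOSApi?.CapacityInfo?.Capacity ?? null;
}

console.log(getVeeamCapacity({})); // null
console.log(getVeeamCapacity({
    _capabilities: { VeeamSOSApi: { CapacityInfo: { Capacity: 42 } } },
})); // 42
```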
## Model version 17

### Properties Added

```javascript
this._quotaMax = quotaMax || 0;
```

### Usage

Used to store bucket quota
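A short sketch of how such a value could be interpreted (assuming, as the `|| 0` default suggests, that 0 means "no quota"; hypothetical helper, not Arsenal code):

```javascript
// Hypothetical quota check: a _quotaMax of 0 is read as "no quota",
// matching the `quotaMax || 0` default above.
function wouldExceedQuota(bucketMD, currentBytes, incomingBytes) {
    if (!bucketMD._quotaMax) {
        return false; // no quota configured
    }
    return currentBytes + incomingBytes > bucketMD._quotaMax;
}

console.log(wouldExceedQuota({ _quotaMax: 0 }, 100, 100)); // false
console.log(wouldExceedQuota({ _quotaMax: 150 }, 100, 100)); // true
```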
@@ -0,0 +1,27 @@

# Delimiter

The Delimiter class handles raw listings from the database with an
optional delimiter, and fills in a curated listing with "Contents" and
"CommonPrefixes" as a result.

## Expected Behavior

- only lists keys belonging to the given **prefix** (if provided)

- groups listed keys that have a common prefix ending with a delimiter
  inside CommonPrefixes

- can take a **marker** or **continuationToken** to list from a specific key

- can take a **maxKeys** parameter to limit how many keys can be returned
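As a toy illustration of these rules (not the actual Arsenal implementation, which filters keys one at a time), the following sketch groups an already-sorted key list into Contents and CommonPrefixes:

```javascript
// Toy re-implementation of the listing rules above; keys are assumed
// to arrive sorted, as they do from the database.
function toyDelimiterListing(keys, { prefix = '', delimiter = '/', maxKeys = 1000 } = {}) {
    const Contents = [];
    const CommonPrefixes = [];
    for (const key of keys) {
        if (Contents.length + CommonPrefixes.length >= maxKeys) {
            break; // maxKeys reached: stop listing
        }
        if (!key.startsWith(prefix)) {
            continue; // only list keys under the given prefix
        }
        const idx = key.indexOf(delimiter, prefix.length);
        if (idx >= 0) {
            // Delimiter found past the prefix: group under a common prefix.
            const common = key.slice(0, idx + delimiter.length);
            if (CommonPrefixes[CommonPrefixes.length - 1] !== common) {
                CommonPrefixes.push(common);
            }
        } else {
            Contents.push(key);
        }
    }
    return { Contents, CommonPrefixes };
}

console.log(toyDelimiterListing(
    ['a.txt', 'photos/2023/a.jpg', 'photos/2024/b.jpg', 'z.txt']));
// => { Contents: [ 'a.txt', 'z.txt' ], CommonPrefixes: [ 'photos/' ] }
```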
## State Chart

- States with grey background are *Idle* states, which are waiting for
  a new listing key

- States with blue background are *Processing* states, which are
  actively processing a new listing key passed by the filter()
  function

![Delimiter State Chart](./pics/delimiterStateChart.svg)
@@ -0,0 +1,45 @@

# DelimiterMaster

The DelimiterMaster class handles raw listings from the database of a
versioned or non-versioned bucket with an optional delimiter, and
fills in a curated listing with "Contents" and "CommonPrefixes" as a
result.

## Expected Behavior

- only lists latest versions of versioned buckets

- only lists keys belonging to the given **prefix** (if provided)

- does not list latest versions that are delete markers

- groups listed keys that have a common prefix ending with a delimiter
  inside CommonPrefixes

- can take a **marker** or **continuationToken** to list from a specific key

- can take a **maxKeys** parameter to limit how many keys can be returned

- reconciles internal PHD keys with the next version (those are
  created when a specific version that is the latest version is
  deleted)

- skips internal keys like replay keys
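A rough sketch of the first three rules (assuming a hypothetical v0-style layout where a master key is followed by `<key>\0<versionId>` version keys and delete markers are flagged in the value; not Arsenal's code):

```javascript
const VID_SEP = '\0'; // assumed separator between key and versionId

// Toy master-version selection: keep only master keys under the prefix
// whose latest version is not a delete marker.
function toyMasterListing(entries, prefix = '') {
    const Contents = [];
    for (const { key, value } of entries) {
        if (key.includes(VID_SEP)) {
            continue; // specific version key: only masters are listed
        }
        if (!key.startsWith(prefix)) {
            continue; // only list keys under the given prefix
        }
        if (value.isDeleteMarker) {
            continue; // latest version is a delete marker: hide the key
        }
        Contents.push(key);
    }
    return Contents;
}

console.log(toyMasterListing([
    { key: 'doc', value: {} },
    { key: `doc${VID_SEP}v1`, value: {} },
    { key: 'gone', value: { isDeleteMarker: true } },
    { key: `gone${VID_SEP}v2`, value: { isDeleteMarker: true } },
]));
// => [ 'doc' ]
```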
## State Chart

- States with grey background are *Idle* states, which are waiting for
  a new listing key

- States with blue background are *Processing* states, which are
  actively processing a new listing key passed by the filter()
  function

### Bucket Vformat=v0

![DelimiterMaster State Chart for v0 format](./pics/delimiterMasterV0StateChart.svg)

### Bucket Vformat=v1

For buckets in versioning key format **v1**, the algorithm used is the
one from [Delimiter](delimiter.md).
@@ -0,0 +1,33 @@

# DelimiterVersions

The DelimiterVersions class handles raw listings from the database of a
versioned or non-versioned bucket with an optional delimiter, and
fills in a curated listing with "Versions" and "CommonPrefixes" as a
result.

## Expected Behavior

- lists individual distinct versions of versioned buckets

- only lists keys belonging to the given **prefix** (if provided)

- groups listed keys that have a common prefix ending with a delimiter
  inside CommonPrefixes

- can take a **keyMarker** and optionally a **versionIdMarker** to
  list from a specific key or version

- can take a **maxKeys** parameter to limit how many keys can be returned

- skips internal keys like replay keys
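A toy sketch of the marker rule (not Arsenal's code): when both markers are given, versions of the keyMarker that sort up to and including the versionIdMarker are not listed again, and the listing resumes with the versions that follow:

```javascript
// Hypothetical marker comparison; versionIds are assumed to sort in
// listing order, as the encoded version identifiers do.
function skipsVersion(key, versionId, keyMarker, versionIdMarker) {
    if (key < keyMarker) {
        return true; // before the key marker entirely
    }
    return key === keyMarker && versionId <= versionIdMarker;
}

console.log(skipsVersion('doc', '0001', 'doc', '0001')); // true: already returned
console.log(skipsVersion('doc', '0002', 'doc', '0001')); // false: listed next
console.log(skipsVersion('eel', '0001', 'doc', '0001')); // false: past the marker
```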
## State Chart

- States with grey background are *Idle* states, which are waiting for
  a new listing key

- States with blue background are *Processing* states, which are
  actively processing a new listing key passed by the filter()
  function

![DelimiterVersions State Chart](./pics/delimiterVersionsStateChart.svg)
@@ -0,0 +1,45 @@

digraph {
    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
    edge [fontsize=14];
    rankdir=TB;

    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]

    node [fillcolor="lightgrey"];
    "NotSkippingPrefixNorVersions.Idle" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
    "SkippingVersions.Idle" [label="SkippingVersions",group="SkippingVersions"];
    "WaitVersionAfterPHD.Idle" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];

    node [fillcolor="lightblue"];
    "NotSkippingPrefixNorVersions.Processing" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
    "SkippingVersions.Processing" [label="SkippingVersions",group="SkippingVersions"];
    "WaitVersionAfterPHD.Processing" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];

    START -> "SkippingVersions.Idle" [label="[marker != undefined]"]
    START -> "NotSkippingPrefixNorVersions.Idle" [label="[marker == undefined]"]

    "NotSkippingPrefixNorVersions.Idle" -> "NotSkippingPrefixNorVersions.Processing" [label="filter(key, value)"]
    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
    "SkippingVersions.Idle" -> "SkippingVersions.Processing" [label="filter(key, value)"]
    "WaitVersionAfterPHD.Idle" -> "WaitVersionAfterPHD.Processing" [label="filter(key, value)"]

    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[Version.isDeleteMarker(value)]\n-> FILTER_ACCEPT"]
    "NotSkippingPrefixNorVersions.Processing" -> "WaitVersionAfterPHD.Idle" [label="[Version.isPHD(value)]\n-> FILTER_ACCEPT"]
    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(<ReplayPrefix>)]\n/ prefix <- <ReplayPrefix>\n-> FILTER_SKIP"]
    "NotSkippingPrefixNorVersions.Processing" -> END [label="[isListableKey(key, value) and\nnKeys == maxKeys]\n-> FILTER_END"]
    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nhasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nnot hasDelimiter(key)]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]

    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
    "SkippingPrefix.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not key.startsWith(prefix)]"]

    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="[isVersionKey(key)]\n-> FILTER_SKIP"]
    "SkippingVersions.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key)]"]

    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[isVersionKey(key) and master(key) == PHDkey]\n/ key <- master(key)"]
    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key) or master(key) != PHDkey]"]
}
@@ -0,0 +1,216 @@

(new file: Graphviz-generated SVG rendering of the preceding DelimiterMaster v0 state chart; 216 lines, 18 KiB)
@@ -0,0 +1,35 @@

digraph {
    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
    edge [fontsize=14];
    rankdir=TB;

    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]

    node [fillcolor="lightgrey"];
    "NotSkipping.Idle" [label="NotSkipping",group="NotSkipping"];
    "NeverSkipping.Idle" [label="NeverSkipping",group="NeverSkipping"];
    "NotSkippingPrefix.Idle" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];

    node [fillcolor="lightblue"];
    "NeverSkipping.Processing" [label="NeverSkipping",group="NeverSkipping"];
    "NotSkippingPrefix.Processing" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];

    START -> "NotSkipping.Idle"
    "NotSkipping.Idle" -> "NeverSkipping.Idle" [label="[delimiter == undefined]"]
    "NotSkipping.Idle" -> "NotSkippingPrefix.Idle" [label="[delimiter == '/']"]

    "NeverSkipping.Idle" -> "NeverSkipping.Processing" [label="filter(key, value)"]
    "NotSkippingPrefix.Idle" -> "NotSkippingPrefix.Processing" [label="filter(key, value)"]
    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]

    "NeverSkipping.Processing" -> END [label="[nKeys == maxKeys]\n-> FILTER_END"]
    "NeverSkipping.Processing" -> "NeverSkipping.Idle" [label="[nKeys < maxKeys]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]
    "NotSkippingPrefix.Processing" -> END [label="[nKeys == maxKeys]\n-> FILTER_END"]
    "NotSkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[nKeys < maxKeys and hasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
    "NotSkippingPrefix.Processing" -> "NotSkippingPrefix.Idle" [label="[nKeys < maxKeys and not hasDelimiter(key)]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]
    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
    "SkippingPrefix.Processing" -> "NotSkippingPrefix.Processing" [label="[not key.startsWith(prefix)]"]
}
@@ -0,0 +1,166 @@

(new file: Graphviz-generated SVG rendering of the preceding Delimiter state chart; 166 lines, 12 KiB)
@@ -0,0 +1,50 @@

digraph {
    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
    edge [fontsize=14];
    rankdir=TB;

    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]

    node [fillcolor="lightgrey"];
    "NotSkipping.Idle" [label="NotSkipping",group="NotSkipping",width=4];
    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
    "WaitForNullKey.Idle" [label="WaitForNullKey",group="WaitForNullKey"];
    "SkippingVersions.Idle" [label="SkippingVersions",group="SkippingVersions"];

    node [fillcolor="lightblue"];
    "NotSkipping.Processing" [label="NotSkipping",group="NotSkipping",width=4];
    "NotSkippingV0.Processing" [label="NotSkippingV0",group="NotSkipping",width=4];
    "NotSkippingV1.Processing" [label="NotSkippingV1",group="NotSkipping",width=4];
    "NotSkippingCommon.Processing" [label="NotSkippingCommon",group="NotSkipping",width=4];
    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
    "WaitForNullKey.Processing" [label="WaitForNullKey",group="WaitForNullKey"];
    "SkippingVersions.Processing" [label="SkippingVersions",group="SkippingVersions"];

    START -> "WaitForNullKey.Idle" [label="[versionIdMarker != undefined]"]
    START -> "NotSkipping.Idle" [label="[versionIdMarker == undefined]"]

    "NotSkipping.Idle" -> "NotSkipping.Processing" [label="filter(key, value)"]
    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
    "WaitForNullKey.Idle" -> "WaitForNullKey.Processing" [label="filter(key, value)"]
    "SkippingVersions.Idle" -> "SkippingVersions.Processing" [label="filter(key, value)"]

    "NotSkipping.Processing" -> "NotSkippingV0.Processing" [label="vFormat='v0'"]
    "NotSkipping.Processing" -> "NotSkippingV1.Processing" [label="vFormat='v1'"]

    "WaitForNullKey.Processing" -> "NotSkipping.Processing" [label="master(key) != keyMarker"]
    "WaitForNullKey.Processing" -> "SkippingVersions.Processing" [label="master(key) == keyMarker"]
    "NotSkippingV0.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(<ReplayPrefix>)]\n/ prefix <- <ReplayPrefix>\n-> FILTER_SKIP"]
    "NotSkippingV0.Processing" -> "NotSkipping.Idle" [label="[Version.isPHD(value)]\n-> FILTER_ACCEPT"]
    "NotSkippingV0.Processing" -> "NotSkippingCommon.Processing" [label="[not key.startsWith(<ReplayPrefix>)\nand not Version.isPHD(value)]"]
    "NotSkippingV1.Processing" -> "NotSkippingCommon.Processing" [label="[always]"]
    "NotSkippingCommon.Processing" -> END [label="[isListableKey(key, value) and\nnKeys == maxKeys]\n-> FILTER_END"]
    "NotSkippingCommon.Processing" -> "SkippingPrefix.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nhasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
    "NotSkippingCommon.Processing" -> "NotSkipping.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nnot hasDelimiter(key)]\n/ Contents.append(key, versionId, value)\n-> FILTER_ACCEPT"]

    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
    "SkippingPrefix.Processing" -> "NotSkipping.Processing" [label="[not key.startsWith(prefix)]"]
    "SkippingVersions.Processing" -> "NotSkipping.Processing" [label="master(key) !== keyMarker or\nversionId > versionIdMarker"]
    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="master(key) === keyMarker and\nversionId < versionIdMarker\n-> FILTER_SKIP"]
    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="master(key) === keyMarker and\nversionId == versionIdMarker\n-> FILTER_ACCEPT"]
}
@@ -0,0 +1,265 @@

(new file: Graphviz-generated SVG rendering of the preceding DelimiterVersions state chart; 265 lines)
|
||||||
|
<text text-anchor="middle" x="1145.26" y="-653.2" font-family="Times,serif" font-size="16.00">SkippingVersions</text>
|
||||||
|
</g>
|
||||||
|
<!-- SkippingVersions.Idle->SkippingVersions.Processing -->
|
||||||
|
<g id="edge6" class="edge">
|
||||||
|
<title>SkippingVersions.Idle->SkippingVersions.Processing</title>
|
||||||
|
<path fill="none" stroke="black" d="M1145.26,-558.25C1145.26,-576.77 1145.26,-606.45 1145.26,-628.25"/>
|
||||||
|
<polygon fill="black" stroke="black" points="1141.76,-628.53 1145.26,-638.53 1148.76,-628.53 1141.76,-628.53"/>
|
||||||
|
<text text-anchor="middle" x="1203.26" y="-594.8" font-family="Times,serif" font-size="14.00">filter(key, value)</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkippingV0.Processing -->
|
||||||
|
<g id="node8" class="node">
|
||||||
|
<title>NotSkippingV0.Processing</title>
|
||||||
|
<path fill="lightblue" stroke="black" d="M436.26,-411C436.26,-411 172.26,-411 172.26,-411 166.26,-411 160.26,-405 160.26,-399 160.26,-399 160.26,-387 160.26,-387 160.26,-381 166.26,-375 172.26,-375 172.26,-375 436.26,-375 436.26,-375 442.26,-375 448.26,-381 448.26,-387 448.26,-387 448.26,-399 448.26,-399 448.26,-405 442.26,-411 436.26,-411"/>
|
||||||
|
<text text-anchor="middle" x="304.26" y="-389.2" font-family="Times,serif" font-size="16.00">NotSkippingV0</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkipping.Processing->NotSkippingV0.Processing -->
|
||||||
|
<g id="edge7" class="edge">
|
||||||
|
<title>NotSkipping.Processing->NotSkippingV0.Processing</title>
|
||||||
|
<path fill="none" stroke="black" d="M573.96,-521.95C558.07,-516.64 540.84,-510.46 525.26,-504 460.22,-477.02 387.62,-439.36 343.97,-415.84"/>
|
||||||
|
<polygon fill="black" stroke="black" points="345.57,-412.72 335.11,-411.04 342.24,-418.88 345.57,-412.72"/>
|
||||||
|
<text text-anchor="middle" x="573.76" y="-462.8" font-family="Times,serif" font-size="14.00">vFormat='v0'</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkippingV1.Processing -->
|
||||||
|
<g id="node9" class="node">
|
||||||
|
<title>NotSkippingV1.Processing</title>
|
||||||
|
<path fill="lightblue" stroke="black" d="M758.26,-411C758.26,-411 494.26,-411 494.26,-411 488.26,-411 482.26,-405 482.26,-399 482.26,-399 482.26,-387 482.26,-387 482.26,-381 488.26,-375 494.26,-375 494.26,-375 758.26,-375 758.26,-375 764.26,-375 770.26,-381 770.26,-387 770.26,-387 770.26,-399 770.26,-399 770.26,-405 764.26,-411 758.26,-411"/>
|
||||||
|
<text text-anchor="middle" x="626.26" y="-389.2" font-family="Times,serif" font-size="16.00">NotSkippingV1</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkipping.Processing->NotSkippingV1.Processing -->
|
||||||
|
<g id="edge8" class="edge">
|
||||||
|
<title>NotSkipping.Processing->NotSkippingV1.Processing</title>
|
||||||
|
<path fill="none" stroke="black" d="M628.91,-521.8C628.39,-496.94 627.44,-450.74 626.83,-421.23"/>
|
||||||
|
<polygon fill="black" stroke="black" points="630.32,-421.11 626.62,-411.18 623.33,-421.25 630.32,-421.11"/>
|
||||||
|
<text text-anchor="middle" x="676.76" y="-462.8" font-family="Times,serif" font-size="14.00">vFormat='v1'</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkippingV0.Processing->NotSkipping.Idle -->
|
||||||
|
<g id="edge12" class="edge">
|
||||||
|
<title>NotSkippingV0.Processing->NotSkipping.Idle</title>
|
||||||
|
<path fill="none" stroke="black" d="M304.26,-411.25C304.26,-455.74 304.26,-574.61 304.26,-628.62"/>
|
||||||
|
<polygon fill="black" stroke="black" points="300.76,-628.81 304.26,-638.81 307.76,-628.81 300.76,-628.81"/>
|
||||||
|
<text text-anchor="middle" x="385.76" y="-543.8" font-family="Times,serif" font-size="14.00">[Version.isPHD(value)]</text>
|
||||||
|
<text text-anchor="middle" x="385.76" y="-528.8" font-family="Times,serif" font-size="14.00">-> FILTER_ACCEPT</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkippingV0.Processing->SkippingPrefix.Idle -->
|
||||||
|
<g id="edge11" class="edge">
|
||||||
|
<title>NotSkippingV0.Processing->SkippingPrefix.Idle</title>
|
||||||
|
<path fill="none" stroke="black" d="M448.41,-376.93C508.52,-369.95 565.63,-362.09 570.26,-357 622.9,-299.12 594.8,-196.31 577.11,-147.78"/>
|
||||||
|
<polygon fill="black" stroke="black" points="580.33,-146.4 573.53,-138.28 573.78,-148.87 580.33,-146.4"/>
|
||||||
|
<text text-anchor="middle" x="720.26" y="-297.8" font-family="Times,serif" font-size="14.00">[key.startsWith(<ReplayPrefix>)]</text>
|
||||||
|
<text text-anchor="middle" x="720.26" y="-282.8" font-family="Times,serif" font-size="14.00">/ prefix <- <ReplayPrefix></text>
|
||||||
|
<text text-anchor="middle" x="720.26" y="-267.8" font-family="Times,serif" font-size="14.00">-> FILTER_SKIP</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkippingCommon.Processing -->
|
||||||
|
<g id="node10" class="node">
|
||||||
|
<title>NotSkippingCommon.Processing</title>
|
||||||
|
<path fill="lightblue" stroke="black" d="M436.26,-304.5C436.26,-304.5 172.26,-304.5 172.26,-304.5 166.26,-304.5 160.26,-298.5 160.26,-292.5 160.26,-292.5 160.26,-280.5 160.26,-280.5 160.26,-274.5 166.26,-268.5 172.26,-268.5 172.26,-268.5 436.26,-268.5 436.26,-268.5 442.26,-268.5 448.26,-274.5 448.26,-280.5 448.26,-280.5 448.26,-292.5 448.26,-292.5 448.26,-298.5 442.26,-304.5 436.26,-304.5"/>
|
||||||
|
<text text-anchor="middle" x="304.26" y="-282.7" font-family="Times,serif" font-size="16.00">NotSkippingCommon</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkippingV0.Processing->NotSkippingCommon.Processing -->
|
||||||
|
<g id="edge13" class="edge">
|
||||||
|
<title>NotSkippingV0.Processing->NotSkippingCommon.Processing</title>
|
||||||
|
<path fill="none" stroke="black" d="M304.26,-374.74C304.26,-358.48 304.26,-333.85 304.26,-314.9"/>
|
||||||
|
<polygon fill="black" stroke="black" points="307.76,-314.78 304.26,-304.78 300.76,-314.78 307.76,-314.78"/>
|
||||||
|
<text text-anchor="middle" x="435.26" y="-345.8" font-family="Times,serif" font-size="14.00">[not key.startsWith(<ReplayPrefix>)</text>
|
||||||
|
<text text-anchor="middle" x="435.26" y="-330.8" font-family="Times,serif" font-size="14.00">and not Version.isPHD(value)]</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkippingV1.Processing->NotSkippingCommon.Processing -->
|
||||||
|
<g id="edge14" class="edge">
|
||||||
|
<title>NotSkippingV1.Processing->NotSkippingCommon.Processing</title>
|
||||||
|
<path fill="none" stroke="black" d="M616.43,-374.83C606.75,-359.62 590.48,-338.14 570.26,-327 549.98,-315.83 505.48,-307.38 458.57,-301.23"/>
|
||||||
|
<polygon fill="black" stroke="black" points="458.9,-297.74 448.53,-299.95 458.01,-304.69 458.9,-297.74"/>
|
||||||
|
<text text-anchor="middle" x="632.26" y="-338.3" font-family="Times,serif" font-size="14.00">[always]</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkippingCommon.Processing->END -->
|
||||||
|
<g id="edge15" class="edge">
|
||||||
|
<title>NotSkippingCommon.Processing->END</title>
|
||||||
|
<path fill="none" stroke="black" d="M159.92,-279.56C109.8,-274.24 62.13,-264.33 46.26,-246 20.92,-216.72 30.42,-167.54 38.5,-140.42"/>
|
||||||
|
<polygon fill="black" stroke="black" points="41.94,-141.16 41.67,-130.57 35.27,-139.02 41.94,-141.16"/>
|
||||||
|
<text text-anchor="middle" x="152.76" y="-212.3" font-family="Times,serif" font-size="14.00">[isListableKey(key, value) and</text>
|
||||||
|
<text text-anchor="middle" x="152.76" y="-197.3" font-family="Times,serif" font-size="14.00">Keys == maxKeys]</text>
|
||||||
|
<text text-anchor="middle" x="152.76" y="-182.3" font-family="Times,serif" font-size="14.00">-> FILTER_END</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkippingCommon.Processing->NotSkipping.Idle -->
|
||||||
|
<g id="edge17" class="edge">
|
||||||
|
<title>NotSkippingCommon.Processing->NotSkipping.Idle</title>
|
||||||
|
<path fill="none" stroke="black" d="M214.74,-304.54C146.51,-322.73 57.06,-358.99 13.26,-429 -49.27,-528.95 128.43,-602.49 233.32,-635.95"/>
|
||||||
|
<polygon fill="black" stroke="black" points="232.34,-639.31 242.93,-638.97 234.43,-632.63 232.34,-639.31"/>
|
||||||
|
<text text-anchor="middle" x="156.76" y="-492.8" font-family="Times,serif" font-size="14.00">[isListableKey(key, value) and</text>
|
||||||
|
<text text-anchor="middle" x="156.76" y="-477.8" font-family="Times,serif" font-size="14.00">nKeys < maxKeys and</text>
|
||||||
|
<text text-anchor="middle" x="156.76" y="-462.8" font-family="Times,serif" font-size="14.00">not hasDelimiter(key)]</text>
|
||||||
|
<text text-anchor="middle" x="156.76" y="-447.8" font-family="Times,serif" font-size="14.00">/ Contents.append(key, versionId, value)</text>
|
||||||
|
<text text-anchor="middle" x="156.76" y="-432.8" font-family="Times,serif" font-size="14.00">-> FILTER_ACCEPT</text>
|
||||||
|
</g>
|
||||||
|
<!-- NotSkippingCommon.Processing->SkippingPrefix.Idle -->
|
||||||
|
<g id="edge16" class="edge">
|
||||||
|
<title>NotSkippingCommon.Processing->SkippingPrefix.Idle</title>
|
||||||
|
<path fill="none" stroke="black" d="M292.14,-268.23C288.18,-261.59 284.27,-253.75 282.26,-246 272.21,-207.28 255.76,-185.96 282.26,-156 293.6,-143.18 374.98,-134.02 447.74,-128.3"/>
|
||||||
|
<polygon fill="black" stroke="black" points="448.24,-131.77 457.94,-127.51 447.7,-124.79 448.24,-131.77"/>
|
||||||
|
<text text-anchor="middle" x="428.26" y="-234.8" font-family="Times,serif" font-size="14.00">[isListableKey(key, value) and</text>
|
||||||
|
<text text-anchor="middle" x="428.26" y="-219.8" font-family="Times,serif" font-size="14.00">nKeys < maxKeys and</text>
|
||||||
|
<text text-anchor="middle" x="428.26" y="-204.8" font-family="Times,serif" font-size="14.00">hasDelimiter(key)]</text>
|
||||||
|
<text text-anchor="middle" x="428.26" y="-189.8" font-family="Times,serif" font-size="14.00">/ prefix <- prefixOf(key)</text>
|
||||||
|
<text text-anchor="middle" x="428.26" y="-174.8" font-family="Times,serif" font-size="14.00">/ CommonPrefixes.append(prefixOf(key))</text>
|
||||||
|
<text text-anchor="middle" x="428.26" y="-159.8" font-family="Times,serif" font-size="14.00">-> FILTER_ACCEPT</text>
|
||||||
|
</g>
|
||||||
|
<!-- SkippingPrefix.Processing->SkippingPrefix.Idle -->
|
||||||
|
<g id="edge18" class="edge">
|
||||||
|
<title>SkippingPrefix.Processing->SkippingPrefix.Idle</title>
|
||||||
|
<path fill="none" stroke="black" d="M681.57,-36.04C679.28,-50.54 673.9,-71.03 661.26,-84 656.4,-88.99 650.77,-93.28 644.72,-96.95"/>
|
||||||
|
<polygon fill="black" stroke="black" points="642.71,-94.06 635.6,-101.92 646.05,-100.21 642.71,-94.06"/>
|
||||||
|
<text text-anchor="middle" x="759.26" y="-72.8" font-family="Times,serif" font-size="14.00">[key.startsWith(prefix)]</text>
|
||||||
|
<text text-anchor="middle" x="759.26" y="-57.8" font-family="Times,serif" font-size="14.00">-> FILTER_SKIP</text>
|
||||||
|
</g>
|
||||||
|
<!-- SkippingPrefix.Processing->NotSkipping.Processing -->
|
||||||
|
<g id="edge19" class="edge">
|
||||||
|
<title>SkippingPrefix.Processing->NotSkipping.Processing</title>
|
||||||
|
<path fill="none" stroke="black" d="M791.46,-33.51C815.84,-38.71 837.21,-45.46 846.26,-54 868.07,-74.57 864.26,-89.02 864.26,-119 864.26,-394 864.26,-394 864.26,-394 864.26,-462.4 791.27,-499.6 726.64,-519.12"/>
|
||||||
|
<polygon fill="black" stroke="black" points="725.39,-515.84 716.77,-521.99 727.35,-522.56 725.39,-515.84"/>
|
||||||
|
<text text-anchor="middle" x="961.26" y="-282.8" font-family="Times,serif" font-size="14.00">[not key.startsWith(prefix)]</text>
|
||||||
|
</g>
|
||||||
|
<!-- WaitForNullKey.Processing->NotSkipping.Processing -->
|
||||||
|
<g id="edge9" class="edge">
|
||||||
|
<title>WaitForNullKey.Processing->NotSkipping.Processing</title>
|
||||||
|
<path fill="none" stroke="black" d="M599.08,-725.78C604.81,-690.67 617.89,-610.59 624.8,-568.31"/>
|
||||||
|
<polygon fill="black" stroke="black" points="628.3,-568.61 626.46,-558.18 621.39,-567.48 628.3,-568.61"/>
|
||||||
|
<text text-anchor="middle" x="707.26" y="-653.3" font-family="Times,serif" font-size="14.00">master(key) != keyMarker</text>
|
||||||
|
</g>
|
||||||
|
<!-- WaitForNullKey.Processing->SkippingVersions.Processing -->
|
||||||
|
<g id="edge10" class="edge">
|
||||||
|
<title>WaitForNullKey.Processing->SkippingVersions.Processing</title>
|
||||||
|
<path fill="none" stroke="black" d="M704.4,-726.26C797.32,-711.87 931.09,-691.16 1026.87,-676.33"/>
|
||||||
|
<polygon fill="black" stroke="black" points="1027.55,-679.77 1036.89,-674.78 1026.47,-672.85 1027.55,-679.77"/>
|
||||||
|
<text text-anchor="middle" x="1001.26" y="-696.8" font-family="Times,serif" font-size="14.00">master(key) == keyMarker</text>
|
||||||
|
</g>
|
||||||
|
<!-- SkippingVersions.Processing->SkippingVersions.Idle -->
|
||||||
|
<g id="edge21" class="edge">
|
||||||
|
<title>SkippingVersions.Processing->SkippingVersions.Idle</title>
|
||||||
|
<path fill="none" stroke="black" d="M1241.89,-638.98C1249.74,-634.29 1256.75,-628.4 1262.26,-621 1274.21,-604.96 1274.21,-592.04 1262.26,-576 1258.82,-571.38 1254.79,-567.34 1250.33,-563.82"/>
|
||||||
|
<polygon fill="black" stroke="black" points="1252.11,-560.8 1241.89,-558.02 1248.15,-566.57 1252.11,-560.8"/>
|
||||||
|
<text text-anchor="middle" x="1392.26" y="-609.8" font-family="Times,serif" font-size="14.00">master(key) === keyMarker and </text>
|
||||||
|
<text text-anchor="middle" x="1392.26" y="-594.8" font-family="Times,serif" font-size="14.00">versionId < versionIdMarker</text>
|
||||||
|
<text text-anchor="middle" x="1392.26" y="-579.8" font-family="Times,serif" font-size="14.00">-> FILTER_SKIP</text>
|
||||||
|
</g>
|
||||||
|
<!-- SkippingVersions.Processing->SkippingVersions.Idle -->
|
||||||
|
<g id="edge22" class="edge">
|
||||||
|
<title>SkippingVersions.Processing->SkippingVersions.Idle</title>
|
||||||
|
<path fill="none" stroke="black" d="M1036.97,-654.38C978.97,-650.96 915.73,-642.25 897.26,-621 884.15,-605.9 884.15,-591.1 897.26,-576 914.65,-555.99 971.71,-547.1 1026.73,-543.28"/>
|
||||||
|
<polygon fill="black" stroke="black" points="1027.21,-546.76 1036.97,-542.62 1026.76,-539.77 1027.21,-546.76"/>
|
||||||
|
<text text-anchor="middle" x="1019.26" y="-609.8" font-family="Times,serif" font-size="14.00">master(key) === keyMarker and </text>
|
||||||
|
<text text-anchor="middle" x="1019.26" y="-594.8" font-family="Times,serif" font-size="14.00">versionId == versionIdMarker</text>
|
||||||
|
<text text-anchor="middle" x="1019.26" y="-579.8" font-family="Times,serif" font-size="14.00">-> FILTER_ACCEPT</text>
|
||||||
|
</g>
|
||||||
|
<!-- SkippingVersions.Processing->NotSkipping.Processing -->
|
||||||
|
<g id="edge20" class="edge">
|
||||||
|
<title>SkippingVersions.Processing->NotSkipping.Processing</title>
|
||||||
|
<path fill="none" stroke="black" d="M1037.02,-651.24C897.84,-644.67 672.13,-632.37 657.26,-621 641.04,-608.6 634.18,-586.13 631.3,-568.16"/>
|
||||||
|
<polygon fill="black" stroke="black" points="634.76,-567.68 630.02,-558.21 627.82,-568.57 634.76,-567.68"/>
|
||||||
|
<text text-anchor="middle" x="770.26" y="-602.3" font-family="Times,serif" font-size="14.00">master(key) !== keyMarker or </text>
|
||||||
|
<text text-anchor="middle" x="770.26" y="-587.3" font-family="Times,serif" font-size="14.00">versionId > versionIdMarker</text>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</svg>
|
After Width: | Height: | Size: 21 KiB |
|
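For orientation, the diagram summarized above encodes a filter state machine: each (key, value) record visited by a listing is dispatched on the current state, which returns FILTER_ACCEPT, FILTER_SKIP or FILTER_END and possibly moves to another state. A minimal TypeScript sketch of that dispatch, with names taken from the diagram labels, is shown below; the shipped implementation (lib/algos/list/delimiterVersions) is substantially richer, so treat this purely as a reading aid.

    // Hypothetical sketch of the state machine pictured above; not the shipped code.
    const FILTER_ACCEPT = 1;
    const FILTER_SKIP = 2;
    const FILTER_END = 3;

    type State =
        | { id: 'NotSkipping' }
        | { id: 'SkippingPrefix', prefix: string }
        | { id: 'SkippingVersions', versionIdMarker: string }
        | { id: 'WaitForNullKey' };

    class DelimiterVersionsSketch {
        state: State;

        constructor(versionIdMarker?: string) {
            // The two edges leaving START branch on the version-id marker.
            this.state = versionIdMarker === undefined
                ? { id: 'NotSkipping' }
                : { id: 'WaitForNullKey' };
        }

        filter(key: string, value: string): number {
            const state = this.state;
            switch (state.id) {
            case 'SkippingPrefix':
                // Skip keys under the current common prefix, as in the
                // SkippingPrefix.Processing edges of the diagram.
                if (key.startsWith(state.prefix)) {
                    return FILTER_SKIP;
                }
                this.state = { id: 'NotSkipping' };
                return this.filter(key, value);
            default:
                // The other states follow the same pattern: evaluate the
                // guards on their outgoing edges, update this.state, and
                // return the FILTER_* code carried by the taken edge.
                return FILTER_ACCEPT;
            }
        }
    }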
greenkeeper.json
@@ -0,0 +1,28 @@
{
    "groups": {
        "default": {
            "packages": [
                "lib/executables/pensieveCreds/package.json",
                "package.json"
            ]
        }
    },
    "branchPrefix": "improvement/greenkeeper.io/",
    "commitMessages": {
        "initialBadge": "docs(readme): add Greenkeeper badge",
        "initialDependencies": "chore(package): update dependencies",
        "initialBranches": "chore(bert-e): whitelist greenkeeper branches",
        "dependencyUpdate": "fix(package): update ${dependency} to version ${version}",
        "devDependencyUpdate": "chore(package): update ${dependency} to version ${version}",
        "dependencyPin": "fix: pin ${dependency} to ${oldVersionResolved}",
        "devDependencyPin": "chore: pin ${dependency} to ${oldVersionResolved}",
        "closes": "\n\nCloses #${number}"
    },
    "ignore": [
        "ajv",
        "eslint",
        "eslint-plugin-react",
        "eslint-config-airbnb",
        "eslint-config-scality"
    ]
}
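Note on the commitMessages block above: the ${...} tokens are template placeholders that Greenkeeper fills in when it opens an update PR. For instance, "dependencyUpdate" with ${dependency} = werelogs and ${version} = 8.1.0 would yield the commit title "fix(package): update werelogs to version 8.1.0" (illustrative values, not taken from this repository).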
index.ts (178 changed lines)
@@ -1,104 +1,105 @@
+import * as evaluators from './lib/policyEvaluator/evaluator';
+import evaluatePrincipal from './lib/policyEvaluator/principal';
+import RequestContext, {
+    actionNeedQuotaCheck,
+    actionNeedQuotaCheckCopy,
+    actionWithDataDeletion } from './lib/policyEvaluator/RequestContext';
+import * as requestUtils from './lib/policyEvaluator/requestUtils';
+import * as actionMaps from './lib/policyEvaluator/utils/actionMaps';
+import { validateUserPolicy } from './lib/policy/policyValidator'
+import * as locationConstraints from './lib/patches/locationConstraints';
+import * as userMetadata from './lib/s3middleware/userMetadata';
+import convertToXml from './lib/s3middleware/convertToXml';
+import escapeForXml from './lib/s3middleware/escapeForXml';
+import * as objectLegalHold from './lib/s3middleware/objectLegalHold';
+import * as tagging from './lib/s3middleware/tagging';
+import { checkDateModifiedHeaders } from './lib/s3middleware/validateConditionalHeaders';
+import { validateConditionalHeaders } from './lib/s3middleware/validateConditionalHeaders';
+import MD5Sum from './lib/s3middleware/MD5Sum';
+import NullStream from './lib/s3middleware/nullStream';
+import * as objectUtils from './lib/s3middleware/objectUtils';
+import * as mpuUtils from './lib/s3middleware/azureHelpers/mpuUtils';
+import ResultsCollector from './lib/s3middleware/azureHelpers/ResultsCollector';
+import SubStreamInterface from './lib/s3middleware/azureHelpers/SubStreamInterface';
+import { prepareStream } from './lib/s3middleware/prepareStream';
+import * as processMpuParts from './lib/s3middleware/processMpuParts';
+import * as retention from './lib/s3middleware/objectRetention';
+import * as objectRestore from './lib/s3middleware/objectRestore';
+import * as lifecycleHelpers from './lib/s3middleware/lifecycleHelpers';
 export { default as errors } from './lib/errors';
-export const auth = require('./lib/auth/auth');
-export const constants = require('./lib/constants');
-export const db = require('./lib/db');
-export const shuffle = require('./lib/shuffle');
-export const stringHash = require('./lib/stringHash');
-export const ipCheck = require('./lib/ipCheck');
-export const jsutil = require('./lib/jsutil');
-export const Clustering = require('./lib/Clustering');
-export const https = {
-    ciphers: require('./lib/https/ciphers.js'),
-    dhparam: require('./lib/https/dh2048.js'),
-};
+export { default as Clustering } from './lib/Clustering';
+export * as ClusterRPC from './lib/clustering/ClusterRPC';
+export * as ipCheck from './lib/ipCheck';
+export * as auth from './lib/auth/auth';
+export * as constants from './lib/constants';
+export * as https from './lib/https';
+export * as metrics from './lib/metrics';
+export * as network from './lib/network';
+export * as s3routes from './lib/s3routes';
+export * as versioning from './lib/versioning';
+export * as stream from './lib/stream';
+export * as jsutil from './lib/jsutil';
+export { default as stringHash } from './lib/stringHash';
+export * as db from './lib/db';
+export * as errorUtils from './lib/errorUtils';
+export { default as shuffle } from './lib/shuffle';
+export * as models from './lib/models';
 
 export const algorithms = {
-    list: {
-        Basic: require('./lib/algos/list/basic').List,
-        Delimiter: require('./lib/algos/list/delimiter').Delimiter,
-        DelimiterVersions: require('./lib/algos/list/delimiterVersions').DelimiterVersions,
-        DelimiterMaster: require('./lib/algos/list/delimiterMaster').DelimiterMaster,
-        MPU: require('./lib/algos/list/MPU').MultipartUploads,
-    },
+    list: require('./lib/algos/list/exportAlgos'),
     listTools: {
         DelimiterTools: require('./lib/algos/list/tools'),
+        Skip: require('./lib/algos/list/skip'),
     },
     cache: {
+        GapSet: require('./lib/algos/cache/GapSet'),
+        GapCache: require('./lib/algos/cache/GapCache'),
         LRUCache: require('./lib/algos/cache/LRUCache'),
     },
     stream: {
         MergeStream: require('./lib/algos/stream/MergeStream'),
     },
     SortedSet: require('./lib/algos/set/SortedSet'),
+    Heap: require('./lib/algos/heap/Heap'),
 };
 
 export const policies = {
-    evaluators: require('./lib/policyEvaluator/evaluator.js'),
-    validateUserPolicy: require('./lib/policy/policyValidator').validateUserPolicy,
-    evaluatePrincipal: require('./lib/policyEvaluator/principal'),
-    RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
-    requestUtils: require('./lib/policyEvaluator/requestUtils'),
-    actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
+    evaluators,
+    validateUserPolicy,
+    evaluatePrincipal,
+    RequestContext,
+    requestUtils,
+    actionMaps,
+    actionNeedQuotaCheck,
+    actionWithDataDeletion,
+    actionNeedQuotaCheckCopy,
 };
 
 export const testing = {
     matrix: require('./lib/testing/matrix.js'),
 };
 
-export const versioning = {
-    VersioningConstants: require('./lib/versioning/constants.js').VersioningConstants,
-    Version: require('./lib/versioning/Version.js').Version,
-    VersionID: require('./lib/versioning/VersionID.js'),
-    WriteGatheringManager: require('./lib/versioning/WriteGatheringManager.js'),
-    WriteCache: require('./lib/versioning/WriteCache.js'),
-    VersioningRequestProcessor: require('./lib/versioning/VersioningRequestProcessor.js'),
-};
-
-export const network = {
-    http: {
-        server: require('./lib/network/http/server'),
-        utils: require('./lib/network/http/utils'),
-    },
-    rpc: require('./lib/network/rpc/rpc'),
-    level: require('./lib/network/rpc/level-net'),
-    rest: {
-        RESTServer: require('./lib/network/rest/RESTServer'),
-        RESTClient: require('./lib/network/rest/RESTClient'),
-    },
-    probe: {
-        ProbeServer: require('./lib/network/probe/ProbeServer'),
-    },
-    RoundRobin: require('./lib/network/RoundRobin'),
-    kmip: require('./lib/network/kmip'),
-    kmipClient: require('./lib/network/kmip/Client'),
-};
-
-export const s3routes = {
-    routes: require('./lib/s3routes/routes'),
-    routesUtils: require('./lib/s3routes/routesUtils'),
-};
-
 export const s3middleware = {
-    userMetadata: require('./lib/s3middleware/userMetadata'),
-    convertToXml: require('./lib/s3middleware/convertToXml'),
-    escapeForXml: require('./lib/s3middleware/escapeForXml'),
-    objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
-    tagging: require('./lib/s3middleware/tagging'),
-    validateConditionalHeaders:
-        require('./lib/s3middleware/validateConditionalHeaders').validateConditionalHeaders,
-    MD5Sum: require('./lib/s3middleware/MD5Sum'),
-    NullStream: require('./lib/s3middleware/nullStream'),
-    objectUtils: require('./lib/s3middleware/objectUtils'),
+    userMetadata,
+    convertToXml,
+    escapeForXml,
+    objectLegalHold,
+    tagging,
+    checkDateModifiedHeaders,
+    validateConditionalHeaders,
+    MD5Sum,
+    NullStream,
+    objectUtils,
     azureHelper: {
-        mpuUtils: require('./lib/s3middleware/azureHelpers/mpuUtils'),
-        ResultsCollector: require('./lib/s3middleware/azureHelpers/ResultsCollector'),
-        SubStreamInterface: require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
+        mpuUtils,
+        ResultsCollector,
+        SubStreamInterface,
     },
-    processMpuParts: require('./lib/s3middleware/processMpuParts'),
-    retention: require('./lib/s3middleware/objectRetention'),
-    lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
+    prepareStream,
+    processMpuParts,
+    retention,
+    objectRestore,
+    lifecycleHelpers,
 };
 
 export const storage = {
@@ -165,31 +166,10 @@ export const storage = {
     utils: require('./lib/storage/utils'),
 };
 
-export const models = {
-    BucketInfo: require('./lib/models/BucketInfo'),
-    ObjectMD: require('./lib/models/ObjectMD'),
-    ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
-    ARN: require('./lib/models/ARN'),
-    WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
-    ReplicationConfiguration: require('./lib/models/ReplicationConfiguration'),
-    LifecycleConfiguration: require('./lib/models/LifecycleConfiguration'),
-    LifecycleRule: require('./lib/models/LifecycleRule'),
-    BucketPolicy: require('./lib/models/BucketPolicy'),
-    ObjectLockConfiguration: require('./lib/models/ObjectLockConfiguration'),
-    NotificationConfiguration: require('./lib/models/NotificationConfiguration'),
-};
-
-export const metrics = {
-    StatsClient: require('./lib/metrics/StatsClient'),
-    StatsModel: require('./lib/metrics/StatsModel'),
-    RedisClient: require('./lib/metrics/RedisClient'),
-    ZenkoMetrics: require('./lib/metrics/ZenkoMetrics'),
-};
-
 export const pensieve = {
     credentialUtils: require('./lib/executables/pensieveCreds/utils'),
 };
 
-export const stream = {
-    readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
-};
+export const patches = {
+    locationConstraints,
+};
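The net effect of this index.ts rewrite is that the aggregated objects previously built with require() become typed ES-module named exports, with several of them (versioning, network, s3routes, models, metrics) delegated to their own modules. A sketch of how consumer code changes, assuming the package is consumed under the name 'arsenal' and that exportAlgos re-exports the listing algorithms:

    // Before: aggregate objects assembled with require().
    //     const arsenal = require('arsenal');
    //     const { DelimiterVersions } = arsenal.algorithms.list;

    // After: typed named exports, usable with tree-shaking bundlers.
    import { errors, algorithms, policies } from 'arsenal';

    const { DelimiterVersions } = algorithms.list;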
lib/Clustering.ts
@@ -1,18 +1,28 @@
-'use strict'; // eslint-disable-line
+import cluster, { Worker } from 'cluster';
+import * as werelogs from 'werelogs';
 
-const cluster = require('cluster');
-
-class Clustering {
+export default class Clustering {
+    _size: number;
+    _shutdownTimeout: number;
+    _logger: werelogs.Logger;
+    _shutdown: boolean;
+    _workers: (Worker | undefined)[];
+    _workersTimeout: (NodeJS.Timeout | undefined)[];
+    _workersStatus: (number | string | undefined)[];
+    _status: number;
+    _exitCb?: (clustering: Clustering, exitSignal?: string) => void;
+    _index?: number;
+
     /**
      * Constructor
      *
-     * @param {number} size Cluster size
-     * @param {Logger} logger Logger object
-     * @param {number} [shutdownTimeout=5000] Change default shutdown timeout
+     * @param size Cluster size
+     * @param logger Logger object
+     * @param [shutdownTimeout=5000] Change default shutdown timeout
      * releasing ressources
-     * @return {Clustering} itself
+     * @return itself
      */
-    constructor(size, logger, shutdownTimeout) {
+    constructor(size: number, logger: werelogs.Logger, shutdownTimeout?: number) {
         this._size = size;
         if (size < 1) {
             throw new Error('Cluster size must be greater than or equal to 1');
@@ -32,7 +42,6 @@ class Clustering {
      * Method called after a stop() call
      *
      * @private
-     * @return {undefined}
      */
     _afterStop() {
         // Asuming all workers shutdown gracefully
@@ -41,10 +50,11 @@ class Clustering {
         for (let i = 0; i < size; ++i) {
             // If the process return an error code or killed by a signal,
             // set the status
-            if (typeof this._workersStatus[i] === 'number') {
-                this._status = this._workersStatus[i];
+            const status = this._workersStatus[i];
+            if (typeof status === 'number') {
+                this._status = status;
                 break;
-            } else if (typeof this._workersStatus[i] === 'string') {
+            } else if (typeof status === 'string') {
                 this._status = 1;
                 break;
             }
@@ -58,13 +68,17 @@ class Clustering {
     /**
      * Method called when a worker exited
      *
-     * @param {Cluster.worker} worker - Current worker
-     * @param {number} i - Worker index
-     * @param {number} code - Exit code
-     * @param {string} signal - Exit signal
-     * @return {undefined}
+     * @param worker - Current worker
+     * @param i - Worker index
+     * @param code - Exit code
+     * @param signal - Exit signal
      */
-    _workerExited(worker, i, code, signal) {
+    _workerExited(
+        worker: Worker,
+        i: number,
+        code: number,
+        signal: string,
+    ) {
         // If the worker:
         // - was killed by a signal
         // - return an error code
@@ -91,8 +105,9 @@ class Clustering {
             this._workersStatus[i] = undefined;
         }
         this._workers[i] = undefined;
-        if (this._workersTimeout[i]) {
-            clearTimeout(this._workersTimeout[i]);
+        const timeout = this._workersTimeout[i];
+        if (timeout) {
+            clearTimeout(timeout);
             this._workersTimeout[i] = undefined;
         }
         // If we don't trigger the stop method, the watchdog
@@ -110,29 +125,28 @@ class Clustering {
     /**
      * Method to start a worker
      *
-     * @param {number} i Index of the starting worker
-     * @return {undefined}
+     * @param i Index of the starting worker
      */
-    startWorker(i) {
-        if (!cluster.isMaster) {
+    startWorker(i: number) {
+        if (!cluster.isPrimary) {
             return;
         }
         // Fork a new worker
         this._workers[i] = cluster.fork();
         // Listen for message from the worker
-        this._workers[i].on('message', msg => {
+        this._workers[i]!.on('message', msg => {
             // If the worker is ready, send him his id
             if (msg === 'ready') {
-                this._workers[i].send({ msg: 'setup', id: i });
+                this._workers[i]!.send({ msg: 'setup', id: i });
             }
         });
-        this._workers[i].on('exit', (code, signal) =>
-            this._workerExited(this._workers[i], i, code, signal));
+        this._workers[i]!.on('exit', (code, signal) =>
+            this._workerExited(this._workers[i]!, i, code, signal));
         // Trigger when the worker was started
-        this._workers[i].on('online', () => {
+        this._workers[i]!.on('online', () => {
             this._logger.info('Worker started', {
                 id: i,
-                childPid: this._workers[i].process.pid,
+                childPid: this._workers[i]!.process.pid,
             });
         });
     }
@@ -140,10 +154,10 @@ class Clustering {
     /**
      * Method to put handler on cluster exit
      *
-     * @param {function} cb - Callback(Clustering, [exitSignal])
-     * @return {Clustering} Itself
+     * @param cb - Callback(Clustering, [exitSignal])
+     * @return Itself
      */
-    onExit(cb) {
+    onExit(cb: (clustering: Clustering, exitSignal?: string) => void) {
         this._exitCb = cb;
         return this;
     }
@@ -152,33 +166,33 @@ class Clustering {
      * Method to start the cluster (if master) or to start the callback
      * (worker)
      *
-     * @param {function} cb - Callback to run the worker
-     * @return {Clustering} itself
+     * @param cb - Callback to run the worker
+     * @return itself
      */
-    start(cb) {
+    start(cb: (clustering: Clustering) => void) {
         process.on('SIGINT', () => this.stop('SIGINT'));
         process.on('SIGHUP', () => this.stop('SIGHUP'));
         process.on('SIGQUIT', () => this.stop('SIGQUIT'));
         process.on('SIGTERM', () => this.stop('SIGTERM'));
         process.on('SIGPIPE', () => {});
-        process.on('exit', (code, signal) => {
+        process.on('exit', (code?: number, signal?: string) => {
             if (this._exitCb) {
                 this._status = code || 0;
                 return this._exitCb(this, signal);
             }
             return process.exit(code || 0);
         });
-        process.on('uncaughtException', err => {
+        process.on('uncaughtException', (err: Error) => {
             this._logger.fatal('caught error', {
                 error: err.message,
-                stack: err.stack.split('\n').map(str => str.trim()),
+                stack: err.stack?.split('\n')?.map(str => str.trim()),
             });
             process.exit(1);
         });
-        if (!cluster.isMaster) {
+        if (!cluster.isPrimary) {
             // Waiting for message from master to
             // know the id of the slave cluster
-            process.on('message', msg => {
+            process.on('message', (msg: any) => {
                 if (msg.msg === 'setup') {
                     this._index = msg.id;
                     cb(this);
@@ -186,7 +200,7 @@ class Clustering {
             });
             // Send message to the master, to let him know
             // the worker has started
-            process.send('ready');
+            process.send?.('ready');
         } else {
             for (let i = 0; i < this._size; ++i) {
                 this.startWorker(i);
@@ -198,7 +212,7 @@ class Clustering {
     /**
      * Method to get workers
      *
-     * @return {Cluster.Worker[]} Workers
+     * @return Workers
      */
     getWorkers() {
         return this._workers;
@@ -207,7 +221,7 @@ class Clustering {
     /**
      * Method to get the status of the cluster
      *
-     * @return {number} Status code
+     * @return Status code
      */
     getStatus() {
         return this._status;
@@ -216,7 +230,7 @@ class Clustering {
     /**
      * Method to return if it's the master process
      *
-     * @return {boolean} - True if master, false otherwise
+     * @return - True if master, false otherwise
     */
     isMaster() {
         return this._index === undefined;
@@ -225,7 +239,7 @@ class Clustering {
    /**
     * Method to get index of the worker
     *
-     * @return {number|undefined} Worker index, undefined if it's master
+     * @return Worker index, undefined if it's master
     */
     getIndex() {
         return this._index;
@@ -234,11 +248,10 @@ class Clustering {
     /**
      * Method to stop the cluster
      *
-     * @param {string} signal - Set internally when processes killed by signal
-     * @return {undefined}
+     * @param signal - Set internally when processes killed by signal
      */
-    stop(signal) {
-        if (!cluster.isMaster) {
+    stop(signal?: string) {
+        if (!cluster.isPrimary) {
             if (this._exitCb) {
                 return this._exitCb(this, signal);
             }
@@ -251,13 +264,17 @@ class Clustering {
             }
             this._workersTimeout[i] = setTimeout(() => {
                 // Kill the worker if the sigterm was ignored or take too long
+                if (worker.process.pid) {
                     process.kill(worker.process.pid, 'SIGKILL');
+                }
             }, this._shutdownTimeout);
             // Send sigterm to the process, allowing to release ressources
             // and save some states
+            if (worker.process.pid) {
                 return process.kill(worker.process.pid, 'SIGTERM');
+            } else {
+                return true;
+            }
         });
     }
 }
 
-module.exports = Clustering;
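Taken together, the diff above keeps Clustering's runtime behaviour while typing its API for TypeScript and moving from cluster.isMaster to cluster.isPrimary. A minimal usage sketch under those typings (logger name and worker count are illustrative):

    import Clustering from './lib/Clustering';
    import * as werelogs from 'werelogs';

    const logger = new werelogs.Logger('ClusteringExample');
    const clustering = new Clustering(4, logger);

    clustering
        .onExit(c => logger.info('cluster stopped', { status: c.getStatus() }))
        .start(c => {
            // Runs in each worker once the primary has sent its 'setup' message.
            logger.info('worker online', { index: c.getIndex() });
        });

On stop(), each worker first receives SIGTERM and is only sent SIGKILL after the shutdown timeout elapses, which is what the process.kill() calls newly guarded by worker.process.pid implement at the end of the diff.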
@ -0,0 +1,363 @@
|
||||||
|
import { OrderedSet } from '@js-sdsl/ordered-set';
|
||||||
|
import {
|
||||||
|
default as GapSet,
|
||||||
|
GapSetEntry,
|
||||||
|
} from './GapSet';
|
||||||
|
|
||||||
|
// the API is similar but is not strictly a superset of GapSetInterface
|
||||||
|
// so we don't extend from it
|
||||||
|
export interface GapCacheInterface {
|
||||||
|
exposureDelayMs: number;
|
||||||
|
maxGapWeight: number;
|
||||||
|
size: number;
|
||||||
|
|
||||||
|
setGap: (firstKey: string, lastKey: string, weight: number) => void;
|
||||||
|
removeOverlappingGaps: (overlappingKeys: string[]) => number;
|
||||||
|
lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
|
||||||
|
[Symbol.iterator]: () => Iterator<GapSetEntry>;
|
||||||
|
toArray: () => GapSetEntry[];
|
||||||
|
};
|
||||||
|
|
||||||
|
class GapCacheUpdateSet {
|
||||||
|
newGaps: GapSet;
|
||||||
|
updatedKeys: OrderedSet<string>;
|
||||||
|
|
||||||
|
constructor(maxGapWeight: number) {
|
||||||
|
this.newGaps = new GapSet(maxGapWeight);
|
||||||
|
this.updatedKeys = new OrderedSet();
|
||||||
|
}
|
||||||
|
|
||||||
|
addUpdateBatch(updatedKeys: OrderedSet<string>): void {
|
||||||
|
this.updatedKeys.union(updatedKeys);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cache of listing "gaps" i.e. ranges of keys that can be skipped
|
||||||
|
* over during listing (because they only contain delete markers as
|
||||||
|
* latest versions).
|
||||||
|
*
|
||||||
|
* Typically, a single GapCache instance would be attached to a raft session.
|
||||||
|
*
|
||||||
|
* The API usage is as follows:
|
||||||
|
*
|
||||||
|
* - Initialize a GapCache instance by calling start() (this starts an internal timer)
|
||||||
|
*
|
||||||
|
* - Insert a gap or update an existing one via setGap()
|
||||||
|
*
|
||||||
|
* - Lookup existing gaps via lookupGap()
|
||||||
|
*
|
||||||
|
* - Invalidate gaps that overlap a specific set of keys via removeOverlappingGaps()
|
||||||
|
*
|
||||||
|
* - Shut down a GapCache instance by calling stop() (this stops the internal timer)
|
||||||
|
*
|
||||||
|
* Gaps inserted via setGap() are not exposed immediately to lookupGap(), but only:
|
||||||
|
*
|
||||||
|
* - after a certain delay always larger than 'exposureDelayMs' and usually shorter
|
||||||
|
* than twice this value (but might be slightly longer in rare cases)
|
||||||
|
*
|
||||||
|
* - and only if they haven't been invalidated by a recent call to removeOverlappingGaps()
|
||||||
|
*
|
||||||
|
* This ensures atomicity between gap creation and invalidation from updates under
|
||||||
|
* the condition that a gap is created from first key to last key within the time defined
|
||||||
|
* by 'exposureDelayMs'.
|
||||||
|
*
|
||||||
|
* The implementation is based on two extra temporary "update sets" on top of the main
|
||||||
|
* exposed gap set, one called "staging" and the other "frozen", each containing a
|
||||||
|
* temporary updated gap set and a list of updated keys to invalidate gaps with (coming
|
||||||
|
* from calls to removeOverlappingGaps()). Every "exposureDelayMs" milliseconds, the frozen
|
||||||
|
* gaps are invalidated by all key updates coming from either of the "staging" or "frozen"
|
||||||
|
* update set, then merged into the exposed gaps set, after which the staging updates become
|
||||||
|
* the frozen updates and won't receive any new gap until the next cycle.
|
||||||
|
*/
|
||||||
|
export default class GapCache implements GapCacheInterface {
|
||||||
|
_exposureDelayMs: number;
|
||||||
|
maxGaps: number;
|
||||||
|
|
||||||
|
_stagingUpdates: GapCacheUpdateSet;
|
||||||
|
_frozenUpdates: GapCacheUpdateSet;
|
||||||
|
_exposedGaps: GapSet;
|
||||||
|
_exposeFrozenInterval: NodeJS.Timeout | null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @constructor
|
||||||
|
*
|
||||||
|
* @param {number} exposureDelayMs - minimum delay between
|
||||||
|
* insertion of a gap via setGap() and its exposure via
|
||||||
|
* lookupGap()
|
||||||
|
* @param {number} maxGaps - maximum number of cached gaps, after
|
||||||
|
* which no new gap can be added by setGap(). (Note: a future
|
||||||
|
* improvement could replace this by an eviction strategy)
|
||||||
|
* @param {number} maxGapWeight - maximum "weight" of individual
|
||||||
|
* cached gaps, which is also the granularity for
|
||||||
|
* invalidation. Individual gaps can be chained together,
|
||||||
|
* which lookupGap() transparently consolidates in the response
|
||||||
|
* into a single large gap.
|
||||||
|
*/
|
||||||
|
constructor(exposureDelayMs: number, maxGaps: number, maxGapWeight: number) {
|
||||||
|
this._exposureDelayMs = exposureDelayMs;
|
||||||
|
this.maxGaps = maxGaps;
|
||||||
|
|
||||||
|
this._stagingUpdates = new GapCacheUpdateSet(maxGapWeight);
|
||||||
|
this._frozenUpdates = new GapCacheUpdateSet(maxGapWeight);
|
||||||
|
this._exposedGaps = new GapSet(maxGapWeight);
|
||||||
|
this._exposeFrozenInterval = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a GapCache from an array of exposed gap entries (used in tests)
|
||||||
|
*
|
||||||
|
* @return {GapCache} - a new GapCache instance
|
||||||
|
*/
|
||||||
|
static createFromArray(
|
||||||
|
gaps: GapSetEntry[],
|
||||||
|
exposureDelayMs: number,
|
||||||
|
maxGaps: number,
|
||||||
|
maxGapWeight: number
|
||||||
|
): GapCache {
|
||||||
|
const gapCache = new GapCache(exposureDelayMs, maxGaps, maxGapWeight);
|
||||||
|
gapCache._exposedGaps = GapSet.createFromArray(gaps, maxGapWeight)
|
||||||
|
return gapCache;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Internal helper to remove gaps in the staging and frozen sets
|
||||||
|
* overlapping with previously updated keys, right before the
|
||||||
|
* frozen gaps get exposed.
|
||||||
|
*
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
_removeOverlappingGapsBeforeExpose(): void {
|
||||||
|
for (const { updatedKeys } of [this._stagingUpdates, this._frozenUpdates]) {
|
||||||
|
if (updatedKeys.size() === 0) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
for (const { newGaps } of [this._stagingUpdates, this._frozenUpdates]) {
|
||||||
|
if (newGaps.size === 0) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
newGaps.removeOverlappingGaps(updatedKeys);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This function is the core mechanism that updates the exposed gaps in the
|
||||||
|
* cache. It is called on a regular interval defined by 'exposureDelayMs'.
|
||||||
|
*
|
||||||
|
* It does the following in order:
|
||||||
|
*
|
||||||
|
* - remove gaps from the frozen set that overlap with any key present in a
|
||||||
|
* batch passed to removeOverlappingGaps() since the last two triggers of
|
||||||
|
* _exposeFrozen()
|
||||||
|
*
|
||||||
|
* - merge the remaining gaps from the frozen set to the exposed set, which
|
||||||
|
* makes them visible from calls to lookupGap()
|
||||||
|
*
|
||||||
|
* - rotate by freezing the currently staging updates and initiating a new
|
||||||
|
* staging updates set
|
||||||
|
*
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
_exposeFrozen(): void {
|
||||||
|
this._removeOverlappingGapsBeforeExpose();
|
||||||
|
for (const gap of this._frozenUpdates.newGaps) {
|
||||||
|
// Use a trivial strategy to keep the cache size within
|
||||||
|
// limits: refuse to add new gaps when the size is above
|
||||||
|
// the 'maxGaps' threshold. We solely rely on
|
||||||
|
// removeOverlappingGaps() to make space for new gaps.
|
||||||
|
if (this._exposedGaps.size < this.maxGaps) {
|
||||||
|
this._exposedGaps.setGap(gap.firstKey, gap.lastKey, gap.weight);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this._frozenUpdates = this._stagingUpdates;
|
||||||
|
this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start the internal GapCache timer
|
||||||
|
*
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
start(): void {
|
||||||
|
if (this._exposeFrozenInterval) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
this._exposeFrozenInterval = setInterval(
|
||||||
|
() => this._exposeFrozen(),
|
||||||
|
this._exposureDelayMs);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop the internal GapCache timer
|
||||||
|
*
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
stop(): void {
|
||||||
|
if (this._exposeFrozenInterval) {
|
||||||
|
clearInterval(this._exposeFrozenInterval);
|
||||||
|
this._exposeFrozenInterval = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Record a gap between two keys, associated with a weight to
|
||||||
|
* limit individual gap's spanning ranges in the cache, for a more
|
||||||
|
* granular invalidation.
|
||||||
|
*
|
||||||
|
* The function handles splitting and merging existing gaps to
|
||||||
|
* maintain an optimal weight of cache entries.
|
||||||
|
*
|
||||||
|
* NOTE 1: the caller must ensure that the full length of the gap
|
||||||
|
* between 'firstKey' and 'lastKey' has been built from a listing
|
||||||
|
* snapshot that is more recent than 'exposureDelayMs' milliseconds,
|
||||||
|
* in order to guarantee that the exposed gap will be fully
|
||||||
|
* covered (and potentially invalidated) from recent calls to
|
||||||
|
* removeOverlappingGaps().
|
||||||
|
*
|
||||||
|
* NOTE 2: a usual pattern when building a large gap from multiple
|
||||||
|
* calls to setGap() is to start the next gap from 'lastKey',
|
||||||
|
* which will be passed as 'firstKey' in the next call, so that
|
||||||
|
* gaps can be chained together and consolidated by lookupGap().
|
||||||
|
*
|
||||||
|
* @param {string} firstKey - first key of the gap
|
||||||
|
* @param {string} lastKey - last key of the gap, must be greater
|
||||||
|
* or equal than 'firstKey'
|
||||||
|
* @param {number} weight - total weight between 'firstKey' and 'lastKey'
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
    setGap(firstKey: string, lastKey: string, weight: number): void {
        this._stagingUpdates.newGaps.setGap(firstKey, lastKey, weight);
    }

    /**
     * Remove gaps that overlap with a given set of keys. Used to
     * invalidate gaps when keys are inserted or deleted.
     *
     * @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that
     * overlap with any of this set of keys
     * @return {number} - how many gaps were removed from the exposed
     * gaps only (overlapping gaps not yet exposed are also invalidated
     * but are not accounted for in the returned value)
     */
    removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
        let overlappingKeysSet;
        if (Array.isArray(overlappingKeys)) {
            overlappingKeysSet = new OrderedSet(overlappingKeys);
        } else {
            overlappingKeysSet = overlappingKeys;
        }
        this._stagingUpdates.addUpdateBatch(overlappingKeysSet);
        return this._exposedGaps.removeOverlappingGaps(overlappingKeysSet);
    }

    /**
     * Lookup the next exposed gap that overlaps with [minKey, maxKey]. Internally
     * chained gaps are coalesced in the response into a single contiguous large gap.
     *
     * @param {string} minKey - minimum key overlapping with the returned gap
     * @param {string} [maxKey] - maximum key overlapping with the returned gap
     * @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
     * was found, null otherwise, as a Promise
     */
    lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
        return this._exposedGaps.lookupGap(minKey, maxKey);
    }

    /**
     * Get the maximum weight setting for individual gaps.
     *
     * @return {number} - maximum weight of individual gaps
     */
    get maxGapWeight(): number {
        return this._exposedGaps.maxWeight;
    }

    /**
     * Set the maximum weight setting for individual gaps.
     *
     * @param {number} gapWeight - maximum weight of individual gaps
     */
    set maxGapWeight(gapWeight: number) {
        this._exposedGaps.maxWeight = gapWeight;
        // also update transient gap sets
        this._stagingUpdates.newGaps.maxWeight = gapWeight;
        this._frozenUpdates.newGaps.maxWeight = gapWeight;
    }

    /**
     * Get the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap().
     *
     * @return {number} - exposure delay in milliseconds
     */
    get exposureDelayMs(): number {
        return this._exposureDelayMs;
    }

    /**
     * Set the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap(). Setting this attribute automatically updates the
     * internal state to honor the new value.
     *
     * @param {number} exposureDelayMs - exposure delay in milliseconds
     */
    set exposureDelayMs(exposureDelayMs: number) {
        if (exposureDelayMs !== this._exposureDelayMs) {
            this._exposureDelayMs = exposureDelayMs;
            if (this._exposeFrozenInterval) {
                // invalidate all pending gap updates, as the new interval may not be
                // safe for them
                this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
                this._frozenUpdates = new GapCacheUpdateSet(this.maxGapWeight);

                // reinitialize the _exposeFrozenInterval timer with the updated delay
                this.stop();
                this.start();
            }
        }
    }

    /**
     * Get the number of exposed gaps
     *
     * @return {number} number of exposed gaps
     */
    get size(): number {
        return this._exposedGaps.size;
    }

    /**
     * Iterate over exposed gaps
     *
     * @return {Iterator<GapSetEntry>} an iterator over exposed gaps
     */
    [Symbol.iterator](): Iterator<GapSetEntry> {
        return this._exposedGaps[Symbol.iterator]();
    }

    /**
     * Get an array of all exposed gaps
     *
     * @return {GapSetEntry[]} array of exposed gaps
     */
    toArray(): GapSetEntry[] {
        return this._exposedGaps.toArray();
    }

    /**
     * Clear all exposed and staging gaps from the cache.
     *
     * Note: retains invalidating updates from removeOverlappingGaps()
     * for correctness of gaps inserted afterwards.
     *
     * @return {undefined}
     */
    clear(): void {
        this._stagingUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._frozenUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._exposedGaps = new GapSet(this.maxGapWeight);
    }
}
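To make the staging/exposure lifecycle above concrete, here is a minimal usage sketch. The `GapCache` constructor arguments `(exposureDelayMs, maxGaps, maxGapWeight)`, the `start()`/`stop()` timer methods and the default export are assumed from the earlier part of this file; key names, weights and timings are illustrative.

```ts
// Sketch only: constructor signature and import path are assumptions,
// not shown in this excerpt.
import GapCache from './GapCache';

async function demo(): Promise<void> {
    const cache = new GapCache(100, 1000, 10);
    cache.start(); // starts the exposure timer

    // stage a gap: not visible to lookupGap() before the exposure delay
    cache.setGap('key-b', 'key-e', 4);
    console.log(await cache.lookupGap('key-a', 'key-z')); // null (still staging)

    // after at least two exposure periods the staged gap becomes exposed
    await new Promise(resolve => setTimeout(resolve, 300));
    console.log(await cache.lookupGap('key-a', 'key-z'));
    // -> { firstKey: 'key-b', lastKey: 'key-e', weight: 4 }

    // a write on 'key-c' invalidates the overlapping gap
    cache.removeOverlappingGaps(['key-c']);
    console.log(await cache.lookupGap('key-a', 'key-z')); // null again
    cache.stop();
}
```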
@@ -0,0 +1,366 @@
import assert from 'assert';
import { OrderedSet } from '@js-sdsl/ordered-set';

import errors from '../../errors';

export type GapSetEntry = {
    firstKey: string,
    lastKey: string,
    weight: number,
};

export interface GapSetInterface {
    maxWeight: number;
    size: number;

    setGap: (firstKey: string, lastKey: string, weight: number) => GapSetEntry;
    removeOverlappingGaps: (overlappingKeys: string[]) => number;
    lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
    [Symbol.iterator]: () => Iterator<GapSetEntry>;
    toArray: () => GapSetEntry[];
};

/**
 * Specialized data structure to support caching of listing "gaps",
 * i.e. ranges of keys that can be skipped over during listing
 * (because they only contain delete markers as latest versions)
 */
export default class GapSet implements GapSetInterface, Iterable<GapSetEntry> {
    _gaps: OrderedSet<GapSetEntry>;
    _maxWeight: number;

    /**
     * @constructor
     *
     * @param {number} maxWeight - weight threshold for each cached
     * gap (unitless). Triggers splitting gaps when reached
     */
    constructor(maxWeight: number) {
        this._gaps = new OrderedSet(
            [],
            (left: GapSetEntry, right: GapSetEntry) => (
                left.firstKey < right.firstKey ? -1 :
                    left.firstKey > right.firstKey ? 1 : 0
            )
        );
        this._maxWeight = maxWeight;
    }

    /**
     * Create a GapSet from an array of gap entries (used in tests)
     */
    static createFromArray(gaps: GapSetEntry[], maxWeight: number): GapSet {
        const gapSet = new GapSet(maxWeight);
        for (const gap of gaps) {
            gapSet._gaps.insert(gap);
        }
        return gapSet;
    }

    /**
     * Record a gap between two keys, associated with a weight to limit
     * individual gap sizes in the cache.
     *
     * The function handles splitting and merging existing gaps to
     * maintain an optimal weight of cache entries.
     *
     * @param {string} firstKey - first key of the gap
     * @param {string} lastKey - last key of the gap, must be greater
     * than or equal to 'firstKey'
     * @param {number} weight - total weight between 'firstKey' and 'lastKey'
     * @return {GapSetEntry} - existing or new gap entry
     */
    setGap(firstKey: string, lastKey: string, weight: number): GapSetEntry {
        assert(lastKey >= firstKey);

        // Step 1/4: Find the closest left-overlapping gap, and either re-use it
        // or chain it with a new gap depending on the weights if it exists (otherwise
        // just create a new gap).
        const curGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey });
        let curGap;
        if (curGapIt.isAccessible()) {
            curGap = curGapIt.pointer;
            if (curGap.lastKey >= lastKey) {
                // return fully overlapping gap already cached
                return curGap;
            }
        }
        let remainingWeight = weight;
        if (!curGap // no previous gap
            || curGap.lastKey < firstKey // previous gap not overlapping
            || (curGap.lastKey === firstKey // previous gap overlapping by one key...
                && curGap.weight + weight > this._maxWeight) // ...but we can't extend it
        ) {
            // create a new gap indexed by 'firstKey'
            curGap = { firstKey, lastKey: firstKey, weight: 0 };
            this._gaps.insert(curGap);
        } else if (curGap.lastKey > firstKey && weight > this._maxWeight) {
            // previous gap is either fully or partially contained in the new gap
            // and cannot be extended: subtract its weight from the total (heuristic
            // in case the previous gap doesn't start at 'firstKey', which is the
            // uncommon case)
            remainingWeight -= curGap.weight;

            // there may be an existing chained gap starting with the previous gap's
            // 'lastKey': use it if it exists
            const chainedGapIt = this._gaps.find(<GapSetEntry>{ firstKey: curGap.lastKey });
            if (chainedGapIt.isAccessible()) {
                curGap = chainedGapIt.pointer;
            } else {
                // no existing chained gap: chain a new gap to the previous gap
                curGap = {
                    firstKey: curGap.lastKey,
                    lastKey: curGap.lastKey,
                    weight: 0,
                };
                this._gaps.insert(curGap);
            }
        }
        // Step 2/4: Cleanup existing gaps fully included in firstKey -> lastKey, and
        // aggregate their weights in curGap to define the minimum weight up to the
        // last merged gap.
        let nextGap;
        while (true) {
            const nextGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: curGap.firstKey });
            nextGap = nextGapIt.isAccessible() && nextGapIt.pointer;
            // stop the cleanup when no more gap or if the next gap is not fully
            // included in curGap
            if (!nextGap || nextGap.lastKey > lastKey) {
                break;
            }
            this._gaps.eraseElementByIterator(nextGapIt);
            curGap.lastKey = nextGap.lastKey;
            curGap.weight += nextGap.weight;
        }

        // Step 3/4: Extend curGap to lastKey, adjusting the weight.
        // At this point, curGap weight is the minimum weight of the finished gap, save it
        // for step 4.
        let minMergedWeight = curGap.weight;
        if (curGap.lastKey === firstKey && firstKey !== lastKey) {
            // extend the existing gap by the full amount 'firstKey -> lastKey'
            curGap.lastKey = lastKey;
            curGap.weight += remainingWeight;
        } else if (curGap.lastKey <= lastKey) {
            curGap.lastKey = lastKey;
            curGap.weight = remainingWeight;
        }

        // Step 4/4: Find the closest right-overlapping gap, and if it exists, either merge
        // it or chain it with curGap depending on the weights.
        if (nextGap && nextGap.firstKey <= lastKey) {
            // nextGap overlaps with the new gap: check if we can merge it
            minMergedWeight += nextGap.weight;
            let mergedWeight;
            if (lastKey === nextGap.firstKey) {
                // nextGap is chained with curGap: add the full weight of nextGap
                mergedWeight = curGap.weight + nextGap.weight;
            } else {
                // strict overlap: don't add nextGap's weight unless
                // it's larger than the sum of merged ranges (as it is
                // then included in `minMergedWeight`)
                mergedWeight = Math.max(curGap.weight, minMergedWeight);
            }
            if (mergedWeight <= this._maxWeight) {
                // merge nextGap into curGap
                curGap.lastKey = nextGap.lastKey;
                curGap.weight = mergedWeight;
                this._gaps.eraseElementByKey(nextGap);
            } else {
                // adjust the last key to chain with nextGap and subtract the next
                // gap's weight from curGap (heuristic)
                curGap.lastKey = nextGap.firstKey;
                curGap.weight = Math.max(mergedWeight - nextGap.weight, 0);
                curGap = nextGap;
            }
        }
        // return a copy of curGap
        return Object.assign({}, curGap);
    }

    /**
     * Remove gaps that overlap with one or more keys in a given array or
     * OrderedSet. Used to invalidate gaps when keys are inserted or deleted.
     *
     * @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that overlap
     * with any of this set of keys
     * @return {number} - how many gaps were removed
     */
    removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
        // To optimize processing with a large number of keys and/or gaps, this function:
        //
        // 1. converts the overlappingKeys array to an OrderedSet (if not already an OrderedSet)
        // 2. queries both the gaps set and the overlapping keys set in a loop, which allows:
        //    - skipping ranges of overlapping keys at once when there is no new overlapping gap
        //    - skipping ranges of gaps at once when there is no overlapping key
        //
        // This way, it is efficient when the number of non-overlapping gaps is large
        // (which is the most common case in practice).

        let overlappingKeysSet;
        if (Array.isArray(overlappingKeys)) {
            overlappingKeysSet = new OrderedSet(overlappingKeys);
        } else {
            overlappingKeysSet = overlappingKeys;
        }
        const firstKeyIt = overlappingKeysSet.begin();
        let currentKey = firstKeyIt.isAccessible() && firstKeyIt.pointer;
        let nRemoved = 0;
        while (currentKey) {
            const closestGapIt = this._gaps.reverseUpperBound(<GapSetEntry>{ firstKey: currentKey });
            if (closestGapIt.isAccessible()) {
                const closestGap = closestGapIt.pointer;
                if (currentKey <= closestGap.lastKey) {
                    // currentKey overlaps closestGap: remove the gap
                    this._gaps.eraseElementByIterator(closestGapIt);
                    nRemoved += 1;
                }
            }
            const nextGapIt = this._gaps.lowerBound(<GapSetEntry>{ firstKey: currentKey });
            if (!nextGapIt.isAccessible()) {
                // no more gap: we're done
                return nRemoved;
            }
            const nextGap = nextGapIt.pointer;
            // advance to the last key potentially overlapping with nextGap
            let currentKeyIt = overlappingKeysSet.reverseLowerBound(nextGap.lastKey);
            if (currentKeyIt.isAccessible()) {
                currentKey = currentKeyIt.pointer;
                if (currentKey >= nextGap.firstKey) {
                    // currentKey overlaps nextGap: remove the gap
                    this._gaps.eraseElementByIterator(nextGapIt);
                    nRemoved += 1;
                }
            }
            // advance to the first key potentially overlapping with another gap
            currentKeyIt = overlappingKeysSet.lowerBound(nextGap.lastKey);
            currentKey = currentKeyIt.isAccessible() && currentKeyIt.pointer;
        }
        return nRemoved;
    }

    /**
     * Internal helper to coalesce multiple chained gaps into a single gap.
     *
     * It is only used to construct lookupGap() return values and
     * doesn't modify the GapSet.
     *
     * NOTE: The function may take a noticeable amount of time and CPU
     * to execute if a large number of chained gaps have to be
     * coalesced, but it should never take more than a few seconds. In
     * most cases it should take less than a millisecond. It regularly
     * yields to the Node.js event loop to avoid blocking it during a
     * long execution.
     *
     * @param {GapSetEntry} firstGap - first gap of the chain to coalesce with
     * the next ones in the chain
     * @return {Promise<GapSetEntry>} - a new coalesced entry, as a Promise
     */
    _coalesceGapChain(firstGap: GapSetEntry): Promise<GapSetEntry> {
        return new Promise(resolve => {
            const coalescedGap: GapSetEntry = Object.assign({}, firstGap);
            const coalesceGapChainIteration = () => {
                // efficiency trade-off: 100 iterations of log(N) complexity lookups should
                // not block the event loop for too long
                for (let opCounter = 0; opCounter < 100; ++opCounter) {
                    const chainedGapIt = this._gaps.find(
                        <GapSetEntry>{ firstKey: coalescedGap.lastKey });
                    if (!chainedGapIt.isAccessible()) {
                        // chain is complete
                        return resolve(coalescedGap);
                    }
                    const chainedGap = chainedGapIt.pointer;
                    if (chainedGap.firstKey === chainedGap.lastKey) {
                        // found a single-key gap: chain is complete
                        return resolve(coalescedGap);
                    }
                    coalescedGap.lastKey = chainedGap.lastKey;
                    coalescedGap.weight += chainedGap.weight;
                }
                // yield to the event loop before continuing the process
                // of coalescing the gap chain
                return process.nextTick(coalesceGapChainIteration);
            };
            coalesceGapChainIteration();
        });
    }

    /**
     * Lookup the next gap that overlaps with [minKey, maxKey]. Internally chained
     * gaps are coalesced in the response into a single contiguous large gap.
     *
     * @param {string} minKey - minimum key overlapping with the returned gap
     * @param {string} [maxKey] - maximum key overlapping with the returned gap
     * @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
     * was found, null otherwise, as a Promise
     */
    async lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
        let firstGap: GapSetEntry | null = null;
        const minGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey: minKey });
        const minGap = minGapIt.isAccessible() && minGapIt.pointer;
        if (minGap && minGap.lastKey >= minKey) {
            firstGap = minGap;
        } else {
            const maxGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: minKey });
            const maxGap = maxGapIt.isAccessible() && maxGapIt.pointer;
            if (maxGap && (maxKey === undefined || maxGap.firstKey <= maxKey)) {
                firstGap = maxGap;
            }
        }
        if (!firstGap) {
            return null;
        }
        return this._coalesceGapChain(firstGap);
    }

    /**
     * Get the maximum weight setting for individual gaps.
     *
     * @return {number} - maximum weight of individual gaps
     */
    get maxWeight(): number {
        return this._maxWeight;
    }

    /**
     * Set the maximum weight setting for individual gaps.
     *
     * @param {number} gapWeight - maximum weight of individual gaps
     */
    set maxWeight(gapWeight: number) {
        this._maxWeight = gapWeight;
    }

    /**
     * Get the number of gaps stored in this set.
     *
     * @return {number} - number of gaps stored in this set
     */
    get size(): number {
        return this._gaps.size();
    }

    /**
     * Iterate over each gap of the set, ordered by first key
     *
     * @return {Iterator<GapSetEntry>} - an iterator over all gaps
     * Example:
     *     for (const gap of myGapSet) { ... }
     */
    [Symbol.iterator](): Iterator<GapSetEntry> {
        return this._gaps[Symbol.iterator]();
    }

    /**
     * Return an array containing all gaps, ordered by first key
     *
     * NOTE: there is a toArray() method in the OrderedSet implementation
     * but it does not scale well and overflows the stack quickly. This is
     * why we provide an implementation based on an iterator.
     *
     * @return {GapSetEntry[]} - an array containing all gaps
     */
    toArray(): GapSetEntry[] {
        return [...this];
    }
}
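A worked sketch of the weight-driven chaining and coalescing described above, traced against the implementation (keys and weights are illustrative; `maxWeight` is 10):

```ts
import GapSet from './GapSet';

async function demo(): Promise<void> {
    const gaps = new GapSet(10);

    gaps.setGap('bat', 'bay', 6);
    // extending 'bat' -> 'cat' would reach weight 11 > maxWeight,
    // so setGap() chains a second gap starting at 'bay' instead
    gaps.setGap('bay', 'cat', 5);
    console.log(gaps.toArray());
    // -> [ { firstKey: 'bat', lastKey: 'bay', weight: 6 },
    //      { firstKey: 'bay', lastKey: 'cat', weight: 5 } ]

    // lookupGap() coalesces the chain into one contiguous gap
    console.log(await gaps.lookupGap('bar', 'zoo'));
    // -> { firstKey: 'bat', lastKey: 'cat', weight: 11 }

    // a key inserted inside the first gap invalidates only that gap
    gaps.removeOverlappingGaps(['baw']);
    console.log(gaps.size); // 1 (only 'bay' -> 'cat' remains)
}
```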
@@ -0,0 +1,124 @@
export enum HeapOrder {
    Min = -1,
    Max = 1,
}

export enum CompareResult {
    LT = -1,
    EQ = 0,
    GT = 1,
}

export type CompareFunction = (x: any, y: any) => CompareResult;

export class Heap {
    size: number;
    _maxSize: number;
    _order: HeapOrder;
    _heap: any[];
    _cmpFn: CompareFunction;

    constructor(size: number, order: HeapOrder, cmpFn: CompareFunction) {
        this.size = 0;
        this._maxSize = size;
        this._order = order;
        this._cmpFn = cmpFn;
        this._heap = new Array<any>(this._maxSize);
    }

    _parent(i: number): number {
        return Math.floor((i - 1) / 2);
    }

    _left(i: number): number {
        return Math.floor((2 * i) + 1);
    }

    _right(i: number): number {
        return Math.floor((2 * i) + 2);
    }

    _shouldSwap(childIdx: number, parentIdx: number): boolean {
        return this._cmpFn(this._heap[childIdx], this._heap[parentIdx]) as number === this._order as number;
    }

    _swap(i: number, j: number) {
        const tmp = this._heap[i];
        this._heap[i] = this._heap[j];
        this._heap[j] = tmp;
    }

    _heapify(i: number) {
        const l = this._left(i);
        const r = this._right(i);
        let c = i;

        if (l < this.size && this._shouldSwap(l, c)) {
            c = l;
        }

        if (r < this.size && this._shouldSwap(r, c)) {
            c = r;
        }

        if (c !== i) {
            this._swap(c, i);
            this._heapify(c);
        }
    }

    add(item: any): any {
        if (this.size >= this._maxSize) {
            return new Error('Max heap size reached');
        }

        ++this.size;
        let c = this.size - 1;
        this._heap[c] = item;

        while (c > 0) {
            if (!this._shouldSwap(c, this._parent(c))) {
                return null;
            }

            this._swap(c, this._parent(c));
            c = this._parent(c);
        }

        return null;
    }

    remove(): any {
        if (this.size <= 0) {
            return null;
        }

        const ret = this._heap[0];
        this._heap[0] = this._heap[this.size - 1];
        this._heapify(0);
        --this.size;

        return ret;
    }

    peek(): any {
        if (this.size <= 0) {
            return null;
        }

        return this._heap[0];
    }
}

export class MinHeap extends Heap {
    constructor(size: number, cmpFn: CompareFunction) {
        super(size, HeapOrder.Min, cmpFn);
    }
}

export class MaxHeap extends Heap {
    constructor(size: number, cmpFn: CompareFunction) {
        super(size, HeapOrder.Max, cmpFn);
    }
}
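A minimal usage sketch of these heap classes (module path and values illustrative). The comparator must return a `CompareResult` so that `_shouldSwap()` can match it against the heap order (-1 for min, 1 for max):

```ts
import { MinHeap, CompareResult } from './Heap'; // path assumed

const cmp = (x: number, y: number): CompareResult =>
    x < y ? CompareResult.LT : x > y ? CompareResult.GT : CompareResult.EQ;

const heap = new MinHeap(16, cmp); // fixed capacity of 16 entries
[5, 1, 4].forEach(n => heap.add(n)); // add() returns an Error when full
console.log(heap.peek());   // 1
console.log(heap.remove()); // 1
console.log(heap.remove()); // 4
console.log(heap.size);     // 1
```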
@@ -1,6 +1,6 @@
 'use strict'; // eslint-disable-line strict

-const { FILTER_SKIP, SKIP_NONE } = require('./tools');
+const { FILTER_ACCEPT, SKIP_NONE } = require('./tools');

 // Use a heuristic to amortize the cost of JSON
 // serialization/deserialization only on largest metadata where the
@@ -92,21 +92,26 @@ class Extension {
      * @param {object} entry - a listing entry from metadata
      *                         expected format: { key, value }
      * @return {number} - result of filtering the entry:
-     *                    > 0: entry is accepted and included in the result
-     *                    = 0: entry is accepted but not included (skipping)
-     *                    < 0: entry is not accepted, listing should finish
+     *                    FILTER_ACCEPT: entry is accepted and may or may not
+     *                    be included in the result
+     *                    FILTER_SKIP: listing may skip directly (with "gte"
+     *                    param) to the key returned by the skipping() method
+     *                    FILTER_END: the results are complete, listing can
+     *                    be stopped
      */
-    filter(entry) {
-        return entry ? FILTER_SKIP : FILTER_SKIP;
+    filter(/* entry: { key, value } */) {
+        return FILTER_ACCEPT;
     }

     /**
-     * Provides the insight into why filter is skipping an entry. This could be
-     * because it is skipping a range of delimited keys or a range of specific
-     * version when doing master version listing.
+     * Provides the next key at which the listing task is allowed to skip.
+     * This can allow skipping over:
+     * - a key prefix ending with the delimiter
+     * - all remaining versions of an object when doing a current
+     *   versions listing in v0 format
+     * - a cached "gap" of deleted objects when doing a current
+     *   versions listing in v0 format
      *
-     * @return {string} - the insight: a common prefix or a master key,
-     *                    or SKIP_NONE if there is no insight
+     * @return {string} - the next key at which the listing task is allowed to skip
      */
     skipping() {
         return SKIP_NONE;
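For orientation, a sketch of how a listing backend is expected to consume this filter()/skipping() contract. The `FILTER_*` constants come from `./tools` as above; the `EntryStream` type and its `seek()` method are hypothetical stand-ins for the metadata iterator:

```ts
const { FILTER_END, FILTER_SKIP } = require('./tools');

// hypothetical entry iterator; seek() re-issues the metadata
// query with a "gte" parameter
type EntryStream = {
    next(): { key: string, value: string } | null;
    seek(gte: string): void;
};

function listWith(extension: any, stream: EntryStream): any {
    let entry = stream.next();
    while (entry) {
        const res = extension.filter(entry);
        if (res === FILTER_END) {
            break; // enough results: stop listing
        }
        if (res === FILTER_SKIP) {
            // jump past a whole range (prefix, versions, or cached gap)
            stream.seek(extension.skipping());
        }
        entry = stream.next();
    }
    return extension.result();
}
```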
@@ -1,7 +1,7 @@
 'use strict'; // eslint-disable-line strict

 const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
-    FILTER_END, FILTER_ACCEPT } = require('./tools');
+    FILTER_END, FILTER_ACCEPT, SKIP_NONE } = require('./tools');
 const DEFAULT_MAX_KEYS = 1000;
 const VSConst = require('../../versioning/constants').VersioningConstants;
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
@@ -163,7 +163,7 @@ class MultipartUploads {
     }

     skipping() {
-        return '';
+        return SKIP_NONE;
     }

     /**
@@ -2,7 +2,7 @@

 const Extension = require('./Extension').default;

-const { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
+const { checkLimit, FILTER_END, FILTER_ACCEPT } = require('./tools');
 const DEFAULT_MAX_KEYS = 10000;

 /**
@@ -91,7 +91,7 @@ class List extends Extension {
      * < 0 : listing done
      */
     filter(elem) {
-        // Check first in case of maxkeys <= 0
+        // Check if the result array is full
         if (this.keys >= this.maxKeys) {
             return FILTER_END;
         }
@@ -99,7 +99,7 @@ class List extends Extension {
             this.filterKeyStartsWith !== undefined) &&
             typeof elem === 'object' &&
             !this.customFilter(elem.value)) {
-            return FILTER_SKIP;
+            return FILTER_ACCEPT;
         }
         if (typeof elem === 'object') {
             this.res.push({
@@ -1,274 +0,0 @@
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;
const { inc, listingParamsMasterKeysV0ToV1,
    FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

/**
 * Find the common prefix in the path
 *
 * @param {String} key - path of the object
 * @param {String} delimiter - separator
 * @param {Number} delimiterIndex - 'folder' index in the path
 * @return {String} - CommonPrefix
 */
function getCommonPrefix(key, delimiter, delimiterIndex) {
    return key.substring(0, delimiterIndex + delimiter.length);
}

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
class Delimiter extends Extension {
    /**
     * Create a new Delimiter instance
     * @constructor
     * @param {Object} parameters - listing parameters
     * @param {String} [parameters.delimiter] - delimiter per amazon format
     * @param {String} [parameters.prefix] - prefix per amazon format
     * @param {String} [parameters.marker] - marker per amazon format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     * @param {Boolean} [parameters.v2] - indicates whether v2 format
     * @param {String} [parameters.startAfter] - marker per amazon format
     * @param {String} [parameters.continuationToken] - obfuscated amazon token
     * @param {Boolean} [parameters.alphabeticalOrder] - whether the result is
     * alphabetically ordered or not
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger);
        // original listing parameters
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.marker = parameters.marker;
        this.maxKeys = parameters.maxKeys || 1000;
        this.startAfter = parameters.startAfter;
        this.continuationToken = parameters.continuationToken;
        this.alphabeticalOrder =
            typeof parameters.alphabeticalOrder !== 'undefined' ?
                parameters.alphabeticalOrder : true;

        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // results
        this.CommonPrefixes = [];
        this.Contents = [];
        this.IsTruncated = false;
        this.NextMarker = parameters.marker;
        this.NextContinuationToken =
            parameters.continuationToken || parameters.startAfter;

        this.startMarker = parameters.v2 ? 'startAfter' : 'marker';
        this.continueMarker = parameters.v2 ? 'continuationToken' : 'marker';
        this.nextContinueMarker = parameters.v2 ?
            'NextContinuationToken' : 'NextMarker';

        if (this.delimiter !== undefined &&
            this[this.nextContinueMarker] !== undefined &&
            this[this.nextContinueMarker].startsWith(this.prefix || '')) {
            const nextDelimiterIndex =
                this[this.nextContinueMarker].indexOf(this.delimiter,
                    this.prefix ? this.prefix.length : 0);
            this[this.nextContinueMarker] =
                this[this.nextContinueMarker].slice(0, nextDelimiterIndex +
                    this.delimiter.length);
        }

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);
    }

    genMDParamsV0() {
        const params = {};
        if (this.prefix) {
            params.gte = this.prefix;
            params.lt = inc(this.prefix);
        }
        const startVal = this[this.continueMarker] || this[this.startMarker];
        if (startVal) {
            if (params.gte && params.gte > startVal) {
                return params;
            }
            delete params.gte;
            params.gt = startVal;
        }
        return params;
    }

    genMDParamsV1() {
        const params = this.genMDParamsV0();
        return listingParamsMasterKeysV0ToV1(params);
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys() {
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 -> IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return true;
        }
        return false;
    }

    /**
     * Add a (key, value) tuple to the listing
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {String} key - The key to add
     * @param {String} value - The value of the key
     * @return {number} - indicates if iteration should continue
     */
    addContents(key, value) {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.Contents.push({ key, value: this.trimMetadata(value) });
        this[this.nextContinueMarker] = key;
        ++this.keys;
        return FILTER_ACCEPT;
    }

    getObjectKeyV0(obj) {
        return obj.key;
    }

    getObjectKeyV1(obj) {
        return obj.key.slice(DbPrefixes.Master.length);
    }

    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj) {
        const key = this.getObjectKey(obj);
        const value = obj.value;
        if ((this.prefix && !key.startsWith(this.prefix))
            || (this.alphabeticalOrder
                && typeof this[this.nextContinueMarker] === 'string'
                && key <= this[this.nextContinueMarker])) {
            return FILTER_SKIP;
        }
        if (this.delimiter) {
            const baseIndex = this.prefix ? this.prefix.length : 0;
            const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
            if (delimiterIndex === -1) {
                return this.addContents(key, value);
            }
            return this.addCommonPrefix(key, delimiterIndex);
        }
        return this.addContents(key, value);
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} key - object name
     * @param {Number} index - after prefix starting point
     * @return {Boolean} - indicates if iteration should continue
     */
    addCommonPrefix(key, index) {
        const commonPrefix = getCommonPrefix(key, this.delimiter, index);
        if (this.CommonPrefixes.indexOf(commonPrefix) === -1
                && this[this.nextContinueMarker] !== commonPrefix) {
            if (this._reachedMaxKeys()) {
                return FILTER_END;
            }
            this.CommonPrefixes.push(commonPrefix);
            this[this.nextContinueMarker] = commonPrefix;
            ++this.keys;
            return FILTER_ACCEPT;
        }
        return FILTER_SKIP;
    }

    /**
     * If repd happens to want to skip listing on a bucket in v0
     * versioning key format, here is an idea.
     *
     * @return {string} - the present range (NextMarker) if repd believes
     * that it's enough and should move on
     */
    skippingV0() {
        return this[this.nextContinueMarker];
    }

    /**
     * If repd happens to want to skip listing on a bucket in v1
     * versioning key format, here is an idea.
     *
     * @return {string} - the present range (NextMarker) if repd believes
     * that it's enough and should move on
     */
    skippingV1() {
        return DbPrefixes.Master + this[this.nextContinueMarker];
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done, doesn't show a NextMarker field if the output
     * isn't truncated
     * @return {Object} - following amazon format
     */
    result() {
        /* NextMarker is only provided when delimiter is used.
         * specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        const result = {
            CommonPrefixes: this.CommonPrefixes,
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
            Delimiter: this.delimiter,
        };
        if (this.parameters.v2) {
            result.NextContinuationToken = this.IsTruncated
                ? this.NextContinuationToken : undefined;
        } else {
            result.NextMarker = (this.IsTruncated && this.delimiter)
                ? this.NextMarker : undefined;
        }
        return result;
    }
}

module.exports = { Delimiter };
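A quick illustration of the marker handling in the removed `genMDParamsV0()` above: when both a prefix and a marker are set, the marker wins and the range starts strictly after it. A sketch, assuming the `Extension` base class accepts a plain logger stand-in (values illustrative):

```ts
const { Delimiter } = require('./delimiter'); // the removed implementation

// 'console' stands in for the RequestLogger expected by the constructor
const listing = new Delimiter({ prefix: 'foo/', marker: 'foo/bar' }, console, 'v0');
console.log(listing.genMDParamsV0());
// -> { lt: 'foo0', gt: 'foo/bar' }
//    'foo0' is inc('foo/'): the last character bumped by one,
//    so the range covers every key under the 'foo/' prefix
```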
@@ -0,0 +1,356 @@
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;
const { inc, listingParamsMasterKeysV0ToV1,
    FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

export interface FilterState {
    id: number,
};

export interface FilterReturnValue {
    FILTER_ACCEPT,
    FILTER_SKIP,
    FILTER_END,
};

export const enum DelimiterFilterStateId {
    NotSkipping = 1,
    SkippingPrefix = 2,
};

export interface DelimiterFilterState_NotSkipping extends FilterState {
    id: DelimiterFilterStateId.NotSkipping,
};

export interface DelimiterFilterState_SkippingPrefix extends FilterState {
    id: DelimiterFilterStateId.SkippingPrefix,
    prefix: string;
};

type KeyHandler = (key: string, value: string) => FilterReturnValue;

export type ResultObject = {
    CommonPrefixes: string[];
    Contents: {
        key: string;
        value: string;
    }[];
    IsTruncated: boolean;
    Delimiter?: string;
    NextMarker?: string;
    NextContinuationToken?: string;
};

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
export class Delimiter extends Extension {

    state: FilterState;
    keyHandlers: { [id: number]: KeyHandler };

    /**
     * Create a new Delimiter instance
     * @constructor
     * @param {Object} parameters - listing parameters
     * @param {String} [parameters.delimiter] - delimiter per amazon format
     * @param {String} [parameters.prefix] - prefix per amazon format
     * @param {String} [parameters.marker] - marker per amazon format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     * @param {Boolean} [parameters.v2] - indicates whether v2 format
     * @param {String} [parameters.startAfter] - marker per amazon format
     * @param {String} [parameters.continuationToken] - obfuscated amazon token
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger);
        // original listing parameters
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.maxKeys = parameters.maxKeys || 1000;

        if (parameters.v2) {
            this.marker = parameters.continuationToken || parameters.startAfter;
        } else {
            this.marker = parameters.marker;
        }
        this.nextMarker = this.marker;

        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // results
        this.CommonPrefixes = [];
        this.Contents = [];
        this.IsTruncated = false;
        this.keyHandlers = {};

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);

        // if there is a delimiter, we may skip ranges by prefix,
        // hence using the NotSkippingPrefix flavor that checks the
        // subprefix up to the delimiter for the NotSkipping state
        if (this.delimiter) {
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefix.bind(this));
        } else {
            // listing without a delimiter never has to skip over any
            // prefix -> use NeverSkipping flavor for the NotSkipping
            // state
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NeverSkipping.bind(this));
        }
        this.setKeyHandler(
            DelimiterFilterStateId.SkippingPrefix,
            this.keyHandler_SkippingPrefix.bind(this));

        this.state = <DelimiterFilterState_NotSkipping> {
            id: DelimiterFilterStateId.NotSkipping,
        };
    }

    genMDParamsV0() {
        const params: { gt?: string, gte?: string, lt?: string } = {};
        if (this.prefix) {
            params.gte = this.prefix;
            params.lt = inc(this.prefix);
        }
        if (this.marker && this.delimiter) {
            const commonPrefix = this.getCommonPrefix(this.marker);
            if (commonPrefix) {
                const afterPrefix = inc(commonPrefix);
                if (!params.gte || afterPrefix > params.gte) {
                    params.gte = afterPrefix;
                }
            }
        }
        if (this.marker && (!params.gte || this.marker >= params.gte)) {
            delete params.gte;
            params.gt = this.marker;
        }
        return params;
    }

    genMDParamsV1() {
        const params = this.genMDParamsV0();
        return listingParamsMasterKeysV0ToV1(params);
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 -> IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return true;
        }
        return false;
    }

    /**
     * Add a (key, value) tuple to the listing
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {String} key - The key to add
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addContents(key: string, value: string): void {
        this.Contents.push({ key, value: this.trimMetadata(value) });
        ++this.keys;
        this.nextMarker = key;
    }

    getCommonPrefix(key: string): string | undefined {
        if (!this.delimiter) {
            return undefined;
        }
        const baseIndex = this.prefix ? this.prefix.length : 0;
        const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
        if (delimiterIndex === -1) {
            return undefined;
        }
        return key.substring(0, delimiterIndex + this.delimiter.length);
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} commonPrefix - common prefix to add
     * @param {String} key - full key starting with commonPrefix
     * @return {undefined}
     */
    addCommonPrefix(commonPrefix: string, key: string): void {
        // add the new prefix to the list
        this.CommonPrefixes.push(commonPrefix);
        ++this.keys;
        this.nextMarker = commonPrefix;
    }

    addCommonPrefixOrContents(key: string, value: string): string | undefined {
        // add the subprefix to the common prefixes if the key has the delimiter
        const commonPrefix = this.getCommonPrefix(key);
        if (commonPrefix) {
            this.addCommonPrefix(commonPrefix, key);
            return commonPrefix;
        }
        this.addContents(key, value);
        return undefined;
    }

    getObjectKeyV0(obj: { key: string }): string {
        return obj.key;
    }

    getObjectKeyV1(obj: { key: string }): string {
        return obj.key.slice(DbPrefixes.Master.length);
    }

    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj: { key: string, value: string }): FilterReturnValue {
        const key = this.getObjectKey(obj);
        const value = obj.value;

        return this.handleKey(key, value);
    }

    setState(state: FilterState): void {
        this.state = state;
    }

    setKeyHandler(stateId: number, keyHandler: KeyHandler): void {
        this.keyHandlers[stateId] = keyHandler;
    }

    handleKey(key: string, value: string): FilterReturnValue {
        return this.keyHandlers[this.state.id](key, value);
    }

    keyHandler_NeverSkipping(key: string, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.addContents(key, value);
        return FILTER_ACCEPT;
    }

    keyHandler_NotSkippingPrefix(key: string, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        const commonPrefix = this.addCommonPrefixOrContents(key, value);
        if (commonPrefix) {
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
        }
        return FILTER_ACCEPT;
    }

    keyHandler_SkippingPrefix(key: string, value: string): FilterReturnValue {
        const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
        if (key.startsWith(prefix)) {
            return FILTER_SKIP;
        }
        this.setState(<DelimiterFilterState_NotSkipping> {
            id: DelimiterFilterStateId.NotSkipping,
        });
        return this.handleKey(key, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterFilterStateId.SkippingPrefix:
            const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
            return inc(prefix);

        default:
            return SKIP_NONE;
        }
    }

    skippingV0() {
        return this.skippingBase();
    }

    skippingV1() {
        const skipTo = this.skippingBase();
        if (skipTo === SKIP_NONE) {
            return SKIP_NONE;
        }
        return DbPrefixes.Master + skipTo;
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done; doesn't show a NextMarker field if the output
     * isn't truncated
     * @return {Object} - following amazon format
     */
    result(): ResultObject {
        /* NextMarker is only provided when delimiter is used.
         * specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        const result: ResultObject = {
            CommonPrefixes: this.CommonPrefixes,
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
            Delimiter: this.delimiter,
        };
        if (this.parameters.v2) {
            result.NextContinuationToken = this.IsTruncated
                ? this.nextMarker : undefined;
        } else {
            result.NextMarker = (this.IsTruncated && this.delimiter)
                ? this.nextMarker : undefined;
        }
        return result;
    }
}
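A sketch of the state-machine flow above, traced against the key handlers (hypothetical bucket keys, v0 format; 'console' stands in for the RequestLogger, and the `Extension` base-class wiring of `parameters` and `keys` is assumed):

```ts
import { Delimiter } from './delimiter';

const listing = new Delimiter({ delimiter: '/', maxKeys: 1000 }, console, 'v0');

listing.filter({ key: 'photos/2024/a.jpg', value: '{}' });
// -> FILTER_ACCEPT: 'photos/' recorded as a CommonPrefix,
//    state transitions to SkippingPrefix('photos/')

console.log(listing.skipping()); // inc('photos/'), i.e. 'photos0'

listing.filter({ key: 'photos/2024/b.jpg', value: '{}' });
// -> FILTER_SKIP: still under 'photos/', so the backend may jump
//    straight past the whole prefix using the skipping() key

listing.filter({ key: 'readme.txt', value: '{}' });
// -> FILTER_ACCEPT: key leaves the prefix, state goes back to
//    NotSkipping and 'readme.txt' lands in Contents

console.log(listing.result().CommonPrefixes); // [ 'photos/' ]
```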
@@ -0,0 +1,127 @@
const { DelimiterMaster } = require('./delimiterMaster');
const { FILTER_ACCEPT, FILTER_END } = require('./tools');

type ResultObject = {
    Contents: {
        key: string;
        value: string;
    }[];
    IsTruncated: boolean;
    NextMarker?: string;
};

/**
 * Handle object listing with parameters. This extends the base class DelimiterMaster
 * to return the master/current versions.
 */
class DelimiterCurrent extends DelimiterMaster {
    /**
     * Delimiter listing of current versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
     * @param {String} parameters.excludedDataStoreName - excluded data store name
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        this.beforeDate = parameters.beforeDate;
        this.excludedDataStoreName = parameters.excludedDataStoreName;
        this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;
        this.scannedKeys = 0;
    }

    genMDParamsV0() {
        const params = super.genMDParamsV0();
        // lastModified and dataStoreName parameters are used by metadata that enables built-in filtering,
        // a feature currently exclusive to MongoDB
        if (this.beforeDate) {
            params.lastModified = {
                lt: this.beforeDate,
            };
        }

        if (this.excludedDataStoreName) {
            params.dataStoreName = {
                ne: this.excludedDataStoreName,
            };
        }

        return params;
    }

    /**
     * Parses the stringified entry's value.
     * @param s - stringified value
     * @return - undefined if parsing fails, otherwise it contains the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
        } catch (e: any) {
            this.logger.warn(
                'Could not parse Object Metadata while listing',
                { err: e.toString() });
        }
        return p;
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     *
     * specialized implementation on DelimiterCurrent to also check
     * the number of scanned keys
     *
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return true;
        }
        return super._reachedMaxKeys();
    }

    addContents(key, value) {
        ++this.scannedKeys;
        const parsedValue = this._parse(value);
        // if parsing fails, skip the key.
        if (parsedValue) {
            const lastModified = parsedValue['last-modified'];
            const dataStoreName = parsedValue.dataStoreName;
            // We then check that the current version is older than "beforeDate" and that
            // "excludedDataStoreName" is either not specified or, if specified, differs
            // from the version's data store name.
            if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) &&
                (!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName)) {
                super.addContents(key, value);
            }
            // In the event of a timeout occurring before any content is added,
            // NextMarker is updated even if the object is not eligible.
            // It minimizes the amount of data that the client needs to re-process if the request times out.
            this.nextMarker = key;
        }
    }

    result(): object {
        const result: ResultObject = {
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
        };

        if (this.IsTruncated) {
            result.NextMarker = this.nextMarker;
        }

        return result;
    }
}
module.exports = { DelimiterCurrent };
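A small sketch of how the lifecycle parameters above translate into metadata query filters (parameter values illustrative; assumes the MongoDB-backed built-in filtering mentioned in the comment, and that `DelimiterMaster`'s own range parameters pass through unchanged):

```ts
const { DelimiterCurrent } = require('./delimiterCurrent');

// 'console' stands in for the RequestLogger; 'v0' is assumed to match
// BucketVersioningKeyFormat.v0
const listing = new DelimiterCurrent({
    beforeDate: '2024-01-01T00:00:00.000Z',
    excludedDataStoreName: 'cold-location',
    maxScannedLifecycleListingEntries: 10000,
}, console, 'v0');

console.log(listing.genMDParamsV0());
// -> contains lastModified: { lt: '2024-01-01T00:00:00.000Z' } and
//    dataStoreName: { ne: 'cold-location' }, on top of whatever range
//    parameters DelimiterMaster.genMDParamsV0() generates
```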
@ -1,196 +0,0 @@
'use strict'; // eslint-disable-line strict

const Delimiter = require('./delimiter').Delimiter;
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst;

/**
 * Handle object listing with parameters. This extends the base class Delimiter
 * to return the raw master versions of existing objects.
 */
class DelimiterMaster extends Delimiter {
    /**
     * Delimiter listing of master versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.delimiter - delimiter per amazon format
     * @param {String} parameters.prefix - prefix per amazon format
     * @param {String} parameters.marker - marker per amazon format
     * @param {Number} parameters.maxKeys - number of keys to list
     * @param {Boolean} parameters.v2 - indicates whether v2 format
     * @param {String} parameters.startAfter - marker per amazon v2 format
     * @param {String} parameters.continuationToken - obfuscated amazon token
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);
        // non-PHD master version or a version whose master is a PHD version
        this.prvKey = undefined;
        this.prvPHDKey = undefined;
        this.inReplayPrefix = false;

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                filter: this.filterV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                filter: this.filterV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);
    }

    /**
     * Filter to apply on each iteration for buckets in v0 format,
     * based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filterV0(obj) {
        let key = obj.key;
        const value = obj.value;

        if (key.startsWith(DbPrefixes.Replay)) {
            this.inReplayPrefix = true;
            return FILTER_SKIP;
        }
        this.inReplayPrefix = false;

        /* Skip keys not starting with the prefix or not alphabetically
         * ordered. */
        if ((this.prefix && !key.startsWith(this.prefix))
            || (typeof this[this.nextContinueMarker] === 'string' &&
                key <= this[this.nextContinueMarker])) {
            return FILTER_SKIP;
        }

        /* Skip version keys (<key><versionIdSeparator><version>) if we already
         * have a master version. */
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex >= 0) {
            key = key.slice(0, versionIdIndex);
            /* - key === this.prvKey is triggered when a master version has
             *   been accepted for this key,
             * - key === this.NextMarker or this.NextContinueToken is triggered
             *   when a listing page ends on an accepted obj and the next page
             *   starts with a version of this object.
             *   In that case prvKey still holds its default value of undefined
             *   from the constructor, so comparing to NextMarker is the only
             *   way to know we should not accept this version. This test is
             *   not redundant with the one at the beginning of this function:
             *   here we compare the key without the version suffix,
             * - key startsWith the previous NextMarker happens because we set
             *   NextMarker to the common prefix instead of the whole key
             *   value. (TODO: remove this test once ZENKO-1048 is fixed)
             */
            if (key === this.prvKey || key === this[this.nextContinueMarker] ||
                (this.delimiter &&
                    key.startsWith(this[this.nextContinueMarker]))) {
                /* master version already filtered */
                return FILTER_SKIP;
            }
        }
        if (Version.isPHD(value)) {
            /* master version is a PHD version, we want to wait for the next
             * one:
             * - Set prvKey to undefined so as not to skip the next version,
             * - return accept to avoid skipping the next values in range
             *   (skip-scan mechanism in metadata backends like Metadata or
             *   MongoClient). */
            this.prvKey = undefined;
            this.prvPHDKey = key;
            return FILTER_ACCEPT;
        }
        if (Version.isDeleteMarker(value)) {
            /* This entry is a deleteMarker which has not been filtered by the
             * version test. Either:
             * - it is a deleteMarker on the master version, so we want to SKIP
             *   all the following entries with this key (no master version),
             * - or it is a deleteMarker following a PHD (setting prvKey to
             *   undefined when an entry is a PHD avoids the skip on version
             *   for the next entry). In that case we expect the master version
             *   to follow. */
            if (key === this.prvPHDKey) {
                this.prvKey = undefined;
                return FILTER_ACCEPT;
            }
            this.prvKey = key;
            return FILTER_SKIP;
        }

        this.prvKey = key;
        if (this.delimiter) {
            // check if the key has the delimiter
            const baseIndex = this.prefix ? this.prefix.length : 0;
            const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
            if (delimiterIndex >= 0) {
                // try to add the prefix to the list
                return this.addCommonPrefix(key, delimiterIndex);
            }
        }
        return this.addContents(key, value);
    }

    /**
     * Filter to apply on each iteration for buckets in v1 format,
     * based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filterV1(obj) {
        // Filtering master keys in v1 simply means listing the master
        // keys, as the state of version keys does not change the
        // result, so we can use the Delimiter method directly.
        return super.filter(obj);
    }

    skippingBase() {
        if (this[this.nextContinueMarker]) {
            // next marker or next continuation token:
            // - foo/ : skipping foo/
            // - foo  : skipping foo.
            const index = this[this.nextContinueMarker].
                lastIndexOf(this.delimiter);
            if (index === this[this.nextContinueMarker].length - 1) {
                return this[this.nextContinueMarker];
            }
            return this[this.nextContinueMarker] + VID_SEP;
        }
        return SKIP_NONE;
    }

    skippingV0() {
        if (this.inReplayPrefix) {
            return DbPrefixes.Replay;
        }
        return this.skippingBase();
    }

    skippingV1() {
        const skipTo = this.skippingBase();
        if (skipTo === SKIP_NONE) {
            return SKIP_NONE;
        }
        return DbPrefixes.Master + skipTo;
    }
}

module.exports = { DelimiterMaster };
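A short worked example of the skipping values computed above (marker values invented for illustration, assuming '/' as the delimiter):

// Illustration only, not part of the diff:
// - nextContinueMarker = 'photos/' (ends with the delimiter)
//     skippingBase() -> 'photos/'           (skip the whole common prefix)
// - nextContinueMarker = 'photos' (a plain key)
//     skippingBase() -> 'photos' + VID_SEP  (skip that key's version entries)
// In v1 format, skippingV1() additionally prepends DbPrefixes.Master.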
@ -0,0 +1,620 @@
import {
    Delimiter,
    FilterState,
    FilterReturnValue,
    DelimiterFilterStateId,
    DelimiterFilterState_NotSkipping,
    DelimiterFilterState_SkippingPrefix,
    ResultObject,
} from './delimiter';
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, FILTER_END, SKIP_NONE, inc } = require('./tools');

import { GapSetEntry } from '../cache/GapSet';
import { GapCacheInterface } from '../cache/GapCache';

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst;

export const enum DelimiterMasterFilterStateId {
    SkippingVersionsV0 = 101,
    WaitVersionAfterPHDV0 = 102,
    SkippingGapV0 = 103,
};

interface DelimiterMasterFilterState_SkippingVersionsV0 extends FilterState {
    id: DelimiterMasterFilterStateId.SkippingVersionsV0,
    masterKey: string,
};

interface DelimiterMasterFilterState_WaitVersionAfterPHDV0 extends FilterState {
    id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
    masterKey: string,
};

interface DelimiterMasterFilterState_SkippingGapV0 extends FilterState {
    id: DelimiterMasterFilterStateId.SkippingGapV0,
};

export const enum GapCachingState {
    NoGapCache = 0, // there is no gap cache
    UnknownGap = 1, // waiting for a cache lookup
    GapLookupInProgress = 2, // asynchronous gap lookup in progress
    GapCached = 3, // an upcoming or already skippable gap is cached
    NoMoreGap = 4, // the cache doesn't have any more gaps inside the listed range
};

type GapCachingInfo_NoGapCache = {
    state: GapCachingState.NoGapCache;
};

type GapCachingInfo_NoCachedGap = {
    state: GapCachingState.UnknownGap
        | GapCachingState.GapLookupInProgress
    gapCache: GapCacheInterface;
};

type GapCachingInfo_GapCached = {
    state: GapCachingState.GapCached;
    gapCache: GapCacheInterface;
    gapCached: GapSetEntry;
};

type GapCachingInfo_NoMoreGap = {
    state: GapCachingState.NoMoreGap;
};

type GapCachingInfo = GapCachingInfo_NoGapCache
    | GapCachingInfo_NoCachedGap
    | GapCachingInfo_GapCached
    | GapCachingInfo_NoMoreGap;


export const enum GapBuildingState {
    Disabled = 0, // no gap cache or no gap building needed (e.g. in V1 versioning format)
    NotBuilding = 1, // not currently building a gap (i.e. not listing within a gap)
    Building = 2, // currently building a gap (i.e. listing within a gap)
    Expired = 3, // not allowed to build due to exposure delay timeout
};

type GapBuildingInfo_NothingToBuild = {
    state: GapBuildingState.Disabled | GapBuildingState.Expired;
};

type GapBuildingParams = {
    /**
     * minimum weight for a gap to be created in the cache
     */
    minGapWeight: number;
    /**
     * trigger a cache setGap() call every N skippable keys
     */
    triggerSaveGapWeight: number;
    /**
     * timestamp to assess whether we're still inside the validity period to
     * be allowed to build gaps
     */
    initTimestamp: number;
};

type GapBuildingInfo_NotBuilding = {
    state: GapBuildingState.NotBuilding;
    gapCache: GapCacheInterface;
    params: GapBuildingParams;
};

type GapBuildingInfo_Building = {
    state: GapBuildingState.Building;
    gapCache: GapCacheInterface;
    params: GapBuildingParams;
    /**
     * Gap currently being created
     */
    gap: GapSetEntry;
    /**
     * total current weight of the gap being created
     */
    gapWeight: number;
};

type GapBuildingInfo = GapBuildingInfo_NothingToBuild
    | GapBuildingInfo_NotBuilding
    | GapBuildingInfo_Building;

/**
 * Handle object listing with parameters. This extends the base class Delimiter
 * to return the raw master versions of existing objects.
 */
export class DelimiterMaster extends Delimiter {

    _gapCaching: GapCachingInfo;
    _gapBuilding: GapBuildingInfo;
    _refreshedBuildingParams: GapBuildingParams | null;

    /**
     * Delimiter listing of master versions.
     * @param {Object} parameters - listing parameters
     * @param {String} [parameters.delimiter] - delimiter per amazon format
     * @param {String} [parameters.prefix] - prefix per amazon format
     * @param {String} [parameters.marker] - marker per amazon format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     * @param {Boolean} [parameters.v2] - indicates whether v2 format
     * @param {String} [parameters.startAfter] - marker per amazon v2 format
     * @param {String} [parameters.continuationToken] - obfuscated amazon token
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat="v0"] - versioning key format
     */
    constructor(parameters, logger, vFormat?: string) {
        super(parameters, logger, vFormat);

        if (this.vFormat === BucketVersioningKeyFormat.v0) {
            // override Delimiter's implementation of NotSkipping for
            // DelimiterMaster logic (skipping versions and special
            // handling of delete markers and PHDs)
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefixNorVersionsV0.bind(this));

            // add extra state handlers specific to DelimiterMaster with v0 format
            this.setKeyHandler(
                DelimiterMasterFilterStateId.SkippingVersionsV0,
                this.keyHandler_SkippingVersionsV0.bind(this));

            this.setKeyHandler(
                DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
                this.keyHandler_WaitVersionAfterPHDV0.bind(this));

            this.setKeyHandler(
                DelimiterMasterFilterStateId.SkippingGapV0,
                this.keyHandler_SkippingGapV0.bind(this));

            if (this.marker) {
                // distinct initial state to include some special logic
                // before the first master key is found that does not have
                // to be checked afterwards
                this.state = <DelimiterMasterFilterState_SkippingVersionsV0> {
                    id: DelimiterMasterFilterStateId.SkippingVersionsV0,
                    masterKey: this.marker,
                };
            } else {
                this.state = <DelimiterFilterState_NotSkipping> {
                    id: DelimiterFilterStateId.NotSkipping,
                };
            }
        } else {
            // save base implementation of the `NotSkipping` state in
            // Delimiter before overriding it with ours, to be able to call it from there
            this.keyHandler_NotSkipping_Delimiter = this.keyHandlers[DelimiterFilterStateId.NotSkipping];
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefixNorVersionsV1.bind(this));
        }
        // in v1, we can directly use Delimiter's implementation,
        // which is already set to the proper state

        // default initialization of the gap cache and building states, can be
        // set by refreshGapCache()
        this._gapCaching = {
            state: GapCachingState.NoGapCache,
        };
        this._gapBuilding = {
            state: GapBuildingState.Disabled,
        };
        this._refreshedBuildingParams = null;
    }

    /**
     * Get the validity period left before a refresh of the gap cache is needed
     * to continue building new gaps.
     *
     * @return {number|null} one of:
     * - the remaining time in milliseconds in which gaps can be added to the
     *   cache before a call to refreshGapCache() is required
     * - or 0 if there is no time left and a call to refreshGapCache() is required
     *   to resume caching gaps
     * - or null if refreshing the cache is never needed (because the gap cache
     *   is either not available or not used)
     */
    getGapBuildingValidityPeriodMs(): number | null {
        let gapBuilding;
        switch (this._gapBuilding.state) {
        case GapBuildingState.Disabled:
            return null;
        case GapBuildingState.Expired:
            return 0;
        case GapBuildingState.NotBuilding:
            gapBuilding = <GapBuildingInfo_NotBuilding> this._gapBuilding;
            break;
        case GapBuildingState.Building:
            gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
            break;
        }
        const { gapCache, params } = gapBuilding;
        const elapsedTime = Date.now() - params.initTimestamp;
        return Math.max(gapCache.exposureDelayMs - elapsedTime, 0);
    }

    /**
     * Refresh the gaps caching logic (gaps are series of current delete markers
     * in V0 bucket metadata format). It has two effects:
     *
     * - starts exposing existing and future gaps from the cache to efficiently
     *   skip over series of current delete markers that have been seen and cached
     *   earlier
     *
     * - enables building and caching new gaps (or extending existing ones), for a
     *   limited time period defined by the `gapCacheProxy.exposureDelayMs` value
     *   in milliseconds. To refresh the validity period and resume building and
     *   caching new gaps, one must restart a new listing from the database (starting
     *   at the current listing key, included), then call refreshGapCache() again.
     *
     * @param {GapCacheInterface} gapCacheProxy - API proxy to the gaps cache
     * (the proxy should handle prefixing object keys with the bucket name)
     * @param {number} [minGapWeight=100] - minimum weight of a gap for it to be
     * added in the cache
     * @param {number} [triggerSaveGapWeight] - cumulative weight to wait for
     * before saving the current building gap. Cannot be greater than
     * `gapCacheProxy.maxGapWeight` (the value is thresholded to `maxGapWeight`
     * otherwise). Defaults to `gapCacheProxy.maxGapWeight / 2`.
     * @return {undefined}
     */
    refreshGapCache(
        gapCacheProxy: GapCacheInterface,
        minGapWeight?: number,
        triggerSaveGapWeight?: number
    ): void {
        if (this.vFormat !== BucketVersioningKeyFormat.v0) {
            return;
        }
        if (this._gapCaching.state === GapCachingState.NoGapCache) {
            this._gapCaching = {
                state: GapCachingState.UnknownGap,
                gapCache: gapCacheProxy,
            };
        }
        const refreshedBuildingParams: GapBuildingParams = {
            minGapWeight: minGapWeight || 100,
            triggerSaveGapWeight: triggerSaveGapWeight
                || Math.trunc(gapCacheProxy.maxGapWeight / 2),
            initTimestamp: Date.now(),
        };
        if (this._gapBuilding.state === GapBuildingState.Building) {
            // refreshed params will be applied as soon as the current building gap is saved
            this._refreshedBuildingParams = refreshedBuildingParams;
        } else {
            this._gapBuilding = {
                state: GapBuildingState.NotBuilding,
                gapCache: gapCacheProxy,
                params: refreshedBuildingParams,
            };
        }
    }

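    // Usage sketch for the two methods above (illustration only, not part of
    // this class): `gapCacheProxy` is assumed to be some GapCacheInterface
    // implementation exposing exposureDelayMs, maxGapWeight, lookupGap() and
    // setGap(); the page loop is hypothetical.
    //
    //     const listing = new DelimiterMaster(params, logger, 'v0');
    //     listing.refreshGapCache(gapCacheProxy, 100 /* minGapWeight */);
    //     // ...before starting each new listing page:
    //     if (listing.getGapBuildingValidityPeriodMs() === 0) {
    //         // exposure window elapsed: restart a listing from the current
    //         // key and call refreshGapCache() again to resume building gaps
    //         listing.refreshGapCache(gapCacheProxy);
    //     }
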
    /**
     * Trigger a lookup of the closest upcoming or already skippable gap.
     *
     * @param {string} fromKey - lookup a gap not before 'fromKey'
     * @return {undefined} - the lookup is asynchronous and its
     * response is handled inside this function
     */
    _triggerGapLookup(gapCaching: GapCachingInfo_NoCachedGap, fromKey: string): void {
        this._gapCaching = {
            state: GapCachingState.GapLookupInProgress,
            gapCache: gapCaching.gapCache,
        };
        const maxKey = this.prefix ? inc(this.prefix) : undefined;
        gapCaching.gapCache.lookupGap(fromKey, maxKey).then(_gap => {
            const gap = <GapSetEntry | null> _gap;
            if (gap) {
                this._gapCaching = {
                    state: GapCachingState.GapCached,
                    gapCache: gapCaching.gapCache,
                    gapCached: gap,
                };
            } else {
                this._gapCaching = {
                    state: GapCachingState.NoMoreGap,
                };
            }
        });
    }

    _checkGapOnMasterDeleteMarker(key: string): FilterReturnValue {
        switch (this._gapBuilding.state) {
        case GapBuildingState.Disabled:
        case GapBuildingState.Expired:
            break;
        case GapBuildingState.NotBuilding:
            this._createBuildingGap(key, 1);
            break;
        case GapBuildingState.Building:
            this._updateBuildingGap(key);
            break;
        }
        if (this._gapCaching.state === GapCachingState.GapCached) {
            const { gapCached } = this._gapCaching;
            if (key >= gapCached.firstKey) {
                if (key <= gapCached.lastKey) {
                    // we are inside the last looked up cached gap: transition to
                    // 'SkippingGapV0' state
                    this.setState(<DelimiterMasterFilterState_SkippingGapV0> {
                        id: DelimiterMasterFilterStateId.SkippingGapV0,
                    });
                    // cut the current gap before skipping, it will be merged or
                    // chained with the existing one (depending on its weight)
                    if (this._gapBuilding.state === GapBuildingState.Building) {
                        // subtract 1 from the weight because we are going to chain this gap,
                        // which has an overlap of one key.
                        this._gapBuilding.gap.weight -= 1;
                        this._cutBuildingGap();
                    }
                    return FILTER_SKIP;
                }
                // as we are past the cached gap, we will need another lookup
                this._gapCaching = {
                    state: GapCachingState.UnknownGap,
                    gapCache: this._gapCaching.gapCache,
                };
            }
        }
        if (this._gapCaching.state === GapCachingState.UnknownGap) {
            this._triggerGapLookup(this._gapCaching, key);
        }
        return FILTER_ACCEPT;
    }

    filter_onNewMasterKeyV0(key: string, value: string): FilterReturnValue {
        // if this master key is a delete marker, accept it without
        // adding the version to the contents
        if (Version.isDeleteMarker(value)) {
            // update the state to start skipping versions of the new master key
            this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
                id: DelimiterMasterFilterStateId.SkippingVersionsV0,
                masterKey: key,
            });
            return this._checkGapOnMasterDeleteMarker(key);
        }
        if (Version.isPHD(value)) {
            // master version is a PHD version: wait for the first
            // following version that will be considered as the actual
            // master key
            this.setState(<DelimiterMasterFilterState_WaitVersionAfterPHDV0> {
                id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
                masterKey: key,
            });
            return FILTER_ACCEPT;
        }
        // cut the current gap as soon as a non-deleted entry is seen
        this._cutBuildingGap();

        if (key.startsWith(DbPrefixes.Replay)) {
            // skip internal replay prefix entirely
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: DbPrefixes.Replay,
            });
            return FILTER_SKIP;
        }
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }

        const commonPrefix = this.addCommonPrefixOrContents(key, value);
        if (commonPrefix) {
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
            return FILTER_ACCEPT;
        }
        // update the state to start skipping versions of the new master key
        this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
            id: DelimiterMasterFilterStateId.SkippingVersionsV0,
            masterKey: key,
        });
        return FILTER_ACCEPT;
    }

    keyHandler_NotSkippingPrefixNorVersionsV0(key: string, value: string): FilterReturnValue {
        return this.filter_onNewMasterKeyV0(key, value);
    }

    filter_onNewMasterKeyV1(key: string, value: string): FilterReturnValue {
        // if this master key is a delete marker, accept it without
        // adding the version to the contents
        if (Version.isDeleteMarker(value)) {
            return FILTER_ACCEPT;
        }
        // use base Delimiter's implementation
        return this.keyHandler_NotSkipping_Delimiter(key, value);
    }

    keyHandler_NotSkippingPrefixNorVersionsV1(key: string, value: string): FilterReturnValue {
        return this.filter_onNewMasterKeyV1(key, value);
    }

    keyHandler_SkippingVersionsV0(key: string, value: string): FilterReturnValue {
        /* In the SkippingVersionsV0 state, skip all version keys
         * (<key><versionIdSeparator><version>) */
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex !== -1) {
            // version keys count in the building gap weight because they must
            // also be listed until skipped
            if (this._gapBuilding.state === GapBuildingState.Building) {
                this._updateBuildingGap(key);
            }
            return FILTER_SKIP;
        }
        return this.filter_onNewMasterKeyV0(key, value);
    }

    keyHandler_WaitVersionAfterPHDV0(key: string, value: string): FilterReturnValue {
        // After a PHD key is encountered, the next version key of the
        // same object, if it exists, is the new master key, hence
        // consider it as such and call 'onNewMasterKeyV0' (the test
        // 'masterKey == phdKey' is probably redundant when we already
        // know we have a versioned key, since all objects in v0 have
        // a master key, but it is kept in doubt)
        const { masterKey: phdKey } = <DelimiterMasterFilterState_WaitVersionAfterPHDV0> this.state;
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex !== -1) {
            const masterKey = key.slice(0, versionIdIndex);
            if (masterKey === phdKey) {
                return this.filter_onNewMasterKeyV0(masterKey, value);
            }
        }
        return this.filter_onNewMasterKeyV0(key, value);
    }

    keyHandler_SkippingGapV0(key: string, value: string): FilterReturnValue {
        const { gapCache, gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
        if (key <= gapCached.lastKey) {
            return FILTER_SKIP;
        }
        this._gapCaching = {
            state: GapCachingState.UnknownGap,
            gapCache,
        };
        this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
            id: DelimiterMasterFilterStateId.SkippingVersionsV0,
        });
        // Start a gap with weight=0 from the latest skippable key. This
        // allows extending the gap just skipped with a chained gap in case
        // other delete markers are seen after the existing gap is skipped.
        this._createBuildingGap(gapCached.lastKey, 0, gapCached.weight);

        return this.handleKey(key, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterMasterFilterStateId.SkippingVersionsV0:
            const { masterKey } = <DelimiterMasterFilterState_SkippingVersionsV0> this.state;
            return masterKey + inc(VID_SEP);

        case DelimiterMasterFilterStateId.SkippingGapV0:
            const { gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
            return gapCached.lastKey;

        default:
            return super.skippingBase();
        }
    }

    result(): ResultObject {
        this._cutBuildingGap();
        return super.result();
    }

    _checkRefreshedBuildingParams(params: GapBuildingParams): GapBuildingParams {
        if (this._refreshedBuildingParams) {
            const newParams = this._refreshedBuildingParams;
            this._refreshedBuildingParams = null;
            return newParams;
        }
        return params;
    }

    /**
     * Save the gap being built if allowed (i.e. still within the
     * allocated exposure time window).
     *
     * @return {boolean} - true if the gap was saved, false if we are
     * outside the allocated exposure time window.
     */
    _saveBuildingGap(): boolean {
        const { gapCache, params, gap, gapWeight } =
            <GapBuildingInfo_Building> this._gapBuilding;
        const totalElapsed = Date.now() - params.initTimestamp;
        if (totalElapsed >= gapCache.exposureDelayMs) {
            this._gapBuilding = {
                state: GapBuildingState.Expired,
            };
            this._refreshedBuildingParams = null;
            return false;
        }
        const { firstKey, lastKey, weight } = gap;
        gapCache.setGap(firstKey, lastKey, weight);
        this._gapBuilding = {
            state: GapBuildingState.Building,
            gapCache,
            params: this._checkRefreshedBuildingParams(params),
            gap: {
                firstKey: gap.lastKey,
                lastKey: gap.lastKey,
                weight: 0,
            },
            gapWeight,
        };
        return true;
    }

    /**
     * Create a new gap to be extended afterwards
     *
     * @param {string} newKey - gap's first key
     * @param {number} startWeight - initial weight of the building gap (usually 0 or 1)
     * @param {number} [cachedWeight] - if continuing a cached gap, weight of the existing
     * cached portion
     * @return {undefined}
     */
    _createBuildingGap(newKey: string, startWeight: number, cachedWeight?: number): void {
        if (this._gapBuilding.state === GapBuildingState.NotBuilding) {
            const { gapCache, params } = <GapBuildingInfo_NotBuilding> this._gapBuilding;
            this._gapBuilding = {
                state: GapBuildingState.Building,
                gapCache,
                params: this._checkRefreshedBuildingParams(params),
                gap: {
                    firstKey: newKey,
                    lastKey: newKey,
                    weight: startWeight,
                },
                gapWeight: (cachedWeight || 0) + startWeight,
            };
        }
    }

    _updateBuildingGap(newKey: string): void {
        const gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
        const { params, gap } = gapBuilding;
        gap.lastKey = newKey;
        gap.weight += 1;
        gapBuilding.gapWeight += 1;
        // the GapCache API requires updating a gap regularly because it can only split
        // it once per update, by the known last key. In practice the default behavior
        // is to trigger an update after a number of keys that is half the maximum weight.
        // It is also useful for other listings to benefit from the cache sooner.
        if (gapBuilding.gapWeight >= params.minGapWeight &&
            gap.weight >= params.triggerSaveGapWeight) {
            this._saveBuildingGap();
        }
    }

    _cutBuildingGap(): void {
        if (this._gapBuilding.state === GapBuildingState.Building) {
            let gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
            let { gapCache, params, gap, gapWeight } = gapBuilding;
            // only set gaps that are significant enough in weight and
            // with a non-empty extension
            if (gapWeight >= params.minGapWeight && gap.weight > 0) {
                // we're done if we were not allowed to save the gap
                if (!this._saveBuildingGap()) {
                    return;
                }
                // params may have been refreshed, reload them
                gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
                params = gapBuilding.params;
            }
            this._gapBuilding = {
                state: GapBuildingState.NotBuilding,
                gapCache,
                params,
            };
        }
    }
}
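To make the state machine above concrete, a small hedged walk-through; the keys, the metadata values, and '\u0000' as a stand-in for VID_SEP are all made up for illustration:

// Illustration only: a master delete marker switches the listing into
// SkippingVersionsV0 and starts building a gap; a regular master key
// cuts the gap and is listed.
const dm = new DelimiterMaster({ maxKeys: 1000 }, logger, 'v0');
dm.handleKey('a.txt', '{"isDeleteMarker":true}'); // FILTER_ACCEPT, gap starts
dm.handleKey('a.txt\u0000v1', '{}');              // FILTER_SKIP, version of 'a.txt'
dm.handleKey('b.txt', '{"key":"b.txt"}');         // FILTER_ACCEPT, listed; gap is cut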
@ -0,0 +1,202 @@
const { DelimiterVersions } = require('./delimiterVersions');
const { FILTER_END, FILTER_SKIP } = require('./tools');

const TRIM_METADATA_MIN_BLOB_SIZE = 10000;

/**
 * Handle object listing with parameters. This extends the base class DelimiterVersions
 * to return the raw non-current versions of existing objects.
 */
class DelimiterNonCurrent extends DelimiterVersions {
    /**
     * Delimiter listing of non-current versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.keyMarker - key marker
     * @param {String} parameters.versionIdMarker - version id marker
     * @param {String} parameters.beforeDate - limit the response to keys with stale date older than beforeDate.
     * The "stale date" is the date on which a version becomes non-current.
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {String} parameters.excludedDataStoreName - exclude versions whose data store name matches this value
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        this.beforeDate = parameters.beforeDate;
        this.excludedDataStoreName = parameters.excludedDataStoreName;
        this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;

        // internal state
        this.prevKey = null;
        this.staleDate = null;

        this.scannedKeys = 0;
    }

    getLastModified(value) {
        let lastModified;
        try {
            const v = JSON.parse(value);
            lastModified = v['last-modified'];
        } catch (e) {
            this.logger.warn('could not parse Object Metadata while listing',
                {
                    method: 'getLastModified',
                    err: e.toString(),
                });
        }
        return lastModified;
    }

    // Override keyHandler_SkippingVersions to include the last version from the previous listing.
    // The creation (last-modified) date of this version will be the stale date for the following version.
    // eslint-disable-next-line camelcase
    keyHandler_SkippingVersions(key, versionId, value) {
        if (key === this.keyMarker) {
            // since the nonversioned key equals the marker, there is
            // necessarily a versionId in this key
            const _versionId = versionId;
            if (_versionId < this.versionIdMarker) {
                // skip all versions until marker
                return FILTER_SKIP;
            }
        }
        this.setState({
            id: 1 /* NotSkipping */,
        });
        return this.handleKey(key, versionId, value);
    }

    filter(obj) {
        if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return FILTER_END;
        }
        ++this.scannedKeys;
        return super.filter(obj);
    }

    /**
     * NOTE: Each version of a specific key is sorted from the latest to the oldest
     * thanks to the way version ids are generated.
     * DESCRIPTION: Skip the version if it represents the master key, but keep its last-modified date in memory,
     * which will be the stale date of the following version.
     * The following version is pushed only:
     * - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
     * - if "beforeDate" is not specified, or if it is specified and the "stale date" is older,
     * - if "excludedDataStoreName" is not specified, or if it is specified and the data store name is different.
     * The in-memory "stale date" is then updated with the version's last-modified date to be used for
     * the following version.
     * The process stops and returns the available results if either:
     * - no more metadata keys are left to be processed
     * - the listing reaches the maximum number of keys to be returned
     * - the internal timeout is reached
     * @param {String} key - The key to add
     * @param {String} versionId - The version id
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addVersion(key, versionId, value) {
        this.nextKeyMarker = key;
        this.nextVersionIdMarker = versionId;

        // Skip the version if it represents the current version, but keep its last-modified date,
        // which will be the stale date of the following version.
        const isCurrentVersion = key !== this.prevKey;
        if (isCurrentVersion) {
            this.staleDate = this.getLastModified(value);
            this.prevKey = key;
            return;
        }

        // The following version is pushed only:
        // - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
        // - if "beforeDate" is not specified, or if it is specified and the "stale date" is older,
        // - if "excludedDataStoreName" is not specified, or if it is specified and the data store name is different.
        let lastModified;
        if (this.staleDate && (!this.beforeDate || this.staleDate < this.beforeDate)) {
            const parsedValue = this._parse(value);
            // if parsing fails, skip the key.
            if (parsedValue) {
                const dataStoreName = parsedValue.dataStoreName;
                lastModified = parsedValue['last-modified'];
                if (!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName) {
                    const s = this._stringify(parsedValue, this.staleDate);
                    // check that _stringify succeeds to only push objects with a defined staleDate.
                    if (s) {
                        this.Versions.push({ key, value: s });
                        ++this.keys;
                    }
                }
            }
        }

        // The in-memory "stale date" is then updated with the version's last-modified date to be used for
        // the following version.
        this.staleDate = lastModified || this.getLastModified(value);

        return;
    }

    /**
     * Parses the stringified entry's value and removes the location property if it is too large.
     * @param {string} s - stringified value
     * @return {object} p - undefined if parsing fails, otherwise the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete p.location;
            }
        } catch (e) {
            this.logger.warn('Could not parse Object Metadata while listing', {
                method: 'DelimiterNonCurrent._parse',
                err: e.toString(),
            });
        }
        return p;
    }

    _stringify(parsedMD, staleDate) {
        const p = parsedMD;
        let s = undefined;
        p.staleDate = staleDate;
        try {
            s = JSON.stringify(p);
        } catch (e) {
            this.logger.warn('could not stringify Object Metadata while listing', {
                method: 'DelimiterNonCurrent._stringify',
                err: e.toString(),
            });
        }
        return s;
    }

    result() {
        const { Versions, IsTruncated, NextKeyMarker, NextVersionIdMarker } = super.result();

        const result = {
            Contents: Versions,
            IsTruncated,
        };

        if (NextKeyMarker) {
            result.NextKeyMarker = NextKeyMarker;
        }

        if (NextVersionIdMarker) {
            result.NextVersionIdMarker = NextVersionIdMarker;
        }

        return result;
    }
}
module.exports = { DelimiterNonCurrent };
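A worked example of the stale-date bookkeeping above; the key, version ids, and dates are invented for illustration:

// Versions of key 'doc', listed newest first:
//   v3 (current,    last-modified 2024-03-01) -> skipped; staleDate := 2024-03-01
//   v2 (noncurrent, last-modified 2024-02-01) -> pushed with staleDate 2024-03-01;
//                                                staleDate := 2024-02-01
//   v1 (noncurrent, last-modified 2024-01-01) -> pushed with staleDate 2024-02-01
// With beforeDate = 2024-02-15, only v1 qualifies: its stale date
// (2024-02-01) is older than beforeDate, while v2's (2024-03-01) is not.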
@ -0,0 +1,204 @@
const DelimiterVersions = require('./delimiterVersions').DelimiterVersions;
const { FILTER_END } = require('./tools');
const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
/**
 * Handle object listing with parameters. This extends the base class DelimiterVersions
 * to return the orphan delete markers. Orphan delete markers are also
 * referred to as expired object delete markers.
 * They are delete markers with zero noncurrent versions.
 */
class DelimiterOrphanDeleteMarker extends DelimiterVersions {
    /**
     * Delimiter listing of orphan delete markers.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        const {
            marker,
            maxKeys,
            prefix,
            beforeDate,
            maxScannedLifecycleListingEntries,
        } = parameters;

        const versionParams = {
            // The orphan delete marker logic uses the term 'marker' instead of 'keyMarker',
            // as the latter could suggest the presence of a 'versionIdMarker'.
            keyMarker: marker,
            maxKeys,
            prefix,
        };
        super(versionParams, logger, vFormat);

        this.maxScannedLifecycleListingEntries = maxScannedLifecycleListingEntries;
        this.beforeDate = beforeDate;
        // this.prevKeyName is used as a marker for the next listing when the current one reaches its entry limit.
        // We cannot rely on this.keyName, as it contains the name of the current key.
        // In the event of a listing interruption due to reaching the maximum scanned entries,
        // relying on this.keyName would cause the next listing to skip the current key because S3 starts
        // listing after the marker.
        this.prevKeyName = null;
        this.keyName = null;
        this.value = null;
        this.scannedKeys = 0;
    }

    _reachedMaxKeys() {
        if (this.keys >= this.maxKeys) {
            return true;
        }
        return false;
    }

    _addOrphan() {
        const parsedValue = this._parse(this.value);
        // if parsing fails, skip the key.
        if (parsedValue) {
            const lastModified = parsedValue['last-modified'];
            const isDeleteMarker = parsedValue.isDeleteMarker;
            // We then check if the orphan version is a delete marker and if it is older than the "beforeDate"
            if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) && isDeleteMarker) {
                // Prefer returning untrimmed data rather than stopping the service in case of stringify failure.
                const s = this._stringify(parsedValue) || this.value;
                this.Versions.push({ key: this.keyName, value: s });
                this.nextKeyMarker = this.keyName;
                ++this.keys;
            }
        }
    }

    /**
     * Parses the stringified entry's value and removes the location property if it is too large.
     * @param {string} s - stringified value
     * @return {object} p - undefined if parsing fails, otherwise the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete p.location;
            }
        } catch (e) {
            this.logger.warn('Could not parse Object Metadata while listing', {
                method: 'DelimiterOrphanDeleteMarker._parse',
                err: e.toString(),
            });
        }
        return p;
    }

    _stringify(value) {
        const p = value;
        let s = undefined;
        try {
            s = JSON.stringify(p);
        } catch (e) {
            this.logger.warn('could not stringify Object Metadata while listing',
                {
                    method: 'DelimiterOrphanDeleteMarker._stringify',
                    err: e.toString(),
                });
        }
        return s;
    }
    /**
     * The purpose of _isMaxScannedEntriesReached is to restrict the number of scanned entries,
     * thus controlling resource overhead (CPU...).
     * @return {boolean} isMaxScannedEntriesReached - true if the maximum limit on the number
     * of entries scanned has been reached, false otherwise.
     */
    _isMaxScannedEntriesReached() {
        return this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries;
    }

    filter(obj) {
        if (this._isMaxScannedEntriesReached()) {
            this.nextKeyMarker = this.prevKeyName;
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return FILTER_END;
        }
        ++this.scannedKeys;
        return super.filter(obj);
    }

    /**
     * NOTE: Each version of a specific key is sorted from the latest to the oldest
     * thanks to the way version ids are generated.
     * DESCRIPTION: For a given key, the latest version is kept in memory since it is the current version.
     * If the following version references a new key, it means that the previous one was an orphan version.
     * We then check if the orphan version is a delete marker and if it is older than the "beforeDate".
     * The process stops and returns the available results if either:
     * - no more metadata keys are left to be processed
     * - the listing reaches the maximum number of keys to be returned
     * - the internal timeout is reached
     * NOTE: we cannot leverage MongoDB to list keys older than "beforeDate"
     * because we would then not be able to assess whether a key is an orphan.
     * @param {String} key - The object key.
     * @param {String} versionId - The object version id.
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addVersion(key, versionId, value) {
        // For a given key, the youngest version is kept in memory since it represents the current version.
        if (key !== this.keyName) {
            // If this.value is defined, it means that the <this.keyName, this.value> pair is "allowed" to be an orphan.
            if (this.value) {
                this._addOrphan();
            }
            this.prevKeyName = this.keyName;
            this.keyName = key;
            this.value = value;

            return;
        }

        // If the key is not the current version, we can skip it in the next listing
        // in the case where the current listing is interrupted due to reaching the maximum scanned entries.
        this.prevKeyName = key;
        this.keyName = key;
        this.value = null;

        return;
    }

    result() {
        // Only check for the remaining last orphan delete marker if the listing was not interrupted.
        // This helps avoid false positives.
        if (!this._isMaxScannedEntriesReached()) {
            // The following check makes sure the last orphan delete marker is not forgotten.
            if (this.keys < this.maxKeys) {
                if (this.value) {
                    this._addOrphan();
                }
            // The following makes sure that if maxKeys is reached, IsTruncated is set to true.
            // We moved the "isTruncated" logic out of _reachedMaxKeys to make sure we take into
            // account the last entity if the listing is truncated right before the last entity
            // and the last entity is an orphan delete marker.
            } else {
                this.IsTruncated = this.maxKeys > 0;
            }
        }

        const result = {
            Contents: this.Versions,
            IsTruncated: this.IsTruncated,
        };

        if (this.IsTruncated) {
            result.NextMarker = this.nextKeyMarker;
        }

        return result;
    }
}

module.exports = { DelimiterOrphanDeleteMarker };
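A short hedged trace of the orphan detection above; the keys are invented, and entries arrive newest version first for each key:

// addVersion('a', ..., deleteMarkerValue) -> 'a' kept as candidate orphan
// addVersion('b', ..., deleteMarkerValue) -> new key: 'a' had no other
//                                            version, so _addOrphan() runs for 'a'
// addVersion('b', ..., regularVersion)    -> same key: 'b' has a noncurrent
//                                            version, candidate cleared
// result()                                -> flushes the last pending candidate,
//                                            unless the scan limit interrupted the listing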
@ -1,283 +0,0 @@
'use strict'; // eslint-disable-line strict

const Delimiter = require('./delimiter').Delimiter;
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
    require('./tools');

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
class DelimiterVersions extends Delimiter {
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);
        // specific to version listing
        this.keyMarker = parameters.keyMarker;
        this.versionIdMarker = parameters.versionIdMarker;
        // internal state
        this.masterKey = undefined;
        this.masterVersionId = undefined;
        // listing results
        this.NextMarker = parameters.keyMarker;
        this.NextVersionIdMarker = undefined;
        this.inReplayPrefix = false;

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                filter: this.filterV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                filter: this.filterV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);
    }

    genMDParamsV0() {
        const params = {};
        if (this.parameters.prefix) {
            params.gte = this.parameters.prefix;
            params.lt = inc(this.parameters.prefix);
        }
        if (this.parameters.keyMarker) {
            if (params.gte && params.gte > this.parameters.keyMarker) {
                return params;
            }
            delete params.gte;
            if (this.parameters.versionIdMarker) {
                // versionIdMarker should always come with keyMarker
                // but may not be the other way around
                params.gt = this.parameters.keyMarker
                    + VID_SEP
                    + this.parameters.versionIdMarker;
            } else {
                params.gt = inc(this.parameters.keyMarker + VID_SEP);
            }
        }
        return params;
    }

    genMDParamsV1() {
        // return an array of two listing params sets to ask for
        // synchronized listing of M and V ranges
        const params = [{}, {}];
        if (this.parameters.prefix) {
            params[0].gte = DbPrefixes.Master + this.parameters.prefix;
            params[0].lt = DbPrefixes.Master + inc(this.parameters.prefix);
            params[1].gte = DbPrefixes.Version + this.parameters.prefix;
            params[1].lt = DbPrefixes.Version + inc(this.parameters.prefix);
        } else {
            params[0].gte = DbPrefixes.Master;
            params[0].lt = inc(DbPrefixes.Master); // stop after the last master key
            params[1].gte = DbPrefixes.Version;
            params[1].lt = inc(DbPrefixes.Version); // stop after the last version key
        }
        if (this.parameters.keyMarker) {
            if (params[1].gte <= DbPrefixes.Version + this.parameters.keyMarker) {
                delete params[0].gte;
                delete params[1].gte;
                params[0].gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP);
                if (this.parameters.versionIdMarker) {
                    // versionIdMarker should always come with keyMarker
                    // but may not be the other way around
                    params[1].gt = DbPrefixes.Version
                        + this.parameters.keyMarker
                        + VID_SEP
                        + this.parameters.versionIdMarker;
                } else {
                    params[1].gt = DbPrefixes.Version
                        + inc(this.parameters.keyMarker + VID_SEP);
                }
            }
        }
        return params;
    }

    /**
     * Used to synchronize listing of M and V prefixes by object key
     *
     * @param {object} masterObj - object listed from the first range
     * returned by genMDParamsV1() (the master keys range)
     * @param {object} versionObj - object listed from the second range
     * returned by genMDParamsV1() (the version keys range)
     * @return {number} comparison result:
     * * -1 if master key < version key
     * * 1 if master key > version key
     */
    compareObjects(masterObj, versionObj) {
        const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
        const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
        return masterKey < versionKey ? -1 : 1;
    }

    /**
     * Add a (key, versionId, value) tuple to the listing.
     * Set the NextMarker to the current key.
     * Increment the keys counter.
     * @param {object} obj - the entry to add to the listing result
     * @param {String} obj.key - The key to add
     * @param {String} obj.versionId - versionId
     * @param {String} obj.value - The value of the key
     * @return {Boolean} - indicates if iteration should continue
     */
    addContents(obj) {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.Contents.push({
            key: obj.key,
            value: this.trimMetadata(obj.value),
            versionId: obj.versionId,
        });
        this.NextMarker = obj.key;
        this.NextVersionIdMarker = obj.versionId;
        ++this.keys;
        return FILTER_ACCEPT;
    }

    /**
     * Filter to apply on each iteration if bucket is in v0
     * versioning key format, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filterV0(obj) {
        if (obj.key.startsWith(DbPrefixes.Replay)) {
            this.inReplayPrefix = true;
            return FILTER_SKIP;
        }
        this.inReplayPrefix = false;

        if (Version.isPHD(obj.value)) {
            // return accept to avoid skipping the next values in range
            return FILTER_ACCEPT;
        }
        return this.filterCommon(obj.key, obj.value);
    }

    /**
     * Filter to apply on each iteration if bucket is in v1
     * versioning key format, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filterV1(obj) {
        if (Version.isPHD(obj.value)) {
            // return accept to avoid skipping the next values in range
            return FILTER_ACCEPT;
        }
        // this function receives both M and V keys, but their prefix
        // length is the same so we can remove their prefix without
        // looking at the type of key
        return this.filterCommon(obj.key.slice(DbPrefixes.Master.length),
            obj.value);
    }

    filterCommon(key, value) {
        if (this.prefix && !key.startsWith(this.prefix)) {
            return FILTER_SKIP;
        }
        let nonversionedKey;
        let versionId = undefined;
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex < 0) {
            nonversionedKey = key;
            this.masterKey = key;
            this.masterVersionId =
                Version.from(value).getVersionId() || 'null';
            versionId = this.masterVersionId;
        } else {
            nonversionedKey = key.slice(0, versionIdIndex);
            versionId = key.slice(versionIdIndex + 1);
            // skip a version key if it is the master version
if (this.masterKey === nonversionedKey && this.masterVersionId === versionId) {
|
|
||||||
return FILTER_SKIP;
|
|
||||||
}
|
|
||||||
this.masterKey = undefined;
|
|
||||||
this.masterVersionId = undefined;
|
|
||||||
}
|
|
||||||
if (this.delimiter) {
|
|
||||||
const baseIndex = this.prefix ? this.prefix.length : 0;
|
|
||||||
const delimiterIndex = nonversionedKey.indexOf(this.delimiter, baseIndex);
|
|
||||||
if (delimiterIndex >= 0) {
|
|
||||||
return this.addCommonPrefix(nonversionedKey, delimiterIndex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return this.addContents({ key: nonversionedKey, value, versionId });
|
|
||||||
}
|
|
||||||
|
|
||||||
skippingV0() {
|
|
||||||
if (this.inReplayPrefix) {
|
|
||||||
return DbPrefixes.Replay;
|
|
||||||
}
|
|
||||||
if (this.NextMarker) {
|
|
||||||
const index = this.NextMarker.lastIndexOf(this.delimiter);
|
|
||||||
if (index === this.NextMarker.length - 1) {
|
|
||||||
return this.NextMarker;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return SKIP_NONE;
|
|
||||||
}
|
|
||||||
|
|
||||||
skippingV1() {
|
|
||||||
const skipV0 = this.skippingV0();
|
|
||||||
if (skipV0 === SKIP_NONE) {
|
|
||||||
return SKIP_NONE;
|
|
||||||
}
|
|
||||||
// skip to the same object key in both M and V range listings
|
|
||||||
return [DbPrefixes.Master + skipV0,
|
|
||||||
DbPrefixes.Version + skipV0];
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Return an object containing all mandatory fields to use once the
|
|
||||||
* iteration is done, doesn't show a NextMarker field if the output
|
|
||||||
* isn't truncated
|
|
||||||
* @return {Object} - following amazon format
|
|
||||||
*/
|
|
||||||
result() {
|
|
||||||
/* NextMarker is only provided when delimiter is used.
|
|
||||||
* specified in v1 listing documentation
|
|
||||||
* http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
|
|
||||||
*/
|
|
||||||
return {
|
|
||||||
CommonPrefixes: this.CommonPrefixes,
|
|
||||||
Versions: this.Contents,
|
|
||||||
IsTruncated: this.IsTruncated,
|
|
||||||
NextKeyMarker: this.IsTruncated ? this.NextMarker : undefined,
|
|
||||||
NextVersionIdMarker: this.IsTruncated ?
|
|
||||||
this.NextVersionIdMarker : undefined,
|
|
||||||
Delimiter: this.delimiter,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = { DelimiterVersions };
|
|
|
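A minimal driver sketch (not part of the changeset; the entries array and logger are placeholders) of the contract this extension is written against: the caller pushes { key, value } entries through filter() until it returns FILTER_END, and consults skipping() to know where it may fast-forward its LevelDB-style cursor.

const { DelimiterVersions } = require('./delimiterVersions');
const { FILTER_END, SKIP_NONE } = require('./tools');

function listVersions(entries, parameters, logger, vFormat) {
    const extension = new DelimiterVersions(parameters, logger, vFormat);
    for (const entry of entries) {
        if (extension.filter(entry) === FILTER_END) {
            break; // maxKeys reached, listing is truncated
        }
        // a real caller would also check extension.skipping() here and
        // fast-forward its cursor whenever it differs from SKIP_NONE
    }
    return extension.result(); // { Versions, CommonPrefixes, IsTruncated, ... }
}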
@@ -0,0 +1,535 @@
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;

import {
    FilterState,
    FilterReturnValue,
} from './delimiter';

const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
    require('./tools');

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

export const enum DelimiterVersionsFilterStateId {
    NotSkipping = 1,
    SkippingPrefix = 2,
    SkippingVersions = 3,
};

export interface DelimiterVersionsFilterState_NotSkipping extends FilterState {
    id: DelimiterVersionsFilterStateId.NotSkipping,
};

export interface DelimiterVersionsFilterState_SkippingPrefix extends FilterState {
    id: DelimiterVersionsFilterStateId.SkippingPrefix,
    prefix: string;
};

export interface DelimiterVersionsFilterState_SkippingVersions extends FilterState {
    id: DelimiterVersionsFilterStateId.SkippingVersions,
    gt: string;
};

type KeyHandler = (key: string, versionId: string | undefined, value: string) => FilterReturnValue;

type ResultObject = {
    CommonPrefixes: string[],
    Versions: {
        key: string;
        value: string;
        versionId: string;
    }[];
    IsTruncated: boolean;
    Delimiter ?: string;
    NextKeyMarker ?: string;
    NextVersionIdMarker ?: string;
};

type GenMDParamsItem = {
    gt ?: string,
    gte ?: string,
    lt ?: string,
};

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
export class DelimiterVersions extends Extension {

    state: FilterState;
    keyHandlers: { [id: number]: KeyHandler };

    constructor(parameters, logger, vFormat) {
        super(parameters, logger);
        // original listing parameters
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.maxKeys = parameters.maxKeys || 1000;
        // specific to version listing
        this.keyMarker = parameters.keyMarker;
        this.versionIdMarker = parameters.versionIdMarker;
        // internal state
        this.masterKey = undefined;
        this.masterVersionId = undefined;
        this.nullKey = null;
        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // listing results
        this.CommonPrefixes = [];
        this.Versions = [];
        this.IsTruncated = false;
        this.nextKeyMarker = parameters.keyMarker;
        this.nextVersionIdMarker = undefined;

        this.keyHandlers = {};

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);

        if (this.vFormat === BucketVersioningKeyFormat.v0) {
            this.setKeyHandler(
                DelimiterVersionsFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingV0.bind(this));
        } else {
            this.setKeyHandler(
                DelimiterVersionsFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingV1.bind(this));
        }
        this.setKeyHandler(
            DelimiterVersionsFilterStateId.SkippingPrefix,
            this.keyHandler_SkippingPrefix.bind(this));

        this.setKeyHandler(
            DelimiterVersionsFilterStateId.SkippingVersions,
            this.keyHandler_SkippingVersions.bind(this));

        if (this.versionIdMarker) {
            this.state = <DelimiterVersionsFilterState_SkippingVersions> {
                id: DelimiterVersionsFilterStateId.SkippingVersions,
                gt: `${this.keyMarker}${VID_SEP}${this.versionIdMarker}`,
            };
        } else {
            this.state = <DelimiterVersionsFilterState_NotSkipping> {
                id: DelimiterVersionsFilterStateId.NotSkipping,
            };
        }
    }

    genMDParamsV0() {
        const params: GenMDParamsItem = {};
        if (this.prefix) {
            params.gte = this.prefix;
            params.lt = inc(this.prefix);
        }
        if (this.keyMarker && this.delimiter) {
            const commonPrefix = this.getCommonPrefix(this.keyMarker);
            if (commonPrefix) {
                const afterPrefix = inc(commonPrefix);
                if (!params.gte || afterPrefix > params.gte) {
                    params.gte = afterPrefix;
                }
            }
        }
        if (this.keyMarker && (!params.gte || this.keyMarker >= params.gte)) {
            delete params.gte;
            if (this.versionIdMarker) {
                // start from the beginning of versions so we can
                // check if there's a null key and fetch it
                // (afterwards, we can skip the rest of versions until
                // we reach versionIdMarker)
                params.gte = `${this.keyMarker}${VID_SEP}`;
            } else {
                params.gt = `${this.keyMarker}${inc(VID_SEP)}`;
            }
        }
        return params;
    }

    genMDParamsV1() {
        // return an array of two listing params sets to ask for
        // synchronized listing of M and V ranges
        const v0Params: GenMDParamsItem = this.genMDParamsV0();
        const mParams: GenMDParamsItem = {};
        const vParams: GenMDParamsItem = {};
        if (v0Params.gt) {
            mParams.gt = `${DbPrefixes.Master}${v0Params.gt}`;
            vParams.gt = `${DbPrefixes.Version}${v0Params.gt}`;
        } else if (v0Params.gte) {
            mParams.gte = `${DbPrefixes.Master}${v0Params.gte}`;
            vParams.gte = `${DbPrefixes.Version}${v0Params.gte}`;
        } else {
            mParams.gte = DbPrefixes.Master;
            vParams.gte = DbPrefixes.Version;
        }
        if (v0Params.lt) {
            mParams.lt = `${DbPrefixes.Master}${v0Params.lt}`;
            vParams.lt = `${DbPrefixes.Version}${v0Params.lt}`;
        } else {
            mParams.lt = inc(DbPrefixes.Master);
            vParams.lt = inc(DbPrefixes.Version);
        }
        return [mParams, vParams];
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 -> IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return true;
        }
        return false;
    }

    /**
     * Used to synchronize listing of M and V prefixes by object key
     *
     * @param {object} masterObj object listed from first range
     * returned by genMDParamsV1() (the master keys range)
     * @param {object} versionObj object listed from second range
     * returned by genMDParamsV1() (the version keys range)
     * @return {number} comparison result:
     *   * -1 if master key < version key
     *   * 1 if master key > version key
     */
    compareObjects(masterObj, versionObj) {
        const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
        const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
        return masterKey < versionKey ? -1 : 1;
    }

    /**
     * Parse a listing key into its nonversioned key and version ID components
     *
     * @param {string} key - full listing key
     * @return {object} obj
     * @return {string} obj.key - nonversioned part of key
     * @return {string} [obj.versionId] - version ID in the key
     */
    parseKey(fullKey: string): { key: string, versionId ?: string } {
        const versionIdIndex = fullKey.indexOf(VID_SEP);
        if (versionIdIndex === -1) {
            return { key: fullKey };
        }
        const nonversionedKey: string = fullKey.slice(0, versionIdIndex);
        let versionId: string = fullKey.slice(versionIdIndex + 1);
        return { key: nonversionedKey, versionId };
    }

    /**
     * Include a key in the listing output, in the Versions or CommonPrefix result
     *
     * @param {string} key - key (without version ID)
     * @param {string} versionId - version ID
     * @param {string} value - metadata value
     * @return {undefined}
     */
    addKey(key: string, versionId: string, value: string) {
        // add the subprefix to the common prefixes if the key has the delimiter
        const commonPrefix = this.getCommonPrefix(key);
        if (commonPrefix) {
            this.addCommonPrefix(commonPrefix);
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
                id: DelimiterVersionsFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
        } else {
            this.addVersion(key, versionId, value);
        }
    }

    /**
     * Add a (key, versionId, value) tuple to the listing.
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {String} key - The key to add
     * @param {String} versionId - versionId
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addVersion(key: string, versionId: string, value: string) {
        this.Versions.push({
            key,
            versionId,
            value: this.trimMetadata(value),
        });
        this.nextKeyMarker = key;
        this.nextVersionIdMarker = versionId;
        ++this.keys;
    }

    getCommonPrefix(key: string): string | undefined {
        if (!this.delimiter) {
            return undefined;
        }
        const baseIndex = this.prefix ? this.prefix.length : 0;
        const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
        if (delimiterIndex === -1) {
            return undefined;
        }
        return key.substring(0, delimiterIndex + this.delimiter.length);
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} commonPrefix - common prefix to add
     * @return {undefined}
     */
    addCommonPrefix(commonPrefix: string): void {
        // add the new prefix to the list
        this.CommonPrefixes.push(commonPrefix);
        ++this.keys;
        this.nextKeyMarker = commonPrefix;
        this.nextVersionIdMarker = undefined;
    }

    /**
     * Cache the current null key, to save it for outputting it later at
     * the correct position
     *
     * @param {String} key - nonversioned key of the null key
     * @param {String} versionId - real version ID of the null key
     * @param {String} value - value of the null key
     * @return {undefined}
     */
    cacheNullKey(key: string, versionId: string, value: string): void {
        this.nullKey = { key, versionId, value };
    }

    getObjectKeyV0(obj: { key: string }): string {
        return obj.key;
    }

    getObjectKeyV1(obj: { key: string }): string {
        return obj.key.slice(DbPrefixes.Master.length);
    }

    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj: { key: string, value: string }): FilterReturnValue {
        const key = this.getObjectKey(obj);
        const value = obj.value;

        const { key: nonversionedKey, versionId: keyVersionId } = this.parseKey(key);
        if (this.nullKey) {
            if (this.nullKey.key !== nonversionedKey
                || this.nullKey.versionId < <string> keyVersionId) {
                this.handleKey(
                    this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
                this.nullKey = null;
            }
        }
        if (keyVersionId === '') {
            // null key
            this.cacheNullKey(nonversionedKey, Version.from(value).getVersionId(), value);
            if (this.state.id === DelimiterVersionsFilterStateId.SkippingVersions) {
                return FILTER_SKIP;
            }
            return FILTER_ACCEPT;
        }
        return this.handleKey(nonversionedKey, keyVersionId, value);
    }

    setState(state: FilterState): void {
        this.state = state;
    }

    setKeyHandler(stateId: number, keyHandler: KeyHandler): void {
        this.keyHandlers[stateId] = keyHandler;
    }

    handleKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        return this.keyHandlers[this.state.id](key, versionId, value);
    }

    keyHandler_NotSkippingV0(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        if (key.startsWith(DbPrefixes.Replay)) {
            // skip internal replay prefix entirely
            this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
                id: DelimiterVersionsFilterStateId.SkippingPrefix,
                prefix: DbPrefixes.Replay,
            });
            return FILTER_SKIP;
        }
        if (Version.isPHD(value)) {
            return FILTER_ACCEPT;
        }
        return this.filter_onNewKey(key, versionId, value);
    }

    keyHandler_NotSkippingV1(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        // NOTE: this check on PHD is only useful for Artesca, S3C
        // does not use PHDs in V1 format
        if (Version.isPHD(value)) {
            return FILTER_ACCEPT;
        }
        return this.filter_onNewKey(key, versionId, value);
    }

    filter_onNewKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        if (versionId === undefined) {
            this.masterKey = key;
            this.masterVersionId = Version.from(value).getVersionId() || 'null';
            this.addKey(this.masterKey, this.masterVersionId, value);
        } else {
            if (this.masterKey === key && this.masterVersionId === versionId) {
                // do not add a version key if it is the master version
                return FILTER_ACCEPT;
            }
            this.addKey(key, versionId, value);
        }
        return FILTER_ACCEPT;
    }

    keyHandler_SkippingPrefix(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
        if (key.startsWith(prefix)) {
            return FILTER_SKIP;
        }
        this.setState(<DelimiterVersionsFilterState_NotSkipping> {
            id: DelimiterVersionsFilterStateId.NotSkipping,
        });
        return this.handleKey(key, versionId, value);
    }

    keyHandler_SkippingVersions(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        if (key === this.keyMarker) {
            // since the nonversioned key equals the marker, there is
            // necessarily a versionId in this key
            const _versionId = <string> versionId;
            if (_versionId < this.versionIdMarker) {
                // skip all versions until marker
                return FILTER_SKIP;
            }
            if (_versionId === this.versionIdMarker) {
                // nothing left to skip, so return ACCEPT, but don't add this version
                return FILTER_ACCEPT;
            }
        }
        this.setState(<DelimiterVersionsFilterState_NotSkipping> {
            id: DelimiterVersionsFilterStateId.NotSkipping,
        });
        return this.handleKey(key, versionId, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterVersionsFilterStateId.SkippingPrefix:
            const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
            return inc(prefix);

        case DelimiterVersionsFilterStateId.SkippingVersions:
            const { gt } = <DelimiterVersionsFilterState_SkippingVersions> this.state;
            // the contract of skipping() is to return the first key
            // that can be skipped to, so adding a null byte to skip
            // over the existing versioned key set in 'gt'
            return `${gt}\0`;

        default:
            return SKIP_NONE;
        }
    }

    skippingV0() {
        return this.skippingBase();
    }

    skippingV1() {
        const skipTo = this.skippingBase();
        if (skipTo === SKIP_NONE) {
            return SKIP_NONE;
        }
        // skip to the same object key in both M and V range listings
        return [
            `${DbPrefixes.Master}${skipTo}`,
            `${DbPrefixes.Version}${skipTo}`,
        ];
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done, doesn't show a NextMarker field if the output
     * isn't truncated
     * @return {Object} - following amazon format
     */
    result() {
        // Add the last null key if still in cache (when it is the
        // last version of the last key)
        //
        // NOTE: _reachedMaxKeys sets IsTruncated to true when it
        // returns true. Here we want this because either:
        //
        // - we did not reach the max keys yet so the result is not
        //   truncated, and there is still room for the null key in
        //   the results
        //
        // - OR we reached it already while having to process a new
        //   key (so the result is truncated even without the null key)
        //
        // - OR we are *just* below the limit but the null key to add
        //   does not fit, so we know the result is now truncated
        //   because there remains the null key to be output.
        //
        if (this.nullKey) {
            this.handleKey(this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
        }
        const result: ResultObject = {
            CommonPrefixes: this.CommonPrefixes,
            Versions: this.Versions,
            IsTruncated: this.IsTruncated,
        };
        if (this.delimiter) {
            result.Delimiter = this.delimiter;
        }
        if (this.IsTruncated) {
            result.NextKeyMarker = this.nextKeyMarker;
            if (this.nextVersionIdMarker) {
                result.NextVersionIdMarker = this.nextVersionIdMarker;
            }
        }
        return result;
    }
}

module.exports = { DelimiterVersions };
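A short usage sketch of the rewritten state machine (the keys, values and logger below are invented samples; VID_SEP is assumed to be the null byte, per VersioningConstants):

import { DelimiterVersions } from './delimiterVersions';

const listing = new DelimiterVersions(
    { prefix: 'photos/', delimiter: '/', maxKeys: 1000 },
    logger, // assumed werelogs-compatible logger
    'v0',   // BucketVersioningKeyFormat.v0
);
// master key: recorded as the current version, state stays NotSkipping
listing.filter({ key: 'photos/cat.jpg', value: '{"versionId":"v100"}' });
// matching version key: accepted but not re-added, since it repeats
// the master version just seen
listing.filter({ key: 'photos/cat.jpg\u0000v100', value: '{"versionId":"v100"}' });
const { Versions, CommonPrefixes, IsTruncated } = listing.result();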
@@ -6,4 +6,7 @@ module.exports = {
     DelimiterMaster: require('./delimiterMaster')
         .DelimiterMaster,
     MPU: require('./MPU').MultipartUploads,
+    DelimiterCurrent: require('./delimiterCurrent').DelimiterCurrent,
+    DelimiterNonCurrent: require('./delimiterNonCurrent').DelimiterNonCurrent,
+    DelimiterOrphanDeleteMarker: require('./delimiterOrphanDeleteMarker').DelimiterOrphanDeleteMarker,
 };
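The three new exports are consumed like the existing listing algorithms; a hypothetical caller (assuming the new classes follow the same (parameters, logger, vFormat) constructor convention as DelimiterVersions above) would do:

const { DelimiterCurrent } = require('./index');
const listing = new DelimiterCurrent(parameters, logger, vFormat);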
@@ -52,21 +52,21 @@ class Skip {
         assert(this.skipRangeCb);

         const filteringResult = this.extension.filter(entry);
-        const skippingRange = this.extension.skipping();
+        const skipTo = this.extension.skipping();

         if (filteringResult === FILTER_END) {
             this.listingEndCb();
         } else if (filteringResult === FILTER_SKIP
-            && skippingRange !== SKIP_NONE) {
+            && skipTo !== SKIP_NONE) {
             if (++this.streakLength >= MAX_STREAK_LENGTH) {
                 let newRange;
-                if (Array.isArray(skippingRange)) {
+                if (Array.isArray(skipTo)) {
                     newRange = [];
-                    for (let i = 0; i < skippingRange.length; ++i) {
-                        newRange.push(this._inc(skippingRange[i]));
+                    for (let i = 0; i < skipTo.length; ++i) {
+                        newRange.push(skipTo[i]);
                     }
                 } else {
-                    newRange = this._inc(skippingRange);
+                    newRange = skipTo;
                 }
                 /* Avoid to loop on the same range again and again. */
                 if (newRange === this.gteParams) {
@@ -79,16 +79,6 @@ class Skip {
             this.streakLength = 0;
         }
     }

-    _inc(str) {
-        if (!str) {
-            return str;
-        }
-        const lastCharValue = str.charCodeAt(str.length - 1);
-        const lastCharNewValue = String.fromCharCode(lastCharValue + 1);
-
-        return `${str.slice(0, str.length - 1)}${lastCharNewValue}`;
-    }
 }
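The behavioral change, as read from this diff: skipping() implementations now return the exact first key the caller may jump to, so Skip uses the value as-is and the _inc() last-character increment is gone. With the DelimiterVersions state machine above, for example:

// SkippingPrefix state: skippingBase() already returns inc(prefix),
// i.e. the first key past the whole common prefix
// SkippingVersions state: skippingBase() returns `${gt}\0`, the first
// key strictly greater than the versioned key stored in 'gt'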
@@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-
-const constants = require('../constants');
+import * as constants from '../constants';

 /**
  * Class containing requester's information received from Vault
@@ -8,9 +6,15 @@
  * shortid, email, accountDisplayName and IAMdisplayName (if applicable)
  * @return {AuthInfo} an AuthInfo instance
  */
-class AuthInfo {
-    constructor(objectFromVault) {
+export default class AuthInfo {
+    arn: string;
+    canonicalID: string;
+    shortid: string;
+    email: string;
+    accountDisplayName: string;
+    IAMdisplayName: string;
+
+    constructor(objectFromVault: any) {
         // amazon resource name for IAM user (if applicable)
         this.arn = objectFromVault.arn;
         // account canonicalID
@@ -53,10 +57,8 @@ class AuthInfo {
         return this.canonicalID.startsWith(
             `${constants.zenkoServiceAccount}/`);
     }
-    isRequesterThisServiceAccount(serviceName) {
-        return this.canonicalID ===
-            `${constants.zenkoServiceAccount}/${serviceName}`;
+    isRequesterThisServiceAccount(serviceName: string) {
+        const computedCanonicalID = `${constants.zenkoServiceAccount}/${serviceName}`;
+        return this.canonicalID === computedCanonicalID;
     }
 }
-
-module.exports = AuthInfo;
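A hypothetical construction example (every field value below is an invented sample):

import AuthInfo from './AuthInfo';

const requester = new AuthInfo({
    arn: 'arn:aws:iam::123456789012:user/alice',
    canonicalID: 'abcdef0123456789abcdef0123456789',
    shortid: '123456789012',
    email: 'alice@example.com',
    accountDisplayName: 'alice-account',
    IAMdisplayName: 'alice',
});
// compares against `${constants.zenkoServiceAccount}/<serviceName>`
requester.isRequesterThisServiceAccount('lifecycle'); // false for this sample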
@@ -1,16 +1,22 @@
-const errors = require('../errors').default;
-const AuthInfo = require('./AuthInfo');
+import { Logger } from 'werelogs';
+import errors from '../errors';
+import AuthInfo from './AuthInfo';

 /** vaultSignatureCb parses message from Vault and instantiates
- * @param {object} err - error from vault
- * @param {object} authInfo - info from vault
- * @param {object} log - log for request
- * @param {function} callback - callback to authCheck functions
- * @param {object} [streamingV4Params] - present if v4 signature;
+ * @param err - error from vault
+ * @param authInfo - info from vault
+ * @param log - log for request
+ * @param callback - callback to authCheck functions
+ * @param [streamingV4Params] - present if v4 signature;
  * items used to calculate signature on chunks if streaming auth
- * @return {undefined}
  */
-function vaultSignatureCb(err, authInfo, log, callback, streamingV4Params) {
+function vaultSignatureCb(
+    err: Error | null,
+    authInfo: { message: { body: any } },
+    log: Logger,
+    callback: (err: Error | null, data?: any, results?: any, params?: any, infos?: any) => void,
+    streamingV4Params?: any
+) {
     // vaultclient API guarantees that it returns:
     // - either `err`, an Error object with `code` and `message` properties set
     // - or `err == null` and `info` is an object with `message.code` and
@@ -24,58 +30,101 @@ function vaultSignatureCb(err, authInfo, log, callback, streamingV4Params) {
     const info = authInfo.message.body;
     const userInfo = new AuthInfo(info.userInfo);
     const authorizationResults = info.authorizationResults;
-    const auditLog = { accountDisplayName: userInfo.getAccountDisplayName() };
+    const auditLog: { accountDisplayName: string, IAMdisplayName?: string } =
+        { accountDisplayName: userInfo.getAccountDisplayName() };
     const iamDisplayName = userInfo.getIAMdisplayName();
     if (iamDisplayName) {
         auditLog.IAMdisplayName = iamDisplayName;
     }
+    // @ts-ignore
     log.addDefaultFields(auditLog);
-    return callback(null, userInfo, authorizationResults, streamingV4Params);
+    return callback(null, userInfo, authorizationResults, streamingV4Params, {
+        accountQuota: info.accountQuota || {},
+    });
 }

+export type AuthV4RequestParams = {
+    version: 4;
+    log: Logger;
+    data: {
+        accessKey: string;
+        signatureFromRequest: string;
+        region: string;
+        stringToSign: string;
+        scopeDate: string;
+        authType: 'query' | 'header';
+        signatureVersion: string;
+        signatureAge?: number;
+        timestamp: number;
+        credentialScope: string;
+        securityToken: string;
+        algo: string;
+        log: Logger;
+    };
+};
+
 /**
  * Class that provides common authentication methods against different
  * authentication backends.
  * @class Vault
  */
-class Vault {
+export default class Vault {
+    client: any;
+    implName: string;
+
     /**
      * @constructor
      * @param {object} client - authentication backend or vault client
      * @param {string} implName - implementation name for auth backend
      */
-    constructor(client, implName) {
+    constructor(client: any, implName: string) {
         this.client = client;
         this.implName = implName;
     }
     /**
      * authenticateV2Request
      *
-     * @param {string} params - the authentication parameters as returned by
+     * @param params - the authentication parameters as returned by
      * auth.extractParams
-     * @param {number} params.version - shall equal 2
-     * @param {string} params.data.accessKey - the user's accessKey
-     * @param {string} params.data.signatureFromRequest - the signature read
+     * @param params.version - shall equal 2
+     * @param params.data.accessKey - the user's accessKey
+     * @param params.data.signatureFromRequest - the signature read
      * from the request
-     * @param {string} params.data.stringToSign - the stringToSign
-     * @param {string} params.data.algo - the hashing algorithm used for the
+     * @param params.data.stringToSign - the stringToSign
+     * @param params.data.algo - the hashing algorithm used for the
      * signature
-     * @param {string} params.data.authType - the type of authentication (query
+     * @param params.data.authType - the type of authentication (query
      * or header)
-     * @param {string} params.data.signatureVersion - the version of the
+     * @param params.data.signatureVersion - the version of the
      * signature (AWS or AWS4)
-     * @param {number} [params.data.signatureAge] - the age of the signature in
+     * @param [params.data.signatureAge] - the age of the signature in
      * ms
-     * @param {string} params.data.log - the logger object
+     * @param params.data.log - the logger object
     * @param {RequestContext []} requestContexts - an array of RequestContext
     * instances which contain information for policy authorization check
-     * @param {function} callback - callback with either error or user info
-     * @returns {undefined}
+     * @param callback - callback with either error or user info
     */
-    authenticateV2Request(params, requestContexts, callback) {
+    authenticateV2Request(
+        params: {
+            version: 2;
+            log: Logger;
+            data: {
+                securityToken: string;
+                accessKey: string;
+                signatureFromRequest: string;
+                stringToSign: string;
+                algo: string;
+                authType: 'query' | 'header';
+                signatureVersion: string;
+                signatureAge?: number;
+                log: Logger;
+            };
+        },
+        requestContexts: any[],
+        callback: (err: Error | null, data?: any) => void
+    ) {
         params.log.debug('authenticating V2 request');
-        let serializedRCsArr;
+        let serializedRCsArr: any;
         if (requestContexts) {
             serializedRCsArr = requestContexts.map(rc => rc.serialize());
         }
@@ -85,44 +134,48 @@ class Vault {
             params.data.accessKey,
             {
                 algo: params.data.algo,
+                // @ts-ignore
                 reqUid: params.log.getSerializedUids(),
                 logger: params.log,
                 securityToken: params.data.securityToken,
                 requestContext: serializedRCsArr,
             },
-            (err, userInfo) => vaultSignatureCb(err, userInfo,
-                params.log, callback)
+            (err: Error | null, userInfo?: any) => vaultSignatureCb(err, userInfo,
+                params.log, callback),
         );
     }

     /** authenticateV4Request
-     * @param {object} params - the authentication parameters as returned by
+     * @param params - the authentication parameters as returned by
      * auth.extractParams
-     * @param {number} params.version - shall equal 4
-     * @param {string} params.data.log - the logger object
-     * @param {string} params.data.accessKey - the user's accessKey
-     * @param {string} params.data.signatureFromRequest - the signature read
+     * @param params.version - shall equal 4
+     * @param params.data.log - the logger object
+     * @param params.data.accessKey - the user's accessKey
+     * @param params.data.signatureFromRequest - the signature read
      * from the request
-     * @param {string} params.data.region - the AWS region
-     * @param {string} params.data.stringToSign - the stringToSign
-     * @param {string} params.data.scopeDate - the timespan to allow the request
-     * @param {string} params.data.authType - the type of authentication (query
+     * @param params.data.region - the AWS region
+     * @param params.data.stringToSign - the stringToSign
+     * @param params.data.scopeDate - the timespan to allow the request
+     * @param params.data.authType - the type of authentication (query
      * or header)
-     * @param {string} params.data.signatureVersion - the version of the
+     * @param params.data.signatureVersion - the version of the
      * signature (AWS or AWS4)
-     * @param {number} params.data.signatureAge - the age of the signature in ms
-     * @param {number} params.data.timestamp - signaure timestamp
-     * @param {string} params.credentialScope - credentialScope for signature
+     * @param params.data.signatureAge - the age of the signature in ms
+     * @param params.data.timestamp - signaure timestamp
+     * @param params.credentialScope - credentialScope for signature
     * @param {RequestContext [] | null} requestContexts -
     * an array of RequestContext or null if authenticaiton of a chunk
     * in streamingv4 auth
     * instances which contain information for policy authorization check
-     * @param {function} callback - callback with either error or user info
-     * @return {undefined}
+     * @param callback - callback with either error or user info
     */
-    authenticateV4Request(params, requestContexts, callback) {
+    authenticateV4Request(
+        params: AuthV4RequestParams,
+        requestContexts: any[] | null,
+        callback: (err: Error | null, data?: any) => void
+    ) {
         params.log.debug('authenticating V4 request');
-        let serializedRCs;
+        let serializedRCs: any;
         if (requestContexts) {
             serializedRCs = requestContexts.map(rc => rc.serialize());
         }
@@ -140,31 +193,39 @@ class Vault {
             params.data.region,
             params.data.scopeDate,
             {
+                // @ts-ignore
                 reqUid: params.log.getSerializedUids(),
                 logger: params.log,
                 securityToken: params.data.securityToken,
                 requestContext: serializedRCs,
             },
-            (err, userInfo) => vaultSignatureCb(err, userInfo,
-                params.log, callback, streamingV4Params)
+            (err: Error | null, userInfo?: any) => vaultSignatureCb(err, userInfo,
+                params.log, callback, streamingV4Params),
         );
     }

     /** getCanonicalIds -- call Vault to get canonicalIDs based on email
      * addresses
-     * @param {array} emailAddresses - list of emailAddresses
-     * @param {object} log - log object
-     * @param {function} callback - callback with either error or an array
+     * @param emailAddresses - list of emailAddresses
+     * @param log - log object
+     * @param callback - callback with either error or an array
      * of objects with each object containing the canonicalID and emailAddress
      * of an account as properties
-     * @return {undefined}
     */
-    getCanonicalIds(emailAddresses, log, callback) {
+    getCanonicalIds(
+        emailAddresses: string[],
+        log: Logger,
+        callback: (
+            err: Error | null,
+            data?: { canonicalID: string; email: string }[]
+        ) => void
+    ) {
         log.trace('getting canonicalIDs from Vault based on emailAddresses',
             { emailAddresses });
         this.client.getCanonicalIds(emailAddresses,
+            // @ts-ignore
             { reqUid: log.getSerializedUids() },
-            (err, info) => {
+            (err: Error | null, info?: any) => {
                 if (err) {
                     log.debug('received error message from auth provider',
                         { errorMessage: err });
@@ -172,17 +233,17 @@ class Vault {
                 }
                 const infoFromVault = info.message.body;
                 log.trace('info received from vault', { infoFromVault });
-                const foundIds = [];
+                const foundIds: { canonicalID: string; email: string }[] = [];
                 for (let i = 0; i < Object.keys(infoFromVault).length; i++) {
                     const key = Object.keys(infoFromVault)[i];
                     if (infoFromVault[key] === 'WrongFormat'
                         || infoFromVault[key] === 'NotFound') {
                         return callback(errors.UnresolvableGrantByEmailAddress);
                     }
-                    const obj = {};
-                    obj.email = key;
-                    obj.canonicalID = infoFromVault[key];
-                    foundIds.push(obj);
+                    foundIds.push({
+                        email: key,
+                        canonicalID: infoFromVault[key],
+                    })
                 }
                 return callback(null, foundIds);
             });
@@ -190,18 +251,22 @@ class Vault {

     /** getEmailAddresses -- call Vault to get email addresses based on
      * canonicalIDs
-     * @param {array} canonicalIDs - list of canonicalIDs
-     * @param {object} log - log object
-     * @param {function} callback - callback with either error or an object
+     * @param canonicalIDs - list of canonicalIDs
+     * @param log - log object
+     * @param callback - callback with either error or an object
      * with canonicalID keys and email address values
-     * @return {undefined}
     */
-    getEmailAddresses(canonicalIDs, log, callback) {
+    getEmailAddresses(
+        canonicalIDs: string[],
+        log: Logger,
+        callback: (err: Error | null, data?: { [key: string]: any }) => void
+    ) {
         log.trace('getting emailAddresses from Vault based on canonicalIDs',
             { canonicalIDs });
         this.client.getEmailAddresses(canonicalIDs,
+            // @ts-ignore
             { reqUid: log.getSerializedUids() },
-            (err, info) => {
+            (err: Error | null, info?: any) => {
                 if (err) {
                     log.debug('received error message from vault',
                         { errorMessage: err });
@@ -224,18 +289,22 @@ class Vault {

     /** getAccountIds -- call Vault to get accountIds based on
      * canonicalIDs
-     * @param {array} canonicalIDs - list of canonicalIDs
-     * @param {object} log - log object
-     * @param {function} callback - callback with either error or an object
+     * @param canonicalIDs - list of canonicalIDs
+     * @param log - log object
+     * @param callback - callback with either error or an object
      * with canonicalID keys and accountId values
-     * @return {undefined}
     */
-    getAccountIds(canonicalIDs, log, callback) {
+    getAccountIds(
+        canonicalIDs: string[],
+        log: Logger,
+        callback: (err: Error | null, data?: { [key: string]: string }) => void
+    ) {
         log.trace('getting accountIds from Vault based on canonicalIDs',
             { canonicalIDs });
         this.client.getAccountIds(canonicalIDs,
+            // @ts-expect-error
             { reqUid: log.getSerializedUids() },
-            (err, info) => {
+            (err: Error | null, info?: any) => {
                 if (err) {
                     log.debug('received error message from vault',
                         { errorMessage: err });
@@ -268,14 +337,19 @@ class Vault {
      * @param {object} log - log object
      * @param {function} callback - callback with either error or an array
      * of authorization results
-     * @return {undefined}
     */
-    checkPolicies(requestContextParams, userArn, log, callback) {
+    checkPolicies(
+        requestContextParams: any[],
+        userArn: string,
+        log: Logger,
+        callback: (err: Error | null, data?: any[]) => void
+    ) {
         log.trace('sending request context params to vault to evaluate' +
             'policies');
         this.client.checkPolicies(requestContextParams, userArn, {
+            // @ts-ignore
             reqUid: log.getSerializedUids(),
-        }, (err, info) => {
+        }, (err: Error | null, info?: any) => {
             if (err) {
                 log.debug('received error message from auth provider',
                     { error: err });
@@ -286,13 +360,14 @@ class Vault {
         });
     }

-    checkHealth(log, callback) {
+    checkHealth(log: Logger, callback: (err: Error | null, data?: any) => void) {
         if (!this.client.healthcheck) {
             const defResp = {};
             defResp[this.implName] = { code: 200, message: 'OK' };
             return callback(null, defResp);
         }
-        return this.client.healthcheck(log.getSerializedUids(), (err, obj) => {
+        // @ts-ignore
+        return this.client.healthcheck(log.getSerializedUids(), (err: Error | null, obj?: any) => {
             const respBody = {};
             if (err) {
                 log.debug(`error from ${this.implName}`, { error: err });
@@ -311,6 +386,19 @@ class Vault {
             return callback(null, respBody);
         });
     }
-}

-module.exports = Vault;
+    report(log: Logger, callback: (err: Error | null, data?: any) => void) {
+        // call the report function of the client
+        if (!this.client.report) {
+            return callback(null, {});
+        }
+        // @ts-ignore
+        return this.client.report(log.getSerializedUids(), (err: Error | null, obj?: any) => {
+            if (err) {
+                log.debug(`error from ${this.implName}`, { error: err });
+                return callback(err);
+            }
+            return callback(null, obj);
+        });
+    }
+}
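One behavioral addition buried in the type changes: vaultSignatureCb now passes a fifth argument to its callback, carrying account quota information. A hypothetical caller (argument names invented) would receive it as follows:

vault.authenticateV4Request(params, requestContexts,
    (err, userInfo, authResults, streamingV4Params, infos) => {
        // infos.accountQuota comes from the Vault response body and
        // defaults to {} when Vault returns no quota information
    });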
|
@@ -1,22 +1,23 @@
-'use strict'; // eslint-disable-line strict
-
-const crypto = require('crypto');
-
-const errors = require('../errors').default;
-const queryString = require('querystring');
-const AuthInfo = require('./AuthInfo');
-const v2 = require('./v2/authV2');
-const v4 = require('./v4/authV4');
-const constants = require('../constants');
-const constructStringToSignV2 = require('./v2/constructStringToSign');
-const constructStringToSignV4 = require('./v4/constructStringToSign');
-const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601;
-const vaultUtilities = require('./in_memory/vaultUtilities');
-const backend = require('./in_memory/Backend');
-const validateAuthConfig = require('./in_memory/validateAuthConfig');
-const AuthLoader = require('./in_memory/AuthLoader');
-const Vault = require('./Vault');
-
-let vault = null;
+import * as crypto from 'crypto';
+import { Logger } from 'werelogs';
+import errors from '../errors';
+import * as queryString from 'querystring';
+import AuthInfo from './AuthInfo';
+import * as v2 from './v2/authV2';
+import * as v4 from './v4/authV4';
+import * as constants from '../constants';
+import constructStringToSignV2 from './v2/constructStringToSign';
+import constructStringToSignV4 from './v4/constructStringToSign';
+import { convertUTCtoISO8601 } from './v4/timeUtils';
+import * as vaultUtilities from './backends/in_memory/vaultUtilities';
+import * as inMemoryBackend from './backends/in_memory/Backend';
+import baseBackend from './backends/base';
+import chainBackend from './backends/ChainBackend';
+import validateAuthConfig from './backends/in_memory/validateAuthConfig';
+import AuthLoader from './backends/in_memory/AuthLoader';
+import Vault from './Vault';
+
+let vault: Vault | null = null;
+
 const auth = {};
 const checkFunctions = {
     v2: {
@@ -33,7 +34,7 @@ const checkFunctions = {
 // 'All Users Group' so use this group as the canonicalID for the publicUser
 const publicUserInfo = new AuthInfo({ canonicalID: constants.publicId });
 
-function setAuthHandler(handler) {
+function setAuthHandler(handler: Vault) {
     vault = handler;
     return auth;
 }
@@ -41,25 +42,30 @@ function setAuthHandler(handler) {
 /**
  * This function will check validity of request parameters to authenticate
  *
- * @param {Http.Request} request - Http request object
- * @param {object} log - Logger object
- * @param {string} awsService - Aws service related
- * @param {object} data - Parameters from queryString parsing or body of
+ * @param request - Http request object
+ * @param log - Logger object
+ * @param awsService - Aws service related
+ * @param data - Parameters from queryString parsing or body of
  * POST request
  *
- * @return {object} ret
- * @return {object} ret.err - arsenal.errors object if any error was found
- * @return {object} ret.params - auth parameters to use later on for signature
+ * @return ret
+ * @return ret.err - arsenal.errors object if any error was found
+ * @return ret.params - auth parameters to use later on for signature
  * computation and check
- * @return {object} ret.params.version - the auth scheme version
+ * @return ret.params.version - the auth scheme version
  * (undefined, 2, 4)
- * @return {object} ret.params.data - the auth scheme's specific data
+ * @return ret.params.data - the auth scheme's specific data
  */
-function extractParams(request, log, awsService, data) {
+function extractParams(
+    request: any,
+    log: Logger,
+    awsService: string,
+    data: { [key: string]: string }
+) {
     log.trace('entered', { method: 'Arsenal.auth.server.extractParams' });
     const authHeader = request.headers.authorization;
-    let version = null;
-    let method = null;
+    let version: 'v2' | 'v4' | null = null;
+    let method: 'query' | 'headers' | null = null;
 
     // Identify auth version and method to dispatch to the right check function
     if (authHeader) {
@@ -102,16 +108,21 @@ function extractParams(request, log, awsService, data) {
 /**
  * This function will check validity of request parameters to authenticate
  *
- * @param {Http.Request} request - Http request object
- * @param {object} log - Logger object
- * @param {function} cb - the callback
- * @param {string} awsService - Aws service related
+ * @param request - Http request object
+ * @param log - Logger object
+ * @param cb - the callback
+ * @param awsService - Aws service related
  * @param {RequestContext[] | null} requestContexts - array of RequestContext
  * or null if no requestContexts to be sent to Vault (for instance,
  * in multi-object delete request)
- * @return {undefined}
  */
-function doAuth(request, log, cb, awsService, requestContexts) {
+function doAuth(
+    request: any,
+    log: Logger,
+    cb: (err: Error | null, data?: any) => void,
+    awsService: string,
+    requestContexts: any[] | null
+) {
     const res = extractParams(request, log, awsService, request.query);
     if (res.err) {
         return cb(res.err);
@@ -119,23 +130,31 @@ function doAuth(request, log, cb, awsService, requestContexts) {
         return cb(null, res.params);
     }
     if (requestContexts) {
-        requestContexts.forEach(requestContext => {
-            requestContext.setAuthType(res.params.data.authType);
-            requestContext.setSignatureVersion(res.params
-                .data.signatureVersion);
-            requestContext.setSignatureAge(res.params.data.signatureAge);
-            requestContext.setSecurityToken(res.params.data.securityToken);
+        requestContexts.forEach((requestContext) => {
+            const { params } = res;
+            if ('data' in params) {
+                const { data } = params;
+                requestContext.setAuthType(data.authType);
+                requestContext.setSignatureVersion(data.signatureVersion);
+                requestContext.setSecurityToken(data.securityToken);
+                if ('signatureAge' in data) {
+                    requestContext.setSignatureAge(data.signatureAge);
+                }
+            }
         });
     }
 
     // Corner cases managed, we're left with normal auth
+    // TODO What's happening here?
+    // @ts-ignore
     res.params.log = log;
     if (res.params.version === 2) {
-        return vault.authenticateV2Request(res.params, requestContexts, cb);
+        // @ts-ignore
+        return vault!.authenticateV2Request(res.params, requestContexts, cb);
    }
     if (res.params.version === 4) {
-        return vault.authenticateV4Request(res.params, requestContexts, cb,
-            awsService);
+        // @ts-ignore
+        return vault!.authenticateV4Request(res.params, requestContexts, cb);
     }
 
     log.error('authentication method not found', {
@@ -144,22 +163,44 @@ function doAuth(request, log, cb, awsService, requestContexts) {
     return cb(errors.InternalError);
 }
 
+/**
+ * This function will generate a version 4 content-md5 header
+ * It looks at the request path to determine what kind of header encoding is required
+ *
+ * @param path - the request path
+ * @param payload - the request payload to hash
+ */
+function generateContentMD5Header(
+    path: string,
+    payload: string,
+) {
+    const encoding = path && path.startsWith('/_/backbeat/') ? 'hex' : 'base64';
+    return crypto.createHash('md5').update(payload, 'binary').digest(encoding);
+}
+
 /**
  * This function will generate a version 4 header
  *
- * @param {Http.Request} request - Http request object
- * @param {object} data - Parameters from queryString parsing or body of
+ * @param request - Http request object
+ * @param data - Parameters from queryString parsing or body of
  * POST request
- * @param {string} accessKey - the accessKey
- * @param {string} secretKeyValue - the secretKey
- * @param {string} awsService - Aws service related
- * @param {sting} [proxyPath] - path that gets proxied by reverse proxy
- * @param {string} [sessionToken] - security token if the access/secret keys
+ * @param accessKey - the accessKey
+ * @param secretKeyValue - the secretKey
+ * @param awsService - Aws service related
+ * @param [proxyPath] - path that gets proxied by reverse proxy
+ * @param [sessionToken] - security token if the access/secret keys
  * are temporary credentials from STS
- * @return {undefined}
+ * @param [payload] - body of the request if any
  */
-function generateV4Headers(request, data, accessKey, secretKeyValue,
-    awsService, proxyPath, sessionToken) {
+function generateV4Headers(
+    request: any,
+    data: { [key: string]: string },
+    accessKey: string,
+    secretKeyValue: string,
+    awsService: string,
+    proxyPath?: string,
+    sessionToken?: string,
+    payload?: string,
+) {
     Object.assign(request, { headers: {} });
     const amzDate = convertUTCtoISO8601(Date.now());
     // get date without time
@@ -171,9 +212,9 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
     const timestamp = amzDate;
     const algorithm = 'AWS4-HMAC-SHA256';
 
-    let payload = '';
+    payload = payload || '';
     if (request.method === 'POST') {
-        payload = queryString.stringify(data, null, null, {
+        payload = queryString.stringify(data, undefined, undefined, {
             encodeURIComponent,
         });
     }
@@ -182,6 +223,7 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
     request.setHeader('host', request._headers.host);
     request.setHeader('x-amz-date', amzDate);
     request.setHeader('x-amz-content-sha256', payloadChecksum);
+    request.setHeader('content-md5', generateContentMD5Header(request.path, payload));
 
     if (sessionToken) {
         request.setHeader('x-amz-security-token', sessionToken);
@@ -192,7 +234,8 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
         .filter(headerName =>
             headerName.startsWith('x-amz-')
             || headerName.startsWith('x-scal-')
-            || headerName === 'host'
+            || headerName === 'content-md5'
+            || headerName === 'host',
         ).sort().join(';');
     const params = { request, signedHeaders, payloadChecksum,
         credentialScope, timestamp, query: data,
@@ -203,7 +246,7 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
         scopeDate,
         service);
     const signature = crypto.createHmac('sha256', signingKey)
-        .update(stringToSign, 'binary').digest('hex');
+        .update(stringToSign as string, 'binary').digest('hex');
     const authorizationHeader = `${algorithm} Credential=${accessKey}` +
         `/${credentialScope}, SignedHeaders=${signedHeaders}, ` +
         `Signature=${signature}`;
@@ -211,21 +254,12 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
     Object.assign(request, { headers: {} });
 }
 
-module.exports = {
-    setHandler: setAuthHandler,
-    server: {
-        extractParams,
-        doAuth,
-    },
-    client: {
-        generateV4Headers,
-        constructStringToSignV2,
-    },
-    inMemory: {
-        backend,
-        validateAuthConfig,
-        AuthLoader,
-    },
+export const server = { extractParams, doAuth };
+export const client = { generateV4Headers, constructStringToSignV2 };
+export const inMemory = { backend: inMemoryBackend, validateAuthConfig, AuthLoader };
+export const backends = { baseBackend, chainBackend };
+export {
+    setAuthHandler as setHandler,
     AuthInfo,
-    Vault,
-};
+    Vault,
+};
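With the CommonJS export object split into named exports above, a consumer signs an outgoing request roughly as follows. This is only a sketch: the module path './auth' and the credentials are placeholders, and the request object shape is assumed to be an http.ClientRequest:

import * as http from 'http';
import { client } from './auth'; // path assumed

// generateV4Headers mutates the request: it sets x-amz-date,
// x-amz-content-sha256, content-md5 and the Authorization header.
const req = http.request({ method: 'GET', host: 'localhost', path: '/bucket' });
client.generateV4Headers(req, {}, 'accessKey1', 'verySecretKey1', 's3');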
@@ -0,0 +1,233 @@
import assert from 'assert';
import async from 'async';
import errors from '../../errors';
import BaseBackend from './base';

/**
 * Class that provides an authentication backend that will verify signatures
 * and retrieve emails and canonical ids associated with an account using a
 * given list of authentication backends and vault clients.
 *
 * @class ChainBackend
 */
export default class ChainBackend extends BaseBackend {
    _clients: any[];

    /**
     * @constructor
     * @param {string} service - service id
     * @param {object[]} clients - list of authentication backends or vault clients
     */
    constructor(service: string, clients: any[]) {
        super(service);

        assert(Array.isArray(clients) && clients.length > 0, 'invalid client list');
        assert(clients.every(client =>
            typeof client.verifySignatureV4 === 'function' &&
            typeof client.verifySignatureV2 === 'function' &&
            typeof client.getCanonicalIds === 'function' &&
            typeof client.getEmailAddresses === 'function' &&
            typeof client.checkPolicies === 'function' &&
            typeof client.healthcheck === 'function',
        ), 'invalid client: missing required auth backend methods');
        this._clients = clients;
    }

    /*
     * try task against each client for one to be successful
     */
    _tryEachClient(task: any, cb: any) {
        // @ts-ignore
        async.tryEach(this._clients.map(client => done => task(client, done)), cb);
    }

    /*
     * apply task to all clients
     */
    _forEachClient(task: any, cb: any) {
        async.map(this._clients, task, cb);
    }

    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any,
    ) {
        this._tryEachClient((client, done) => client.verifySignatureV2(
            stringToSign,
            signatureFromRequest,
            accessKey,
            options,
            done,
        ), callback);
    }

    verifySignatureV4(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        region: string,
        scopeDate: string,
        options: any,
        callback: any,
    ) {
        this._tryEachClient((client, done) => client.verifySignatureV4(
            stringToSign,
            signatureFromRequest,
            accessKey,
            region,
            scopeDate,
            options,
            done,
        ), callback);
    }

    static _mergeObjects(objectResponses: any) {
        return objectResponses.reduce(
            (retObj, resObj) => Object.assign(retObj, resObj.message.body),
            {});
    }

    getCanonicalIds(emailAddresses: string[], options: any, callback: any) {
        this._forEachClient(
            (client, done) => client.getCanonicalIds(emailAddresses, options, done),
            (err, res) => {
                if (err) {
                    return callback(err);
                }
                // TODO: atm naive merge, better handling of conflicting email results
                return callback(null, {
                    message: {
                        body: ChainBackend._mergeObjects(res),
                    },
                });
            });
    }

    getEmailAddresses(canonicalIDs: string[], options: any, callback: any) {
        this._forEachClient(
            (client, done) => client.getEmailAddresses(canonicalIDs, options, done),
            (err, res) => {
                if (err) {
                    return callback(err);
                }
                return callback(null, {
                    message: {
                        body: ChainBackend._mergeObjects(res),
                    },
                });
            });
    }

    /*
     * merge policy responses into a single message
     */
    static _mergePolicies(policyResponses: any) {
        const policyMap: any = {};

        policyResponses.forEach(resp => {
            if (!resp.message || !Array.isArray(resp.message.body)) {
                return;
            }

            const check = (policy) => {
                const key = (policy.arn || '') + (policy.versionId || '') + (policy.action || '');
                if (!policyMap[key] || !policyMap[key].isAllowed) {
                    policyMap[key] = policy;
                }
                // else is duplicate policy
            };

            resp.message.body.forEach(policy => {
                if (Array.isArray(policy)) {
                    policy.forEach(authResult => check(authResult));
                } else {
                    check(policy);
                }
            });
        });

        return Object.keys(policyMap).map(key => {
            const policyRes: any = { isAllowed: policyMap[key].isAllowed };
            if (policyMap[key].arn !== '') {
                policyRes.arn = policyMap[key].arn;
            }
            if (policyMap[key].versionId) {
                policyRes.versionId = policyMap[key].versionId;
            }
            if (policyMap[key].isImplicit !== undefined) {
                policyRes.isImplicit = policyMap[key].isImplicit;
            }
            if (policyMap[key].action) {
                policyRes.action = policyMap[key].action;
            }
            return policyRes;
        });
    }

    /*
        response format:
            { message: {
                body: [{}],
                code: number,
                message: string,
            } }
     */
    checkPolicies(requestContextParams: any, userArn: string, options: any, callback: any) {
        this._forEachClient((client, done) => client.checkPolicies(
            requestContextParams,
            userArn,
            options,
            done,
        ), (err, res) => {
            if (err) {
                return callback(err);
            }
            return callback(null, {
                message: {
                    body: ChainBackend._mergePolicies(res),
                },
            });
        });
    }

    healthcheck(reqUid: string, callback: any) {
        this._forEachClient((client, done) =>
            client.healthcheck(reqUid, (err, res) => done(null, {
                error: !!err ? err : null,
                status: res,
            }),
        ), (err, res) => {
            if (err) {
                return callback(err);
            }

            const isError = res.some(results => !!results.error);
            if (isError) {
                return callback(errors.InternalError, res);
            }
            return callback(null, res);
        });
    }

    report(reqUid: string, callback: any) {
        this._forEachClient((client, done) =>
            client.report(reqUid, done),
        (err, res) => {
            if (err) {
                return callback(err);
            }
            const mergedRes = res.reduce((acc, val) => {
                Object.keys(val).forEach(k => {
                    acc[k] = val[k];
                });
                return acc;
            }, {});

            return callback(null, mergedRes);
        });
    }
}
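A usage sketch for the new ChainBackend: two clients are chained so that signature checks fall through from one to the next. The `primary` and `fallback` objects below are hypothetical stand-ins for anything implementing the methods asserted in the constructor:

import ChainBackend from './ChainBackend'; // path assumed

declare const primary: any;  // e.g. a Vault client (assumed)
declare const fallback: any; // e.g. the in-memory backend (assumed)
declare const stringToSign: string;
declare const signature: string;

const chain = new ChainBackend('s3', [primary, fallback]);
// tryEach semantics: verification succeeds if any client in the list
// accepts the signature; the first success wins.
chain.verifySignatureV2(stringToSign, signature, 'accessKey1', {},
    (err: any, userInfo?: any) => {
        // userInfo comes from the first client that validated the request
    });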
@@ -0,0 +1,96 @@
import errors from '../../errors';

/**
 * Base backend class
 *
 * @class BaseBackend
 */
export default class BaseBackend {
    service: string;

    /**
     * @constructor
     * @param {string} service - service identifier for constructing arn
     */
    constructor(service: string) {
        this.service = service;
    }

    /** verifySignatureV2
     * @param stringToSign - string to sign built per AWS rules
     * @param signatureFromRequest - signature sent with request
     * @param accessKey - account accessKey
     * @param options - contains algorithm (SHA1 or SHA256)
     * @param callback - callback with either error or user info
     * @return calls callback
     */
    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /** verifySignatureV4
     * @param stringToSign - string to sign built per AWS rules
     * @param signatureFromRequest - signature sent with request
     * @param accessKey - account accessKey
     * @param region - region specified in request credential
     * @param scopeDate - date specified in request credential
     * @param options - options to send to Vault
     * (just contains reqUid for logging in Vault)
     * @param callback - callback with either error or user info
     * @return calls callback
     */
    verifySignatureV4(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        region: string,
        scopeDate: string,
        options: any,
        callback: any
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets canonical IDs for a list of accounts
     * based on email associated with account
     * @param emails - list of email addresses
     * @param options - to send log id to vault
     * @param callback - callback to calling function
     * @returns callback with either error or
     * object with email addresses as keys and canonical IDs
     * as values
     */
    getCanonicalIds(emails: string[], options: any, callback: any) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets email addresses (referred to as display names for getACLs)
     * for a list of accounts based on canonical IDs associated with account
     * @param canonicalIDs - list of canonicalIDs
     * @param options - to send log id to vault
     * @param callback - callback to calling function
     * @returns callback with either error or
     * an object from Vault containing account canonicalID
     * as each object key and an email address as the value (or "NotFound")
     */
    getEmailAddresses(canonicalIDs: string[], options: any, callback: any) {
        return callback(errors.AuthMethodNotImplemented);
    }

    checkPolicies(requestContextParams: any, userArn: string, options: any, callback: any) {
        return callback(null, { message: { body: [] } });
    }

    healthcheck(reqUid: string, callback: any) {
        return callback(null, { code: 200, message: 'OK' });
    }
}
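BaseBackend acts as a null object: every auth method answers AuthMethodNotImplemented (with permissive checkPolicies/healthcheck defaults), so concrete backends only override what they support. An illustrative subclass sketch; the class name and its behavior are invented for the example:

import BaseBackend from './base'; // path assumed
import errors from '../../errors';

class DenyAllBackend extends BaseBackend {
    // override just one method; everything else keeps the
    // BaseBackend defaults (AuthMethodNotImplemented)
    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any,
    ) {
        return callback(errors.AccessDenied);
    }
}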
@@ -0,0 +1,204 @@
import * as fs from 'fs';
import glob from 'simple-glob';
import joi from 'joi';
import werelogs from 'werelogs';
import * as types from './types';
import { Account, Accounts } from './types';
import ARN from '../../../models/ARN';

/** Load authentication information from files or pre-loaded account objects */
export default class AuthLoader {
    #log: werelogs.Logger;
    #authData: Accounts;
    #isValid: 'waiting-for-validation' | 'valid' | 'invalid';

    constructor(logApi: { Logger: typeof werelogs.Logger } = werelogs) {
        this.#log = new logApi.Logger('S3');
        this.#authData = { accounts: [] };
        this.#isValid = 'waiting-for-validation';
    }

    /** Add one or more accounts to the authentication info */
    addAccounts(authData: Accounts, filePath?: string) {
        const isValid = this.#isAuthDataValid(authData, filePath);
        if (isValid) {
            this.#authData.accounts = [
                ...this.#authData.accounts,
                ...authData.accounts,
            ];
            // defer validity checking until the data is fetched, to avoid
            // logging the errors multiple times (we need to validate
            // all accounts at once to detect duplicate values)
            if (this.#isValid === 'valid') {
                this.#isValid = 'waiting-for-validation';
            }
        } else {
            this.#isValid = 'invalid';
        }
    }

    /**
     * Add account information from a file. Use { legacy: false } as an option
     * to use the new, Promise-based version.
     *
     * @param filePath - file path containing JSON
     * authentication info (see {@link addAccounts()} for format)
     */
    addFile(filePath: string, options: { legacy: false }): Promise<void>;
    /** @deprecated Please use the Promise-based version instead. */
    addFile(filePath: string, options?: { legacy: true }): void;
    addFile(filePath: string, options = { legacy: true }) {
        // On deprecation, remove the legacy part and keep the promises.
        const readFunc: any = options.legacy ? fs.readFileSync : fs.promises.readFile;
        const readResult = readFunc(filePath, 'utf8') as Promise<string> | string;
        const prom = Promise.resolve(readResult).then((data) => {
            const authData = JSON.parse(data);
            this.addAccounts(authData, filePath);
        });
        return options.legacy ? undefined : prom;
    }

    /**
     * Add account information from a filesystem path
     *
     * @param globPattern - filesystem glob pattern,
     * can be a single string or an array of glob patterns. Globs
     * can be simple file paths or can contain glob matching
     * characters, like '/a/b/*.json'. The matching files are
     * individually loaded as JSON and accounts are added. See
     * {@link addAccounts()} for JSON format.
     */
    addFilesByGlob(globPattern: string | string[]) {
        // FIXME switch glob to async version
        const files = glob(globPattern);
        files.forEach((filePath) => this.addFile(filePath));
    }

    /**
     * Perform validation on authentication info previously
     * loaded. Note that it has to be done on the entire set after an
     * update to catch duplicate account IDs or access keys.
     */
    validate() {
        if (this.#isValid === 'waiting-for-validation') {
            const isValid = this.#isAuthDataValid(this.#authData);
            this.#isValid = isValid ? 'valid' : 'invalid';
        }
        return this.#isValid === 'valid';
    }

    /**
     * Get authentication info as a plain JS object containing all accounts
     * under the "accounts" attribute, with validation.
     */
    get data() {
        return this.validate() ? this.#authData : null;
    }

    /** backward-compat: ignore the arn if it starts with 'aws:' and log a warning */
    #isNotLegacyAWSARN(account: Account, filePath?: string) {
        if (account.arn.startsWith('aws:')) {
            const { name: accountName, arn: accountArn } = account;
            this.#log.error(
                'account must have a valid AWS ARN, legacy examples ' +
                "starting with 'aws:' are not supported anymore. " +
                'Please convert to a proper account entry (see ' +
                'examples at https://github.com/scality/S3/blob/' +
                'master/conf/authdata.json). Also note that support ' +
                'for account users has been dropped.',
                { accountName, accountArn, filePath }
            );
            return false;
        }
        return true;
    }

    #isValidUsers(account: Account, filePath?: string) {
        if (account.users) {
            const { name: accountName, arn: accountArn } = account;
            this.#log.error(
                'support for account users has been dropped, consider ' +
                'turning users into account entries (see examples at ' +
                'https://github.com/scality/S3/blob/master/conf/' +
                'authdata.json)',
                { accountName, accountArn, filePath }
            );
            return false;
        }
        return true;
    }

    #isValidARN(account: Account, filePath?: string) {
        const arnObj = ARN.createFromString(account.arn);
        const { name: accountName, arn: accountArn } = account;
        if (arnObj instanceof ARN) {
            if (!arnObj.isIAMAccount()) {
                this.#log.error('authentication config validation error', {
                    reason: 'not an IAM account ARN',
                    accountName,
                    accountArn,
                    filePath,
                });
                return false;
            }
        } else {
            this.#log.error('authentication config validation error', {
                reason: arnObj.error.description,
                accountName,
                accountArn,
                filePath,
            });
            return false;
        }
        return true;
    }

    #isAuthDataValid(authData: any, filePath?: string) {
        const options = { abortEarly: true };
        const response = types.validators.accounts.validate(authData, options);
        if (response.error) {
            this.#dumpJoiErrors(response.error.details, filePath);
            return false;
        }
        const validAccounts = response.value.accounts.filter(
            (account: Account) =>
                this.#isNotLegacyAWSARN(account, filePath) &&
                this.#isValidUsers(account, filePath) &&
                this.#isValidARN(account, filePath)
        );
        const areSomeInvalidAccounts =
            validAccounts.length !== response.value.accounts.length;
        if (areSomeInvalidAccounts) {
            return false;
        }
        const keys = validAccounts.flatMap((account) => account.keys);
        const uniqueKeysValidator = types.validators.keys.unique('access');
        const areKeysUnique = uniqueKeysValidator.validate(keys);
        if (areKeysUnique.error) {
            this.#dumpJoiErrors(areKeysUnique.error.details, filePath);
            return false;
        }
        return true;
    }

    #dumpJoiErrors(errors: joi.ValidationErrorItem[], filePath?: string) {
        errors.forEach((err) => {
            const baseLogInfo = { item: err.path, filePath };
            const logInfo = () => {
                if (err.type === 'array.unique') {
                    const reason = `duplicate value '${err.context?.path}'`;
                    const dupValue = err.context?.value[err.context.path];
                    return { ...baseLogInfo, reason, dupValue };
                } else {
                    const reason = err.message;
                    const context = err.context;
                    return { ...baseLogInfo, reason, context };
                }
            };
            this.#log.error(
                'authentication config validation error',
                logInfo()
            );
        });
    }
}
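A usage sketch for the rewritten loader, covering both overloads of addFile(); the file path is a placeholder:

import AuthLoader from './AuthLoader'; // path assumed

const loader = new AuthLoader();
loader.addFile('./conf/authdata.json'); // legacy synchronous overload
// or: await loader.addFile('./conf/authdata.json', { legacy: false });
if (loader.validate()) {
    const authData = loader.data!; // data is null only when validation failed
    console.log(`loaded ${authData.accounts.length} account(s)`);
}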
@@ -1,13 +1,12 @@
-'use strict'; // eslint-disable-line strict
-
-const crypto = require('crypto');
-
-const errors = require('../../errors').default;
-const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
-const hashSignature = require('./vaultUtilities').hashSignature;
-const Indexer = require('./Indexer');
-
-function _formatResponse(userInfoToSend) {
+import crypto from 'crypto';
+import { Logger } from 'werelogs';
+import errors from '../../../errors';
+import { calculateSigningKey, hashSignature } from './vaultUtilities';
+import Indexer from './Indexer';
+import BaseBackend from '../base';
+import { Accounts } from './types';
+
+function _formatResponse(userInfoToSend: any) {
     return {
         message: {
             body: { userInfo: userInfoToSend },
@@ -19,32 +18,32 @@ function _formatResponse(userInfoToSend) {
  * Class that provides a memory backend for verifying signatures and getting
  * emails and canonical ids associated with an account.
  *
- * @class Backend
+ * @class InMemoryBackend
  */
-class Backend {
+class InMemoryBackend extends BaseBackend {
+    indexer: Indexer;
+    formatResponse: any;
+
     /**
      * @constructor
-     * @param {string} service - service identifer for construction arn
-     * @param {Indexer} indexer - indexer instance for retrieving account info
-     * @param {function} formatter - function which accepts user info to send
+     * @param service - service identifier for constructing arn
+     * @param indexer - indexer instance for retrieving account info
+     * @param formatter - function which accepts user info to send
      * back and returns it in an object
      */
-    constructor(service, indexer, formatter) {
-        this.service = service;
+    constructor(service: string, indexer: Indexer, formatter: typeof _formatResponse) {
+        super(service);
         this.indexer = indexer;
         this.formatResponse = formatter;
     }
 
-    /** verifySignatureV2
-     * @param {string} stringToSign - string to sign built per AWS rules
-     * @param {string} signatureFromRequest - signature sent with request
-     * @param {string} accessKey - account accessKey
-     * @param {object} options - contains algorithm (SHA1 or SHA256)
-     * @param {function} callback - callback with either error or user info
-     * @return {function} calls callback
-     */
-    verifySignatureV2(stringToSign, signatureFromRequest,
-        accessKey, options, callback) {
+    verifySignatureV2(
+        stringToSign: string,
+        signatureFromRequest: string,
+        accessKey: string,
+        options: any,
+        callback: any,
+    ) {
         const entity = this.indexer.getEntityByKey(accessKey);
         if (!entity) {
             return callback(errors.InvalidAccessKeyId);
@@ -59,26 +58,22 @@ class Backend {
             accountDisplayName: this.indexer.getAcctDisplayName(entity),
             canonicalID: entity.canonicalID,
             arn: entity.arn,
+            // @ts-ignore
             IAMdisplayName: entity.IAMdisplayName,
         };
         const vaultReturnObject = this.formatResponse(userInfoToSend);
         return callback(null, vaultReturnObject);
     }
 
-    /** verifySignatureV4
-     * @param {string} stringToSign - string to sign built per AWS rules
-     * @param {string} signatureFromRequest - signature sent with request
-     * @param {string} accessKey - account accessKey
-     * @param {string} region - region specified in request credential
-     * @param {string} scopeDate - date specified in request credential
-     * @param {object} options - options to send to Vault
-     * (just contains reqUid for logging in Vault)
-     * @param {function} callback - callback with either error or user info
-     * @return {function} calls callback
-     */
-    verifySignatureV4(stringToSign, signatureFromRequest, accessKey,
-        region, scopeDate, options, callback) {
+    verifySignatureV4(
+        stringToSign: string,
+        signatureFromRequest: string,
+        accessKey: string,
+        region: string,
+        scopeDate: string,
+        options: any,
+        callback: any,
+    ) {
         const entity = this.indexer.getEntityByKey(accessKey);
         if (!entity) {
             return callback(errors.InvalidAccessKeyId);
@@ -94,23 +89,14 @@ class Backend {
             accountDisplayName: this.indexer.getAcctDisplayName(entity),
             canonicalID: entity.canonicalID,
             arn: entity.arn,
+            // @ts-ignore
             IAMdisplayName: entity.IAMdisplayName,
         };
         const vaultReturnObject = this.formatResponse(userInfoToSend);
         return callback(null, vaultReturnObject);
     }
 
-    /**
-     * Gets canonical ID's for a list of accounts
-     * based on email associated with account
-     * @param {array} emails - list of email addresses
-     * @param {object} log - log object
-     * @param {function} cb - callback to calling function
-     * @returns {function} callback with either error or
-     * object with email addresses as keys and canonical IDs
-     * as values
-     */
-    getCanonicalIds(emails, log, cb) {
+    getCanonicalIds(emails: string[], log: Logger, cb: any) {
         const results = {};
         emails.forEach(email => {
             const lowercasedEmail = email.toLowerCase();
@@ -130,17 +116,7 @@ class Backend {
         return cb(null, vaultReturnObject);
     }
 
-    /**
-     * Gets email addresses (referred to as diplay names for getACL's)
-     * for a list of accounts based on canonical IDs associated with account
-     * @param {array} canonicalIDs - list of canonicalIDs
-     * @param {object} options - to send log id to vault
-     * @param {function} cb - callback to calling function
-     * @returns {function} callback with either error or
-     * an object from Vault containing account canonicalID
-     * as each object key and an email address as the value (or "NotFound")
-     */
-    getEmailAddresses(canonicalIDs, options, cb) {
+    getEmailAddresses(canonicalIDs: string[], options: any, cb: any) {
         const results = {};
         canonicalIDs.forEach(canonicalId => {
             const foundEntity = this.indexer.getEntityByCanId(canonicalId);
@@ -161,14 +137,14 @@ class Backend {
     /**
      * Gets accountIds for a list of accounts based on
      * the canonical IDs associated with the account
-     * @param {array} canonicalIDs - list of canonicalIDs
-     * @param {object} options - to send log id to vault
-     * @param {function} cb - callback to calling function
-     * @returns {function} callback with either error or
+     * @param canonicalIDs - list of canonicalIDs
+     * @param options - to send log id to vault
+     * @param cb - callback to calling function
+     * @returns callback with either error or
      * an object from Vault containing account canonicalID
      * as each object key and an accountId as the value (or "NotFound")
      */
-    getAccountIds(canonicalIDs, options, cb) {
+    getAccountIds(canonicalIDs: string[], options: any, cb: any) {
         const results = {};
         canonicalIDs.forEach(canonicalID => {
             const foundEntity = this.indexer.getEntityByCanId(canonicalID);
@@ -185,33 +161,34 @@ class Backend {
         };
         return cb(null, vaultReturnObject);
     }
+
+    report(log: Logger, callback: any) {
+        return callback(null, {});
+    }
 }
 
-class S3AuthBackend extends Backend {
+class S3AuthBackend extends InMemoryBackend {
     /**
      * @constructor
-     * @param {object} authdata - the authentication config file's data
-     * @param {object[]} authdata.accounts - array of account objects
-     * @param {string=} authdata.accounts[].name - account name
-     * @param {string} authdata.accounts[].email - account email
-     * @param {string} authdata.accounts[].arn - IAM resource name
-     * @param {string} authdata.accounts[].canonicalID - account canonical ID
-     * @param {string} authdata.accounts[].shortid - short account ID
-     * @param {object[]=} authdata.accounts[].keys - array of key objects
-     * @param {string} authdata.accounts[].keys[].access - access key
-     * @param {string} authdata.accounts[].keys[].secret - secret key
-     * @return {undefined}
+     * @param authdata - the authentication config file's data
+     * @param authdata.accounts - array of account objects
+     * @param authdata.accounts[].name - account name
+     * @param authdata.accounts[].email - account email
+     * @param authdata.accounts[].arn - IAM resource name
+     * @param authdata.accounts[].canonicalID - account canonical ID
+     * @param authdata.accounts[].shortid - short account ID
+     * @param authdata.accounts[].keys - array of key objects
+     * @param authdata.accounts[].keys[].access - access key
+     * @param authdata.accounts[].keys[].secret - secret key
      */
-    constructor(authdata) {
+    constructor(authdata?: Accounts) {
         super('s3', new Indexer(authdata), _formatResponse);
     }
 
-    refreshAuthData(authData) {
+    refreshAuthData(authData?: Accounts) {
         this.indexer = new Indexer(authData);
     }
 }
 
-module.exports = {
-    s3: S3AuthBackend,
-};
+export { S3AuthBackend as s3 };
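The in-memory backend keeps its constructor shape but is now typed and built on BaseBackend; a brief sketch, with a placeholder authdata object matching the Accounts type:

import { s3 as S3AuthBackend } from './Backend'; // path assumed

declare const authdata: any; // Accounts-shaped config (assumed)
declare const stringToSign: string;
declare const signature: string;

const backend = new S3AuthBackend(authdata);
backend.verifySignatureV2(stringToSign, signature, 'accessKey1', {},
    (err: any, res?: any) => {
        // on success, res.message.body.userInfo carries the arn,
        // canonicalID and display names resolved by the Indexer
    });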
@@ -0,0 +1,93 @@
import { Accounts, Account, Entity } from './types';

/**
 * Class that provides an internal indexing over the simple data provided by
 * the authentication configuration file for the memory backend. This allows
 * accessing the different authentication entities through various types of
 * keys.
 */
export default class Indexer {
    accountsBy: {
        canId: { [id: string]: Entity | undefined },
        accessKey: { [id: string]: Entity | undefined },
        email: { [id: string]: Entity | undefined },
    };

    constructor(authdata?: Accounts) {
        this.accountsBy = {
            canId: {},
            accessKey: {},
            email: {},
        };

        /*
         * This may happen if the application is configured to use another
         * authentication backend than in-memory.
         * As such, we're managing the error here to avoid breaking things
         * elsewhere.
         */
        if (!authdata) {
            return;
        }

        this.#build(authdata);
    }

    #indexAccount(account: Account) {
        const accountData: Entity = {
            arn: account.arn,
            canonicalID: account.canonicalID,
            shortid: account.shortid,
            accountDisplayName: account.name,
            email: account.email.toLowerCase(),
            keys: [],
        };
        this.accountsBy.canId[accountData.canonicalID] = accountData;
        this.accountsBy.email[accountData.email] = accountData;
        if (account.keys !== undefined) {
            account.keys.forEach(key => {
                accountData.keys.push(key);
                this.accountsBy.accessKey[key.access] = accountData;
            });
        }
    }

    #build(authdata: Accounts) {
        authdata.accounts.forEach(account => {
            this.#indexAccount(account);
        });
    }

    /** This method returns the account associated to a canonical ID. */
    getEntityByCanId(canId: string): Entity | undefined {
        return this.accountsBy.canId[canId];
    }

    /**
     * This method returns the entity (either an account or a user) associated
     * to an access key.
     * @param key - The accessKey of the entity
     */
    getEntityByKey(key: string): Entity | undefined {
        return this.accountsBy.accessKey[key];
    }

    /**
     * This method returns the entity (either an account or a user) associated
     * to an email address.
     */
    getEntityByEmail(email: string): Entity | undefined {
        const lowerCasedEmail = email.toLowerCase();
        return this.accountsBy.email[lowerCasedEmail];
    }

    /** This method returns the secret key associated with the entity. */
    getSecretKey(entity: Entity, accessKey: string) {
        const keys = entity.keys.filter(kv => kv.access === accessKey);
        return keys[0].secret;
    }

    /** This method returns the account display name associated with the entity. */
    getAcctDisplayName(entity: Entity) {
        return entity.accountDisplayName;
    }
}
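A lookup sketch over the three maps the Indexer builds; the authdata object is a placeholder:

import Indexer from './Indexer'; // path assumed

declare const authdata: any; // Accounts-shaped config (assumed)

const indexer = new Indexer(authdata);
const entity = indexer.getEntityByKey('accessKey1');
if (entity) {
    // the same Entity object is reachable through canId, accessKey and email
    const secret = indexer.getSecretKey(entity, 'accessKey1');
    const byEmail = indexer.getEntityByEmail(entity.email); // === entity
}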
@@ -0,0 +1,51 @@
import joi from 'joi';

export type Callback<Data = any> = (err?: Error | null | undefined, data?: Data) => void;

export type Credentials = { access: string; secret: string };
export type Base = {
    arn: string;
    canonicalID: string;
    shortid: string;
    email: string;
    keys: Credentials[];
};
export type Account = Base & { name: string; users: any[] };
export type Accounts = { accounts: Account[] };
export type Entity = Base & { accountDisplayName: string };

const keys = ((): joi.ArraySchema => {
    const str = joi.string().required();
    const items = { access: str, secret: str };
    return joi.array().items(items).required();
})();

const account = (() => {
    return joi.object<Account>({
        name: joi.string().required(),
        email: joi.string().email().required(),
        arn: joi.string().required(),
        canonicalID: joi.string().required(),
        shortid: joi
            .string()
            .regex(/^[0-9]{12}$/)
            .required(),
        keys: keys,
        // backward-compat
        users: joi.array(),
    });
})();

const accounts = (() => {
    return joi.object<Accounts>({
        accounts: joi
            .array()
            .items(account)
            .required()
            .unique('arn')
            .unique('email')
            .unique('canonicalID'),
    });
})();

export const validators = { keys, account, accounts };
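A validation sketch against the accounts schema above; the unique() constraints reject duplicated arn, email or canonicalID across entries. All values below are placeholders:

import { validators } from './types'; // path assumed

const { error } = validators.accounts.validate({
    accounts: [{
        name: 'Bart',
        email: 'sampleaccount1@sampling.com',
        arn: 'arn:aws:iam::123456789012:root',
        canonicalID: '79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be',
        shortid: '123456789012',
        keys: [{ access: 'accessKey1', secret: 'verySecretKey1' }],
    }],
});
// error is undefined when the config satisfies every schema rule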
@@ -0,0 +1,16 @@
import { Logger } from 'werelogs';
import AuthLoader from './AuthLoader';
import { Accounts } from './types';

/**
 * @deprecated please use {@link AuthLoader} class instead
 * @return true on erroneous data, false on success
 */
export default function validateAuthConfig(
    authdata: Accounts,
    logApi?: { Logger: typeof Logger }
) {
    const authLoader = new AuthLoader(logApi);
    authLoader.addAccounts(authdata);
    return !authLoader.validate();
}
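The shim keeps the old inverted contract, so callers migrating to AuthLoader should flip the boolean; a sketch with a placeholder authdata object:

import validateAuthConfig from './validateAuthConfig'; // path assumed

declare const authdata: any; // Accounts-shaped config (assumed)
const hasErrors = validateAuthConfig(authdata); // true means the data is invalid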
@@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-
-const crypto = require('crypto');
+import * as crypto from 'crypto';
 
 /** hashSignature for v2 Auth
  * @param {string} stringToSign - built string to sign per AWS rules
@@ -8,11 +6,19 @@ const crypto = require('crypto');
  * @param {string} algorithm - either SHA256 or SHA1
  * @return {string} reconstructed signature
  */
-function hashSignature(stringToSign, secretKey, algorithm) {
+export function hashSignature(
+    stringToSign: string,
+    secretKey: string,
+    algorithm: 'SHA256' | 'SHA1'
+): string {
     const hmacObject = crypto.createHmac(algorithm, secretKey);
     return hmacObject.update(stringToSign, 'binary').digest('base64');
 }
 
+const sha256Digest = (key: string | Buffer, data: string) => {
+    return crypto.createHmac('sha256', key).update(data, 'binary').digest();
+};
+
 /** calculateSigningKey for v4 Auth
  * @param {string} secretKey - requester's secretKey
  * @param {string} region - region included in request
@@ -20,16 +26,15 @@ function hashSignature(stringToSign, secretKey, algorithm) {
  * @param {string} [service] - To specify another service than s3
  * @return {string} signingKey - signingKey to calculate signature
  */
-function calculateSigningKey(secretKey, region, scopeDate, service) {
-    const dateKey = crypto.createHmac('sha256', `AWS4${secretKey}`)
-        .update(scopeDate, 'binary').digest();
-    const dateRegionKey = crypto.createHmac('sha256', dateKey)
-        .update(region, 'binary').digest();
-    const dateRegionServiceKey = crypto.createHmac('sha256', dateRegionKey)
-        .update(service || 's3', 'binary').digest();
-    const signingKey = crypto.createHmac('sha256', dateRegionServiceKey)
-        .update('aws4_request', 'binary').digest();
+export function calculateSigningKey(
+    secretKey: string,
+    region: string,
+    scopeDate: string,
+    service?: string
+): Buffer {
+    const dateKey = sha256Digest(`AWS4${secretKey}`, scopeDate);
+    const dateRegionKey = sha256Digest(dateKey, region);
+    const dateRegionServiceKey = sha256Digest(dateRegionKey, service || 's3');
+    const signingKey = sha256Digest(dateRegionServiceKey, 'aws4_request');
     return signingKey;
 }
-
-module.exports = { hashSignature, calculateSigningKey };
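The refactor above folds four identical HMAC steps into sha256Digest without changing the derived key. A usage sketch with placeholder credentials:

import { calculateSigningKey, hashSignature } from './vaultUtilities'; // path assumed

// SigV4 chain: HMAC('AWS4' + secret, date) -> region -> service -> 'aws4_request'
const signingKey = calculateSigningKey('verySecretKey1', 'us-east-1', '20240101');
// v2 is a single HMAC over the string-to-sign, base64-encoded
const v2Signature = hashSignature(
    'PUT\n\n\nThu, 01 Jan 2024 00:00:00 GMT\n/bucket',
    'verySecretKey1',
    'SHA256');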
@@ -1,223 +0,0 @@
const fs = require('fs');

const glob = require('simple-glob');
const joi = require('@hapi/joi');
const werelogs = require('werelogs');

const ARN = require('../../models/ARN');

/**
 * Load authentication information from files or pre-loaded account
 * objects
 *
 * @class AuthLoader
 */
class AuthLoader {
    constructor(logApi) {
        this._log = new (logApi || werelogs).Logger('S3');
        this._authData = { accounts: [] };
        // null: unknown validity, true/false: valid or invalid
        this._isValid = null;

        this._joiKeysValidator = joi.array()
            .items({
                access: joi.string().required(),
                secret: joi.string().required(),
            })
            .required();

        const accountsJoi = joi.array()
            .items({
                name: joi.string().required(),
                email: joi.string().email().required(),
                arn: joi.string().required(),
                canonicalID: joi.string().required(),
                shortid: joi.string().regex(/^[0-9]{12}$/).required(),
                keys: this._joiKeysValidator,
                // backward-compat
                users: joi.array(),
            })
            .required()
            .unique('arn')
            .unique('email')
            .unique('canonicalID');
        this._joiValidator = joi.object({ accounts: accountsJoi });
    }

    /**
     * add one or more accounts to the authentication info
     *
     * @param {object} authData - authentication data
     * @param {object[]} authData.accounts - array of account data
     * @param {string} authData.accounts[].name - account name
     * @param {string} authData.accounts[].email: email address
     * @param {string} authData.accounts[].arn: account ARN,
     * e.g. 'arn:aws:iam::123456789012:root'
     * @param {string} authData.accounts[].canonicalID account
     * canonical ID
     * @param {string} authData.accounts[].shortid account ID number,
     * e.g. '123456789012'
     * @param {object[]} authData.accounts[].keys array of
     * access/secret keys
     * @param {object[]} authData.accounts[].keys[].access access key
     * @param {object[]} authData.accounts[].keys[].secret secret key
     * @param {string} [filePath] - optional file path info for
     * logging purpose
     * @return {undefined}
     */
    addAccounts(authData, filePath) {
        const isValid = this._validateData(authData, filePath);
        if (isValid) {
            this._authData.accounts =
                this._authData.accounts.concat(authData.accounts);
            // defer validity checking when getting data to avoid
            // logging multiple times the errors (we need to validate
            // all accounts at once to detect duplicate values)
            if (this._isValid) {
                this._isValid = null;
            }
        } else {
            this._isValid = false;
        }
    }

    /**
     * add account information from a file
     *
     * @param {string} filePath - file path containing JSON
     * authentication info (see {@link addAccounts()} for format)
     * @return {undefined}
     */
    addFile(filePath) {
        const authData = JSON.parse(fs.readFileSync(filePath));
        this.addAccounts(authData, filePath);
    }

    /**
     * add account information from a filesystem path
     *
     * @param {string|string[]} globPattern - filesystem glob pattern,
     * can be a single string or an array of glob patterns. Globs
     * can be simple file paths or can contain glob matching
     * characters, like '/a/b/*.json'. The matching files are
     * individually loaded as JSON and accounts are added. See
     * {@link addAccounts()} for JSON format.
     * @return {undefined}
     */
    addFilesByGlob(globPattern) {
        const files = glob(globPattern);
        files.forEach(filePath => this.addFile(filePath));
    }

    /**
     * perform validation on authentication info previously
     * loaded. Note that it has to be done on the entire set after an
     * update to catch duplicate account IDs or access keys.
     *
     * @return {boolean} true if authentication info is valid
     * false otherwise
     */
    validate() {
        if (this._isValid === null) {
            this._isValid = this._validateData(this._authData);
        }
        return this._isValid;
    }

    /**
     * get authentication info as a plain JS object containing all accounts
     * under the "accounts" attribute, with validation.
     *
     * @return {object|null} the validated authentication data
     * null if invalid
     */
    getData() {
        return this.validate() ? this._authData : null;
    }

    _validateData(authData, filePath) {
        const res = joi.validate(authData, this._joiValidator,
            { abortEarly: false });
        if (res.error) {
            this._dumpJoiErrors(res.error.details, filePath);
            return false;
        }
        let allKeys = [];
        let arnError = false;
        const validatedAuth = res.value;
        validatedAuth.accounts.forEach(account => {
            // backward-compat: ignore arn if starts with 'aws:' and log a
            // warning
            if (account.arn.startsWith('aws:')) {
                this._log.error(
                    'account must have a valid AWS ARN, legacy examples ' +
                    'starting with \'aws:\' are not supported anymore. ' +
                    'Please convert to a proper account entry (see ' +
                    'examples at https://github.com/scality/S3/blob/' +
                    'master/conf/authdata.json). Also note that support ' +
                    'for account users has been dropped.',
                    { accountName: account.name, accountArn: account.arn,
                        filePath });
                arnError = true;
                return;
            }
            if (account.users) {
                this._log.error(
                    'support for account users has been dropped, consider ' +
                    'turning users into account entries (see examples at ' +
                    'https://github.com/scality/S3/blob/master/conf/' +
                    'authdata.json)',
                    { accountName: account.name, accountArn: account.arn,
                        filePath });
                arnError = true;
                return;
            }
            const arnObj = ARN.createFromString(account.arn);
            if (arnObj.error) {
                this._log.error(
                    'authentication config validation error',
                    { reason: arnObj.error.description,
                        accountName: account.name, accountArn: account.arn,
                        filePath });
                arnError = true;
                return;
            }
            if (!arnObj.isIAMAccount()) {
                this._log.error(
                    'authentication config validation error',
                    { reason: 'not an IAM account ARN',
                        accountName: account.name, accountArn: account.arn,
                        filePath });
                arnError = true;
                return;
            }
            allKeys = allKeys.concat(account.keys);
        });
        if (arnError) {
|
||||||
return false;
|
|
||||||
}
|
|
||||||
const uniqueKeysRes = joi.validate(
|
|
||||||
allKeys, this._joiKeysValidator.unique('access'));
|
|
||||||
if (uniqueKeysRes.error) {
|
|
||||||
this._dumpJoiErrors(uniqueKeysRes.error.details, filePath);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
_dumpJoiErrors(errors, filePath) {
|
|
||||||
errors.forEach(err => {
|
|
||||||
const logInfo = { item: err.path, filePath };
|
|
||||||
if (err.type === 'array.unique') {
|
|
||||||
logInfo.reason = `duplicate value '${err.context.path}'`;
|
|
||||||
logInfo.dupValue = err.context.value[err.context.path];
|
|
||||||
} else {
|
|
||||||
logInfo.reason = err.message;
|
|
||||||
logInfo.context = err.context;
|
|
||||||
}
|
|
||||||
this._log.error('authentication config validation error',
|
|
||||||
logInfo);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = AuthLoader;
|
|
|
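For orientation, a minimal sketch of how this loader is typically driven; the glob pattern and logger wiring below are illustrative assumptions, not taken from this changeset:

// Hypothetical usage of AuthLoader (the config path is an example only).
import AuthLoader from './AuthLoader';

const loader = new AuthLoader(logApi); // logApi: a werelogs-style logger API
loader.addFilesByGlob('/conf/authdata*.json'); // load every matching file
if (!loader.validate()) {
    // duplicate ARNs/emails/access keys or malformed entries land here
    throw new Error('invalid authentication configuration');
}
const authData = loader.getData(); // { accounts: [...] }, or null if invalid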
@@ -1,145 +0,0 @@
/**
 * Class that provides an internal indexing over the simple data provided by
 * the authentication configuration file for the memory backend. This allows
 * accessing the different authentication entities through various types of
 * keys.
 *
 * @class Indexer
 */
class Indexer {
    /**
     * @constructor
     * @param {object} authdata - the authentication config file's data
     * @param {object[]} authdata.accounts - array of account objects
     * @param {string=} authdata.accounts[].name - account name
     * @param {string} authdata.accounts[].email - account email
     * @param {string} authdata.accounts[].arn - IAM resource name
     * @param {string} authdata.accounts[].canonicalID - account canonical ID
     * @param {string} authdata.accounts[].shortid - short account ID
     * @param {object[]=} authdata.accounts[].keys - array of key objects
     * @param {string} authdata.accounts[].keys[].access - access key
     * @param {string} authdata.accounts[].keys[].secret - secret key
     * @return {undefined}
     */
    constructor(authdata) {
        this.accountsBy = {
            canId: {},
            accessKey: {},
            email: {},
        };

        /*
         * authdata may be missing if the application is configured to
         * use an authentication backend other than the in-memory one;
         * handle that case here rather than failing later.
         */
        if (!authdata) {
            return;
        }

        this._build(authdata);
    }

    _indexAccount(account) {
        const accountData = {
            arn: account.arn,
            canonicalID: account.canonicalID,
            shortid: account.shortid,
            accountDisplayName: account.name,
            email: account.email.toLowerCase(),
            keys: [],
        };
        this.accountsBy.canId[accountData.canonicalID] = accountData;
        this.accountsBy.email[accountData.email] = accountData;
        if (account.keys !== undefined) {
            account.keys.forEach(key => {
                accountData.keys.push(key);
                this.accountsBy.accessKey[key.access] = accountData;
            });
        }
    }

    _build(authdata) {
        authdata.accounts.forEach(account => {
            this._indexAccount(account);
        });
    }

    /**
     * This method returns the account associated with a canonical ID.
     *
     * @param {string} canId - The canonicalId of the account
     * @return {Object} account - The account object
     * @return {Object} account.arn - The account's ARN
     * @return {Object} account.canonicalID - The account's canonical ID
     * @return {Object} account.shortid - The account's internal shortid
     * @return {Object} account.accountDisplayName - The account's display name
     * @return {Object} account.email - The account's lowercased email
     */
    getEntityByCanId(canId) {
        return this.accountsBy.canId[canId];
    }

    /**
     * This method returns the entity (either an account or a user) associated
     * with an access key.
     *
     * @param {string} key - The accessKey of the entity
     * @return {Object} entity - The entity object
     * @return {Object} entity.arn - The entity's ARN
     * @return {Object} entity.canonicalID - The canonical ID for the entity's
     * account
     * @return {Object} entity.shortid - The entity's internal shortid
     * @return {Object} entity.accountDisplayName - The entity's account
     * display name
     * @return {Object} entity.IAMDisplayName - The user's display name
     * (if the entity is a user)
     * @return {Object} entity.email - The entity's lowercased email
     */
    getEntityByKey(key) {
        return this.accountsBy.accessKey[key];
    }

    /**
     * This method returns the entity (either an account or a user) associated
     * with an email address.
     *
     * @param {string} email - The email address
     * @return {Object} entity - The entity object
     * @return {Object} entity.arn - The entity's ARN
     * @return {Object} entity.canonicalID - The canonical ID for the entity's
     * account
     * @return {Object} entity.shortid - The entity's internal shortid
     * @return {Object} entity.accountDisplayName - The entity's account
     * display name
     * @return {Object} entity.IAMDisplayName - The user's display name
     * (if the entity is a user)
     * @return {Object} entity.email - The entity's lowercased email
     */
    getEntityByEmail(email) {
        const lowerCasedEmail = email.toLowerCase();
        return this.accountsBy.email[lowerCasedEmail];
    }

    /**
     * This method returns the secret key associated with the entity.
     * @param {Object} entity - the entity object
     * @param {string} accessKey - access key
     * @returns {string} secret key
     */
    getSecretKey(entity, accessKey) {
        return entity.keys
            .filter(kv => kv.access === accessKey)[0].secret;
    }

    /**
     * This method returns the account display name associated with the entity.
     * @param {Object} entity - the entity object
     * @returns {string} account display name
     */
    getAcctDisplayName(entity) {
        return entity.accountDisplayName;
    }
}

module.exports = Indexer;
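A short sketch of the lookup flow this index supported; the access key value is a placeholder:

// Hypothetical lookups against the in-memory Indexer.
const indexer = new Indexer(authdata); // authdata: parsed auth config
const entity = indexer.getEntityByKey('AKIAIOSFODNN7EXAMPLE');
if (entity) {
    const secret = indexer.getSecretKey(entity, 'AKIAIOSFODNN7EXAMPLE');
    const displayName = indexer.getAcctDisplayName(entity);
}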
@@ -1,18 +0,0 @@
const AuthLoader = require('./AuthLoader');

/**
 * @deprecated please use the {@link AuthLoader} class instead
 *
 * @param {object} authdata - the authentication config file's data
 * @param {werelogs.API} logApi - object providing a constructor function
 * for the Logger object
 * @return {boolean} true on erroneous data, false on success
 */
function validateAuthConfig(authdata, logApi) {
    const authLoader = new AuthLoader(logApi);
    authLoader.addAccounts(authdata);
    return !authLoader.validate();
}

module.exports = validateAuthConfig;
@@ -1,7 +1,5 @@
-'use strict'; // eslint-disable-line strict
-
-function algoCheck(signatureLength) {
-    let algo;
+export default function algoCheck(signatureLength: number) {
+    let algo: 'sha256' | 'sha1';
     // If the signature sent is 44 characters,
     // this means that sha256 was used:
     // 44 characters in base64
@@ -13,7 +11,6 @@ function algoCheck(signatureLength) {
     if (signatureLength === SHA1LEN) {
         algo = 'sha1';
     }
+    // @ts-ignore
     return algo;
 }
-
-module.exports = algoCheck;
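The two magic lengths follow from base64 arithmetic (4 * ceil(bytes / 3)): a sha256 digest is 32 bytes, a sha1 digest 20 bytes. A quick check:

import algoCheck from './algoCheck';

// sha256: 32 bytes -> 4 * ceil(32 / 3) = 44 base64 characters
// sha1:   20 bytes -> 4 * ceil(20 / 3) = 28 base64 characters
algoCheck(44); // 'sha256'
algoCheck(28); // 'sha1'
algoCheck(10); // undefined (hence the @ts-ignore on the return)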
@@ -1,11 +0,0 @@
'use strict'; // eslint-disable-line strict

const headerAuthCheck = require('./headerAuthCheck');
const queryAuthCheck = require('./queryAuthCheck');

const authV2 = {
    header: headerAuthCheck,
    query: queryAuthCheck,
};

module.exports = authV2;
@@ -0,0 +1,2 @@
export * as header from './headerAuthCheck';
export * as query from './queryAuthCheck';
@@ -1,9 +1,9 @@
-'use strict'; // eslint-disable-line strict
-const errors = require('../../errors').default;
+import { Logger } from 'werelogs';
+import errors from '../../errors';

 const epochTime = new Date('1970-01-01').getTime();

-function checkRequestExpiry(timestamp, log) {
+export default function checkRequestExpiry(timestamp: number, log: Logger) {
     // If timestamp is before epochTime, the request is invalid and return
     // errors.AccessDenied
     if (timestamp < epochTime) {
@@ -32,5 +32,3 @@ function checkRequestExpiry(timestamp, log) {

     return undefined;
 }
-
-module.exports = checkRequestExpiry;
@@ -1,11 +1,14 @@
-'use strict'; // eslint-disable-line strict
-
-const utf8 = require('utf8');
-
-const getCanonicalizedAmzHeaders = require('./getCanonicalizedAmzHeaders');
-const getCanonicalizedResource = require('./getCanonicalizedResource');
+import { Logger } from 'werelogs';
+import utf8 from 'utf8';
+import getCanonicalizedAmzHeaders from './getCanonicalizedAmzHeaders';
+import getCanonicalizedResource from './getCanonicalizedResource';

-function constructStringToSign(request, data, log, clientType) {
+export default function constructStringToSign(
+    request: any,
+    data: { [key: string]: string },
+    log: Logger,
+    clientType?: any
+) {
     /*
     Build signature per AWS requirements:
     StringToSign = HTTP-Verb + '\n' +
@@ -42,5 +45,3 @@ function constructStringToSign(request, data, log, clientType) {
         + getCanonicalizedResource(request, clientType);
     return utf8.encode(stringToSign);
 }
-
-module.exports = constructStringToSign;
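The string assembled here follows the AWS signature v2 layout; the values below are illustrative only:

// Shape of the v2 StringToSign (per the AWS signature v2 spec):
const stringToSign =
    'GET\n' +                             // HTTP verb
    '\n' +                                // Content-MD5 (empty here)
    '\n' +                                // Content-Type (empty here)
    'Tue, 27 Mar 2007 19:36:42 +0000\n' + // Date header
    'x-amz-meta-color:red\n' +            // canonicalized x-amz headers
    '/examplebucket/photo.jpg';           // canonicalized resource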
@@ -1,14 +1,12 @@
-'use strict'; // eslint-disable-line strict
-
-function getCanonicalizedAmzHeaders(headers, clientType) {
+export default function getCanonicalizedAmzHeaders(headers: Headers, clientType: string) {
     /*
     Iterate through headers and pull any headers that are x-amz headers.
     Need to include 'x-amz-date' here even though AWS docs
     ambiguous on this.
     */
     const filterFn = clientType === 'GCP' ?
-        val => val.substr(0, 7) === 'x-goog-' :
-        val => val.substr(0, 6) === 'x-amz-';
+        (val: string) => val.substr(0, 7) === 'x-goog-' :
+        (val: string) => val.substr(0, 6) === 'x-amz-';
     const amzHeaders = Object.keys(headers)
         .filter(filterFn)
         .map(val => [val.trim(), headers[val].trim()]);
@@ -43,5 +41,3 @@ function getCanonicalizedAmzHeaders(headers, clientType) {
         `${headerStr}${current[0]}:${current[1]}\n`,
         '');
 }
-
-module.exports = getCanonicalizedAmzHeaders;
@@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-
-const url = require('url');
+import * as url from 'url';

 const gcpSubresources = [
     'acl',
@@ -41,7 +39,7 @@ const awsSubresources = [
     'website',
 ];

-function getCanonicalizedResource(request, clientType) {
+export default function getCanonicalizedResource(request: any, clientType: string) {
     /*
     This variable is used to determine whether to insert
     a '?' or '&'. Once a query parameter is added to the resourceString,
@@ -117,5 +115,3 @@ function getCanonicalizedResource(request, clientType) {
     }
     return resourceString;
 }
-
-module.exports = getCanonicalizedResource;
@@ -1,12 +1,11 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../errors').default;
-const constants = require('../../constants');
-const constructStringToSign = require('./constructStringToSign');
-const checkRequestExpiry = require('./checkRequestExpiry');
-const algoCheck = require('./algoCheck');
+import { Logger } from 'werelogs';
+import errors from '../../errors';
+import * as constants from '../../constants';
+import constructStringToSign from './constructStringToSign';
+import checkRequestExpiry from './checkRequestExpiry';
+import algoCheck from './algoCheck';

-function check(request, log, data) {
+export function check(request: any, log: Logger, data: { [key: string]: string }) {
     log.trace('running header auth check');
     const headers = request.headers;

@@ -52,6 +51,7 @@ function check(request, log, data) {
         log.trace('invalid authorization header', { authInfo });
         return { err: errors.MissingSecurityHeader };
     }
+    // @ts-ignore
     log.addDefaultFields({ accessKey });

     const signatureFromRequest = authInfo.substring(semicolonIndex + 1).trim();
@@ -80,5 +80,3 @@ function check(request, log, data) {
         },
     };
 }
-
-module.exports = { check };
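For reference, the V2 Authorization header parsed by this check has the shape 'AWS <accessKey>:<signature>'. A sketch of the split, with placeholder key and signature values (the exact parsing code sits in the elided part of this hunk):

const authInfo = 'AWS AKIAIOSFODNN7EXAMPLE:frJIUN8DYpKDtOLCwo//yllqDzg=';
const semicolonIndex = authInfo.indexOf(':');
const accessKey = authInfo.substring(4, semicolonIndex).trim();
const signatureFromRequest = authInfo.substring(semicolonIndex + 1).trim();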
@@ -1,11 +1,10 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../errors').default;
-const constants = require('../../constants');
-const algoCheck = require('./algoCheck');
-const constructStringToSign = require('./constructStringToSign');
+import { Logger } from 'werelogs';
+import errors from '../../errors';
+import * as constants from '../../constants';
+import algoCheck from './algoCheck';
+import constructStringToSign from './constructStringToSign';

-function check(request, log, data) {
+export function check(request: any, log: Logger, data: { [key: string]: string }) {
     log.trace('running query auth check');
     if (request.method === 'POST') {
         log.debug('query string auth not supported for post requests');
@@ -51,6 +50,7 @@ function check(request, log, data) {
         return { err: errors.RequestTimeTooSkewed };
     }
     const accessKey = data.AWSAccessKeyId;
+    // @ts-ignore
     log.addDefaultFields({ accessKey });

     const signatureFromRequest = decodeURIComponent(data.Signature);
@@ -82,5 +82,3 @@ function check(request, log, data) {
         },
     };
 }
-
-module.exports = { check };
@@ -1,11 +0,0 @@
'use strict'; // eslint-disable-line strict

const headerAuthCheck = require('./headerAuthCheck');
const queryAuthCheck = require('./queryAuthCheck');

const authV4 = {
    header: headerAuthCheck,
    query: queryAuthCheck,
};

module.exports = authV4;
@@ -0,0 +1,2 @@
export * as header from './headerAuthCheck';
export * as query from './queryAuthCheck';
@@ -1,5 +1,3 @@
-'use strict'; // eslint-disable-line strict
-
 /*
 AWS's URI encoding rules:
 URI encode every byte. Uri-Encode() must enforce the following rules:
@@ -19,7 +17,7 @@ See http://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-header-based-auth.html
 */

 // converts utf8 character to hex and pads "%" before every two hex digits
-function _toHexUTF8(char) {
+function _toHexUTF8(char: string) {
     const hexRep = Buffer.from(char, 'utf8').toString('hex').toUpperCase();
     let res = '';
     hexRep.split('').forEach((v, n) => {
@@ -32,18 +30,23 @@ function _toHexUTF8(char) {
     return res;
 }

-function awsURIencode(input, encodeSlash, noEncodeStar) {
-    const encSlash = encodeSlash === undefined ? true : encodeSlash;
-    let encoded = '';
+export default function awsURIencode(
+    input: string,
+    encodeSlash?: boolean,
+    noEncodeStar?: boolean
+) {
     /**
      * Duplicate query params are not supported by AWS S3 APIs. These params
      * are parsed as Arrays by Node.js HTTP parser which breaks this method
      */
     if (typeof input !== 'string') {
-        return encoded;
+        return '';
     }
+    let encoded = "";
+    const slash = encodeSlash === undefined || encodeSlash ? '%2F' : '/';
+    const star = noEncodeStar !== undefined && noEncodeStar ? '*' : '%2A';
     for (let i = 0; i < input.length; i++) {
-        const ch = input.charAt(i);
+        let ch = input.charAt(i);
         if ((ch >= 'A' && ch <= 'Z') ||
             (ch >= 'a' && ch <= 'z') ||
             (ch >= '0' && ch <= '9') ||
@@ -53,14 +56,26 @@ function awsURIencode(input, encodeSlash, noEncodeStar) {
         } else if (ch === ' ') {
             encoded = encoded.concat('%20');
         } else if (ch === '/') {
-            encoded = encoded.concat(encSlash ? '%2F' : ch);
+            encoded = encoded.concat(slash);
         } else if (ch === '*') {
-            encoded = encoded.concat(noEncodeStar ? '*' : '%2A');
+            encoded = encoded.concat(star);
         } else {
+            if (ch >= '\uD800' && ch <= '\uDBFF') {
+                // If this character is a high surrogate peek the next character
+                // and join it with this one if the next character is a low
+                // surrogate.
+                // Otherwise the encoded URI will contain the two surrogates as
+                // two distinct UTF-8 sequences which is not valid UTF-8.
+                if (i + 1 < input.length) {
+                    const ch2 = input.charAt(i + 1);
+                    if (ch2 >= '\uDC00' && ch2 <= '\uDFFF') {
+                        i++;
+                        ch += ch2;
+                    }
+                }
+            }
             encoded = encoded.concat(_toHexUTF8(ch));
         }
     }
     return encoded;
 }
-
-module.exports = awsURIencode;
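Expected behaviour of the rewritten encoder, derived from the branches above (outputs written out by hand, not captured from a run):

import awsURIencode from './awsURIencode';

awsURIencode('a/b');               // 'a%2Fb'   (slash encoded by default)
awsURIencode('a/b', false);        // 'a/b'     (encodeSlash === false)
awsURIencode('a b*c', true, true); // 'a%20b*c' (star kept for aws-sdk-java)
awsURIencode('\u{1F600}');         // '%F0%9F%98%80' (surrogate pair joined,
                                   // then encoded as one UTF-8 sequence)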
@@ -1,17 +1,33 @@
-'use strict'; // eslint-disable-line strict
-
-const crypto = require('crypto');
-
-const createCanonicalRequest = require('./createCanonicalRequest');
+import * as crypto from 'crypto';
+import { Logger } from 'werelogs';
+import createCanonicalRequest from './createCanonicalRequest';

 /**
  * constructStringToSign - creates V4 stringToSign
  * @param {object} params - params object
  * @returns {string} - stringToSign
  */
-function constructStringToSign(params) {
-    const { request, signedHeaders, payloadChecksum, credentialScope, timestamp,
-        query, log, proxyPath } = params;
+export default function constructStringToSign(params: {
+    request: any;
+    signedHeaders: any;
+    payloadChecksum: any;
+    credentialScope: string;
+    timestamp: string;
+    query: { [key: string]: string };
+    log?: Logger;
+    proxyPath?: string;
+    awsService: string;
+}): string | Error {
+    const {
+        request,
+        signedHeaders,
+        payloadChecksum,
+        credentialScope,
+        timestamp,
+        query,
+        log,
+        proxyPath,
+    } = params;
     const path = proxyPath || request.path;

     const canonicalReqResult = createCanonicalRequest({
@@ -24,6 +40,8 @@ function constructStringToSign(params) {
         service: params.awsService,
     });

+    // TODO Why that line?
+    // @ts-ignore
     if (canonicalReqResult instanceof Error) {
         if (log) {
             log.error('error creating canonicalRequest');
@@ -40,5 +58,3 @@ function constructStringToSign(params) {
         `${credentialScope}\n${canonicalHex}`;
     return stringToSign;
 }
-
-module.exports = constructStringToSign;
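The resulting V4 stringToSign has the fixed four-line layout from the SigV4 spec; sketched here with placeholder values:

// Layout of the value returned above (placeholders, not real data):
const stringToSign =
    'AWS4-HMAC-SHA256\n' +
    '20160202T220410Z\n' +                   // timestamp
    '20160202/us-east-1/s3/aws4_request\n' + // credentialScope
    '<hex sha256 of the canonical request>'; // canonicalHex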
@@ -1,27 +1,33 @@
-'use strict'; // eslint-disable-line strict
-
-const awsURIencode = require('./awsURIencode');
-const crypto = require('crypto');
-const queryString = require('querystring');
+import * as crypto from 'crypto';
+import * as queryString from 'querystring';
+import awsURIencode from './awsURIencode';

 /**
  * createCanonicalRequest - creates V4 canonical request
- * @param {object} params - contains pHttpVerb (request type),
+ * @param params - contains pHttpVerb (request type),
  * pResource (parsed from URL), pQuery (request query),
  * pHeaders (request headers), pSignedHeaders (signed headers from request),
  * payloadChecksum (from request)
- * @returns {string} - canonicalRequest
+ * @returns - canonicalRequest
  */
-function createCanonicalRequest(params) {
+export default function createCanonicalRequest(
+    params: {
+        pHttpVerb: string;
+        pResource: string;
+        pQuery: { [key: string]: string };
+        pHeaders: any;
+        pSignedHeaders: any;
+        service: string;
+        payloadChecksum: string;
+    }
+) {
     const pHttpVerb = params.pHttpVerb;
     const pResource = params.pResource;
     const pQuery = params.pQuery;
     const pHeaders = params.pHeaders;
     const pSignedHeaders = params.pSignedHeaders;
     const service = params.service;

     let payloadChecksum = params.payloadChecksum;

     if (!payloadChecksum) {
         if (pHttpVerb === 'GET') {
             payloadChecksum = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b' +
@@ -34,7 +40,7 @@ function createCanonicalRequest(params) {
         if (/aws-sdk-java\/[0-9.]+/.test(pHeaders['user-agent'])) {
             notEncodeStar = true;
         }
-        let payload = queryString.stringify(pQuery, null, null, {
+        let payload = queryString.stringify(pQuery, undefined, undefined, {
             encodeURIComponent: input => awsURIencode(input, true,
                 notEncodeStar),
         });
@@ -61,11 +67,11 @@ function createCanonicalRequest(params) {

     // signed headers
     const signedHeadersList = pSignedHeaders.split(';');
-    signedHeadersList.sort((a, b) => a.localeCompare(b));
+    signedHeadersList.sort((a: any, b: any) => a.localeCompare(b));
     const signedHeaders = signedHeadersList.join(';');

     // canonical headers
-    const canonicalHeadersList = signedHeadersList.map(signedHeader => {
+    const canonicalHeadersList = signedHeadersList.map((signedHeader: any) => {
         if (pHeaders[signedHeader] !== undefined) {
             const trimmedHeader = pHeaders[signedHeader]
                 .trim().replace(/\s+/g, ' ');
@@ -87,5 +93,3 @@ function createCanonicalRequest(params) {
         `${signedHeaders}\n${payloadChecksum}`;
     return canonicalRequest;
 }
-
-module.exports = createCanonicalRequest;
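Putting the pieces together, the canonical request concatenated at the end of this function has the standard SigV4 shape (illustrative values, query keys sorted):

const canonicalRequest =
    'GET\n' +                            // pHttpVerb
    '/examplebucket/photo.jpg\n' +       // canonical URI
    'max-keys=50&prefix=photos\n' +      // canonical (sorted) query string
    'host:s3.amazonaws.com\n' +          // canonical headers, one per line
    'x-amz-date:20160202T220410Z\n' +
    '\n' +                               // blank line after headers
    'host;x-amz-date\n' +                // signedHeaders
    'e3b0c44298fc1c149afbf4c8996fb924' + // payloadChecksum (sha256 hex of
    '27ae41e4649b934ca495991b7852b855';  // an empty body, in this example)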
@@ -1,27 +1,32 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../../lib/errors').default;
-const constants = require('../../constants');
-
-const constructStringToSign = require('./constructStringToSign');
-const checkTimeSkew = require('./timeUtils').checkTimeSkew;
-const convertUTCtoISO8601 = require('./timeUtils').convertUTCtoISO8601;
-const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs;
-const extractAuthItems = require('./validateInputs').extractAuthItems;
-const validateCredentials = require('./validateInputs').validateCredentials;
-const areSignedHeadersComplete =
-    require('./validateInputs').areSignedHeadersComplete;
+import { Logger } from 'werelogs';
+import errors from '../../../lib/errors';
+import * as constants from '../../constants';
+import constructStringToSign from './constructStringToSign';
+import {
+    checkTimeSkew,
+    convertUTCtoISO8601,
+    convertAmzTimeToMs,
+} from './timeUtils';
+import {
+    extractAuthItems,
+    validateCredentials,
+    areSignedHeadersComplete,
+} from './validateInputs';

 /**
  * V4 header auth check
- * @param {object} request - HTTP request object
- * @param {object} log - logging object
- * @param {object} data - Parameters from queryString parsing or body of
+ * @param request - HTTP request object
+ * @param log - logging object
+ * @param data - Parameters from queryString parsing or body of
  * POST request
- * @param {string} awsService - Aws service ('iam' or 's3')
- * @return {callback} calls callback
+ * @param awsService - Aws service ('iam' or 's3')
  */
-function check(request, log, data, awsService) {
+export function check(
+    request: any,
+    log: Logger,
+    data: { [key: string]: string },
+    awsService: string
+) {
     log.trace('running header auth check');

     const token = request.headers['x-amz-security-token'];
@@ -62,16 +67,16 @@ function check(request, log, data, awsService) {

     log.trace('authorization header from request', { authHeader });

-    const signatureFromRequest = authHeaderItems.signatureFromRequest;
-    const credentialsArr = authHeaderItems.credentialsArr;
-    const signedHeaders = authHeaderItems.signedHeaders;
+    const signatureFromRequest = authHeaderItems.signatureFromRequest!;
+    const credentialsArr = authHeaderItems.credentialsArr!;
+    const signedHeaders = authHeaderItems.signedHeaders!;

     if (!areSignedHeadersComplete(signedHeaders, request.headers)) {
         log.debug('signedHeaders are incomplete', { signedHeaders });
         return { err: errors.AccessDenied };
     }

-    let timestamp;
+    let timestamp: string | undefined;
     // check request timestamp
     const xAmzDate = request.headers['x-amz-date'];
     if (xAmzDate) {
@@ -127,6 +132,17 @@ function check(request, log, data, awsService) {
         return { err: errors.RequestTimeTooSkewed };
     }

+    let proxyPath: string | undefined;
+    if (request.headers.proxy_path) {
+        try {
+            proxyPath = decodeURIComponent(request.headers.proxy_path);
+        } catch (err) {
+            log.debug('invalid proxy_path header', { proxyPath, err });
+            return { err: errors.InvalidArgument.customizeDescription(
+                'invalid proxy_path header') };
+        }
+    }
+
     const stringToSign = constructStringToSign({
         log,
         request,
@@ -136,6 +152,7 @@ function check(request, log, data, awsService) {
         timestamp,
         payloadChecksum,
         awsService: service,
+        proxyPath,
     });
     log.trace('constructed stringToSign', { stringToSign });
     if (stringToSign instanceof Error) {
@@ -166,5 +183,3 @@ function check(request, log, data, awsService) {
         },
     };
 }
-
-module.exports = { check };
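The signature itself is verified by the vault backend, not here. For reference, a sketch of the standard SigV4 derivation the backend is expected to apply when comparing against signatureFromRequest (argument values are placeholders):

import * as crypto from 'crypto';

const hmac = (key: crypto.BinaryLike, data: string) =>
    crypto.createHmac('sha256', key).update(data).digest();

// Standard AWS SigV4 signing-key chain (per the AWS docs).
function signV4(secretKey: string, scopeDate: string, region: string,
    service: string, stringToSign: string): string {
    const kDate = hmac(`AWS4${secretKey}`, scopeDate); // e.g. '20160202'
    const kRegion = hmac(kDate, region);               // e.g. 'us-east-1'
    const kService = hmac(kRegion, service);           // e.g. 's3'
    const kSigning = hmac(kService, 'aws4_request');
    return crypto.createHmac('sha256', kSigning)
        .update(stringToSign).digest('hex');
}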
@@ -1,24 +1,18 @@
-'use strict'; // eslint-disable-line strict
-
-const constants = require('../../constants');
-const errors = require('../../errors').default;
-
-const constructStringToSign = require('./constructStringToSign');
-const checkTimeSkew = require('./timeUtils').checkTimeSkew;
-const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs;
-const validateCredentials = require('./validateInputs').validateCredentials;
-const extractQueryParams = require('./validateInputs').extractQueryParams;
-const areSignedHeadersComplete =
-    require('./validateInputs').areSignedHeadersComplete;
+import { Logger } from 'werelogs';
+import * as constants from '../../constants';
+import errors from '../../errors';
+import constructStringToSign from './constructStringToSign';
+import { checkTimeSkew, convertAmzTimeToMs } from './timeUtils';
+import { validateCredentials, extractQueryParams } from './validateInputs';
+import { areSignedHeadersComplete } from './validateInputs';

 /**
  * V4 query auth check
- * @param {object} request - HTTP request object
- * @param {object} log - logging object
- * @param {object} data - Contains authentication params (GET or POST data)
- * @return {callback} calls callback
+ * @param request - HTTP request object
+ * @param log - logging object
+ * @param data - Contains authentication params (GET or POST data)
  */
-function check(request, log, data) {
+export function check(request: any, log: Logger, data: { [key: string]: string }) {
     const authParams = extractQueryParams(data, log);

     if (Object.keys(authParams).length !== 5) {
@@ -33,11 +27,11 @@ function check(request, log, data) {
         return { err: errors.InvalidToken };
     }

-    const signedHeaders = authParams.signedHeaders;
-    const signatureFromRequest = authParams.signatureFromRequest;
-    const timestamp = authParams.timestamp;
-    const expiry = authParams.expiry;
-    const credential = authParams.credential;
+    const signedHeaders = authParams.signedHeaders!;
+    const signatureFromRequest = authParams.signatureFromRequest!;
+    const timestamp = authParams.timestamp!;
+    const expiry = authParams.expiry!;
+    const credential = authParams.credential!;

     if (!areSignedHeadersComplete(signedHeaders, request.headers)) {
         log.debug('signedHeaders are incomplete', { signedHeaders });
@@ -62,6 +56,17 @@ function check(request, log, data) {
         return { err: errors.RequestTimeTooSkewed };
     }

+    let proxyPath: string | undefined;
+    if (request.headers.proxy_path) {
+        try {
+            proxyPath = decodeURIComponent(request.headers.proxy_path);
+        } catch (err) {
+            log.debug('invalid proxy_path header', { proxyPath });
+            return { err: errors.InvalidArgument.customizeDescription(
+                'invalid proxy_path header') };
+        }
+    }
+
     // In query v4 auth, the canonical request needs
     // to include the query params OTHER THAN
     // the signature so create a
@@ -87,6 +92,7 @@ function check(request, log, data) {
         credentialScope:
             `${scopeDate}/${region}/${service}/${requestType}`,
         awsService: service,
+        proxyPath,
     });
     if (stringToSign instanceof Error) {
         return { err: stringToSign };
@@ -110,5 +116,3 @@ function check(request, log, data) {
         },
     };
 }
-
-module.exports = { check };
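For context, the five authentication parameters extracted from a V4 presigned URL correspond to the standard query keys shown below (example values; the signature travels separately as X-Amz-Signature):

// Illustrative presigned-URL query, before extractQueryParams():
const data = {
    'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
    'X-Amz-Credential':
        'AKIAIOSFODNN7EXAMPLE/20160202/us-east-1/s3/aws4_request',
    'X-Amz-Date': '20160202T220410Z',
    'X-Amz-Expires': '86400',
    'X-Amz-SignedHeaders': 'host',
    'X-Amz-Signature': '<hex signature>',
};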
@@ -1,33 +1,67 @@
-const { Transform } = require('stream');
-
-const async = require('async');
-const errors = require('../../../errors').default;
-
-const constructChunkStringToSign = require('./constructChunkStringToSign');
+import { Transform } from 'stream';
+import async from 'async';
+import errors from '../../../errors';
+import { Logger } from 'werelogs';
+import Vault, { AuthV4RequestParams } from '../../Vault';
+import { Callback } from '../../backends/in_memory/types';
+
+import constructChunkStringToSign from './constructChunkStringToSign';
+
+export type TransformParams = {
+    accessKey: string;
+    signatureFromRequest: string;
+    region: string;
+    scopeDate: string;
+    timestamp: string;
+    credentialScope: string;
+};

 /**
  * This class is designed to handle the chunks sent in a streaming
  * v4 Auth request
  */
-class V4Transform extends Transform {
+export default class V4Transform extends Transform {
+    log: Logger;
+    cb: Callback;
+    accessKey: string;
+    region: string;
+    scopeDate: string;
+    timestamp: string;
+    credentialScope: string;
+    lastSignature: string;
+    currentSignature?: string;
+    haveMetadata: boolean;
+    seekingDataSize: number;
+    currentData?: any;
+    dataCursor: number;
+    currentMetadata: any[];
+    lastPieceDone: boolean;
+    lastChunk: boolean;
+    vault: Vault;

     /**
      * @constructor
-     * @param {object} streamingV4Params - info for chunk authentication
-     * @param {string} streamingV4Params.accessKey - requester's accessKey
-     * @param {string} streamingV4Params.signatureFromRequest - signature
+     * @param streamingV4Params - info for chunk authentication
+     * @param streamingV4Params.accessKey - requester's accessKey
+     * @param streamingV4Params.signatureFromRequest - signature
      * sent with headers
-     * @param {string} streamingV4Params.region - region sent with auth header
-     * @param {string} streamingV4Params.scopeDate - date sent with auth header
-     * @param {string} streamingV4Params.timestamp - date parsed from headers
+     * @param streamingV4Params.region - region sent with auth header
+     * @param streamingV4Params.scopeDate - date sent with auth header
+     * @param streamingV4Params.timestamp - date parsed from headers
      * in ISO 8601 format: YYYYMMDDTHHMMSSZ
-     * @param {string} streamingV4Params.credentialScope - items from auth
+     * @param streamingV4Params.credentialScope - items from auth
      * header plus the string 'aws4_request' joined with '/':
      * timestamp/region/aws-service/aws4_request
-     * @param {object} vault - Vault instance passed from CloudServer
-     * @param {object} log - logger object
-     * @param {function} cb - callback to api
+     * @param vault - Vault instance passed from CloudServer
+     * @param log - logger object
+     * @param cb - callback to api
      */
-    constructor(streamingV4Params, vault, log, cb) {
+    constructor(
+        streamingV4Params: TransformParams,
+        vault: Vault,
+        log: Logger,
+        cb: Callback,
+    ) {
         const { accessKey, signatureFromRequest, region, scopeDate, timestamp,
             credentialScope } = streamingV4Params;
         super({});
@@ -55,8 +89,8 @@ class V4Transform extends Transform {

     /**
      * This function will parse the metadata portion of the chunk
-     * @param {Buffer} remainingChunk - chunk sent from _transform
-     * @return {object} response - if error, will return 'err' key with
+     * @param remainingChunk - chunk sent from _transform
+     * @return response - if error, will return 'err' key with
      * arsenal error value.
      * if incomplete metadata, will return 'completeMetadata' key with
      * value false
@@ -64,7 +98,7 @@ class V4Transform extends Transform {
      * value true and the key 'unparsedChunk' with the remaining chunk without
      * the parsed metadata piece
      */
-    _parseMetadata(remainingChunk) {
+    _parseMetadata(remainingChunk: Buffer) {
         let remainingPlusStoredMetadata = remainingChunk;
         // have metadata pieces so need to add to the front of
         // remainingChunk
@@ -103,9 +137,8 @@ class V4Transform extends Transform {
                 'metadata format');
             return { err: errors.InvalidArgument };
         }
-        let dataSize = splitMeta[0];
         // chunk-size is sent in hex
-        dataSize = Number.parseInt(dataSize, 16);
+        const dataSize = Number.parseInt(splitMeta[0], 16);
         if (Number.isNaN(dataSize)) {
             this.log.trace('chunk body did not contain valid size');
             return { err: errors.InvalidArgument };
@@ -139,28 +172,30 @@ class V4Transform extends Transform {

     /**
      * Build the stringToSign and authenticate the chunk
-     * @param {Buffer} dataToSend - chunk sent from _transform or null
+     * @param dataToSend - chunk sent from _transform or null
      * if last chunk without data
-     * @param {function} done - callback to _transform
-     * @return {function} executes callback with err if applicable
+     * @param done - callback to _transform
+     * @return executes callback with err if applicable
      */
-    _authenticate(dataToSend, done) {
+    _authenticate(dataToSend: Buffer | null, done: Callback) {
         // use prior sig to construct new string to sign
         const stringToSign = constructChunkStringToSign(this.timestamp,
-            this.credentialScope, this.lastSignature, dataToSend);
+            this.credentialScope, this.lastSignature, dataToSend ?? undefined);
         this.log.trace('constructed chunk string to sign',
             { stringToSign });
         // once used prior sig to construct string to sign, reassign
         // lastSignature to current signature
-        this.lastSignature = this.currentSignature;
-        const vaultParams = {
+        this.lastSignature = this.currentSignature!;
+        const vaultParams: AuthV4RequestParams = {
             log: this.log,
             data: {
                 accessKey: this.accessKey,
-                signatureFromRequest: this.currentSignature,
+                signatureFromRequest: this.currentSignature!,
                 region: this.region,
                 scopeDate: this.scopeDate,
                 stringToSign,
+                // TODO FIXME This can not work
+                // @ts-expect-errors
                 timestamp: this.timestamp,
                 credentialScope: this.credentialScope,
             },
@@ -181,12 +216,12 @@ class V4Transform extends Transform {
     * use the metadata to authenticate with vault and send the
     * data on to be stored if authentication passes
     *
-    * @param {Buffer} chunk - chunk from request body
-    * @param {string} encoding - Data encoding
-    * @param {function} callback - Callback(err, justDataChunk, encoding)
-    * @return {function} executes callback with err if applicable
+    * @param chunk - chunk from request body
+    * @param _encoding - Data encoding (unused)
+    * @param callback - Callback(err, justDataChunk, encoding)
+    * @return executes callback with err if applicable
     */
-    _transform(chunk, encoding, callback) {
+    _transform(chunk: Buffer, _encoding: string, callback: Callback) {
         // 'chunk' here is the node streaming chunk
         // transfer-encoding chunks should be of the format:
         // string(IntHexBase(chunk-size)) + ";chunk-signature=" +
@@ -223,6 +258,8 @@ class V4Transform extends Transform {
         }
         // have metadata so reset unparsedChunk to remaining
         // without metadata piece
+        // TODO Is that okay?
+        // @ts-expect-errors
         unparsedChunk = parsedMetadataResults.unparsedChunk;
     }
     if (this.lastChunk) {
@@ -269,7 +306,7 @@ class V4Transform extends Transform {
             // final callback
             err => {
                 if (err) {
-                    return this.cb(err);
+                    return this.cb(err as any);
                 }
                 // get next chunk
                 return callback();
@@ -277,5 +314,3 @@ class V4Transform extends Transform {
         );
     }
 }
-
-module.exports = V4Transform;
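A hypothetical wiring of the transform: the incoming request stream is piped through it, so only authenticated data chunks reach storage (the stream names below are placeholders, not from this changeset):

const v4Transform = new V4Transform(streamingV4Params, vault, log, cb);
request.pipe(v4Transform).pipe(storageWriteStream);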
@@ -1,32 +0,0 @@
const crypto = require('crypto');

const constants = require('../../../constants');

/**
 * Constructs stringToSign for chunk
 * @param {string} timestamp - date parsed from headers
 * in ISO 8601 format: YYYYMMDDTHHMMSSZ
 * @param {string} credentialScope - items from auth
 * header plus the string 'aws4_request' joined with '/':
 * timestamp/region/aws-service/aws4_request
 * @param {string} lastSignature - signature from headers or prior chunk
 * @param {string} justDataChunk - data portion of chunk
 * @returns {string} stringToSign
 */
function constructChunkStringToSign(timestamp,
    credentialScope, lastSignature, justDataChunk) {
    let currentChunkHash;
    // for last chunk, there will be no data, so use emptyStringHash
    if (!justDataChunk) {
        currentChunkHash = constants.emptyStringHash;
    } else {
        currentChunkHash = crypto.createHash('sha256');
        currentChunkHash = currentChunkHash
            .update(justDataChunk, 'binary').digest('hex');
    }
    return `AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
        `${credentialScope}\n${lastSignature}\n` +
        `${constants.emptyStringHash}\n${currentChunkHash}`;
}

module.exports = constructChunkStringToSign;
@@ -0,0 +1,35 @@
import * as crypto from 'crypto';
import * as constants from '../../../constants';

/**
 * Constructs stringToSign for chunk
 * @param timestamp - date parsed from headers
 * in ISO 8601 format: YYYYMMDDTHHMMSSZ
 * @param credentialScope - items from auth
 * header plus the string 'aws4_request' joined with '/':
 * timestamp/region/aws-service/aws4_request
 * @param lastSignature - signature from headers or prior chunk
 * @param justDataChunk - data portion of chunk
 * @returns stringToSign
 */
export default function constructChunkStringToSign(
    timestamp: string,
    credentialScope: string,
    lastSignature: string,
    justDataChunk?: Buffer | string,
) {
    let currentChunkHash: string;
    // for last chunk, there will be no data, so use emptyStringHash
    if (!justDataChunk) {
        currentChunkHash = constants.emptyStringHash;
    } else {
        const hash = crypto.createHash('sha256');
        const temp = justDataChunk instanceof Buffer
            ? hash.update(justDataChunk)
            : hash.update(justDataChunk, 'binary');
        currentChunkHash = temp.digest('hex');
    }
    return `AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
        `${credentialScope}\n${lastSignature}\n` +
        `${constants.emptyStringHash}\n${currentChunkHash}`;
}
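Each chunk's stringToSign chains the previous chunk's signature, which is what makes tampering with any chunk in the stream detectable. A sketch for the final, zero-byte chunk (timestamp and scope are placeholders):

import constructChunkStringToSign from './constructChunkStringToSign';

// Final empty chunk: the data hash collapses to the empty-string hash.
const sts = constructChunkStringToSign(
    '20160202T220410Z',                   // timestamp (placeholder)
    '20160202/us-east-1/s3/aws4_request', // credentialScope (placeholder)
    previousSignature,                    // signature of the prior chunk, or
                                          // the seed signature from headers
);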
@@ -1,12 +1,11 @@
-'use strict'; // eslint-disable-line strict
+import { Logger } from 'werelogs';
 
 /**
  * Convert timestamp to milliseconds since Unix Epoch
- * @param {string} timestamp of ISO8601Timestamp format without
+ * @param timestamp of ISO8601Timestamp format without
  * dashes or colons, e.g. 20160202T220410Z
- * @return {number} number of milliseconds since Unix Epoch
  */
-function convertAmzTimeToMs(timestamp) {
+export function convertAmzTimeToMs(timestamp: string) {
     const arr = timestamp.split('');
     // Convert to YYYY-MM-DDTHH:mm:ss.sssZ
     const ISO8601time = `${arr.slice(0, 4).join('')}-${arr[4]}${arr[5]}` +
@@ -15,13 +14,12 @@ function convertAmzTimeToMs(timestamp) {
     return Date.parse(ISO8601time);
 }
 
 
 /**
  * Convert UTC timestamp to ISO 8601 timestamp
- * @param {string} timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT
+ * @param timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT
- * @return {string} ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ
+ * @return ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ
  */
-function convertUTCtoISO8601(timestamp) {
+export function convertUTCtoISO8601(timestamp: string | number) {
     // convert to ISO string: YYYY-MM-DDTHH:mm:ss.sssZ.
     const converted = new Date(timestamp).toISOString();
     // Remove "-"s and "."s and milliseconds
@@ -30,13 +28,13 @@ function convertUTCtoISO8601(timestamp) {
 
 /**
  * Check whether timestamp predates request or is too old
- * @param {string} timestamp of ISO8601Timestamp format without
+ * @param timestamp of ISO8601Timestamp format without
  * dashes or colons, e.g. 20160202T220410Z
- * @param {number} expiry - number of seconds signature should be valid
+ * @param expiry - number of seconds signature should be valid
- * @param {object} log - log for request
+ * @param log - log for request
- * @return {boolean} true if there is a time problem
+ * @return true if there is a time problem
  */
-function checkTimeSkew(timestamp, expiry, log) {
+export function checkTimeSkew(timestamp: string, expiry: number, log: Logger) {
     const currentTime = Date.now();
     const fifteenMinutes = (15 * 60 * 1000);
     const parsedTimestamp = convertAmzTimeToMs(timestamp);
@@ -56,5 +54,3 @@ function checkTimeSkew(timestamp, expiry, log) {
     }
     return false;
 }
-
-module.exports = { convertAmzTimeToMs, convertUTCtoISO8601, checkTimeSkew };
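A short sketch of how these time helpers combine when validating a signed request; the module path and logger name are assumptions for illustration:

import { Logger } from 'werelogs';
import { convertAmzTimeToMs, convertUTCtoISO8601, checkTimeSkew } from './timeUtils';

const log = new Logger('example');
// '20160202T220410Z' -> milliseconds since the Unix Epoch
const ms = convertAmzTimeToMs('20160202T220410Z');
// 'Fri, 10 Feb 2012 21:34:55 GMT' -> '20120210T213455Z'
const iso = convertUTCtoISO8601('Fri, 10 Feb 2012 21:34:55 GMT');
// true if the timestamp is in the future (beyond 15 minutes of skew)
// or the 900-second validity window has already elapsed
const hasTimeProblem = checkTimeSkew('20160202T220410Z', 900, log);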
@@ -1,17 +1,19 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../../lib/errors').default;
+import { Logger } from 'werelogs';
+import errors from '../../../lib/errors';
 
 /**
  * Validate Credentials
- * @param {array} credentials - contains accessKey, scopeDate,
+ * @param credentials - contains accessKey, scopeDate,
  * region, service, requestType
- * @param {string} timestamp - timestamp from request in
+ * @param timestamp - timestamp from request in
  * the format of ISO 8601: YYYYMMDDTHHMMSSZ
- * @param {object} log - logging object
+ * @param log - logging object
- * @return {boolean} true if credentials are correct format, false if not
  */
-function validateCredentials(credentials, timestamp, log) {
+export function validateCredentials(
+    credentials: [string, string, string, string, string],
+    timestamp: string,
+    log: Logger
+): Error | {} {
     if (!Array.isArray(credentials) || credentials.length !== 5) {
         log.warn('credentials in improper format', { credentials });
         return errors.InvalidArgument;
@@ -58,12 +60,21 @@ function validateCredentials(credentials, timestamp, log) {
 
 /**
  * Extract and validate components from query object
- * @param {object} queryObj - query object from request
+ * @param queryObj - query object from request
- * @param {object} log - logging object
+ * @param log - logging object
- * @return {object} object containing extracted query params for authV4
+ * @return object containing extracted query params for authV4
  */
-function extractQueryParams(queryObj, log) {
-    const authParams = {};
+export function extractQueryParams(
+    queryObj: { [key: string]: string | undefined },
+    log: Logger
+) {
+    const authParams: {
+        signedHeaders?: string;
+        signatureFromRequest?: string;
+        timestamp?: string;
+        expiry?: number;
+        credential?: [string, string, string, string, string];
+    } = {};
 
     // Do not need the algorithm sent back
     if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') {
@@ -99,7 +110,7 @@ function extractQueryParams(queryObj, log) {
         return authParams;
     }
 
-    const expiry = Number.parseInt(queryObj['X-Amz-Expires'], 10);
+    const expiry = Number.parseInt(queryObj['X-Amz-Expires'] ?? 'nope', 10);
     const sevenDays = 604800;
     if (expiry && (expiry > 0 && expiry <= sevenDays)) {
         authParams.expiry = expiry;
@@ -110,6 +121,7 @@ function extractQueryParams(queryObj, log) {
 
     const credential = queryObj['X-Amz-Credential'];
     if (credential && credential.length > 28 && credential.indexOf('/') > -1) {
+        // @ts-ignore
         authParams.credential = credential.split('/');
     } else {
         log.warn('invalid credential param', { credential });
@@ -121,14 +133,17 @@ function extractQueryParams(queryObj, log) {
 
 /**
  * Extract and validate components from auth header
- * @param {string} authHeader - authorization header from request
+ * @param authHeader - authorization header from request
- * @param {object} log - logging object
+ * @param log - logging object
- * @return {object} object containing extracted auth header items for authV4
+ * @return object containing extracted auth header items for authV4
  */
-function extractAuthItems(authHeader, log) {
-    const authItems = {};
-    const authArray = authHeader
-        .replace('AWS4-HMAC-SHA256 ', '').split(',');
+export function extractAuthItems(authHeader: string, log: Logger) {
+    const authItems: {
+        credentialsArr?: [string, string, string, string, string];
+        signedHeaders?: string;
+        signatureFromRequest?: string;
+    } = {};
+    const authArray = authHeader.replace('AWS4-HMAC-SHA256 ', '').split(',');
 
     if (authArray.length < 3) {
         return authItems;
@@ -138,8 +153,12 @@ function extractAuthItems(authHeader, log) {
     const signedHeadersStr = authArray[1];
     const signatureStr = authArray[2];
     log.trace('credentials from request', { credentialStr });
-    if (credentialStr && credentialStr.trim().startsWith('Credential=')
-        && credentialStr.indexOf('/') > -1) {
+    if (
+        credentialStr &&
+        credentialStr.trim().startsWith('Credential=') &&
+        credentialStr.indexOf('/') > -1
+    ) {
+        // @ts-ignore
         authItems.credentialsArr = credentialStr
             .trim().replace('Credential=', '').split('/');
     } else {
@@ -166,11 +185,11 @@ function extractAuthItems(authHeader, log) {
 /**
  * Checks whether the signed headers include the host header
  * and all x-amz- and x-scal- headers in request
- * @param {string} signedHeaders - signed headers sent with request
+ * @param signedHeaders - signed headers sent with request
- * @param {object} allHeaders - request.headers
+ * @param allHeaders - request.headers
- * @return {boolean} true if all x-amz-headers included and false if not
+ * @return true if all x-amz-headers included and false if not
  */
-function areSignedHeadersComplete(signedHeaders, allHeaders) {
+export function areSignedHeadersComplete(signedHeaders: string, allHeaders: Headers) {
     const signedHeadersList = signedHeaders.split(';');
     if (signedHeadersList.indexOf('host') === -1) {
         return false;
@@ -185,6 +204,3 @@ function areSignedHeadersComplete(signedHeaders, allHeaders) {
     }
     return true;
 }
-
-module.exports = { validateCredentials, extractQueryParams,
-    areSignedHeadersComplete, extractAuthItems };
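As a rough sketch of the parsing flow these helpers implement (the header value, access key and module path are illustrative assumptions):

import { Logger } from 'werelogs';
import { extractAuthItems, validateCredentials } from './validateInputs';

const log = new Logger('example');
const authHeader = 'AWS4-HMAC-SHA256 ' +
    'Credential=AKIAIOSFODNN7EXAMPLE/20160202/us-east-1/s3/aws4_request,' +
    'SignedHeaders=host;x-amz-date,' +
    'Signature=fe5f80f77d5fa3beca038a248ff027d0445342fe2855ddc963176630326f1024';

// Splits the header into credentials, signed headers and signature.
const { credentialsArr } = extractAuthItems(authHeader, log);
// credentialsArr is [accessKey, scopeDate, region, service, requestType];
// validateCredentials returns an Arsenal error object on a malformed scope.
const check = credentialsArr
    ? validateCredentials(credentialsArr, '20160202T220410Z', log)
    : undefined;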
@@ -0,0 +1,569 @@
import cluster, { Worker } from 'cluster';
import * as werelogs from 'werelogs';

import { default as errors } from '../../lib/errors';

const rpcLogger = new werelogs.Logger('ClusterRPC');

/**
 * Remote procedure calls support between cluster workers.
 *
 * When using the cluster module, new processes are forked and are
 * dispatched workloads, usually HTTP requests. The ClusterRPC module
 * implements an RPC system to send commands to all cluster worker
 * processes at once from any particular worker, and retrieve their
 * individual command results, like a distributed map operation.
 *
 * The existing nodejs cluster IPC channel is set up from the primary
 * to each worker, but not between workers, so there has to be a hop
 * by the primary.
 *
 * How a command is treated:
 *
 * - a worker sends a command message to the primary
 *
 * - the primary then forwards that command to each existing worker
 *   (including the requestor)
 *
 * - each worker then executes the command and returns a result or an
 *   error
 *
 * - the primary gathers all workers' results into an array
 *
 * - finally, the primary dispatches the results array to the original
 *   requesting worker
 *
 *
 * Limitations:
 *
 * - The command payload must be serializable, which means that:
 *   - it should not contain circular references
 *   - it should be of a reasonable size to be sent in a single RPC message
 *
 * - The "toWorkers" parameter of value "*" targets the set of workers
 *   that are available at the time the command is dispatched. Any new
 *   worker spawned after the command has been dispatched for
 *   processing, but before the command completes, does not execute
 *   the command and hence is not part of the results array.
 *
 *
 * To set it up:
 *
 * - On the primary:
 *   if (cluster.isPrimary) {
 *       setupRPCPrimary();
 *   }
 *
 * - On the workers:
 *   if (!cluster.isPrimary) {
 *       setupRPCWorker({
 *           handler1: (payload: object, uids: string, callback: HandlerCallback) => void,
 *           handler2: ...
 *       });
 *   }
 *   Handler functions will be passed the command payload, request
 *   serialized uids, and must call the callback when the worker is done
 *   processing the command:
 *       callback(error: Error | null | undefined, result?: any)
 *
 * When this setup is done, any worker can start sending commands by calling
 * the async function sendWorkerCommand().
 */

// exported types

export type ResultObject = {
    error: Error | null;
    result: any;
};

/**
 * saved Promise for sendWorkerCommand
 */
export type CommandPromise = {
    resolve: (results?: ResultObject[]) => void;
    reject: (error: Error) => void;
    timeout: NodeJS.Timeout | null;
};
export type HandlerCallback = (error: (Error & { code?: number }) | null | undefined, result?: any) => void;
export type HandlerFunction = (payload: object, uids: string, callback: HandlerCallback) => void;
export type HandlersMap = {
    [index: string]: HandlerFunction;
};
export type PrimaryHandlerFunction = (worker: Worker, payload: object, uids: string, callback: HandlerCallback) => void;
export type PrimaryHandlersMap = Record<string, PrimaryHandlerFunction>;

// private types

type RPCMessage<T extends string, P> = {
    type: T;
    uids: string;
    payload: P;
};

type RPCCommandMessage = RPCMessage<'cluster-rpc:command', any> & {
    toWorkers: string;
    toHandler: string;
};

type MarshalledResultObject = {
    error: string | null;
    errorCode?: number;
    result: any;
};

type RPCCommandResultMessage = RPCMessage<'cluster-rpc:commandResult', MarshalledResultObject>;

type RPCCommandResultsMessage = RPCMessage<'cluster-rpc:commandResults', {
    results: MarshalledResultObject[];
}>;

type RPCCommandErrorMessage = RPCMessage<'cluster-rpc:commandError', {
    error: string;
}>;

interface RPCSetupOptions {
    /**
     * As werelogs is not a peerDependency, arsenal and a parent project
     * might have their own separate versions duplicated in dependencies.
     * The config is therefore not shared.
     * Use this to propagate werelogs config to arsenal's ClusterRPC.
     */
    werelogsConfig?: Parameters<typeof werelogs.configure>[0];
};

/**
 * In primary: store worker IDs that are waiting to be dispatched
 * their command's results, as a mapping.
 */
const uidsToWorkerId: {
    [index: string]: number;
} = {};


/**
 * In primary: store worker responses for commands in progress as a
 * mapping.
 *
 * Result objects are 'null' while the worker is still processing the
 * command. When a worker finishes processing it stores the result as:
 *     {
 *         error: string | null,
 *         result: any
 *     }
 */
const uidsToCommandResults: {
    [index: string]: {
        [index: number]: MarshalledResultObject | null;
    };
} = {};

/**
 * In workers: store promise callbacks for commands waiting to be
 * dispatched, as a mapping.
 */
const uidsToCommandPromise: {
    [index: string]: CommandPromise;
} = {};


function _isRpcMessage(message) {
    return (message !== null &&
            typeof message === 'object' &&
            typeof message.type === 'string' &&
            message.type.startsWith('cluster-rpc:'));
}

/**
 * Setup cluster RPC system on the primary
 *
 * @param {object} [handlers] - mapping of handler names to handler functions
 *     handler function:
 *     `handler({Worker} worker, {object} payload, {string} uids, {function} callback)`
 *     handler callback must be called when worker is done with the command:
 *     `callback({Error|null} error, {any} [result])`
 * @return {undefined}
 */
export function setupRPCPrimary(handlers?: PrimaryHandlersMap, options?: RPCSetupOptions) {
    if (options?.werelogsConfig) {
        werelogs.configure(options.werelogsConfig);
    }
    cluster.on('message', (worker, message) => {
        if (_isRpcMessage(message)) {
            _handlePrimaryMessage(worker, message, handlers);
        }
    });
}

/**
 * Setup RPCs on a cluster worker process
 *
 * @param {object} handlers - mapping of handler names to handler functions
 *     handler function:
 *     handler({object} payload, {string} uids, {function} callback)
 *     handler callback must be called when worker is done with the command:
 *     callback({Error|null} error, {any} [result])
 * @return {undefined}
 */
export function setupRPCWorker(handlers: HandlersMap, options?: RPCSetupOptions) {
    if (!process.send) {
        throw new Error('fatal: cannot setup cluster RPC: "process.send" is not available');
    }
    if (options?.werelogsConfig) {
        werelogs.configure(options.werelogsConfig);
    }
    process.on('message', (message: RPCCommandMessage | RPCCommandResultsMessage) => {
        if (_isRpcMessage(message)) {
            _handleWorkerMessage(message, handlers);
        }
    });
}

/**
 * Send a command for workers to execute in parallel, and wait for results
 *
 * @param {string} toWorkers - which workers should execute the command
 *     Currently the supported values are:
 *     - "*", meaning all workers will execute the command
 *     - "PRIMARY", meaning primary process will execute the command
 * @param {string} toHandler - name of handler that will execute the
 * command in workers, as declared in setupRPCWorker() parameter object
 * @param {string} uids - unique identifier of the command, must be
 * unique across all commands in progress
 * @param {object} payload - message payload, sent as-is to the handler
 * @param {number} [timeoutMs=60000] - timeout the command with a
 * "RequestTimeout" error after this number of milliseconds - set to 0
 * to disable timeouts (the command may then hang forever)
 * @returns {Promise}
 */
export async function sendWorkerCommand(
    toWorkers: string,
    toHandler: string,
    uids: string,
    payload: object,
    timeoutMs: number = 60000
) {
    if (typeof uids !== 'string') {
        rpcLogger.error('missing or invalid "uids" field', { uids });
        throw errors.MissingParameter;
    }
    if (uidsToCommandPromise[uids] !== undefined) {
        rpcLogger.error('a command is already in progress with same uids', { uids });
        throw errors.OperationAborted;
    }
    rpcLogger.info('sending command', { toWorkers, toHandler, uids, payload });
    return new Promise((resolve, reject) => {
        let timeout: NodeJS.Timeout | null = null;
        if (timeoutMs) {
            timeout = setTimeout(() => {
                delete uidsToCommandPromise[uids];
                reject(errors.RequestTimeout);
            }, timeoutMs);
        }
        uidsToCommandPromise[uids] = { resolve, reject, timeout };
        const message: RPCCommandMessage = {
            type: 'cluster-rpc:command',
            toWorkers,
            toHandler,
            uids,
            payload,
        };
        return process.send?.(message);
    });
}

/**
 * Get the number of commands in flight
 * @returns {number}
 */
export function getPendingCommandsCount() {
    return Object.keys(uidsToCommandPromise).length;
}


function _dispatchCommandResultsToWorker(
    worker: Worker,
    uids: string,
    resultsArray: MarshalledResultObject[]
): void {
    const message: RPCCommandResultsMessage = {
        type: 'cluster-rpc:commandResults',
        uids,
        payload: {
            results: resultsArray,
        },
    };
    worker.send(message);
}

function _dispatchCommandErrorToWorker(
    worker: Worker,
    uids: string,
    error: Error,
): void {
    const message: RPCCommandErrorMessage = {
        type: 'cluster-rpc:commandError',
        uids,
        payload: {
            error: error.message,
        },
    };
    worker.send(message);
}

function _sendPrimaryCommandResult(
    worker: Worker,
    uids: string,
    error: (Error & { code?: number }) | null | undefined,
    result?: any
): void {
    const message: RPCCommandResultsMessage = {
        type: 'cluster-rpc:commandResults',
        uids,
        payload: {
            results: [{ error: error?.message || null, errorCode: error?.code, result }],
        },
    };
    worker.send?.(message);
}

function _handlePrimaryCommandMessage(
    fromWorker: Worker,
    logger: any,
    message: RPCCommandMessage,
    handlers?: PrimaryHandlersMap
): void {
    const { toWorkers, toHandler, uids, payload } = message;
    if (toWorkers === '*') {
        if (uidsToWorkerId[uids] !== undefined) {
            logger.warn('new command already has a waiting worker with same uids', {
                uids, workerId: uidsToWorkerId[uids],
            });
            return undefined;
        }
        const commandResults = {};
        for (const workerId of Object.keys(cluster.workers || {})) {
            commandResults[workerId] = null;
        }
        uidsToWorkerId[uids] = fromWorker?.id;
        uidsToCommandResults[uids] = commandResults;

        for (const [workerId, worker] of Object.entries(cluster.workers || {})) {
            logger.debug('sending command message to worker', {
                workerId, toHandler, payload,
            });
            if (worker) {
                worker.send(message);
            }
        }
    } else if (toWorkers === 'PRIMARY') {
        const { toHandler, uids, payload } = message;
        const cb: HandlerCallback = (err, result) => _sendPrimaryCommandResult(fromWorker, uids, err, result);

        if (toHandler in (handlers || {})) {
            return handlers![toHandler](fromWorker, payload, uids, cb);
        }
        logger.error('no such handler in "toHandler" field from worker command message', {
            toHandler,
        });
        return cb(errors.NotImplemented);
    } else {
        logger.error('unsupported "toWorkers" field from worker command message', {
            toWorkers,
        });
        if (fromWorker) {
            _dispatchCommandErrorToWorker(fromWorker, uids, errors.NotImplemented);
        }
    }
}

function _handlePrimaryCommandResultMessage(
    fromWorkerId: number,
    logger: any,
    message: RPCCommandResultMessage
): void {
    const { uids, payload } = message;
    const commandResults = uidsToCommandResults[uids];
    if (!commandResults) {
        logger.warn('received command response message from worker for command not in flight', {
            workerId: fromWorkerId,
            uids,
        });
        return undefined;
    }
    if (commandResults[fromWorkerId] === undefined) {
        logger.warn('received command response message with unexpected worker ID', {
            workerId: fromWorkerId,
            uids,
        });
        return undefined;
    }
    if (commandResults[fromWorkerId] !== null) {
        logger.warn('ignoring duplicate command response from worker', {
            workerId: fromWorkerId,
            uids,
        });
        return undefined;
    }
    commandResults[fromWorkerId] = payload;
    const commandResultsArray = Object.values(commandResults);
    if (commandResultsArray.every(response => response !== null)) {
        logger.debug('all workers responded to command', { uids });
        const completeCommandResultsArray = <MarshalledResultObject[]> commandResultsArray;
        const toWorkerId = uidsToWorkerId[uids];
        const toWorker = cluster.workers?.[toWorkerId];

        delete uidsToCommandResults[uids];
        delete uidsToWorkerId[uids];

        if (!toWorker) {
            logger.warn('worker shut down while its command was executing', {
                workerId: toWorkerId, uids,
            });
            return undefined;
        }
        // send back response to original worker
        _dispatchCommandResultsToWorker(toWorker, uids, completeCommandResultsArray);
    }
}

function _handlePrimaryMessage(
    fromWorker: Worker,
    message: RPCCommandMessage | RPCCommandResultMessage,
    handlers?: PrimaryHandlersMap
): void {
    const { type: messageType, uids } = message;
    const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
    logger.debug('primary received message from worker', {
        workerId: fromWorker?.id, rpcMessage: message,
    });
    if (messageType === 'cluster-rpc:command') {
        return _handlePrimaryCommandMessage(fromWorker, logger, message, handlers);
    }
    if (messageType === 'cluster-rpc:commandResult') {
        return _handlePrimaryCommandResultMessage(fromWorker?.id, logger, message);
    }
    logger.error('unsupported message type', {
        workerId: fromWorker?.id, messageType, uids,
    });
    return undefined;
}

function _sendWorkerCommandResult(
    uids: string,
    error: Error | null | undefined,
    result?: any
): void {
    const message: RPCCommandResultMessage = {
        type: 'cluster-rpc:commandResult',
        uids,
        payload: {
            error: error ? error.message : null,
            result,
        },
    };
    process.send?.(message);
}

function _handleWorkerCommandMessage(
    logger: any,
    message: RPCCommandMessage,
    handlers: HandlersMap
): void {
    const { toHandler, uids, payload } = message;
    const cb: HandlerCallback = (err, result) => _sendWorkerCommandResult(uids, err, result);

    if (toHandler in handlers) {
        return handlers[toHandler](payload, uids, cb);
    }
    logger.error('no such handler in "toHandler" field from worker command message', {
        toHandler,
    });
    return cb(errors.NotImplemented);
}

function _handleWorkerCommandResultsMessage(
    logger: any,
    message: RPCCommandResultsMessage,
): void {
    const { uids, payload } = message;
    const { results } = payload;
    const commandPromise: CommandPromise = uidsToCommandPromise[uids];
    if (commandPromise === undefined) {
        logger.error('missing promise for command results', { uids, payload });
        return undefined;
    }
    if (commandPromise.timeout) {
        clearTimeout(commandPromise.timeout);
    }
    delete uidsToCommandPromise[uids];
    const unmarshalledResults = results.map(workerResult => {
        let workerError: Error | null = null;
        if (workerResult.error) {
            if (workerResult.error in errors) {
                workerError = errors[workerResult.error];
            } else {
                workerError = new Error(workerResult.error);
            }
        }
        if (workerError && workerResult.errorCode) {
            (workerError as Error & { code: number }).code = workerResult.errorCode;
        }
        const unmarshalledResult: ResultObject = {
            error: workerError,
            result: workerResult.result,
        };
        return unmarshalledResult;
    });
    return commandPromise.resolve(unmarshalledResults);
}

function _handleWorkerCommandErrorMessage(
    logger: any,
    message: RPCCommandErrorMessage,
): void {
    const { uids, payload } = message;
    const { error } = payload;
    const commandPromise: CommandPromise = uidsToCommandPromise[uids];
    if (commandPromise === undefined) {
        logger.error('missing promise for command results', { uids, payload });
        return undefined;
    }
    if (commandPromise.timeout) {
        clearTimeout(commandPromise.timeout);
    }
    delete uidsToCommandPromise[uids];
    let commandError: Error | null = null;
    if (error in errors) {
        commandError = errors[error];
    } else {
        commandError = new Error(error);
    }
    return commandPromise.reject(<Error> commandError);
}

function _handleWorkerMessage(
    message: RPCCommandMessage | RPCCommandResultsMessage | RPCCommandErrorMessage,
    handlers: HandlersMap
): void {
    const { type: messageType, uids } = message;
    const workerId = cluster.worker?.id;
    const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
    logger.debug('worker received message from primary', {
        workerId, rpcMessage: message,
    });
    if (messageType === 'cluster-rpc:command') {
        return _handleWorkerCommandMessage(logger, message, handlers);
    }
    if (messageType === 'cluster-rpc:commandResults') {
        return _handleWorkerCommandResultsMessage(logger, message);
    }
    if (messageType === 'cluster-rpc:commandError') {
        return _handleWorkerCommandErrorMessage(logger, message);
    }
    logger.error('unsupported message type', {
        workerId, messageType,
    });
    return undefined;
}
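Putting the primary and worker sides together, a hedged end-to-end sketch; the module path, handler name and payload are invented for illustration and follow the setup pattern in the module doc comment above:

import cluster from 'cluster';
import { setupRPCPrimary, setupRPCWorker, sendWorkerCommand, ResultObject } from './clusterRPC';

if (cluster.isPrimary) {
    setupRPCPrimary();
    cluster.fork();
    cluster.fork();
} else {
    setupRPCWorker({
        // Hypothetical handler: report this worker's heap usage.
        getHeapUsed: (payload, uids, callback) => {
            callback(null, process.memoryUsage().heapUsed);
        },
    });
    // Fan the command out to every live worker; the promise resolves with
    // one ResultObject per worker that was alive at dispatch time. The uid
    // includes the pid so concurrent senders do not collide on the primary.
    sendWorkerCommand('*', 'getHeapUsed', `heap-${process.pid}`, {})
        .then((results) => {
            for (const { error, result } of results as ResultObject[]) {
                console.log(error ?? result);
            }
        });
}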
lib/constants.js
@@ -1,121 +0,0 @@
'use strict'; // eslint-disable-line strict

// The min value here is to manage further backward compat if we
// need it
const iamSecurityTokenSizeMin = 128;
const iamSecurityTokenSizeMax = 128;
// Security token is an hex string (no real format from amazon)
const iamSecurityTokenPattern =
    new RegExp(`^[a-f0-9]{${iamSecurityTokenSizeMin},` +
               `${iamSecurityTokenSizeMax}}$`);

module.exports = {
    // info about the iam security token
    iamSecurityToken: {
        min: iamSecurityTokenSizeMin,
        max: iamSecurityTokenSizeMax,
        pattern: iamSecurityTokenPattern,
    },
    // PublicId is used as the canonicalID for a request that contains
    // no authentication information. Requestor can access
    // only public resources
    publicId: 'http://acs.amazonaws.com/groups/global/AllUsers',
    zenkoServiceAccount: 'http://acs.zenko.io/accounts/service',
    metadataFileNamespace: '/MDFile',
    dataFileURL: '/DataFile',
    // AWS states max size for user-defined metadata
    // (x-amz-meta- headers) is 2 KB:
    // http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
    // In testing, AWS seems to allow up to 88 more bytes,
    // so we do the same.
    maximumMetaHeadersSize: 2136,
    emptyFileMd5: 'd41d8cd98f00b204e9800998ecf8427e',
    // Version 2 changes the format of the data location property
    // Version 3 adds the dataStoreName attribute
    mdModelVersion: 3,
    /*
     * Splitter is used to build the object name for the overview of a
     * multipart upload and to build the object names for each part of a
     * multipart upload. These objects with large names are then stored in
     * metadata in a "shadow bucket" to a real bucket. The shadow bucket
     * contains all ongoing multipart uploads. We include in the object
     * name some of the info we might need to pull about an open multipart
     * upload or about an individual part with each piece of info separated
     * by the splitter. We can then extract each piece of info by splitting
     * the object name string with this splitter.
     * For instance, assuming a splitter of '...!*!',
     * the name of the upload overview would be:
     * overview...!*!objectKey...!*!uploadId
     * For instance, the name of a part would be:
     * uploadId...!*!partNumber
     *
     * The sequence of characters used in the splitter should not occur
     * elsewhere in the pieces of info to avoid splitting where not
     * intended.
     *
     * Splitter is also used in adding bucketnames to the
     * namespacerusersbucket. The object names added to the
     * namespaceusersbucket are of the form:
     * canonicalID...!*!bucketname
     */

    splitter: '..|..',
    usersBucket: 'users..bucket',
    // MPU Bucket Prefix is used to create the name of the shadow
    // bucket used for multipart uploads. There is one shadow mpu
    // bucket per bucket and its name is the mpuBucketPrefix followed
    // by the name of the final destination bucket for the object
    // once the multipart upload is complete.
    mpuBucketPrefix: 'mpuShadowBucket',
    // since aws s3 does not allow capitalized buckets, these may be
    // used for special internal purposes
    permittedCapitalizedBuckets: {
        METADATA: true,
    },
    /* eslint-disable camelcase */
    externalBackends: { aws_s3: true, azure: true, gcp: true, pfs: true },
    hasCopyPartBackends: { aws_s3: true, gcp: true },
    versioningNotImplBackends: { azure: true, gcp: true },
    mpuMDStoredExternallyBackend: { aws_s3: true, gcp: true },
    /* eslint-enable camelcase */
    // Default expiration value of the S3 pre-signed URL duration
    // 604800 seconds (seven days).
    defaultPreSignedURLExpiry: 7 * 24 * 60 * 60,
    // Regex for ISO-8601 formatted date
    shortIso8601Regex: /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z/,
    longIso8601Regex: /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/,
    supportedNotificationEvents: new Set([
        's3:ObjectCreated:*',
        's3:ObjectCreated:Put',
        's3:ObjectCreated:Copy',
        's3:ObjectCreated:CompleteMultipartUpload',
        's3:ObjectRemoved:*',
        's3:ObjectRemoved:Delete',
        's3:ObjectRemoved:DeleteMarkerCreated',
    ]),
    notificationArnPrefix: 'arn:scality:bucketnotif',
    // some of the available data backends (if called directly rather
    // than through the multiple backend gateway) need a key provided
    // as a string as first parameter of the get/delete methods.
    clientsRequireStringKey: { sproxyd: true, cdmi: true },
    // HTTP server keep-alive timeout is set to a higher value than
    // client's free sockets timeout to avoid the risk of triggering
    // ECONNRESET errors if the server closes the connection at the
    // exact moment clients attempt to reuse an established connection
    // for a new request.
    //
    // Note: the ability to close inactive connections on the client
    // after httpClientFreeSocketsTimeout milliseconds requires the
    // use of "agentkeepalive" module instead of the regular node.js
    // http.Agent.
    httpServerKeepAliveTimeout: 60000,
    httpClientFreeSocketTimeout: 55000,
    supportedLifecycleRules: [
        'expiration',
        'noncurrentVersionExpiration',
        'abortIncompleteMultipartUpload',
    ],
    // Maximum number of buckets to cache (bucket metadata)
    maxCachedBuckets: process.env.METADATA_MAX_CACHED_BUCKETS ?
        Number(process.env.METADATA_MAX_CACHED_BUCKETS) : 1000,
};
@@ -0,0 +1,177 @@
import * as crypto from 'crypto';

// The min value here is to manage further backward compat if we
// need it
// Default value
export const vaultGeneratedIamSecurityTokenSizeMin = 128;
// Safe to assume that a typical token size is less than 8192 bytes
export const vaultGeneratedIamSecurityTokenSizeMax = 8192;
// Base-64
export const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/;

// info about the iam security token
export const iamSecurityToken = {
    min: vaultGeneratedIamSecurityTokenSizeMin,
    max: vaultGeneratedIamSecurityTokenSizeMax,
    pattern: vaultGeneratedIamSecurityTokenPattern,
};
// PublicId is used as the canonicalID for a request that contains
// no authentication information. Requestor can access
// only public resources
export const publicId = 'http://acs.amazonaws.com/groups/global/AllUsers';
export const zenkoServiceAccount = 'http://acs.zenko.io/accounts/service';
export const metadataFileNamespace = '/MDFile';
export const dataFileURL = '/DataFile';
export const passthroughFileURL = '/PassthroughFile';
// AWS states max size for user-defined metadata
// (x-amz-meta- headers) is 2 KB:
// http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
// In testing, AWS seems to allow up to 88 more bytes,
// so we do the same.
export const maximumMetaHeadersSize = 2136;
export const emptyFileMd5 = 'd41d8cd98f00b204e9800998ecf8427e';
// Version 2 changes the format of the data location property
// Version 3 adds the dataStoreName attribute
// Version 4 adds the Creation-Time and Content-Language attributes,
// and adds support for x-ms-meta-* headers in UserMetadata
// Version 5 adds the azureInfo structure
// Version 6 adds a "deleted" flag that is updated to true before
// the object gets deleted. This is done to keep object metadata in the
// oplog when deleting the object, as oplog deletion events don't contain
// any metadata of the object.
// Version 6 also adds the "isPHD" flag that is used to indicate that the master
// object is a placeholder and is not up to date.
export const mdModelVersion = 6;
/*
 * Splitter is used to build the object name for the overview of a
 * multipart upload and to build the object names for each part of a
 * multipart upload. These objects with large names are then stored in
 * metadata in a "shadow bucket" to a real bucket. The shadow bucket
 * contains all ongoing multipart uploads. We include in the object
 * name some of the info we might need to pull about an open multipart
 * upload or about an individual part with each piece of info separated
 * by the splitter. We can then extract each piece of info by splitting
 * the object name string with this splitter.
 * For instance, assuming a splitter of '...!*!',
 * the name of the upload overview would be:
 * overview...!*!objectKey...!*!uploadId
 * For instance, the name of a part would be:
 * uploadId...!*!partNumber
 *
 * The sequence of characters used in the splitter should not occur
 * elsewhere in the pieces of info to avoid splitting where not
 * intended.
 *
 * Splitter is also used in adding bucketnames to the
 * namespaceusersbucket. The object names added to the
 * namespaceusersbucket are of the form:
 * canonicalID...!*!bucketname
 */

export const splitter = '..|..';
export const usersBucket = 'users..bucket';
// MPU Bucket Prefix is used to create the name of the shadow
// bucket used for multipart uploads. There is one shadow mpu
// bucket per bucket and its name is the mpuBucketPrefix followed
// by the name of the final destination bucket for the object
// once the multipart upload is complete.
export const mpuBucketPrefix = 'mpuShadowBucket';
// since aws s3 does not allow capitalized buckets, these may be
// used for special internal purposes
export const permittedCapitalizedBuckets = {
    METADATA: true,
};
// Setting a lower object key limit to account for:
// - Mongo key limit of 1012 bytes
// - Version ID in Mongo Key if versioned of 33
// - Max bucket name length if bucket match false of 63
// - Extra prefix slash for bucket prefix if bucket match of 1
export const objectKeyByteLimit = 915;
/* delimiter for location-constraint. The location constraint will be able
 * to include the ingestion flag
 */
export const zenkoSeparator = ':';
/* eslint-disable camelcase */
export const externalBackends = { aws_s3: true, azure: true, gcp: true, pfs: true };
export const replicationBackends = { aws_s3: true, azure: true, gcp: true };
// hex digest of sha256 hash of empty string:
export const emptyStringHash = crypto.createHash('sha256')
    .update('', 'binary').digest('hex');
export const mpuMDStoredExternallyBackend = { aws_s3: true, gcp: true };
// AWS sets a minimum size limit for parts except for the last part.
// http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
export const minimumAllowedPartSize = 5242880;
export const gcpMaximumAllowedPartCount = 1024;
// GCP Object Tagging Prefix
export const gcpTaggingPrefix = 'aws-tag-';
export const productName = 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko';
export const legacyLocations = ['sproxyd', 'legacy'];
// healthcheck default call from nginx is every 2 seconds
// for external backends, don't call unless at least 1 minute
// (60,000 milliseconds) since last call
export const externalBackendHealthCheckInterval = 60000;
// some of the available data backends (if called directly rather
// than through the multiple backend gateway) need a key provided
// as a string as first parameter of the get/delete methods.
export const clientsRequireStringKey = { sproxyd: true, cdmi: true };
export const hasCopyPartBackends = { aws_s3: true, gcp: true };
export const versioningNotImplBackends = { azure: true, gcp: true };
// user metadata applied on zenko-created objects
export const zenkoIDHeader = 'x-amz-meta-zenko-instance-id';
// Default expiration value of the S3 pre-signed URL duration
// 604800 seconds (seven days).
export const defaultPreSignedURLExpiry = 7 * 24 * 60 * 60;
// Regex for ISO-8601 formatted date
export const shortIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z/;
export const longIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/;
export const supportedNotificationEvents = new Set([
    's3:ObjectCreated:*',
    's3:ObjectCreated:Put',
    's3:ObjectCreated:Copy',
    's3:ObjectCreated:CompleteMultipartUpload',
    's3:ObjectRemoved:*',
    's3:ObjectRemoved:Delete',
    's3:ObjectRemoved:DeleteMarkerCreated',
    's3:Replication:OperationFailedReplication',
    's3:ObjectTagging:*',
    's3:ObjectTagging:Put',
    's3:ObjectTagging:Delete',
    's3:ObjectAcl:Put',
    's3:ObjectRestore:*',
    's3:ObjectRestore:Post',
    's3:ObjectRestore:Completed',
    's3:ObjectRestore:Delete',
    's3:LifecycleTransition',
    's3:LifecycleExpiration:*',
    's3:LifecycleExpiration:DeleteMarkerCreated',
    's3:LifecycleExpiration:Delete',
]);
export const notificationArnPrefix = 'arn:scality:bucketnotif';
// HTTP server keep-alive timeout is set to a higher value than
// client's free sockets timeout to avoid the risk of triggering
// ECONNRESET errors if the server closes the connection at the
// exact moment clients attempt to reuse an established connection
// for a new request.
//
// Note: the ability to close inactive connections on the client
// after httpClientFreeSocketsTimeout milliseconds requires the
// use of "agentkeepalive" module instead of the regular node.js
// http.Agent.
export const httpServerKeepAliveTimeout = 60000;
export const httpClientFreeSocketTimeout = 55000;
export const supportedLifecycleRules = [
    'expiration',
    'noncurrentVersionExpiration',
    'abortIncompleteMultipartUpload',
    'transitions',
    'noncurrentVersionTransition',
];
// Maximum number of buckets to cache (bucket metadata)
export const maxCachedBuckets = process.env.METADATA_MAX_CACHED_BUCKETS ?
    Number(process.env.METADATA_MAX_CACHED_BUCKETS) : 1000;

export const validRestoreObjectTiers = new Set(['Expedited', 'Standard', 'Bulk']);
export const maxBatchingConcurrentOperations = 5;

/** For policy resource arn check we allow empty account ID to not break compatibility */
export const policyArnAllowedEmptyAccountId = ['utapi', 'scuba'];
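To make the splitter convention above concrete, a small sketch of composing and decomposing an MPU overview key; the bucket and object names are invented for illustration, and the import path assumes the constants module shown in this diff:

import { splitter, mpuBucketPrefix } from './constants';

// Shadow bucket holding all ongoing multipart uploads for bucket 'photos':
const shadowBucket = `${mpuBucketPrefix}photos`;
// Overview key for an upload of 'cat.jpg':
const overviewKey = ['overview', 'cat.jpg', 'some-upload-id'].join(splitter);
// String.prototype.split treats the splitter literally (no regex), so the
// pieces come back out unchanged:
const [marker, objectKey, uploadId] = overviewKey.split(splitter);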
@ -1,7 +1,3 @@
|
||||||
'use strict'; // eslint-disable-line strict
|
|
||||||
|
|
||||||
const writeOptions = { sync: true };
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Like Error, but with a property set to true.
|
* Like Error, but with a property set to true.
|
||||||
* TODO: this is copied from kineticlib, should consolidate with the
|
* TODO: this is copied from kineticlib, should consolidate with the
|
||||||
|
@ -14,29 +10,36 @@ const writeOptions = { sync: true };
|
||||||
* use:
|
* use:
|
||||||
* throw propError("badTypeInput", "input is not a buffer");
|
* throw propError("badTypeInput", "input is not a buffer");
|
||||||
*
|
*
|
||||||
* @param {String} propName - the property name.
|
* @param propName - the property name.
|
||||||
* @param {String} message - the Error message.
|
* @param message - the Error message.
|
||||||
* @returns {Error} the Error object.
|
* @returns the Error object.
|
||||||
*/
|
*/
|
||||||
function propError(propName, message) {
|
function propError(propName: string, message: string): Error {
|
||||||
const err = new Error(message);
|
const err = new Error(message);
|
||||||
err[propName] = true;
|
err[propName] = true;
|
||||||
|
// @ts-ignore
|
||||||
|
err.is = { [propName]: true };
|
||||||
return err;
|
return err;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Running transaction with multiple updates to be committed atomically
|
* Running transaction with multiple updates to be committed atomically
|
||||||
*/
|
*/
|
||||||
class IndexTransaction {
|
export class IndexTransaction {
|
||||||
|
operations: { type: 'put' | 'del'; key: string; value?: any }[];
|
||||||
|
db: any;
|
||||||
|
closed: boolean;
|
||||||
|
conditions: { [key: string]: string }[];
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Builds a new transaction
|
* Builds a new transaction
|
||||||
*
|
*
|
||||||
* @argument {Leveldb} db an open database to which the updates
|
* @argument {Leveldb} db an open database to which the updates
|
||||||
* will be applied
|
* will be applied
|
||||||
*
|
*
|
||||||
* @returns {IndexTransaction} a new empty transaction
|
* @returns a new empty transaction
|
||||||
*/
|
*/
|
||||||
constructor(db) {
|
constructor(db: any) {
|
||||||
this.operations = [];
|
this.operations = [];
|
||||||
this.db = db;
|
this.db = db;
|
||||||
this.closed = false;
|
this.closed = false;
|
||||||
|
@ -46,30 +49,34 @@ class IndexTransaction {
|
||||||
/**
|
/**
|
||||||
* Adds a new operation to participate in this running transaction
|
* Adds a new operation to participate in this running transaction
|
||||||
*
|
*
|
||||||
* @argument {object} op an object with the following attributes:
|
* @argument op an object with the following attributes:
|
||||||
* {
|
* {
|
||||||
* type: 'put' or 'del',
|
* type: 'put' or 'del',
|
||||||
* key: the object key,
|
* key: the object key,
|
||||||
* value: (optional for del) the value to store,
|
* value: (optional for del) the value to store,
|
||||||
* }
|
* }
|
||||||
*
|
*
|
||||||
* @throws {Error} an error described by the following properties
|
* @throws an error described by the following properties
|
||||||
* - invalidTransactionVerb if op is not put or del
|
* - invalidTransactionVerb if op is not put or del
|
||||||
* - pushOnCommittedTransaction if already committed
|
* - pushOnCommittedTransaction if already committed
|
||||||
* - missingKey if the key is missing from the op
|
* - missingKey if the key is missing from the op
|
||||||
* - missingValue if putting without a value
|
* - missingValue if putting without a value
|
||||||
*
|
|
||||||
* @returns {undefined}
|
|
||||||
*/
|
*/
|
||||||
push(op) {
|
push(op: { type: 'put'; key: string; value: any }): void;
|
||||||
|
push(op: { type: 'del'; key: string }): void;
|
||||||
|
push(op: { type: 'put' | 'del'; key: string; value?: any }): void {
|
||||||
if (this.closed) {
|
if (this.closed) {
|
||||||
throw propError('pushOnCommittedTransaction',
|
throw propError(
|
||||||
'can not add ops to already committed transaction');
|
'pushOnCommittedTransaction',
|
||||||
|
'can not add ops to already committed transaction'
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (op.type !== 'put' && op.type !== 'del') {
|
if (op.type !== 'put' && op.type !== 'del') {
|
||||||
throw propError('invalidTransactionVerb',
|
throw propError(
|
||||||
`unknown action type: ${op.type}`);
|
'invalidTransactionVerb',
|
||||||
|
`unknown action type: ${op.type}`
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (op.key === undefined) {
|
if (op.key === undefined) {
|
||||||
|
@ -93,57 +100,59 @@ class IndexTransaction {
|
||||||
* - pushOnCommittedTransaction if already committed
|
* - pushOnCommittedTransaction if already committed
|
||||||
* - missingKey if the key is missing from the op
|
* - missingKey if the key is missing from the op
|
||||||
* - missingValue if putting without a value
|
* - missingValue if putting without a value
|
||||||
*
|
|
||||||
* @returns {undefined}
|
|
||||||
*
|
|
||||||
* @see push
|
* @see push
|
||||||
*/
|
*/
|
||||||
put(key, value) {
|
put(key: string, value: any) {
|
||||||
this.push({ type: 'put', key, value });
|
this.push({ type: 'put', key, value });
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Adds a new del operation to this running transaction
|
* Adds a new del operation to this running transaction
|
||||||
*
|
*
|
||||||
-     * @argument {string} key - the key of the object to delete
+     * @argument key - the key of the object to delete
      *
-     * @throws {Error} an error described by the following properties
+     * @throws an error described by the following properties
      * - pushOnCommittedTransaction if already committed
      * - missingKey if the key is missing from the op
      *
-     * @returns {undefined}
-     *
      * @see push
      */
-    del(key) {
+    del(key: string) {
         this.push({ type: 'del', key });
     }

     /**
      * Adds a condition for the transaction
      *
-     * @argument {object} condition an object with the following attributes:
+     * @argument condition an object with the following attributes:
      * {
      * <condition>: the object key
      * }
      * example: { notExists: 'key1' }
      *
-     * @throws {Error} an error described by the following properties
+     * @throws an error described by the following properties
      * - pushOnCommittedTransaction if already committed
      * - missingCondition if the condition is empty
      *
-     * @returns {undefined}
      */
-    addCondition(condition) {
+    addCondition(condition: { [key: string]: string }) {
         if (this.closed) {
-            throw propError('pushOnCommittedTransaction',
-                'can not add conditions to already committed transaction');
+            throw propError(
+                'pushOnCommittedTransaction',
+                'can not add conditions to already committed transaction'
+            );
         }
         if (condition === undefined || Object.keys(condition).length === 0) {
-            throw propError('missingCondition', 'missing condition for conditional put');
+            throw propError(
+                'missingCondition',
+                'missing condition for conditional put'
+            );
         }
-        if (typeof (condition.notExists) !== 'string') {
-            throw propError('unsupportedConditionalOperation', 'missing key or supported condition');
+        if (typeof condition.notExists !== 'string' && typeof condition.exists !== 'string') {
+            throw propError(
+                'unsupportedConditionalOperation',
+                'missing key or supported condition'
+            );
         }
         this.conditions.push(condition);
     }

@@ -151,32 +160,35 @@ class IndexTransaction {
     /**
      * Applies the queued updates in this transaction atomically.
      *
-     * @argument {function} cb function to be called when the commit
+     * @argument cb function to be called when the commit
      * finishes, taking an optional error argument
      *
-     * @returns {undefined}
      */
-    commit(cb) {
+    commit(cb: (error: Error | null, data?: any) => void) {
         if (this.closed) {
-            return cb(propError('alreadyCommitted',
-                'transaction was already committed'));
+            return cb(
+                propError(
+                    'alreadyCommitted',
+                    'transaction was already committed'
+                )
+            );
         }

         if (this.operations.length === 0) {
-            return cb(propError('emptyTransaction',
-                'tried to commit an empty transaction'));
+            return cb(
+                propError(
+                    'emptyTransaction',
+                    'tried to commit an empty transaction'
+                )
+            );
         }

         this.closed = true;
-        writeOptions.conditions = this.conditions;
+        const options = { sync: true, conditions: this.conditions };

         // The array-of-operations variant of the `batch` method
         // allows passing options such as `sync: true` whereas the
         // chained form does not.
-        return this.db.batch(this.operations, writeOptions, cb);
+        return this.db.batch(this.operations, options, cb);
     }
 }

-module.exports = {
-    IndexTransaction,
-};
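
For orientation, a minimal hand-written sketch of how the reworked API above might be exercised; the `IndexTransaction` import path, its constructor argument, and the level-style `db` handle are assumptions, not part of the diff:

    import { IndexTransaction } from './IndexTransaction'; // path assumed

    // `db` is assumed to expose batch(operations, options, cb), as commit() requires.
    function conditionalDelete(db: any, key: string) {
        const transaction = new IndexTransaction(db); // constructor shape assumed
        transaction.addCondition({ exists: key }); // `exists` now accepted alongside `notExists`
        transaction.del(key);
        transaction.commit((err: Error | null) => {
            if (err) {
                // e.g. alreadyCommitted, emptyTransaction, or a conditional failure
                console.error('commit failed:', err);
            }
        });
    }
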
@@ -1,13 +0,0 @@
-function reshapeExceptionError(error) {
-    const { message, code, stack, name } = error;
-    return {
-        message,
-        code,
-        stack,
-        name,
-    };
-}
-
-module.exports = {
-    reshapeExceptionError,
-};
@@ -0,0 +1,11 @@
+export interface ErrorLike {
+    message: any;
+    code: any;
+    stack: any;
+    name: any;
+}
+
+export function reshapeExceptionError(error: ErrorLike) {
+    const { message, code, stack, name } = error;
+    return { message, code, stack, name };
+}
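
As a quick illustration (hand-written, not part of the commit), the new helper strips an exception down to a serializable shape:

    import { reshapeExceptionError } from './utils'; // path assumed

    try {
        JSON.parse('not json');
    } catch (err) {
        // Only message/code/stack/name survive; the cast is needed because a
        // plain Error has no `code` field while ErrorLike requires one.
        const plain = reshapeExceptionError(err as any);
        console.log(plain.name, plain.message);
    }
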
@@ -42,7 +42,7 @@ export const BucketAlreadyOwnedByYou: ErrorFormat = {
     code: 409,

     description:
-        'Your previous request to create the named bucket succeeded and you already own it. You get this error in all AWS regions except US Standard, us-east-1. In us-east-1 region, you will get 200 OK, but it is no-op (if bucket exists S3 will not do anything).',
+        'A bucket with this name exists and is already owned by you',
 };

 export const BucketNotEmpty: ErrorFormat = {
@@ -365,6 +365,11 @@ export const NoSuchWebsiteConfiguration: ErrorFormat = {
     description: 'The specified bucket does not have a website configuration',
 };

+export const NoSuchTagSet: ErrorFormat = {
+    code: 404,
+    description: 'The TagSet does not exist',
+};
+
 export const NoSuchUpload: ErrorFormat = {
     code: 404,
     description:
@@ -685,6 +690,11 @@ export const ReportNotPresent: ErrorFormat = {
         'The request was rejected because the credential report does not exist. To generate a credential report, use GenerateCredentialReport.',
 };

+export const Found: ErrorFormat = {
+    code: 302,
+    description: 'Resource Found'
+};
+
 // ------------- Special non-AWS S3 errors -------------

 export const MPUinProgress: ErrorFormat = {
@@ -1032,3 +1042,15 @@ export const AuthMethodNotImplemented: ErrorFormat = {
     description: 'AuthMethodNotImplemented',
     code: 501,
 };
+
+// --------------------- quotaErrors ---------------------
+
+export const NoSuchQuota: ErrorFormat = {
+    code: 404,
+    description: 'The specified resource does not have a quota.',
+};
+
+export const QuotaExceeded: ErrorFormat = {
+    code: 429,
+    description: 'The quota set for the resource is exceeded.',
+};
@@ -2,7 +2,7 @@ import type { ServerResponse } from 'http';
 import * as rawErrors from './arsenalErrors';

 /** All possible errors names. */
-export type Name = keyof typeof rawErrors
+export type Name = keyof typeof rawErrors;
 /** Object containing all errors names. It has the format { [Name]: "Name" } */
 export type Names = { [Name_ in Name]: Name_ };
 /** Mapping used to determine an error type. It has the format { [Name]: boolean } */
@@ -13,9 +13,16 @@ export type Errors = { [_ in Name]: ArsenalError };
 // This object is reused constantly through createIs, we store it there
 // to avoid recomputation.
 const isBase = Object.fromEntries(
-    Object.keys(rawErrors).map(key => [key, false])
+    Object.keys(rawErrors).map((key) => [key, false])
 ) as Is;

+// This allows to conditionally add the old behavior of errors to properly
+// test migration.
+// Activate CI tests with `ALLOW_UNSAFE_ERROR_COMPARISON=false yarn test`.
+// Remove this mechanism in ARSN-176.
+export const allowUnsafeErrComp = (
+    process.env.ALLOW_UNSAFE_ERROR_COMPARISON ?? 'true') === 'true'
+
 // This contains some metaprog. Be careful.
 // Proxy can be found on MDN.
 // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy
@@ -25,7 +32,7 @@ const isBase = Object.fromEntries(
 // the Proxy will return false.
 const createIs = (type: Name): Is => {
     const get = (is: Is, value: string | symbol) => is[value] ?? false;
-    const final = Object.freeze({ ...isBase, [type]: true })
+    const final = Object.freeze({ ...isBase, [type]: true });
     return new Proxy(final, { get });
 };

@@ -39,13 +46,27 @@ export class ArsenalError extends Error {
     /** Object used to determine the error type.
      * Example: error.is.InternalError */
     #is: Is;
+    /** A map of error metadata (can be extra fields
+     * that only show in debug mode) */
+    #metadata: Map<string, Object[]>;

-    private constructor(type: Name, code: number, description: string) {
+    private constructor(type: Name, code: number, description: string,
+        metadata?: Map<string, Object[]>) {
         super(type);
         this.#code = code;
         this.#description = description;
         this.#type = type;
         this.#is = createIs(type);
+        this.#metadata = metadata ?? new Map<string, Object[]>();
+
+        // This restores the old behavior of errors, to make sure they're now
+        // backward-compatible. Fortunately it's handled by TS, but it cannot
+        // be type-checked. This means we have to be extremely careful about
+        // what we're doing when using errors.
+        // Disables the feature when in CI tests but not in production.
+        if (allowUnsafeErrComp) {
+            this[type] = true;
+        }
     }

     /** Output the error as a JSON string */
@@ -55,6 +76,30 @@ export class ArsenalError extends Error {
         return JSON.stringify({ errorType, errorMessage });
     }

+    flatten() {
+        return {
+            is_arsenal_error: true,
+            code: this.#code,
+            description: this.#description,
+            type: this.#type,
+            stack: this.stack
+        }
+    }
+
+    static unflatten(flat_obj) {
+        if (!flat_obj.is_arsenal_error) {
+            return null;
+        }
+
+        const err = new ArsenalError(
+            flat_obj.type,
+            flat_obj.code,
+            flat_obj.description
+        )
+        err.stack = flat_obj.stack
+        return err;
+    }
+
     /** Write the error in an HTTP response */
     writeResponse(res: ServerResponse) {
         res.writeHead(this.#code);
@@ -66,7 +111,22 @@ export class ArsenalError extends Error {
     customizeDescription(description: string): ArsenalError {
         const type = this.#type;
         const code = this.#code;
-        return new ArsenalError(type, code, description);
+        const metadata = new Map(this.#metadata);
+        const err = new ArsenalError(type, code, description, metadata);
+        err.stack = this.stack;
+        return err;
+    }
+
+    /** Clone the error with a new metadata field */
+    addMetadataEntry(key: string, value: Object[]): ArsenalError {
+        const type = this.#type;
+        const code = this.#code;
+        const description = this.#description;
+        const metadata = new Map(this.#metadata);
+        metadata.set(key, value);
+        const err = new ArsenalError(type, code, description, metadata);
+        err.stack = this.stack;
+        return err;
     }

     /** Used to determine the error type. Example: error.is.InternalError */
@@ -91,9 +151,14 @@ export class ArsenalError extends Error {
         return this.#type;
     }

+    /** A map of error metadata */
+    get metadata() {
+        return this.#metadata;
+    }
+
     /** Generate all possible errors. An instance is created by default. */
     static errors() {
-        const errors = {}
+        const errors = {};
         Object.entries(rawErrors).forEach((value) => {
             const name = value[0] as Name;
             const error = value[1];
@@ -101,7 +166,7 @@ export class ArsenalError extends Error {
             const get = () => new ArsenalError(name, code, description);
             Object.defineProperty(errors, name, { get });
         });
-        return errors as Errors
+        return errors as Errors;
     }
 }
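
Taken together, the additions above let an ArsenalError carry metadata and survive a serialization boundary. A hedged sketch of the intended round trip; the `errors` export name is assumed from the surrounding module:

    import { errors, ArsenalError } from './errors'; // export names assumed

    const err = errors.NoSuchQuota
        .customizeDescription('no quota configured for this bucket')
        .addMetadataEntry('bucket', [{ name: 'example-bucket' }]);

    // flatten() keeps type, code, description and stack; metadata is not carried.
    const wire = JSON.stringify(err.flatten());
    const revived = ArsenalError.unflatten(JSON.parse(wire));
    if (revived && revived.is.NoSuchQuota) {
        // same type, code and customized description as the original
    }
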
@@ -7,8 +7,8 @@
         "test": "mocha --recursive --timeout 5500 tests/unit"
     },
     "dependencies": {
-        "mocha": "2.5.3",
-        "async": "^2.6.0",
+        "mocha": "5.2.0",
+        "async": "~2.6.1",
         "node-forge": "^0.7.1"
     }
 }
@@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-
-const ciphers = [
+export const ciphers = [
     'DHE-RSA-AES128-GCM-SHA256',
     'ECDHE-ECDSA-AES128-GCM-SHA256',
     'ECDHE-RSA-AES256-GCM-SHA384',
@@ -28,7 +26,3 @@ const ciphers = [
     '!EDH-RSA-DES-CBC3-SHA',
     '!KRB5-DES-CBC3-SHA',
 ].join(':');
-
-module.exports = {
-    ciphers,
-};
@@ -29,16 +29,11 @@ c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe
 bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg==
 -----END DH PARAMETERS-----
 */
-'use strict'; // eslint-disable-line strict
-
-const dhparam =
+export const dhparam =
     'MIIBCAKCAQEAh99T77KGNuiY9N6xrCJ3QNv4SFADTa3CD+1VMTAdRJLHUNpglB+i' +
     'AoTYiLDFZgtTCpx0ZZUD+JM3qiCZy0OK5/ZGlVD7sZmxjRtdpVK4qIPtwav8t0J7' +
     'c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe' +
     '23PLGZulKg8H3eRBxHamHkmyuEVDtoNhMIoJONsdXSpo5GgcD4EQMM8xb/qsnCxn' +
     '6QIGTBvcHskxtlTZOfUPk4XQ6Yb3tQi2TurzkQHLln4U7p/GZs+D+6D3SgSPqr6P' +
     'bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg==';

-module.exports = {
-    dhparam,
-};
@@ -0,0 +1,2 @@
+export * as ciphers from './ciphers'
+export * as dhparam from './dh2048'
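
A minimal sketch of how these two exports might feed Node's TLS options; the server setup is standard Node `https` usage, not part of the diff, and whether Node accepts the raw base64 body for `dhparam` here or needs the PEM header lines is worth checking against how the callers wrap it:

    import * as https from 'https';
    import { ciphers } from './ciphers';  // colon-separated cipher string
    import { dhparam } from './dh2048';   // 2048-bit DH parameters (base64 body)

    const server = https.createServer({
        // key and cert omitted; supply your own PEM material
        ciphers,
        dhparam,
        honorCipherOrder: true, // prefer the server's cipher ordering
    }, (req, res) => res.end('ok'));
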
@@ -1,83 +0,0 @@
-'use strict'; // eslint-disable-line strict
-
-const ipaddr = require('ipaddr.js');
-
-/**
- * checkIPinRangeOrMatch checks whether a given ip address is in an ip address
- * range or matches the given ip address
- * @param {string} cidr - ip address range or ip address
- * @param {object} ip - parsed ip address
- * @return {boolean} true if in range, false if not
- */
-function checkIPinRangeOrMatch(cidr, ip) {
-    // If there is an exact match of the ip address, no need to check ranges
-    if (ip.toString() === cidr) {
-        return true;
-    }
-    let range;
-
-    try {
-        range = ipaddr.IPv4.parseCIDR(cidr);
-    } catch (err) {
-        try {
-            // not ipv4 so try ipv6
-            range = ipaddr.IPv6.parseCIDR(cidr);
-        } catch (err) {
-            // range is not valid ipv4 or ipv6
-            return false;
-        }
-    }
-    try {
-        return ip.match(range);
-    } catch (err) {
-        return false;
-    }
-}
-
-/**
- * Parse IP address into object representation
- * @param {string} ip - IPV4/IPV6/IPV4-mapped IPV6 address
- * @return {object} parsedIp - Object representation of parsed IP
- */
-function parseIp(ip) {
-    if (ipaddr.IPv4.isValid(ip)) {
-        return ipaddr.parse(ip);
-    }
-    if (ipaddr.IPv6.isValid(ip)) {
-        // also parses IPv6 mapped IPv4 addresses into IPv4 representation
-        return ipaddr.process(ip);
-    }
-    // not valid ip address according to module, so return empty object
-    // which will obviously not match a range of ip addresses that the parsedIp
-    // is being tested against
-    return {};
-}
-
-
-/**
- * Checks if an IP adress matches a given list of CIDR ranges
- * @param {string[]} cidrList - List of CIDR ranges
- * @param {string} ip - IP address
- * @return {boolean} - true if there is match or false for no match
- */
-function ipMatchCidrList(cidrList, ip) {
-    const parsedIp = parseIp(ip);
-    return cidrList.some(item => {
-        let cidr;
-        // patch the cidr if range is not specified
-        if (item.indexOf('/') === -1) {
-            if (item.startsWith('127.')) {
-                cidr = `${item}/8`;
-            } else if (ipaddr.IPv4.isValid(item)) {
-                cidr = `${item}/32`;
-            }
-        }
-        return checkIPinRangeOrMatch(cidr || item, parsedIp);
-    });
-}
-
-module.exports = {
-    checkIPinRangeOrMatch,
-    ipMatchCidrList,
-    parseIp,
-};
@@ -0,0 +1,71 @@
+import ipaddr from 'ipaddr.js';
+
+/**
+ * checkIPinRangeOrMatch checks whether a given ip address is in an ip address
+ * range or matches the given ip address
+ * @param cidr - ip address range or ip address
+ * @param ip - parsed ip address
+ * @return true if in range, false if not
+ */
+export function checkIPinRangeOrMatch(
+    cidr: string,
+    ip: ipaddr.IPv4 | ipaddr.IPv6,
+): boolean {
+    // If there is an exact match of the ip address, no need to check ranges
+    if (ip.toString() === cidr) {
+        return true;
+    }
+    try {
+        if (ip instanceof ipaddr.IPv6) {
+            const range = ipaddr.IPv6.parseCIDR(cidr);
+            return ip.match(range);
+        } else {
+            const range = ipaddr.IPv4.parseCIDR(cidr);
+            return ip.match(range);
+        }
+    } catch (error) {
+        return false;
+    }
+}
+
+/**
+ * Parse IP address into object representation
+ * @param ip - IPV4/IPV6/IPV4-mapped IPV6 address
+ * @return parsedIp - Object representation of parsed IP
+ */
+export function parseIp(ip: string): ipaddr.IPv4 | ipaddr.IPv6 | {} {
+    if (ipaddr.IPv4.isValid(ip)) {
+        return ipaddr.parse(ip);
+    }
+    if (ipaddr.IPv6.isValid(ip)) {
+        // also parses IPv6 mapped IPv4 addresses into IPv4 representation
+        return ipaddr.process(ip);
+    }
+    return {};
+}
+
+/**
+ * Checks if an IP address matches a given list of CIDR ranges
+ * @param cidrList - List of CIDR ranges
+ * @param ip - IP address
+ * @return - true if there is match or false for no match
+ */
+export function ipMatchCidrList(cidrList: string[], ip: string): boolean {
+    const parsedIp = parseIp(ip);
+    return cidrList.some((item) => {
+        let cidr: string | undefined;
+        // patch the cidr if range is not specified
+        if (item.indexOf('/') === -1) {
+            if (item.startsWith('127.')) {
+                cidr = `${item}/8`;
+            } else if (ipaddr.IPv4.isValid(item)) {
+                cidr = `${item}/32`;
+            }
+        }
+        return (
+            (parsedIp instanceof ipaddr.IPv4 ||
+                parsedIp instanceof ipaddr.IPv6) &&
+            checkIPinRangeOrMatch(cidr || item, parsedIp)
+        );
+    });
+}
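
A brief hand-written usage sketch of the converted helpers; the import path is an assumption:

    import ipaddr from 'ipaddr.js';
    import { ipMatchCidrList, parseIp } from './ipCheck'; // path assumed

    // Bare addresses in the list are patched to /32 (or /8 for 127.*)
    const allowList = ['127.0.0.1', '10.0.0.0/8', '::1'];

    console.log(ipMatchCidrList(allowList, '10.1.2.3'));  // true
    console.log(ipMatchCidrList(allowList, '192.0.2.1')); // false

    // IPv4-mapped IPv6 input is normalized to its IPv4 form
    const parsed = parseIp('::ffff:127.0.0.1');
    console.log(parsed instanceof ipaddr.IPv4); // true
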
@@ -1,32 +0,0 @@
-'use strict'; // eslint-disable-line
-
-const debug = require('util').debuglog('jsutil');
-
-// JavaScript utility functions
-
-/**
- * force <tt>func</tt> to be called only once, even if actually called
- * multiple times. The cached result of the first call is then
- * returned (if any).
- *
- * @note underscore.js provides this functionality but not worth
- * adding a new dependency for such a small use case.
- *
- * @param {function} func function to call at most once
-
- * @return {function} a callable wrapper mirroring <tt>func</tt> but
- * only calls <tt>func</tt> at first invocation.
- */
-module.exports.once = function once(func) {
-    const state = { called: false, res: undefined };
-    return function wrapper(...args) {
-        if (!state.called) {
-            state.called = true;
-            state.res = func.apply(func, args);
-        } else {
-            debug('function already called:', func,
-                'returning cached result:', state.res);
-        }
-        return state.res;
-    };
-};
@@ -0,0 +1,33 @@
+import * as util from 'util';
+const debug = util.debuglog('jsutil');
+
+// JavaScript utility functions
+
+/**
+ * force <tt>func</tt> to be called only once, even if actually called
+ * multiple times. The cached result of the first call is then
+ * returned (if any).
+ *
+ * @note underscore.js provides this functionality but not worth
+ * adding a new dependency for such a small use case.
+ *
+ * @param func function to call at most once
+
+ * @return a callable wrapper mirroring <tt>func</tt> but
+ * only calls <tt>func</tt> at first invocation.
+ */
+export function once<T>(func: (...args: any[]) => T): (...args: any[]) => T {
+    type State = { called: boolean; res: any };
+    const state: State = { called: false, res: undefined };
+    return function wrapper(...args: any[]) {
+        if (!state.called) {
+            state.called = true;
+            state.res = func.apply(func, args);
+        } else {
+            const m1 = 'function already called:';
+            const m2 = 'returning cached result:';
+            debug(m1, func, m2, state.res);
+        }
+        return state.res;
+    };
+}
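
Behavior is unchanged by the conversion; for reference, a small hand-written example:

    import { once } from './jsutil'; // path assumed

    const connect = once((url: string) => {
        console.log('connecting to', url);
        return { url };
    });

    const a = connect('redis://localhost:6379'); // runs func, caches result
    const b = connect('redis://other:6379');     // skipped; cached result returned
    console.log(a === b); // true
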
@@ -1,162 +0,0 @@
-const Redis = require('ioredis');
-
-class RedisClient {
-    /**
-     * @constructor
-     * @param {Object} config - config
-     * @param {string} config.host - Redis host
-     * @param {number} config.port - Redis port
-     * @param {string} config.password - Redis password
-     * @param {werelogs.Logger} logger - logger instance
-     */
-    constructor(config, logger) {
-        this._client = new Redis(config);
-        this._client.on('error', err =>
-            logger.trace('error from redis', {
-                error: err,
-                method: 'RedisClient.constructor',
-                redisHost: config.host,
-                redisPort: config.port,
-            })
-        );
-        return this;
-    }
-
-    /**
-     * increment value of a key by 1 and set a ttl
-     * @param {string} key - key holding the value
-     * @param {number} expiry - expiry in seconds
-     * @param {callback} cb - callback
-     * @return {undefined}
-     */
-    incrEx(key, expiry, cb) {
-        return this._client
-            .multi([['incr', key], ['expire', key, expiry]])
-            .exec(cb);
-    }
-
-    /**
-     * increment value of a key by a given amount and set a ttl
-     * @param {string} key - key holding the value
-     * @param {number} amount - amount to increase by
-     * @param {number} expiry - expiry in seconds
-     * @param {callback} cb - callback
-     * @return {undefined}
-     */
-    incrbyEx(key, amount, expiry, cb) {
-        return this._client
-            .multi([['incrby', key, amount], ['expire', key, expiry]])
-            .exec(cb);
-    }
-
-    /**
-     * execute a batch of commands
-     * @param {string[]} cmds - list of commands
-     * @param {callback} cb - callback
-     * @return {undefined}
-     */
-    batch(cmds, cb) {
-        return this._client.pipeline(cmds).exec(cb);
-    }
-
-    /**
-     * Checks if a key exists
-     * @param {string} key - name of key
-     * @param {function} cb - callback
-     * If cb response returns 0, key does not exist.
-     * If cb response returns 1, key exists.
-     * @return {undefined}
-     */
-    exists(key, cb) {
-        return this._client.exists(key, cb);
-    }
-
-    /**
-     * Add a value and its score to a sorted set. If no sorted set exists, this
-     * will create a new one for the given key.
-     * @param {string} key - name of key
-     * @param {integer} score - score used to order set
-     * @param {string} value - value to store
-     * @param {callback} cb - callback
-     * @return {undefined}
-     */
-    zadd(key, score, value, cb) {
-        return this._client.zadd(key, score, value, cb);
-    }
-
-    /**
-     * Get number of elements in a sorted set.
-     * Note: using this on a key that does not exist will return 0.
-     * Note: using this on an existing key that isn't a sorted set will
-     * return an error WRONGTYPE.
-     * @param {string} key - name of key
-     * @param {function} cb - callback
-     * @return {undefined}
-     */
-    zcard(key, cb) {
-        return this._client.zcard(key, cb);
-    }
-
-    /**
-     * Get the score for given value in a sorted set
-     * Note: using this on a key that does not exist will return nil.
-     * Note: using this on a value that does not exist in a valid sorted set key
-     * will return nil.
-     * @param {string} key - name of key
-     * @param {string} value - value within sorted set
-     * @param {function} cb - callback
-     * @return {undefined}
-     */
-    zscore(key, value, cb) {
-        return this._client.zscore(key, value, cb);
-    }
-
-    /**
-     * Remove a value from a sorted set
-     * @param {string} key - name of key
-     * @param {string|array} value - value within sorted set. Can specify
-     * multiple values within an array
-     * @param {function} cb - callback
-     * The cb response returns number of values removed
-     * @return {undefined}
-     */
-    zrem(key, value, cb) {
-        return this._client.zrem(key, value, cb);
-    }
-
-    /**
-     * Get specified range of elements in a sorted set
-     * @param {string} key - name of key
-     * @param {integer} start - start index (inclusive)
-     * @param {integer} end - end index (inclusive) (can use -1)
-     * @param {function} cb - callback
-     * @return {undefined}
-     */
-    zrange(key, start, end, cb) {
-        return this._client.zrange(key, start, end, cb);
-    }
-
-    /**
-     * Get range of elements in a sorted set based off score
-     * @param {integer|string} min - min score value (inclusive)
-     * (can use "-inf")
-     * @param {integer|string} max - max score value (inclusive)
-     * (can use "+inf")
-     * @param {string} key - name of key
-     * @param {function} cb - callback
-     * @return {undefined}
-     */
-    zrangebyscore(key, min, max, cb) {
-        return this._client.zrangebyscore(key, min, max, cb);
-    }
-
-    clear(cb) {
-        return this._client.flushdb(cb);
-    }
-
-    disconnect() {
-        this._client.disconnect();
-    }
-}
-
-module.exports = RedisClient;
@@ -0,0 +1,218 @@
+import Redis from 'ioredis';
+import { Logger } from 'werelogs';
+
+export type Config = { host: string; port: number; password: string };
+export type Callback = (error: Error | null, value?: any) => void;
+
+export default class RedisClient {
+    _client: Redis.Redis;
+
+    constructor(config: Config, logger: Logger) {
+        this._client = new Redis(config);
+        this._client.on('error', err =>
+            logger.trace('error from redis', {
+                error: err,
+                method: 'RedisClient.constructor',
+                redisHost: config.host,
+                redisPort: config.port,
+            })
+        );
+        return this;
+    }
+
+    /**
+     * scan a pattern and return matching keys
+     * @param pattern - string pattern to match with all existing keys
+     * @param [count=10] - scan count
+     * @param cb - callback (error, result)
+     */
+    scan(pattern: string, count = 10, cb: Callback) {
+        const params = { match: pattern, count };
+        const keys: any[] = [];
+
+        const stream = this._client.scanStream(params);
+        stream.on('data', resultKeys => {
+            for (let i = 0; i < resultKeys.length; i++) {
+                keys.push(resultKeys[i]);
+            }
+        });
+        stream.on('end', () => {
+            cb(null, keys);
+        });
+    }
+
+    /** increment value of a key by 1 and set a ttl
+     * @param key - key holding the value
+     * @param expiry - expiry in seconds
+     * @param cb - callback
+     */
+    incrEx(key: string, expiry: number, cb: Callback) {
+        const exp = expiry.toString();
+        return this._client
+            .multi([['incr', key], ['expire', key, exp]])
+            .exec(cb);
+    }
+
+    /**
+     * increment value of a key by a given amount
+     * @param key - key holding the value
+     * @param amount - amount to increase by
+     * @param cb - callback
+     */
+    incrby(key: string, amount: number, cb: Callback) {
+        return this._client.incrby(key, amount, cb);
+    }
+
+    /** increment value of a key by a given amount and set a ttl
+     * @param key - key holding the value
+     * @param amount - amount to increase by
+     * @param expiry - expiry in seconds
+     * @param cb - callback
+     */
+    incrbyEx(key: string, amount: number, expiry: number, cb: Callback) {
+        const am = amount.toString();
+        const exp = expiry.toString();
+        return this._client
+            .multi([['incrby', key, am], ['expire', key, exp]])
+            .exec(cb);
+    }
+
+    /**
+     * decrement value of a key by a given amount
+     * @param key - key holding the value
+     * @param amount - amount to decrease by
+     * @param cb - callback
+     */
+    decrby(key: string, amount: number, cb: Callback) {
+        return this._client.decrby(key, amount, cb);
+    }
+
+    /**
+     * execute a batch of commands
+     * @param cmds - list of commands
+     * @param cb - callback
+     * @return
+     */
+    batch(cmds: string[][], cb: Callback) {
+        return this._client.pipeline(cmds).exec(cb);
+    }
+
+    /**
+     * Checks if a key exists
+     * @param key - name of key
+     * @param cb - callback
+     * If cb response returns 0, key does not exist.
+     * If cb response returns 1, key exists.
+     */
+    exists(key: string, cb: Callback) {
+        return this._client.exists(key, cb);
+    }
+
+    /**
+     * get value stored at key
+     * @param key - key holding the value
+     * @param cb - callback
+     */
+    get(key: string, cb: Callback) {
+        return this._client.get(key, cb);
+    }
+
+    /**
+     * Add a value and its score to a sorted set. If no sorted set exists, this
+     * will create a new one for the given key.
+     * @param key - name of key
+     * @param score - score used to order set
+     * @param value - value to store
+     * @param cb - callback
+     */
+    zadd(key: string, score: number, value: string, cb: Callback) {
+        return this._client.zadd(key, score, value, cb);
+    }
+
+    /**
+     * Get number of elements in a sorted set.
+     * Note: using this on a key that does not exist will return 0.
+     * Note: using this on an existing key that isn't a sorted set will
+     * return an error WRONGTYPE.
+     * @param key - name of key
+     * @param cb - callback
+     */
+    zcard(key: string, cb: Callback) {
+        return this._client.zcard(key, cb);
+    }
+
+    /**
+     * Get the score for given value in a sorted set
+     * Note: using this on a key that does not exist will return nil.
+     * Note: using this on a value that does not exist in a valid sorted set key
+     * will return nil.
+     * @param key - name of key
+     * @param value - value within sorted set
+     * @param cb - callback
+     */
+    zscore(key: string, value: string, cb: Callback) {
+        return this._client.zscore(key, value, cb);
+    }
+
+    /**
+     * Remove a value from a sorted set
+     * @param key - name of key
+     * @param value - value within sorted set. Can specify
+     * multiple values within an array
+     * @param cb - callback
+     * The cb response returns number of values removed
+     */
+    zrem(key: string, value: string | string[], cb: Callback) {
+        return this._client.zrem(key, value, cb);
+    }
+
+    /**
+     * Get specified range of elements in a sorted set
+     * @param key - name of key
+     * @param start - start index (inclusive)
+     * @param end - end index (inclusive) (can use -1)
+     * @param cb - callback
+     */
+    zrange(key: string, start: number, end: number, cb: Callback) {
+        return this._client.zrange(key, start, end, cb);
+    }
+
+    /**
+     * Get range of elements in a sorted set based off score
+     * @param key - name of key
+     * @param min - min score value (inclusive)
+     * (can use "-inf")
+     * @param max - max score value (inclusive)
+     * (can use "+inf")
+     * @param cb - callback
+     */
+    zrangebyscore(
+        key: string,
+        min: number | string,
+        max: number | string,
+        cb: Callback,
+    ) {
+        return this._client.zrangebyscore(key, min, max, cb);
+    }
+
+    /**
+     * get TTL or expiration in seconds
+     * @param key - name of key
+     * @param cb - callback
+     */
+    ttl(key: string, cb: Callback) {
+        return this._client.ttl(key, cb);
+    }
+
+    clear(cb: Callback) {
+        return this._client.flushdb(cb);
+    }
+
+    disconnect() {
+        this._client.disconnect();
+    }
+
+    listClients(cb: Callback) {
+        return this._client.client('list', cb);
+    }
+}
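
A hedged sketch of driving the converted client; the werelogs constructor call is an assumption about that library's usual shape, and the connection parameters are illustrative:

    import { Logger } from 'werelogs';
    import RedisClient from './RedisClient'; // path assumed

    const logger = new Logger('RedisClientExample');
    const client = new RedisClient(
        { host: '127.0.0.1', port: 6379, password: '' },
        logger,
    );

    // Count an event and let the counter expire after one hour
    client.incrEx('example:requests', 3600, (err, res) => {
        if (err) {
            console.error('incrEx failed', err);
        }
        // res is the MULTI/EXEC reply array from ioredis
        client.disconnect();
    });
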
@@ -1,163 +0,0 @@
-const async = require('async');
-
-class StatsClient {
-    /**
-     * @constructor
-     * @param {object} redisClient - RedisClient instance
-     * @param {number} interval - sampling interval by seconds
-     * @param {number} expiry - sampling duration by seconds
-     */
-    constructor(redisClient, interval, expiry) {
-        this._redis = redisClient;
-        this._interval = interval;
-        this._expiry = expiry;
-        return this;
-    }
-
-    /*
-     * Utility function to use when callback is undefined
-     */
-    _noop() {}
-
-    /**
-     * normalize to the nearest interval
-     * @param {object} d - Date instance
-     * @return {number} timestamp - normalized to the nearest interval
-     */
-    _normalizeTimestamp(d) {
-        const s = d.getSeconds();
-        return d.setSeconds(s - s % this._interval, 0);
-    }
-
-    /**
-     * set timestamp to the previous interval
-     * @param {object} d - Date instance
-     * @return {number} timestamp - set to the previous interval
-     */
-    _setPrevInterval(d) {
-        return d.setSeconds(d.getSeconds() - this._interval);
-    }
-
-    /**
-     * build redis key to get total number of occurrences on the server
-     * @param {string} name - key name identifier
-     * @param {object} d - Date instance
-     * @return {string} key - key for redis
-     */
-    _buildKey(name, d) {
-        return `${name}:${this._normalizeTimestamp(d)}`;
-    }
-
-    /**
-     * reduce the array of values to a single value
-     * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
-     * @param {array} arr - Date instance
-     * @return {string} key - key for redis
-     */
-    _getCount(arr) {
-        return arr.reduce((prev, a) => {
-            let num = parseInt(a[1], 10);
-            num = Number.isNaN(num) ? 0 : num;
-            return prev + num;
-        }, 0);
-    }
-
-    /**
-     * report/record a new request received on the server
-     * @param {string} id - service identifier
-     * @param {number} incr - optional param increment
-     * @param {function} cb - callback
-     * @return {undefined}
-     */
-    reportNewRequest(id, incr, cb) {
-        if (!this._redis) {
-            return undefined;
-        }
-
-        let callback;
-        let amount;
-        if (typeof incr === 'function') {
-            // In case where optional `incr` is not passed, but `cb` is passed
-            callback = incr;
-            amount = 1;
-        } else {
-            callback = (cb && typeof cb === 'function') ? cb : this._noop;
-            amount = (typeof incr === 'number') ? incr : 1;
-        }
-
-        const key = this._buildKey(`${id}:requests`, new Date());
-
-        return this._redis.incrbyEx(key, amount, this._expiry, callback);
-    }
-
-    /**
-     * report/record a request that ended up being a 500 on the server
-     * @param {string} id - service identifier
-     * @param {callback} cb - callback
-     * @return {undefined}
-     */
-    report500(id, cb) {
-        if (!this._redis) {
-            return undefined;
-        }
-        const callback = cb || this._noop;
-        const key = this._buildKey(`${id}:500s`, new Date());
-        return this._redis.incrEx(key, this._expiry, callback);
-    }
-
-    /**
-     * get stats for the last x seconds, x being the sampling duration
-     * @param {object} log - Werelogs request logger
-     * @param {string} id - service identifier
-     * @param {callback} cb - callback to call with the err/result
-     * @return {undefined}
-     */
-    getStats(log, id, cb) {
-        if (!this._redis) {
-            return cb(null, {});
-        }
-        const d = new Date();
-        const totalKeys = Math.floor(this._expiry / this._interval);
-        const reqsKeys = [];
-        const req500sKeys = [];
-        for (let i = 0; i < totalKeys; i++) {
-            reqsKeys.push(['get', this._buildKey(`${id}:requests`, d)]);
-            req500sKeys.push(['get', this._buildKey(`${id}:500s`, d)]);
-            this._setPrevInterval(d);
-        }
-        return async.parallel([
-            next => this._redis.batch(reqsKeys, next),
-            next => this._redis.batch(req500sKeys, next),
-        ], (err, results) => {
-            /**
-             * Batch result is of the format
-             * [ [null, '1'], [null, '2'], [null, '3'] ] where each
-             * item is the result of the each batch command
-             * Foreach item in the result, index 0 signifies the error and
-             * index 1 contains the result
-             */
-            const statsRes = {
-                'requests': 0,
-                '500s': 0,
-                'sampleDuration': this._expiry,
-            };
-            if (err) {
-                log.error('error getting stats', {
-                    error: err,
-                    method: 'StatsClient.getStats',
-                });
-                /**
-                 * Redis for stats is not a critial component, ignoring
-                 * any error here as returning an InternalError
-                 * would be confused with the health of the service
-                 */
-                return cb(null, statsRes);
-            }
-            statsRes.requests = this._getCount(results[0]);
-            statsRes['500s'] = this._getCount(results[1]);
-            return cb(null, statsRes);
-        });
-    }
-}
-
-module.exports = StatsClient;
@@ -0,0 +1,230 @@
+import async from 'async';
+import RedisClient from './RedisClient';
+import { Logger } from 'werelogs';
+
+export type Callback = (error: Error | null, value?: any) => void;
+
+export default class StatsClient {
+    _redis: RedisClient;
+    _interval: number;
+    _expiry: number;
+
+    /**
+     * @constructor
+     * @param redisClient - RedisClient instance
+     * @param interval - sampling interval in seconds
+     * @param expiry - sampling duration in seconds
+     */
+    constructor(redisClient: RedisClient, interval: number, expiry: number) {
+        this._redis = redisClient;
+        this._interval = interval;
+        this._expiry = expiry;
+        return this;
+    }
+
+    /** Utility function to use when callback is undefined */
+    _noop() {}
+
+    /**
+     * normalize to the nearest interval
+     * @param d - Date instance
+     * @return timestamp - normalized to the nearest interval
+     */
+    _normalizeTimestamp(d: Date): number {
+        const s = d.getSeconds();
+        return d.setSeconds(s - s % this._interval, 0);
+    }
+
+    /**
+     * set timestamp to the previous interval
+     * @param d - Date instance
+     * @return timestamp - set to the previous interval
+     */
+    _setPrevInterval(d: Date): number {
+        return d.setSeconds(d.getSeconds() - this._interval);
+    }
+
+    /**
+     * build redis key to get total number of occurrences on the server
+     * @param name - key name identifier
+     * @param d - Date instance
+     * @return key - key for redis
+     */
+    buildKey(name: string, d: Date): string {
+        return `${name}:${this._normalizeTimestamp(d)}`;
+    }
+
+    /**
+     * reduce the array of values to a single value
+     * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
+     * @param arr - array of batch command results
+     * @return total count
+     */
+    _getCount(arr: [any, string | null][]): number {
+        return arr.reduce((prev, a) => {
+            let num = parseInt(a[1] ?? '', 10);
+            num = Number.isNaN(num) ? 0 : num;
+            return prev + num;
+        }, 0);
+    }
+
+    /**
+     * report/record a new request received on the server
+     * @param id - service identifier
+     * @param incr - optional param increment
+     */
+    reportNewRequest(
+        id: string,
+        incr?: number | ((error: Error | null, value?: any) => void),
+        cb?: (error: Error | null, value?: any) => void,
+    ) {
+        if (!this._redis) {
+            return undefined;
+        }
+
+        let callback: (error: Error | null, value?: any) => void;
+        let amount: number;
+        if (typeof incr === 'function') {
+            // In case where optional `incr` is not passed, but `cb` is passed
+            callback = incr;
+            amount = 1;
+        } else {
+            callback = (cb && typeof cb === 'function') ? cb : this._noop;
+            amount = (typeof incr === 'number') ? incr : 1;
+        }
+
+        const key = this.buildKey(`${id}:requests`, new Date());
+
+        return this._redis.incrbyEx(key, amount, this._expiry, callback);
+    }
+
+    /**
+     * Increment the given key by the given value.
+     * @param key - The Redis key to increment
+     * @param incr - The value to increment by
+     * @param [cb] - callback
+     */
+    incrementKey(key: string, incr: number, cb: Callback) {
+        const callback = cb || this._noop;
+        return this._redis.incrby(key, incr, callback);
+    }
+
+    /**
+     * Decrement the given key by the given value.
+     * @param key - The Redis key to decrement
+     * @param decr - The value to decrement by
+     * @param [cb] - callback
+     */
+    decrementKey(key: string, decr: number, cb: Callback) {
+        const callback = cb || this._noop;
+        return this._redis.decrby(key, decr, callback);
+    }
+
+    /**
+     * report/record a request that ended up being a 500 on the server
+     * @param id - service identifier
+     */
+    report500(id: string, cb?: (error: Error | null, value?: any) => void) {
+        if (!this._redis) {
+            return undefined;
+        }
+        const callback = cb || this._noop;
+        const key = this.buildKey(`${id}:500s`, new Date());
+        return this._redis.incrEx(key, this._expiry, callback);
+    }
+
+    /**
+     * wrapper on `getStats` that handles a list of keys
+     * @param log - Werelogs request logger
+     * @param ids - service identifiers
+     * @param cb - callback to call with the err/result
+     */
+    getAllStats(log: Logger, ids: string[], cb: Callback) {
+        if (!this._redis) {
+            return cb(null, {});
+        }
+
+        const statsRes = {
+            'requests': 0,
+            '500s': 0,
+            'sampleDuration': this._expiry,
+        };
+        let requests = 0;
+        let errors = 0;
+
+        // for now set concurrency to default of 10
+        return async.eachLimit(ids, 10, (id: string, done) => {
+            this.getStats(log, id, (err, res) => {
+                if (err) {
+                    return done(err);
+                }
+                requests += res.requests;
+                errors += res['500s'];
+                return done();
+            });
+        }, error => {
+            if (error) {
+                log.error('error getting stats', {
+                    error,
+                    method: 'StatsClient.getAllStats',
+                });
+                return cb(null, statsRes);
+            }
+            statsRes.requests = requests;
+            statsRes['500s'] = errors;
+            return cb(null, statsRes);
+        });
+    }
+
+    /**
+     * get stats for the last x seconds, x being the sampling duration
+     * @param log - Werelogs request logger
+     * @param id - service identifier
+     */
+    getStats(log: Logger, id: string, cb: (error: Error | null, value?: any) => void) {
+        if (!this._redis) {
+            return cb(null, {});
+        }
+        const d = new Date();
+        const totalKeys = Math.floor(this._expiry / this._interval);
+        const reqsKeys: ['get', string][] = [];
+        const req500sKeys: ['get', string][] = [];
+        for (let i = 0; i < totalKeys; i++) {
+            reqsKeys.push(['get', this.buildKey(`${id}:requests`, d)]);
+            req500sKeys.push(['get', this.buildKey(`${id}:500s`, d)]);
+            this._setPrevInterval(d);
+        }
+        return async.parallel([
+            next => this._redis.batch(reqsKeys, next),
+            next => this._redis.batch(req500sKeys, next),
+        ], (err, results) => {
+            /**
+             * Batch result is of the format
+             * [ [null, '1'], [null, '2'], [null, '3'] ] where each
+             * item is the result of each batch command
+             * For each item in the result, index 0 signifies the error and
+             * index 1 contains the result
+             */
+            const statsRes = {
+                'requests': 0,
+                '500s': 0,
+                'sampleDuration': this._expiry,
+            };
+            if (err) {
+                log.error('error getting stats', {
+                    error: err,
+                    method: 'StatsClient.getStats',
+                });
+                /**
+                 * Redis for stats is not a critical component, ignoring
+                 * any error here as returning an InternalError
+                 * would be confused with the health of the service
+                 */
+                return cb(null, statsRes);
+            }
+            statsRes.requests = this._getCount((results as any)[0]);
+            statsRes['500s'] = this._getCount((results as any)[1]);
+            return cb(null, statsRes);
+        });
+    }
+}
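
To tie the sampling pieces together, a hand-written sketch; the interval and expiry choices are illustrative only, and a plain werelogs Logger stands in for the request logger:

    import { Logger } from 'werelogs';
    import RedisClient from './RedisClient';   // paths assumed
    import StatsClient from './StatsClient';

    const logger = new Logger('StatsExample');
    const redis = new RedisClient(
        { host: '127.0.0.1', port: 6379, password: '' },
        logger,
    );

    // 300 s sampling interval, samples kept for 1800 s
    const stats = new StatsClient(redis, 300, 1800);

    stats.reportNewRequest('s3', 1, () => {});
    stats.getStats(logger, 's3', (err, res) => {
        // res: { requests, '500s', sampleDuration }
        console.log(res);
    });
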
@@ -1,120 +0,0 @@
-const StatsClient = require('./StatsClient');
-/**
- * @class StatsModel
- *
- * @classdesc Extend and overwrite how timestamps are normalized by minutes
- * rather than by seconds
- */
-class StatsModel extends StatsClient {
-    /**
-     * normalize date timestamp to the nearest hour
-     * @param {Date} d - Date instance
-     * @return {number} timestamp - normalized to the nearest hour
-     */
-    normalizeTimestampByHour(d) {
-        return d.setMinutes(0, 0, 0);
-    }
-
-    /**
-     * get previous hour to date given
-     * @param {Date} d - Date instance
-     * @return {number} timestamp - one hour prior to date passed
-     */
-    _getDatePreviousHour(d) {
-        return d.setHours(d.getHours() - 1);
-    }
-
-    /**
-     * normalize to the nearest interval
-     * @param {object} d - Date instance
-     * @return {number} timestamp - normalized to the nearest interval
-     */
-    _normalizeTimestamp(d) {
-        const m = d.getMinutes();
-        return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
-    }
-
-    /**
-     * override the method to get the result as an array of integers separated
-     * by each interval
-     * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
-     * @param {array} arr - each index contains the result of each batch command
-     * where index 0 signifies the error and index 1 contains the result
-     * @return {array} array of integers, ordered from most recent interval to
-     * oldest interval
-     */
-    _getCount(arr) {
-        return arr.reduce((store, i) => {
-            let num = parseInt(i[1], 10);
-            num = Number.isNaN(num) ? 0 : num;
-            store.push(num);
-            return store;
-        }, []);
-    }
-
-    /**
-     * get list of sorted set key timestamps
-     * @param {number} epoch - epoch time
-     * @return {array} array of sorted set key timestamps
-     */
-    getSortedSetHours(epoch) {
-        const timestamps = [];
-        let date = this.normalizeTimestampByHour(new Date(epoch));
-        while (timestamps.length < 24) {
-            timestamps.push(date);
-            date = this._getDatePreviousHour(new Date(date));
-        }
-        return timestamps;
-    }
-
-    /**
-     * get the normalized hour timestamp for given epoch time
-     * @param {number} epoch - epoch time
-     * @return {string} normalized hour timestamp for given time
-     */
-    getSortedSetCurrentHour(epoch) {
-        return this.normalizeTimestampByHour(new Date(epoch));
-    }
-
-    /**
-     * helper method to add element to a sorted set, applying TTL if new set
-     * @param {string} key - name of key
-     * @param {integer} score - score used to order set
-     * @param {string} value - value to store
-     * @param {callback} cb - callback
-     * @return {undefined}
-     */
-    addToSortedSet(key, score, value, cb) {
-        this._redis.exists(key, (err, resCode) => {
-            if (err) {
-                return cb(err);
-            }
-            if (resCode === 0) {
-                // milliseconds in a day
-                const msInADay = 24 * 60 * 60 * 1000;
-                const nearestHour = this.normalizeTimestampByHour(new Date());
-                // in seconds
-                const ttl = Math.ceil(
-                    (msInADay - (Date.now() - nearestHour)) / 1000);
-                const cmds = [
-                    ['zadd', key, score, value],
-                    ['expire', key, ttl],
-                ];
-                return this._redis.batch(cmds, (err, res) => {
-                    if (err) {
-                        return cb(err);
-                    }
-                    const cmdErr = res.find(r => r[0] !== null);
-                    if (cmdErr) {
-                        return cb(cmdErr);
-                    }
-                    const successResponse = res[0][1];
-                    return cb(null, successResponse);
-                });
-            }
-            return this._redis.zadd(key, score, value, cb);
-        });
-    }
-}
-
-module.exports = StatsModel;
@@ -0,0 +1,232 @@
import StatsClient from './StatsClient';
import { Logger } from 'werelogs';
import async from 'async';

export type Callback = (error: Error | null, value?: any) => void;

/**
 * @class StatsModel
 *
 * @classdesc Extend and overwrite how timestamps are normalized by minutes
 * rather than by seconds
 */
export default class StatsModel extends StatsClient {
    /**
     * Utility method to convert 2d array rows to columns, and vice versa
     * See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip
     * @param arrays - 2d array of integers
     * @return converted array
     */
    _zip(arrays: number[][]) {
        if (arrays.length > 0 && arrays.every(a => Array.isArray(a))) {
            return arrays[0].map((_, i) => arrays.map(a => a[i]));
        }
        return [];
    }

    /**
     * normalize to the nearest interval
     * @param d - Date instance
     * @return timestamp - normalized to the nearest interval
     */
    _normalizeTimestamp(d: Date) {
        const m = d.getMinutes();
        return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
    }

    /**
     * override the method to get the count as an array of integers separated
     * by each interval
     * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
     * @param arr - each index contains the result of each batch command
     * where index 0 signifies the error and index 1 contains the result
     * @return array of integers, ordered from most recent interval to
     * oldest interval with length of (expiry / interval)
     */
    // @ts-expect-error
    _getCount(arr: [any, string | null][]): number[] {
        const size = Math.floor(this._expiry / this._interval);
        const array = arr.reduce((store, i) => {
            let num = parseInt(i[1] ?? '', 10);
            num = Number.isNaN(num) ? 0 : num;
            store.push(num);
            return store;
        }, [] as number[]);

        if (array.length < size) {
            array.push(...Array(size - array.length).fill(0));
        }
        return array;
    }

    /**
     * wrapper on `getStats` that handles a list of keys
     * override the method to reduce the returned 2d array from `_getCount`
     * @param log - Werelogs request logger
     * @param ids - service identifiers
     * @param cb - callback to call with the err/result
     */
    getAllStats(log: Logger, ids: string[], cb: Callback) {
        if (!this._redis) {
            return cb(null, {});
        }

        const size = Math.floor(this._expiry / this._interval);
        const statsRes = {
            'requests': Array(size).fill(0),
            '500s': Array(size).fill(0),
            'sampleDuration': this._expiry,
        };
        const requests: any[] = [];
        const errors: any[] = [];

        if (ids.length === 0) {
            return cb(null, statsRes);
        }

        // for now set concurrency to default of 10
        return async.eachLimit(ids, 10, (id, done) => {
            this.getStats(log, id, (err, res) => {
                if (err) {
                    return done(err);
                }
                requests.push(res.requests);
                errors.push(res['500s']);
                return done();
            });
        }, error => {
            if (error) {
                log.error('error getting stats', {
                    error,
                    method: 'StatsModel.getAllStats',
                });
                return cb(null, statsRes);
            }

            statsRes.requests = this._zip(requests).map(arr =>
                arr.reduce((acc, i) => acc + i, 0));
            statsRes['500s'] = this._zip(errors).map(arr =>
                arr.reduce((acc, i) => acc + i, 0));

            return cb(null, statsRes);
        });
    }

    /**
     * Handles getting a list of global keys.
     * @param ids - Service identifiers
     * @param log - Werelogs request logger
     * @param cb - Callback
     */
    getAllGlobalStats(ids: string[], log: Logger, cb: Callback) {
        const reqsKeys = ids.map(key => (['get', key]));
        return this._redis.batch(reqsKeys, (err, res) => {
            const statsRes = { requests: 0 };
            if (err) {
                log.error('error getting metrics', {
                    error: err,
                    method: 'StatsClient.getAllGlobalStats',
                });
                return cb(null, statsRes);
            }
            statsRes.requests = res.reduce((sum, curr) => {
                const [cmdErr, val] = curr;
                if (cmdErr) {
                    // Log any individual request errors from the batch request.
                    log.error('error getting metrics', {
                        error: cmdErr,
                        method: 'StatsClient.getAllGlobalStats',
                    });
                }
                return sum + (Number.parseInt(val, 10) || 0);
            }, 0);
            return cb(null, statsRes);
        });
    }

    /**
     * normalize date timestamp to the nearest hour
     * @param d - Date instance
     * @return timestamp - normalized to the nearest hour
     */
    normalizeTimestampByHour(d: Date) {
        return d.setMinutes(0, 0, 0);
    }

    /**
     * get previous hour to date given
     * @param d - Date instance
     * @return timestamp - one hour prior to date passed
     */
    _getDatePreviousHour(d: Date) {
        return d.setHours(d.getHours() - 1);
    }

    /**
     * get list of sorted set key timestamps
     * @param epoch - epoch time
     * @return array of sorted set key timestamps
     */
    getSortedSetHours(epoch: number) {
        const timestamps: number[] = [];
        let date = this.normalizeTimestampByHour(new Date(epoch));
        while (timestamps.length < 24) {
            timestamps.push(date);
            date = this._getDatePreviousHour(new Date(date));
        }
        return timestamps;
    }

    /**
     * get the normalized hour timestamp for given epoch time
     * @param epoch - epoch time
     * @return normalized hour timestamp for given time
     */
    getSortedSetCurrentHour(epoch: number) {
        return this.normalizeTimestampByHour(new Date(epoch));
    }

    /**
     * helper method to add element to a sorted set, applying TTL if new set
     * @param key - name of key
     * @param score - score used to order set
     * @param value - value to store
     * @param cb - callback
     */
    addToSortedSet(
        key: string,
        score: number,
        value: string,
        cb: (error: Error | null, value?: any) => void,
    ) {
        this._redis.exists(key, (err, resCode) => {
            if (err) {
                return cb(err);
            }
            if (resCode === 0) {
                // milliseconds in a day
                const msInADay = 24 * 60 * 60 * 1000;
                const nearestHour = this.normalizeTimestampByHour(new Date());
                // in seconds
                const ttl = Math.ceil(
                    (msInADay - (Date.now() - nearestHour)) / 1000);
                const cmds = [
                    ['zadd', key, score.toString(), value],
                    ['expire', key, ttl.toString()],
                ];
                return this._redis.batch(cmds, (err, res) => {
                    if (err) {
                        return cb(err);
                    }
                    const cmdErr = res.find((r: any) => r[0] !== null);
                    if (cmdErr) {
                        return cb(cmdErr);
                    }
                    const successResponse = res[0][1];
                    return cb(null, successResponse);
                });
            }
            return this._redis.zadd(key, score, value, cb);
        });
    }
}
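getAllStats merges the per-service interval counts by transposing them with _zip and summing each column. The sketch below isolates that reduction, assuming inputs of the shape _getCount produces (the data values are illustrative):

// Sketch of the column-wise aggregation done in getAllStats().
// zip() stands in for StatsModel._zip; arrays are newest interval first.
const zip = (arrays: number[][]) =>
    arrays.length > 0 ? arrays[0].map((_, i) => arrays.map(a => a[i])) : [];

const perService = [
    [3, 0, 1], // requests for service A
    [2, 5, 0], // requests for service B
];
const totals = zip(perService).map(col => col.reduce((acc, n) => acc + n, 0));
// totals === [5, 5, 1]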
@@ -1,40 +0,0 @@
const promClient = require('prom-client');

const collectDefaultMetricsIntervalMs =
    process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined ?
        Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10) :
        10000;

promClient.collectDefaultMetrics({ timeout: collectDefaultMetricsIntervalMs });

class ZenkoMetrics {
    static createCounter(params) {
        return new promClient.Counter(params);
    }

    static createGauge(params) {
        return new promClient.Gauge(params);
    }

    static createHistogram(params) {
        return new promClient.Histogram(params);
    }

    static createSummary(params) {
        return new promClient.Summary(params);
    }

    static getMetric(name) {
        return promClient.register.getSingleMetric(name);
    }

    static asPrometheus() {
        return promClient.register.metrics();
    }

    static asPrometheusContentType() {
        return promClient.register.contentType;
    }
}

module.exports = ZenkoMetrics;
@@ -0,0 +1,35 @@
import promClient from 'prom-client';

export default class ZenkoMetrics {
    static createCounter(params: promClient.CounterConfiguration<string>) {
        return new promClient.Counter(params);
    }

    static createGauge(params: promClient.GaugeConfiguration<string>) {
        return new promClient.Gauge(params);
    }

    static createHistogram(params: promClient.HistogramConfiguration<string>) {
        return new promClient.Histogram(params);
    }

    static createSummary(params: promClient.SummaryConfiguration<string>) {
        return new promClient.Summary(params);
    }

    static getMetric(name: string) {
        return promClient.register.getSingleMetric(name);
    }

    static async asPrometheus() {
        return promClient.register.metrics();
    }

    static asPrometheusContentType() {
        return promClient.register.contentType;
    }

    static collectDefaultMetrics() {
        return promClient.collectDefaultMetrics();
    }
}
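Note the behavioral change between the two versions: the old module started default-metric collection as a side effect of require, on a configurable interval, while the TS class leaves that to the caller through the new static collectDefaultMetrics(). Recent prom-client releases collect on scrape and dropped the timeout option, and register.metrics() became async, which is presumably why asPrometheus() is now async. A hedged usage sketch; the metric name is illustrative:

// Sketch: consumer-side usage of the TS ZenkoMetrics wrapper.
import ZenkoMetrics from './ZenkoMetrics';

ZenkoMetrics.collectDefaultMetrics(); // explicit opt-in, no import side effect

const counter = ZenkoMetrics.createCounter({
    name: 'demo_requests_total', // illustrative metric name
    help: 'Total requests served',
});
counter.inc();

ZenkoMetrics.asPrometheus().then(body => process.stdout.write(body));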
@@ -0,0 +1,4 @@
export { default as StatsClient } from './StatsClient';
export { default as StatsModel } from './StatsModel';
export { default as RedisClient } from './RedisClient';
export { default as ZenkoMetrics } from './ZenkoMetrics';
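With this barrel file in place, consumers can pull all the metrics helpers from a single path; a one-line sketch (the './metrics' specifier is an assumption about where the barrel lives):

import { StatsModel, RedisClient, ZenkoMetrics } from './metrics'; // path assumed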
@@ -1,23 +1,35 @@
-const errors = require('../errors').default;
+import errors from '../errors'

 const validServices = {
     aws: ['s3', 'iam', 'sts', 'ring'],
     scality: ['utapi', 'sso'],
 };

-class ARN {
+export default class ARN {
+    _partition: string;
+    _service: string;
+    _region: string | null;
+    _accountId?: string | null;
+    _resource: string;
+
     /**
      *
      * Create an ARN object from its individual components
      *
      * @constructor
-     * @param {string} partition - ARN partition (e.g. 'aws')
-     * @param {string} service - service name in partition (e.g. 's3')
-     * @param {string} [region] - AWS region
-     * @param {string} [accountId] - AWS 12-digit account ID
-     * @param {string} resource - AWS resource path (e.g. 'foo/bar')
+     * @param partition - ARN partition (e.g. 'aws')
+     * @param service - service name in partition (e.g. 's3')
+     * @param [region] - AWS region
+     * @param [accountId] - AWS 12-digit account ID
+     * @param resource - AWS resource path (e.g. 'foo/bar')
      */
-    constructor(partition, service, region, accountId, resource) {
+    constructor(
+        partition: string,
+        service: string,
+        region: string | undefined | null,
+        accountId: string | undefined | null,
+        resource: string,
+    ) {
         this._partition = partition;
         this._service = service;
         this._region = region || null;
@@ -25,7 +37,7 @@ class ARN {
         this._resource = resource;
     }

-    static createFromString(arnStr) {
+    static createFromString(arnStr: string) {
         const [arn, partition, service, region, accountId,
             resourceType, resource] = arnStr.split(':');

@@ -102,5 +114,3 @@ class ARN {
             .join(':');
     }
 }
-
-module.exports = ARN;
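For reference, createFromString relies on the fixed colon layout of ARNs; a sketch of what the destructured split(':') yields for a typical ARN (the example value is illustrative):

// Sketch: the split performed inside ARN.createFromString().
const arnStr = 'arn:aws:iam::123456789012:user/Alice'; // illustrative ARN
const [arn, partition, service, region, accountId, resourceType] =
    arnStr.split(':');
// arn='arn', partition='aws', service='iam', region='' (empty),
// accountId='123456789012', resourceType='user/Alice'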
@@ -1,22 +1,36 @@
-const { legacyLocations } = require('../constants');
-const escapeForXml = require('../s3middleware/escapeForXml');
+import { RequestLogger } from 'werelogs';
+
+import { legacyLocations } from '../constants';
+import escapeForXml from '../s3middleware/escapeForXml';
+
+type CloudServerConfig = any;
+
+export default class BackendInfo {
+    _config: CloudServerConfig;
+    _requestEndpoint: string;
+    _objectLocationConstraint?: string;
+    _bucketLocationConstraint?: string;
+    _legacyLocationConstraint?: string;

-class BackendInfo {
     /**
      * Represents the info necessary to evaluate which data backend to use
      * on a data put call.
      * @constructor
-     * @param {object} config - CloudServer config containing list of locations
-     * @param {string | undefined} objectLocationConstraint - location constraint
+     * @param config - CloudServer config containing list of locations
+     * @param objectLocationConstraint - location constraint
      * for object based on user meta header
-     * @param {string | undefined } bucketLocationConstraint - location
+     * @param bucketLocationConstraint - location
      * constraint for bucket based on bucket metadata
-     * @param {string} requestEndpoint - endpoint to which request was made
-     * @param {string | undefined } legacyLocationConstraint - legacy location
-     * constraint
+     * @param requestEndpoint - endpoint to which request was made
+     * @param legacyLocationConstraint - legacy location constraint
      */
-    constructor(config, objectLocationConstraint, bucketLocationConstraint,
-        requestEndpoint, legacyLocationConstraint) {
+    constructor(
+        config: CloudServerConfig,
+        objectLocationConstraint: string | undefined,
+        bucketLocationConstraint: string | undefined,
+        requestEndpoint: string,
+        legacyLocationConstraint: string | undefined,
+    ) {
         this._config = config;
         this._objectLocationConstraint = objectLocationConstraint;
         this._bucketLocationConstraint = bucketLocationConstraint;
@@ -27,15 +41,18 @@ class BackendInfo {

     /**
      * validate proposed location constraint against config
-     * @param {object} config - CloudServer config
-     * @param {string | undefined} locationConstraint - value of user
+     * @param config - CloudServer config
+     * @param locationConstraint - value of user
      * metadata location constraint header or bucket location constraint
-     * @param {object} log - werelogs logger
-     * @return {boolean} - true if valid, false if not
+     * @param log - werelogs logger
+     * @return - true if valid, false if not
      */
-    static isValidLocationConstraint(config, locationConstraint, log) {
-        if (Object.keys(config.locationConstraints).
-            indexOf(locationConstraint) < 0) {
+    static isValidLocationConstraint(
+        config: CloudServerConfig,
+        locationConstraint: string | undefined,
+        log: RequestLogger,
+    ) {
+        if (!locationConstraint || !(locationConstraint in config.locationConstraints)) {
             log.trace('proposed locationConstraint is invalid',
                 { locationConstraint });
             return false;
@@ -45,14 +62,17 @@ class BackendInfo {

     /**
      * validate that request endpoint is listed in the restEndpoint config
-     * @param {object} config - CloudServer config
-     * @param {string} requestEndpoint - request endpoint
-     * @param {object} log - werelogs logger
-     * @return {boolean} - true if present, false if not
+     * @param config - CloudServer config
+     * @param requestEndpoint - request endpoint
+     * @param log - werelogs logger
+     * @return true if present, false if not
      */
-    static isRequestEndpointPresent(config, requestEndpoint, log) {
-        if (Object.keys(config.restEndpoints).
-            indexOf(requestEndpoint) < 0) {
+    static isRequestEndpointPresent(
+        config: CloudServerConfig,
+        requestEndpoint: string,
+        log: RequestLogger,
+    ) {
+        if (!(requestEndpoint in config.restEndpoints)) {
             log.trace('requestEndpoint does not match config restEndpoints',
                 { requestEndpoint });
             return false;
@@ -63,14 +83,18 @@ class BackendInfo {
     /**
      * validate that locationConstraint for request Endpoint matches
      * one config locationConstraint
-     * @param {object} config - CloudServer config
-     * @param {string} requestEndpoint - request endpoint
-     * @param {object} log - werelogs logger
-     * @return {boolean} - true if matches, false if not
+     * @param config - CloudServer config
+     * @param requestEndpoint - request endpoint
+     * @param log - werelogs logger
+     * @return - true if matches, false if not
      */
-    static isRequestEndpointValueValid(config, requestEndpoint, log) {
-        if (Object.keys(config.locationConstraints).
-            indexOf(config.restEndpoints[requestEndpoint]) < 0) {
+    static isRequestEndpointValueValid(
+        config: CloudServerConfig,
+        requestEndpoint: string,
+        log: RequestLogger,
+    ) {
+        const restEndpoint = config.restEndpoints[requestEndpoint];
+        if (!(restEndpoint in config.locationConstraints)) {
             log.trace('the default locationConstraint for request' +
                 'Endpoint does not match any config locationConstraint',
                 { requestEndpoint });
@@ -81,11 +105,11 @@ class BackendInfo {

     /**
      * validate that s3 server is running with a file or memory backend
-     * @param {object} config - CloudServer config
-     * @param {object} log - werelogs logger
-     * @return {boolean} - true if running with file/mem backend, false if not
+     * @param config - CloudServer config
+     * @param log - werelogs logger
+     * @return - true if running with file/mem backend, false if not
      */
-    static isMemOrFileBackend(config, log) {
+    static isMemOrFileBackend(config: CloudServerConfig, log: RequestLogger) {
         if (config.backends.data === 'mem' || config.backends.data === 'file') {
             log.trace('use data backend for the location', {
                 dataBackend: config.backends.data,
@@ -103,12 +127,16 @@ class BackendInfo {
      * data backend for the location.
      * - if locationConstraint for request Endpoint does not match
      * any config locationConstraint, we will return an error
-     * @param {object} config - CloudServer config
-     * @param {string} requestEndpoint - request endpoint
-     * @param {object} log - werelogs logger
-     * @return {boolean} - true if valid, false if not
+     * @param config - CloudServer config
+     * @param requestEndpoint - request endpoint
+     * @param log - werelogs logger
+     * @return - true if valid, false if not
      */
-    static isValidRequestEndpointOrBackend(config, requestEndpoint, log) {
+    static isValidRequestEndpointOrBackend(
+        config: CloudServerConfig,
+        requestEndpoint: string,
+        log: RequestLogger,
+    ) {
         if (!BackendInfo.isRequestEndpointPresent(config, requestEndpoint,
             log)) {
             return BackendInfo.isMemOrFileBackend(config, log);
@@ -119,17 +147,22 @@ class BackendInfo {

     /**
      * validate controlling BackendInfo Parameter
-     * @param {object} config - CloudServer config
-     * @param {string | undefined} objectLocationConstraint - value of user
+     * @param config - CloudServer config
+     * @param objectLocationConstraint - value of user
      * metadata location constraint header
-     * @param {string | null} bucketLocationConstraint - location
+     * @param bucketLocationConstraint - location
      * constraint from bucket metadata
-     * @param {string} requestEndpoint - endpoint of request
-     * @param {object} log - werelogs logger
-     * @return {object} - location constraint validity
+     * @param requestEndpoint - endpoint of request
+     * @param log - werelogs logger
+     * @return - location constraint validity
      */
-    static controllingBackendParam(config, objectLocationConstraint,
-        bucketLocationConstraint, requestEndpoint, log) {
+    static controllingBackendParam(
+        config: CloudServerConfig,
+        objectLocationConstraint: string | undefined,
+        bucketLocationConstraint: string | null,
+        requestEndpoint: string,
+        log: RequestLogger,
+    ) {
         if (objectLocationConstraint) {
             if (BackendInfo.isValidLocationConstraint(config,
                 objectLocationConstraint, log)) {
@@ -175,16 +208,16 @@ class BackendInfo {

     /**
      * Return legacyLocationConstraint
-     * @param {object} config CloudServer config
-     * @return {string | undefined} legacyLocationConstraint;
+     * @param config CloudServer config
+     * @return legacyLocationConstraint;
      */
-    static getLegacyLocationConstraint(config) {
+    static getLegacyLocationConstraint(config: CloudServerConfig) {
         return legacyLocations.find(ll => config.locationConstraints[ll]);
     }

     /**
      * Return objectLocationConstraint
-     * @return {string | undefined} objectLocationConstraint;
+     * @return objectLocationConstraint;
      */
     getObjectLocationConstraint() {
         return this._objectLocationConstraint;
@@ -192,7 +225,7 @@ class BackendInfo {

     /**
      * Return bucketLocationConstraint
-     * @return {string | undefined} bucketLocationConstraint;
+     * @return bucketLocationConstraint;
      */
     getBucketLocationConstraint() {
         return this._bucketLocationConstraint;
@@ -200,7 +233,7 @@ class BackendInfo {

     /**
      * Return requestEndpoint
-     * @return {string} requestEndpoint;
+     * @return requestEndpoint;
      */
     getRequestEndpoint() {
         return this._requestEndpoint;
@@ -215,9 +248,9 @@ class BackendInfo {
      * (4) default locationConstraint for requestEndpoint if requestEndpoint
      * is listed in restEndpoints in config.json
      * (5) default data backend
-     * @return {string} locationConstraint;
+     * @return locationConstraint;
      */
-    getControllingLocationConstraint() {
+    getControllingLocationConstraint(): string {
         const objectLC = this.getObjectLocationConstraint();
         const bucketLC = this.getBucketLocationConstraint();
         const reqEndpoint = this.getRequestEndpoint();
@@ -236,5 +269,3 @@ class BackendInfo {
         return this._config.backends.data;
     }
 }
-
-module.exports = BackendInfo;
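The validation rewrite swaps Object.keys(...).indexOf(x) < 0 for the in operator, which is clearer and an O(1) lookup on plain objects, and it now rejects an undefined locationConstraint explicitly. A small sketch of the equivalence (the config shape is illustrative):

// Sketch: old vs new membership test, on an illustrative config shape.
const config = { locationConstraints: { 'us-east-1': {}, 'azure-blob': {} } };

const legacyCheck = (lc: string) =>
    Object.keys(config.locationConstraints).indexOf(lc) >= 0; // O(n) scan
const newCheck = (lc?: string) =>
    !!lc && lc in config.locationConstraints;                 // O(1) lookup

console.log(legacyCheck('us-east-1'), newCheck('us-east-1')); // true true
console.log(newCheck(undefined));                             // false (guarded)

One nuance: in also sees inherited properties ('toString' in {} is true), so this relies on location names never colliding with Object.prototype keys, a safe bet for config keys here.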
@@ -0,0 +1,281 @@
export type DeleteRetentionPolicy = {
    enabled: boolean;
    days: number;
};

/**
 * Helper class to ease access to the Azure specific information for
 * storage accounts mapped to buckets.
 */
export default class BucketAzureInfo {
    _data: {
        sku: string;
        accessTier: string;
        kind: string;
        systemKeys: string[];
        tenantKeys: string[];
        subscriptionId: string;
        resourceGroup: string;
        deleteRetentionPolicy: DeleteRetentionPolicy;
        managementPolicies: any[];
        httpsOnly: boolean;
        tags: any;
        networkACL: any[];
        cname: string;
        azureFilesAADIntegration: boolean;
        hnsEnabled: boolean;
        logging: any;
        hourMetrics: any;
        minuteMetrics: any;
        serviceVersion: string;
    };
    /**
     * @constructor
     * @param obj - Raw structure for the Azure info on storage account
     * @param obj.sku - SKU name of this storage account
     * @param obj.accessTier - Access Tier name of this storage account
     * @param obj.kind - Kind name of this storage account
     * @param obj.systemKeys - pair of shared keys for the system
     * @param obj.tenantKeys - pair of shared keys for the tenant
     * @param obj.subscriptionId - subscription ID the storage account
     * belongs to
     * @param obj.resourceGroup - Resource group name the storage
     * account belongs to
     * @param obj.deleteRetentionPolicy - Delete retention policy
     * @param obj.deleteRetentionPolicy.enabled -
     * @param obj.deleteRetentionPolicy.days -
     * @param obj.managementPolicies - Management policies for this
     * storage account
     * @param obj.httpsOnly - Serve the content of this storage
     * account through HTTPS only
     * @param obj.tags - Set of tags applied on this storage account
     * @param obj.networkACL - Network ACL of this storage account
     * @param obj.cname - CNAME of this storage account
     * @param obj.azureFilesAADIntegration - whether or not Azure
     * Files AAD Integration is enabled for this storage account
     * @param obj.hnsEnabled - whether or not a hierarchical namespace
     * is enabled for this storage account
     * @param obj.logging - service properties: logging
     * @param obj.hourMetrics - service properties: hourMetrics
     * @param obj.minuteMetrics - service properties: minuteMetrics
     * @param obj.serviceVersion - service properties: serviceVersion
     */
    constructor(obj: {
        sku: string;
        accessTier: string;
        kind: string;
        systemKeys: string[];
        tenantKeys: string[];
        subscriptionId: string;
        resourceGroup: string;
        deleteRetentionPolicy: DeleteRetentionPolicy;
        managementPolicies: any[];
        httpsOnly: boolean;
        tags: any;
        networkACL: any[];
        cname: string;
        azureFilesAADIntegration: boolean;
        hnsEnabled: boolean;
        logging: any;
        hourMetrics: any;
        minuteMetrics: any;
        serviceVersion: string;
    }) {
        this._data = {
            sku: obj.sku,
            accessTier: obj.accessTier,
            kind: obj.kind,
            systemKeys: obj.systemKeys,
            tenantKeys: obj.tenantKeys,
            subscriptionId: obj.subscriptionId,
            resourceGroup: obj.resourceGroup,
            deleteRetentionPolicy: obj.deleteRetentionPolicy,
            managementPolicies: obj.managementPolicies,
            httpsOnly: obj.httpsOnly,
            tags: obj.tags,
            networkACL: obj.networkACL,
            cname: obj.cname,
            azureFilesAADIntegration: obj.azureFilesAADIntegration,
            hnsEnabled: obj.hnsEnabled,
            logging: obj.logging,
            hourMetrics: obj.hourMetrics,
            minuteMetrics: obj.minuteMetrics,
            serviceVersion: obj.serviceVersion,
        };
    }

    getSku() {
        return this._data.sku;
    }

    setSku(sku: string) {
        this._data.sku = sku;
        return this;
    }

    getAccessTier() {
        return this._data.accessTier;
    }

    setAccessTier(accessTier: string) {
        this._data.accessTier = accessTier;
        return this;
    }

    getKind() {
        return this._data.kind;
    }

    setKind(kind: string) {
        this._data.kind = kind;
        return this;
    }

    getSystemKeys() {
        return this._data.systemKeys;
    }

    setSystemKeys(systemKeys: string[]) {
        this._data.systemKeys = systemKeys;
        return this;
    }

    getTenantKeys() {
        return this._data.tenantKeys;
    }

    setTenantKeys(tenantKeys: string[]) {
        this._data.tenantKeys = tenantKeys;
        return this;
    }

    getSubscriptionId() {
        return this._data.subscriptionId;
    }

    setSubscriptionId(subscriptionId: string) {
        this._data.subscriptionId = subscriptionId;
        return this;
    }

    getResourceGroup() {
        return this._data.resourceGroup;
    }

    setResourceGroup(resourceGroup: string) {
        this._data.resourceGroup = resourceGroup;
        return this;
    }

    getDeleteRetentionPolicy() {
        return this._data.deleteRetentionPolicy;
    }

    setDeleteRetentionPolicy(deleteRetentionPolicy: DeleteRetentionPolicy) {
        this._data.deleteRetentionPolicy = deleteRetentionPolicy;
        return this;
    }

    getManagementPolicies() {
        return this._data.managementPolicies;
    }

    setManagementPolicies(managementPolicies: any[]) {
        this._data.managementPolicies = managementPolicies;
        return this;
    }

    getHttpsOnly() {
        return this._data.httpsOnly;
    }

    setHttpsOnly(httpsOnly: boolean) {
        this._data.httpsOnly = httpsOnly;
        return this;
    }

    getTags() {
        return this._data.tags;
    }

    setTags(tags: any) {
        this._data.tags = tags;
        return this;
    }

    getNetworkACL() {
        return this._data.networkACL;
    }

    setNetworkACL(networkACL: any[]) {
        this._data.networkACL = networkACL;
        return this;
    }

    getCname() {
        return this._data.cname;
    }

    setCname(cname: string) {
        this._data.cname = cname;
        return this;
    }

    getAzureFilesAADIntegration() {
        return this._data.azureFilesAADIntegration;
    }

    setAzureFilesAADIntegration(azureFilesAADIntegration: boolean) {
        this._data.azureFilesAADIntegration = azureFilesAADIntegration;
        return this;
    }

    getHnsEnabled() {
        return this._data.hnsEnabled;
    }

    setHnsEnabled(hnsEnabled: boolean) {
        this._data.hnsEnabled = hnsEnabled;
        return this;
    }

    getLogging() {
        return this._data.logging;
    }

    setLogging(logging: any) {
        this._data.logging = logging;
        return this;
    }

    getHourMetrics() {
        return this._data.hourMetrics;
    }

    setHourMetrics(hourMetrics: any) {
        this._data.hourMetrics = hourMetrics;
        return this;
    }

    getMinuteMetrics() {
        return this._data.minuteMetrics;
    }

    setMinuteMetrics(minuteMetrics: any) {
        this._data.minuteMetrics = minuteMetrics;
        return this;
    }

    getServiceVersion() {
        return this._data.serviceVersion;
    }

    setServiceVersion(serviceVersion: any) {
        this._data.serviceVersion = serviceVersion;
        return this;
    }

    getValue() {
        return this._data;
    }
}
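Every setter on BucketAzureInfo returns this, so updates chain fluently; a sketch with illustrative values (the full raw structure passed to the constructor is elided, hence the as any):

// Sketch: chainable updates; constructor input elided for brevity.
const azureInfo = new BucketAzureInfo({ /* full raw structure */ } as any);
azureInfo
    .setSku('Standard_LRS') // illustrative SKU
    .setAccessTier('Hot')
    .setHttpsOnly(true);
// azureInfo.getValue().sku === 'Standard_LRS'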
@@ -1,680 +0,0 @@
const assert = require('assert');
const uuid = require('uuid/v4');

const { WebsiteConfiguration } = require('./WebsiteConfiguration');
const ReplicationConfiguration = require('./ReplicationConfiguration');
const LifecycleConfiguration = require('./LifecycleConfiguration');
const ObjectLockConfiguration = require('./ObjectLockConfiguration');
const BucketPolicy = require('./BucketPolicy');
const NotificationConfiguration = require('./NotificationConfiguration');

// WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG
// BucketInfoModelVersion.md can be found in the root of this repository
const modelVersion = 10;

class BucketInfo {
    /**
     * Represents all bucket information.
     * @constructor
     * @param {string} name - bucket name
     * @param {string} owner - bucket owner's name
     * @param {string} ownerDisplayName - owner's display name
     * @param {object} creationDate - creation date of bucket
     * @param {number} mdBucketModelVersion - bucket model version
     * @param {object} [acl] - bucket ACLs (no need to copy
     * ACL object since referenced object will not be used outside of
     * BucketInfo instance)
     * @param {boolean} transient - flag indicating whether bucket is transient
     * @param {boolean} deleted - flag indicating whether attempt to delete
     * @param {object} serverSideEncryption - sse information for this bucket
     * @param {number} serverSideEncryption.cryptoScheme -
     * cryptoScheme used
     * @param {string} serverSideEncryption.algorithm -
     * algorithm to use
     * @param {string} serverSideEncryption.masterKeyId -
     * key to get master key
     * @param {string} serverSideEncryption.configuredMasterKeyId -
     * custom KMS key id specified by user
     * @param {boolean} serverSideEncryption.mandatory -
     * true for mandatory encryption
     * bucket has been made
     * @param {object} versioningConfiguration - versioning configuration
     * @param {string} versioningConfiguration.Status - versioning status
     * @param {object} versioningConfiguration.MfaDelete - versioning mfa delete
     * @param {string} locationConstraint - locationConstraint for bucket
     * @param {WebsiteConfiguration} [websiteConfiguration] - website
     * configuration
     * @param {object[]} [cors] - collection of CORS rules to apply
     * @param {string} [cors[].id] - optional ID to identify rule
     * @param {string[]} cors[].allowedMethods - methods allowed for CORS request
     * @param {string[]} cors[].allowedOrigins - origins allowed for CORS request
     * @param {string[]} [cors[].allowedHeaders] - headers allowed in an OPTIONS
     * request via the Access-Control-Request-Headers header
     * @param {number} [cors[].maxAgeSeconds] - seconds browsers should cache
     * OPTIONS response
     * @param {string[]} [cors[].exposeHeaders] - headers to expose to applications
     * @param {object} [replicationConfiguration] - replication configuration
     * @param {object} [lifecycleConfiguration] - lifecycle configuration
     * @param {object} [bucketPolicy] - bucket policy
     * @param {string} [uid] - unique identifier for the bucket, necessary
     * @param {boolean} [objectLockEnabled] - true when object lock enabled
     * @param {object} [objectLockConfiguration] - object lock configuration
     * @param {object} [notificationConfiguration] - bucket notification configuration
     * @param {object[]} [tags] - bucket tags
     */
    constructor(name, owner, ownerDisplayName, creationDate,
        mdBucketModelVersion, acl, transient, deleted,
        serverSideEncryption, versioningConfiguration,
        locationConstraint, websiteConfiguration, cors,
        replicationConfiguration, lifecycleConfiguration,
        bucketPolicy, uid, objectLockEnabled, objectLockConfiguration,
        notificationConfiguration, tags) {
        assert.strictEqual(typeof name, 'string');
        assert.strictEqual(typeof owner, 'string');
        assert.strictEqual(typeof ownerDisplayName, 'string');
        assert.strictEqual(typeof creationDate, 'string');
        if (mdBucketModelVersion) {
            assert.strictEqual(typeof mdBucketModelVersion, 'number');
        }
        if (acl) {
            assert.strictEqual(typeof acl, 'object');
            assert(Array.isArray(acl.FULL_CONTROL));
            assert(Array.isArray(acl.WRITE));
            assert(Array.isArray(acl.WRITE_ACP));
            assert(Array.isArray(acl.READ));
            assert(Array.isArray(acl.READ_ACP));
        }
        if (serverSideEncryption) {
            assert.strictEqual(typeof serverSideEncryption, 'object');
            const { cryptoScheme, algorithm, masterKeyId,
                configuredMasterKeyId, mandatory } = serverSideEncryption;
            assert.strictEqual(typeof cryptoScheme, 'number');
            assert.strictEqual(typeof algorithm, 'string');
            assert.strictEqual(typeof masterKeyId, 'string');
            assert.strictEqual(typeof mandatory, 'boolean');
            if (configuredMasterKeyId !== undefined) {
                assert.strictEqual(typeof configuredMasterKeyId, 'string');
            }
        }
        if (versioningConfiguration) {
            assert.strictEqual(typeof versioningConfiguration, 'object');
            const { Status, MfaDelete } = versioningConfiguration;
            assert(Status === undefined ||
                Status === 'Enabled' ||
                Status === 'Suspended');
            assert(MfaDelete === undefined ||
                MfaDelete === 'Enabled' ||
                MfaDelete === 'Disabled');
        }
        if (locationConstraint) {
            assert.strictEqual(typeof locationConstraint, 'string');
        }
        if (websiteConfiguration) {
            assert(websiteConfiguration instanceof WebsiteConfiguration);
            const { indexDocument, errorDocument, redirectAllRequestsTo,
                routingRules } = websiteConfiguration;
            assert(indexDocument === undefined ||
                typeof indexDocument === 'string');
            assert(errorDocument === undefined ||
                typeof errorDocument === 'string');
            assert(redirectAllRequestsTo === undefined ||
                typeof redirectAllRequestsTo === 'object');
            assert(routingRules === undefined ||
                Array.isArray(routingRules));
        }
        if (cors) {
            assert(Array.isArray(cors));
        }
        if (replicationConfiguration) {
            ReplicationConfiguration.validateConfig(replicationConfiguration);
        }
        if (lifecycleConfiguration) {
            LifecycleConfiguration.validateConfig(lifecycleConfiguration);
        }
        if (bucketPolicy) {
            BucketPolicy.validatePolicy(bucketPolicy);
        }
        if (uid) {
            assert.strictEqual(typeof uid, 'string');
            assert.strictEqual(uid.length, 36);
        }
        if (objectLockConfiguration) {
            ObjectLockConfiguration.validateConfig(objectLockConfiguration);
        }
        if (notificationConfiguration) {
            NotificationConfiguration.validateConfig(notificationConfiguration);
        }
        const aclInstance = acl || {
            Canned: 'private',
            FULL_CONTROL: [],
            WRITE: [],
            WRITE_ACP: [],
            READ: [],
            READ_ACP: [],
        };
        if (tags) {
            assert(Array.isArray(tags));
        }

        // IF UPDATING PROPERTIES, INCREMENT MODELVERSION NUMBER ABOVE
        this._acl = aclInstance;
        this._name = name;
        this._owner = owner;
        this._ownerDisplayName = ownerDisplayName;
        this._creationDate = creationDate;
        this._mdBucketModelVersion = mdBucketModelVersion || 0;
        this._transient = transient || false;
        this._deleted = deleted || false;
        this._serverSideEncryption = serverSideEncryption || null;
        this._versioningConfiguration = versioningConfiguration || null;
        this._locationConstraint = locationConstraint || null;
        this._websiteConfiguration = websiteConfiguration || null;
        this._replicationConfiguration = replicationConfiguration || null;
        this._cors = cors || null;
        this._lifecycleConfiguration = lifecycleConfiguration || null;
        this._bucketPolicy = bucketPolicy || null;
        this._uid = uid || uuid();
        this._objectLockEnabled = objectLockEnabled || false;
        this._objectLockConfiguration = objectLockConfiguration || null;
        this._notificationConfiguration = notificationConfiguration || null;
        this._tags = tags || null;
        return this;
    }
    /**
     * Serialize the object
     * @return {string} - stringified object
     */
    serialize() {
        const bucketInfos = {
            acl: this._acl,
            name: this._name,
            owner: this._owner,
            ownerDisplayName: this._ownerDisplayName,
            creationDate: this._creationDate,
            mdBucketModelVersion: this._mdBucketModelVersion,
            transient: this._transient,
            deleted: this._deleted,
            serverSideEncryption: this._serverSideEncryption,
            versioningConfiguration: this._versioningConfiguration,
            locationConstraint: this._locationConstraint,
            websiteConfiguration: undefined,
            cors: this._cors,
            replicationConfiguration: this._replicationConfiguration,
            lifecycleConfiguration: this._lifecycleConfiguration,
            bucketPolicy: this._bucketPolicy,
            uid: this._uid,
            objectLockEnabled: this._objectLockEnabled,
            objectLockConfiguration: this._objectLockConfiguration,
            notificationConfiguration: this._notificationConfiguration,
            tags: this._tags,
        };
        if (this._websiteConfiguration) {
            bucketInfos.websiteConfiguration =
                this._websiteConfiguration.getConfig();
        }
        return JSON.stringify(bucketInfos);
    }
    /**
     * deSerialize the JSON string
     * @param {string} stringBucket - the stringified bucket
     * @return {object} - parsed string
     */
    static deSerialize(stringBucket) {
        const obj = JSON.parse(stringBucket);
        const websiteConfig = obj.websiteConfiguration ?
            new WebsiteConfiguration(obj.websiteConfiguration) : null;
        return new BucketInfo(obj.name, obj.owner, obj.ownerDisplayName,
            obj.creationDate, obj.mdBucketModelVersion, obj.acl,
            obj.transient, obj.deleted, obj.serverSideEncryption,
            obj.versioningConfiguration, obj.locationConstraint, websiteConfig,
            obj.cors, obj.replicationConfiguration, obj.lifecycleConfiguration,
            obj.bucketPolicy, obj.uid, obj.objectLockEnabled,
            obj.objectLockConfiguration, obj.notificationConfiguration, obj.tags);
    }

    /**
     * Returns the current model version for the data structure
     * @return {number} - the current model version set above in the file
     */
    static currentModelVersion() {
        return modelVersion;
    }

    /**
     * Create a BucketInfo from an object
     *
     * @param {object} data - object containing data
     * @return {BucketInfo} Return a BucketInfo
     */
    static fromObj(data) {
        return new BucketInfo(data._name, data._owner, data._ownerDisplayName,
            data._creationDate, data._mdBucketModelVersion, data._acl,
            data._transient, data._deleted, data._serverSideEncryption,
            data._versioningConfiguration, data._locationConstraint,
            data._websiteConfiguration, data._cors,
            data._replicationConfiguration, data._lifecycleConfiguration,
            data._bucketPolicy, data._uid, data._objectLockEnabled,
            data._objectLockConfiguration, data._notificationConfiguration, data._tags);
    }
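serialize/deSerialize form the persistence boundary: bucket metadata is stored as a JSON string and revived through the validating constructor, so a corrupt record fails loudly on load. A hedged round-trip sketch; the constructor arguments are illustrative:

// Sketch: round-tripping a BucketInfo through its JSON form.
// All constructor values below are illustrative.
const assert = require('assert');

const bucket = new BucketInfo('my-bucket', 'ownerCanonicalId', 'Owner Name',
    new Date().toJSON(), BucketInfo.currentModelVersion());
const stored = bucket.serialize();              // JSON string kept in metadata
const revived = BucketInfo.deSerialize(stored); // re-runs constructor asserts
assert.strictEqual(revived.getName(), 'my-bucket');
assert.strictEqual(revived.isVersioningEnabled(), false);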
/**
|
|
||||||
* Get the ACLs.
|
|
||||||
* @return {object} acl
|
|
||||||
*/
|
|
||||||
getAcl() {
|
|
||||||
return this._acl;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set the canned acl's.
|
|
||||||
* @param {string} cannedACL - canned ACL being set
|
|
||||||
* @return {BucketInfo} - bucket info instance
|
|
||||||
*/
|
|
||||||
setCannedAcl(cannedACL) {
|
|
||||||
this._acl.Canned = cannedACL;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set a specific ACL.
|
|
||||||
* @param {string} canonicalID - id for account being given access
|
|
||||||
* @param {string} typeOfGrant - type of grant being granted
|
|
||||||
* @return {BucketInfo} - bucket info instance
|
|
||||||
*/
|
|
||||||
setSpecificAcl(canonicalID, typeOfGrant) {
|
|
||||||
this._acl[typeOfGrant].push(canonicalID);
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set all ACLs.
|
|
||||||
* @param {object} acl - new set of ACLs
|
|
||||||
* @return {BucketInfo} - bucket info instance
|
|
||||||
*/
|
|
||||||
setFullAcl(acl) {
|
|
||||||
this._acl = acl;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get the server side encryption information
|
|
||||||
* @return {object} serverSideEncryption
|
|
||||||
*/
|
|
||||||
getServerSideEncryption() {
|
|
||||||
return this._serverSideEncryption;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set server side encryption information
|
|
||||||
* @param {object} serverSideEncryption - server side encryption information
|
|
||||||
* @return {BucketInfo} - bucket info instance
|
|
||||||
*/
|
|
||||||
setServerSideEncryption(serverSideEncryption) {
|
|
||||||
this._serverSideEncryption = serverSideEncryption;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get the versioning configuration information
|
|
||||||
* @return {object} versioningConfiguration
|
|
||||||
*/
|
|
||||||
getVersioningConfiguration() {
|
|
||||||
return this._versioningConfiguration;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set versioning configuration information
|
|
||||||
* @param {object} versioningConfiguration - versioning information
|
|
||||||
* @return {BucketInfo} - bucket info instance
|
|
||||||
*/
|
|
||||||
setVersioningConfiguration(versioningConfiguration) {
|
|
||||||
this._versioningConfiguration = versioningConfiguration;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Check that versioning is 'Enabled' on the given bucket.
|
|
||||||
* @return {boolean} - `true` if versioning is 'Enabled', otherwise `false`
|
|
||||||
*/
|
|
||||||
isVersioningEnabled() {
|
|
||||||
const versioningConfig = this.getVersioningConfiguration();
|
|
||||||
return versioningConfig ? versioningConfig.Status === 'Enabled' : false;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get the website configuration information
|
|
||||||
* @return {object} websiteConfiguration
|
|
||||||
*/
|
|
||||||
getWebsiteConfiguration() {
|
|
||||||
return this._websiteConfiguration;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set website configuration information
|
|
||||||
* @param {object} websiteConfiguration - configuration for bucket website
|
|
||||||
* @return {BucketInfo} - bucket info instance
|
|
||||||
*/
|
|
||||||
setWebsiteConfiguration(websiteConfiguration) {
|
|
||||||
this._websiteConfiguration = websiteConfiguration;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set replication configuration information
|
|
||||||
* @param {object} replicationConfiguration - replication information
|
|
||||||
* @return {BucketInfo} - bucket info instance
|
|
||||||
*/
|
|
||||||
setReplicationConfiguration(replicationConfiguration) {
|
|
||||||
this._replicationConfiguration = replicationConfiguration;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get replication configuration information
|
|
||||||
* @return {object|null} replication configuration information or `null` if
|
|
||||||
* the bucket does not have a replication configuration
|
|
||||||
*/
|
|
||||||
getReplicationConfiguration() {
|
|
||||||
return this._replicationConfiguration;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get lifecycle configuration information
|
|
||||||
* @return {object|null} lifecycle configuration information or `null` if
|
|
||||||
* the bucket does not have a lifecycle configuration
|
|
||||||
*/
|
|
||||||
getLifecycleConfiguration() {
|
|
||||||
return this._lifecycleConfiguration;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set lifecycle configuration information
|
|
||||||
* @param {object} lifecycleConfiguration - lifecycle information
|
|
||||||
* @return {BucketInfo} - bucket info instance
|
|
||||||
*/
|
|
||||||
setLifecycleConfiguration(lifecycleConfiguration) {
|
|
||||||
this._lifecycleConfiguration = lifecycleConfiguration;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get bucket policy statement
|
|
||||||
* @return {object|null} bucket policy statement or `null` if the bucket
|
|
||||||
* does not have a bucket policy
|
|
||||||
*/
|
|
||||||
getBucketPolicy() {
|
|
||||||
return this._bucketPolicy;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set bucket policy statement
|
|
||||||
* @param {object} bucketPolicy - bucket policy
|
|
||||||
* @return {BucketInfo} - bucket info instance
|
|
||||||
*/
|
|
||||||
setBucketPolicy(bucketPolicy) {
|
|
||||||
this._bucketPolicy = bucketPolicy;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get object lock configuration
|
|
||||||
* @return {object|null} object lock configuration information or `null` if
|
|
||||||
* the bucket does not have an object lock configuration
|
|
||||||
*/
|
|
||||||
getObjectLockConfiguration() {
|
|
||||||
return this._objectLockConfiguration;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set object lock configuration
|
|
||||||
* @param {object} objectLockConfiguration - object lock information
|
|
||||||
* @return {BucketInfo} - bucket info instance
|
|
||||||
*/
|
|
||||||
setObjectLockConfiguration(objectLockConfiguration) {
|
|
||||||
this._objectLockConfiguration = objectLockConfiguration;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get notification configuration
|
|
||||||
* @return {object|null} notification configuration information or 'null' if
|
|
||||||
* the bucket does not have a notification configuration
|
|
||||||
*/
|
|
||||||
getNotificationConfiguration() {
|
|
||||||
return this._notificationConfiguration;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set notification configuraiton
|
|
||||||
* @param {object} notificationConfiguration - bucket notification information
|
|
||||||
* @return {BucketInfo} - bucket info instance
|
|
||||||
*/
|
|
||||||
setNotificationConfiguration(notificationConfiguration) {
|
|
||||||
this._notificationConfiguration = notificationConfiguration;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get cors resource
|
|
||||||
* @return {object[]} cors
|
|
||||||
*/
|
|
||||||
getCors() {
|
|
||||||
return this._cors;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set cors resource
|
|
||||||
* @param {object[]} rules - collection of CORS rules
|
|
||||||
* @param {string} [rules.id] - optional id to identify rule
|
|
||||||
* @param {string[]} rules[].allowedMethods - methods allowed for CORS
|
|
||||||
* @param {string[]} rules[].allowedOrigins - origins allowed for CORS
|
|
||||||
* @param {string[]} [rules[].allowedHeaders] - headers allowed in an
|
|
||||||
* OPTIONS request via the Access-Control-Request-Headers header
|
|
||||||
* @param {number} [rules[].maxAgeSeconds] - seconds browsers should cache
|
|
||||||
* OPTIONS response
|
|
||||||
* @param {string[]} [rules[].exposeHeaders] - headers to expose to external
|
|
||||||
* applications
|
|
||||||
* @return {BucketInfo} - bucket info instance
|
|
||||||
*/
|
|
||||||
setCors(rules) {
|
|
||||||
this._cors = rules;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* get the serverside encryption algorithm
|
|
||||||
* @return {string} - sse algorithm used by this bucket
|
|
||||||
*/
|
|
||||||
getSseAlgorithm() {
|
|
||||||
if (!this._serverSideEncryption) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return this._serverSideEncryption.algorithm;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* get the server side encryption master key Id
|
|
||||||
* @return {string} - sse master key Id used by this bucket
|
|
||||||
*/
|
|
||||||
getSseMasterKeyId() {
|
|
||||||
if (!this._serverSideEncryption) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return this._serverSideEncryption.masterKeyId;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get bucket name.
|
|
||||||
* @return {string} - bucket name
|
|
||||||
*/
|
|
||||||
getName() {
|
|
||||||
return this._name;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Set bucket name.
|
|
||||||
* @param {string} bucketName - new bucket name
|
|
||||||
* @return {BucketInfo} - bucket info instance
|
|
||||||
*/
|
|
||||||
setName(bucketName) {
|
|
||||||
this._name = bucketName;
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
    /**
     * Get bucket owner.
     * @return {string} - bucket owner's canonicalID
     */
    getOwner() {
        return this._owner;
    }
    /**
     * Set bucket owner.
     * @param {string} ownerCanonicalID - bucket owner canonicalID
     * @return {BucketInfo} - bucket info instance
     */
    setOwner(ownerCanonicalID) {
        this._owner = ownerCanonicalID;
        return this;
    }
    /**
     * Get bucket owner display name.
     * @return {string} - bucket owner display name
     */
    getOwnerDisplayName() {
        return this._ownerDisplayName;
    }
    /**
     * Set bucket owner display name.
     * @param {string} ownerDisplayName - bucket owner display name
     * @return {BucketInfo} - bucket info instance
     */
    setOwnerDisplayName(ownerDisplayName) {
        this._ownerDisplayName = ownerDisplayName;
        return this;
    }
    /**
     * Get bucket creation date.
     * @return {object} - bucket creation date
     */
    getCreationDate() {
        return this._creationDate;
    }
    /**
     * Set location constraint.
     * @param {string} location - bucket location constraint
     * @return {BucketInfo} - bucket info instance
     */
    setLocationConstraint(location) {
        this._locationConstraint = location;
        return this;
    }

    /**
     * Get location constraint.
     * @return {string} - bucket location constraint
     */
    getLocationConstraint() {
        return this._locationConstraint;
    }
    /**
     * Set Bucket model version
     *
     * @param {number} version - Model version
     * @return {BucketInfo} - bucket info instance
     */
    setMdBucketModelVersion(version) {
        this._mdBucketModelVersion = version;
        return this;
    }
    /**
     * Get Bucket model version
     *
     * @return {number} Bucket model version
     */
    getMdBucketModelVersion() {
        return this._mdBucketModelVersion;
    }
    /**
     * Add transient flag.
     * @return {BucketInfo} - bucket info instance
     */
    addTransientFlag() {
        this._transient = true;
        return this;
    }
    /**
     * Remove transient flag.
     * @return {BucketInfo} - bucket info instance
     */
    removeTransientFlag() {
        this._transient = false;
        return this;
    }
    /**
     * Check transient flag.
     * @return {boolean} - whether the transient flag is set
     */
    hasTransientFlag() {
        return !!this._transient;
    }
    /**
     * Add deleted flag.
     * @return {BucketInfo} - bucket info instance
     */
    addDeletedFlag() {
        this._deleted = true;
        return this;
    }
    /**
     * Remove deleted flag.
     * @return {BucketInfo} - bucket info instance
     */
    removeDeletedFlag() {
        this._deleted = false;
        return this;
    }
    /**
     * Check deleted flag.
     * @return {boolean} - whether the deleted flag is set
     */
    hasDeletedFlag() {
        return !!this._deleted;
    }
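    // Usage sketch (illustrative): the flag helpers are chainable like the
    // other setters, and the has* checks always return a real boolean:
    //
    //   bucket.addTransientFlag();
    //   if (bucket.hasTransientFlag()) {
    //       bucket.removeTransientFlag().addDeletedFlag();
    //   }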
    /**
     * Check if the versioning mode is on.
     * @return {boolean} - versioning mode status
     */
    isVersioningOn() {
        // coerce to a boolean so a falsy configuration value is never
        // returned as-is, matching the documented return type
        return !!(this._versioningConfiguration &&
            this._versioningConfiguration.Status === 'Enabled');
    }
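    // Illustrative example (hypothetical values, not taken from this file):
    // the S3-style 'Status' field tested above drives the result; the
    // direct field assignment below stands in for whatever versioning
    // setter is defined earlier in this file.
    //
    //   bucket._versioningConfiguration = { Status: 'Enabled' };
    //   bucket.isVersioningOn(); // true
    //   bucket._versioningConfiguration = { Status: 'Suspended' };
    //   bucket.isVersioningOn(); // false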
    /**
     * Get unique id of bucket.
     * @return {string} - unique id
     */
    getUid() {
        return this._uid;
    }
    /**
     * Set unique id of bucket.
     * @param {string} uid - unique identifier for the bucket
     * @return {BucketInfo} - bucket info instance
     */
    setUid(uid) {
        this._uid = uid;
        return this;
    }
    /**
     * Check if object lock is enabled.
     * @return {boolean} - whether object lock is enabled
     */
    isObjectLockEnabled() {
        return !!this._objectLockEnabled;
    }
    /**
     * Set the value of the objectLockEnabled field.
     * @param {boolean} enabled - true if object lock is enabled, else false
     * @return {BucketInfo} - bucket info instance
     */
    setObjectLockEnabled(enabled) {
        this._objectLockEnabled = enabled;
        return this;
    }

    /**
     * Get the value of bucket tags
     * @return {object[]} - Array of bucket tags as {"key": "key", "value": "value"}
     */
    getTags() {
        return this._tags;
    }

    /**
     * Set bucket tags
     * @param {object[]} tags - collection of tags
     * @param {string} tags[].key - key of the tag
     * @param {string} tags[].value - value of the tag
     * @return {BucketInfo} - bucket info instance
     */
    setTags(tags) {
        this._tags = tags;
        return this;
    }
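    // Illustrative example (not part of the original file), using the tag
    // shape documented in the JSDoc above:
    //
    //   bucket.setTags([
    //       { key: 'team', value: 'storage' },
    //       { key: 'env', value: 'production' },
    //   ]);
    //   bucket.getTags(); // [{ key: 'team', ... }, { key: 'env', ... }]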
}

module.exports = BucketInfo;
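
// Usage sketch (illustrative; constructor arguments are defined earlier in
// this file and elided here): every setter returns `this`, so bucket
// metadata can be built up fluently and read back with the getters.
//
//   const bucket = new BucketInfo(/* ...constructor args... */);
//   bucket
//       .setName('example-bucket')
//       .setOwner('79a59df900b949e55d96a1e698fbaced')
//       .setOwnerDisplayName('example-owner')
//       .setLocationConstraint('us-east-1')
//       .setObjectLockEnabled(true);
//   bucket.getName(); // 'example-bucket'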