Compare commits
708 commits: 310834c237 ... 0509e8ecd3
Author | SHA1 | Date |
---|---|---|
bert-e | 0509e8ecd3 | |
Ronnie Smith | 42ccd59526 | |
bert-e | 6c6ee31f34 | |
Ronnie Smith | b58b4d0773 | |
Ronnie Smith | 9a0915d40e | |
Ronnie Smith | 36d3a67a68 | |
Xin LI | 3d156a58dd | |
Xin LI | 7737ec4904 | |
Kerkesni | d18f4d10bd | |
Kerkesni | e0bc4383cd | |
bert-e | de17f221bf | |
Kerkesni | d46301b498 | |
Kerkesni | 0bb2a44912 | |
Guillaume Hivert | 2c1fb773fd | |
Xin.LI | 3528c24276 | |
Xin LI | 6d8294d0c0 | |
Xin LI | 23bfc17a26 | |
bert-e | 0f6a1f2982 | |
Nicolas Humbert | bff13f1190 | |
bert-e | c857e743c8 | |
Kerkesni | 5f8edd35e9 | |
Kerkesni | 3c4359b696 | |
Kerkesni | 8ecf1d9808 | |
Kerkesni | 74e4934654 | |
Kerkesni | eac87fc9de | |
Kerkesni | e2be4d895d | |
bert-e | c0f7ebbaa9 | |
Kerkesni | 60fcedc251 | |
Kerkesni | 10ef395501 | |
Kerkesni | d1c8e67901 | |
Kerkesni | 266aabef37 | |
Kerkesni | b63c909808 | |
Kerkesni | 02ee339214 | |
Kerkesni | 5ca7f86350 | |
Kerkesni | 50a4fd8dc1 | |
bert-e | 5de0c2a7da | |
Kerkesni | b942516dca | |
Kerkesni | 54181af522 | |
Kerkesni | 21af204956 | |
Kerkesni | 68a27be345 | |
Kerkesni | 06350ffe15 | |
Taylor McKinnon | 5da4cd88ff | |
bert-e | 6bb68ee0e3 | |
Taylor McKinnon | 9a4bae40e6 | |
bert-e | 54e9635cab | |
Vianney Rancurel | b8f803338b | |
Guillaume Hivert | 4a1215adb5 | |
Guillaume Hivert | fc8d7532c6 | |
Guillaume Hivert | 1818bfe6c8 | |
Guillaume Hivert | 5cd929ea8a | |
Guillaume Hivert | 1138ce43af | |
Guillaume Hivert | 8b4e9cc0aa | |
Guillaume Hivert | ff6ea2a6d5 | |
Guillaume Hivert | 3b3600db92 | |
bert-e | 51c5247d01 | |
Vianney Rancurel | 7813a312b5 | |
Thomas Carmet | 35a4552c0f | |
Vianney Rancurel | 0dbdff3a00 | |
bert-e | 80b91d724d | |
bert-e | 40843d4bed | |
bert-e | b3fd77d08f | |
Taylor McKinnon | ed6bc63e75 | |
Rached Ben Mustapha | c95f84e887 | |
Nicolas Humbert | 3c9ab1bb99 | |
Nicolas Humbert | 3c30adaf85 | |
bert-e | 98edeae3f2 | |
bert-e | 4f15e4f267 | |
Xin LI | 68c5b42e6f | |
Xin LI | 6933bb8422 | |
Xin LI | 7e180fcad8 | |
Naren | 41d482cf7d | |
Nicolas Humbert | 1e334924f9 | |
Naren | 49239cc76e | |
williamlardier | 8d17fcac0f | |
williamlardier | 1c3fcc5a65 | |
Ronnie Smith | f5b0f1e082 | |
williamlardier | 708aab707d | |
williamlardier | 3a1cbdeedb | |
bert-e | faf5701248 | |
Ronnie Smith | 4cbb5a5dd6 | |
bert-e | 22eca9b61c | |
Naren | 59a679831b | |
bert-e | 26da124e27 | |
bert-e | 47b121c17b | |
Ronnie Smith | c605c1e1a2 | |
bert-e | 994bd0a6be | |
Ronnie Smith | 1e2a6c387e | |
Ronnie Smith | 1348fc820f | |
Ronnie Smith | 79a363786f | |
bert-e | 86e3c02126 | |
bert-e | 8f6731aa6a | |
Artem Bakalov | ea2f8ebd01 | |
Artem Bakalov | b640bbb45e | |
Taylor McKinnon | d9fcf275ce | |
Ronnie Smith | 66b03695c3 | |
Rahul Padigela | 3575e651e3 | |
Rahul Padigela | fa19a34306 | |
Xin LI | 3ab7ef4e8d | |
Xin LI | e531d3eae1 | |
Nicolas Humbert | 9ebcc9690e | |
Nicolas Humbert | 95759509cb | |
williamlardier | 6cdae52d57 | |
williamlardier | 995cb59db4 | |
Alexander Chan | 385e34b472 | |
Jonathan Gramain | f102c5ec8c | |
bert-e | e912617f02 | |
williamlardier | 3abde0bc74 | |
bert-e | cf49c7d8bf | |
Alexander Chan | e6e49a70c9 | |
Rached Ben Mustapha | 77f971957b | |
Ronnie Smith | ed1d6c12c2 | |
williamlardier | 27f17f9535 | |
williamlardier | 4658651593 | |
Jonathan Gramain | 7af6a73b3b | |
bert-e | 8728ff5c80 | |
Ronnie Smith | 7c16652e57 | |
bert-e | 5a9d667936 | |
Rahul Padigela | 29dd069a5f | |
Rahul Padigela | f1793bfe51 | |
Rahul Padigela | b42f1d3943 | |
Naren | c27b359fba | |
Alexandre Lavigne | bb8bdbc6ea | |
Nicolas Humbert | 413f0c9433 | |
Nicolas Humbert | ab3fa2f13d | |
Naren | bfbda5d38b | |
Naren | 2e6b1791bb | |
Naren | 1f8cfecf43 | |
Alexandre Lavigne | 6a250feea9 | |
Thomas Carmet | 0a33d4b74e | |
Thomas Carmet | 9a544b9890 | |
Ronnie Smith | a2b6846e2e | |
Ronnie Smith | 3fdfc7196b | |
Ronnie Smith | f602fb9601 | |
Thomas Carmet | c237a25448 | |
Thomas Carmet | 5aaec6a4e6 | |
Thomas Carmet | 11278e7334 | |
bert-e | c0fe2efbc2 | |
Jonathan Gramain | b0633d8a13 | |
bert-e | b27caf5814 | |
bert-e | f5f6cb5692 | |
bert-e | 87ba4a7b4a | |
bert-e | 9ff605f875 | |
Thomas Carmet | 4e160db87d | |
bert-e | dc698f4d5c | |
bert-e | 8c7907f753 | |
bert-e | 395a881d92 | |
bert-e | 3d6306d2a3 | |
bert-e | 681740fbe7 | |
Alexander Chan | d381ec14d8 | |
bert-e | 0bdcd866bc | |
Jonathan Gramain | 856a1634d4 | |
Jonathan Gramain | 2921864aac | |
bert-e | 4665f3da5c | |
Jonathan Gramain | 0df0d952d2 | |
bert-e | 54eb3ede5f | |
bert-e | be4dea481d | |
Rached Ben Mustapha | d15e2d5df6 | |
Taylor McKinnon | 93503cf505 | |
bert-e | 0f63de2f05 | |
bert-e | 16a5e6a550 | |
Rached Ben Mustapha | 864d2e8a28 | |
vrancurel | 15703aafca | |
bert-e | db000bc5e1 | |
vrancurel | 06c35c15a5 | |
bert-e | 68c8189f53 | |
bert-e | 041731e6eb | |
Nicolas Humbert | d51361ce06 | |
Nicolas Humbert | 453fd8b722 | |
bert-e | 2621aa7e53 | |
bert-e | b4aeab77b9 | |
bert-e | e1a3b05330 | |
bert-e | 0151504158 | |
bert-e | 048e8b02bc | |
bert-e | 1d899efec8 | |
Taylor McKinnon | 4cb8f715e9 | |
bert-e | 580dda4d48 | |
bert-e | a17054e3a4 | |
bert-e | a8df2b7b96 | |
Taylor McKinnon | d572fc953b | |
Alexander Chan | 2a78d4f413 | |
Alexander Chan | d2c7165214 | |
bert-e | 1999a586fd | |
bert-e | a1c0dd2472 | |
bert-e | a22032f9a5 | |
bert-e | dd38e32797 | |
bert-e | 274bf80720 | |
Ronnie Smith | 25bd1f6111 | |
Jonathan Gramain | 2d41b034aa | |
Rached Ben Mustapha | bb8ec629bf | |
Rached Ben Mustapha | 4bbaa83b87 | |
bert-e | 58697f7915 | |
Ronnie Smith | bf4a6fe01b | |
alexandre merle | c703ba66e7 | |
alexandre merle | 20c77f9f85 | |
alexandre merle | edb27cc9a8 | |
alexandre merle | 79e0dfa38f | |
alexandre merle | e1118803e6 | |
bert-e | 1230e72c49 | |
bert-e | 372df634c4 | |
bert-e | 2b96888eb7 | |
bert-e | a0909885f1 | |
alexandre merle | 5d100645aa | |
bert-e | 356edf8478 | |
bert-e | 1cfb869631 | |
bert-e | 0403ca65fc | |
Rahul Padigela | 269e005198 | |
bert-e | 10627f51d1 | |
bert-e | aa5f714081 | |
Jonathan Gramain | d27c0577ee | |
Jonathan Gramain | ff539645ea | |
Jonathan Gramain | e5c3bb188a | |
Jonathan Gramain | 2461b5c2f7 | |
Jonathan Gramain | 747307cac2 | |
Jonathan Gramain | 5942d9d70c | |
bert-e | 8ed84786fc | |
bert-e | 1e40e76bb2 | |
bert-e | f4058dd6ef | |
bert-e | 04f7692bad | |
bert-e | 32752ac504 | |
vrancurel | 549f187893 | |
bert-e | 93cd582e3a | |
vrancurel | 2582108f97 | |
bert-e | b25867f9c2 | |
bert-e | 7b60166d08 | |
bert-e | 8887a67261 | |
Ronnie Smith | 437ecc57f9 | |
bert-e | 759f0ef949 | |
bert-e | 0014aa3467 | |
Dora Korpar | 1727f4bd3f | |
Dora Korpar | d71c8eac86 | |
bert-e | 7eb6304956 | |
bert-e | ce98e9d104 | |
bert-e | 36d932bbce | |
bert-e | 7f2c40cf6d | |
bert-e | 6a78af0f39 | |
bert-e | f73dc3dd68 | |
Jonathan Gramain | 8ec0611d08 | |
Jonathan Gramain | 6baca6f1e2 | |
bert-e | 78d62636c3 | |
Dora Korpar | 9b8f813d02 | |
Dora Korpar | 0f70366774 | |
bert-e | fb8cf65091 | |
Jonathan Gramain | 7792f7c603 | |
bert-e | 668d90b7d0 | |
bert-e | c1cfc59a0e | |
bert-e | f956b02387 | |
Jonathan Gramain | 86bca2502e | |
bert-e | 3aa49eed1d | |
Jonathan Gramain | a9c3b2218f | |
Jonathan Gramain | f459498e18 | |
bert-e | 55323aa7a2 | |
bert-e | a20e875908 | |
bert-e | a3a83f5ec8 | |
bert-e | 51d3312de8 | |
Ilke | 6383d14d49 | |
Jonathan Gramain | 0e4035d45b | |
Jonathan Gramain | a18285ced8 | |
Rahul Padigela | dc4e1829fc | |
bert-e | 3b438e03cd | |
bert-e | f2787ec013 | |
bert-e | 560ccef3ec | |
Dora Korpar | 3f4ed31153 | |
Jonathan Gramain | fc23f68d0f | |
bert-e | 2a4da20c0a | |
bert-e | 14c4696482 | |
bert-e | 275226278f | |
bert-e | b4b5712df7 | |
bert-e | 750c021c37 | |
bert-e | ee4d94c0fb | |
bert-e | 98f1d219a9 | |
Dora Korpar | fb363030c0 | |
Dora Korpar | 7aeb32e223 | |
bert-e | 5bdee7eb8a | |
bert-e | b8fd646097 | |
bert-e | a9d6e05c6e | |
Ilke | dc412e8953 | |
bert-e | 36b68be051 | |
bert-e | 3f19a00b32 | |
bert-e | ea8166cf7a | |
bert-e | c06f735e82 | |
bert-e | b8c4ae4203 | |
Dora Korpar | 0cf9a9cdd5 | |
bert-e | d201e572fd | |
bert-e | 400dc24281 | |
bert-e | f59cea6b34 | |
bert-e | f19feb949d | |
Jonathan Gramain | bbef1964d7 | |
bert-e | 43cd5f59b0 | |
bert-e | dd7390ade6 | |
Dora Korpar | a3739cc836 | |
bert-e | 97682f56bf | |
bert-e | ce4ca533e2 | |
bert-e | 26bff09887 | |
Pepijn Van Eeckhoudt | f6165146ec | |
Ilke | 9f580444f3 | |
Ilke | 93fe6fa94d | |
Jonathan Gramain | d9ff2c2060 | |
bert-e | e553342616 | |
Ilke | 8a9dbc4de7 | |
Jonathan Gramain | 81d05b6ea8 | |
bert-e | 44b8de565f | |
vrancurel | 3ed66c50f6 | |
bert-e | 90e1cff9f9 | |
Jonathan Gramain | 9f323b32ea | |
bert-e | dee53c8ad8 | |
bert-e | 9680071e1a | |
bert-e | 6dd3aa92a4 | |
bert-e | a9618bc0bb | |
bert-e | b6042035c0 | |
bert-e | d2fafe8ef3 | |
bert-e | fb18cba367 | |
bert-e | bab9d5dc24 | |
Alexander Chan | e531e5e711 | |
bert-e | f54d356669 | |
Jonathan Gramain | c1bb2ac058 | |
Jonathan Gramain | d76eeeea89 | |
Alexander Chan | ad58f66981 | |
bert-e | 85b5599ce2 | |
Dora Korpar | 3121d29140 | |
Jonathan Gramain | a75db3122f | |
bert-e | d994e2ae60 | |
Rached Ben Mustapha | c443793968 | |
Rached Ben Mustapha | 517a034291 | |
Rached Ben Mustapha | cc6671f37c | |
Rached Ben Mustapha | 87bb3126a3 | |
bert-e | cedd08686a | |
bert-e | 635d2fe6d9 | |
Jianqin Wang | 9557e36438 | |
bert-e | 2bb0e171d8 | |
bert-e | 68f5d3c9f2 | |
vrancurel | 71caf08c19 | |
Guillaume Gimenez | 38403b84aa | |
Jianqin Wang | 21610dd88d | |
bbuchanan9 | 7566d1f0a9 | |
bbuchanan9 | 28415a5c9b | |
Taylor McKinnon | 506a9ad37d | |
bert-e | 1c6e56e8ef | |
bbuchanan9 | 9d02f86cf5 | |
bert-e | 5c4547a3a9 | |
bbuchanan9 | 5de85713ef | |
Rahul Padigela | 68defde532 | |
Dora Korpar | 9e5d4ae95b | |
Dora Korpar | 633ce2c069 | |
Dora Korpar | 08ddc07d1c | |
Katherine Laue | bc6c9c8c36 | |
bert-e | 3dc9b958f7 | |
vrancurel | 4b5c0ff923 | |
vrancurel | 62536f66df | |
bert-e | 9032b89e6f | |
vrancurel | 9014761c70 | |
bert-e | 8d9864264d | |
Rahul Padigela | 839182292c | |
Rahul Padigela | a197b2b6a4 | |
bert-e | adf6cfc8e4 | |
bert-e | 40aa7d836f | |
bert-e | 4fa15fce2a | |
bert-e | 279f08c870 | |
anurag4dsb | 05a8475f1c | |
anurag4dsb | 8c664d9076 | |
Jianqin Wang | 77172f33f8 | |
Guillaume Gimenez | 0a0fe7f1da | |
Salim | 6d7437a776 | |
bert-e | 1a6174dadf | |
vrancurel | c57cde88bb | |
Rahul Padigela | 6e97c01edd | |
Rahul Padigela | dd6fde61bb | |
Benoit A | 3e8c43e05b | |
Nicolas Humbert | 633efcbc50 | |
Alexander Chan | d99b430ac4 | |
philipyoo | 8f71d4ff03 | |
Rahul Padigela | d0f77cee75 | |
bert-e | 4419db7b23 | |
Rahul Padigela | 3672df0fc4 | |
Dora Korpar | 9b223bea87 | |
Guillaume Gimenez | b7dfc3a9c0 | |
Dora Korpar | 787f66458f | |
Dora Korpar | 618b179d5c | |
bert-e | e6ddad1193 | |
bert-e | 6575be0050 | |
Jianqin Wang | 1f7263c320 | |
Jianqin Wang | 9da1a8e1f7 | |
Jianqin Wang | 14f8690a9a | |
Jianqin Wang | 700cb4eb48 | |
philipyoo | 7dd4dca7e5 | |
bert-e | a5d248000e | |
Taylor McKinnon | dae12b245b | |
bert-e | c0129eb0d7 | |
philipyoo | bd0d6c1942 | |
Jonathan Gramain | ed2d393e98 | |
bert-e | 886110138a | |
Jonathan Gramain | 397eecb370 | |
bert-e | 3623b992da | |
Jonathan Gramain | 78b64bebed | |
Dora Korpar | e857bb5f5a | |
Benoit A | 9c1dab1055 | |
bert-e | e18850911e | |
Jonathan Gramain | 2ff9cf866d | |
bbuchanan9 | cc6ed165dd | |
Dora Korpar | a6b5c21e5d | |
bbuchanan9 | 64426b1450 | |
bert-e | 160fe96b18 | |
Taylor McKinnon | 59290513e3 | |
Rahul Padigela | 6b9be35d8e | |
bbuchanan9 | dffcbefe9b | |
bbuchanan9 | c470cfb5b1 | |
philipyoo | abcff1b04e | |
bbuchanan9 | 6791d1b561 | |
bert-e | a8e0a30918 | |
philipyoo | 487fe8bf35 | |
bert-e | b7c84ef7d3 | |
bert-e | b55295818f | |
philipyoo | 0213bcfd25 | |
bert-e | 32b0946679 | |
JianqinWang | bef886d8ad | |
philipyoo | d44c2f123e | |
bert-e | f199d52c54 | |
bert-e | b9c419dde7 | |
bert-e | 5cf3948ba2 | |
bert-e | 226088c8fb | |
Rahul Padigela | bca10414bc | |
bert-e | 8f0cab8d91 | |
Jonathan Gramain | 40c234bb5f | |
bert-e | 26e2b5e425 | |
bert-e | df5a61cb8d | |
bert-e | b01a390c46 | |
Guillaume Gimenez | 87103f83e1 | |
bert-e | 9ba5d64cd2 | |
bert-e | f4d4c9b76e | |
bert-e | 2c149ea9b1 | |
philipyoo | 735ad74bda | |
bert-e | 1636c87556 | |
bert-e | 8e2d6d42a8 | |
bert-e | f11d6e223d | |
philipyoo | ebe2d1f24d | |
bert-e | 6a1bc69336 | |
bert-e | 0144158a37 | |
bert-e | aea19c9cc2 | |
bert-e | daaeb5637a | |
Dora Korpar | c479933448 | |
JianqinWang | f804aa9657 | |
Jonathan Gramain | ad35b9ec78 | |
Jonathan Gramain | 9fe0ba5c8c | |
bert-e | 2fe1e4da3c | |
bert-e | 6a4784417f | |
bert-e | 0ed8c750c9 | |
bert-e | 0d33e5a69f | |
bert-e | ac470f4233 | |
bert-e | 23d406dc81 | |
JianqinWang | f11ccbfefa | |
bert-e | c8c0527f65 | |
JianqinWang | d81d309420 | |
Dora Korpar | c657b4b469 | |
Dora Korpar | 65c99ff86d | |
Jonathan Gramain | 645433ed0c | |
JianqinWang | f9bb82ce43 | |
bert-e | ab4500b842 | |
bert-e | 40a802b715 | |
Giacomo Guiulfo | 84bf7bd511 | |
Giacomo Guiulfo | b5fa54ec11 | |
Bennett Buchanan | 58e9f26ae0 | |
Giacomo Guiulfo | d6fdd153aa | |
Giacomo Guiulfo | 1e05f0f54e | |
Giacomo Guiulfo | 9c66b7ceba | |
bert-e | 0555d0b41a | |
Guillaume Gimenez | 39f2a53beb | |
Bennett Buchanan | 0a75792ca6 | |
bert-e | 5225fc231d | |
Guillaume Gimenez | 30c3ce1e2b | |
Taylor McKinnon | aa157c6d13 | |
Bennett Buchanan | 699890d2d7 | |
Jonathan Gramain | ea1a7d4d87 | |
bert-e | a9297e707a | |
Bennett Buchanan | 75dccc528d | |
bert-e | 5d7cf78eda | |
Giacomo Guiulfo | 0a364fe379 | |
Rahul Padigela | 345031f2bd | |
greenkeeper[bot] | 0bc1fe1a71 | |
greenkeeper[bot] | f23e457b83 | |
greenkeeper[bot] | 09aca2dcf4 | |
greenkeeper[bot] | d304334e92 | |
greenkeeper[bot] | 7955b97810 | |
Rahul Padigela | d14cef843b | |
Dora Korpar | f2b39fb3d7 | |
Dora Korpar | 9a009746be | |
Jeremy Desanlis | 3e08bad2da | |
philipyoo | 13b156b226 | |
JianqinWang | 07f655c2f8 | |
JianqinWang | f496cec8bf | |
bert-e | 7f5413699d | |
Jonathan Gramain | d620fef517 | |
Jonathan Gramain | 8ac3cf5548 | |
Giacomo Guiulfo | ebd9a74666 | |
bert-e | a1f9bef60e | |
philipyoo | 899107913c | |
Jonathan Gramain | 18dfc6b4fa | |
Rahul Padigela | 9fe16c64fa | |
vrancurel | 3dee6e2d0b | |
vrancurel | 3545eb4d62 | |
Dora Korpar | 0a85eeb8b7 | |
Dora Korpar | 83759870f2 | |
Alexander Chan | 0d4bf3c17f | |
Alexander Chan | 0117b39dcf | |
Bennett Buchanan | 549ca1f683 | |
bert-e | e4a66343fb | |
philipyoo | a89fdde6fd | |
philipyoo | 872a2d88e5 | |
philipyoo | 0c9c462634 | |
philipyoo | a3973ac7d3 | |
bert-e | d1a8693fe5 | |
Jeremy Desanlis | 5687a48599 | |
Nicolas Humbert | 9dca871e1b | |
philipyoo | 7088812c80 | |
philipyoo | 9f742d4921 | |
bert-e | 2c31728905 | |
Bennett Buchanan | 125ccbbfa9 | |
bert-e | 40c8b37b30 | |
bert-e | 879075e4ec | |
philipyoo | 79ed68ce9f | |
bert-e | cbfacb5ec0 | |
philipyoo | 06dfdd9612 | |
philipyoo | bf95506495 | |
Alexander Chan | db743f8269 | |
Alexander Chan | a2311bb69c | |
Alexander Chan | c8f323237f | |
Rahul Padigela | 5cf55fcb68 | |
Rahul Padigela | de94a0e62e | |
Rahul Padigela | 2b13994795 | |
Rahul Padigela | 769a461178 | |
Rahul Padigela | c11fc1d9d8 | |
bert-e | b8ad86a1f1 | |
Giacomo Guiulfo | 12c4df722b | |
bert-e | f566e32322 | |
philipyoo | 6413c92fbc | |
bert-e | 29182cce05 | |
Jonathan Gramain | 9fb5b8b10d | |
vrancurel | 5631a892c6 | |
Rahul Padigela | dfcdea46fc | |
Rahul Padigela | be02e59bfe | |
Rahul Padigela | fdbeed1c4e | |
bert-e | 91fbc3fd23 | |
philipyoo | 241338bcfa | |
Rached Ben Mustapha | 6db80e9411 | |
bert-e | d701352635 | |
Alexander Chan | b291ccc03f | |
Bennett Buchanan | 0426f44dee | |
Rahul Padigela | 1b9242788a | |
Bennett Buchanan | 1a2ea2f353 | |
Bennett Buchanan | c36280a6e8 | |
bert-e | c749725410 | |
Alexander Chan | 3d06ec6230 | |
Jonathan Gramain | 159ebb4283 | |
Alexander Chan | e17333b19e | |
philipyoo | b3b22292c4 | |
bert-e | 68d27ed5bf | |
bert-e | 1e79964253 | |
philipyoo | 5f76343c2e | |
Alexander Chan | d907c9942d | |
Alexander Chan | c63b0713c0 | |
Alexander Chan | 6a9a88800a | |
Dora Korpar | 5834f15397 | |
bert-e | b50f6c4678 | |
bert-e | edeab02107 | |
David Pineau | c64cccdf55 | |
vrancurel | af2b3a4bc3 | |
philipyoo | 1e9ad08830 | |
David Pineau | 9e66fda610 | |
Rahul Padigela | 888e154f0e | |
Nicolas Humbert | 8448f909e4 | |
bert-e | 2b16e84733 | |
philipyoo | a1a6f65364 | |
bert-e | 7cf0c97d8e | |
Taylor McKinnon | 10e7b976d5 | |
vrancurel | e80ea95ad8 | |
Jeremy Desanlis | 7075318dd2 | |
bert-e | 38f68fba1a | |
vrancurel | 16f9a6f5f6 | |
bert-e | c48e4b89bd | |
Bennett Buchanan | 2a8169e936 | |
Alexander Chan | 1af67fffc7 | |
Guillaume Gimenez | e9ac11b1fe | |
bert-e | 30dcd6ef86 | |
Alexander Chan | 2ce9db4e01 | |
philipyoo | 9e234e2b41 | |
philipyoo | 83a831f512 | |
Guillaume Gimenez | 32c2a6fe99 | |
Rahul Padigela | 063361377c | |
Rahul Padigela | ea7f28c82d | |
Rahul Padigela | a9e760b32e | |
Rahul Padigela | 3b16a307b8 | |
Rahul Padigela | f8dfa378a1 | |
Jonathan Gramain | e16eadb474 | |
Rahul Padigela | 5bf7fef53c | |
philipyoo | 659aee2fc2 | |
Rahul Padigela | bde52ab89b | |
Jonathan Gramain | 0ddb4da8a9 | |
Rached Ben Mustapha | 56e280236b | |
Rached Ben Mustapha | f904f04401 | |
Rahul Padigela | db45fee9e8 | |
JianqinWang | ecc431c715 | |
JianqinWang | 6f694ae7f4 | |
Rahul Padigela | e7862d3922 | |
Jonathan Gramain | de7ebf70d7 | |
Rahul Padigela | 1425f03c1e | |
Alexander Chan | ad527911a2 | |
Rahul Padigela | 6c528688ee | |
Nicolas Humbert | e53aa2efd2 | |
Rahul Padigela | 873bc9b647 | |
Nicolas Humbert | 160b960607 | |
Rahul Padigela | 843bd1fe13 | |
Alexander Chan | 93a2a79699 | |
Rahul Padigela | ef32d5e94d | |
Alexander Chan | 45d9c3d999 | |
Rahul Padigela | a2ce46d8d0 | |
anurag4DSB | 0c0bffa2c3 | |
ironman-machine | d966c0bda9 | |
Rahul Padigela | cb86a857cc | |
Alexander Chan | 55c9441bd7 | |
David Pineau | cae55a65c8 | |
philipyoo | 114cbf5571 | |
Alexander Chan | f2bab3b3d6 | |
philipyoo | 3276d235bb | |
philipyoo | ee2aed10f3 | |
Rahul Padigela | 19bee770ea | |
Rahul Padigela | e0c5d03436 | |
Rahul Padigela | c8a7148645 | |
Rahul Padigela | 8ca5dce4fe | |
Bennett Buchanan | 599fb5709b | |
Rahul Padigela | 1161d5f75d | |
Rahul Padigela | 26b6c5d1d9 | |
Bennett Buchanan | 8fd50cd20e | |
Rahul Padigela | 1f6b5bf2bd | |
Rached Ben Mustapha | a7813daea9 | |
Rahul Padigela | 5d4eb84425 | |
Alexander Chan | 9511fff479 | |
Rahul Padigela | d70f64a6d0 | |
Alexander Chan | ee66dc811c | |
Rahul Padigela | 2710471726 | |
Dora Korpar | 9aee9f6cf0 | |
Rahul Padigela | a168fab266 | |
Dora Korpar | 92da4c90e5 | |
Rahul Padigela | a95d5ea15d | |
Salim | aad05faa12 | |
Rahul Padigela | ab230ebfe7 | |
Salim | b3103e1307 | |
Salim | f3b0091210 | |
Rahul Padigela | f633b91072 | |
Alexander Chan | 87807462dc | |
Rahul Padigela | d7f114d504 | |
Rached Ben Mustapha | 5ef168e654 | |
Rahul Padigela | 82b4055c6c | |
Rached Ben Mustapha | 91ccccfe85 | |
Rached Ben Mustapha | 696999874b | |
Rached Ben Mustapha | d2bed3bf9a | |
Rahul Padigela | ad42baa5ff | |
Rached Ben Mustapha | 6ac92b2ad2 | |
Rahul Padigela | 13dbf48867 | |
Rached Ben Mustapha | e79ad68e96 | |
Rahul Padigela | a4a5fe0db0 | |
Bennett Buchanan | f838fcc31f | |
VR | eb9dd23b14 | |
JianqinWang | edbf7ab650 | |
Rahul Padigela | e068950903 | |
Rahul Padigela | 1ceb7b264c | |
vrancurel | 5a29aaa10c | |
Rahul Padigela | 7587f7ba25 | |
Rahul Padigela | 795b145594 | |
Jeremy Desanlis | 58f027a693 | |
Rahul Padigela | e09348d658 | |
Alexander Chan | bddb90c6a1 | |
Rahul Padigela | 94efaaccc2 | |
Rahul Padigela | 463a8ebe15 | |
philipyoo | f17ce17857 | |
Rahul Padigela | 3a5250e2e9 | |
ironman-machine | 48cb7b3b05 | |
Nicolas Humbert | 84c4c147a2 | |
Rahul Padigela | 958e818655 | |
philipyoo | 91dd219c47 | |
Alexander Chan | 5f3d478edb | |
Rahul Padigela | 04d56cfdff | |
Rahul Padigela | 73dd529c29 | |
philipyoo | a9aa40c168 | |
ironman-machine | 189194a4e7 | |
JianqinWang | a9a6b2433d | |
JianqinWang | fa19fc8859 | |
JianqinWang | a269619698 | |
Rahul Padigela | da1da43597 | |
Rahul Padigela | caac4e4e7e | |
Rahul Padigela | 67250133dc | |
JianqinWang | d3f3be03ae | |
ironman-machine | 1a9f1afd2c | |
JianqinWang | 9a5afdbc5c | |
JianqinWang | 83cf54512b | |
ironman-machine | 7e3ad64456 | |
Nicolas Humbert | eba0cb6116 | |
Lauren Spiegel | fd23e82ab9 | |
Lauren Spiegel | d7cf5e8ccf | |
flavien-scality | d0f4f95f0d | |
Alexandre Merle | 0e606b1061 | |
ironman-machine | 44ead88d83 | |
vrancurel | d8e1497940 | |
ThibaultRiviere | 4193394340 | |
Thibault Riviere | 0f1b0dad01 | |
ironman-machine | 393d6edc07 | |
vrancurel | 70638eaf7a | |
Lauren Spiegel | 9d0156dfdf | |
Lauren Spiegel | 8d8028b83f | |
Lauren Spiegel | b99fe2cd8d | |
Lauren Spiegel | cc26f288be |
@@ -39,10 +39,14 @@ jobs:
  run: yarn --silent lint -- --max-warnings 0
- name: lint markdown
  run: yarn --silent lint_md
- name: run unit tests
  run: yarn test
- name: add hostname
  run: |
    sudo sh -c "echo '127.0.0.1 testrequestbucket.localhost' >> /etc/hosts"
- name: test and coverage
  run: yarn --silent coverage
- name: run functional tests
  run: yarn ft_test
- uses: codecov/codecov-action@v2
- name: run executables tests
  run: yarn install && yarn test
  working-directory: 'lib/executables/pensieveCreds/'
@@ -1,5 +1,7 @@

# Arsenal

[![codecov](https://codecov.io/gh/scality/Arsenal/branch/development/8.1/graph/badge.svg?token=X0esXhJSwb)](https://codecov.io/gh/scality/Arsenal)

Common utilities for the S3 project components

Within this repository, you will be able to find the shared libraries for the
@@ -85,6 +85,66 @@ Used to store the bucket lifecycle configuration info

### Properties Added

```javascript
this._uid = uid || uuid();
```

### Usage

Used to set a unique identifier on a bucket

## Model version 8

### Properties Added

```javascript
this._readLocationConstraint = readLocationConstraint || null;
```

### Usage

Used to store default read location of the bucket

## Model version 9

### Properties Added

```javascript
this._isNFS = isNFS || null;
```

### Usage

Used to determine whether the bucket may be accessed through NFS

## Model version 10

### Properties Added

```javascript
this._ingestion = ingestionConfig || null;
```

### Usage

Used to store the ingestion status of a bucket

## Model version 11

### Properties Added

```javascript
this._azureInfo = azureInfo || null;
```

### Usage

Used to store Azure storage account specific information

## Model version 12

### Properties Added

```javascript
this._objectLockEnabled = objectLockEnabled || false;
this._objectLockConfiguration = objectLockConfiguration || null;

@@ -95,7 +155,7 @@ this._objectLockConfiguration = objectLockConfiguration || null;

Used to determine whether object lock capabilities are enabled on a bucket and
to store the object lock configuration of the bucket

## Model version 8
## Model version 13

### Properties Added

@@ -107,7 +167,7 @@ this._notificationConfiguration = notificationConfiguration || null;

Used to store the bucket notification configuration info

## Model version 9
## Model version 14

### Properties Added

@@ -118,15 +178,3 @@ this._serverSideEncryption.configuredMasterKeyId = configuredMasterKeyId || unde

### Usage

Used to store the users configured KMS key id

## Model version 10

### Properties Added

```javascript
this._uid = uid || uuid();
```

### Usage

Used to set a unique identifier on a bucket
@@ -26,7 +26,7 @@
    },
    "BucketAlreadyOwnedByYou": {
        "code": 409,
        "description": "Your previous request to create the named bucket succeeded and you already own it. You get this error in all AWS regions except US Standard, us-east-1. In us-east-1 region, you will get 200 OK, but it is no-op (if bucket exists S3 will not do anything)."
        "description": "A bucket with this name exists and is already owned by you"
    },
    "BucketNotEmpty": {
        "code": 409,

@@ -403,6 +403,10 @@
        "code": 409,
        "description": "The request was rejected because it attempted to create a resource that already exists."
    },
    "KeyAlreadyExists": {
        "code": 409,
        "description": "The request was rejected because it attempted to create a resource that already exists."
    },
    "ServiceFailure": {
        "code": 500,
        "description": "Server error: the request processing has failed because of an unknown error, exception or failure."

@@ -760,5 +764,10 @@
    "ReadOnly": {
        "description": "trying to write to read only back-end",
        "code": 403
    },
    "_comment": "----------------------- authbackend -----------------------",
    "AuthMethodNotImplemented": {
        "description": "AuthMethodNotImplemented",
        "code": 501
    }
}
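The new `AuthMethodNotImplemented` entry is what the authentication base backend added later in this change returns for methods a backend does not implement. A minimal sketch of consuming such an entry, assuming the usual Arsenal error objects that expose the `code` and `description` fields shown above:

```javascript
// Sketch only: assumes lib/errors.js builds one error object per entry in
// arsenalErrors.json, carrying the `code` and `description` fields.
const errors = require('./lib/errors.js');

function replyNotImplemented(res) {
    const err = errors.AuthMethodNotImplemented;
    res.writeHead(err.code, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ message: err.description })); // 501
}
```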
@@ -0,0 +1,28 @@
{
    "groups": {
        "default": {
            "packages": [
                "lib/executables/pensieveCreds/package.json",
                "package.json"
            ]
        }
    },
    "branchPrefix": "improvement/greenkeeper.io/",
    "commitMessages": {
        "initialBadge": "docs(readme): add Greenkeeper badge",
        "initialDependencies": "chore(package): update dependencies",
        "initialBranches": "chore(bert-e): whitelist greenkeeper branches",
        "dependencyUpdate": "fix(package): update ${dependency} to version ${version}",
        "devDependencyUpdate": "chore(package): update ${dependency} to version ${version}",
        "dependencyPin": "fix: pin ${dependency} to ${oldVersionResolved}",
        "devDependencyPin": "chore: pin ${dependency} to ${oldVersionResolved}",
        "closes": "\n\nCloses #${number}"
    },
    "ignore": [
        "ajv",
        "eslint",
        "eslint-plugin-react",
        "eslint-config-airbnb",
        "eslint-config-scality"
    ]
}
@@ -0,0 +1,202 @@
module.exports = {
    auth: require('./lib/auth/auth'),
    constants: require('./lib/constants'),
    db: require('./lib/db'),
    errors: require('./lib/errors.js'),
    errorUtils: require('./lib/errorUtils'),
    shuffle: require('./lib/shuffle'),
    stringHash: require('./lib/stringHash'),
    ipCheck: require('./lib/ipCheck'),
    jsutil: require('./lib/jsutil'),
    https: {
        ciphers: require('./lib/https/ciphers.js'),
        dhparam: require('./lib/https/dh2048.js'),
    },
    algorithms: {
        list: require('./lib/algos/list/exportAlgos'),
        listTools: {
            DelimiterTools: require('./lib/algos/list/tools'),
            Skip: require('./lib/algos/list/skip'),
        },
        cache: {
            LRUCache: require('./lib/algos/cache/LRUCache'),
        },
        stream: {
            MergeStream: require('./lib/algos/stream/MergeStream'),
        },
        SortedSet: require('./lib/algos/set/SortedSet'),
    },
    policies: {
        evaluators: require('./lib/policyEvaluator/evaluator.js'),
        validateUserPolicy: require('./lib/policy/policyValidator')
            .validateUserPolicy,
        evaluatePrincipal: require('./lib/policyEvaluator/principal'),
        RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
        requestUtils: require('./lib/policyEvaluator/requestUtils'),
        actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
    },
    Clustering: require('./lib/Clustering'),
    testing: {
        matrix: require('./lib/testing/matrix.js'),
    },
    versioning: {
        VersioningConstants: require('./lib/versioning/constants.js')
            .VersioningConstants,
        Version: require('./lib/versioning/Version.js').Version,
        VersionID: require('./lib/versioning/VersionID.js'),
        WriteGatheringManager: require('./lib/versioning/WriteGatheringManager.js'),
        WriteCache: require('./lib/versioning/WriteCache.js'),
        VersioningRequestProcessor: require('./lib/versioning/VersioningRequestProcessor.js'),
    },
    network: {
        http: {
            server: require('./lib/network/http/server'),
            utils: require('./lib/network/http/utils'),
        },
        rpc: require('./lib/network/rpc/rpc'),
        level: require('./lib/network/rpc/level-net'),
        rest: {
            RESTServer: require('./lib/network/rest/RESTServer'),
            RESTClient: require('./lib/network/rest/RESTClient'),
        },
        RoundRobin: require('./lib/network/RoundRobin'),
        probe: {
            ProbeServer: require('./lib/network/probe/ProbeServer'),
            HealthProbeServer:
                require('./lib/network/probe/HealthProbeServer.js'),
            Utils: require('./lib/network/probe/Utils.js'),
        },
        kmip: require('./lib/network/kmip'),
        kmipClient: require('./lib/network/kmip/Client'),
    },
    s3routes: {
        routes: require('./lib/s3routes/routes'),
        routesUtils: require('./lib/s3routes/routesUtils'),
    },
    s3middleware: {
        userMetadata: require('./lib/s3middleware/userMetadata'),
        convertToXml: require('./lib/s3middleware/convertToXml'),
        escapeForXml: require('./lib/s3middleware/escapeForXml'),
        objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
        tagging: require('./lib/s3middleware/tagging'),
        checkDateModifiedHeaders:
            require('./lib/s3middleware/validateConditionalHeaders')
                .checkDateModifiedHeaders,
        validateConditionalHeaders:
            require('./lib/s3middleware/validateConditionalHeaders')
                .validateConditionalHeaders,
        MD5Sum: require('./lib/s3middleware/MD5Sum'),
        NullStream: require('./lib/s3middleware/nullStream'),
        objectUtils: require('./lib/s3middleware/objectUtils'),
        azureHelper: {
            mpuUtils:
                require('./lib/s3middleware/azureHelpers/mpuUtils'),
            ResultsCollector:
                require('./lib/s3middleware/azureHelpers/ResultsCollector'),
            SubStreamInterface:
                require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
        },
        prepareStream: require('./lib/s3middleware/prepareStream'),
        processMpuParts: require('./lib/s3middleware/processMpuParts'),
        retention: require('./lib/s3middleware/objectRetention'),
        lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
    },
    storage: {
        metadata: {
            MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
            bucketclient: {
                BucketClientInterface:
                    require('./lib/storage/metadata/bucketclient/' +
                        'BucketClientInterface'),
                LogConsumer:
                    require('./lib/storage/metadata/bucketclient/LogConsumer'),
            },
            file: {
                BucketFileInterface:
                    require('./lib/storage/metadata/file/BucketFileInterface'),
                MetadataFileServer:
                    require('./lib/storage/metadata/file/MetadataFileServer'),
                MetadataFileClient:
                    require('./lib/storage/metadata/file/MetadataFileClient'),
            },
            inMemory: {
                metastore:
                    require('./lib/storage/metadata/in_memory/metastore'),
                metadata: require('./lib/storage/metadata/in_memory/metadata'),
                bucketUtilities:
                    require('./lib/storage/metadata/in_memory/bucket_utilities'),
            },
            mongoclient: {
                MongoClientInterface:
                    require('./lib/storage/metadata/mongoclient/' +
                        'MongoClientInterface'),
                LogConsumer:
                    require('./lib/storage/metadata/mongoclient/LogConsumer'),
            },
            proxy: {
                Server: require('./lib/storage/metadata/proxy/Server'),
            },
        },
        data: {
            DataWrapper: require('./lib/storage/data/DataWrapper'),
            MultipleBackendGateway:
                require('./lib/storage/data/MultipleBackendGateway'),
            parseLC: require('./lib/storage/data/LocationConstraintParser'),
            file: {
                DataFileStore:
                    require('./lib/storage/data/file/DataFileStore'),
                DataFileInterface:
                    require('./lib/storage/data/file/DataFileInterface'),
            },
            external: {
                AwsClient: require('./lib/storage/data/external/AwsClient'),
                AzureClient: require('./lib/storage/data/external/AzureClient'),
                GcpClient: require('./lib/storage/data/external/GcpClient'),
                GCP: require('./lib/storage/data/external/GCP/GcpService'),
                GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
                GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
                PfsClient: require('./lib/storage/data/external/PfsClient'),
                backendUtils: require('./lib/storage/data/external/utils'),
            },
            inMemory: {
                datastore: require('./lib/storage/data/in_memory/datastore'),
            },
        },
        utils: require('./lib/storage/utils'),
    },
    models: {
        BackendInfo: require('./lib/models/BackendInfo'),
        BucketInfo: require('./lib/models/BucketInfo'),
        BucketAzureInfo: require('./lib/models/BucketAzureInfo'),
        ObjectMD: require('./lib/models/ObjectMD'),
        ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
        ObjectMDAzureInfo: require('./lib/models/ObjectMDAzureInfo'),
        ARN: require('./lib/models/ARN'),
        WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
        ReplicationConfiguration:
            require('./lib/models/ReplicationConfiguration'),
        LifecycleConfiguration:
            require('./lib/models/LifecycleConfiguration'),
        LifecycleRule: require('./lib/models/LifecycleRule'),
        BucketPolicy: require('./lib/models/BucketPolicy'),
        ObjectLockConfiguration:
            require('./lib/models/ObjectLockConfiguration'),
        NotificationConfiguration:
            require('./lib/models/NotificationConfiguration'),
    },
    metrics: {
        StatsClient: require('./lib/metrics/StatsClient'),
        StatsModel: require('./lib/metrics/StatsModel'),
        RedisClient: require('./lib/metrics/RedisClient'),
        ZenkoMetrics: require('./lib/metrics/ZenkoMetrics'),
    },
    pensieve: {
        credentialUtils: require('./lib/executables/pensieveCreds/utils'),
    },
    stream: {
        readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
    },
    patches: {
        locationConstraints: require('./lib/patches/locationConstraints'),
    },
};
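The new `index.js` simply maps every public module of the library onto one export object. A minimal sketch of what consuming it might look like (the property paths follow the export map above; nothing here is part of the change itself):

```javascript
// Sketch only: property paths mirror the export map in index.js above.
const arsenal = require('arsenal');

// error definitions come from lib/errors.js / arsenalErrors.json
console.log(arsenal.errors.BucketAlreadyOwnedByYou.code); // 409

// the auth backends introduced by this change are reachable through auth.backends
const { baseBackend, chainBackend } = arsenal.auth.backends;
```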
index.ts
@@ -2,6 +2,7 @@ export const auth = require('./lib/auth/auth');
export const constants = require('./lib/constants');
export const db = require('./lib/db');
export const errors = require('./lib/errors.js');
export const errorUtils = require('./lib/errorUtils');
export const shuffle = require('./lib/shuffle');
export const stringHash = require('./lib/stringHash');
export const ipCheck = require('./lib/ipCheck');

@@ -14,15 +15,10 @@ export const https = {
};

export const algorithms = {
    list: {
        Basic: require('./lib/algos/list/basic').List,
        Delimiter: require('./lib/algos/list/delimiter').Delimiter,
        DelimiterVersions: require('./lib/algos/list/delimiterVersions').DelimiterVersions,
        DelimiterMaster: require('./lib/algos/list/delimiterMaster').DelimiterMaster,
        MPU: require('./lib/algos/list/MPU').MultipartUploads,
    },
    list: require('./lib/algos/list/exportAlgos'),
    listTools: {
        DelimiterTools: require('./lib/algos/list/tools'),
        Skip: require('./lib/algos/list/skip'),
    },
    cache: {
        LRUCache: require('./lib/algos/cache/LRUCache'),

@@ -58,6 +54,7 @@ export const versioning = {
export const network = {
    http: {
        server: require('./lib/network/http/server'),
        utils: require('./lib/network/http/utils'),
    },
    rpc: require('./lib/network/rpc/rpc'),
    level: require('./lib/network/rpc/level-net'),

@@ -65,10 +62,13 @@ export const network = {
        RESTServer: require('./lib/network/rest/RESTServer'),
        RESTClient: require('./lib/network/rest/RESTClient'),
    },
    RoundRobin: require('./lib/network/RoundRobin'),
    probe: {
        ProbeServer: require('./lib/network/probe/ProbeServer'),
        HealthProbeServer:
            require('./lib/network/probe/HealthProbeServer.js'),
        Utils: require('./lib/network/probe/Utils.js'),
    },
    RoundRobin: require('./lib/network/RoundRobin'),
    kmip: require('./lib/network/kmip'),
    kmipClient: require('./lib/network/kmip/Client'),
};

@@ -84,16 +84,24 @@ export const s3middleware = {
    escapeForXml: require('./lib/s3middleware/escapeForXml'),
    objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
    tagging: require('./lib/s3middleware/tagging'),
    checkDateModifiedHeaders:
        require('./lib/s3middleware/validateConditionalHeaders')
            .checkDateModifiedHeaders,
    validateConditionalHeaders:
        require('./lib/s3middleware/validateConditionalHeaders').validateConditionalHeaders,
        require('./lib/s3middleware/validateConditionalHeaders')
            .validateConditionalHeaders,
    MD5Sum: require('./lib/s3middleware/MD5Sum'),
    NullStream: require('./lib/s3middleware/nullStream'),
    objectUtils: require('./lib/s3middleware/objectUtils'),
    azureHelper: {
        mpuUtils: require('./lib/s3middleware/azureHelpers/mpuUtils'),
        ResultsCollector: require('./lib/s3middleware/azureHelpers/ResultsCollector'),
        SubStreamInterface: require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
        mpuUtils:
            require('./lib/s3middleware/azureHelpers/mpuUtils'),
        ResultsCollector:
            require('./lib/s3middleware/azureHelpers/ResultsCollector'),
        SubStreamInterface:
            require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
    },
    prepareStream: require('./lib/s3middleware/prepareStream'),
    processMpuParts: require('./lib/s3middleware/processMpuParts'),
    retention: require('./lib/s3middleware/objectRetention'),
    lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),

@@ -164,17 +172,24 @@ export const storage = {
};

export const models = {
    BackendInfo: require('./lib/models/BackendInfo'),
    BucketInfo: require('./lib/models/BucketInfo'),
    BucketAzureInfo: require('./lib/models/BucketAzureInfo'),
    ObjectMD: require('./lib/models/ObjectMD'),
    ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
    ObjectMDAzureInfo: require('./lib/models/ObjectMDAzureInfo'),
    ARN: require('./lib/models/ARN'),
    WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
    ReplicationConfiguration: require('./lib/models/ReplicationConfiguration'),
    LifecycleConfiguration: require('./lib/models/LifecycleConfiguration'),
    ReplicationConfiguration:
        require('./lib/models/ReplicationConfiguration'),
    LifecycleConfiguration:
        require('./lib/models/LifecycleConfiguration'),
    LifecycleRule: require('./lib/models/LifecycleRule'),
    BucketPolicy: require('./lib/models/BucketPolicy'),
    ObjectLockConfiguration: require('./lib/models/ObjectLockConfiguration'),
    NotificationConfiguration: require('./lib/models/NotificationConfiguration'),
    ObjectLockConfiguration:
        require('./lib/models/ObjectLockConfiguration'),
    NotificationConfiguration:
        require('./lib/models/NotificationConfiguration'),
};

export const metrics = {

@@ -191,3 +206,7 @@ export const pensieve = {
export const stream = {
    readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
};

export const patches = {
    locationConstraints: require('./lib/patches/locationConstraints'),
};
@@ -91,7 +91,7 @@ class Vault {
                requestContext: serializedRCsArr,
            },
            (err, userInfo) => vaultSignatureCb(err, userInfo,
                params.log, callback)
                params.log, callback),
        );
    }

@@ -146,7 +146,7 @@ class Vault {
                requestContext: serializedRCs,
            },
            (err, userInfo) => vaultSignatureCb(err, userInfo,
                params.log, callback, streamingV4Params)
                params.log, callback, streamingV4Params),
        );
    }
@@ -10,11 +10,13 @@ const constants = require('../constants');
const constructStringToSignV2 = require('./v2/constructStringToSign');
const constructStringToSignV4 = require('./v4/constructStringToSign');
const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601;
const vaultUtilities = require('./in_memory/vaultUtilities');
const backend = require('./in_memory/Backend');
const validateAuthConfig = require('./in_memory/validateAuthConfig');
const AuthLoader = require('./in_memory/AuthLoader');
const vaultUtilities = require('./backends/in_memory/vaultUtilities');
const inMemoryBackend = require('./backends/in_memory/Backend');
const validateAuthConfig = require('./backends/in_memory/validateAuthConfig');
const AuthLoader = require('./backends/in_memory/AuthLoader');
const Vault = require('./Vault');
const baseBackend = require('./backends/base');
const chainBackend = require('./backends/ChainBackend');

let vault = null;
const auth = {};

@@ -192,7 +194,7 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
    .filter(headerName =>
        headerName.startsWith('x-amz-')
        || headerName.startsWith('x-scal-')
        || headerName === 'host'
        || headerName === 'host',
    ).sort().join(';');
const params = { request, signedHeaders, payloadChecksum,
    credentialScope, timestamp, query: data,

@@ -222,10 +224,14 @@ module.exports = {
        constructStringToSignV2,
    },
    inMemory: {
        backend,
        backend: inMemoryBackend,
        validateAuthConfig,
        AuthLoader,
    },
    backends: {
        baseBackend,
        chainBackend,
    },
    AuthInfo,
    Vault,
};
@@ -0,0 +1,189 @@
'use strict'; // eslint-disable-line strict

const assert = require('assert');
const async = require('async');

const errors = require('../../errors');
const BaseBackend = require('./base');

/**
 * Class that provides an authentication backend that will verify signatures
 * and retrieve emails and canonical ids associated with an account using a
 * given list of authentication backends and vault clients.
 *
 * @class ChainBackend
 */
class ChainBackend extends BaseBackend {
    /**
     * @constructor
     * @param {string} service - service id
     * @param {object[]} clients - list of authentication backends or vault clients
     */
    constructor(service, clients) {
        super(service);

        assert(Array.isArray(clients) && clients.length > 0, 'invalid client list');
        assert(clients.every(client =>
            typeof client.verifySignatureV4 === 'function' &&
            typeof client.verifySignatureV2 === 'function' &&
            typeof client.getCanonicalIds === 'function' &&
            typeof client.getEmailAddresses === 'function' &&
            typeof client.checkPolicies === 'function' &&
            typeof client.healthcheck === 'function',
        ), 'invalid client: missing required auth backend methods');
        this._clients = clients;
    }

    /*
     * try task against each client for one to be successful
     */
    _tryEachClient(task, cb) {
        async.tryEach(this._clients.map(client => done => task(client, done)), cb);
    }

    /*
     * apply task to all clients
     */
    _forEachClient(task, cb) {
        async.map(this._clients, task, cb);
    }

    verifySignatureV2(stringToSign, signatureFromRequest, accessKey, options, callback) {
        this._tryEachClient((client, done) => client.verifySignatureV2(
            stringToSign,
            signatureFromRequest,
            accessKey,
            options,
            done,
        ), callback);
    }

    verifySignatureV4(stringToSign, signatureFromRequest, accessKey, region, scopeDate, options, callback) {
        this._tryEachClient((client, done) => client.verifySignatureV4(
            stringToSign,
            signatureFromRequest,
            accessKey,
            region,
            scopeDate,
            options,
            done,
        ), callback);
    }

    static _mergeObjects(objectResponses) {
        return objectResponses.reduce(
            (retObj, resObj) => Object.assign(retObj, resObj.message.body),
            {});
    }

    getCanonicalIds(emailAddresses, options, callback) {
        this._forEachClient(
            (client, done) => client.getCanonicalIds(emailAddresses, options, done),
            (err, res) => {
                if (err) {
                    return callback(err);
                }
                // TODO: atm naive merge, better handling of conflicting email results
                return callback(null, {
                    message: {
                        body: ChainBackend._mergeObjects(res),
                    },
                });
            });
    }

    getEmailAddresses(canonicalIDs, options, callback) {
        this._forEachClient(
            (client, done) => client.getEmailAddresses(canonicalIDs, options, done),
            (err, res) => {
                if (err) {
                    return callback(err);
                }
                return callback(null, {
                    message: {
                        body: ChainBackend._mergeObjects(res),
                    },
                });
            });
    }

    /*
     * merge policy responses into a single message
     */
    static _mergePolicies(policyResponses) {
        const policyMap = {};

        policyResponses.forEach(resp => {
            if (!resp.message || !Array.isArray(resp.message.body)) {
                return;
            }

            resp.message.body.forEach(policy => {
                const key = (policy.arn || '') + (policy.versionId || '');
                if (!policyMap[key] || !policyMap[key].isAllowed) {
                    policyMap[key] = policy;
                }
                // else is duplicate policy
            });
        });

        return Object.keys(policyMap).map(key => {
            const policyRes = { isAllowed: policyMap[key].isAllowed };
            if (policyMap[key].arn !== '') {
                policyRes.arn = policyMap[key].arn;
            }
            if (policyMap[key].versionId) {
                policyRes.versionId = policyMap[key].versionId;
            }
            return policyRes;
        });
    }

    /*
        response format:
            { message: {
                body: [{}],
                code: number,
                message: string,
            } }
     */
    checkPolicies(requestContextParams, userArn, options, callback) {
        this._forEachClient((client, done) => client.checkPolicies(
            requestContextParams,
            userArn,
            options,
            done,
        ), (err, res) => {
            if (err) {
                return callback(err);
            }
            return callback(null, {
                message: {
                    body: ChainBackend._mergePolicies(res),
                },
            });
        });
    }

    healthcheck(reqUid, callback) {
        this._forEachClient((client, done) =>
            client.healthcheck(reqUid, (err, res) => done(null, {
                error: !!err ? err : null,
                status: res,
            }),
        ), (err, res) => {
            if (err) {
                return callback(err);
            }

            const isError = res.some(results => !!results.error);
            if (isError) {
                return callback(errors.InternalError, res);
            }
            return callback(null, res);
        });
    }
}

module.exports = ChainBackend;
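A minimal sketch of how `ChainBackend` might be wired up; `primary` and `fallback` stand for hypothetical clients that implement the interface asserted in the constructor (for instance a Vault client and the in-memory backend shown further down):

```javascript
// Sketch only: `primary` and `fallback` are hypothetical clients exposing
// verifySignatureV2/V4, getCanonicalIds, getEmailAddresses, checkPolicies
// and healthcheck, as required by the constructor assertion above.
const ChainBackend = require('./lib/auth/backends/ChainBackend');

const chain = new ChainBackend('s3', [primary, fallback]);

// each client is tried in turn until one accepts the signature
chain.verifySignatureV4(stringToSign, signature, accessKey, region,
    scopeDate, options, (err, userInfo) => {
        if (err) {
            // every chained client rejected the request
        }
    });
```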
@@ -0,0 +1,86 @@
'use strict'; // eslint-disable-line strict

const errors = require('../../errors');

/**
 * Base backend class
 *
 * @class BaseBackend
 */
class BaseBackend {
    /**
     * @constructor
     * @param {string} service - service identifer for construction arn
     */
    constructor(service) {
        this.service = service;
    }

    /** verifySignatureV2
     * @param {string} stringToSign - string to sign built per AWS rules
     * @param {string} signatureFromRequest - signature sent with request
     * @param {string} accessKey - account accessKey
     * @param {object} options - contains algorithm (SHA1 or SHA256)
     * @param {function} callback - callback with either error or user info
     * @return {function} calls callback
     */
    verifySignatureV2(stringToSign, signatureFromRequest,
        accessKey, options, callback) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /** verifySignatureV4
     * @param {string} stringToSign - string to sign built per AWS rules
     * @param {string} signatureFromRequest - signature sent with request
     * @param {string} accessKey - account accessKey
     * @param {string} region - region specified in request credential
     * @param {string} scopeDate - date specified in request credential
     * @param {object} options - options to send to Vault
     * (just contains reqUid for logging in Vault)
     * @param {function} callback - callback with either error or user info
     * @return {function} calls callback
     */
    verifySignatureV4(stringToSign, signatureFromRequest, accessKey,
        region, scopeDate, options, callback) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets canonical ID's for a list of accounts
     * based on email associated with account
     * @param {array} emails - list of email addresses
     * @param {object} options - to send log id to vault
     * @param {function} callback - callback to calling function
     * @returns {function} callback with either error or
     * object with email addresses as keys and canonical IDs
     * as values
     */
    getCanonicalIds(emails, options, callback) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets email addresses (referred to as diplay names for getACL's)
     * for a list of accounts based on canonical IDs associated with account
     * @param {array} canonicalIDs - list of canonicalIDs
     * @param {object} options - to send log id to vault
     * @param {function} callback - callback to calling function
     * @returns {function} callback with either error or
     * an object from Vault containing account canonicalID
     * as each object key and an email address as the value (or "NotFound")
     */
    getEmailAddresses(canonicalIDs, options, callback) {
        return callback(errors.AuthMethodNotImplemented);
    }

    checkPolicies(requestContextParams, userArn, options, callback) {
        return callback(null, { message: { body: [] } });
    }

    healthcheck(reqUid, callback) {
        return callback(null, { code: 200, message: 'OK' });
    }
}

module.exports = BaseBackend;
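Because `BaseBackend` answers every method with `AuthMethodNotImplemented` (or an empty `checkPolicies`/`healthcheck` response), a concrete backend only has to override what it actually supports. A small, purely hypothetical subclass sketch:

```javascript
// Sketch only: a hypothetical backend that knows a single static key pair
// and falls back to the BaseBackend defaults for everything else.
const BaseBackend = require('./lib/auth/backends/base');

class StaticKeyBackend extends BaseBackend {
    constructor(accessKey, secretKey) {
        super('static-key-backend');
        this._accessKey = accessKey;
        this._secretKey = secretKey;
    }

    verifySignatureV4(stringToSign, signatureFromRequest, accessKey,
        region, scopeDate, options, callback) {
        if (accessKey !== this._accessKey) {
            // a real backend would return an Arsenal error object here
            return callback(new Error('unknown access key'));
        }
        // a real backend would recompute the signature from this._secretKey
        // (see calculateSigningKey in backends/in_memory/vaultUtilities)
        return callback(null, { message: { body: {} } });
    }
}
```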
@@ -3,7 +3,7 @@ const glob = require('simple-glob');
const joi = require('@hapi/joi');
const werelogs = require('werelogs');

const ARN = require('../../models/ARN');
const ARN = require('../../../models/ARN');

/**
 * Load authentication information from files or pre-loaded account
@@ -2,10 +2,11 @@

const crypto = require('crypto');

const errors = require('../../errors');
const errors = require('../../../errors');
const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
const hashSignature = require('./vaultUtilities').hashSignature;
const Indexer = require('./Indexer');
const BaseBackend = require('../base');

function _formatResponse(userInfoToSend) {
    return {

@@ -19,9 +20,9 @@ function _formatResponse(userInfoToSend) {
 * Class that provides a memory backend for verifying signatures and getting
 * emails and canonical ids associated with an account.
 *
 * @class Backend
 * @class InMemoryBackend
 */
class Backend {
class InMemoryBackend extends BaseBackend {
    /**
     * @constructor
     * @param {string} service - service identifer for construction arn

@@ -30,19 +31,11 @@ class Backend {
     * back and returns it in an object
     */
    constructor(service, indexer, formatter) {
        this.service = service;
        super(service);
        this.indexer = indexer;
        this.formatResponse = formatter;
    }

    /** verifySignatureV2
     * @param {string} stringToSign - string to sign built per AWS rules
     * @param {string} signatureFromRequest - signature sent with request
     * @param {string} accessKey - account accessKey
     * @param {object} options - contains algorithm (SHA1 or SHA256)
     * @param {function} callback - callback with either error or user info
     * @return {function} calls callback
     */
    verifySignatureV2(stringToSign, signatureFromRequest,
        accessKey, options, callback) {
        const entity = this.indexer.getEntityByKey(accessKey);

@@ -65,18 +58,6 @@ class Backend {
        return callback(null, vaultReturnObject);
    }

    /** verifySignatureV4
     * @param {string} stringToSign - string to sign built per AWS rules
     * @param {string} signatureFromRequest - signature sent with request
     * @param {string} accessKey - account accessKey
     * @param {string} region - region specified in request credential
     * @param {string} scopeDate - date specified in request credential
     * @param {object} options - options to send to Vault
     * (just contains reqUid for logging in Vault)
     * @param {function} callback - callback with either error or user info
     * @return {function} calls callback
     */
    verifySignatureV4(stringToSign, signatureFromRequest, accessKey,
        region, scopeDate, options, callback) {
        const entity = this.indexer.getEntityByKey(accessKey);

@@ -100,16 +81,6 @@ class Backend {
        return callback(null, vaultReturnObject);
    }

    /**
     * Gets canonical ID's for a list of accounts
     * based on email associated with account
     * @param {array} emails - list of email addresses
     * @param {object} log - log object
     * @param {function} cb - callback to calling function
     * @returns {function} callback with either error or
     * object with email addresses as keys and canonical IDs
     * as values
     */
    getCanonicalIds(emails, log, cb) {
        const results = {};
        emails.forEach(email => {

@@ -130,16 +101,6 @@ class Backend {
        return cb(null, vaultReturnObject);
    }

    /**
     * Gets email addresses (referred to as diplay names for getACL's)
     * for a list of accounts based on canonical IDs associated with account
     * @param {array} canonicalIDs - list of canonicalIDs
     * @param {object} options - to send log id to vault
     * @param {function} cb - callback to calling function
     * @returns {function} callback with either error or
     * an object from Vault containing account canonicalID
     * as each object key and an email address as the value (or "NotFound")
     */
    getEmailAddresses(canonicalIDs, options, cb) {
        const results = {};
        canonicalIDs.forEach(canonicalId => {

@@ -188,7 +149,7 @@ class Backend {
    }

class S3AuthBackend extends Backend {
class S3AuthBackend extends InMemoryBackend {
    /**
     * @constructor
     * @param {object} authdata - the authentication config file's data
@ -43,7 +43,7 @@ function awsURIencode(input, encodeSlash, noEncodeStar) {
|
|||
return encoded;
|
||||
}
|
||||
for (let i = 0; i < input.length; i++) {
|
||||
const ch = input.charAt(i);
|
||||
let ch = input.charAt(i);
|
||||
if ((ch >= 'A' && ch <= 'Z') ||
|
||||
(ch >= 'a' && ch <= 'z') ||
|
||||
(ch >= '0' && ch <= '9') ||
|
||||
|
@ -57,6 +57,20 @@ function awsURIencode(input, encodeSlash, noEncodeStar) {
|
|||
} else if (ch === '*') {
|
||||
encoded = encoded.concat(noEncodeStar ? '*' : '%2A');
|
||||
} else {
|
||||
if (ch >= '\uD800' && ch <= '\uDBFF') {
|
||||
// If this character is a high surrogate peek the next character
|
||||
// and join it with this one if the next character is a low
|
||||
// surrogate.
|
||||
// Otherwise the encoded URI will contain the two surrogates as
|
||||
// two distinct UTF-8 sequences which is not valid UTF-8.
|
||||
if (i + 1 < input.length) {
|
||||
const ch2 = input.charAt(i + 1);
|
||||
if (ch2 >= '\uDC00' && ch2 <= '\uDFFF') {
|
||||
i++;
|
||||
ch += ch2;
|
||||
}
|
||||
}
|
||||
}
|
||||
encoded = encoded.concat(_toHexUTF8(ch));
|
||||
}
|
||||
}
|
||||
|
|
|
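For context on the surrogate-pair handling added above, here is a standalone sketch (not the library's _toHexUTF8 helper) of what happens when a high/low surrogate pair is percent-encoded separately versus joined first; the helper name and the emoji input are illustrative only.

// Standalone illustration, not Arsenal code: percent-encode a string one
// charAt() unit at a time, optionally joining a high surrogate with the
// following low surrogate first (as the change above does).
function percentEncode(str, joinSurrogates) {
    let out = '';
    for (let i = 0; i < str.length; i++) {
        let ch = str.charAt(i);
        if (joinSurrogates && ch >= '\uD800' && ch <= '\uDBFF'
            && i + 1 < str.length) {
            const next = str.charAt(i + 1);
            if (next >= '\uDC00' && next <= '\uDFFF') {
                i++;
                ch += next; // encode the full code point as one UTF-8 sequence
            }
        }
        out += Array.from(Buffer.from(ch, 'utf8'))
            .map(b => `%${b.toString(16).toUpperCase().padStart(2, '0')}`)
            .join('');
    }
    return out;
}

// '\u{1F600}' is the surrogate pair \uD83D\uDE00.
console.log(percentEncode('\u{1F600}', false));
// '%EF%BF%BD%EF%BF%BD' — each lone surrogate becomes U+FFFD, not valid UTF-8 for the code point
console.log(percentEncode('\u{1F600}', true));
// '%F0%9F%98%80' — the correct single 4-byte UTF-8 sequence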
@ -127,6 +127,17 @@ function check(request, log, data, awsService) {
|
|||
return { err: errors.RequestTimeTooSkewed };
|
||||
}
|
||||
|
||||
let proxyPath = null;
|
||||
if (request.headers.proxy_path) {
|
||||
try {
|
||||
proxyPath = decodeURIComponent(request.headers.proxy_path);
|
||||
} catch (err) {
|
||||
log.debug('invalid proxy_path header', { proxyPath, err });
|
||||
return { err: errors.InvalidArgument.customizeDescription(
|
||||
'invalid proxy_path header') };
|
||||
}
|
||||
}
|
||||
|
||||
const stringToSign = constructStringToSign({
|
||||
log,
|
||||
request,
|
||||
|
@ -136,6 +147,7 @@ function check(request, log, data, awsService) {
|
|||
timestamp,
|
||||
payloadChecksum,
|
||||
awsService: service,
|
||||
proxyPath,
|
||||
});
|
||||
log.trace('constructed stringToSign', { stringToSign });
|
||||
if (stringToSign instanceof Error) {
|
||||
|
|
|
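The hunk above (and the matching one below for the query-string variant) adds the same optional proxy_path handling; here is a minimal standalone sketch of just that guard, with the errors object stubbed out — in Arsenal the real one comes from lib/errors.

// Minimal sketch of the proxy_path guard added to both V4 auth checks.
const errors = {
    InvalidArgument: {
        customizeDescription: msg => new Error(`InvalidArgument: ${msg}`),
    },
};

function extractProxyPath(headers, log) {
    let proxyPath = null;
    if (headers.proxy_path) {
        try {
            proxyPath = decodeURIComponent(headers.proxy_path);
        } catch (err) {
            log.debug('invalid proxy_path header', { proxyPath, err });
            return { err: errors.InvalidArgument.customizeDescription(
                'invalid proxy_path header') };
        }
    }
    return { proxyPath };
}

const log = { debug: () => {} };
console.log(extractProxyPath({ proxy_path: '%2Fdata%2Ffile' }, log));
// { proxyPath: '/data/file' }
console.log(extractProxyPath({ proxy_path: '%E0%A4%A' }, log));
// { err: Error } — malformed percent escape is rejected with InvalidArgument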
@ -62,6 +62,17 @@ function check(request, log, data) {
|
|||
return { err: errors.RequestTimeTooSkewed };
|
||||
}
|
||||
|
||||
let proxyPath = null;
|
||||
if (request.headers.proxy_path) {
|
||||
try {
|
||||
proxyPath = decodeURIComponent(request.headers.proxy_path);
|
||||
} catch (err) {
|
||||
log.debug('invalid proxy_path header', { proxyPath });
|
||||
return { err: errors.InvalidArgument.customizeDescription(
|
||||
'invalid proxy_path header') };
|
||||
}
|
||||
}
|
||||
|
||||
// In query v4 auth, the canonical request needs
|
||||
// to include the query params OTHER THAN
|
||||
// the signature so create a
|
||||
|
@ -87,6 +98,7 @@ function check(request, log, data) {
|
|||
credentialScope:
|
||||
`${scopeDate}/${region}/${service}/${requestType}`,
|
||||
awsService: service,
|
||||
proxyPath,
|
||||
});
|
||||
if (stringToSign instanceof Error) {
|
||||
return { err: stringToSign };
|
||||
|
|
|
@ -1,20 +1,21 @@
|
|||
'use strict'; // eslint-disable-line strict
|
||||
const crypto = require('crypto');
|
||||
|
||||
// The min value here is to manage further backward compat if we
|
||||
// need it
|
||||
const iamSecurityTokenSizeMin = 128;
|
||||
const iamSecurityTokenSizeMax = 128;
|
||||
// Security token is a hex string (no real format from Amazon)
|
||||
const iamSecurityTokenPattern =
|
||||
new RegExp(`^[a-f0-9]{${iamSecurityTokenSizeMin},` +
|
||||
`${iamSecurityTokenSizeMax}}$`);
|
||||
// Default value
|
||||
const vaultGeneratedIamSecurityTokenSizeMin = 128;
|
||||
// Safe to assume that a typical token size is less than 8192 bytes
|
||||
const vaultGeneratedIamSecurityTokenSizeMax = 8192;
|
||||
// Base-64
|
||||
const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/;
|
||||
|
||||
module.exports = {
|
||||
// info about the iam security token
|
||||
iamSecurityToken: {
|
||||
min: iamSecurityTokenSizeMin,
|
||||
max: iamSecurityTokenSizeMax,
|
||||
pattern: iamSecurityTokenPattern,
|
||||
min: vaultGeneratedIamSecurityTokenSizeMin,
|
||||
max: vaultGeneratedIamSecurityTokenSizeMax,
|
||||
pattern: vaultGeneratedIamSecurityTokenPattern,
|
||||
},
|
||||
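A quick check against the relaxed token validation exported above: the new regex only constrains the character set, while the min/max fields bound the length separately. The token values below are made up for illustration.

// Illustration only; token values are invented. The relaxed validation
// accepts base64-style Vault tokens between 128 and 8192 characters.
const iamSecurityToken = {
    min: 128,
    max: 8192,
    pattern: /^[A-Za-z0-9/+=]*$/,
};

function looksLikeSecurityToken(token) {
    return token.length >= iamSecurityToken.min
        && token.length <= iamSecurityToken.max
        && iamSecurityToken.pattern.test(token);
}

const hexToken = 'ab'.repeat(64);             // 128 hex chars: passed the old check too
const base64Token = `${'A1+/'.repeat(40)}==`; // 162 chars with base64 symbols: only passes now
console.log(looksLikeSecurityToken(hexToken));    // true
console.log(looksLikeSecurityToken(base64Token)); // true
console.log(looksLikeSecurityToken('tooShort'));  // false — below the 128-character minimum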
// PublicId is used as the canonicalID for a request that contains
|
||||
// no authentication information. Requestor can access
|
||||
|
@ -23,6 +24,7 @@ module.exports = {
|
|||
zenkoServiceAccount: 'http://acs.zenko.io/accounts/service',
|
||||
metadataFileNamespace: '/MDFile',
|
||||
dataFileURL: '/DataFile',
|
||||
passthroughFileURL: '/PassthroughFile',
|
||||
// AWS states max size for user-defined metadata
|
||||
// (x-amz-meta- headers) is 2 KB:
|
||||
// http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
|
||||
|
@ -32,7 +34,10 @@ module.exports = {
|
|||
emptyFileMd5: 'd41d8cd98f00b204e9800998ecf8427e',
|
||||
// Version 2 changes the format of the data location property
|
||||
// Version 3 adds the dataStoreName attribute
|
||||
mdModelVersion: 3,
|
||||
// Version 4 add the Creation-Time and Content-Language attributes,
|
||||
// and add support for x-ms-meta-* headers in UserMetadata
|
||||
// Version 5 adds the azureInfo structure
|
||||
mdModelVersion: 5,
|
||||
/*
|
||||
* Splitter is used to build the object name for the overview of a
|
||||
* multipart upload and to build the object names for each part of a
|
||||
|
@ -72,9 +77,44 @@ module.exports = {
|
|||
permittedCapitalizedBuckets: {
|
||||
METADATA: true,
|
||||
},
|
||||
// Setting a lower object key limit to account for:
|
||||
// - Mongo key limit of 1012 bytes
|
||||
// - Version ID in Mongo Key if versioned of 33
|
||||
// - Max bucket name length if bucket match false of 63
|
||||
// - Extra prefix slash for bucket prefix if bucket match of 1
|
||||
objectKeyByteLimit: 915,
|
||||
/* delimiter for location-constraint. The location constraint will be able
|
||||
* to include the ingestion flag
|
||||
*/
|
||||
zenkoSeparator: ':',
|
||||
/* eslint-disable camelcase */
|
||||
externalBackends: { aws_s3: true, azure: true, gcp: true, pfs: true },
|
||||
/* eslint-enable camelcase */
|
||||
replicationBackends: { aws_s3: true, azure: true, gcp: true },
|
||||
|
||||
// hex digest of sha256 hash of empty string:
|
||||
emptyStringHash: crypto.createHash('sha256')
|
||||
.update('', 'binary').digest('hex'),
|
||||
mpuMDStoredExternallyBackend: { aws_s3: true, gcp: true },
|
||||
// AWS sets a minimum size limit for parts except for the last part.
|
||||
// http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
|
||||
minimumAllowedPartSize: 5242880,
|
||||
gcpMaximumAllowedPartCount: 1024,
|
||||
// GCP Object Tagging Prefix
|
||||
gcpTaggingPrefix: 'aws-tag-',
|
||||
productName: 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko',
|
||||
legacyLocations: ['sproxyd', 'legacy'],
|
||||
// healthcheck default call from nginx is every 2 seconds
|
||||
// for external backends, don't call unless at least 1 minute
|
||||
// (60,000 milliseconds) since last call
|
||||
externalBackendHealthCheckInterval: 60000,
|
||||
// some of the available data backends (if called directly rather
|
||||
// than through the multiple backend gateway) need a key provided
|
||||
// as a string as first parameter of the get/delete methods.
|
||||
clientsRequireStringKey: { sproxyd: true, cdmi: true },
|
||||
hasCopyPartBackends: { aws_s3: true, gcp: true },
|
||||
versioningNotImplBackends: { azure: true, gcp: true },
|
||||
// user metadata applied on zenko-created objects
|
||||
zenkoIDHeader: 'x-amz-meta-zenko-instance-id',
|
||||
// Default expiration value of the S3 pre-signed URL duration
|
||||
// 604800 seconds (seven days).
|
||||
defaultPreSignedURLExpiry: 7 * 24 * 60 * 60,
|
||||
|
@ -91,10 +131,6 @@ module.exports = {
|
|||
's3:ObjectRemoved:DeleteMarkerCreated',
|
||||
]),
|
||||
notificationArnPrefix: 'arn:scality:bucketnotif',
|
||||
// some of the available data backends (if called directly rather
|
||||
// than through the multiple backend gateway) need a key provided
|
||||
// as a string as first parameter of the get/delete methods.
|
||||
clientsRequireStringKey: { sproxyd: true, cdmi: true },
|
||||
// HTTP server keep-alive timeout is set to a higher value than
|
||||
// client's free sockets timeout to avoid the risk of triggering
|
||||
// ECONNRESET errors if the server closes the connection at the
|
||||
|
|
|
@ -7,8 +7,8 @@
|
|||
"test": "mocha --recursive --timeout 5500 tests/unit"
|
||||
},
|
||||
"dependencies": {
|
||||
"mocha": "2.5.3",
|
||||
"async": "^2.6.0",
|
||||
"mocha": "5.2.0",
|
||||
"async": "~2.6.1",
|
||||
"node-forge": "^0.7.1"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -17,11 +17,33 @@ class RedisClient {
|
|||
method: 'RedisClient.constructor',
|
||||
redisHost: config.host,
|
||||
redisPort: config.port,
|
||||
})
|
||||
}),
|
||||
);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* scan a pattern and return matching keys
|
||||
* @param {string} pattern - string pattern to match with all existing keys
|
||||
* @param {number} [count=10] - scan count
|
||||
* @param {callback} cb - callback (error, result)
|
||||
* @return {undefined}
|
||||
*/
|
||||
scan(pattern, count = 10, cb) {
|
||||
const params = { match: pattern, count };
|
||||
const keys = [];
|
||||
|
||||
const stream = this._client.scanStream(params);
|
||||
stream.on('data', resultKeys => {
|
||||
for (let i = 0; i < resultKeys.length; i++) {
|
||||
keys.push(resultKeys[i]);
|
||||
}
|
||||
});
|
||||
stream.on('end', () => {
|
||||
cb(null, keys);
|
||||
});
|
||||
}
|
||||
|
||||
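A hedged usage sketch of the new scan() helper shown above; the require path, host/port and key pattern are assumptions, and the constructor is assumed to take a config object plus a werelogs-style logger as in the snippet above.

// Hypothetical usage of RedisClient.scan(); paths, host and key pattern
// are illustrative. Requires a reachable Redis instance.
const RedisClient = require('./lib/metrics/RedisClient');

const logger = { info: () => {}, error: () => {}, debug: () => {} };
const client = new RedisClient({ host: '127.0.0.1', port: 6379 }, logger);

// Collect all keys matching the pattern, scanning 100 keys per SCAN iteration.
client.scan('s3:*:requests', 100, (err, keys) => {
    // scan() as written above always calls back with (null, keys)
    console.log(`matched ${keys.length} keys`);
    client.disconnect(() => process.exit(0));
});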
/**
|
||||
* increment value of a key by 1 and set a ttl
|
||||
* @param {string} key - key holding the value
|
||||
|
@ -35,6 +57,17 @@ class RedisClient {
|
|||
.exec(cb);
|
||||
}
|
||||
|
||||
/**
|
||||
* increment value of a key by a given amount
|
||||
* @param {string} key - key holding the value
|
||||
* @param {number} amount - amount to increase by
|
||||
* @param {callback} cb - callback
|
||||
* @return {undefined}
|
||||
*/
|
||||
incrby(key, amount, cb) {
|
||||
return this._client.incrby(key, amount, cb);
|
||||
}
|
||||
|
||||
/**
|
||||
* increment value of a key by a given amount and set a ttl
|
||||
* @param {string} key - key holding the value
|
||||
|
@ -50,13 +83,24 @@ class RedisClient {
|
|||
}
|
||||
|
||||
/**
|
||||
* execute a batch of commands
|
||||
* @param {string[]} cmds - list of commands
|
||||
* decrement value of a key by a given amount
|
||||
* @param {string} key - key holding the value
|
||||
* @param {number} amount - amount to increase by
|
||||
* @param {callback} cb - callback
|
||||
* @return {undefined}
|
||||
*/
|
||||
batch(cmds, cb) {
|
||||
return this._client.pipeline(cmds).exec(cb);
|
||||
decrby(key, amount, cb) {
|
||||
return this._client.decrby(key, amount, cb);
|
||||
}
|
||||
|
||||
/**
|
||||
* get value stored at key
|
||||
* @param {string} key - key holding the value
|
||||
* @param {callback} cb - callback
|
||||
* @return {undefined}
|
||||
*/
|
||||
get(key, cb) {
|
||||
return this._client.get(key, cb);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -71,6 +115,16 @@ class RedisClient {
|
|||
return this._client.exists(key, cb);
|
||||
}
|
||||
|
||||
/**
|
||||
* execute a batch of commands
|
||||
* @param {string[]} cmds - list of commands
|
||||
* @param {callback} cb - callback
|
||||
* @return {undefined}
|
||||
*/
|
||||
batch(cmds, cb) {
|
||||
return this._client.pipeline(cmds).exec(cb);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a value and its score to a sorted set. If no sorted set exists, this
|
||||
* will create a new one for the given key.
|
||||
|
@ -150,12 +204,26 @@ class RedisClient {
|
|||
return this._client.zrangebyscore(key, min, max, cb);
|
||||
}
|
||||
|
||||
/**
|
||||
* get TTL or expiration in seconds
|
||||
* @param {string} key - name of key
|
||||
* @param {function} cb - callback
|
||||
* @return {undefined}
|
||||
*/
|
||||
ttl(key, cb) {
|
||||
return this._client.ttl(key, cb);
|
||||
}
|
||||
|
||||
clear(cb) {
|
||||
return this._client.flushdb(cb);
|
||||
}
|
||||
|
||||
disconnect() {
|
||||
this._client.disconnect();
|
||||
disconnect(cb) {
|
||||
return this._client.quit(cb);
|
||||
}
|
||||
|
||||
listClients(cb) {
|
||||
return this._client.client('list', cb);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -41,11 +41,11 @@ class StatsClient {
|
|||
/**
|
||||
* build redis key to get total number of occurrences on the server
|
||||
* @param {string} name - key name identifier
|
||||
* @param {object} d - Date instance
|
||||
* @param {Date} date - Date instance
|
||||
* @return {string} key - key for redis
|
||||
*/
|
||||
_buildKey(name, d) {
|
||||
return `${name}:${this._normalizeTimestamp(d)}`;
|
||||
buildKey(name, date) {
|
||||
return `${name}:${this._normalizeTimestamp(date)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -85,11 +85,35 @@ class StatsClient {
|
|||
amount = (typeof incr === 'number') ? incr : 1;
|
||||
}
|
||||
|
||||
const key = this._buildKey(`${id}:requests`, new Date());
|
||||
const key = this.buildKey(`${id}:requests`, new Date());
|
||||
|
||||
return this._redis.incrbyEx(key, amount, this._expiry, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Increment the given key by the given value.
|
||||
* @param {String} key - The Redis key to increment
|
||||
* @param {Number} incr - The value to increment by
|
||||
* @param {function} [cb] - callback
|
||||
* @return {undefined}
|
||||
*/
|
||||
incrementKey(key, incr, cb) {
|
||||
const callback = cb || this._noop;
|
||||
return this._redis.incrby(key, incr, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Decrement the given key by the given value.
|
||||
* @param {String} key - The Redis key to decrement
|
||||
* @param {Number} decr - The value to decrement by
|
||||
* @param {function} [cb] - callback
|
||||
* @return {undefined}
|
||||
*/
|
||||
decrementKey(key, decr, cb) {
|
||||
const callback = cb || this._noop;
|
||||
return this._redis.decrby(key, decr, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* report/record a request that ended up being a 500 on the server
|
||||
* @param {string} id - service identifier
|
||||
|
@ -101,10 +125,54 @@ class StatsClient {
|
|||
return undefined;
|
||||
}
|
||||
const callback = cb || this._noop;
|
||||
const key = this._buildKey(`${id}:500s`, new Date());
|
||||
const key = this.buildKey(`${id}:500s`, new Date());
|
||||
return this._redis.incrEx(key, this._expiry, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* wrapper on `getStats` that handles a list of keys
|
||||
* @param {object} log - Werelogs request logger
|
||||
* @param {array} ids - service identifiers
|
||||
* @param {callback} cb - callback to call with the err/result
|
||||
* @return {undefined}
|
||||
*/
|
||||
getAllStats(log, ids, cb) {
|
||||
if (!this._redis) {
|
||||
return cb(null, {});
|
||||
}
|
||||
|
||||
const statsRes = {
|
||||
'requests': 0,
|
||||
'500s': 0,
|
||||
'sampleDuration': this._expiry,
|
||||
};
|
||||
let requests = 0;
|
||||
let errors = 0;
|
||||
|
||||
// for now set concurrency to default of 10
|
||||
return async.eachLimit(ids, 10, (id, done) => {
|
||||
this.getStats(log, id, (err, res) => {
|
||||
if (err) {
|
||||
return done(err);
|
||||
}
|
||||
requests += res.requests;
|
||||
errors += res['500s'];
|
||||
return done();
|
||||
});
|
||||
}, error => {
|
||||
if (error) {
|
||||
log.error('error getting stats', {
|
||||
error,
|
||||
method: 'StatsClient.getAllStats',
|
||||
});
|
||||
return cb(null, statsRes);
|
||||
}
|
||||
statsRes.requests = requests;
|
||||
statsRes['500s'] = errors;
|
||||
return cb(null, statsRes);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* get stats for the last x seconds, x being the sampling duration
|
||||
* @param {object} log - Werelogs request logger
|
||||
|
@ -121,8 +189,8 @@ class StatsClient {
|
|||
const reqsKeys = [];
|
||||
const req500sKeys = [];
|
||||
for (let i = 0; i < totalKeys; i++) {
|
||||
reqsKeys.push(['get', this._buildKey(`${id}:requests`, d)]);
|
||||
req500sKeys.push(['get', this._buildKey(`${id}:500s`, d)]);
|
||||
reqsKeys.push(['get', this.buildKey(`${id}:requests`, d)]);
|
||||
req500sKeys.push(['get', this.buildKey(`${id}:500s`, d)]);
|
||||
this._setPrevInterval(d);
|
||||
}
|
||||
return async.parallel([
|
||||
|
|
|
@ -1,4 +1,7 @@
|
|||
const async = require('async');
|
||||
|
||||
const StatsClient = require('./StatsClient');
|
||||
|
||||
/**
|
||||
* @class StatsModel
|
||||
*
|
||||
|
@ -6,6 +9,140 @@ const StatsClient = require('./StatsClient');
|
|||
* rather than by seconds
|
||||
*/
|
||||
class StatsModel extends StatsClient {
|
||||
/**
|
||||
* Utility method to convert 2d array rows to columns, and vice versa
|
||||
* See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip
|
||||
* @param {array} arrays - 2d array of integers
|
||||
* @return {array} converted array
|
||||
*/
|
||||
_zip(arrays) {
|
||||
if (arrays.length > 0 && arrays.every(a => Array.isArray(a))) {
|
||||
return arrays[0].map((_, i) => arrays.map(a => a[i]));
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
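For clarity on what the _zip helper above is for: it transposes the per-service count arrays so that getAllStats can later sum them per time interval. A standalone copy of the same transposition:

// Standalone version of the _zip transposition used by StatsModel:
// rows become columns so counts from several services can be summed
// per time interval.
function zip(arrays) {
    if (arrays.length > 0 && arrays.every(a => Array.isArray(a))) {
        return arrays[0].map((_, i) => arrays.map(a => a[i]));
    }
    return [];
}

const requestsPerService = [
    [4, 0, 2], // service A: counts per interval, most recent first
    [1, 3, 0], // service B
];
console.log(zip(requestsPerService)); // [[4, 1], [0, 3], [2, 0]]
console.log(zip(requestsPerService).map(col =>
    col.reduce((acc, n) => acc + n, 0))); // [5, 3, 2] — totals per interval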
/**
|
||||
* normalize to the nearest interval
|
||||
* @param {object} d - Date instance
|
||||
* @return {number} timestamp - normalized to the nearest interval
|
||||
*/
|
||||
_normalizeTimestamp(d) {
|
||||
const m = d.getMinutes();
|
||||
return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* override the method to get the count as an array of integers separated
|
||||
* by each interval
|
||||
* typical input looks like [[null, '1'], [null, '2'], [null, null]...]
|
||||
* @param {array} arr - each index contains the result of each batch command
|
||||
* where index 0 signifies the error and index 1 contains the result
|
||||
* @return {array} array of integers, ordered from most recent interval to
|
||||
* oldest interval with length of (expiry / interval)
|
||||
*/
|
||||
_getCount(arr) {
|
||||
const size = Math.floor(this._expiry / this._interval);
|
||||
const array = arr.reduce((store, i) => {
|
||||
let num = parseInt(i[1], 10);
|
||||
num = Number.isNaN(num) ? 0 : num;
|
||||
store.push(num);
|
||||
return store;
|
||||
}, []);
|
||||
|
||||
if (array.length < size) {
|
||||
array.push(...Array(size - array.length).fill(0));
|
||||
}
|
||||
return array;
|
||||
}
|
||||
|
||||
/**
|
||||
* wrapper on `getStats` that handles a list of keys
|
||||
* override the method to reduce the returned 2d array from `_getCount`
|
||||
* @param {object} log - Werelogs request logger
|
||||
* @param {array} ids - service identifiers
|
||||
* @param {callback} cb - callback to call with the err/result
|
||||
* @return {undefined}
|
||||
*/
|
||||
getAllStats(log, ids, cb) {
|
||||
if (!this._redis) {
|
||||
return cb(null, {});
|
||||
}
|
||||
|
||||
const size = Math.floor(this._expiry / this._interval);
|
||||
const statsRes = {
|
||||
'requests': Array(size).fill(0),
|
||||
'500s': Array(size).fill(0),
|
||||
'sampleDuration': this._expiry,
|
||||
};
|
||||
const requests = [];
|
||||
const errors = [];
|
||||
|
||||
if (ids.length === 0) {
|
||||
return cb(null, statsRes);
|
||||
}
|
||||
|
||||
// for now set concurrency to default of 10
|
||||
return async.eachLimit(ids, 10, (id, done) => {
|
||||
this.getStats(log, id, (err, res) => {
|
||||
if (err) {
|
||||
return done(err);
|
||||
}
|
||||
requests.push(res.requests);
|
||||
errors.push(res['500s']);
|
||||
return done();
|
||||
});
|
||||
}, error => {
|
||||
if (error) {
|
||||
log.error('error getting stats', {
|
||||
error,
|
||||
method: 'StatsModel.getAllStats',
|
||||
});
|
||||
return cb(null, statsRes);
|
||||
}
|
||||
|
||||
statsRes.requests = this._zip(requests).map(arr =>
|
||||
arr.reduce((acc, i) => acc + i), 0);
|
||||
statsRes['500s'] = this._zip(errors).map(arr =>
|
||||
arr.reduce((acc, i) => acc + i), 0);
|
||||
|
||||
return cb(null, statsRes);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles getting a list of global keys.
|
||||
* @param {array} ids - Service identifiers
|
||||
* @param {object} log - Werelogs request logger
|
||||
* @param {function} cb - Callback
|
||||
* @return {undefined}
|
||||
*/
|
||||
getAllGlobalStats(ids, log, cb) {
|
||||
const reqsKeys = ids.map(key => (['get', key]));
|
||||
return this._redis.batch(reqsKeys, (err, res) => {
|
||||
const statsRes = { requests: 0 };
|
||||
if (err) {
|
||||
log.error('error getting metrics', {
|
||||
error: err,
|
||||
method: 'StatsClient.getAllGlobalStats',
|
||||
});
|
||||
return cb(null, statsRes);
|
||||
}
|
||||
statsRes.requests = res.reduce((sum, curr) => {
|
||||
const [cmdErr, val] = curr;
|
||||
if (cmdErr) {
|
||||
// Log any individual request errors from the batch request.
|
||||
log.error('error getting metrics', {
|
||||
error: cmdErr,
|
||||
method: 'StatsClient.getAllGlobalStats',
|
||||
});
|
||||
}
|
||||
return sum + (Number.parseInt(val, 10) || 0);
|
||||
}, 0);
|
||||
return cb(null, statsRes);
|
||||
});
|
||||
}
|
||||
|
||||
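A hedged end-to-end sketch of the new global counter helpers: incrementKey/decrementKey from StatsClient together with getAllGlobalStats above. The require paths, constructor argument order and the key name are assumptions, not taken from the diff.

// Hypothetical flow; paths, key names and constructor arguments are
// illustrative. Assumes the key does not exist beforehand.
const RedisClient = require('./lib/metrics/RedisClient');
const StatsModel = require('./lib/metrics/StatsModel');

const log = { info: () => {}, error: () => {}, debug: () => {} };
const redis = new RedisClient({ host: '127.0.0.1', port: 6379 }, log);
// Assumed constructor shape: (redisClient, interval, expiry) in seconds.
const stats = new StatsModel(redis, 300, 900);

const key = 'crr:pending:my-site';
stats.incrementKey(key, 3, err => {
    if (err) {
        throw err;
    }
    stats.decrementKey(key, 1, () =>
        stats.getAllGlobalStats([key], log, (err2, res) => {
            console.log(res); // { requests: 2 }
            redis.disconnect();
        }));
});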
/**
|
||||
* normalize date timestamp to the nearest hour
|
||||
* @param {Date} d - Date instance
|
||||
|
@ -24,34 +161,6 @@ class StatsModel extends StatsClient {
|
|||
return d.setHours(d.getHours() - 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* normalize to the nearest interval
|
||||
* @param {object} d - Date instance
|
||||
* @return {number} timestamp - normalized to the nearest interval
|
||||
*/
|
||||
_normalizeTimestamp(d) {
|
||||
const m = d.getMinutes();
|
||||
return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* override the method to get the result as an array of integers separated
|
||||
* by each interval
|
||||
* typical input looks like [[null, '1'], [null, '2'], [null, null]...]
|
||||
* @param {array} arr - each index contains the result of each batch command
|
||||
* where index 0 signifies the error and index 1 contains the result
|
||||
* @return {array} array of integers, ordered from most recent interval to
|
||||
* oldest interval
|
||||
*/
|
||||
_getCount(arr) {
|
||||
return arr.reduce((store, i) => {
|
||||
let num = parseInt(i[1], 10);
|
||||
num = Number.isNaN(num) ? 0 : num;
|
||||
store.push(num);
|
||||
return store;
|
||||
}, []);
|
||||
}
|
||||
|
||||
/**
|
||||
* get list of sorted set key timestamps
|
||||
* @param {number} epoch - epoch time
|
||||
|
|
|
@ -0,0 +1,237 @@
|
|||
/**
|
||||
* Helper class to ease access to the Azure specific information for
|
||||
* storage accounts mapped to buckets.
|
||||
*/
|
||||
class BucketAzureInfo {
|
||||
/**
|
||||
* @constructor
|
||||
* @param {object} obj - Raw structure for the Azure info on storage account
|
||||
* @param {string} obj.sku - SKU name of this storage account
|
||||
* @param {string} obj.accessTier - Access Tier name of this storage account
|
||||
* @param {string} obj.kind - Kind name of this storage account
|
||||
* @param {string[]} obj.systemKeys - pair of shared keys for the system
|
||||
* @param {string[]} obj.tenantKeys - pair of shared keys for the tenant
|
||||
* @param {string} obj.subscriptionId - subscription ID the storage account
|
||||
* belongs to
|
||||
* @param {string} obj.resourceGroup - Resource group name the storage
|
||||
* account belongs to
|
||||
* @param {object} obj.deleteRetentionPolicy - Delete retention policy
|
||||
* @param {boolean} obj.deleteRetentionPolicy.enabled -
|
||||
* @param {number} obj.deleteRetentionPolicy.days -
|
||||
* @param {object[]} obj.managementPolicies - Management policies for this
|
||||
* storage account
|
||||
* @param {boolean} obj.httpsOnly - Serve the content of this storage
|
||||
* account through HTTPS only
|
||||
* @param {object} obj.tags - Set of tags applied on this storage account
|
||||
* @param {object[]} obj.networkACL - Network ACL of this storage account
|
||||
* @param {string} obj.cname - CNAME of this storage account
|
||||
* @param {boolean} obj.azureFilesAADIntegration - whether or not Azure
|
||||
* Files AAD Integration is enabled for this storage account
|
||||
* @param {boolean} obj.hnsEnabled - whether or not a hierarchical namespace
|
||||
* is enabled for this storage account
|
||||
* @param {object} obj.logging - service properties: logging
|
||||
* @param {object} obj.hourMetrics - service properties: hourMetrics
|
||||
* @param {object} obj.minuteMetrics - service properties: minuteMetrics
|
||||
* @param {string} obj.serviceVersion - service properties: serviceVersion
|
||||
*/
|
||||
constructor(obj) {
|
||||
this._data = {
|
||||
sku: obj.sku,
|
||||
accessTier: obj.accessTier,
|
||||
kind: obj.kind,
|
||||
systemKeys: obj.systemKeys,
|
||||
tenantKeys: obj.tenantKeys,
|
||||
subscriptionId: obj.subscriptionId,
|
||||
resourceGroup: obj.resourceGroup,
|
||||
deleteRetentionPolicy: obj.deleteRetentionPolicy,
|
||||
managementPolicies: obj.managementPolicies,
|
||||
httpsOnly: obj.httpsOnly,
|
||||
tags: obj.tags,
|
||||
networkACL: obj.networkACL,
|
||||
cname: obj.cname,
|
||||
azureFilesAADIntegration: obj.azureFilesAADIntegration,
|
||||
hnsEnabled: obj.hnsEnabled,
|
||||
logging: obj.logging,
|
||||
hourMetrics: obj.hourMetrics,
|
||||
minuteMetrics: obj.minuteMetrics,
|
||||
serviceVersion: obj.serviceVersion,
|
||||
};
|
||||
}
|
||||
|
||||
getSku() {
|
||||
return this._data.sku;
|
||||
}
|
||||
|
||||
setSku(sku) {
|
||||
this._data.sku = sku;
|
||||
return this;
|
||||
}
|
||||
|
||||
getAccessTier() {
|
||||
return this._data.accessTier;
|
||||
}
|
||||
|
||||
setAccessTier(accessTier) {
|
||||
this._data.accessTier = accessTier;
|
||||
return this;
|
||||
}
|
||||
|
||||
getKind() {
|
||||
return this._data.kind;
|
||||
}
|
||||
|
||||
setKind(kind) {
|
||||
this._data.kind = kind;
|
||||
return this;
|
||||
}
|
||||
|
||||
getSystemKeys() {
|
||||
return this._data.systemKeys;
|
||||
}
|
||||
|
||||
setSystemKeys(systemKeys) {
|
||||
this._data.systemKeys = systemKeys;
|
||||
return this;
|
||||
}
|
||||
|
||||
getTenantKeys() {
|
||||
return this._data.tenantKeys;
|
||||
}
|
||||
|
||||
setTenantKeys(tenantKeys) {
|
||||
this._data.tenantKeys = tenantKeys;
|
||||
return this;
|
||||
}
|
||||
|
||||
getSubscriptionId() {
|
||||
return this._data.subscriptionId;
|
||||
}
|
||||
|
||||
setSubscriptionId(subscriptionId) {
|
||||
this._data.subscriptionId = subscriptionId;
|
||||
return this;
|
||||
}
|
||||
|
||||
getResourceGroup() {
|
||||
return this._data.resourceGroup;
|
||||
}
|
||||
|
||||
setResourceGroup(resourceGroup) {
|
||||
this._data.resourceGroup = resourceGroup;
|
||||
return this;
|
||||
}
|
||||
|
||||
getDeleteRetentionPolicy() {
|
||||
return this._data.deleteRetentionPolicy;
|
||||
}
|
||||
|
||||
setDeleteRetentionPolicy(deleteRetentionPolicy) {
|
||||
this._data.deleteRetentionPolicy = deleteRetentionPolicy;
|
||||
return this;
|
||||
}
|
||||
|
||||
getManagementPolicies() {
|
||||
return this._data.managementPolicies;
|
||||
}
|
||||
|
||||
setManagementPolicies(managementPolicies) {
|
||||
this._data.managementPolicies = managementPolicies;
|
||||
return this;
|
||||
}
|
||||
|
||||
getHttpsOnly() {
|
||||
return this._data.httpsOnly;
|
||||
}
|
||||
|
||||
setHttpsOnly(httpsOnly) {
|
||||
this._data.httpsOnly = httpsOnly;
|
||||
return this;
|
||||
}
|
||||
|
||||
getTags() {
|
||||
return this._data.tags;
|
||||
}
|
||||
|
||||
setTags(tags) {
|
||||
this._data.tags = tags;
|
||||
return this;
|
||||
}
|
||||
|
||||
getNetworkACL() {
|
||||
return this._data.networkACL;
|
||||
}
|
||||
|
||||
setNetworkACL(networkACL) {
|
||||
this._data.networkACL = networkACL;
|
||||
return this;
|
||||
}
|
||||
|
||||
getCname() {
|
||||
return this._data.cname;
|
||||
}
|
||||
|
||||
setCname(cname) {
|
||||
this._data.cname = cname;
|
||||
return this;
|
||||
}
|
||||
|
||||
getAzureFilesAADIntegration() {
|
||||
return this._data.azureFilesAADIntegration;
|
||||
}
|
||||
|
||||
setAzureFilesAADIntegration(azureFilesAADIntegration) {
|
||||
this._data.azureFilesAADIntegration = azureFilesAADIntegration;
|
||||
return this;
|
||||
}
|
||||
|
||||
getHnsEnabled() {
|
||||
return this._data.hnsEnabled;
|
||||
}
|
||||
|
||||
setHnsEnabled(hnsEnabled) {
|
||||
this._data.hnsEnabled = hnsEnabled;
|
||||
return this;
|
||||
}
|
||||
|
||||
getLogging() {
|
||||
return this._data.logging;
|
||||
}
|
||||
|
||||
setLogging(logging) {
|
||||
this._data.logging = logging;
|
||||
return this;
|
||||
}
|
||||
|
||||
getHourMetrics() {
|
||||
return this._data.hourMetrics;
|
||||
}
|
||||
|
||||
setHourMetrics(hourMetrics) {
|
||||
this._data.hourMetrics = hourMetrics;
|
||||
return this;
|
||||
}
|
||||
|
||||
getMinuteMetrics() {
|
||||
return this._data.minuteMetrics;
|
||||
}
|
||||
|
||||
setMinuteMetrics(minuteMetrics) {
|
||||
this._data.minuteMetrics = minuteMetrics;
|
||||
return this;
|
||||
}
|
||||
|
||||
getServiceVersion() {
|
||||
return this._data.serviceVersion;
|
||||
}
|
||||
|
||||
setServiceVersion(serviceVersion) {
|
||||
this._data.serviceVersion = serviceVersion;
|
||||
return this;
|
||||
}
|
||||
|
||||
getValue() {
|
||||
return this._data;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = BucketAzureInfo;
|
|
@ -9,8 +9,9 @@ const BucketPolicy = require('./BucketPolicy');
|
|||
const NotificationConfiguration = require('./NotificationConfiguration');
|
||||
|
||||
// WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG
|
||||
// BucketInfoModelVersion.md can be found in the root of this repository
|
||||
const modelVersion = 10;
|
||||
// BucketInfoModelVersion.md can be found in documentation/ at the root
|
||||
// of this repository
|
||||
const modelVersion = 14;
|
||||
|
||||
class BucketInfo {
|
||||
/**
|
||||
|
@ -41,7 +42,8 @@ class BucketInfo {
|
|||
* @param {object} versioningConfiguration - versioning configuration
|
||||
* @param {string} versioningConfiguration.Status - versioning status
|
||||
* @param {object} versioningConfiguration.MfaDelete - versioning mfa delete
|
||||
* @param {string} locationConstraint - locationConstraint for bucket
|
||||
* @param {string} locationConstraint - locationConstraint for bucket that
|
||||
* also includes the ingestion flag
|
||||
* @param {WebsiteConfiguration} [websiteConfiguration] - website
|
||||
* configuration
|
||||
* @param {object[]} [cors] - collection of CORS rules to apply
|
||||
|
@ -57,6 +59,11 @@ class BucketInfo {
|
|||
* @param {object} [lifecycleConfiguration] - lifecycle configuration
|
||||
* @param {object} [bucketPolicy] - bucket policy
|
||||
* @param {string} [uid] - unique identifier for the bucket, necessary
|
||||
* @param {string} readLocationConstraint - readLocationConstraint for bucket
|
||||
* addition for use with lifecycle operations
|
||||
* @param {boolean} [isNFS] - whether the bucket is on NFS
|
||||
* @param {object} [ingestionConfig] - object for ingestion status: en/dis
|
||||
* @param {object} [azureInfo] - Azure storage account specific info
|
||||
* @param {boolean} [objectLockEnabled] - true when object lock enabled
|
||||
* @param {object} [objectLockConfiguration] - object lock configuration
|
||||
* @param {object} [notificationConfiguration] - bucket notification configuration
|
||||
|
@ -66,8 +73,9 @@ class BucketInfo {
|
|||
serverSideEncryption, versioningConfiguration,
|
||||
locationConstraint, websiteConfiguration, cors,
|
||||
replicationConfiguration, lifecycleConfiguration,
|
||||
bucketPolicy, uid, objectLockEnabled, objectLockConfiguration,
|
||||
notificationConfiguration) {
|
||||
bucketPolicy, uid, readLocationConstraint, isNFS,
|
||||
ingestionConfig, azureInfo, objectLockEnabled,
|
||||
objectLockConfiguration, notificationConfiguration) {
|
||||
assert.strictEqual(typeof name, 'string');
|
||||
assert.strictEqual(typeof owner, 'string');
|
||||
assert.strictEqual(typeof ownerDisplayName, 'string');
|
||||
|
@ -108,6 +116,15 @@ class BucketInfo {
|
|||
if (locationConstraint) {
|
||||
assert.strictEqual(typeof locationConstraint, 'string');
|
||||
}
|
||||
if (ingestionConfig) {
|
||||
assert.strictEqual(typeof ingestionConfig, 'object');
|
||||
}
|
||||
if (azureInfo) {
|
||||
assert.strictEqual(typeof azureInfo, 'object');
|
||||
}
|
||||
if (readLocationConstraint) {
|
||||
assert.strictEqual(typeof readLocationConstraint, 'string');
|
||||
}
|
||||
if (websiteConfiguration) {
|
||||
assert(websiteConfiguration instanceof WebsiteConfiguration);
|
||||
const { indexDocument, errorDocument, redirectAllRequestsTo,
|
||||
|
@ -164,12 +181,16 @@ class BucketInfo {
|
|||
this._serverSideEncryption = serverSideEncryption || null;
|
||||
this._versioningConfiguration = versioningConfiguration || null;
|
||||
this._locationConstraint = locationConstraint || null;
|
||||
this._readLocationConstraint = readLocationConstraint || null;
|
||||
this._websiteConfiguration = websiteConfiguration || null;
|
||||
this._replicationConfiguration = replicationConfiguration || null;
|
||||
this._cors = cors || null;
|
||||
this._lifecycleConfiguration = lifecycleConfiguration || null;
|
||||
this._bucketPolicy = bucketPolicy || null;
|
||||
this._uid = uid || uuid();
|
||||
this._isNFS = isNFS || null;
|
||||
this._ingestion = ingestionConfig || null;
|
||||
this._azureInfo = azureInfo || null;
|
||||
this._objectLockEnabled = objectLockEnabled || false;
|
||||
this._objectLockConfiguration = objectLockConfiguration || null;
|
||||
this._notificationConfiguration = notificationConfiguration || null;
|
||||
|
@ -192,12 +213,16 @@ class BucketInfo {
|
|||
serverSideEncryption: this._serverSideEncryption,
|
||||
versioningConfiguration: this._versioningConfiguration,
|
||||
locationConstraint: this._locationConstraint,
|
||||
readLocationConstraint: this._readLocationConstraint,
|
||||
websiteConfiguration: undefined,
|
||||
cors: this._cors,
|
||||
replicationConfiguration: this._replicationConfiguration,
|
||||
lifecycleConfiguration: this._lifecycleConfiguration,
|
||||
bucketPolicy: this._bucketPolicy,
|
||||
uid: this._uid,
|
||||
isNFS: this._isNFS,
|
||||
ingestion: this._ingestion,
|
||||
azureInfo: this._azureInfo,
|
||||
objectLockEnabled: this._objectLockEnabled,
|
||||
objectLockConfiguration: this._objectLockConfiguration,
|
||||
notificationConfiguration: this._notificationConfiguration,
|
||||
|
@ -222,7 +247,8 @@ class BucketInfo {
|
|||
obj.transient, obj.deleted, obj.serverSideEncryption,
|
||||
obj.versioningConfiguration, obj.locationConstraint, websiteConfig,
|
||||
obj.cors, obj.replicationConfiguration, obj.lifecycleConfiguration,
|
||||
obj.bucketPolicy, obj.uid, obj.objectLockEnabled,
|
||||
obj.bucketPolicy, obj.uid, obj.readLocationConstraint, obj.isNFS,
|
||||
obj.ingestion, obj.azureInfo, obj.objectLockEnabled,
|
||||
obj.objectLockConfiguration, obj.notificationConfiguration);
|
||||
}
|
||||
|
||||
|
@ -247,8 +273,10 @@ class BucketInfo {
|
|||
data._versioningConfiguration, data._locationConstraint,
|
||||
data._websiteConfiguration, data._cors,
|
||||
data._replicationConfiguration, data._lifecycleConfiguration,
|
||||
data._bucketPolicy, data._uid, data._objectLockEnabled,
|
||||
data._objectLockConfiguration, data._notificationConfiguration);
|
||||
data._bucketPolicy, data._uid, data._readLocationConstraint,
|
||||
data._isNFS, data._ingestion, data._azureInfo,
|
||||
data._objectLockEnabled, data._objectLockConfiguration,
|
||||
data._notificationConfiguration);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -545,6 +573,17 @@ class BucketInfo {
|
|||
return this._locationConstraint;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get read location constraint.
|
||||
* @return {string} - bucket read location constraint
|
||||
*/
|
||||
getReadLocationConstraint() {
|
||||
if (this._readLocationConstraint) {
|
||||
return this._readLocationConstraint;
|
||||
}
|
||||
return this._locationConstraint;
|
||||
}
|
||||
|
||||
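The getter above falls back to the bucket's write location when no read location was set; a two-case standalone sketch of that behavior (it does not go through the full BucketInfo constructor, which takes many more arguments, and the location names are invented):

// Standalone sketch of the read-location fallback.
function readLocationOf(bucket) {
    return bucket._readLocationConstraint || bucket._locationConstraint;
}

console.log(readLocationOf({ _locationConstraint: 'us-east-1' }));
// 'us-east-1' — no read location set, falls back to the write location
console.log(readLocationOf({
    _locationConstraint: 'us-east-1',
    _readLocationConstraint: 'azure-archive',
}));
// 'azure-archive'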
/**
|
||||
* Set Bucket model version
|
||||
*
|
||||
|
@ -633,6 +672,85 @@ class BucketInfo {
|
|||
this._uid = uid;
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Check if the bucket is an NFS bucket.
|
||||
* @return {boolean} - Whether the bucket is NFS or not
|
||||
*/
|
||||
isNFS() {
|
||||
return this._isNFS;
|
||||
}
|
||||
/**
|
||||
* Set whether the bucket is an NFS bucket.
|
||||
* @param {boolean} isNFS - Whether the bucket is NFS or not
|
||||
* @return {BucketInfo} - bucket info instance
|
||||
*/
|
||||
setIsNFS(isNFS) {
|
||||
this._isNFS = isNFS;
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* enable ingestion, set 'this._ingestion' to { status: 'enabled' }
|
||||
* @return {BucketInfo} - bucket info instance
|
||||
*/
|
||||
enableIngestion() {
|
||||
this._ingestion = { status: 'enabled' };
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* disable ingestion, set 'this._ingestion' to { status: 'disabled' }
|
||||
* @return {BucketInfo} - bucket info instance
|
||||
*/
|
||||
disableIngestion() {
|
||||
this._ingestion = { status: 'disabled' };
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Get ingestion configuration
|
||||
* @return {object} - bucket ingestion configuration: Enabled or Disabled
|
||||
*/
|
||||
getIngestion() {
|
||||
return this._ingestion;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if bucket is an ingestion bucket
|
||||
* @return {boolean} - 'true' if bucket is ingestion bucket, 'false' if
|
||||
* otherwise
|
||||
*/
|
||||
isIngestionBucket() {
|
||||
const ingestionConfig = this.getIngestion();
|
||||
if (ingestionConfig) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
/**
|
||||
* Check if ingestion is enabled
|
||||
* @return {boolean} - 'true' if ingestion is enabled, otherwise 'false'
|
||||
*/
|
||||
isIngestionEnabled() {
|
||||
const ingestionConfig = this.getIngestion();
|
||||
return ingestionConfig ? ingestionConfig.status === 'enabled' : false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the Azure specific storage account information for this bucket
|
||||
* @return {object} - a structure suitable for {@link BucketAzureInfo}
|
||||
* constructor
|
||||
*/
|
||||
getAzureInfo() {
|
||||
return this._azureInfo;
|
||||
}
|
||||
/**
|
||||
* Set the Azure specific storage account information for this bucket
|
||||
* @param {object} azureInfo - a structure suitable for
|
||||
* {@link BucketAzureInfo} construction
|
||||
* @return {BucketInfo} - bucket info instance
|
||||
*/
|
||||
setAzureInfo(azureInfo) {
|
||||
this._azureInfo = azureInfo;
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Check if object lock is enabled.
|
||||
* @return {boolean} - depending on whether object lock is enabled
|
||||
|
|
|
@ -5,6 +5,8 @@ const errors = require('../errors');
|
|||
const LifecycleRule = require('./LifecycleRule');
|
||||
const escapeForXml = require('../s3middleware/escapeForXml');
|
||||
|
||||
const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
|
||||
|
||||
/**
|
||||
* Format of xml request:
|
||||
|
||||
|
@ -85,10 +87,13 @@ class LifecycleConfiguration {
|
|||
/**
|
||||
* Create a Lifecycle Configuration instance
|
||||
* @param {string} xml - the parsed xml
|
||||
* @param {object} config - the CloudServer config
|
||||
* @return {object} - LifecycleConfiguration instance
|
||||
*/
|
||||
constructor(xml) {
|
||||
constructor(xml, config) {
|
||||
this._parsedXML = xml;
|
||||
this._storageClasses =
|
||||
config.replicationEndpoints.map(endpoint => endpoint.site);
|
||||
this._ruleIDs = [];
|
||||
this._tagKeys = [];
|
||||
this._config = {};
|
||||
|
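The constructor change above means callers now pass the CloudServer config alongside the parsed XML; the only field the diff reads from it is replicationEndpoints[].site, which becomes the set of StorageClass values a Transition may target. A small sketch with invented site names:

// Invented config; only replicationEndpoints[].site is read by the new code.
const config = {
    replicationEndpoints: [
        { site: 'aws-us-east-1' },
        { site: 'azure-archive' },
    ],
};

// What the constructor keeps for _checkStorageClasses:
const storageClasses =
    config.replicationEndpoints.map(endpoint => endpoint.site);
console.log(storageClasses);                     // ['aws-us-east-1', 'azure-archive']
console.log(storageClasses.includes('GLACIER')); // false — such a rule gets MalformedXML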
@ -211,9 +216,10 @@ class LifecycleConfiguration {
|
|||
*/
|
||||
_parseRule(rule) {
|
||||
const ruleObj = {};
|
||||
if (rule.Transition || rule.NoncurrentVersionTransition) {
|
||||
if (rule.NoncurrentVersionTransition) {
|
||||
ruleObj.error = errors.NotImplemented.customizeDescription(
|
||||
'Transition lifecycle action not yet implemented');
|
||||
'NoncurrentVersionTransition lifecycle action not yet ' +
|
||||
'implemented');
|
||||
return ruleObj;
|
||||
}
|
||||
// Either Prefix or Filter must be included, but can be empty string
|
||||
|
@ -468,6 +474,315 @@ class LifecycleConfiguration {
|
|||
return statusObj;
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the prefix and/or tags of the given rule and gets the error message
|
||||
* @param {object} rule - The rule to find the prefix in
|
||||
* @return {string} - The prefix or filter information
|
||||
*/
|
||||
_getRuleFilterDesc(rule) {
|
||||
if (rule.Prefix) {
|
||||
return `prefix '${rule.Prefix[0]}'`;
|
||||
}
|
||||
// There must be a filter if no top-level prefix is provided. First
|
||||
// check if there are multiple filters (i.e. `Filter.And`).
|
||||
if (rule.Filter[0] === undefined || rule.Filter[0].And === undefined) {
|
||||
const { Prefix, Tag } = rule.Filter[0] || {};
|
||||
if (Prefix) {
|
||||
return `filter '(prefix=${Prefix[0]})'`;
|
||||
}
|
||||
if (Tag) {
|
||||
const { Key, Value } = Tag[0];
|
||||
return `filter '(tag: key=${Key[0]}, value=${Value[0]})'`;
|
||||
}
|
||||
return 'filter (all)';
|
||||
}
|
||||
const filters = [];
|
||||
const { Prefix, Tag } = rule.Filter[0].And[0];
|
||||
if (Prefix) {
|
||||
filters.push(`prefix=${Prefix[0]}`);
|
||||
}
|
||||
Tag.forEach(tag => {
|
||||
const { Key, Value } = tag;
|
||||
filters.push(`tag: key=${Key[0]}, value=${Value[0]}`);
|
||||
});
|
||||
const joinedFilters = filters.join(' and ');
|
||||
return `filter '(${joinedFilters})'`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks the validity of the given field
|
||||
* @param {object} params - Given function parameters
|
||||
* @param {string} params.days - The value of the field to check
|
||||
* @param {string} params.field - The field name with the value
|
||||
* @param {string} params.ancestor - The immediate ancestor field
|
||||
* @return {object|null} Returns an error object or `null`
|
||||
*/
|
||||
_checkDays(params) {
|
||||
const { days, field, ancestor } = params;
|
||||
if (days < 0) {
|
||||
const msg = `'${field}' in ${ancestor} action must be nonnegative`;
|
||||
return errors.InvalidArgument.customizeDescription(msg);
|
||||
}
|
||||
if (days > MAX_DAYS) {
|
||||
return errors.MalformedXML.customizeDescription(
|
||||
`'${field}' in ${ancestor} action must not exceed ${MAX_DAYS}`);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks the validity of the given storage class
|
||||
* @param {object} params - Given function parameters
|
||||
* @param {array} params.usedStorageClasses - Storage classes used in other
|
||||
* rules
|
||||
* @param {string} params.storageClass - The storage class of the current
|
||||
* rule
|
||||
* @param {string} params.ancestor - The immediate ancestor field
|
||||
* @param {string} params.prefix - The prefix of the rule
|
||||
* @return {object|null} Returns an error object or `null`
|
||||
*/
|
||||
_checkStorageClasses(params) {
|
||||
const { usedStorageClasses, storageClass, ancestor, rule } = params;
|
||||
if (!this._storageClasses.includes(storageClass)) {
|
||||
// This differs from the AWS message. This will help the user since
|
||||
// the StorageClass does not conform to AWS specs.
|
||||
const list = `'${this._storageClasses.join("', '")}'`;
|
||||
const msg = `'StorageClass' must be one of ${list}`;
|
||||
return errors.MalformedXML.customizeDescription(msg);
|
||||
}
|
||||
if (usedStorageClasses.includes(storageClass)) {
|
||||
const msg = `'StorageClass' must be different for '${ancestor}' ` +
|
||||
`actions in same 'Rule' with ${this._getRuleFilterDesc(rule)}`;
|
||||
return errors.InvalidRequest.customizeDescription(msg);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure that transition rules are at least a day apart from each other.
|
||||
* @param {object} params - Given function parameters
|
||||
* @param {string} [params.days] - The days of the current transition
|
||||
* @param {string} [params.date] - The date of the current transition
|
||||
* @param {string} params.storageClass - The storage class of the current
|
||||
* rule
|
||||
* @param {string} params.rule - The current rule
|
||||
* @return {undefined}
|
||||
*/
|
||||
_checkTimeGap(params) {
|
||||
const { days, date, storageClass, rule } = params;
|
||||
const invalidTransition = rule.Transition.find(transition => {
|
||||
if (storageClass === transition.StorageClass[0]) {
|
||||
return false;
|
||||
}
|
||||
if (days !== undefined) {
|
||||
return Number.parseInt(transition.Days[0], 10) === days;
|
||||
}
|
||||
if (date !== undefined) {
|
||||
const timestamp = new Date(date).getTime();
|
||||
const compareTimestamp = new Date(transition.Date[0]).getTime();
|
||||
const oneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
|
||||
return Math.abs(timestamp - compareTimestamp) < oneDay;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
if (invalidTransition) {
|
||||
const timeType = days !== undefined ? 'Days' : 'Date';
|
||||
const filterMsg = this._getRuleFilterDesc(rule);
|
||||
const compareStorageClass = invalidTransition.StorageClass[0];
|
||||
const msg = `'${timeType}' in the 'Transition' action for ` +
|
||||
`StorageClass '${storageClass}' for ${filterMsg} must be at ` +
|
||||
`least one day apart from ${filterMsg} in the 'Transition' ` +
|
||||
`action for StorageClass '${compareStorageClass}'`;
|
||||
return errors.InvalidArgument.customizeDescription(msg);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
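The check above rejects two transitions in the same rule whose trigger times are less than a day apart; below is a standalone sketch of the Date-based comparison it performs (the nested arrays mirror the xml2js output used in the diff, and the values are invented):

// Standalone sketch of the one-day-gap check between Date-based transitions
// in the same lifecycle rule.
const ONE_DAY_MS = 24 * 60 * 60 * 1000;

function hasTransitionCloserThanOneDay(rule, date, storageClass) {
    return rule.Transition.some(transition => {
        if (storageClass === transition.StorageClass[0]) {
            return false; // only compare against other storage classes
        }
        const gap = Math.abs(new Date(date).getTime() -
            new Date(transition.Date[0]).getTime());
        return gap < ONE_DAY_MS;
    });
}

const rule = {
    Transition: [
        { Date: ['2022-01-01T00:00:00Z'], StorageClass: ['aws-us-east-1'] },
        { Date: ['2022-01-01T12:00:00Z'], StorageClass: ['azure-archive'] },
    ],
};

// 12 hours apart: the second transition violates the one-day rule.
console.log(hasTransitionCloserThanOneDay(
    rule, '2022-01-01T00:00:00Z', 'aws-us-east-1')); // true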
/**
|
||||
* Checks transition time type (i.e. 'Date' or 'Days') only occurs once
|
||||
* across transitions and across transitions and expiration policies
|
||||
* @param {object} params - Given function parameters
|
||||
* @param {string} params.usedTimeType - The time type that has been used by
|
||||
* another rule
|
||||
* @param {string} params.currentTimeType - the time type used by the
|
||||
* current rule
|
||||
* @param {string} params.rule - The current rule
|
||||
* @return {object|null} Returns an error object or `null`
|
||||
*/
|
||||
_checkTimeType(params) {
|
||||
const { usedTimeType, currentTimeType, rule } = params;
|
||||
if (usedTimeType && usedTimeType !== currentTimeType) {
|
||||
const msg = "Found mixed 'Date' and 'Days' based Transition " +
|
||||
'actions in lifecycle rule for ' +
|
||||
`${this._getRuleFilterDesc(rule)}`;
|
||||
return errors.InvalidRequest.customizeDescription(msg);
|
||||
}
|
||||
// Transition time type cannot differ from the expiration, if provided.
|
||||
if (rule.Expiration &&
|
||||
rule.Expiration[0][currentTimeType] === undefined) {
|
||||
const msg = "Found mixed 'Date' and 'Days' based Expiration and " +
|
||||
'Transition actions in lifecycle rule for ' +
|
||||
`${this._getRuleFilterDesc(rule)}`;
|
||||
return errors.InvalidRequest.customizeDescription(msg);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks the validity of the given date
|
||||
* @param {string} date - The date to check
|
||||
* @return {object|null} Returns an error object or `null`
|
||||
*/
|
||||
_checkDate(date) {
|
||||
const isoRegex = new RegExp('^(-?(?:[1-9][0-9]*)?[0-9]{4})-' +
|
||||
'(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9])' +
|
||||
':([0-5][0-9]):([0-5][0-9])(.[0-9]+)?(Z)?$');
|
||||
if (!isoRegex.test(date)) {
|
||||
const msg = 'Date must be in ISO 8601 format';
|
||||
return errors.InvalidArgument.customizeDescription(msg);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the NonCurrentVersionTransition value
|
||||
* @param {object} rule - Rule object from Rule array from this._parsedXml
|
||||
* @return {object} - Contains error if parsing failed, otherwise contains
|
||||
* the parsed nonCurrentVersionTransition array
|
||||
*
|
||||
* Format of result:
|
||||
* result = {
|
||||
* error: <error>,
|
||||
* nonCurrentVersionTransition: [
|
||||
* {
|
||||
* noncurrentDays: <non-current-days>,
|
||||
* storageClass: <storage-class>,
|
||||
* },
|
||||
* ...
|
||||
* ]
|
||||
* }
|
||||
*/
|
||||
_parseNoncurrentVersionTransition(rule) {
|
||||
const nonCurrentVersionTransition = [];
|
||||
const usedStorageClasses = [];
|
||||
for (let i = 0; i < rule.NoncurrentVersionTransition.length; i++) {
|
||||
const t = rule.NoncurrentVersionTransition[i]; // Transition object
|
||||
const noncurrentDays =
|
||||
t.NoncurrentDays && Number.parseInt(t.NoncurrentDays[0], 10);
|
||||
const storageClass = t.StorageClass && t.StorageClass[0];
|
||||
if (noncurrentDays === undefined || storageClass === undefined) {
|
||||
return { error: errors.MalformedXML };
|
||||
}
|
||||
let error = this._checkDays({
|
||||
days: noncurrentDays,
|
||||
field: 'NoncurrentDays',
|
||||
ancestor: 'NoncurrentVersionTransition',
|
||||
});
|
||||
if (error) {
|
||||
return { error };
|
||||
}
|
||||
error = this._checkStorageClasses({
|
||||
storageClass,
|
||||
usedStorageClasses,
|
||||
ancestor: 'NoncurrentVersionTransition',
|
||||
rule,
|
||||
});
|
||||
if (error) {
|
||||
return { error };
|
||||
}
|
||||
nonCurrentVersionTransition.push({ noncurrentDays, storageClass });
|
||||
usedStorageClasses.push(storageClass);
|
||||
}
|
||||
return { nonCurrentVersionTransition };
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the Transition value
|
||||
* @param {object} rule - Rule object from Rule array from this._parsedXml
|
||||
* @return {object} - Contains error if parsing failed, otherwise contains
|
||||
* the parsed transition array
|
||||
*
|
||||
* Format of result:
|
||||
* result = {
|
||||
* error: <error>,
|
||||
* transition: [
|
||||
* {
|
||||
* days: <days>,
|
||||
* date: <date>,
|
||||
* storageClass: <storage-class>,
|
||||
* },
|
||||
* ...
|
||||
* ]
|
||||
* }
|
||||
*/
|
||||
_parseTransition(rule) {
|
||||
const transition = [];
|
||||
const usedStorageClasses = [];
|
||||
let usedTimeType = null;
|
||||
for (let i = 0; i < rule.Transition.length; i++) {
|
||||
const t = rule.Transition[i]; // Transition object
|
||||
const days = t.Days && Number.parseInt(t.Days[0], 10);
|
||||
const date = t.Date && t.Date[0];
|
||||
const storageClass = t.StorageClass && t.StorageClass[0];
|
||||
if ((days === undefined && date === undefined) ||
|
||||
(days !== undefined && date !== undefined) ||
|
||||
(storageClass === undefined)) {
|
||||
return { error: errors.MalformedXML };
|
||||
}
|
||||
let error = this._checkStorageClasses({
|
||||
storageClass,
|
||||
usedStorageClasses,
|
||||
ancestor: 'Transition',
|
||||
rule,
|
||||
});
|
||||
if (error) {
|
||||
return { error };
|
||||
}
|
||||
usedStorageClasses.push(storageClass);
|
||||
if (days !== undefined) {
|
||||
error = this._checkTimeType({
|
||||
usedTimeType,
|
||||
currentTimeType: 'Days',
|
||||
rule,
|
||||
});
|
||||
if (error) {
|
||||
return { error };
|
||||
}
|
||||
usedTimeType = 'Days';
|
||||
error = this._checkDays({
|
||||
days,
|
||||
field: 'Days',
|
||||
ancestor: 'Transition',
|
||||
});
|
||||
if (error) {
|
||||
return { error };
|
||||
}
|
||||
transition.push({ days, storageClass });
|
||||
}
|
||||
if (date !== undefined) {
|
||||
error = this._checkTimeType({
|
||||
usedTimeType,
|
||||
currentTimeType: 'Date',
|
||||
rule,
|
||||
});
|
||||
if (error) {
|
||||
return { error };
|
||||
}
|
||||
usedTimeType = 'Date';
|
||||
error = this._checkDate(date);
|
||||
if (error) {
|
||||
return { error };
|
||||
}
|
||||
transition.push({ date, storageClass });
|
||||
}
|
||||
error = this._checkTimeGap({ days, date, storageClass, rule });
|
||||
if (error) {
|
||||
return { error };
|
||||
}
|
||||
}
|
||||
return { transition };
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that action component of rule is valid
|
||||
* @param {object} rule - a rule object from Rule array from this._parsedXml
|
||||
|
@ -492,8 +807,13 @@ class LifecycleConfiguration {
|
|||
const actionsObj = {};
|
||||
actionsObj.propName = 'actions';
|
||||
actionsObj.actions = [];
|
||||
const validActions = ['AbortIncompleteMultipartUpload',
|
||||
'Expiration', 'NoncurrentVersionExpiration'];
|
||||
const validActions = [
|
||||
'AbortIncompleteMultipartUpload',
|
||||
'Expiration',
|
||||
'NoncurrentVersionExpiration',
|
||||
'NoncurrentVersionTransition',
|
||||
'Transition',
|
||||
];
|
||||
validActions.forEach(a => {
|
||||
if (rule[a]) {
|
||||
actionsObj.actions.push({ actionName: `${a}` });
|
||||
|
@ -510,7 +830,8 @@ class LifecycleConfiguration {
|
|||
if (action.error) {
|
||||
actionsObj.error = action.error;
|
||||
} else {
|
||||
const actionTimes = ['days', 'date', 'deleteMarker'];
|
||||
const actionTimes = ['days', 'date', 'deleteMarker',
|
||||
'transition', 'nonCurrentVersionTransition'];
|
||||
actionTimes.forEach(t => {
|
||||
if (action[t]) {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
|
@ -597,12 +918,9 @@ class LifecycleConfiguration {
|
|||
return expObj;
|
||||
}
|
||||
if (subExp.Date) {
|
||||
const isoRegex = new RegExp('^(-?(?:[1-9][0-9]*)?[0-9]{4})-' +
|
||||
'(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9])' +
|
||||
':([0-5][0-9]):([0-5][0-9])(.[0-9]+)?(Z)?$');
|
||||
if (!isoRegex.test(subExp.Date[0])) {
|
||||
expObj.error = errors.InvalidArgument.customizeDescription(
|
||||
'Date must be in ISO 8601 format');
|
||||
const error = this._checkDate(subExp.Date[0]);
|
||||
if (error) {
|
||||
expObj.error = error;
|
||||
} else {
|
||||
expObj.date = subExp.Date[0];
|
||||
}
|
||||
|
@ -714,6 +1032,26 @@ class LifecycleConfiguration {
|
|||
if (a.deleteMarker) {
|
||||
assert.strictEqual(typeof a.deleteMarker, 'string');
|
||||
}
|
||||
if (a.nonCurrentVersionTransition) {
|
||||
assert.strictEqual(
|
||||
typeof a.nonCurrentVersionTransition, 'object');
|
||||
a.nonCurrentVersionTransition.forEach(t => {
|
||||
assert.strictEqual(typeof t.noncurrentDays, 'number');
|
||||
assert.strictEqual(typeof t.storageClass, 'string');
|
||||
});
|
||||
}
|
||||
if (a.transition) {
|
||||
assert.strictEqual(typeof a.transition, 'object');
|
||||
a.transition.forEach(t => {
|
||||
if (t.days || t.days === 0) {
|
||||
assert.strictEqual(typeof t.days, 'number');
|
||||
}
|
||||
if (t.date !== undefined) {
|
||||
assert.strictEqual(typeof t.date, 'string');
|
||||
}
|
||||
assert.strictEqual(typeof t.storageClass, 'string');
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
@ -763,7 +1101,8 @@ class LifecycleConfiguration {
|
|||
}
|
||||
|
||||
const Actions = actions.map(action => {
|
||||
const { actionName, days, date, deleteMarker } = action;
|
||||
const { actionName, days, date, deleteMarker,
|
||||
nonCurrentVersionTransition, transition } = action;
|
||||
let Action;
|
||||
if (actionName === 'AbortIncompleteMultipartUpload') {
|
||||
Action = `<${actionName}><DaysAfterInitiation>${days}` +
|
||||
|
@ -780,6 +1119,40 @@ class LifecycleConfiguration {
|
|||
Action = `<${actionName}>${Days}${Date}${DelMarker}` +
|
||||
`</${actionName}>`;
|
||||
}
|
||||
if (actionName === 'NoncurrentVersionTransition') {
|
||||
const xml = [];
|
||||
nonCurrentVersionTransition.forEach(transition => {
|
||||
const { noncurrentDays, storageClass } = transition;
|
||||
xml.push(
|
||||
`<${actionName}>`,
|
||||
`<NoncurrentDays>${noncurrentDays}` +
|
||||
'</NoncurrentDays>',
|
||||
`<StorageClass>${storageClass}</StorageClass>`,
|
||||
`</${actionName}>`,
|
||||
);
|
||||
});
|
||||
Action = xml.join('');
|
||||
}
|
||||
if (actionName === 'Transition') {
|
||||
const xml = [];
|
||||
transition.forEach(transition => {
|
||||
const { days, date, storageClass } = transition;
|
||||
let element;
|
||||
if (days !== undefined) {
|
||||
element = `<Days>${days}</Days>`;
|
||||
}
|
||||
if (date !== undefined) {
|
||||
element = `<Date>${date}</Date>`;
|
||||
}
|
||||
xml.push(
|
||||
`<${actionName}>`,
|
||||
element,
|
||||
`<StorageClass>${storageClass}</StorageClass>`,
|
||||
`</${actionName}>`,
|
||||
);
|
||||
});
|
||||
Action = xml.join('');
|
||||
}
|
||||
return Action;
|
||||
}).join('');
|
||||
return `<Rule>${ID}${Status}${Filter}${Actions}</Rule>`;
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
const crypto = require('crypto');
|
||||
|
||||
const constants = require('../constants');
|
||||
const VersionIDUtils = require('../versioning/VersionID');
|
||||
|
||||
|
@ -8,7 +10,6 @@ const ObjectMDLocation = require('./ObjectMDLocation');
|
|||
* mpuPart metadata for example)
|
||||
*/
|
||||
class ObjectMD {
|
||||
|
||||
/**
|
||||
* Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
|
||||
* reserved for internal use, users should call
|
||||
|
@ -28,9 +29,14 @@ class ObjectMD {
|
|||
} else {
|
||||
this._updateFromParsedJSON(objMd);
|
||||
}
|
||||
if (!this._data['creation-time']) {
|
||||
this.setCreationTime(this.getLastModified());
|
||||
}
|
||||
} else {
|
||||
// set newly-created object md modified time to current time
|
||||
this._data['last-modified'] = new Date().toJSON();
|
||||
const dt = new Date().toJSON();
|
||||
this.setLastModified(dt);
|
||||
this.setCreationTime(dt);
|
||||
}
|
||||
// set latest md model version now that we ensured
|
||||
// backward-compat conversion
|
||||
|
@ -85,6 +91,8 @@ class ObjectMD {
|
|||
'content-length': 0,
|
||||
'content-type': '',
|
||||
'content-md5': '',
|
||||
'content-language': '',
|
||||
'creation-time': undefined,
|
||||
// simple/no version. will expand once object versioning is
|
||||
// introduced
|
||||
'x-amz-version-id': 'null',
|
||||
|
@ -106,6 +114,7 @@ class ObjectMD {
|
|||
},
|
||||
'key': '',
|
||||
'location': null,
|
||||
'azureInfo': undefined,
|
||||
// versionId, isNull, nullVersionId and isDeleteMarker
|
||||
// should be undefined when not set explicitly
|
||||
'isNull': undefined,
|
||||
|
@ -124,6 +133,7 @@ class ObjectMD {
|
|||
role: '',
|
||||
storageType: '',
|
||||
dataStoreVersionId: '',
|
||||
isNFS: null,
|
||||
},
|
||||
'dataStoreName': '',
|
||||
'originOp': '',
|
||||
|
@ -356,6 +366,50 @@ class ObjectMD {
|
|||
return this._data['content-md5'];
|
||||
}
|
||||
|
||||
/**
|
||||
* Set content-language
|
||||
*
|
||||
* @param {string} contentLanguage - content-language
|
||||
* @return {ObjectMD} itself
|
||||
*/
|
||||
setContentLanguage(contentLanguage) {
|
||||
this._data['content-language'] = contentLanguage;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns content-language
|
||||
*
|
||||
* @return {string} content-language
|
||||
*/
|
||||
getContentLanguage() {
|
||||
return this._data['content-language'];
|
||||
}
|
||||
|
||||
/**
|
||||
* Set Creation Date
|
||||
*
|
||||
* @param {string} creationTime - Creation Date
|
||||
* @return {ObjectMD} itself
|
||||
*/
|
||||
setCreationTime(creationTime) {
|
||||
this._data['creation-time'] = creationTime;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns Creation Date
|
||||
*
|
||||
* @return {string} Creation Date
|
||||
*/
|
||||
getCreationTime() {
|
||||
// If creation-time is not set fallback to LastModified
|
||||
if (!this._data['creation-time']) {
|
||||
return this.getLastModified();
|
||||
}
|
||||
return this._data['creation-time'];
|
||||
}
|
||||
|
||||
/**
|
||||
* Set version id
|
||||
*
|
||||
|
@@ -599,6 +653,29 @@ class ObjectMD {
return reducedLocations;
}

/**
* Set the Azure specific information
* @param {object} azureInfo - a plain JS structure representing the
* Azure specific information for a Blob or a Container (see constructor
* of {@link ObjectMDAzureInfo} for a description of the fields of this
* structure)
* @return {ObjectMD} itself
*/
setAzureInfo(azureInfo) {
this._data.azureInfo = azureInfo;
return this;
}

/**
* Get the Azure specific information
* @return {object} a plain JS structure representing the Azure specific
* information for a Blob or a Container and suitable for the constructor
* of {@link ObjectMDAzureInfo}.
*/
getAzureInfo() {
return this._data.azureInfo;
}

/**
* Set metadata isNull value
*
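A minimal sketch, assuming both classes are required from this package (require paths assumed), of how the two accessors above pair with the ObjectMDAzureInfo helper introduced further down in this changeset (field values are illustrative):

```
// Both classes come from this package's models (require paths assumed).
const { ObjectMD, ObjectMDAzureInfo } = require('arsenal').models;

const azureInfo = new ObjectMDAzureInfo({
    blobType: 'BlockBlob',              // illustrative values
    containerPublicAccess: 'container',
});

const md = new ObjectMD();
// Store the plain structure on the object metadata...
md.setAzureInfo(azureInfo.getValue());
// ...and read it back when the Azure frontend needs it.
console.log(md.getAzureInfo().blobType); // 'BlockBlob'
```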
@@ -680,6 +757,19 @@ class ObjectMD {
return this._data.isDeleteMarker || false;
}

/**
* Get if the object is a multipart upload (MPU)
*
* The function checks the "content-md5" field: if it contains a
* dash ('-') it is a MPU, as the content-md5 string ends with
* "-[nbparts]" for MPUs.
*
* @return {boolean} Whether object is a multipart upload
*/
isMultipartUpload() {
return this.getContentMd5().includes('-');
}

/**
* Set metadata versionId value
*
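A quick illustration of the dash test above: multipart ETags end in `-<part count>`, single-part MD5s do not. Both digests below are made-up examples, and the setContentMd5 setter is assumed to exist alongside the getter used here.

```
const md = new ObjectMD();

// Single-part object: a plain MD5 digest, no dash.
md.setContentMd5('9b2cf535f27731c974343645a3985328'); // setter assumed to exist
console.log(md.isMultipartUpload()); // false

// Multipart object: digest-of-digests followed by '-<number of parts>'.
md.setContentMd5('d41d8cd98f00b204e9800998ecf8427e-5');
console.log(md.isMultipartUpload()); // true
```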
@ -707,8 +797,11 @@ class ObjectMD {
|
|||
* @return {string|undefined} The encoded object versionId
|
||||
*/
|
||||
getEncodedVersionId() {
|
||||
if (this.getVersionId()) {
|
||||
return VersionIDUtils.encode(this.getVersionId());
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set metadata uploadId value
|
||||
|
@@ -750,6 +843,20 @@ class ObjectMD {
return this._data.tags;
}

getUserMetadata() {
const metaHeaders = {};
const data = this.getValue();
Object.keys(data).forEach(key => {
if (key.startsWith('x-amz-meta-')) {
metaHeaders[key] = data[key];
}
});
if (Object.keys(metaHeaders).length > 0) {
return JSON.stringify(metaHeaders);
}
return undefined;
}

/**
* Set replication information
*
@ -758,7 +865,7 @@ class ObjectMD {
|
|||
*/
|
||||
setReplicationInfo(replicationInfo) {
|
||||
const { status, backends, content, destination, storageClass, role,
|
||||
storageType, dataStoreVersionId } = replicationInfo;
|
||||
storageType, dataStoreVersionId, isNFS } = replicationInfo;
|
||||
this._data.replicationInfo = {
|
||||
status,
|
||||
backends,
|
||||
|
@ -768,6 +875,7 @@ class ObjectMD {
|
|||
role,
|
||||
storageType: storageType || '',
|
||||
dataStoreVersionId: dataStoreVersionId || '',
|
||||
isNFS: isNFS || null,
|
||||
};
|
||||
return this;
|
||||
}
|
||||
|
@ -786,6 +894,24 @@ class ObjectMD {
|
|||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set whether the replication is occurring from an NFS bucket.
|
||||
* @param {Boolean} isNFS - Whether replication from an NFS bucket
|
||||
* @return {ObjectMD} itself
|
||||
*/
|
||||
setReplicationIsNFS(isNFS) {
|
||||
this._data.replicationInfo.isNFS = isNFS;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get whether the replication is occurring from an NFS bucket.
|
||||
* @return {Boolean} Whether replication from an NFS bucket
|
||||
*/
|
||||
getReplicationIsNFS() {
|
||||
return this._data.replicationInfo.isNFS;
|
||||
}
|
||||
|
||||
setReplicationSiteStatus(site, status) {
|
||||
const backend = this._data.replicationInfo.backends
|
||||
.find(o => o.site === site);
|
||||
|
@ -832,6 +958,11 @@ class ObjectMD {
|
|||
return this;
|
||||
}
|
||||
|
||||
setReplicationStorageType(storageType) {
|
||||
this._data.replicationInfo.storageType = storageType;
|
||||
return this;
|
||||
}
|
||||
|
||||
setReplicationStorageClass(storageClass) {
|
||||
this._data.replicationInfo.storageClass = storageClass;
|
||||
return this;
|
||||
|
@ -913,6 +1044,9 @@ class ObjectMD {
|
|||
Object.keys(metaHeaders).forEach(key => {
|
||||
if (key.startsWith('x-amz-meta-')) {
|
||||
this._data[key] = metaHeaders[key];
|
||||
} else if (key.startsWith('x-ms-meta-')) {
|
||||
const _key = key.replace('x-ms-meta-', 'x-amz-meta-');
|
||||
this._data[_key] = metaHeaders[key];
|
||||
}
|
||||
});
|
||||
// If a multipart object and the acl is already parsed, we update it
|
||||
|
@ -922,6 +1056,20 @@ class ObjectMD {
|
|||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all existing meta headers (used for Azure)
|
||||
*
|
||||
* @return {ObjectMD} itself
|
||||
*/
|
||||
clearMetadataValues() {
|
||||
Object.keys(this._data).forEach(key => {
|
||||
if (key.startsWith('x-amz-meta')) {
|
||||
delete this._data[key];
|
||||
}
|
||||
});
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* overrideMetadataValues (used for complete MPU and object copy)
|
||||
*
|
||||
|
@ -933,6 +1081,39 @@ class ObjectMD {
|
|||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create or update the microVersionId field
|
||||
*
|
||||
* This field can be used to force an update in MongoDB. This can
|
||||
* be needed in the following cases:
|
||||
*
|
||||
* - in case no other metadata field changes
|
||||
*
|
||||
* - to detect a change when fields change but object version does
|
||||
* not change e.g. when ingesting a putObjectTagging coming from
|
||||
* S3C to Zenko
|
||||
*
|
||||
* - to manage conflicts during concurrent updates, using
|
||||
* conditions on the microVersionId field.
|
||||
*
|
||||
* It's a field of 16 hexadecimal characters randomly generated
|
||||
*
|
||||
* @return {ObjectMD} itself
|
||||
*/
|
||||
updateMicroVersionId() {
|
||||
this._data.microVersionId = crypto.randomBytes(8).toString('hex');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the microVersionId field, or null if not set
|
||||
*
|
||||
* @return {string|null} the microVersionId field if exists, or
|
||||
* {null} if it does not exist
|
||||
*/
|
||||
getMicroVersionId() {
|
||||
return this._data.microVersionId || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set object legal hold status
|
||||
* @param {boolean} legalHold - true if legal hold is 'ON' false if 'OFF'
|
||||
|
|
|
@ -0,0 +1,162 @@
|
|||
/**
|
||||
* Helper class to ease access to the Azure specific information for
|
||||
* Blob and Container objects.
|
||||
*/
|
||||
class ObjectMDAzureInfo {
|
||||
/**
|
||||
* @constructor
|
||||
* @param {object} obj - Raw structure for the Azure info on Blob/Container
|
||||
* @param {string} obj.containerPublicAccess - Public access authorization
|
||||
* type
|
||||
* @param {object[]} obj.containerStoredAccessPolicies - Access policies
|
||||
* for Shared Access Signature bearer
|
||||
* @param {object} obj.containerImmutabilityPolicy - data immutability
|
||||
* policy for this container
|
||||
* @param {boolean} obj.containerLegalHoldStatus - legal hold status for
|
||||
* this container
|
||||
* @param {boolean} obj.containerDeletionInProgress - deletion in progress
|
||||
* indicator for this container
|
||||
* @param {string} obj.blobType - defines the type of blob for this object
|
||||
* @param {string} obj.blobContentMD5 - whole object MD5 sum set by the
|
||||
* client through the Azure API
|
||||
* @param {string} obj.blobIssuedETag - backup of the issued ETag on MD only
|
||||
* operations like Set Blob Properties and Set Blob Metadata
|
||||
* @param {object} obj.blobCopyInfo - information pertaining to past and
|
||||
* pending copy operation targeting this object
|
||||
* @param {number} obj.blobSequenceNumber - sequence number for a PageBlob
|
||||
* @param {Date} obj.blobAccessTierChangeTime - date of change of tier
|
||||
* @param {boolean} obj.blobUncommitted - A block has been put for a
|
||||
* nonexistent blob which is about to be created
|
||||
*/
|
||||
constructor(obj) {
|
||||
this._data = {
|
||||
containerPublicAccess: obj.containerPublicAccess,
|
||||
containerStoredAccessPolicies: obj.containerStoredAccessPolicies,
|
||||
containerImmutabilityPolicy: obj.containerImmutabilityPolicy,
|
||||
containerLegalHoldStatus: obj.containerLegalHoldStatus,
|
||||
containerDeletionInProgress: obj.containerDeletionInProgress,
|
||||
blobType: obj.blobType,
|
||||
blobContentMD5: obj.blobContentMD5,
|
||||
blobIssuedETag: obj.blobIssuedETag,
|
||||
blobCopyInfo: obj.blobCopyInfo,
|
||||
blobSequenceNumber: obj.blobSequenceNumber,
|
||||
blobAccessTierChangeTime: obj.blobAccessTierChangeTime,
|
||||
blobUncommitted: obj.blobUncommitted,
|
||||
};
|
||||
}
|
||||
|
||||
getContainerPublicAccess() {
|
||||
return this._data.containerPublicAccess;
|
||||
}
|
||||
|
||||
setContainerPublicAccess(containerPublicAccess) {
|
||||
this._data.containerPublicAccess = containerPublicAccess;
|
||||
return this;
|
||||
}
|
||||
|
||||
getContainerStoredAccessPolicies() {
|
||||
return this._data.containerStoredAccessPolicies;
|
||||
}
|
||||
|
||||
setContainerStoredAccessPolicies(containerStoredAccessPolicies) {
|
||||
this._data.containerStoredAccessPolicies =
|
||||
containerStoredAccessPolicies;
|
||||
return this;
|
||||
}
|
||||
|
||||
getContainerImmutabilityPolicy() {
|
||||
return this._data.containerImmutabilityPolicy;
|
||||
}
|
||||
|
||||
setContainerImmutabilityPolicy(containerImmutabilityPolicy) {
|
||||
this._data.containerImmutabilityPolicy = containerImmutabilityPolicy;
|
||||
return this;
|
||||
}
|
||||
|
||||
getContainerLegalHoldStatus() {
|
||||
return this._data.containerLegalHoldStatus;
|
||||
}
|
||||
|
||||
setContainerLegalHoldStatus(containerLegalHoldStatus) {
|
||||
this._data.containerLegalHoldStatus = containerLegalHoldStatus;
|
||||
return this;
|
||||
}
|
||||
|
||||
getContainerDeletionInProgress() {
|
||||
return this._data.containerDeletionInProgress;
|
||||
}
|
||||
|
||||
setContainerDeletionInProgress(containerDeletionInProgress) {
|
||||
this._data.containerDeletionInProgress = containerDeletionInProgress;
|
||||
return this;
|
||||
}
|
||||
|
||||
getBlobType() {
|
||||
return this._data.blobType;
|
||||
}
|
||||
|
||||
setBlobType(blobType) {
|
||||
this._data.blobType = blobType;
|
||||
return this;
|
||||
}
|
||||
|
||||
getBlobContentMD5() {
|
||||
return this._data.blobContentMD5;
|
||||
}
|
||||
|
||||
setBlobContentMD5(blobContentMD5) {
|
||||
this._data.blobContentMD5 = blobContentMD5;
|
||||
return this;
|
||||
}
|
||||
|
||||
getBlobIssuedETag() {
|
||||
return this._data.blobIssuedETag;
|
||||
}
|
||||
|
||||
setBlobIssuedETag(blobIssuedETag) {
|
||||
this._data.blobIssuedETag = blobIssuedETag;
|
||||
return this;
|
||||
}
|
||||
|
||||
getBlobCopyInfo() {
|
||||
return this._data.blobCopyInfo;
|
||||
}
|
||||
|
||||
setBlobCopyInfo(blobCopyInfo) {
|
||||
this._data.blobCopyInfo = blobCopyInfo;
|
||||
return this;
|
||||
}
|
||||
|
||||
getBlobSequenceNumber() {
|
||||
return this._data.blobSequenceNumber;
|
||||
}
|
||||
|
||||
setBlobSequenceNumber(blobSequenceNumber) {
|
||||
this._data.blobSequenceNumber = blobSequenceNumber;
|
||||
return this;
|
||||
}
|
||||
|
||||
getBlobAccessTierChangeTime() {
|
||||
return this._data.blobAccessTierChangeTime;
|
||||
}
|
||||
|
||||
setBlobAccessTierChangeTime(blobAccessTierChangeTime) {
|
||||
this._data.blobAccessTierChangeTime = blobAccessTierChangeTime;
|
||||
return this;
|
||||
}
|
||||
|
||||
getBlobUncommitted() {
|
||||
return this._data.blobUncommitted;
|
||||
}
|
||||
|
||||
setBlobUncommitted(blobUncommitted) {
|
||||
this._data.blobUncommitted = blobUncommitted;
|
||||
return this;
|
||||
}
|
||||
|
||||
getValue() {
|
||||
return this._data;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ObjectMDAzureInfo;
|
|
@ -3,7 +3,6 @@
|
|||
* 'location' array
|
||||
*/
|
||||
class ObjectMDLocation {
|
||||
|
||||
/**
|
||||
* @constructor
|
||||
* @param {object} locationObj - single data location info
|
||||
|
@ -14,10 +13,14 @@ class ObjectMDLocation {
|
|||
* @param {string} locationObj.dataStoreName - type of data store
|
||||
* @param {string} locationObj.dataStoreETag - internal ETag of
|
||||
* data part
|
||||
* @param {string} [locationObj.dataStoreVersionId] - versionId,
|
||||
* needed for cloud backends
|
||||
* @param {number} [location.cryptoScheme] - if location data is
|
||||
* encrypted: the encryption scheme version
|
||||
* @param {string} [location.cipheredDataKey] - if location data
|
||||
* is encrypted: the base64-encoded ciphered data key
|
||||
* @param {string} [locationObj.blockId] - blockId of the part,
|
||||
* set by the Azure Blob Service REST API frontend
|
||||
*/
|
||||
constructor(locationObj) {
|
||||
this._data = {
|
||||
|
@ -26,6 +29,8 @@ class ObjectMDLocation {
|
|||
size: locationObj.size,
|
||||
dataStoreName: locationObj.dataStoreName,
|
||||
dataStoreETag: locationObj.dataStoreETag,
|
||||
dataStoreVersionId: locationObj.dataStoreVersionId,
|
||||
blockId: locationObj.blockId,
|
||||
};
|
||||
if (locationObj.cryptoScheme) {
|
||||
this._data.cryptoScheme = locationObj.cryptoScheme;
|
||||
|
@ -47,6 +52,7 @@ class ObjectMDLocation {
|
|||
* @param {object} location - single data location info
|
||||
* @param {string} location.key - data backend key
|
||||
* @param {string} location.dataStoreName - type of data store
|
||||
* @param {string} [location.dataStoreVersionId] - data backend version ID
|
||||
* @param {number} [location.cryptoScheme] - if location data is
|
||||
* encrypted: the encryption scheme version
|
||||
* @param {string} [location.cipheredDataKey] - if location data
|
||||
|
@ -57,6 +63,7 @@ class ObjectMDLocation {
|
|||
[
|
||||
'key',
|
||||
'dataStoreName',
|
||||
'dataStoreVersionId',
|
||||
'cryptoScheme',
|
||||
'cipheredDataKey',
|
||||
].forEach(attrName => {
|
||||
|
@ -73,6 +80,10 @@ class ObjectMDLocation {
|
|||
return this._data.dataStoreETag;
|
||||
}
|
||||
|
||||
getDataStoreVersionId() {
|
||||
return this._data.dataStoreVersionId;
|
||||
}
|
||||
|
||||
getPartNumber() {
|
||||
return Number.parseInt(this._data.dataStoreETag.split(':')[0], 10);
|
||||
}
|
||||
|
@ -107,6 +118,15 @@ class ObjectMDLocation {
|
|||
return this._data.cipheredDataKey;
|
||||
}
|
||||
|
||||
getBlockId() {
|
||||
return this._data.blockId;
|
||||
}
|
||||
|
||||
setBlockId(blockId) {
|
||||
this._data.blockId = blockId;
|
||||
return this;
|
||||
}
|
||||
|
||||
getValue() {
|
||||
return this._data;
|
||||
}
|
||||
|
|
|
@ -59,6 +59,7 @@ class ReplicationConfiguration {
|
|||
this._rules = null;
|
||||
this._prevStorageClass = null;
|
||||
this._hasScalityDestination = null;
|
||||
this._preferredReadLocation = null;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -85,6 +86,18 @@ class ReplicationConfiguration {
|
|||
return this._rules;
|
||||
}
|
||||
|
||||
/**
|
||||
* The preferred read location
|
||||
* @return {string|null} - The preferred read location if defined,
|
||||
* otherwise null
|
||||
*
|
||||
* FIXME ideally we should be able to specify one preferred read
|
||||
* location for each rule
|
||||
*/
|
||||
getPreferredReadLocation() {
|
||||
return this._preferredReadLocation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the replication configuration
|
||||
* @return {object} - The replication configuration
|
||||
|
@ -94,6 +107,7 @@ class ReplicationConfiguration {
|
|||
role: this.getRole(),
|
||||
destination: this.getDestination(),
|
||||
rules: this.getRules(),
|
||||
preferredReadLocation: this.getPreferredReadLocation(),
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -292,6 +306,14 @@ class ReplicationConfiguration {
|
|||
return undefined;
|
||||
}
|
||||
const storageClasses = destination.StorageClass[0].split(',');
|
||||
const prefReadIndex = storageClasses.findIndex(storageClass =>
|
||||
storageClass.endsWith(':preferred_read'));
|
||||
if (prefReadIndex !== -1) {
|
||||
const prefRead = storageClasses[prefReadIndex].split(':')[0];
|
||||
// remove :preferred_read tag from storage class name
|
||||
storageClasses[prefReadIndex] = prefRead;
|
||||
this._preferredReadLocation = prefRead;
|
||||
}
|
||||
const isValidStorageClass = storageClasses.every(storageClass => {
|
||||
if (validStorageClasses.includes(storageClass)) {
|
||||
this._hasScalityDestination =
|
||||
|
|
|
@ -10,7 +10,6 @@ const { checkSupportIPv6 } = require('./utils');
|
|||
|
||||
|
||||
class Server {
|
||||
|
||||
/**
|
||||
* @constructor
|
||||
*
|
||||
|
@ -369,6 +368,8 @@ class Server {
|
|||
error: err.stack || err,
|
||||
address: sock.address(),
|
||||
});
|
||||
// socket is not systematically destroyed
|
||||
sock.destroy();
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -342,8 +342,6 @@ class KMIP {
|
|||
return cb(null, response);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@@ -0,0 +1,76 @@
const httpServer = require('../http/server');
const werelogs = require('werelogs');
const errors = require('../../errors');
const ZenkoMetrics = require('../../metrics/ZenkoMetrics');
const { sendSuccess, sendError } = require('./Utils');

function checkStub(log) { // eslint-disable-line
    return true;
}

class HealthProbeServer extends httpServer {
    constructor(params) {
        const logging = new werelogs.Logger('HealthProbeServer');
        super(params.port, logging);
        this.logging = logging;
        this.setBindAddress(params.bindAddress || 'localhost');
        // hooking our request processing function by calling the
        // parent's method for that
        this.onRequest(this._onRequest);
        this._reqHandlers = {
            '/_/health/liveness': this._onLiveness.bind(this),
            '/_/health/readiness': this._onReadiness.bind(this),
            '/_/monitoring/metrics': this._onMetrics.bind(this),
        };
        this._livenessCheck = params.livenessCheck || checkStub;
        this._readinessCheck = params.readinessCheck || checkStub;
    }

    onLiveCheck(f) {
        this._livenessCheck = f;
    }

    onReadyCheck(f) {
        this._readinessCheck = f;
    }

    _onRequest(req, res) {
        const log = this.logging.newRequestLogger();
        log.debug('request received', { method: req.method,
            url: req.url });

        if (req.method !== 'GET') {
            sendError(res, log, errors.MethodNotAllowed);
        } else if (req.url in this._reqHandlers) {
            this._reqHandlers[req.url](req, res, log);
        } else {
            sendError(res, log, errors.InvalidURI);
        }
    }

    _onLiveness(req, res, log) {
        if (this._livenessCheck(log)) {
            sendSuccess(res, log);
        } else {
            sendError(res, log, errors.ServiceUnavailable);
        }
    }

    _onReadiness(req, res, log) {
        if (this._readinessCheck(log)) {
            sendSuccess(res, log);
        } else {
            sendError(res, log, errors.ServiceUnavailable);
        }
    }

    // expose metrics to Prometheus
    _onMetrics(req, res) {
        res.writeHead(200, {
            'Content-Type': ZenkoMetrics.asPrometheusContentType(),
        });
        res.end(ZenkoMetrics.asPrometheus());
    }
}

module.exports = HealthProbeServer;
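A minimal usage sketch for the probe server above, assuming the start() method inherited from the HTTP server base class (documented elsewhere in this changeset) and an application-supplied readiness flag; the require path is assumed:

```
const HealthProbeServer =
    require('arsenal').network.probe.HealthProbeServer; // path assumed

let backendReady = false; // flipped once the application finishes warming up

const probeServer = new HealthProbeServer({
    port: 8000,
    bindAddress: '0.0.0.0',
    // both checks receive a werelogs request logger and return a boolean
    livenessCheck: () => true,
    readinessCheck: () => backendReady,
});

// start() comes from the http server base class
probeServer.start();
// GET /_/health/liveness    -> 200 once the process is up
// GET /_/health/readiness   -> 503 until backendReady is true, then 200
// GET /_/monitoring/metrics -> Prometheus-formatted metrics
```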
@ -3,19 +3,17 @@ const werelogs = require('werelogs');
|
|||
const errors = require('../../errors');
|
||||
|
||||
const DEFAULT_LIVE_ROUTE = '/_/live';
|
||||
const DEFAULT_READY_ROUTE = '/_/live';
|
||||
const DEFAULT_METRICS_ROUTE = '/_/metrics';
|
||||
const DEFAULT_READY_ROUTE = '/_/ready';
|
||||
const DEFAULT_METRICS_ROUTE = '/metrics';
|
||||
|
||||
/**
|
||||
* ProbeDelegate is used to determine if a probe is successful or
|
||||
* if any errors are present.
|
||||
* If everything is working as intended, it is a no-op.
|
||||
* Otherwise, return a string representing what is failing.
|
||||
* ProbeDelegate is used to handle probe checks.
|
||||
* You can sendSuccess and sendError from Utils to handle success
|
||||
* and failure conditions.
|
||||
* @callback ProbeDelegate
|
||||
* @param { import('http').ServerResponse } res - HTTP response for writing
|
||||
* @param {werelogs.Logger} log - Werelogs instance for logging if you choose to
|
||||
* @return {(string|undefined)} String representing issues to report. An empty
|
||||
* string or undefined is used to represent no issues.
|
||||
* @return {undefined}
|
||||
*/
|
||||
|
||||
/**
|
||||
|
@ -91,13 +89,7 @@ class ProbeServer extends httpServer {
|
|||
return;
|
||||
}
|
||||
|
||||
const probeResponse = this._handlers.get(req.url)(res, log);
|
||||
if (probeResponse !== undefined && probeResponse !== '') {
|
||||
// Return an internal error with the response
|
||||
errors.InternalError
|
||||
.customizeDescription(probeResponse)
|
||||
.writeResponse(res);
|
||||
}
|
||||
this._handlers.get(req.url)(res, log);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -0,0 +1,41 @@
/**
 * Send a successful HTTP response of 200 OK
 * @param {http.ServerResponse} res - HTTP response for writing
 * @param {werelogs.Logger} log - Werelogs instance for logging if you choose to
 * @param {string} [message] - Message to send as response, defaults to OK
 * @returns {undefined}
 */
function sendSuccess(res, log, message = 'OK') {
    log.debug('replying with success');
    res.writeHead(200);
    res.end(message);
}

/**
 * Send an Arsenal Error response
 * @param {http.ServerResponse} res - HTTP response for writing
 * @param {werelogs.Logger} log - Werelogs instance for logging if you choose to
 * @param {ArsenalError} error - Error to send back to the user
 * @param {string} [optMessage] - Message to use instead of the error's message
 * @returns {undefined}
 */
function sendError(res, log, error, optMessage) {
    const message = optMessage || error.description || '';
    log.debug('sending back error response',
        {
            httpCode: error.code,
            errorType: error.message,
            error: message,
        },
    );
    res.writeHead(error.code);
    res.end(JSON.stringify({
        errorType: error.message,
        errorMessage: message,
    }));
}

module.exports = {
    sendSuccess,
    sendError,
};
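A short sketch of a probe handler built on the two helpers above; isBackendReachable is a hypothetical application-specific check, and the require paths are assumed:

```
const { sendSuccess, sendError } = require('./Utils'); // path assumed
const errors = require('../../errors');                // path assumed

// A ProbeDelegate-style handler writing the probe result directly on `res`.
// isBackendReachable() stands in for whatever check the application needs.
function onReadiness(res, log) {
    if (isBackendReachable()) {
        sendSuccess(res, log, 'ready');
        return;
    }
    sendError(res, log, errors.ServiceUnavailable, 'backend not reachable');
}
```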
@ -81,6 +81,7 @@ class RESTClient {
|
|||
|
||||
this.host = params.host;
|
||||
this.port = params.port;
|
||||
this.isPassthrough = params.isPassthrough || false;
|
||||
this.setupLogging(params.logApi);
|
||||
this.httpAgent = new HttpAgent({
|
||||
keepAlive: true,
|
||||
|
@ -119,11 +120,13 @@ class RESTClient {
|
|||
doRequest(method, headers, key, log, responseCb) {
|
||||
const reqHeaders = headers || {};
|
||||
const urlKey = key || '';
|
||||
const prefix = this.isPassthrough ?
|
||||
constants.passthroughFileURL : constants.dataFileURL;
|
||||
const reqParams = {
|
||||
hostname: this.host,
|
||||
port: this.port,
|
||||
method,
|
||||
path: `${constants.dataFileURL}/${urlKey}`,
|
||||
path: encodeURI(`${prefix}/${urlKey}`),
|
||||
headers: reqHeaders,
|
||||
agent: this.httpAgent,
|
||||
};
|
||||
|
|
|
@ -7,7 +7,7 @@ const werelogs = require('werelogs');
|
|||
|
||||
const httpServer = require('../http/server');
|
||||
const constants = require('../../constants');
|
||||
const utils = require('./utils');
|
||||
const { parseURL } = require('./utils');
|
||||
const httpUtils = require('../http/utils');
|
||||
const errors = require('../../errors');
|
||||
|
||||
|
@ -37,42 +37,6 @@ function sendError(res, log, error, optMessage) {
|
|||
errorMessage: message })}\n`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the given url and return a pathInfo object. Sanity checks are
|
||||
* performed.
|
||||
*
|
||||
* @param {String} urlStr - URL to parse
|
||||
* @param {Boolean} expectKey - whether the command expects to see a
|
||||
* key in the URL
|
||||
* @return {Object} a pathInfo object with URL items containing the
|
||||
* following attributes:
|
||||
* - pathInfo.service {String} - The name of REST service ("DataFile")
|
||||
* - pathInfo.key {String} - The requested key
|
||||
*/
|
||||
function parseURL(urlStr, expectKey) {
|
||||
const urlObj = url.parse(urlStr);
|
||||
const pathInfo = utils.explodePath(urlObj.path);
|
||||
if (pathInfo.service !== constants.dataFileURL) {
|
||||
throw errors.InvalidAction.customizeDescription(
|
||||
`unsupported service '${pathInfo.service}'`);
|
||||
}
|
||||
if (expectKey && pathInfo.key === undefined) {
|
||||
throw errors.MissingParameter.customizeDescription(
|
||||
'URL is missing key');
|
||||
}
|
||||
if (!expectKey && pathInfo.key !== undefined) {
|
||||
// note: we may implement rewrite functionality by allowing a
|
||||
// key in the URL, though we may still provide the new key in
|
||||
// the Location header to keep immutability property and
|
||||
// atomicity of the update (we would just remove the old
|
||||
// object when the new one has been written entirely in this
|
||||
// case, saving a request over an equivalent PUT + DELETE).
|
||||
throw errors.InvalidURI.customizeDescription(
|
||||
'PUT url cannot contain a key');
|
||||
}
|
||||
return pathInfo;
|
||||
}
|
||||
|
||||
/**
|
||||
* @class
|
||||
* @classdesc REST Server interface
|
||||
|
@ -81,7 +45,6 @@ function parseURL(urlStr, expectKey) {
|
|||
* start() to start listening to the configured port.
|
||||
*/
|
||||
class RESTServer extends httpServer {
|
||||
|
||||
/**
|
||||
* @constructor
|
||||
* @param {Object} params - constructor params
|
||||
|
|
|
@@ -1,8 +1,19 @@
'use strict'; // eslint-disable-line

const errors = require('../../errors');
const constants = require('../../constants');
const url = require('url');

module.exports.explodePath = function explodePath(path) {
const passthroughPrefixLength = constants.passthroughFileURL.length;

function explodePath(path) {
    if (path.startsWith(constants.passthroughFileURL)) {
        const key = path.slice(passthroughPrefixLength + 1);
        return {
            service: constants.passthroughFileURL,
            key: key.length > 0 ? key : undefined,
        };
    }
    const pathMatch = /^(\/[a-zA-Z0-9]+)(\/([0-9a-f]*))?$/.exec(path);
    if (pathMatch) {
        return {
@@ -12,4 +23,46 @@ module.exports.explodePath = function explodePath(path) {
        };
    }
    throw errors.InvalidURI.customizeDescription('malformed URI');
}

/**
 * Parse the given url and return a pathInfo object. Sanity checks are
 * performed.
 *
 * @param {String} urlStr - URL to parse
 * @param {Boolean} expectKey - whether the command expects to see a
 * key in the URL
 * @return {Object} a pathInfo object with URL items containing the
 * following attributes:
 *   - pathInfo.service {String} - The name of REST service ("DataFile")
 *   - pathInfo.key {String} - The requested key
 */
function parseURL(urlStr, expectKey) {
    const urlObj = url.parse(urlStr);
    const pathInfo = explodePath(decodeURI(urlObj.path));
    if ((pathInfo.service !== constants.dataFileURL)
        && (pathInfo.service !== constants.passthroughFileURL)) {
        throw errors.InvalidAction.customizeDescription(
            `unsupported service '${pathInfo.service}'`);
    }
    if (expectKey && pathInfo.key === undefined) {
        throw errors.MissingParameter.customizeDescription(
            'URL is missing key');
    }
    if (!expectKey && pathInfo.key !== undefined) {
        // note: we may implement rewrite functionality by allowing a
        // key in the URL, though we may still provide the new key in
        // the Location header to keep immutability property and
        // atomicity of the update (we would just remove the old
        // object when the new one has been written entirely in this
        // case, saving a request over an equivalent PUT + DELETE).
        throw errors.InvalidURI.customizeDescription(
            'PUT url cannot contain a key');
    }
    return pathInfo;
}

module.exports = {
    explodePath,
    parseURL,
};
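A sketch of how the exported parseURL above behaves, assuming constants.dataFileURL resolves to '/DataFile' (the actual constant values are defined outside this diff; the require path is assumed):

```
const { parseURL } = require('./utils'); // path assumed

// Assuming constants.dataFileURL === '/DataFile':
parseURL('/DataFile/6a7b8c9d0e', true);
// -> { service: '/DataFile', key: '6a7b8c9d0e' }

parseURL('/DataFile', false);
// -> { service: '/DataFile', key: undefined } (PUT-style, no key allowed)

try {
    parseURL('/Unknown/abc', true);
} catch (err) {
    // InvalidAction: unsupported service '/Unknown'
}
```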
@ -17,7 +17,6 @@ const rpc = require('./rpc.js');
|
|||
* RPC client object accessing the sub-level transparently.
|
||||
*/
|
||||
class LevelDbClient extends rpc.BaseClient {
|
||||
|
||||
/**
|
||||
* @constructor
|
||||
*
|
||||
|
@ -78,7 +77,6 @@ class LevelDbClient extends rpc.BaseClient {
|
|||
* env.subDb (env is passed as first parameter of received RPC calls).
|
||||
*/
|
||||
class LevelDbService extends rpc.BaseService {
|
||||
|
||||
/**
|
||||
* @constructor
|
||||
*
|
||||
|
|
|
@ -37,7 +37,6 @@ let streamRPCJSONObj;
|
|||
* an error occurred).
|
||||
*/
|
||||
class BaseClient extends EventEmitter {
|
||||
|
||||
/**
|
||||
* @constructor
|
||||
*
|
||||
|
@ -251,7 +250,6 @@ class BaseClient extends EventEmitter {
|
|||
*
|
||||
*/
|
||||
class BaseService {
|
||||
|
||||
/**
|
||||
* @constructor
|
||||
*
|
||||
|
|
|
@ -0,0 +1,159 @@
|
|||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
const { URL } = require('url');
|
||||
const { decryptSecret } = require('../executables/pensieveCreds/utils');
|
||||
|
||||
function patchLocations(overlayLocations, creds, log) {
|
||||
if (!overlayLocations) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const locations = {};
|
||||
Object.keys(overlayLocations).forEach(k => {
|
||||
const l = overlayLocations[k];
|
||||
const location = {
|
||||
name: k,
|
||||
objectId: l.objectId,
|
||||
details: l.details || {},
|
||||
locationType: l.locationType,
|
||||
};
|
||||
let supportsVersioning = false;
|
||||
let pathStyle = process.env.CI_CEPH !== undefined;
|
||||
|
||||
switch (l.locationType) {
|
||||
case 'location-mem-v1':
|
||||
location.type = 'mem';
|
||||
location.details = { supportsVersioning: true };
|
||||
break;
|
||||
case 'location-file-v1':
|
||||
location.type = 'file';
|
||||
location.details = { supportsVersioning: true };
|
||||
break;
|
||||
case 'location-azure-v1':
|
||||
location.type = 'azure';
|
||||
if (l.details.secretKey && l.details.secretKey.length > 0) {
|
||||
location.details = {
|
||||
bucketMatch: l.details.bucketMatch,
|
||||
azureStorageEndpoint: l.details.endpoint,
|
||||
azureStorageAccountName: l.details.accessKey,
|
||||
azureStorageAccessKey: decryptSecret(creds,
|
||||
l.details.secretKey),
|
||||
azureContainerName: l.details.bucketName,
|
||||
};
|
||||
}
|
||||
break;
|
||||
case 'location-ceph-radosgw-s3-v1':
|
||||
case 'location-scality-ring-s3-v1':
|
||||
pathStyle = true; // fallthrough
|
||||
case 'location-aws-s3-v1':
|
||||
case 'location-wasabi-v1':
|
||||
supportsVersioning = true; // fallthrough
|
||||
case 'location-do-spaces-v1':
|
||||
location.type = 'aws_s3';
|
||||
if (l.details.secretKey && l.details.secretKey.length > 0) {
|
||||
let https = true;
|
||||
let awsEndpoint = l.details.endpoint ||
|
||||
's3.amazonaws.com';
|
||||
if (awsEndpoint.includes('://')) {
|
||||
const url = new URL(awsEndpoint);
|
||||
awsEndpoint = url.host;
|
||||
https = url.protocol.includes('https');
|
||||
}
|
||||
|
||||
location.details = {
|
||||
credentials: {
|
||||
accessKey: l.details.accessKey,
|
||||
secretKey: decryptSecret(creds,
|
||||
l.details.secretKey),
|
||||
},
|
||||
bucketName: l.details.bucketName,
|
||||
bucketMatch: l.details.bucketMatch,
|
||||
serverSideEncryption:
|
||||
Boolean(l.details.serverSideEncryption),
|
||||
region: l.details.region,
|
||||
awsEndpoint,
|
||||
supportsVersioning,
|
||||
pathStyle,
|
||||
https,
|
||||
};
|
||||
}
|
||||
break;
|
||||
case 'location-gcp-v1':
|
||||
location.type = 'gcp';
|
||||
if (l.details.secretKey && l.details.secretKey.length > 0) {
|
||||
location.details = {
|
||||
credentials: {
|
||||
accessKey: l.details.accessKey,
|
||||
secretKey: decryptSecret(creds,
|
||||
l.details.secretKey),
|
||||
},
|
||||
bucketName: l.details.bucketName,
|
||||
mpuBucketName: l.details.mpuBucketName,
|
||||
bucketMatch: l.details.bucketMatch,
|
||||
gcpEndpoint: l.details.endpoint ||
|
||||
'storage.googleapis.com',
|
||||
https: true,
|
||||
};
|
||||
}
|
||||
break;
|
||||
case 'location-scality-sproxyd-v1':
|
||||
location.type = 'scality';
|
||||
if (l.details && l.details.bootstrapList &&
|
||||
l.details.proxyPath) {
|
||||
location.details = {
|
||||
supportsVersioning: true,
|
||||
connector: {
|
||||
sproxyd: {
|
||||
chordCos: l.details.chordCos || null,
|
||||
bootstrap: l.details.bootstrapList,
|
||||
path: l.details.proxyPath,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
break;
|
||||
case 'location-nfs-mount-v1':
|
||||
location.type = 'pfs';
|
||||
if (l.details) {
|
||||
location.details = {
|
||||
supportsVersioning: true,
|
||||
bucketMatch: true,
|
||||
pfsDaemonEndpoint: {
|
||||
host: `${l.name}-cosmos-pfsd`,
|
||||
port: 80,
|
||||
},
|
||||
};
|
||||
}
|
||||
break;
|
||||
case 'location-scality-hdclient-v2':
|
||||
location.type = 'scality';
|
||||
if (l.details && l.details.bootstrapList) {
|
||||
location.details = {
|
||||
supportsVersioning: true,
|
||||
connector: {
|
||||
hdclient: {
|
||||
bootstrap: l.details.bootstrapList,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
break;
|
||||
default:
|
||||
log.info(
|
||||
'unknown location type',
|
||||
{ locationType: l.locationType },
|
||||
);
|
||||
return;
|
||||
}
|
||||
location.sizeLimitGB = l.sizeLimitGB || null;
|
||||
location.isTransient = Boolean(l.isTransient);
|
||||
location.legacyAwsBehavior = Boolean(l.legacyAwsBehavior);
|
||||
locations[location.name] = location;
|
||||
return;
|
||||
});
|
||||
return locations;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
patchLocations,
|
||||
};
|
|
@ -38,6 +38,10 @@
|
|||
"type": "string",
|
||||
"pattern": "^arn:aws:iam::[0-9]{12}:saml-provider/[\\w._-]{1,128}$"
|
||||
},
|
||||
"principalFederatedOidcIdp": {
|
||||
"type": "string",
|
||||
"pattern": "^(?:http(s)?:\/\/)?[\\w.-]+(?:\\.[\\w\\.-]+)+[\\w\\-\\._~:/?#[\\]@!\\$&'\\(\\)\\*\\+,;=.]+$"
|
||||
},
|
||||
"principalAWSItem": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
|
@ -98,6 +102,9 @@
|
|||
"oneOf": [
|
||||
{
|
||||
"$ref": "#/definitions/principalFederatedSamlIdp"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/principalFederatedOidcIdp"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
@ -30,6 +30,7 @@ const sharedActionMap = {
|
|||
bypassGovernanceRetention: 's3:BypassGovernanceRetention',
|
||||
listMultipartUploads: 's3:ListBucketMultipartUploads',
|
||||
listParts: 's3:ListMultipartUploadParts',
|
||||
metadataSearch: 's3:MetadataSearch',
|
||||
multipartDelete: 's3:AbortMultipartUpload',
|
||||
objectDelete: 's3:DeleteObject',
|
||||
objectDeleteTagging: 's3:DeleteObjectTagging',
|
||||
|
@ -116,6 +117,7 @@ const actionMonitoringMapS3 = {
|
|||
initiateMultipartUpload: 'CreateMultipartUpload',
|
||||
listMultipartUploads: 'ListMultipartUploads',
|
||||
listParts: 'ListParts',
|
||||
metadataSearch: 'MetadataSearch',
|
||||
multiObjectDelete: 'DeleteObjects',
|
||||
multipartDelete: 'AbortMultipartUpload',
|
||||
objectCopy: 'CopyObject',
|
||||
|
@ -159,6 +161,7 @@ const actionMapIAM = {
|
|||
getPolicyVersion: 'iam:GetPolicyVersion',
|
||||
getUser: 'iam:GetUser',
|
||||
listAccessKeys: 'iam:ListAccessKeys',
|
||||
listEntitiesForPolicy: 'iam:ListEntitiesForPolicy',
|
||||
listGroupPolicies: 'iam:ListGroupPolicies',
|
||||
listGroups: 'iam:ListGroups',
|
||||
listGroupsForUser: 'iam:ListGroupsForUser',
|
||||
|
|
|
@ -146,6 +146,8 @@ conditions.findConditionKey = (key, requestContext) => {
|
|||
map.set('s3:ObjLocationConstraint',
|
||||
headers['x-amz-meta-scal-location-constraint']);
|
||||
map.set('sts:ExternalId', requestContext.getRequesterExternalId());
|
||||
map.set('keycloak:groups', requesterInfo.keycloakGroup);
|
||||
map.set('keycloak:roles', requesterInfo.keycloakRole);
|
||||
map.set('iam:PolicyArn', requestContext.getPolicyArn());
|
||||
// s3:ExistingObjectTag - Used to check that existing object tag has
|
||||
// specific tag key and value. Extraction of correct tag key is done in CloudServer.
|
||||
|
|
|
@ -6,7 +6,6 @@ const crypto = require('crypto');
|
|||
* data through a stream
|
||||
*/
|
||||
class MD5Sum extends Transform {
|
||||
|
||||
/**
|
||||
* @constructor
|
||||
*/
|
||||
|
@ -40,7 +39,6 @@ class MD5Sum extends Transform {
|
|||
this.emit('hashed');
|
||||
callback(null);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = MD5Sum;
|
||||
|
|
|
@ -121,7 +121,7 @@ log, cb) => {
|
|||
return cb(errors.BadDigest);
|
||||
}
|
||||
return cb(errors.InternalError.customizeDescription(
|
||||
`Error returned from Azure: ${err.message}`)
|
||||
`Error returned from Azure: ${err.message}`),
|
||||
);
|
||||
}
|
||||
const md5 = result.headers['content-md5'] || '';
|
||||
|
|
|
@ -33,7 +33,7 @@ convertMethods.listMultipartUploads = xmlParams => {
|
|||
xml.push('<?xml version="1.0" encoding="UTF-8"?>',
|
||||
'<ListMultipartUploadsResult ' +
|
||||
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
|
||||
`<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`
|
||||
`<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`,
|
||||
);
|
||||
|
||||
// For certain XML elements, if it is `undefined`, AWS returns either an
|
||||
|
@ -58,7 +58,7 @@ convertMethods.listMultipartUploads = xmlParams => {
|
|||
});
|
||||
|
||||
xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`,
|
||||
`<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`
|
||||
`<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`,
|
||||
);
|
||||
|
||||
l.Uploads.forEach(upload => {
|
||||
|
@ -84,14 +84,14 @@ convertMethods.listMultipartUploads = xmlParams => {
|
|||
`<StorageClass>${escapeForXml(val.StorageClass)}` +
|
||||
'</StorageClass>',
|
||||
`<Initiated>${escapeForXml(val.Initiated)}</Initiated>`,
|
||||
'</Upload>'
|
||||
'</Upload>',
|
||||
);
|
||||
});
|
||||
|
||||
l.CommonPrefixes.forEach(prefix => {
|
||||
xml.push('<CommonPrefixes>',
|
||||
`<Prefix>${escapeForXml(prefix)}</Prefix>`,
|
||||
'</CommonPrefixes>'
|
||||
'</CommonPrefixes>',
|
||||
);
|
||||
});
|
||||
|
||||
|
|
|
@ -5,7 +5,6 @@ const Readable = require('stream').Readable;
|
|||
* This class is used to produce zeros filled buffers for a reader consumption
|
||||
*/
|
||||
class NullStream extends Readable {
|
||||
|
||||
/**
|
||||
* Construct a new zeros filled buffers producer that will
|
||||
* produce as much bytes as specified by the range parameter, or the size
|
||||
|
|
|
@@ -68,6 +68,31 @@ function _checkUnmodifiedSince(ifUnmodifiedSinceTime, lastModified) {
    return res;
}

/**
 * checks 'if-modified-since' and 'if-unmodified-since' headers if included in
 * request against last-modified date of object
 * @param {object} headers - headers from request object
 * @param {string} lastModified - last modified date of object
 * @return {object} contains modifiedSince and unmodifiedSince res objects
 */
function checkDateModifiedHeaders(headers, lastModified) {
    let lastModifiedDate = new Date(lastModified);
    lastModifiedDate.setMilliseconds(0);
    lastModifiedDate = lastModifiedDate.getTime();

    const ifModifiedSinceHeader = headers['if-modified-since'] ||
        headers['x-amz-copy-source-if-modified-since'];
    const ifUnmodifiedSinceHeader = headers['if-unmodified-since'] ||
        headers['x-amz-copy-source-if-unmodified-since'];

    const modifiedSinceRes = _checkModifiedSince(ifModifiedSinceHeader,
        lastModifiedDate);
    const unmodifiedSinceRes = _checkUnmodifiedSince(ifUnmodifiedSinceHeader,
        lastModifiedDate);

    return { modifiedSinceRes, unmodifiedSinceRes };
}

/**
 * validateConditionalHeaders - validates 'if-modified-since',
 * 'if-unmodified-since', 'if-match' or 'if-none-match' headers if included in
@@ -79,23 +104,14 @@ function _checkUnmodifiedSince(ifUnmodifiedSinceTime, lastModified) {
 * empty object if no error
 */
function validateConditionalHeaders(headers, lastModified, contentMD5) {
    let lastModifiedDate = new Date(lastModified);
    lastModifiedDate.setMilliseconds(0);
    lastModifiedDate = lastModifiedDate.getTime();
    const ifMatchHeader = headers['if-match'] ||
        headers['x-amz-copy-source-if-match'];
    const ifNoneMatchHeader = headers['if-none-match'] ||
        headers['x-amz-copy-source-if-none-match'];
    const ifModifiedSinceHeader = headers['if-modified-since'] ||
        headers['x-amz-copy-source-if-modified-since'];
    const ifUnmodifiedSinceHeader = headers['if-unmodified-since'] ||
        headers['x-amz-copy-source-if-unmodified-since'];
    const etagMatchRes = _checkEtagMatch(ifMatchHeader, contentMD5);
    const etagNoneMatchRes = _checkEtagNoneMatch(ifNoneMatchHeader, contentMD5);
    const modifiedSinceRes = _checkModifiedSince(ifModifiedSinceHeader,
        lastModifiedDate);
    const unmodifiedSinceRes = _checkUnmodifiedSince(ifUnmodifiedSinceHeader,
        lastModifiedDate);
    const { modifiedSinceRes, unmodifiedSinceRes } =
        checkDateModifiedHeaders(headers, lastModified);
    // If-Unmodified-Since condition evaluates to false and If-Match
    // is not present, then return the error. Otherwise, If-Unmodified-Since is
    // silent when If-Match match, and when If-Match does not match, it's the
@@ -120,5 +136,6 @@ module.exports = {
    _checkEtagNoneMatch,
    _checkModifiedSince,
    _checkUnmodifiedSince,
    checkDateModifiedHeaders,
    validateConditionalHeaders,
};
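A small sketch of calling the new checkDateModifiedHeaders helper on its own, as a caller that only needs the date-based conditions might; header values are illustrative and the require path is assumed:

```
const { checkDateModifiedHeaders } =
    require('./validateConditionalHeaders'); // path assumed

const lastModified = 'Wed, 01 Jun 2022 10:00:00 GMT';
const headers = { 'if-modified-since': 'Wed, 01 Jun 2022 12:00:00 GMT' };

const { modifiedSinceRes, unmodifiedSinceRes } =
    checkDateModifiedHeaders(headers, lastModified);
// Each result records whether its header was present and whether the
// condition failed; here the object was not modified after the supplied
// date, so a GET relying on modifiedSinceRes would answer 304 Not Modified.
```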
@ -10,6 +10,7 @@ const routeOPTIONS = require('./routes/routeOPTIONS');
|
|||
const routesUtils = require('./routesUtils');
|
||||
const routeWebsite = require('./routes/routeWebsite');
|
||||
|
||||
const { objectKeyByteLimit } = require('../constants');
|
||||
const requestUtils = require('../../lib/policyEvaluator/requestUtils');
|
||||
|
||||
const routeMap = {
|
||||
|
@ -57,8 +58,14 @@ function checkBucketAndKey(bucketName, objectKey, method, reqQuery,
|
|||
blacklistedPrefixes.object);
|
||||
if (!result.isValid) {
|
||||
log.debug('invalid object key', { objectKey });
|
||||
return errors.InvalidArgument.customizeDescription('Object key ' +
|
||||
`must not start with "${result.invalidPrefix}".`);
|
||||
if (result.invalidPrefix) {
|
||||
return errors.InvalidArgument.customizeDescription('Invalid ' +
|
||||
'prefix - object key cannot start with ' +
|
||||
`"${result.invalidPrefix}".`);
|
||||
}
|
||||
return errors.KeyTooLong.customizeDescription('Object key is too ' +
|
||||
'long. Maximum number of bytes allowed in keys is ' +
|
||||
`${objectKeyByteLimit}.`);
|
||||
}
|
||||
}
|
||||
if ((reqQuery.partNumber || reqQuery.uploadId)
|
||||
|
@ -115,8 +122,8 @@ function checkTypes(req, res, params, logger, s3config) {
|
|||
assert.strictEqual(typeof pre, 'string',
|
||||
'bad routes param: each blacklisted object prefix must be a string');
|
||||
});
|
||||
assert.strictEqual(typeof params.dataRetrievalFn, 'function',
|
||||
'bad routes param: dataRetrievalFn must be a defined function');
|
||||
assert.strictEqual(typeof params.dataRetrievalParams, 'object',
|
||||
'bad routes param: dataRetrievalParams must be a defined object');
|
||||
if (s3config) {
|
||||
assert.strictEqual(typeof s3config, 'object', 'bad routes param: s3config must be an object');
|
||||
}
|
||||
|
@ -138,7 +145,8 @@ function checkTypes(req, res, params, logger, s3config) {
|
|||
* @param {string[]} params.blacklistedPrefixes.object - object prefixes
|
||||
* @param {object} params.unsupportedQueries - object containing true/false
|
||||
* values for whether queries are supported
|
||||
* @param {function} params.dataRetrievalFn - function to retrieve data
|
||||
* @param {function} params.dataRetrievalParams - params to create instance of
|
||||
* data retrieval function
|
||||
* @param {RequestLogger} logger - werelogs logger instance
|
||||
* @param {String} [s3config] - s3 configuration
|
||||
* @returns {undefined}
|
||||
|
@ -153,7 +161,7 @@ function routes(req, res, params, logger, s3config) {
|
|||
allEndpoints,
|
||||
websiteEndpoints,
|
||||
blacklistedPrefixes,
|
||||
dataRetrievalFn,
|
||||
dataRetrievalParams,
|
||||
} = params;
|
||||
|
||||
const clientInfo = {
|
||||
|
@ -174,7 +182,8 @@ function routes(req, res, params, logger, s3config) {
|
|||
logger.newRequestLoggerFromSerializedUids(reqUids) :
|
||||
logger.newRequestLogger());
|
||||
|
||||
if (!req.url.startsWith('/_/healthcheck')) {
|
||||
if (!req.url.startsWith('/_/healthcheck') &&
|
||||
!req.url.startsWith('/_/report')) {
|
||||
log.info('received request', clientInfo);
|
||||
}
|
||||
|
||||
|
@ -235,10 +244,11 @@ function routes(req, res, params, logger, s3config) {
|
|||
|
||||
// bucket website request
|
||||
if (websiteEndpoints && websiteEndpoints.indexOf(req.parsedHost) > -1) {
|
||||
return routeWebsite(req, res, api, log, statsClient, dataRetrievalFn);
|
||||
return routeWebsite(req, res, api, log, statsClient,
|
||||
dataRetrievalParams);
|
||||
}
|
||||
|
||||
return method(req, res, api, log, statsClient, dataRetrievalFn);
|
||||
return method(req, res, api, log, statsClient, dataRetrievalParams);
|
||||
}
|
||||
|
||||
module.exports = routes;
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
const errors = require('../../errors');
|
||||
const routesUtils = require('../routesUtils');
|
||||
|
||||
function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
|
||||
function routerGET(request, response, api, log, statsClient,
|
||||
dataRetrievalParams) {
|
||||
log.debug('routing request', { method: 'routerGET' });
|
||||
if (request.bucketName === undefined && request.objectKey !== undefined) {
|
||||
routesUtils.responseXMLBody(errors.NoSuchBucket, null, response, log);
|
||||
|
@ -99,6 +100,13 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
|
|||
return routesUtils.responseXMLBody(err, xml, response,
|
||||
log, corsHeaders);
|
||||
});
|
||||
} else if (request.query.search !== undefined) {
|
||||
api.callApiMethod('metadataSearch', request, response, log,
|
||||
(err, xml, corsHeaders) => {
|
||||
routesUtils.statsReport500(err, statsClient);
|
||||
return routesUtils.responseXMLBody(err, xml, response,
|
||||
log, corsHeaders);
|
||||
});
|
||||
} else {
|
||||
// GET bucket
|
||||
api.callApiMethod('bucketGet', request, response, log,
|
||||
|
@ -157,8 +165,8 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
|
|||
log.end().addDefaultFields({ contentLength });
|
||||
routesUtils.statsReport500(err, statsClient);
|
||||
return routesUtils.responseStreamData(err, request.query,
|
||||
resMetaHeaders, dataGetInfo, dataRetrievalFn, response,
|
||||
range, log);
|
||||
resMetaHeaders, dataGetInfo, dataRetrievalParams,
|
||||
response, range, log);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@ const errors = require('../../errors');
|
|||
const routesUtils = require('../routesUtils');
|
||||
|
||||
function routerWebsite(request, response, api, log, statsClient,
|
||||
dataRetrievalFn) {
|
||||
dataRetrievalParams) {
|
||||
log.debug('routing request', { method: 'routerWebsite' });
|
||||
// website endpoint only supports GET and HEAD and must have a bucket
|
||||
// http://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteEndpoints.html
|
||||
|
@ -27,7 +27,7 @@ function routerWebsite(request, response, api, log, statsClient,
|
|||
// user has their own error page
|
||||
if (err && dataGetInfo) {
|
||||
return routesUtils.streamUserErrorPage(err, dataGetInfo,
|
||||
dataRetrievalFn, response, resMetaHeaders, log);
|
||||
dataRetrievalParams, response, resMetaHeaders, log);
|
||||
}
|
||||
// send default error html response
|
||||
if (err) {
|
||||
|
@ -37,7 +37,7 @@ function routerWebsite(request, response, api, log, statsClient,
|
|||
}
|
||||
// no error, stream data
|
||||
return routesUtils.responseStreamData(null, request.query,
|
||||
resMetaHeaders, dataGetInfo, dataRetrievalFn, response,
|
||||
resMetaHeaders, dataGetInfo, dataRetrievalParams, response,
|
||||
null, log);
|
||||
});
|
||||
}
|
||||
|
|
|
@ -4,6 +4,9 @@ const errors = require('../errors');
|
|||
const constants = require('../constants');
|
||||
const { eachSeries } = require('async');
|
||||
|
||||
const DataWrapper = require('../storage/data/DataWrapper');
|
||||
const { objectKeyByteLimit } = require('../constants');
|
||||
|
||||
const responseErr = new Error();
|
||||
responseErr.code = 'ResponseError';
|
||||
responseErr.message = 'response closed by client request before all data sent';
|
||||
|
@ -115,7 +118,7 @@ const XMLResponseBackend = {
|
|||
`<Message>${errCode.description}</Message>`,
|
||||
'<Resource></Resource>',
|
||||
`<RequestId>${log.getSerializedUids()}</RequestId>`,
|
||||
'</Error>'
|
||||
'</Error>',
|
||||
);
|
||||
const xmlStr = xml.join('');
|
||||
const bytesSent = Buffer.byteLength(xmlStr);
|
||||
|
@ -257,7 +260,7 @@ function okContentHeadersResponse(overrideParams, resHeaders,
|
|||
return response;
|
||||
}
|
||||
|
||||
function retrieveDataAzure(locations, retrieveDataFn, response, logger) {
|
||||
function retrieveDataAzure(locations, retrieveDataParams, response, logger) {
|
||||
const errorHandlerFn = () => { response.connection.destroy(); };
|
||||
const current = locations.shift();
|
||||
|
||||
|
@ -265,7 +268,18 @@ function retrieveDataAzure(locations, retrieveDataFn, response, logger) {
|
|||
logger.error('error piping data from source');
|
||||
errorHandlerFn(err);
|
||||
});
|
||||
return retrieveDataFn(current, response, logger, err => {
|
||||
const {
|
||||
client,
|
||||
implName,
|
||||
config,
|
||||
kms,
|
||||
metadata,
|
||||
locStorageCheckFn,
|
||||
vault,
|
||||
} = retrieveDataParams;
|
||||
const data = new DataWrapper(
|
||||
client, implName, config, kms, metadata, locStorageCheckFn, vault);
|
||||
return data.get(current, response, logger, err => {
|
||||
if (err) {
|
||||
logger.error('failed to get object from source', {
|
||||
error: err,
|
||||
|
@ -278,12 +292,12 @@ function retrieveDataAzure(locations, retrieveDataFn, response, logger) {
|
|||
});
|
||||
}
|
||||
|
||||
function retrieveData(locations, retrieveDataFn, response, log) {
|
||||
function retrieveData(locations, retrieveDataParams, response, log) {
|
||||
if (locations.length === 0) {
|
||||
return response.end();
|
||||
}
|
||||
if (locations[0].azureStreamingOptions) {
|
||||
return retrieveDataAzure(locations, retrieveDataFn, response, log);
|
||||
return retrieveDataAzure(locations, retrieveDataParams, response, log);
|
||||
}
|
||||
// response is of type http.ServerResponse
|
||||
let responseDestroyed = false;
|
||||
|
@ -293,16 +307,33 @@ function retrieveData(locations, retrieveDataFn, response, log) {
|
|||
response.destroy();
|
||||
responseDestroyed = true;
|
||||
};
|
||||
|
||||
const _destroyReadable = readable => {
|
||||
// s3-data sends Readable stream only which does not implement destroy
|
||||
if (readable && readable.destroy) {
|
||||
readable.destroy();
|
||||
}
|
||||
};
|
||||
|
||||
// the S3-client might close the connection while we are processing it
|
||||
response.once('close', () => {
|
||||
responseDestroyed = true;
|
||||
if (currentStream) {
|
||||
currentStream.destroy();
|
||||
}
|
||||
_destroyReadable(currentStream);
|
||||
});
|
||||
|
||||
const {
|
||||
client,
|
||||
implName,
|
||||
config,
|
||||
kms,
|
||||
metadata,
|
||||
locStorageCheckFn,
|
||||
vault,
|
||||
} = retrieveDataParams;
|
||||
+        const data = new DataWrapper(
+            client, implName, config, kms, metadata, locStorageCheckFn, vault);
        return eachSeries(locations,
-            (current, next) => retrieveDataFn(current, response, log,
+            (current, next) => data.get(current, response, log,
                (err, readable) => {
                    // NB: readable is of IncomingMessage type
                    if (err) {

@ -319,7 +350,7 @@ function retrieveData(locations, retrieveDataFn, response, log) {
                    if (responseDestroyed || response.isclosed) {
                        log.debug(
                            'response destroyed before readable could stream');
-                        readable.destroy();
+                        _destroyReadable(readable);
                        return next(responseErr);
                    }
                    // readable stream successfully consumed

@ -345,7 +376,7 @@ function retrieveData(locations, retrieveDataFn, response, log) {
                // call end for all cases (error/success) per node.js docs
                // recommendation
                response.end();
            }
        },
    );
}

@ -461,7 +492,8 @@ const routesUtils = {
     * @param {array | null} dataLocations --
     * - array of locations to get streams from sproxyd
     * - null if no data for object and only metadata
-     * @param {function} retrieveDataFn - function to handle streaming data
+     * @param {object} retrieveDataParams - params to create instance of data
+     * retrieval function
     * @param {http.ServerResponse} response - response sent to the client
     * @param {array | undefined} range - range in format of [start, end]
     * if range header contained in request or undefined if not

@ -469,7 +501,7 @@ const routesUtils = {
     * @return {undefined}
     */
    responseStreamData(errCode, overrideParams, resHeaders, dataLocations,
-        retrieveDataFn, response, range, log) {
+        retrieveDataParams, response, range, log) {
        if (errCode && !response.headersSent) {
            return XMLResponseBackend.errorResponse(errCode, response, log,
                resHeaders);

@ -505,20 +537,21 @@ const routesUtils = {
                httpCode: response.statusCode,
            });
        });
-        return retrieveData(dataLocations, retrieveDataFn, response, log);
+        return retrieveData(dataLocations, retrieveDataParams, response, log);
    },

    /**
     * @param {object} err -- arsenal error object
     * @param {array} dataLocations --
     * - array of locations to get streams from backend
-     * @param {function} retrieveDataFn - function to handle streaming data
+     * @param {object} retrieveDataParams - params to create instance of
+     * data retrieval function
     * @param {http.ServerResponse} response - response sent to the client
     * @param {object} corsHeaders - CORS-related response headers
     * @param {object} log - Werelogs logger
     * @return {undefined}
     */
-    streamUserErrorPage(err, dataLocations, retrieveDataFn, response,
+    streamUserErrorPage(err, dataLocations, retrieveDataParams, response,
        corsHeaders, log) {
        setCommonResponseHeaders(corsHeaders, response, log);
        response.writeHead(err.code, { 'Content-type': 'text/html' });

@ -527,7 +560,7 @@ const routesUtils = {
                httpCode: response.statusCode,
            });
        });
-        return retrieveData(dataLocations, retrieveDataFn, response, log);
+        return retrieveData(dataLocations, retrieveDataParams, response, log);
    },

    /**

@ -558,7 +591,7 @@ const routesUtils = {
            `<h1>${err.code} ${response.statusMessage}</h1>`,
            '<ul>',
            `<li>Code: ${err.message}</li>`,
-            `<li>Message: ${err.description}</li>`
+            `<li>Message: ${err.description}</li>`,
        );

        if (!userErrorPageFailure && bucketName) {

@ -568,7 +601,7 @@ const routesUtils = {
                `<li>RequestId: ${log.getSerializedUids()}</li>`,
                // AWS response contains HostId here.
                // TODO: consider adding
-                '</ul>'
+                '</ul>',
            );
            if (userErrorPageFailure) {
                html.push(

@ -578,13 +611,13 @@ const routesUtils = {
                '<ul>',
                `<li>Code: ${err.message}</li>`,
                `<li>Message: ${err.description}</li>`,
-                '</ul>'
+                '</ul>',
            );
        }
        html.push(
            '<hr/>',
            '</body>',
-            '</html>'
+            '</html>',
        );

        return response.end(html.join(''), 'utf8', () => {

@ -814,7 +847,7 @@ const routesUtils = {
            return bucketName;
        }
        throw new Error(
-            `bad request: hostname ${host} is not in valid endpoints`
+            `bad request: hostname ${host} is not in valid endpoints`,
        );
    },

@ -881,6 +914,9 @@ const routesUtils = {
        if (invalidPrefix) {
            return { isValid: false, invalidPrefix };
        }
+        if (Buffer.byteLength(objectKey, 'utf8') > objectKeyByteLimit) {
+            return { isValid: false };
+        }
        return { isValid: true };
    },
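Note: the hunks above replace the injected `retrieveDataFn` callback with a `retrieveDataParams` object, from which `retrieveData` now builds its own `DataWrapper`. A minimal caller-side sketch of the new shape, assuming `retrieveDataParams` is a plain object carrying the same dependencies the `DataWrapper` constructor receives in the first hunk (`client`, `implName`, `config`, `kms`, `metadata`, `locStorageCheckFn`, `vault`); the surrounding variable names are illustrative, not an exact CloudServer call site:

```js
// Hypothetical caller: instead of pre-building a data retrieval function,
// pass the raw dependencies and let routesUtils construct the DataWrapper.
const retrieveDataParams = {
    client,             // data backend client
    implName,           // data backend implementation name
    config,             // service configuration
    kms,                // key management service wrapper
    metadata,           // metadata wrapper
    locStorageCheckFn,  // location storage check function
    vault,              // vault client
};

routesUtils.responseStreamData(errCode, overrideParams, resHeaders,
    dataLocations, retrieveDataParams, response, range, log);
```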

@ -29,5 +29,4 @@ server.start(() => {
    logger.info('Metadata Proxy Server successfully started. ' +
        `Using the ${metadataWrapper.implName} backend`);
});

```

@ -160,7 +160,7 @@ class TestMatrix {
        const result = Object.keys(matrixChild.params)
            .every(currentKey =>
                Object.prototype.toString.call(
-                    matrixChild.params[currentKey]
+                    matrixChild.params[currentKey],
                ).indexOf('Array') === -1);

        if (result === true) {
package.json (38 changes)

@ -3,14 +3,14 @@
  "engines": {
    "node": ">=16"
  },
- "version": "7.10.15",
+ "version": "8.1.39",
  "description": "Common utilities for the S3 project components",
  "main": "build/index.js",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/scality/Arsenal.git"
  },
- "author": "Giorgio Regni",
+ "author": "Scality Inc.",
  "license": "Apache-2.0",
  "bugs": {
    "url": "https://github.com/scality/Arsenal/issues"

@ -21,17 +21,18 @@
    "JSONStream": "^1.0.0",
    "agentkeepalive": "^4.1.3",
    "ajv": "6.12.2",
-   "async": "~2.1.5",
+   "async": "~2.6.1",
    "aws-sdk": "2.80.0",
    "azure-storage": "2.10.3",
    "backo": "^1.1.0",
    "base-x": "3.0.8",
    "base62": "2.0.1",
    "bson": "4.0.0",
-   "debug": "~2.6.9",
+   "debug": "~4.1.0",
    "diskusage": "^1.1.1",
    "fcntl": "github:scality/node-fcntl#0.2.0",
    "hdclient": "scality/hdclient#1.1.0",
    "https-proxy-agent": "^2.2.0",
    "ioredis": "^4.28.5",
    "ipaddr.js": "1.9.1",
    "level": "~5.0.1",

@ -39,11 +40,11 @@
    "mongodb": "^3.0.1",
    "node-forge": "^0.7.1",
    "prom-client": "10.2.3",
-   "simple-glob": "^0.2",
-   "socket.io": "~2.3.0",
-   "socket.io-client": "~2.3.0",
+   "simple-glob": "^0.2.0",
+   "socket.io": "2.4.1",
+   "socket.io-client": "2.4.0",
    "sproxydclient": "github:scality/sproxydclient#8.0.3",
-   "utf8": "2.1.2",
+   "utf8": "3.0.0",
    "uuid": "^3.0.1",
    "werelogs": "scality/werelogs#8.1.0",
    "xml2js": "~0.4.23"

@ -57,13 +58,13 @@
    "@sinonjs/fake-timers": "^6.0.1",
    "@types/jest": "^27.4.1",
    "@types/node": "^17.0.21",
-   "eslint": "2.13.1",
+   "eslint": "^8.9.0",
    "eslint-config-airbnb": "6.2.0",
-   "eslint-config-scality": "scality/Guidelines#7.10.2",
+   "eslint-config-scality": "scality/Guidelines#ec33dfb",
    "eslint-plugin-react": "^4.3.0",
    "jest": "^27.5.1",
    "mocha": "8.0.1",
    "mongodb-memory-server": "^6.0.2",
    "nyc": "^15.1.0",
    "sinon": "^9.0.2",
    "temp": "0.9.1",
    "ts-jest": "^27.1.3",

@ -77,11 +78,15 @@
    "test": "jest tests/unit",
    "build": "tsc",
    "prepare": "yarn build || true",
-   "ft_test": "jest tests/functional --testTimeout=120000 --forceExit"
+   "ft_test": "jest tests/functional --testTimeout=120000 --forceExit",
+   "coverage": "nyc --clean jest tests --coverage --testTimeout=120000 --forceExit"
  },
  "private": true,
  "jest": {
    "private": true,
    "maxWorkers": 1,
    "coverageReporters": [
      "json"
    ],
    "collectCoverageFrom": [
      "lib/**/*.{js,ts}",
      "index.js"

@ -94,5 +99,12 @@
      }
    }
-   }
+   },
+   "nyc": {
+     "tempDirectory": "coverage",
+     "reporter": [
+       "lcov",
+       "text"
+     ]
+   }
  }

@ -41,7 +41,7 @@ describe('KMIP Low Level Driver', () => {
                return done(err);
            }
            const responsePayload = response.lookup(
-                'Response Message/Batch Item/Response Payload'
+                'Response Message/Batch Item/Response Payload',
            )[0];
            assert.deepStrictEqual(responsePayload,
                requestPayload);

@ -0,0 +1,463 @@
|
|||
const async = require('async');
|
||||
const assert = require('assert');
|
||||
const sinon = require('sinon');
|
||||
const werelogs = require('werelogs');
|
||||
const { MongoMemoryReplSet } = require('mongodb-memory-server');
|
||||
const { errors, versioning } = require('../../../../index');
|
||||
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
|
||||
const BucketInfo = require('../../../../lib/models/BucketInfo');
|
||||
const MetadataWrapper =
|
||||
require('../../../../lib/storage/metadata/MetadataWrapper');
|
||||
const genVID = require('../../../../lib/versioning/VersionID').generateVersionId;
|
||||
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
|
||||
|
||||
const IMPL_NAME = 'mongodb';
|
||||
const DB_NAME = 'metadata';
|
||||
const BUCKET_NAME = 'test-bucket';
|
||||
const replicationGroupId = 'RG001';
|
||||
|
||||
const mongoserver = new MongoMemoryReplSet({
|
||||
debug: false,
|
||||
instanceOpts: [
|
||||
{ port: 27018 },
|
||||
],
|
||||
replSet: {
|
||||
name: 'rs0',
|
||||
count: 1,
|
||||
DB_NAME,
|
||||
storageEngine: 'ephemeralForTest',
|
||||
},
|
||||
});
|
||||
|
||||
let uidCounter = 0;
|
||||
function generateVersionId() {
|
||||
return genVID(`${process.pid}.${uidCounter++}`,
|
||||
replicationGroupId);
|
||||
}
|
||||
|
||||
const variations = [
|
||||
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
|
||||
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
|
||||
];
|
||||
describe('MongoClientInterface::metadata.deleteObjectMD', () => {
|
||||
let metadata;
|
||||
let collection;
|
||||
|
||||
    function getObjectCount(cb) {
        collection.countDocuments((err, count) => {
            if (err) {
                // return early so the callback cannot fire twice on error
                return cb(err);
            }
            return cb(null, count);
        });
    }
|
||||
|
||||
function getObject(key, cb) {
|
||||
collection.findOne({
|
||||
_id: key,
|
||||
}, {}, (err, doc) => {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
if (!doc) {
|
||||
return cb(errors.NoSuchKey);
|
||||
}
|
||||
return cb(null, doc.value);
|
||||
});
|
||||
}
|
||||
|
||||
beforeAll(done => {
|
||||
mongoserver.waitUntilRunning().then(() => {
|
||||
const opts = {
|
||||
mongodb: {
|
||||
replicaSetHosts: 'localhost:27018',
|
||||
writeConcern: 'majority',
|
||||
replicaSet: 'rs0',
|
||||
readPreference: 'primary',
|
||||
database: DB_NAME,
|
||||
},
|
||||
};
|
||||
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||
metadata.setup(done);
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
async.series([
|
||||
next => metadata.close(next),
|
||||
next => mongoserver.stop()
|
||||
.then(() => next())
|
||||
.catch(next),
|
||||
], done);
|
||||
});
|
||||
|
||||
variations.forEach(variation => {
|
||||
const itOnlyInV1 = variation.vFormat === 'v1' ? it : it.skip;
|
||||
describe(`vFormat : ${variation.vFormat}`, () => {
|
||||
beforeEach(done => {
|
||||
const bucketMD = BucketInfo.fromObj({
|
||||
_name: BUCKET_NAME,
|
||||
_owner: 'testowner',
|
||||
_ownerDisplayName: 'testdisplayname',
|
||||
_creationDate: new Date().toJSON(),
|
||||
_acl: {
|
||||
Canned: 'private',
|
||||
FULL_CONTROL: [],
|
||||
WRITE: [],
|
||||
WRITE_ACP: [],
|
||||
READ: [],
|
||||
READ_ACP: [],
|
||||
},
|
||||
_mdBucketModelVersion: 10,
|
||||
_transient: false,
|
||||
_deleted: false,
|
||||
_serverSideEncryption: null,
|
||||
_versioningConfiguration: null,
|
||||
_locationConstraint: 'us-east-1',
|
||||
_readLocationConstraint: null,
|
||||
_cors: null,
|
||||
_replicationConfiguration: null,
|
||||
_lifecycleConfiguration: null,
|
||||
_uid: '',
|
||||
_isNFS: null,
|
||||
ingestion: null,
|
||||
});
|
||||
async.series([
|
||||
next => {
|
||||
metadata.client.defaultBucketKeyFormat = variation.vFormat;
|
||||
return next();
|
||||
},
|
||||
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
collection = metadata.client.getCollection(BUCKET_NAME);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
afterEach(done => {
|
||||
metadata.deleteBucket(BUCKET_NAME, logger, done);
|
||||
});
|
||||
|
||||
it(`Should delete non versioned object ${variation.vFormat}`, done => {
|
||||
const params = {
|
||||
objName: 'non-deleted-object',
|
||||
objVal: {
|
||||
key: 'non-deleted-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
};
|
||||
const versionParams = {
|
||||
versioning: false,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
return async.series([
|
||||
next => {
|
||||
// we put the master version of object
|
||||
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
|
||||
versionParams, logger, next);
|
||||
},
|
||||
next => {
|
||||
// we put the master version of a second object
|
||||
params.objName = 'object-to-deleted';
|
||||
params.objVal.key = 'object-to-deleted';
|
||||
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
|
||||
versionParams, logger, next);
|
||||
},
|
||||
next => {
|
||||
// we delete the second object ('object-to-deleted')
|
||||
metadata.deleteObjectMD(BUCKET_NAME, params.objName, null, logger, next);
|
||||
},
|
||||
next => {
|
||||
// Object must be removed
|
||||
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchKey);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
// only 1 object remaining in db
|
||||
getObjectCount((err, count) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(count, 1);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
], done);
|
||||
});
|
||||
|
||||
it(`Should not throw error when object non existent ${variation.vFormat}`, done => {
|
||||
const objName = 'non-existent-object';
|
||||
metadata.deleteObjectMD(BUCKET_NAME, objName, null, logger, err => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`Should not throw error when bucket non existent ${variation.vFormat}`, done => {
|
||||
const objName = 'non-existent-object';
|
||||
metadata.deleteObjectMD(BUCKET_NAME, objName, null, logger, err => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`Master should not be updated when a non-latest version is deleted ${variation.vFormat}`, done => {
|
||||
let versionId1 = null;
|
||||
const params = {
|
||||
objName: 'test-object',
|
||||
objVal: {
|
||||
key: 'test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
vFormat: 'v0',
|
||||
};
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
return async.series([
|
||||
next => {
|
||||
// we start by creating a new version and master
|
||||
versionId1 = generateVersionId(this.replicationGroupId);
|
||||
params.versionId = versionId1;
|
||||
params.objVal.versionId = versionId1;
|
||||
versionParams.versionId = versionId1;
|
||||
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
|
||||
versionParams, logger, next);
|
||||
},
|
||||
next => {
|
||||
// we create a second version of the same object (master is updated)
|
||||
params.objVal.versionId = 'version2';
|
||||
versionParams.versionId = null;
|
||||
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
|
||||
versionParams, logger, next);
|
||||
},
|
||||
next => {
|
||||
// we delete the first version
|
||||
metadata.deleteObjectMD(BUCKET_NAME, params.objName, { versionId: versionId1 },
|
||||
logger, next);
|
||||
},
|
||||
next => {
|
||||
// the first version should no longer be available
|
||||
metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId: versionId1 }, logger, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchKey);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
// master must be containing second version metadata
|
||||
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.notStrictEqual(data.versionId, versionId1);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
// master and one version remaining in db
|
||||
getObjectCount((err, count) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(count, 2);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
], done);
|
||||
});
|
||||
|
||||
it(`Master should be updated when last version is deleted ${variation.vFormat}`, done => {
|
||||
let versionId1;
|
||||
let versionId2;
|
||||
|
||||
const params = {
|
||||
objName: 'test-object',
|
||||
objVal: {
|
||||
key: 'test-object',
|
||||
versionId: 'null',
|
||||
isLast: false,
|
||||
},
|
||||
};
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
return async.series([
|
||||
next => {
|
||||
// we start by creating a new version and master
|
||||
versionId1 = generateVersionId(this.replicationGroupId);
|
||||
params.versionId = versionId1;
|
||||
params.objVal.versionId = versionId1;
|
||||
versionParams.versionId = versionId1;
|
||||
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
|
||||
versionParams, logger, next);
|
||||
},
|
||||
next => {
|
||||
// we create a second version of the same object (master is updated)
|
||||
// params.objVal.versionId = 'version2';
|
||||
// versionParams.versionId = null;
|
||||
versionId2 = generateVersionId(this.replicationGroupId);
|
||||
params.versionId = versionId2;
|
||||
params.objVal.versionId = versionId2;
|
||||
versionParams.versionId = versionId2;
|
||||
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
|
||||
versionParams, logger, next);
|
||||
},
|
||||
next => {
|
||||
// deleting latest version
|
||||
metadata.deleteObjectMD(BUCKET_NAME, params.objName, { versionId: versionId2 },
|
||||
logger, next);
|
||||
},
|
||||
next => {
|
||||
// latest version must be removed
|
||||
metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId: versionId2 }, logger, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchKey);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
// master must be updated to contain first version data
|
||||
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.versionId, versionId1);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
// one master and version in the db
|
||||
getObjectCount((err, count) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(count, 2);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
], done);
|
||||
});
|
||||
|
||||
it(`Should fail when version id non existent ${variation.vFormat}`, done => {
|
||||
const versionId = generateVersionId(this.replicationGroupId);
|
||||
const objName = 'test-object';
|
||||
metadata.deleteObjectMD(BUCKET_NAME, objName, { versionId }, logger, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchKey);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
itOnlyInV1(`Should create master when delete marker removed ${variation.vFormat}`, done => {
|
||||
const objVal = {
|
||||
key: 'test-object',
|
||||
isDeleteMarker: false,
|
||||
};
|
||||
const params = {
|
||||
versioning: true,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
let firstVersionVersionId;
|
||||
let deleteMarkerVersionId;
|
||||
async.series([
|
||||
// We first create a new version and master
|
||||
next => metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, (err, res) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
firstVersionVersionId = JSON.parse(res).versionId;
|
||||
return next();
|
||||
}),
|
||||
// putting a delete marker as last version
|
||||
next => {
|
||||
objVal.isDeleteMarker = true;
|
||||
params.versionId = null;
|
||||
return metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, (err, res) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
deleteMarkerVersionId = JSON.parse(res).versionId;
|
||||
return next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
// using fake clock to override the setTimeout used by the repair
|
||||
const clock = sinon.useFakeTimers();
|
||||
return metadata.deleteObjectMD(BUCKET_NAME, 'test-object', { versionId: deleteMarkerVersionId },
|
||||
logger, () => {
|
||||
// running the repair callback
|
||||
clock.runAll();
|
||||
clock.restore();
|
||||
return next();
|
||||
});
|
||||
},
|
||||
// waiting for the repair callback to finish
|
||||
next => setTimeout(next, 100),
|
||||
// master should be created
|
||||
next => {
|
||||
getObject('\x7fMtest-object', (err, object) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(object.key, 'test-object');
|
||||
assert.strictEqual(object.versionId, firstVersionVersionId);
|
||||
assert.strictEqual(object.isDeleteMarker, false);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
], done);
|
||||
});
|
||||
|
||||
itOnlyInV1(`Should delete master when delete marker becomes last version ${variation.vFormat}`, done => {
|
||||
const objVal = {
|
||||
key: 'test-object',
|
||||
isDeleteMarker: false,
|
||||
};
|
||||
const params = {
|
||||
versioning: true,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
let versionId;
|
||||
async.series([
|
||||
// We first create a new version and master
|
||||
next => metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, next),
|
||||
// putting a delete marker as last version
|
||||
next => {
|
||||
objVal.isDeleteMarker = true;
|
||||
params.versionId = null;
|
||||
return metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, next);
|
||||
},
|
||||
// putting new version on top of delete marker
|
||||
next => {
|
||||
objVal.isDeleteMarker = false;
|
||||
return metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, (err, res) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
versionId = JSON.parse(res).versionId;
|
||||
return next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
// using fake clock to override the setTimeout used by the repair
|
||||
const clock = sinon.useFakeTimers();
|
||||
return metadata.deleteObjectMD(BUCKET_NAME, 'test-object', { versionId },
|
||||
logger, () => {
|
||||
// running the repair callback
|
||||
clock.runAll();
|
||||
clock.restore();
|
||||
return next();
|
||||
});
|
||||
},
|
||||
// waiting for the repair callback to finish
|
||||
next => setTimeout(next, 100),
|
||||
// master must be deleted
|
||||
next => {
|
||||
getObject('\x7fMtest-object', err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchKey);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
], done);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
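The delete-marker tests above rely on the MongoClientInterface's deferred master "repair": after a delete, a placeholder (PHD) master can be left behind and is repaired asynchronously, so the tests fast-forward that timer with sinon fake timers and then yield briefly. A small sketch of that pattern as a standalone helper; the helper name and the 100 ms grace delay come from the tests above, not from a library API:

```js
const sinon = require('sinon');

// Hypothetical helper wrapping the fake-timer dance used in the tests above:
// delete a version, fire the scheduled repair immediately, then leave a short
// real delay so the repair's own async work can complete.
function deleteVersionAndRepair(metadata, bucket, key, versionId, log, cb) {
    const clock = sinon.useFakeTimers();
    metadata.deleteObjectMD(bucket, key, { versionId }, log, err => {
        clock.runAll();   // run the setTimeout scheduled by the repair logic
        clock.restore();
        if (err) {
            return cb(err);
        }
        return setTimeout(() => cb(null), 100);
    });
}
```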
|
|
@ -0,0 +1,283 @@
|
|||
const async = require('async');
|
||||
const assert = require('assert');
|
||||
const werelogs = require('werelogs');
|
||||
const { MongoMemoryReplSet } = require('mongodb-memory-server');
|
||||
const { errors, versioning } = require('../../../../index');
|
||||
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
|
||||
const BucketInfo = require('../../../../lib/models/BucketInfo');
|
||||
const MetadataWrapper =
|
||||
require('../../../../lib/storage/metadata/MetadataWrapper');
|
||||
const genVID = versioning.VersionID.generateVersionId;
|
||||
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
|
||||
const { formatMasterKey } = require('../../../../lib/storage/metadata/mongoclient/utils');
|
||||
|
||||
const IMPL_NAME = 'mongodb';
|
||||
const DB_NAME = 'metadata';
|
||||
const BUCKET_NAME = 'test-bucket';
|
||||
const replicationGroupId = 'RG001';
|
||||
|
||||
const mongoserver = new MongoMemoryReplSet({
|
||||
debug: false,
|
||||
instanceOpts: [
|
||||
{ port: 27019 },
|
||||
],
|
||||
replSet: {
|
||||
name: 'rs0',
|
||||
count: 1,
|
||||
DB_NAME,
|
||||
storageEngine: 'ephemeralForTest',
|
||||
},
|
||||
});
|
||||
|
||||
let uidCounter = 0;
|
||||
function generateVersionId() {
|
||||
return genVID(`${process.pid}.${uidCounter++}`,
|
||||
replicationGroupId);
|
||||
}
|
||||
|
||||
const variations = [
|
||||
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
|
||||
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
|
||||
];
|
||||
|
||||
describe('MongoClientInterface::metadata.getObjectMD', () => {
|
||||
let metadata;
|
||||
let collection;
|
||||
let versionId1;
|
||||
let versionId2;
|
||||
|
||||
let params = {
|
||||
objName: 'pfx1-test-object',
|
||||
objVal: {
|
||||
key: 'pfx1-test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
};
|
||||
|
||||
function updateMasterObject(objName, versionId, objVal, vFormat, cb) {
|
||||
const mKey = formatMasterKey(objName, vFormat);
|
||||
collection.updateOne(
|
||||
{
|
||||
_id: mKey,
|
||||
$or: [{
|
||||
'value.versionId': {
|
||||
$exists: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
'value.versionId': {
|
||||
$gt: versionId,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
$set: { _id: mKey, value: objVal },
|
||||
},
|
||||
{ upsert: true },
|
||||
err => {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
return cb(null);
|
||||
});
|
||||
}
|
||||
|
||||
beforeAll(done => {
|
||||
mongoserver.waitUntilRunning().then(() => {
|
||||
const opts = {
|
||||
mongodb: {
|
||||
replicaSetHosts: 'localhost:27019',
|
||||
writeConcern: 'majority',
|
||||
replicaSet: 'rs0',
|
||||
readPreference: 'primary',
|
||||
database: DB_NAME,
|
||||
},
|
||||
};
|
||||
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||
metadata.setup(done);
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
async.series([
|
||||
next => metadata.close(next),
|
||||
next => mongoserver.stop()
|
||||
.then(() => next())
|
||||
.catch(next),
|
||||
], done);
|
||||
});
|
||||
|
||||
variations.forEach(variation => {
|
||||
const itOnlyInV1 = variation.vFormat === 'v1' ? it : it.skip;
|
||||
describe(`vFormat : ${variation.vFormat}`, () => {
|
||||
beforeEach(done => {
|
||||
const bucketMD = BucketInfo.fromObj({
|
||||
_name: BUCKET_NAME,
|
||||
_owner: 'testowner',
|
||||
_ownerDisplayName: 'testdisplayname',
|
||||
_creationDate: new Date().toJSON(),
|
||||
_acl: {
|
||||
Canned: 'private',
|
||||
FULL_CONTROL: [],
|
||||
WRITE: [],
|
||||
WRITE_ACP: [],
|
||||
READ: [],
|
||||
READ_ACP: [],
|
||||
},
|
||||
_mdBucketModelVersion: 10,
|
||||
_transient: false,
|
||||
_deleted: false,
|
||||
_serverSideEncryption: null,
|
||||
_versioningConfiguration: null,
|
||||
_locationConstraint: 'us-east-1',
|
||||
_readLocationConstraint: null,
|
||||
_cors: null,
|
||||
_replicationConfiguration: null,
|
||||
_lifecycleConfiguration: null,
|
||||
_uid: '',
|
||||
_isNFS: null,
|
||||
ingestion: null,
|
||||
});
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
async.series([
|
||||
next => {
|
||||
metadata.client.defaultBucketKeyFormat = variation.vFormat;
|
||||
return next();
|
||||
},
|
||||
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
collection = metadata.client.getCollection(BUCKET_NAME);
|
||||
return next();
|
||||
}),
|
||||
next => {
|
||||
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
|
||||
versionParams, logger, (err, res) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
versionId1 = JSON.parse(res).versionId;
|
||||
return next(null);
|
||||
});
|
||||
},
|
||||
next => {
|
||||
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
|
||||
versionParams, logger, (err, res) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
versionId2 = JSON.parse(res).versionId;
|
||||
return next(null);
|
||||
});
|
||||
},
|
||||
], done);
|
||||
});
|
||||
|
||||
afterEach(done => {
|
||||
// reset params
|
||||
params = {
|
||||
objName: 'pfx1-test-object',
|
||||
objVal: {
|
||||
key: 'pfx1-test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
};
|
||||
metadata.deleteBucket(BUCKET_NAME, logger, done);
|
||||
});
|
||||
|
||||
it(`Should return latest version of object ${variation.it}`, done =>
|
||||
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, object) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(object.key, params.objName);
|
||||
assert.strictEqual(object.versionId, versionId2);
|
||||
return done();
|
||||
}));
|
||||
|
||||
it(`Should return the specified version of object ${variation.it}`, done =>
|
||||
metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId: versionId1 }, logger, (err, object) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(object.key, params.objName);
|
||||
assert.strictEqual(object.versionId, versionId1);
|
||||
return done();
|
||||
}));
|
||||
|
||||
it(`Should throw error when version non existent ${variation.it}`, done => {
|
||||
const versionId = '1234567890';
|
||||
return metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId }, logger, (err, object) => {
|
||||
assert.deepStrictEqual(object, undefined);
|
||||
assert.deepStrictEqual(err, errors.NoSuchKey);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`Should throw error when object non existent ${variation.it}`, done => {
|
||||
const objName = 'non-existent-object';
|
||||
return metadata.getObjectMD(BUCKET_NAME, objName, null, logger, err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchKey);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`Should throw error when bucket non existent ${variation.it}`, done => {
|
||||
const bucketName = 'non-existent-bucket';
|
||||
return metadata.getObjectMD(bucketName, params.objName, null, logger, (err, object) => {
|
||||
assert.deepStrictEqual(object, undefined);
|
||||
assert.deepStrictEqual(err, errors.NoSuchKey);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`Should return latest version when master is PHD ${variation.it}`, done => {
|
||||
async.series([
|
||||
next => {
|
||||
const objectName = variation.vFormat === 'v0' ? 'pfx1-test-object' : '\x7fMpfx1-test-object';
|
||||
// adding isPHD flag to master
|
||||
const phdVersionId = generateVersionId();
|
||||
params.objVal.versionId = phdVersionId;
|
||||
params.objVal.isPHD = true;
|
||||
updateMasterObject(objectName, phdVersionId, params.objVal,
|
||||
variation.vFormat, next);
|
||||
},
|
||||
// Should return latest object version
|
||||
next => metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, object) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(object.key, params.objName);
|
||||
assert.strictEqual(object.versionId, versionId2);
|
||||
delete params.isPHD;
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
itOnlyInV1(`Should return last version when master deleted ${variation.vFormat}`, done => {
|
||||
const versioningParams = {
|
||||
versioning: true,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
async.series([
|
||||
// putting a delete marker as last version
|
||||
next => {
|
||||
params.versionId = null;
|
||||
params.objVal.isDeleteMarker = true;
|
||||
return metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal, versioningParams,
|
||||
logger, next);
|
||||
},
|
||||
next => metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, object) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(object.key, params.objName);
|
||||
assert.strictEqual(object.isDeleteMarker, true);
|
||||
params.objVal.isDeleteMarker = null;
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
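These tests read documents straight from the collection, so they need the raw `_id` of the master key, which differs per bucket key format: v0 stores the master under the bare object key, while v1 prefixes it (the tests probe `'\x7fMtest-object'` and `'\x7fMpfx1-test-object'`). A short illustration using the `formatMasterKey` util imported above; the expected return values are inferred from the literals used in these tests:

```js
const { formatMasterKey } =
    require('../../../../lib/storage/metadata/mongoclient/utils');

// v0 buckets: the master document id is the object key itself.
formatMasterKey('pfx1-test-object', 'v0'); // expected: 'pfx1-test-object'

// v1 buckets: the master id carries the '\x7fM' prefix the tests look up.
formatMasterKey('pfx1-test-object', 'v1'); // expected: '\x7fMpfx1-test-object'
```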
|
|
@ -0,0 +1,412 @@
|
|||
const async = require('async');
|
||||
const assert = require('assert');
|
||||
const werelogs = require('werelogs');
|
||||
const { MongoMemoryReplSet } = require('mongodb-memory-server');
|
||||
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
|
||||
const BucketInfo = require('../../../../lib/models/BucketInfo');
|
||||
const MetadataWrapper =
|
||||
require('../../../../lib/storage/metadata/MetadataWrapper');
|
||||
const { versioning } = require('../../../../index');
|
||||
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
|
||||
|
||||
const IMPL_NAME = 'mongodb';
|
||||
const DB_NAME = 'metadata';
|
||||
const BUCKET_NAME = 'test-bucket';
|
||||
|
||||
const mongoserver = new MongoMemoryReplSet({
|
||||
debug: false,
|
||||
instanceOpts: [
|
||||
{ port: 27020 },
|
||||
],
|
||||
replSet: {
|
||||
name: 'rs0',
|
||||
count: 1,
|
||||
DB_NAME,
|
||||
storageEngine: 'ephemeralForTest',
|
||||
},
|
||||
});
|
||||
|
||||
const variations = [
|
||||
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
|
||||
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
|
||||
];
|
||||
|
||||
describe('MongoClientInterface::metadata.listObject', () => {
|
||||
let metadata;
|
||||
|
||||
function putBulkObjectVersions(bucketName, objName, objVal, params, versionNb, cb) {
|
||||
let count = 0;
|
||||
async.whilst(
|
||||
() => count < versionNb,
|
||||
cbIterator => {
|
||||
count++;
|
||||
// eslint-disable-next-line
|
||||
return metadata.putObjectMD(bucketName, objName, objVal, params,
|
||||
logger, cbIterator);
|
||||
},
|
||||
err => {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
return cb(null);
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
beforeAll(done => {
|
||||
mongoserver.waitUntilRunning().then(() => {
|
||||
const opts = {
|
||||
mongodb: {
|
||||
replicaSetHosts: 'localhost:27020',
|
||||
writeConcern: 'majority',
|
||||
replicaSet: 'rs0',
|
||||
readPreference: 'primary',
|
||||
database: DB_NAME,
|
||||
},
|
||||
};
|
||||
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||
metadata.setup(done);
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
async.series([
|
||||
next => metadata.close(next),
|
||||
next => mongoserver.stop()
|
||||
.then(() => next())
|
||||
.catch(next),
|
||||
], done);
|
||||
});
|
||||
|
||||
variations.forEach(variation => {
|
||||
beforeEach(done => {
|
||||
const bucketMD = BucketInfo.fromObj({
|
||||
_name: BUCKET_NAME,
|
||||
_owner: 'testowner',
|
||||
_ownerDisplayName: 'testdisplayname',
|
||||
_creationDate: new Date().toJSON(),
|
||||
_acl: {
|
||||
Canned: 'private',
|
||||
FULL_CONTROL: [],
|
||||
WRITE: [],
|
||||
WRITE_ACP: [],
|
||||
READ: [],
|
||||
READ_ACP: [],
|
||||
},
|
||||
_mdBucketModelVersion: 10,
|
||||
_transient: false,
|
||||
_deleted: false,
|
||||
_serverSideEncryption: null,
|
||||
_versioningConfiguration: null,
|
||||
_locationConstraint: 'us-east-1',
|
||||
_readLocationConstraint: null,
|
||||
_cors: null,
|
||||
_replicationConfiguration: null,
|
||||
_lifecycleConfiguration: null,
|
||||
_uid: '',
|
||||
_isNFS: null,
|
||||
ingestion: null,
|
||||
});
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
async.series([
|
||||
next => {
|
||||
metadata.client.defaultBucketKeyFormat = variation.vFormat;
|
||||
return next();
|
||||
},
|
||||
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
return next();
|
||||
}),
|
||||
next => {
|
||||
const params = {
|
||||
objName: 'pfx1-test-object',
|
||||
objVal: {
|
||||
key: 'pfx1-test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
nbVersions: 5,
|
||||
};
|
||||
putBulkObjectVersions(BUCKET_NAME, params.objName, params.objVal, versionParams,
|
||||
params.nbVersions, next);
|
||||
},
|
||||
next => {
|
||||
const params = {
|
||||
objName: 'pfx2-test-object',
|
||||
objVal: {
|
||||
key: 'pfx2-test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
nbVersions: 5,
|
||||
};
|
||||
putBulkObjectVersions(BUCKET_NAME, params.objName, params.objVal, versionParams,
|
||||
params.nbVersions, next);
|
||||
},
|
||||
next => {
|
||||
const params = {
|
||||
objName: 'pfx3-test-object',
|
||||
objVal: {
|
||||
key: 'pfx3-test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
nbVersions: 5,
|
||||
};
|
||||
putBulkObjectVersions(BUCKET_NAME, params.objName, params.objVal, versionParams,
|
||||
params.nbVersions, next);
|
||||
},
|
||||
], done);
|
||||
});
|
||||
|
||||
afterEach(done => {
|
||||
metadata.deleteBucket(BUCKET_NAME, logger, done);
|
||||
});
|
||||
|
||||
it(`Should list master versions of objects ${variation.it}`, done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterMaster',
|
||||
maxKeys: 100,
|
||||
};
|
||||
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.Contents.length, 3);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx2-test-object');
|
||||
assert.strictEqual(data.Contents[2].key, 'pfx3-test-object');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`Should truncate list of master versions of objects ${variation.it}`, done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterMaster',
|
||||
maxKeys: 2,
|
||||
};
|
||||
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.Contents.length, 2);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx2-test-object');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`Should list master versions of objects that start with prefix ${variation.it}`, done => {
|
||||
const bucketName = BUCKET_NAME;
|
||||
const params = {
|
||||
listingType: 'DelimiterMaster',
|
||||
maxKeys: 100,
|
||||
prefix: 'pfx2',
|
||||
};
|
||||
return metadata.listObject(bucketName, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx2-test-object');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`Should return empty results when bucket non existent (master) ${variation.it}`, done => {
|
||||
const bucketName = 'non-existent-bucket';
|
||||
const params = {
|
||||
listingType: 'DelimiterMaster',
|
||||
maxKeys: 100,
|
||||
};
|
||||
return metadata.listObject(bucketName, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert(data);
|
||||
assert.strictEqual(data.Contents.length, 0);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`Should list all versions of objects ${variation.it}`, done => {
|
||||
const bucketName = BUCKET_NAME;
|
||||
const params = {
|
||||
listingType: 'DelimiterVersions',
|
||||
maxKeys: 1000,
|
||||
};
|
||||
const versionsPerKey = {};
|
||||
return metadata.listObject(bucketName, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.Versions.length, 15);
|
||||
data.Versions.forEach(version => {
|
||||
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
|
||||
});
|
||||
assert.strictEqual(versionsPerKey['pfx1-test-object'], 5);
|
||||
assert.strictEqual(versionsPerKey['pfx2-test-object'], 5);
|
||||
assert.strictEqual(versionsPerKey['pfx3-test-object'], 5);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`Should truncate list of versions of objects ${variation.it}`, done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterVersions',
|
||||
maxKeys: 5,
|
||||
};
|
||||
const versionsPerKey = {};
|
||||
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.Versions.length, 5);
|
||||
data.Versions.forEach(version => {
|
||||
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
|
||||
});
|
||||
assert.strictEqual(versionsPerKey['pfx1-test-object'], 5);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`Should list versions of objects that start with prefix ${variation.it}`, done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterVersions',
|
||||
maxKeys: 100,
|
||||
prefix: 'pfx2',
|
||||
};
|
||||
const versionsPerKey = {};
|
||||
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.Versions.length, 5);
|
||||
data.Versions.forEach(version => {
|
||||
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
|
||||
});
|
||||
assert.strictEqual(versionsPerKey['pfx2-test-object'], 5);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`Should return empty results when bucket not existing (version) ${variation.it}`, done => {
|
||||
const bucketName = 'non-existent-bucket';
|
||||
const params = {
|
||||
listingType: 'DelimiterVersions',
|
||||
maxKeys: 100,
|
||||
};
|
||||
return metadata.listObject(bucketName, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert(data);
|
||||
assert.strictEqual(data.Versions.length, 0);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`should check entire list with pagination (version) ${variation.it}`, done => {
|
||||
const versionsPerKey = {};
|
||||
const bucketName = BUCKET_NAME;
|
||||
const get = (maxKeys, keyMarker, versionIdMarker, cb) => metadata.listObject(bucketName, {
|
||||
listingType: 'DelimiterVersions',
|
||||
maxKeys,
|
||||
keyMarker,
|
||||
versionIdMarker,
|
||||
}, logger, (err, res) => {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
res.Versions.forEach(version => {
|
||||
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
|
||||
});
|
||||
if (res.IsTruncated) {
|
||||
return get(maxKeys, res.NextKeyMarker, res.NextVersionIdMarker, cb);
|
||||
}
|
||||
return cb(null);
|
||||
});
|
||||
return get(3, null, null, err => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(Object.keys(versionsPerKey).length, 3);
|
||||
assert.strictEqual(versionsPerKey['pfx1-test-object'], 5);
|
||||
assert.strictEqual(versionsPerKey['pfx2-test-object'], 5);
|
||||
assert.strictEqual(versionsPerKey['pfx3-test-object'], 5);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it(`should not list phd master key when listing masters ${variation.it}`, done => {
|
||||
const objVal = {
|
||||
key: 'pfx1-test-object',
|
||||
versionId: 'null',
|
||||
};
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
};
|
||||
const params = {
|
||||
listingType: 'DelimiterMaster',
|
||||
prefix: 'pfx1',
|
||||
};
|
||||
let versionId;
|
||||
let lastVersionId;
|
||||
async.series([
|
||||
next => metadata.putObjectMD(BUCKET_NAME, 'pfx1-test-object', objVal, versionParams,
|
||||
logger, (err, res) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
versionId = JSON.parse(res).versionId;
|
||||
return next(null);
|
||||
}),
|
||||
next => metadata.putObjectMD(BUCKET_NAME, 'pfx1-test-object', objVal, versionParams,
|
||||
logger, (err, res) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
lastVersionId = JSON.parse(res).versionId;
|
||||
return next(null);
|
||||
}),
|
||||
// when deleting the last version of an object a PHD master is created
|
||||
// and kept for 15s before it's repaired
|
||||
next => metadata.deleteObjectMD(BUCKET_NAME, 'pfx1-test-object', { versionId: lastVersionId },
|
||||
logger, next),
|
||||
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(data.Contents[0].value.VersionId, versionId);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
it(`should not list phd master key when listing versions ${variation.it}`, done => {
|
||||
const objVal = {
|
||||
key: 'pfx1-test-object',
|
||||
versionId: 'null',
|
||||
};
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
};
|
||||
const params = {
|
||||
listingType: 'DelimiterVersions',
|
||||
prefix: 'pfx1',
|
||||
};
|
||||
let lastVersionId;
|
||||
let versionIds;
|
||||
async.series([
|
||||
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(data.Versions.length, 5);
|
||||
versionIds = data.Versions.map(version => version.VersionId);
|
||||
return next();
|
||||
}),
|
||||
next => metadata.putObjectMD(BUCKET_NAME, 'pfx1-test-object', objVal, versionParams,
|
||||
logger, (err, res) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
lastVersionId = JSON.parse(res).versionId;
|
||||
return next(null);
|
||||
}),
|
||||
// when deleting the last version of an object a PHD master is created
|
||||
// and kept for 15s before it's repaired
|
||||
next => metadata.deleteObjectMD(BUCKET_NAME, 'pfx1-test-object', { versionId: lastVersionId },
|
||||
logger, next),
|
||||
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.ifError(err);
|
||||
const newVersionIds = data.Versions.map(version => version.VersionId);
|
||||
assert.strictEqual(data.Versions.length, 5);
|
||||
assert(versionIds.every(version => newVersionIds.includes(version)));
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
});
|
||||
});
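The pagination test above walks `DelimiterVersions` listings with `NextKeyMarker`/`NextVersionIdMarker` until `IsTruncated` is false. The same loop can be packaged as a promise-returning helper; this is a sketch built on the fields observed in the test, not an existing Arsenal utility:

```js
// Collect every version in a bucket by following the listing markers.
function listAllVersions(metadata, bucketName, log, maxKeys = 100) {
    return new Promise((resolve, reject) => {
        const versions = [];
        const page = (keyMarker, versionIdMarker) => metadata.listObject(
            bucketName,
            { listingType: 'DelimiterVersions', maxKeys, keyMarker, versionIdMarker },
            log,
            (err, res) => {
                if (err) {
                    return reject(err);
                }
                versions.push(...res.Versions);
                if (res.IsTruncated) {
                    return page(res.NextKeyMarker, res.NextVersionIdMarker);
                }
                return resolve(versions);
            });
        page(null, null);
    });
}
```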
|
|
@ -0,0 +1,429 @@
|
|||
const async = require('async');
|
||||
const assert = require('assert');
|
||||
const werelogs = require('werelogs');
|
||||
const { MongoMemoryReplSet } = require('mongodb-memory-server');
|
||||
const { errors, versioning } = require('../../../../index');
|
||||
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
|
||||
const BucketInfo = require('../../../../lib/models/BucketInfo');
|
||||
const MetadataWrapper =
|
||||
require('../../../../lib/storage/metadata/MetadataWrapper');
|
||||
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
|
||||
|
||||
const IMPL_NAME = 'mongodb';
|
||||
const DB_NAME = 'metadata';
|
||||
const BUCKET_NAME = 'test-bucket';
|
||||
const OBJECT_NAME = 'test-object';
|
||||
const VERSION_ID = '98451712418844999999RG001 22019.0';
|
||||
|
||||
const mongoserver = new MongoMemoryReplSet({
|
||||
debug: false,
|
||||
instanceOpts: [
|
||||
{ port: 27021 },
|
||||
],
|
||||
replSet: {
|
||||
name: 'rs0',
|
||||
count: 1,
|
||||
DB_NAME,
|
||||
storageEngine: 'ephemeralForTest',
|
||||
},
|
||||
});
|
||||
|
||||
const variations = [
|
||||
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
|
||||
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
|
||||
];
|
||||
|
||||
describe('MongoClientInterface:metadata.putObjectMD', () => {
|
||||
let metadata;
|
||||
let collection;
|
||||
|
||||
function getObject(key, cb) {
|
||||
collection.findOne({
|
||||
_id: key,
|
||||
}, {}, (err, doc) => {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
if (!doc) {
|
||||
return cb(errors.NoSuchKey);
|
||||
}
|
||||
return cb(null, doc.value);
|
||||
});
|
||||
}
|
||||
|
||||
    function getObjectCount(cb) {
        collection.countDocuments((err, count) => {
            if (err) {
                // return early so the callback cannot fire twice on error
                return cb(err);
            }
            return cb(null, count);
        });
    }
|
||||
|
||||
beforeAll(done => {
|
||||
mongoserver.waitUntilRunning().then(() => {
|
||||
const opts = {
|
||||
mongodb: {
|
||||
replicaSetHosts: 'localhost:27021',
|
||||
writeConcern: 'majority',
|
||||
replicaSet: 'rs0',
|
||||
readPreference: 'primary',
|
||||
database: DB_NAME,
|
||||
},
|
||||
};
|
||||
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||
metadata.setup(done);
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
async.series([
|
||||
next => metadata.close(next),
|
||||
next => mongoserver.stop()
|
||||
.then(() => next())
|
||||
.catch(next),
|
||||
], done);
|
||||
});
|
||||
|
||||
variations.forEach(variation => {
|
||||
const itOnlyInV1 = variation.vFormat === 'v1' ? it : it.skip;
|
||||
describe(`vFormat : ${variation.vFormat}`, () => {
|
||||
beforeEach(done => {
|
||||
const bucketMD = BucketInfo.fromObj({
|
||||
_name: BUCKET_NAME,
|
||||
_owner: 'testowner',
|
||||
_ownerDisplayName: 'testdisplayname',
|
||||
_creationDate: new Date().toJSON(),
|
||||
_acl: {
|
||||
Canned: 'private',
|
||||
FULL_CONTROL: [],
|
||||
WRITE: [],
|
||||
WRITE_ACP: [],
|
||||
READ: [],
|
||||
READ_ACP: [],
|
||||
},
|
||||
_mdBucketModelVersion: 10,
|
||||
_transient: false,
|
||||
_deleted: false,
|
||||
_serverSideEncryption: null,
|
||||
_versioningConfiguration: null,
|
||||
_locationConstraint: 'us-east-1',
|
||||
_readLocationConstraint: null,
|
||||
_cors: null,
|
||||
_replicationConfiguration: null,
|
||||
_lifecycleConfiguration: null,
|
||||
_uid: '',
|
||||
_isNFS: null,
|
||||
ingestion: null,
|
||||
});
|
||||
async.series([
|
||||
next => {
|
||||
metadata.client.defaultBucketKeyFormat = variation.vFormat;
|
||||
return next();
|
||||
},
|
||||
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
collection = metadata.client.getCollection(BUCKET_NAME);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
afterEach(done => {
|
||||
metadata.deleteBucket(BUCKET_NAME, logger, done);
|
||||
});
|
||||
|
||||
it(`Should put a new non-versioned object ${variation.it}`, done => {
|
||||
const objVal = {
|
||||
key: OBJECT_NAME,
|
||||
versionId: 'null',
|
||||
updated: false,
|
||||
};
|
||||
const params = {
|
||||
versioning: null,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
async.series([
|
||||
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
|
||||
next => {
|
||||
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
|
||||
getObject(key, (err, object) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(object.key, OBJECT_NAME);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
// When versioning is not active, only one document is created (master)
|
||||
next => getObjectCount((err, count) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(count, 1);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
it(`Should update the metadata ${variation.it}`, done => {
|
||||
const objVal = {
|
||||
key: OBJECT_NAME,
|
||||
versionId: 'null',
|
||||
updated: false,
|
||||
};
|
||||
const params = {
|
||||
versioning: null,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
async.series([
|
||||
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
|
||||
next => {
|
||||
objVal.updated = true;
|
||||
metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
|
||||
},
|
||||
// object metadata must be updated
|
||||
next => {
|
||||
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
|
||||
getObject(key, (err, object) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(object.key, OBJECT_NAME);
|
||||
assert.strictEqual(object.updated, true);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
// Only a master version should be created
|
||||
next => getObjectCount((err, count) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(count, 1);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
it(`Should put versioned object with the specific versionId ${variation.it}`, done => {
|
||||
const objVal = {
|
||||
key: OBJECT_NAME,
|
||||
versionId: VERSION_ID,
|
||||
updated: false,
|
||||
};
|
||||
const params = {
|
||||
versioning: true,
|
||||
versionId: VERSION_ID,
|
||||
repairMaster: null,
|
||||
};
|
||||
async.series([
|
||||
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
|
||||
// checking if metadata corresponds to what was given to the function
|
||||
next => {
|
||||
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
|
||||
getObject(key, (err, object) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(object.key, OBJECT_NAME);
|
||||
assert.strictEqual(object.versionId, VERSION_ID);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
// We'll have one master and one version
|
||||
next => getObjectCount((err, count) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(count, 2);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
it(`Should put new version and update master ${variation.it}`, done => {
|
||||
const objVal = {
|
||||
key: OBJECT_NAME,
|
||||
versionId: VERSION_ID,
|
||||
updated: false,
|
||||
};
|
||||
const params = {
|
||||
versioning: true,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
let versionId = null;
|
||||
|
||||
async.series([
|
||||
// We first create a master and a version
|
||||
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
versionId = JSON.parse(data).versionId;
|
||||
return next();
|
||||
}),
|
||||
// We put another version of the object
|
||||
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
|
||||
// Master must be updated
|
||||
next => {
|
||||
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
|
||||
getObject(key, (err, object) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(object.key, OBJECT_NAME);
|
||||
assert.notStrictEqual(object.versionId, versionId);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
// we'll have two versions and one master
|
||||
next => getObjectCount((err, count) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(count, 3);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
it(`Should update master when versioning is disabled ${variation.it}`, done => {
|
||||
const objVal = {
|
||||
key: OBJECT_NAME,
|
||||
versionId: VERSION_ID,
|
||||
updated: false,
|
||||
};
|
||||
const params = {
|
||||
versioning: true,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
let versionId = null;
|
||||
async.series([
|
||||
// We first create a new version and master
|
||||
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
versionId = JSON.parse(data).versionId;
|
||||
return next();
|
||||
}),
|
||||
next => {
|
||||
// Disabling versioning and putting a new version
|
||||
params.versioning = false;
|
||||
params.versionId = '';
|
||||
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
|
||||
},
|
||||
// Master must be updated
|
||||
next => {
|
||||
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
|
||||
getObject(key, (err, object) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(object.key, OBJECT_NAME);
|
||||
assert.notStrictEqual(object.versionId, versionId);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
// The second put shouldn't create a new version
|
||||
next => getObjectCount((err, count) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(count, 2);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
it(`Should update latest version and repair master ${variation.it}`, done => {
|
||||
const objVal = {
|
||||
key: OBJECT_NAME,
|
||||
versionId: VERSION_ID,
|
||||
updated: false,
|
||||
};
|
||||
const params = {
|
||||
versioning: true,
|
||||
versionId: VERSION_ID,
|
||||
repairMaster: null,
|
||||
};
|
||||
async.series([
|
||||
// We first create a new version and master
|
||||
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
|
||||
next => {
|
||||
// Updating the version and repairing master
|
||||
params.repairMaster = true;
|
||||
objVal.updated = true;
|
||||
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
|
||||
},
|
||||
// Master must be updated
|
||||
next => {
|
||||
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
|
||||
getObject(key, (err, object) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(object.key, OBJECT_NAME);
|
||||
assert.strictEqual(object.versionId, VERSION_ID);
|
||||
assert.strictEqual(object.updated, true);
|
||||
return next();
|
||||
});
|
||||
},
|
||||
// The second put shouldn't create a new version
|
||||
next => getObjectCount((err, count) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(count, 2);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
itOnlyInV1(`Should delete master when last version is delete marker ${variation.it}`, done => {
|
||||
const objVal = {
|
||||
key: OBJECT_NAME,
|
||||
versionId: VERSION_ID,
|
||||
updated: false,
|
||||
isDeleteMarker: false,
|
||||
};
|
||||
const params = {
|
||||
versioning: true,
|
||||
versionId: VERSION_ID,
|
||||
repairMaster: null,
|
||||
};
|
||||
async.series([
|
||||
// We first create a new version and master
|
||||
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
|
||||
// putting a delete marker as last version
|
||||
next => {
|
||||
objVal.isDeleteMarker = true;
|
||||
params.versionId = null;
|
||||
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
|
||||
},
|
||||
// master must be deleted
|
||||
next => getObject('\x7fMtest-object', err => {
|
||||
assert.deepStrictEqual(err, errors.NoSuchKey);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
itOnlyInV1(`Should create master when new version is put on top of delete marker ${variation.it}`, done => {
|
||||
const objVal = {
|
||||
key: OBJECT_NAME,
|
||||
versionId: VERSION_ID,
|
||||
updated: false,
|
||||
isDeleteMarker: false,
|
||||
};
|
||||
const params = {
|
||||
versioning: true,
|
||||
versionId: VERSION_ID,
|
||||
repairMaster: null,
|
||||
};
|
||||
async.series([
|
||||
// We first create a new version and master
|
||||
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
|
||||
// putting a delete marker as last version
|
||||
next => {
|
||||
objVal.isDeleteMarker = true;
|
||||
params.versionId = null;
|
||||
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
|
||||
},
|
||||
// We put a new version on top of delete marker
|
||||
next => {
|
||||
objVal.isDeleteMarker = false;
|
||||
objVal.updated = true;
|
||||
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
|
||||
},
|
||||
// master must be created
|
||||
next => getObject('\x7fMtest-object', (err, object) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(object.key, OBJECT_NAME);
|
||||
assert.strictEqual(object.updated, true);
|
||||
assert.strictEqual(object.isDeleteMarker, false);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
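For reference, the `putObjectMD` cases above exercise a few combinations of the versioning parameters; the comments restate the behaviour observed in these tests (document counts come from the assertions), not a documented contract:

```js
// New version: writes a version document and updates the master document.
const putNewVersion = { versioning: true, versionId: null, repairMaster: null };

// Specific version: writes/overwrites exactly that versionId plus the master.
const putSpecificVersion = { versioning: true, versionId: '<versionId>', repairMaster: null };

// Repair: updates the given version and rewrites the master from it,
// without creating an additional document.
const repairExistingMaster = { versioning: true, versionId: '<versionId>', repairMaster: true };

// Versioning disabled: only the master document is updated.
const putWithVersioningOff = { versioning: false, versionId: '', repairMaster: null };
```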
|
|
@ -0,0 +1,330 @@
|
|||
const async = require('async');
|
||||
const assert = require('assert');
|
||||
const werelogs = require('werelogs');
|
||||
const { MongoMemoryReplSet } = require('mongodb-memory-server');
|
||||
const { errors, versioning } = require('../../../../index');
|
||||
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
|
||||
const BucketInfo = require('../../../../lib/models/BucketInfo');
|
||||
const MetadataWrapper =
|
||||
require('../../../../lib/storage/metadata/MetadataWrapper');
|
||||
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
|
||||
|
||||
const IMP_NAME = 'mongodb';
|
||||
const DB_NAME = 'metadata';
|
||||
const BUCKET_NAME = 'testbucket';
|
||||
|
||||
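// Tests run against an in-memory single-node replica set ('rs0') listening on
// port 27022; no external MongoDB instance is required.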
const mongoserver = new MongoMemoryReplSet({
|
||||
debug: false,
|
||||
instanceOpts: [
|
||||
{ port: 27022 },
|
||||
],
|
||||
replSet: {
|
||||
name: 'rs0',
|
||||
count: 1,
|
||||
DB_NAME,
|
||||
storageEngine: 'ephemeralForTest',
|
||||
},
|
||||
});
|
||||
|
||||
describe('MongoClientInterface:withCond', () => {
|
||||
let metadata;
|
||||
|
||||
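// Each suite below is run once per bucket key format (v0 and v1).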
const variations = [
|
||||
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
|
||||
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
|
||||
];
|
||||
|
||||
beforeAll(done => {
|
||||
mongoserver.waitUntilRunning().then(() => {
|
||||
const opts = {
|
||||
mongodb: {
|
||||
replicaSetHosts: 'localhost:27022',
|
||||
writeConcern: 'majority',
|
||||
replicaSet: 'rs0',
|
||||
readPreference: 'primary',
|
||||
database: DB_NAME,
|
||||
},
|
||||
};
|
||||
metadata = new MetadataWrapper(IMP_NAME, opts, null, logger);
|
||||
metadata.setup(done);
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
async.series([
|
||||
next => metadata.close(next),
|
||||
next => mongoserver.stop()
|
||||
.then(() => next())
|
||||
.catch(next),
|
||||
], done);
|
||||
});
|
||||
|
||||
variations.forEach(variation => {
|
||||
describe('::putObjectWithCond', () => {
|
||||
beforeEach(done => {
|
||||
const bucketMD = BucketInfo.fromObj({
|
||||
_name: BUCKET_NAME,
|
||||
_owner: 'testowner',
|
||||
_ownerDisplayName: 'testdisplayname',
|
||||
_creationDate: new Date().toJSON(),
|
||||
_acl: {
|
||||
Canned: 'private',
|
||||
FULL_CONTROL: [],
|
||||
WRITE: [],
|
||||
WRITE_ACP: [],
|
||||
READ: [],
|
||||
READ_ACP: [],
|
||||
},
|
||||
_mdBucketModelVersion: 10,
|
||||
_transient: false,
|
||||
_deleted: false,
|
||||
_serverSideEncryption: null,
|
||||
_versioningConfiguration: null,
|
||||
_locationConstraint: 'us-east-1',
|
||||
_readLocationConstraint: null,
|
||||
_cors: null,
|
||||
_replicationConfiguration: null,
|
||||
_lifecycleConfiguration: null,
|
||||
_uid: '',
|
||||
_isNFS: null,
|
||||
ingestion: null,
|
||||
});
|
||||
async.series([
|
||||
next => {
|
||||
metadata.client.defaultBucketKeyFormat = variation.vFormat;
|
||||
return next();
|
||||
},
|
||||
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, next),
|
||||
], done);
|
||||
});
|
||||
|
||||
afterEach(done => {
|
||||
metadata.deleteBucket(BUCKET_NAME, logger, done);
|
||||
});
|
||||
|
||||
const tests = [
|
||||
[
|
||||
`should upsert object if it does not already exist ${variation.it}`,
|
||||
{
|
||||
initVal: null,
|
||||
upsertVal: { value: { number: 42, string: 'forty-two' } },
|
||||
conditions: { value: { number: 24 } },
|
||||
expectedVal: { value: { number: 42, string: 'forty-two' } },
|
||||
error: null,
|
||||
},
|
||||
],
|
||||
[
|
||||
`should not update an existing object if the conditions fail ${variation.it}`,
|
||||
{
|
||||
initVal: { value: { number: 0, string: 'zero' } },
|
||||
upsertVal: { value: { number: 42, string: 'forty-two' } },
|
||||
conditions: { value: { number: 24 } },
|
||||
expectedVal: { value: { number: 0, string: 'zero' } },
|
||||
error: errors.InternalError,
|
||||
},
|
||||
],
|
||||
[
|
||||
`should not update an existing object if the conditions fail ${variation.it}`,
|
||||
{
|
||||
initVal: { value: { number: 0, string: 'zero' } },
|
||||
upsertVal: { value: { number: 42, string: 'forty-two' } },
|
||||
conditions: { value: { string: { $eq: 'twenty-four' } } },
|
||||
expectedVal: { value: { number: 0, string: 'zero' } },
|
||||
error: errors.InternalError,
|
||||
},
|
||||
],
|
||||
[
|
||||
`should not update an existing object if the conditions fail ${variation.it}`,
|
||||
{
|
||||
initVal: { value: { number: 0, string: 'zero' } },
|
||||
upsertVal: { value: { number: 42, string: 'forty-two' } },
|
||||
conditions: {
|
||||
value: {
|
||||
string: { $eq: 'twenty-four' },
|
||||
number: { $eq: 0 },
|
||||
},
|
||||
},
|
||||
expectedVal: { value: { number: 0, string: 'zero' } },
|
||||
error: errors.InternalError,
|
||||
},
|
||||
],
|
||||
[
|
||||
`should update an existing object if the conditions pass ${variation.it}`,
|
||||
{
|
||||
initVal: { value: { number: 24, string: 'twenty-four' } },
|
||||
upsertVal: { value: { number: 42, string: 'forty-two' } },
|
||||
conditions: { value: { number: 24 } },
|
||||
expectedVal: { value: { number: 42, string: 'forty-two' } },
|
||||
error: null,
|
||||
},
|
||||
],
|
||||
[
|
||||
`should update an existing object if the conditions pass ${variation.it}`,
|
||||
{
|
||||
initVal: { value: { number: 24, string: 'twenty-four' } },
|
||||
upsertVal: { value: { number: 42, string: 'forty-two' } },
|
||||
conditions: { value: { string: { $eq: 'twenty-four' } } },
|
||||
expectedVal: { value: { number: 42, string: 'forty-two' } },
|
||||
error: null,
|
||||
},
|
||||
],
|
||||
[
|
||||
`should update an existing object if the conditions pass ${variation.it}`,
|
||||
{
|
||||
initVal: { value: { number: 24, string: 'twenty-four' } },
|
||||
upsertVal: { value: { number: 42, string: 'forty-two' } },
|
||||
conditions: {
|
||||
value: {
|
||||
string: { $eq: 'twenty-four' },
|
||||
number: { $eq: 24 },
|
||||
},
|
||||
},
|
||||
expectedVal: { value: { number: 42, string: 'forty-two' } },
|
||||
error: null,
|
||||
},
|
||||
],
|
||||
];
|
||||
tests.forEach(([msg, testCase]) => it(msg, done => {
|
||||
const objectKey = 'testkey';
|
||||
const {
|
||||
initVal, upsertVal, conditions, expectedVal, error,
|
||||
} = testCase;
|
||||
const params = { conditions };
|
||||
async.series([
|
||||
next => {
|
||||
if (!initVal) {
|
||||
return next();
|
||||
}
|
||||
return metadata.putObjectMD(BUCKET_NAME, objectKey, initVal,
|
||||
{}, logger, next);
|
||||
},
|
||||
next => metadata.putObjectWithCond(BUCKET_NAME, objectKey,
|
||||
upsertVal, params, logger, err => {
|
||||
if (error) {
|
||||
assert.deepStrictEqual(err, error);
|
||||
return next();
|
||||
}
|
||||
assert(!err);
|
||||
return next();
|
||||
}),
|
||||
next => metadata.getObjectMD(BUCKET_NAME, objectKey, {}, logger,
|
||||
(err, res) => {
|
||||
assert(!err);
|
||||
assert.deepStrictEqual(res, expectedVal);
|
||||
next();
|
||||
}),
|
||||
], done);
|
||||
}));
|
||||
});
|
||||
|
||||
describe('::deleteObjectWithCond', () => {
|
||||
const tests = [
|
||||
[
|
||||
`should return no such key if the object does not exist ${variation.it}`,
|
||||
{
|
||||
initVal: null,
|
||||
conditions: { value: { number: 24 } },
|
||||
expectedVal: null,
|
||||
error: errors.NoSuchKey,
|
||||
},
|
||||
],
|
||||
[
|
||||
`should return no such key if the conditions fail ${variation.it}`,
|
||||
{
|
||||
initVal: { value: { number: 0, string: 'zero' } },
|
||||
conditions: { value: { number: { $eq: 24 } } },
|
||||
expectedVal: { value: { number: 0, string: 'zero' } },
|
||||
error: errors.NoSuchKey,
|
||||
},
|
||||
],
|
||||
[
|
||||
`should return no such key if the conditions fail ${variation.it}`,
|
||||
{
|
||||
initVal: { value: { number: 0, string: 'zero' } },
|
||||
conditions: { value: { string: 'twenty-four' } },
|
||||
expectedVal: { value: { number: 0, string: 'zero' } },
|
||||
error: errors.NoSuchKey,
|
||||
},
|
||||
],
|
||||
[
|
||||
`should return no such key if the conditions fail ${variation.it}`,
|
||||
{
|
||||
initVal: { value: { number: 0, string: 'zero' } },
|
||||
conditions: {
|
||||
value: {
|
||||
string: 'twenty-four',
|
||||
number: { $eq: 0 },
|
||||
},
|
||||
},
|
||||
expectedVal: { value: { number: 0, string: 'zero' } },
|
||||
error: errors.NoSuchKey,
|
||||
},
|
||||
],
|
||||
[
|
||||
`should successfully delete matched object ${variation.it}`,
|
||||
{
|
||||
initVal: { value: { number: 24, string: 'twenty-four' } },
|
||||
conditions: { value: { number: 24 } },
|
||||
expectedVal: null,
|
||||
error: null,
|
||||
},
|
||||
],
|
||||
[
|
||||
`should successfully delete matched object ${variation.it}`,
|
||||
{
|
||||
initVal: { value: { number: 24, string: 'twenty-four' } },
|
||||
conditions: { value: { string: { $eq: 'twenty-four' } } },
|
||||
expectedVal: null,
|
||||
error: null,
|
||||
},
|
||||
],
|
||||
[
|
||||
`should successfully delete matched object ${variation.it}`,
|
||||
{
|
||||
initVal: { value: { number: 24, string: 'twenty-four' } },
|
||||
conditions: {
|
||||
value: {
|
||||
string: { $eq: 'twenty-four' },
|
||||
number: { $eq: 24 },
|
||||
},
|
||||
},
|
||||
expectedVal: null,
|
||||
error: null,
|
||||
},
|
||||
],
|
||||
];
|
||||
tests.forEach(([msg, testCase]) => it(msg, done => {
|
||||
const objectKey = 'testkey';
|
||||
const { initVal, conditions, expectedVal, error } = testCase;
|
||||
const params = { conditions };
|
||||
async.series([
|
||||
next => {
|
||||
if (!initVal) {
|
||||
return next();
|
||||
}
|
||||
return metadata.putObjectMD(BUCKET_NAME, objectKey, initVal,
|
||||
{}, logger, next);
|
||||
},
|
||||
next => metadata.deleteObjectWithCond(BUCKET_NAME, objectKey,
|
||||
params, logger, err => {
|
||||
if (error) {
|
||||
assert.deepStrictEqual(err, error);
|
||||
return next();
|
||||
}
|
||||
assert(!err);
|
||||
return next();
|
||||
}),
|
||||
next => metadata.getObjectMD(BUCKET_NAME, objectKey, {}, logger,
|
||||
(err, res) => {
|
||||
if (expectedVal) {
|
||||
assert.deepStrictEqual(res, expectedVal);
|
||||
} else {
|
||||
assert.deepStrictEqual(err, errors.NoSuchKey);
|
||||
}
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
}));
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,319 @@
|
|||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
const werelogs = require('werelogs');
|
||||
const assert = require('assert');
|
||||
const async = require('async');
|
||||
|
||||
const logger = new werelogs.Logger('MetadataProxyServer', 'debug', 'debug');
|
||||
const MetadataWrapper =
|
||||
require('../../../lib/storage/metadata/MetadataWrapper');
|
||||
const BucketRoutes =
|
||||
require('../../../lib/storage/metadata/proxy/BucketdRoutes');
|
||||
const metadataWrapper = new MetadataWrapper('mem', {}, null, logger);
|
||||
const { RequestDispatcher } = require('../../utils/mdProxyUtils');
|
||||
|
||||
const routes = new BucketRoutes(metadataWrapper, logger);
|
||||
const dispatcher = new RequestDispatcher(routes);
|
||||
|
||||
const Bucket = 'test';
|
||||
const bucketInfo = {
|
||||
acl: {
|
||||
Canned: 'private',
|
||||
FULL_CONTROL: [],
|
||||
WRITE: [],
|
||||
WRITE_ACP: [],
|
||||
READ: [],
|
||||
READ_ACP: [],
|
||||
},
|
||||
name: Bucket,
|
||||
owner: '9d8fe19a78974c56dceb2ea4a8f01ed0f5fecb9d29f80e9e3b84104e4a3ea520',
|
||||
ownerDisplayName: 'anonymousCoward',
|
||||
creationDate: '2018-06-04T17:45:42.592Z',
|
||||
mdBucketModelVersion: 8,
|
||||
transient: false,
|
||||
deleted: false,
|
||||
serverSideEncryption: null,
|
||||
versioningConfiguration: null,
|
||||
locationConstraint: 'us-east-1',
|
||||
readLocationConstraint: 'us-east-1',
|
||||
cors: null,
|
||||
replicationConfiguration: null,
|
||||
lifecycleConfiguration: null,
|
||||
uid: 'fea97818-6a9a-11e8-9777-e311618cc5d4',
|
||||
isNFS: null,
|
||||
};
|
||||
|
||||
const objects = [
|
||||
'aaa',
|
||||
'bbb/xaa',
|
||||
'bbb/xbb',
|
||||
'bbb/xcc',
|
||||
'ccc',
|
||||
'ddd',
|
||||
];
|
||||
|
||||
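// Expected listing under `prefix`: strip the prefix, collapse anything below
// the '/' delimiter into a single 'dir/' entry, then drop duplicates and the
// empty key.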
function _getExpectedListing(prefix, objects) {
|
||||
const filtered = objects.map(key => {
|
||||
const deprefixed = key.slice(prefix.length);
|
||||
return deprefixed.replace(/[/].*/, '/');
|
||||
});
|
||||
const keySet = {};
|
||||
return filtered.filter(key => {
|
||||
if (keySet[key]) {
|
||||
return false;
|
||||
}
|
||||
if (key === '') {
|
||||
return false;
|
||||
}
|
||||
keySet[key] = true;
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
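// Builds a listing URL with delimiter '/', maxKeys=1 and the given
// prefix/marker, URL-encoding the slashes.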
function _listingURL(prefix, marker) {
|
||||
const reSlash = /[/]/g;
|
||||
const escapedPrefix = prefix.replace(reSlash, '%2F');
|
||||
const escapedMarker = marker.replace(reSlash, '%2F');
|
||||
return `/default/bucket/${Bucket}?delimiter=%2F&prefix=` +
|
||||
`${escapedPrefix}&maxKeys=1&marker=${escapedMarker}`;
|
||||
}
|
||||
|
||||
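// Walks the expected listing one key at a time (maxKeys=1), checking
// Contents vs CommonPrefixes, the IsTruncated flag and NextMarker at each step.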
function _listObjects(prefix, objects, cb) {
|
||||
const keys = _getExpectedListing(prefix, objects);
|
||||
const markers = keys.slice(0);
|
||||
markers.unshift(undefined);
|
||||
const lastKey = keys[keys.length - 1];
|
||||
const listing = keys.map((key, index) => ({
|
||||
key,
|
||||
marker: markers[index],
|
||||
NextMarker: markers[index + 1],
|
||||
IsTruncated: key !== lastKey,
|
||||
isPrefix: key.endsWith('/'),
|
||||
}));
|
||||
async.mapLimit(listing, 5, (obj, next) => {
|
||||
const currentMarker = obj.marker === undefined ? '' : obj.marker;
|
||||
dispatcher.get(_listingURL(prefix, prefix + currentMarker),
|
||||
(err, response, body) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
if (obj.isPrefix) {
|
||||
assert.strictEqual(body.Contents.length, 0);
|
||||
assert.strictEqual(body.CommonPrefixes.length,
|
||||
1);
|
||||
assert.strictEqual(body.CommonPrefixes[0],
|
||||
prefix + obj.key);
|
||||
} else {
|
||||
assert.strictEqual(body.Contents.length, 1);
|
||||
assert.strictEqual(body.CommonPrefixes.length,
|
||||
0);
|
||||
assert.strictEqual(body.Contents[0].key,
|
||||
prefix + obj.key);
|
||||
}
|
||||
assert.strictEqual(body.IsTruncated,
|
||||
obj.IsTruncated);
|
||||
if (body.IsTruncated) {
|
||||
assert.strictEqual(body.NextMarker,
|
||||
prefix + obj.NextMarker);
|
||||
}
|
||||
return next();
|
||||
});
|
||||
}, err => cb(err));
|
||||
}
|
||||
|
||||
function _createObjects(objects, cb) {
|
||||
async.mapLimit(objects, 5, (key, next) => {
|
||||
dispatcher.post(`/default/bucket/${Bucket}/${key}`,
|
||||
{ key }, next);
|
||||
}, err => {
|
||||
cb(err);
|
||||
});
|
||||
}
|
||||
|
||||
function _readObjects(objects, cb) {
|
||||
async.mapLimit(objects, 5, (key, next) => {
|
||||
dispatcher.get(`/default/bucket/${Bucket}/${key}`,
|
||||
(err, response, body) => {
|
||||
assert.deepStrictEqual(body.key, key);
|
||||
next(err);
|
||||
});
|
||||
}, err => {
|
||||
cb(err);
|
||||
});
|
||||
}
|
||||
|
||||
function _deleteObjects(objects, cb) {
|
||||
async.mapLimit(objects, 5, (key, next) => {
|
||||
dispatcher.delete(`/default/bucket/${Bucket}/${key}`,
|
||||
err => next(err));
|
||||
}, err => {
|
||||
cb(err);
|
||||
});
|
||||
}
|
||||
|
||||
describe('Basic Metadata Proxy Server test',
|
||||
() => {
|
||||
jest.setTimeout(10000);
|
||||
it('Should get the metadataInformation', done => {
|
||||
dispatcher.get('/default/metadataInformation',
|
||||
(err, response, body) => {
|
||||
if (err) {
|
||||
return done(err);
|
||||
}
|
||||
assert.deepStrictEqual(
|
||||
body, { metadataVersion: 2 });
|
||||
return done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Basic Metadata Proxy Server CRUD test', () => {
|
||||
jest.setTimeout(10000);
|
||||
|
||||
beforeEach(done => {
|
||||
dispatcher.post(`/default/bucket/${Bucket}`, bucketInfo,
|
||||
done);
|
||||
});
|
||||
|
||||
afterEach(done => {
|
||||
dispatcher.delete(`/default/bucket/${Bucket}`, done);
|
||||
});
|
||||
|
||||
it('Should get the bucket attributes', done => {
|
||||
dispatcher.get(`/default/attributes/${Bucket}`,
|
||||
(err, response, body) => {
|
||||
if (err) {
|
||||
return done(err);
|
||||
}
|
||||
assert.deepStrictEqual(body.name,
|
||||
bucketInfo.name);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should crud an object', done => {
|
||||
async.waterfall([
|
||||
next => dispatcher.post(`/default/bucket/${Bucket}/test1`,
|
||||
{ foo: 'gabu' }, err => next(err)),
|
||||
next => dispatcher.get(`/default/bucket/${Bucket}/test1`,
    (err, response, body) => {
        if (err) {
            return next(err);
        }
        assert.deepStrictEqual(body.foo, 'gabu');
        return next();
    }),
|
||||
next => dispatcher.post(`/default/bucket/${Bucket}/test1`,
|
||||
{ foo: 'zome' }, err => next(err)),
|
||||
next => dispatcher.get(`/default/bucket/${Bucket}/test1`,
    (err, response, body) => {
        if (err) {
            return next(err);
        }
        assert.deepStrictEqual(body.foo, 'zome');
        return next();
    }),
|
||||
next => dispatcher.delete(`/default/bucket/${Bucket}/test1`,
|
||||
err => next(err)),
|
||||
], err => done(err));
|
||||
});
|
||||
|
||||
it('Should list objects', done => {
|
||||
async.waterfall([
|
||||
next => _createObjects(objects, next),
|
||||
next => _readObjects(objects, next),
|
||||
next => _listObjects('', objects, next),
|
||||
next => _listObjects('bbb/', objects, next),
|
||||
next => _deleteObjects(objects, next),
|
||||
], err => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('Should update bucket properties', done => {
|
||||
dispatcher.get(
|
||||
`/default/attributes/${Bucket}`, (err, response, body) => {
|
||||
assert.strictEqual(err, null);
|
||||
const bucketInfo = body;
|
||||
const newOwnerDisplayName = 'divertedfrom';
|
||||
bucketInfo.ownerDisplayName = newOwnerDisplayName;
|
||||
dispatcher.post(
|
||||
`/default/attributes/${Bucket}`, bucketInfo, err => {
|
||||
assert.strictEqual(err, null);
|
||||
dispatcher.get(
|
||||
`/default/attributes/${Bucket}`,
|
||||
(err, response, body) => {
|
||||
assert.strictEqual(err, null);
|
||||
const newBucketInfo = body;
|
||||
assert.strictEqual(
|
||||
newBucketInfo.ownerDisplayName,
|
||||
newOwnerDisplayName);
|
||||
done(null);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Should fail to list a non existing bucket', done => {
|
||||
dispatcher.get('/default/bucket/nonexisting',
|
||||
(err, response) => {
|
||||
assert.strictEqual(
|
||||
response.responseHead.statusCode,
|
||||
404);
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('Should fail to get attributes from a non existing bucket', done => {
|
||||
dispatcher.get('/default/attributes/nonexisting',
|
||||
(err, response) => {
|
||||
assert.strictEqual(
|
||||
response.responseHead.statusCode,
|
||||
404);
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('should succeed a health check', done => {
|
||||
dispatcher.get('/_/healthcheck', (err, response, body) => {
|
||||
if (err) {
|
||||
return done(err);
|
||||
}
|
||||
const expectedResponse = {
|
||||
memorybucket: {
|
||||
code: 200,
|
||||
message: 'OK',
|
||||
},
|
||||
};
|
||||
assert.strictEqual(response.responseHead.statusCode, 200);
|
||||
assert.deepStrictEqual(body, expectedResponse);
|
||||
return done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('should work with parallel route', done => {
|
||||
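// The parallel route is expected to return the bucket and the object
// metadata in one response, each as a JSON string (body.bucket / body.obj).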
const objectName = 'theObj';
|
||||
async.waterfall([
|
||||
next => _createObjects([objectName], next),
|
||||
next => {
|
||||
dispatcher.get(
|
||||
`/default/parallel/${Bucket}/${objectName}`,
|
||||
(err, response, body) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
assert.strictEqual(response.responseHead.statusCode,
|
||||
200);
|
||||
const bucketMD = JSON.parse(body.bucket);
|
||||
const objectMD = JSON.parse(body.obj);
|
||||
const expectedObjectMD = { key: objectName };
|
||||
assert.deepStrictEqual(bucketMD.name,
|
||||
bucketInfo.name);
|
||||
assert.deepStrictEqual(objectMD, expectedObjectMD);
|
||||
return next(err);
|
||||
});
|
||||
},
|
||||
next => _deleteObjects([objectName], next),
|
||||
], done);
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,318 @@
|
|||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
const assert = require('assert');
|
||||
const async = require('async');
|
||||
|
||||
const RedisClient = require('../../../lib/metrics/RedisClient');
|
||||
const StatsModel = require('../../../lib/metrics/StatsModel');
|
||||
|
||||
// setup redis client
|
||||
const config = {
|
||||
host: '127.0.0.1',
|
||||
port: 6379,
|
||||
enableOfflineQueue: true,
|
||||
};
|
||||
const fakeLogger = {
|
||||
trace: () => {},
|
||||
error: () => {},
|
||||
};
|
||||
const redisClient = new RedisClient(config, fakeLogger);
|
||||
|
||||
// setup stats model
|
||||
const STATS_INTERVAL = 300; // 5 minutes
|
||||
const STATS_EXPIRY = 86400; // 24 hours
|
||||
const statsModel = new StatsModel(redisClient, STATS_INTERVAL, STATS_EXPIRY);
|
||||
|
||||
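// Pads the expected values with zeros up to the number of retained intervals
// (STATS_EXPIRY / STATS_INTERVAL = 288 five-minute slots over 24 hours).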
function setExpectedStats(expected) {
|
||||
return expected.concat(
|
||||
Array((STATS_EXPIRY / STATS_INTERVAL) - expected.length).fill(0));
|
||||
}
|
||||
|
||||
// Since many methods were overwritten, these tests should validate the changes
|
||||
// made to the original methods
|
||||
describe('StatsModel class', () => {
|
||||
const id = 'arsenal-test';
|
||||
const id2 = 'test-2';
|
||||
const id3 = 'test-3';
|
||||
|
||||
afterEach(() => redisClient.clear(() => {}));
|
||||
|
||||
it('should convert the columns of a 2d array into rows and vice versa using _zip',
|
||||
() => {
|
||||
const arrays = [
|
||||
[1, 2, 3],
|
||||
[4, 5, 6],
|
||||
[7, 8, 9],
|
||||
];
|
||||
|
||||
const res = statsModel._zip(arrays);
|
||||
const expected = [
|
||||
[1, 4, 7],
|
||||
[2, 5, 8],
|
||||
[3, 6, 9],
|
||||
];
|
||||
|
||||
assert.deepStrictEqual(res, expected);
|
||||
});
|
||||
|
||||
it('_zip should return an empty array if given an invalid array', () => {
|
||||
const arrays = [];
|
||||
|
||||
const res = statsModel._zip(arrays);
|
||||
|
||||
assert.deepStrictEqual(res, []);
|
||||
});
|
||||
|
||||
it('_getCount should return an array of all valid integer values',
|
||||
() => {
|
||||
const res = statsModel._getCount([
|
||||
[null, '1'],
|
||||
[null, '2'],
|
||||
[null, null],
|
||||
]);
|
||||
assert.deepStrictEqual(res, setExpectedStats([1, 2, 0]));
|
||||
});
|
||||
|
||||
it('should correctly record a new request with the default increment of one',
|
||||
done => {
|
||||
async.series([
|
||||
next => {
|
||||
statsModel.reportNewRequest(id, (err, res) => {
|
||||
assert.ifError(err);
|
||||
|
||||
const expected = [[null, 1], [null, 1]];
|
||||
assert.deepStrictEqual(res, expected);
|
||||
next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
statsModel.reportNewRequest(id, (err, res) => {
|
||||
assert.ifError(err);
|
||||
|
||||
const expected = [[null, 2], [null, 1]];
|
||||
assert.deepStrictEqual(res, expected);
|
||||
next();
|
||||
});
|
||||
},
|
||||
], done);
|
||||
});
|
||||
|
||||
it('should record new requests with defined increment amounts', done => {
|
||||
function noop() {}
|
||||
|
||||
async.series([
|
||||
next => {
|
||||
statsModel.reportNewRequest(id, 9);
|
||||
statsModel.getStats(fakeLogger, id, (err, res) => {
|
||||
assert.ifError(err);
|
||||
|
||||
assert.deepStrictEqual(res.requests, setExpectedStats([9]));
|
||||
next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
statsModel.reportNewRequest(id);
|
||||
statsModel.getStats(fakeLogger, id, (err, res) => {
|
||||
assert.ifError(err);
|
||||
|
||||
assert.deepStrictEqual(res.requests,
|
||||
setExpectedStats([10]));
|
||||
next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
statsModel.reportNewRequest(id, noop);
|
||||
statsModel.getStats(fakeLogger, id, (err, res) => {
|
||||
assert.ifError(err);
|
||||
|
||||
assert.deepStrictEqual(res.requests,
|
||||
setExpectedStats([11]));
|
||||
next();
|
||||
});
|
||||
},
|
||||
], done);
|
||||
});
|
||||
|
||||
it('should correctly record a 500 on the server', done => {
|
||||
statsModel.report500(id, (err, res) => {
|
||||
assert.ifError(err);
|
||||
|
||||
const expected = [[null, 1], [null, 1]];
|
||||
assert.deepStrictEqual(res, expected);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should respond back with total requests as an array', done => {
|
||||
async.series([
|
||||
next => {
|
||||
statsModel.reportNewRequest(id, err => {
|
||||
assert.ifError(err);
|
||||
next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
statsModel.report500(id, err => {
|
||||
assert.ifError(err);
|
||||
next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
statsModel.getStats(fakeLogger, id, (err, res) => {
|
||||
assert.ifError(err);
|
||||
|
||||
const expected = {
|
||||
'requests': setExpectedStats([1]),
|
||||
'500s': setExpectedStats([1]),
|
||||
'sampleDuration': STATS_EXPIRY,
|
||||
};
|
||||
assert.deepStrictEqual(res, expected);
|
||||
next();
|
||||
});
|
||||
},
|
||||
], done);
|
||||
});
|
||||
|
||||
it('should not crash on empty results', done => {
|
||||
async.series([
|
||||
next => {
|
||||
statsModel.getStats(fakeLogger, id, (err, res) => {
|
||||
assert.ifError(err);
|
||||
const expected = {
|
||||
'requests': setExpectedStats([]),
|
||||
'500s': setExpectedStats([]),
|
||||
'sampleDuration': STATS_EXPIRY,
|
||||
};
|
||||
assert.deepStrictEqual(res, expected);
|
||||
next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
statsModel.getAllStats(fakeLogger, id, (err, res) => {
|
||||
assert.ifError(err);
|
||||
const expected = {
|
||||
'requests': setExpectedStats([]),
|
||||
'500s': setExpectedStats([]),
|
||||
'sampleDuration': STATS_EXPIRY,
|
||||
};
|
||||
assert.deepStrictEqual(res, expected);
|
||||
next();
|
||||
});
|
||||
},
|
||||
], done);
|
||||
});
|
||||
|
||||
it('should return a zero-filled array if no ids are passed to getAllStats',
|
||||
done => {
|
||||
statsModel.getAllStats(fakeLogger, [], (err, res) => {
|
||||
assert.ifError(err);
|
||||
|
||||
assert.deepStrictEqual(res.requests, setExpectedStats([]));
|
||||
assert.deepStrictEqual(res['500s'], setExpectedStats([]));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should get accurately reported data for given id from getAllStats',
|
||||
done => {
|
||||
statsModel.reportNewRequest(id, 9);
|
||||
statsModel.reportNewRequest(id2, 2);
|
||||
statsModel.reportNewRequest(id3, 3);
|
||||
statsModel.report500(id);
|
||||
|
||||
async.series([
|
||||
next => {
|
||||
statsModel.getAllStats(fakeLogger, [id], (err, res) => {
|
||||
assert.ifError(err);
|
||||
|
||||
assert.equal(res.requests[0], 9);
|
||||
assert.equal(res['500s'][0], 1);
|
||||
next();
|
||||
});
|
||||
},
|
||||
next => {
|
||||
statsModel.getAllStats(fakeLogger, [id, id2, id3],
|
||||
(err, res) => {
|
||||
assert.ifError(err);
|
||||
|
||||
assert.equal(res.requests[0], 14);
|
||||
assert.deepStrictEqual(res.requests,
|
||||
setExpectedStats([14]));
|
||||
next();
|
||||
});
|
||||
},
|
||||
], done);
|
||||
});
|
||||
|
||||
it('should normalize a date down to the start of the hour using normalizeTimestampByHour',
|
||||
() => {
|
||||
const date = new Date('2018-09-13T23:30:59.195Z');
|
||||
const newDate = new Date(statsModel.normalizeTimestampByHour(date));
|
||||
|
||||
assert.strictEqual(date.getHours(), newDate.getHours());
|
||||
assert.strictEqual(newDate.getMinutes(), 0);
|
||||
assert.strictEqual(newDate.getSeconds(), 0);
|
||||
assert.strictEqual(newDate.getMilliseconds(), 0);
|
||||
});
|
||||
|
||||
it('should get previous hour using _getDatePreviousHour', () => {
|
||||
const date = new Date('2018-09-13T23:30:59.195Z');
|
||||
const newDate = statsModel._getDatePreviousHour(new Date(date));
|
||||
|
||||
const millisecondsInOneHour = 3600000;
|
||||
assert.strictEqual(date - newDate, millisecondsInOneHour);
|
||||
});
|
||||
|
||||
it('should get an array of hourly timestamps using getSortedSetHours',
|
||||
() => {
|
||||
const epoch = 1536882476501;
|
||||
const millisecondsInOneHour = 3600000;
|
||||
|
||||
const expected = [];
|
||||
let dateInMilliseconds = statsModel.normalizeTimestampByHour(
|
||||
new Date(epoch));
|
||||
|
||||
for (let i = 0; i < 24; i++) {
|
||||
expected.push(dateInMilliseconds);
|
||||
dateInMilliseconds -= millisecondsInOneHour;
|
||||
}
|
||||
const res = statsModel.getSortedSetHours(epoch);
|
||||
|
||||
assert.deepStrictEqual(res, expected);
|
||||
});
|
||||
|
||||
it('should apply TTL on a new sorted set using addToSortedSet', done => {
|
||||
const key = 'a-test-key';
|
||||
const score = 100;
|
||||
const value = 'a-value';
|
||||
|
||||
const now = Date.now();
|
||||
const nearestHour = statsModel.normalizeTimestampByHour(new Date(now));
|
||||
|
||||
statsModel.addToSortedSet(key, score, value, (err, res) => {
|
||||
assert.ifError(err);
|
||||
// check both a "zadd" and "expire" occurred
|
||||
assert.equal(res, 1);
|
||||
redisClient.ttl(key, (err, res) => {
|
||||
assert.ifError(err);
|
||||
// assert this new set has a ttl applied
|
||||
assert(res > 0);
|
||||
|
||||
const adjustmentMs = now - nearestHour;
const msInADay = 24 * 60 * 60 * 1000;
const msInAnHour = 60 * 60 * 1000;
const upperLimitSecs =
    Math.ceil((msInADay - adjustmentMs) / 1000);
const lowerLimitSecs =
    Math.floor((msInADay - adjustmentMs - msInAnHour) / 1000);
|
||||
|
||||
// assert new ttl is between 23 and 24 hours adjusted by time
|
||||
// elapsed since normalized hourly time
|
||||
assert(res >= lowerLimitSecs);
|
||||
assert(res <= upperLimitSecs);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,326 @@
|
|||
const assert = require('assert');
|
||||
|
||||
const ChainBackend = require('../../../lib/auth/auth').backends.chainBackend;
|
||||
const BaseBackend = require('../../../lib/auth/auth').backends.baseBackend;
|
||||
const errors = require('../../../lib/errors');
|
||||
|
||||
|
||||
const testError = new Error('backend error');
|
||||
|
||||
const backendWithAllMethods = {
|
||||
verifySignatureV2: () => {},
|
||||
verifySignatureV4: () => {},
|
||||
getCanonicalIds: () => {},
|
||||
getEmailAddresses: () => {},
|
||||
checkPolicies: () => {},
|
||||
healthcheck: () => {},
|
||||
};
|
||||
|
||||
function getBackendWithMissingMethod(methodName) {
|
||||
const backend = Object.assign({}, backendWithAllMethods);
|
||||
delete backend[methodName];
|
||||
return backend;
|
||||
}
|
||||
|
||||
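// Stub backend whose every method immediately invokes its callback with the
// configured (error, result) pair.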
class TestBackend extends BaseBackend {
|
||||
constructor(service, error, result) {
|
||||
super(service);
|
||||
this._error = error;
|
||||
this._result = result;
|
||||
}
|
||||
|
||||
verifySignatureV2(stringToSign, signatureFromRequest, accessKey, options, callback) {
|
||||
return callback(this._error, this._result);
|
||||
}
|
||||
|
||||
verifySignatureV4(stringToSign, signatureFromRequest, accessKey, region, scopeDate, options, callback) {
|
||||
return callback(this._error, this._result);
|
||||
}
|
||||
|
||||
getCanonicalIds(emailAddresses, options, callback) {
|
||||
return callback(this._error, this._result);
|
||||
}
|
||||
|
||||
getEmailAddresses(canonicalIDs, options, callback) {
|
||||
return callback(this._error, this._result);
|
||||
}
|
||||
|
||||
checkPolicies(requestContextParams, userArn, options, callback) {
|
||||
return callback(this._error, this._result);
|
||||
}
|
||||
|
||||
healthcheck(reqUid, callback) {
|
||||
return callback(this._error, this._result);
|
||||
}
|
||||
}
|
||||
|
||||
describe('Auth Backend: Chain Backend', () => {
|
||||
[
|
||||
['should throw an error if client list is not an array', null],
|
||||
['should throw an error if client list is empty', []],
|
||||
['should throw an error if a client is missing the verifySignatureV2 method', [
|
||||
new TestBackend(),
|
||||
getBackendWithMissingMethod('verifySignatureV2'),
|
||||
]],
|
||||
['should throw an error if a client is missing the verifySignatureV4 auth method', [
|
||||
new TestBackend(),
|
||||
getBackendWithMissingMethod('verifySignatureV4'),
|
||||
]],
|
||||
['should throw an error if a client is missing the getCanonicalIds method', [
|
||||
new TestBackend(),
|
||||
getBackendWithMissingMethod('getCanonicalIds'),
|
||||
]],
|
||||
['should throw an error if a client is missing the getEmailAddresses method', [
|
||||
new TestBackend(),
|
||||
getBackendWithMissingMethod('getEmailAddresses'),
|
||||
]],
|
||||
['should throw an error if a client is missing the checkPolicies method', [
|
||||
new TestBackend(),
|
||||
getBackendWithMissingMethod('checkPolicies'),
|
||||
]],
|
||||
['should throw an error if a client is missing the healthcheck method', [
|
||||
new TestBackend(),
|
||||
getBackendWithMissingMethod('healthcheck'),
|
||||
]],
|
||||
].forEach(([msg, input]) => it(msg, () => {
|
||||
assert.throws(() => {
|
||||
new ChainBackend('chain', input); // eslint-disable-line no-new
|
||||
});
|
||||
}));
|
||||
|
||||
[
|
||||
// function name, function args
|
||||
['verifySignatureV2', [null, null, null, null]],
|
||||
['verifySignatureV4', [null, null, null, null, null, null]],
|
||||
].forEach(([fn, fnArgs]) =>
|
||||
describe(`::${fn}`, () => {
|
||||
it('should return an error if none of the clients returns a result', done => {
|
||||
const backend = new ChainBackend('chain', [
|
||||
new TestBackend('test1', testError, null),
|
||||
new TestBackend('test2', testError, null),
|
||||
new TestBackend('test3', testError, null),
|
||||
]);
|
||||
|
||||
backend[fn](...fnArgs, err => {
|
||||
assert.deepStrictEqual(err, testError);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
[
|
||||
[
|
||||
'should return result of the first successful client (multiple successful clients)',
|
||||
'expectedResult',
|
||||
// backend constructor args
|
||||
[
|
||||
['test1', null, 'expectedResult'],
|
||||
['test2', null, 'test2'],
|
||||
['test3', testError, null],
|
||||
],
|
||||
],
|
||||
[
|
||||
'should return result of successful client',
|
||||
'expectedResult',
|
||||
// backend constructor args
|
||||
[
|
||||
['test1', testError, null],
|
||||
['test2', null, 'expectedResult'],
|
||||
['test3', testError, null],
|
||||
],
|
||||
],
|
||||
[
|
||||
'should return result of successful client',
|
||||
'expectedResult',
|
||||
// backend constructor args
|
||||
[
|
||||
['test1', testError, null],
|
||||
['test1', testError, null],
|
||||
['test3', null, 'expectedResult'],
|
||||
],
|
||||
],
|
||||
].forEach(([msg, expected, backendArgs]) => {
|
||||
it(msg, done => {
|
||||
const backend = new ChainBackend('chain',
|
||||
backendArgs.map((args) => new TestBackend(...args)));
|
||||
backend[fn](...fnArgs, (err, res) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(res, expected);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
}));
|
||||
|
||||
[
|
||||
// function name, function args
|
||||
['getCanonicalIds', [null, null]],
|
||||
['getEmailAddresses', [null, null]],
|
||||
].forEach(([fn, fnArgs]) =>
|
||||
describe(`::${fn}`, () => {
|
||||
it('should return an error if any of the clients fails', done => {
|
||||
const backend = new ChainBackend('chain', [
|
||||
new TestBackend('test1', null, { message: { body: { test1: 'aaa' } } }),
|
||||
new TestBackend('test2', testError, null),
|
||||
new TestBackend('test3', null, { message: { body: { test2: 'bbb' } } }),
|
||||
]);
|
||||
|
||||
backend[fn](...fnArgs, err => {
|
||||
assert.deepStrictEqual(err, testError);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should merge results from clients into a single response object', done => {
|
||||
const backend = new ChainBackend('chain', [
|
||||
new TestBackend('test1', null, { message: { body: { test1: 'aaa' } } }),
|
||||
new TestBackend('test2', null, { message: { body: { test2: 'bbb' } } }),
|
||||
]);
|
||||
|
||||
backend[fn](...fnArgs, (err, res) => {
|
||||
assert.ifError(err);
|
||||
assert.deepStrictEqual(res, {
|
||||
message: { body: {
|
||||
test1: 'aaa',
|
||||
test2: 'bbb',
|
||||
} },
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
}));
|
||||
|
||||
describe('::checkPolicies', () => {
|
||||
it('should return an error if any of the clients fails', done => {
|
||||
const backend = new ChainBackend('chain', [
|
||||
new TestBackend('test1', null, {
|
||||
message: { body: [{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/obj1' }] },
|
||||
}),
|
||||
new TestBackend('test2', testError, null),
|
||||
new TestBackend('test3', null, {
|
||||
message: { body: [{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj1' }] },
|
||||
}),
|
||||
]);
|
||||
|
||||
backend.checkPolicies(null, null, null, err => {
|
||||
assert.deepStrictEqual(err, testError);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should merge results from clients into a single response object', done => {
|
||||
const backend = new ChainBackend('chain', [
|
||||
new TestBackend('test1', null, {
|
||||
message: { body: [{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/obj1' }] },
|
||||
}),
|
||||
new TestBackend('test2', null, {
|
||||
message: { body: [{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj2' }] },
|
||||
}),
|
||||
new TestBackend('test3', null, {
|
||||
message: { body: [{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj1' }] },
|
||||
}),
|
||||
]);
|
||||
|
||||
backend.checkPolicies(null, null, null, (err, res) => {
|
||||
assert.ifError(err);
|
||||
assert.deepStrictEqual(res, {
|
||||
message: { body: [
|
||||
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj1' },
|
||||
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj2' },
|
||||
] },
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
describe('::_mergeObject', () => {
|
||||
it('should correctly merge responses', () => {
|
||||
const objectResps = [
|
||||
{ message: { body: {
|
||||
id1: 'email1@test.com',
|
||||
wrongformatcanid: 'WrongFormat',
|
||||
id4: 'email4@test.com',
|
||||
} } },
|
||||
{ message: { body: {
|
||||
id2: 'NotFound',
|
||||
id3: 'email3@test.com',
|
||||
id4: 'email5@test.com',
|
||||
} } },
|
||||
];
|
||||
assert.deepStrictEqual(
|
||||
ChainBackend._mergeObjects(objectResps),
|
||||
{
|
||||
id1: 'email1@test.com',
|
||||
wrongformatcanid: 'WrongFormat',
|
||||
id2: 'NotFound',
|
||||
id3: 'email3@test.com',
|
||||
// id4 should be overwritten
|
||||
id4: 'email5@test.com',
|
||||
},
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('::_mergePolicies', () => {
|
||||
it('should correctly merge policies', () => {
|
||||
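// When the same ARN appears in several responses, an isAllowed: true entry
// is expected to win over a false one (see true1/true2 below).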
const policyResps = [
|
||||
{ message: { body: [
|
||||
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/true1' },
|
||||
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/true2' },
|
||||
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/false1' },
|
||||
] } },
|
||||
{ message: { body: [
|
||||
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/true1' },
|
||||
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/true2' },
|
||||
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/false2' },
|
||||
] } },
|
||||
];
|
||||
assert.deepStrictEqual(
|
||||
ChainBackend._mergePolicies(policyResps),
|
||||
[
|
||||
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/true1' },
|
||||
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/true2' },
|
||||
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/false1' },
|
||||
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/false2' },
|
||||
],
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('::healthcheck', () => {
|
||||
it('should return error if a single client is unhealthy', done => {
|
||||
const backend = new ChainBackend('chain', [
|
||||
new TestBackend('test1', null, { code: 200 }),
|
||||
new TestBackend('test2', testError, { code: 503 }),
|
||||
new TestBackend('test3', null, { code: 200 }),
|
||||
]);
|
||||
backend.healthcheck(null, (err, res) => {
|
||||
assert.deepStrictEqual(err, errors.InternalError);
|
||||
assert.deepStrictEqual(res, [
|
||||
{ error: null, status: { code: 200 } },
|
||||
{ error: testError, status: { code: 503 } },
|
||||
{ error: null, status: { code: 200 } },
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return result if all clients are healthy', done => {
|
||||
const backend = new ChainBackend('chain', [
|
||||
new TestBackend('test1', null, { msg: 'test1', code: 200 }),
|
||||
new TestBackend('test2', null, { msg: 'test2', code: 200 }),
|
||||
new TestBackend('test3', null, { msg: 'test3', code: 200 }),
|
||||
]);
|
||||
backend.healthcheck(null, (err, res) => {
|
||||
assert.ifError(err);
|
||||
assert.deepStrictEqual(res, [
|
||||
{ error: null, status: { msg: 'test1', code: 200 } },
|
||||
{ error: null, status: { msg: 'test2', code: 200 } },
|
||||
{ error: null, status: { msg: 'test3', code: 200 } },
|
||||
]);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -1,6 +1,6 @@
|
|||
const assert = require('assert');
|
||||
|
||||
const Indexer = require('../../../../lib/auth/in_memory/Indexer');
|
||||
const Indexer = require('../../../../lib/auth/backends/in_memory/Indexer');
|
||||
const ref = require('./sample_authdata.json');
|
||||
const { should } = require('./AuthLoader.spec');
|
||||
|
||||
|
|
|
@@ -5,7 +5,7 @@ const assert = require('assert');
|
|||
const constructStringToSign =
|
||||
require('../../../../lib/auth/v2/constructStringToSign');
|
||||
const hashSignature =
|
||||
require('../../../../lib/auth/in_memory/vaultUtilities').hashSignature;
|
||||
require('../../../../lib/auth/backends/in_memory/vaultUtilities').hashSignature;
|
||||
const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
|
||||
|
||||
const log = new DummyRequestLogger();
|
||||
|
|
|
@@ -54,6 +54,14 @@ describe('should URIencode in accordance with AWS rules', () => {
|
|||
assert.strictEqual(actualOutput, expectedOutput);
|
||||
});
|
||||
|
||||
it('should encode codepoints that use surrogate pairs in UTF-16 as a ' +
|
||||
'single UTF-8 sequence', () => {
|
||||
const input = '/s3amazonaws.com/I-like-🌮s';
|
||||
const expectedOutput = '%2Fs3amazonaws.com%2FI-like-%F0%9F%8C%AEs';
|
||||
const actualOutput = awsURIencode(input);
|
||||
assert.strictEqual(actualOutput, expectedOutput);
|
||||
});
|
||||
|
||||
it('should skip invalid query params', () => {
|
||||
const input = ['s3:ObjectCreated:*', 's3:ObjectRemoved:*',
|
||||
's3:BucketCreated:*', 's3:BucketRemoved:*'];
|
||||
|
|
|
@@ -269,4 +269,33 @@ describe('v4 headerAuthCheck', () => {
|
|||
assert.strictEqual(res.params.version, 4);
|
||||
done();
|
||||
});
|
||||
|
||||
it('should not return error if proxy_path header is added', done => {
|
||||
// Freezes time so date created within function will be Feb 8, 2016
|
||||
const clock = fakeTimers.install({ now: 1454962445000 });
|
||||
/* eslint-disable camelcase */
|
||||
const alteredRequest = createAlteredRequest({
|
||||
proxy_path: 'proxy/1234' }, 'headers', request, headers);
|
||||
/* eslint-enable camelcase */
|
||||
const res = headerAuthCheck(alteredRequest, log);
|
||||
clock.uninstall();
|
||||
assert.strictEqual(res.err, null);
|
||||
done();
|
||||
});
|
||||
|
||||
it('should return InvalidRequest error if proxy_path header is invalid',
|
||||
done => {
|
||||
// Freezes time so date created within function will be Feb 8, 2016
|
||||
const clock = fakeTimers.install({ now: 1454962445000 });
|
||||
/* eslint-disable camelcase */
|
||||
const alteredRequest = createAlteredRequest({
|
||||
proxy_path: 'absc%2proxy/1234' }, 'headers', request, headers);
|
||||
/* eslint-enable camelcase */
|
||||
const res = headerAuthCheck(alteredRequest, log);
|
||||
clock.uninstall();
|
||||
assert.deepStrictEqual(res.err,
|
||||
errors.InvalidArgument.customizeDescription(
|
||||
'invalid proxy_path header'));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -225,4 +225,34 @@ describe('v4 queryAuthCheck', () => {
|
|||
assert.strictEqual(res.params.version, 4);
|
||||
done();
|
||||
});
|
||||
|
||||
it('should successfully return no error if proxy_path header is added',
|
||||
done => {
|
||||
// Freezes time so date created within function will be Feb 8, 2016
|
||||
const clock = fakeTimers.install({ now: 1454974984001 });
|
||||
/* eslint-disable camelcase */
|
||||
const alteredRequest = createAlteredRequest({ proxy_path:
|
||||
'proxy/1234' }, 'headers', request, query);
|
||||
/* eslint-enable camelcase */
|
||||
const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
|
||||
clock.uninstall();
|
||||
assert.deepStrictEqual(res.err, null);
|
||||
done();
|
||||
});
|
||||
|
||||
it('should return InvalidRequest error if proxy_path header is invalid',
|
||||
done => {
|
||||
// Freezes time so date created within function will be Feb 8, 2016
|
||||
const clock = fakeTimers.install({ now: 1454974984001 });
|
||||
/* eslint-disable camelcase */
|
||||
const alteredRequest = createAlteredRequest({ proxy_path:
|
||||
'absc%2proxy/1234' }, 'headers', request, query);
|
||||
/* eslint-enable camelcase */
|
||||
const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
|
||||
clock.uninstall();
|
||||
assert.deepStrictEqual(res.err,
|
||||
errors.InvalidArgument.customizeDescription(
|
||||
'invalid proxy_path header'));
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -3,7 +3,7 @@
|
|||
const assert = require('assert');
|
||||
|
||||
const calculateSigningKey =
|
||||
require('../../../../lib/auth/in_memory/vaultUtilities')
|
||||
require('../../../../lib/auth/backends/in_memory/vaultUtilities')
|
||||
.calculateSigningKey;
|
||||
|
||||
describe('v4 signing key calculation', () => {
|
||||
|
|
|
@@ -0,0 +1,100 @@
|
|||
const assert = require('assert');
|
||||
const { Readable } = require('stream');
|
||||
|
||||
const V4Transform =
|
||||
require('../../../../../lib/auth/v4/streamingV4/V4Transform');
|
||||
const Backend = require('../../../../../lib/auth/backends/in_memory/Backend').s3;
|
||||
const Vault = require('../../../../../lib/auth/Vault');
|
||||
const { DummyRequestLogger } = require('../../../helpers');
|
||||
|
||||
const log = new DummyRequestLogger();
|
||||
const streamingV4Params = {
|
||||
accessKey: 'accessKey1',
|
||||
signatureFromRequest: '2b8637632a997e06ee7b6c85d7' +
|
||||
'147d2025e8f04d4374f4d7d7320de1618c7509',
|
||||
region: 'us-east-1',
|
||||
scopeDate: '20170516',
|
||||
timestamp: '20170516T204738Z',
|
||||
credentialScope: '20170516/us-east-1/s3/aws4_request',
|
||||
};
|
||||
const dummyAuthData = {
|
||||
accounts:
|
||||
[{ name: 'Bart',
|
||||
email: 'sampleaccount1@sampling.com',
|
||||
arn: 'arn:aws:iam::123456789012:root',
|
||||
canonicalID:
|
||||
'79a59df900b949e55d96a1e698fbacedf' +
|
||||
'd6e09d98eacf8f8d5218e7cd47ef2be',
|
||||
shortid: '123456789012',
|
||||
keys: [{ access: 'accessKey1', secret: 'verySecretKey1' }] },
|
||||
{ name: 'Lisa',
|
||||
email: 'sampleaccount2@sampling.com',
|
||||
arn: 'arn:aws:iam::123456789013:root',
|
||||
canonicalID:
|
||||
'79a59df900b949e55d96a1e698fbacedf' +
|
||||
'd6e09d98eacf8f8d5218e7cd47ef2bf',
|
||||
shortid: '123456789013',
|
||||
keys: [{ access: 'accessKey2', secret: 'verySecretKey2' }] },
|
||||
],
|
||||
};
|
||||
const vault = new Vault(new Backend(dummyAuthData), 'vaultMem');
|
||||
|
||||
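// Minimal Readable that replays the prepared chunks; the trailing null in the
// chunks array signals end-of-stream to the piped V4Transform.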
class AuthMe extends Readable {
|
||||
constructor(chunks) {
|
||||
super();
|
||||
this._parts = chunks;
|
||||
this._index = 0;
|
||||
}
|
||||
|
||||
_read() {
|
||||
this.push(this._parts[this._index]);
|
||||
this._index++;
|
||||
}
|
||||
}
|
||||
|
||||
describe('V4Transform class', () => {
|
||||
it('should authenticate successfully', done => {
|
||||
const v4Transform = new V4Transform(streamingV4Params,
|
||||
vault, log, err => {
|
||||
assert.strictEqual(err, null);
|
||||
});
|
||||
const filler1 = '8;chunk-signature=51d2511f7c6887907dff20474d8db6' +
|
||||
'7d557e5f515a6fa6a8466bb12f8833bcca\r\ncontents\r\n';
|
||||
const filler2 = '0;chunk-signature=c0eac24b7ce72141ec077df9753db' +
|
||||
'4cc8b7991491806689da0395c8bd0231e48\r\n';
|
||||
const chunks = [
|
||||
Buffer.from(filler1),
|
||||
Buffer.from(filler2),
|
||||
null,
|
||||
];
|
||||
const authMe = new AuthMe(chunks);
|
||||
authMe.pipe(v4Transform);
|
||||
v4Transform.on('finish', () => {
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should ignore data sent after final chunk', done => {
|
||||
const v4Transform = new V4Transform(streamingV4Params,
|
||||
vault, log, err => {
|
||||
assert.strictEqual(err, null);
|
||||
done();
|
||||
});
|
||||
const filler1 = '8;chunk-signature=51d2511f7c6887907dff20474d8db6' +
|
||||
'7d557e5f515a6fa6a8466bb12f8833bcca\r\ncontents\r\n';
|
||||
const filler2 = '0;chunk-signature=c0eac24b7ce72141ec077df9753db' +
|
||||
'4cc8b7991491806689da0395c8bd0231e48\r\n';
|
||||
const filler3 = '\r\n';
|
||||
const chunks = [
|
||||
Buffer.from(filler1),
|
||||
Buffer.from(filler2),
|
||||
Buffer.from(filler3),
|
||||
null,
|
||||
];
|
||||
const authMe = new AuthMe(chunks);
|
||||
authMe.pipe(v4Transform);
|
||||
v4Transform.on('finish', () => {
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -57,7 +57,6 @@ function zpad(key, length = 15) {
|
|||
}
|
||||
|
||||
class DummyRequestLogger {
|
||||
|
||||
constructor() {
|
||||
this.ops = [];
|
||||
this.counts = {
|
||||
|
@@ -110,5 +109,11 @@ class DummyRequestLogger {
|
|||
}
|
||||
}
|
||||
|
||||
module.exports = { makeid, timeDiff, makeAuthInfo,
|
||||
createAlteredRequest, zpad, DummyRequestLogger };
|
||||
module.exports = {
|
||||
makeid,
|
||||
timeDiff,
|
||||
makeAuthInfo,
|
||||
createAlteredRequest,
|
||||
zpad,
|
||||
DummyRequestLogger,
|
||||
};
|
||||
|
|
|
@@ -69,7 +69,7 @@ describe('Check IP matches a list of CIDR ranges', () => {
|
|||
[['192.168.1.1'], '192.168.1.1'],
|
||||
].forEach(item =>
|
||||
it(`should match IP ${item[0][0]} without CIDR range`,
|
||||
() => cidrListMatchCheck(item[0], item[1], true))
|
||||
() => cidrListMatchCheck(item[0], item[1], true)),
|
||||
);
|
||||
|
||||
it('should not range match if CIDR range is not provided',
|
||||
|
|
|
@@ -0,0 +1,157 @@
|
|||
const assert = require('assert');
|
||||
const BackendInfo = require('../../../lib/models/BackendInfo');
|
||||
const { DummyRequestLogger } = require('../helpers');
|
||||
const DummyConfig = require('../../utils/DummyConfig');
|
||||
|
||||
const log = new DummyRequestLogger();
|
||||
const data = 'mem';
|
||||
|
||||
const memLocation = 'scality-internal-mem';
|
||||
const fileLocation = 'scality-internal-file';
|
||||
const legacyLocation = 'legacy';
|
||||
|
||||
const dummyConfig = new DummyConfig();
|
||||
const dummyBackendInfo = new BackendInfo(dummyConfig, memLocation,
|
||||
fileLocation, '127.0.0.1');
|
||||
|
||||
const dummyLegacyConfig = new DummyConfig(true);
|
||||
|
||||
describe('BackendInfo class', () => {
|
||||
describe('controllingBackendParam', () => {
|
||||
beforeEach(() => {
|
||||
dummyConfig.backends.data = data;
|
||||
dummyLegacyConfig.backends.data = data;
|
||||
});
|
||||
it('should return object with applicable error if ' +
|
||||
'objectLocationConstraint is invalid', () => {
|
||||
const res = BackendInfo.controllingBackendParam(dummyConfig,
|
||||
'notValid', fileLocation, '127.0.0.1', log);
|
||||
assert.equal(res.isValid, false);
|
||||
assert((res.description).indexOf('Object Location Error')
|
||||
> -1);
|
||||
});
|
||||
it('should return object with applicable error if ' +
|
||||
'bucketLocationConstraint is invalid and no ' +
|
||||
'objectLocationConstraint was provided', () => {
|
||||
const res = BackendInfo.controllingBackendParam(dummyConfig,
|
||||
undefined, 'notValid', '127.0.0.1', log);
|
||||
assert.equal(res.isValid, false);
|
||||
assert((res.description).indexOf('Bucket ' +
|
||||
'Location Error') > -1);
|
||||
});
|
||||
it('If requestEndpoint is invalid, no objectLocationConstraint or ' +
'bucketLocationConstraint was provided, and data backend is set to ' +
'"scality", should return object with applicable error', () => {
|
||||
dummyConfig.backends.data = 'scality';
|
||||
const res = BackendInfo.controllingBackendParam(dummyConfig,
|
||||
undefined, undefined, 'notValid', log);
|
||||
assert.equal(res.isValid, false);
|
||||
assert((res.description).indexOf('Endpoint Location Error') > -1);
|
||||
});
|
||||
|
||||
it('If requestEndpoint is invalid, no objectLocationConstraint or ' +
'bucketLocationConstraint was provided, and data backend is set to ' +
'"scality", should return isValid with a legacy location constraint', () => {
|
||||
dummyLegacyConfig.backends.data = 'scality';
|
||||
const res = BackendInfo.controllingBackendParam(dummyLegacyConfig,
|
||||
undefined, undefined, 'notValid', log);
|
||||
assert.equal(res.isValid, true);
|
||||
});
|
||||
|
||||
it('If requestEndpoint is invalid, no objectLocationConstraint or ' +
'bucketLocationConstraint was provided, and data backend is set to ' +
'"multiple" without a legacy location constraint, should return ' +
'object with applicable error', () => {
|
||||
dummyConfig.backends.data = 'multiple';
|
||||
const res = BackendInfo.controllingBackendParam(dummyConfig,
|
||||
undefined, undefined, 'notValid', log);
|
||||
assert.equal(res.isValid, false);
|
||||
assert((res.description).indexOf('Endpoint Location Error') > -1);
|
||||
});
|
||||
|
||||
it('If requestEndpoint is invalid, no objectLocationConstraint or ' +
'bucketLocationConstraint was provided, and data backend is set to ' +
'"multiple" with a legacy location constraint, should return isValid', () => {
|
||||
dummyLegacyConfig.backends.data = 'multiple';
|
||||
const res = BackendInfo.controllingBackendParam(dummyLegacyConfig,
|
||||
undefined, undefined, 'notValid', log);
|
||||
assert.equal(res.isValid, true);
|
||||
});
|
||||
|
||||
it('should return isValid if requestEndpoint is invalid and ' +
|
||||
'data backend is set to "file"', () => {
|
||||
dummyConfig.backends.data = 'file';
|
||||
const res = BackendInfo.controllingBackendParam(dummyConfig,
|
||||
memLocation, fileLocation, 'notValid', log);
|
||||
assert.equal(res.isValid, true);
|
||||
});
|
||||
|
||||
it('should return isValid if requestEndpoint is invalid and ' +
|
||||
'data backend is set to "mem"', () => {
|
||||
dummyConfig.backends.data = 'mem';
|
||||
const res = BackendInfo.controllingBackendParam(dummyConfig,
|
||||
memLocation, fileLocation, 'notValid', log);
|
||||
assert.equal(res.isValid, true);
|
||||
});
|
||||
|
||||
it('should return isValid if requestEndpoint is invalid but ' +
|
||||
'valid objectLocationConstraint was provided', () => {
|
||||
dummyConfig.backends.data = 'multiple';
|
||||
const res = BackendInfo.controllingBackendParam(dummyConfig,
|
||||
memLocation, undefined, 'notValid', log);
|
||||
assert.equal(res.isValid, true);
|
||||
});
|
||||
|
||||
it('should return isValid if requestEndpoint is invalid but ' +
|
||||
'valid bucketLocationConstraint was provided', () => {
|
||||
dummyConfig.backends.data = 'multiple';
|
||||
const res = BackendInfo.controllingBackendParam(dummyConfig,
|
||||
undefined, memLocation, 'notValid', log);
|
||||
assert.equal(res.isValid, true);
|
||||
});
|
||||
|
||||
it('should return isValid if all backend ' +
|
||||
'parameters are valid', () => {
|
||||
const res = BackendInfo.controllingBackendParam(dummyConfig,
|
||||
memLocation, fileLocation, '127.0.0.1', log);
|
||||
assert.equal(res.isValid, true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getControllingLocationConstraint', () => {
|
||||
it('should return object location constraint', () => {
|
||||
const controllingLC =
|
||||
dummyBackendInfo.getControllingLocationConstraint();
|
||||
assert.strictEqual(controllingLC, memLocation);
|
||||
});
|
||||
});
|
||||
|
||||
describe('legacy for getControllingLocationConstraint', () => {
|
||||
const dummyBackendInfoLegacy = new BackendInfo(dummyLegacyConfig, null,
|
||||
null, '127.0.0.1', legacyLocation);
|
||||
it('should return legacy location constraint', () => {
|
||||
const controllingLC =
|
||||
dummyBackendInfoLegacy.getControllingLocationConstraint();
|
||||
assert.strictEqual(controllingLC, legacyLocation);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getters', () => {
|
||||
it('should return object location constraint', () => {
|
||||
const objectLC =
|
||||
dummyBackendInfo.getObjectLocationConstraint();
|
||||
assert.strictEqual(objectLC, memLocation);
|
||||
});
|
||||
it('should return bucket location constraint', () => {
|
||||
const bucketLC =
|
||||
dummyBackendInfo.getBucketLocationConstraint();
|
||||
assert.strictEqual(bucketLC, fileLocation);
|
||||
});
|
||||
it('should return request endpoint', () => {
|
||||
const reqEndpoint =
|
||||
dummyBackendInfo.getRequestEndpoint();
|
||||
assert.strictEqual(reqEndpoint, '127.0.0.1');
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,170 @@
|
|||
const assert = require('assert');
|
||||
const BucketAzureInfo = require('../../../index').models.BucketAzureInfo;
|
||||
|
||||
|
||||
const testAzureInfoObj = {
|
||||
sku: 'skuname',
|
||||
accessTier: 'accessTierName',
|
||||
kind: 'kindName',
|
||||
systemKeys: ['key1', 'key2'],
|
||||
tenantKeys: ['key3', 'key4'],
|
||||
subscriptionId: 'subscriptionIdName',
|
||||
resourceGroup: 'resourceGroupName',
|
||||
deleteRetentionPolicy: { enabled: true, days: 14 },
|
||||
managementPolicies: [],
|
||||
httpsOnly: false,
|
||||
tags: { foo: 'bar' },
|
||||
networkACL: [],
|
||||
cname: 'www.example.com',
|
||||
azureFilesAADIntegration: false,
|
||||
hnsEnabled: false,
|
||||
logging: {},
|
||||
hourMetrics: {},
|
||||
minuteMetrics: {},
|
||||
serviceVersion: '2018-03-28',
|
||||
};
|
||||
|
||||
const azureInfo = new BucketAzureInfo(testAzureInfoObj);
|
||||
|
||||
|
||||
describe('BucketAzureInfo value', () => {
|
||||
it('should return the correct value', () => {
|
||||
const azureInfoObj = azureInfo.getValue();
|
||||
assert.deepStrictEqual(azureInfoObj, testAzureInfoObj);
|
||||
});
|
||||
});
|
||||
|
||||
describe('BucketAzureInfo setters/getters', () => {
|
||||
it('should control the sku attribute', () => {
|
||||
const sku = 'new sku value';
|
||||
azureInfo.setSku(sku);
|
||||
assert.deepStrictEqual(azureInfo.getSku(), sku);
|
||||
});
|
||||
it('should control the accessTier attribute', () => {
|
||||
const accessTier = 'new accessTier value';
|
||||
azureInfo.setAccessTier(accessTier);
|
||||
assert.deepStrictEqual(azureInfo.getAccessTier(), accessTier);
|
||||
});
|
||||
it('should control the kind attribute', () => {
|
||||
const kind = 'new kind value';
|
||||
azureInfo.setKind(kind);
|
||||
assert.deepStrictEqual(azureInfo.getKind(), kind);
|
||||
});
|
||||
it('should control the systemKeys attribute', () => {
|
||||
const systemKeys = ['newKey1', 'newKey2'];
|
||||
azureInfo.setSystemKeys(systemKeys);
|
||||
assert.deepStrictEqual(azureInfo.getSystemKeys(),
|
||||
systemKeys);
|
||||
});
|
||||
it('should control the tenantKeys attribute', () => {
|
||||
const tenantKeys = ['newKey3', 'newKey4'];
|
||||
azureInfo.setTenantKeys(tenantKeys);
|
||||
assert.deepStrictEqual(azureInfo.getTenantKeys(),
|
||||
tenantKeys);
|
||||
});
|
||||
it('should control the subscriptionId attribute', () => {
|
||||
const subscriptionId = 'new subscription value';
|
||||
azureInfo.setSubscriptionId(subscriptionId);
|
||||
assert.deepStrictEqual(azureInfo.getSubscriptionId(),
|
||||
subscriptionId);
|
||||
});
|
||||
it('should control the resourceGroup attribute', () => {
|
||||
const resourceGroup = 'new resource group value';
|
||||
azureInfo.setResourceGroup(resourceGroup);
|
||||
assert.deepStrictEqual(azureInfo.getResourceGroup(),
|
||||
resourceGroup);
|
||||
});
|
||||
it('should control the deleteRetentionPolicy attribute', () => {
|
||||
const deleteRetentionPolicy = { enabled: false };
|
||||
azureInfo.setDeleteRetentionPolicy(deleteRetentionPolicy);
|
||||
assert.deepStrictEqual(azureInfo.getDeleteRetentionPolicy(),
|
||||
deleteRetentionPolicy);
|
||||
});
|
||||
it('should control the managementPolicies attribute', () => {
|
||||
const managementPolicies = [{}];
|
||||
azureInfo.setManagementPolicies(managementPolicies);
|
||||
assert.deepStrictEqual(azureInfo.getManagementPolicies(),
|
||||
managementPolicies);
|
||||
});
|
||||
it('should control the httpsOnly attribute', () => {
|
||||
const httpsOnly = true;
|
||||
azureInfo.setHttpsOnly(httpsOnly);
|
||||
assert.deepStrictEqual(azureInfo.getHttpsOnly(),
|
||||
httpsOnly);
|
||||
});
|
||||
it('should control the tags attribute', () => {
|
||||
const tags = { baz: 'baz' };
|
||||
azureInfo.setTags(tags);
|
||||
assert.deepStrictEqual(azureInfo.getTags(),
|
||||
tags);
|
||||
});
|
||||
it('should control the networkACL attribute', () => {
|
||||
const networkACL = [{}];
|
||||
azureInfo.setNetworkACL(networkACL);
|
||||
assert.deepStrictEqual(azureInfo.getNetworkACL(),
|
||||
networkACL);
|
||||
});
|
||||
it('should control the cname attribute', () => {
|
||||
const cname = 'new cname value';
|
||||
azureInfo.setCname(cname);
|
||||
assert.deepStrictEqual(azureInfo.getCname(),
|
||||
cname);
|
||||
});
|
||||
it('should control the azureFilesAADIntegration attribute', () => {
|
||||
const azureFilesAADIntegration = true;
|
||||
azureInfo.setAzureFilesAADIntegration(azureFilesAADIntegration);
|
||||
assert.deepStrictEqual(azureInfo.getAzureFilesAADIntegration(),
|
||||
azureFilesAADIntegration);
|
||||
});
|
||||
it('should control the hnsEnabled attribute', () => {
|
||||
const hnsEnabled = true;
|
||||
azureInfo.setHnsEnabled(hnsEnabled);
|
||||
assert.deepStrictEqual(azureInfo.getHnsEnabled(),
|
||||
hnsEnabled);
|
||||
});
|
||||
it('should control the logging attribute', () => {
|
||||
const logging = {
|
||||
version: '1.0',
|
||||
delete: false,
|
||||
read: false,
|
||||
write: false,
|
||||
retentionPolicy: {
|
||||
enabled: false,
|
||||
days: 0,
|
||||
},
|
||||
};
|
||||
azureInfo.setLogging(logging);
|
||||
assert.deepStrictEqual(azureInfo.getLogging(), logging);
|
||||
});
|
||||
it('should control the hourMetrics attribute', () => {
|
||||
const hourMetrics = {
|
||||
version: '1.0',
|
||||
enabled: false,
|
||||
includeAPIs: false,
|
||||
retentionPolicy: {
|
||||
enabled: false,
|
||||
days: 0,
|
||||
},
|
||||
};
|
||||
azureInfo.setHourMetrics(hourMetrics);
|
||||
assert.deepStrictEqual(azureInfo.getHourMetrics(), hourMetrics);
|
||||
});
|
||||
it('should control the minuteMetrics attribute', () => {
|
||||
const minuteMetrics = {
|
||||
version: '1.0',
|
||||
enabled: false,
|
||||
includeAPIs: false,
|
||||
retentionPolicy: {
|
||||
enabled: false,
|
||||
days: 0,
|
||||
},
|
||||
};
|
||||
azureInfo.setMinuteMetrics(minuteMetrics);
|
||||
assert.deepStrictEqual(azureInfo.getMinuteMetrics(), minuteMetrics);
|
||||
});
|
||||
it('should control the serviceVersion attribute', () => {
|
||||
const serviceVersion = '2019-08-01';
|
||||
azureInfo.setServiceVersion(serviceVersion);
|
||||
assert.deepStrictEqual(azureInfo.getServiceVersion(), serviceVersion);
|
||||
});
|
||||
});
|
|
@@ -59,6 +59,8 @@ const testWebsiteConfiguration = new WebsiteConfiguration({
});

const testLocationConstraint = 'us-west-1';
const testReadLocationConstraint = 'us-west-2';
const testLocationConstraintIngest = 'us-west-3:ingest';

const testCorsConfiguration = [
    { id: 'test',
@@ -116,7 +118,25 @@ const testLifecycleConfiguration = {
    ],
};

const testIngestionConfiguration = { status: 'enabled' };
const testUid = '99ae3446-7082-4c17-ac97-52965dc004ec';
const testAzureInfo = {
    sku: 'skuname',
    accessTier: 'accessTierName',
    kind: 'kindName',
    systemKeys: ['key1', 'key2'],
    tenantKeys: ['key1', 'key2'],
    subscriptionId: 'subscriptionIdName',
    resourceGroup: 'resourceGroupName',
    deleteRetentionPolicy: { enabled: true, days: 14 },
    managementPolicies: [],
    httpsOnly: false,
    tags: { foo: 'bar' },
    networkACL: [],
    cname: 'www.example.com',
    azureFilesAADIntegration: false,
    hnsEnabled: false,
};

const testBucketPolicy = {
    Version: '2012-10-17',
@@ -181,8 +201,8 @@ Object.keys(acl).forEach(
            testCorsConfiguration,
            testReplicationConfiguration,
            testLifecycleConfiguration,
            testBucketPolicy,
            testUid,
            testBucketPolicy, testUid, undefined,
            true, undefined, testAzureInfo,
            testobjectLockEnabled,
            testObjectLockConfiguration,
            testNotificationConfiguration);
@@ -204,6 +224,7 @@ Object.keys(acl).forEach(
                versioningConfiguration:
                    dummyBucket._versioningConfiguration,
                locationConstraint: dummyBucket._locationConstraint,
                readLocationConstraint: dummyBucket._readLocationConstraint,
                websiteConfiguration: dummyBucket._websiteConfiguration
                    .getConfig(),
                cors: dummyBucket._cors,
@@ -213,6 +234,9 @@ Object.keys(acl).forEach(
                    dummyBucket._lifecycleConfiguration,
                bucketPolicy: dummyBucket._bucketPolicy,
                uid: dummyBucket._uid,
                isNFS: dummyBucket._isNFS,
                ingestion: dummyBucket._ingestion,
                azureInfo: dummyBucket._azureInfo,
                objectLockEnabled: dummyBucket._objectLockEnabled,
                objectLockConfiguration:
                    dummyBucket._objectLockConfiguration,
@ -232,7 +256,57 @@ Object.keys(acl).forEach(
|
|||
});
|
||||
});
|
||||
|
||||
describe('fromObj on BucketInfo class', () => {
|
||||
it('should create BucketInfo instance from fromObj', done => {
|
||||
const dataObj = {
|
||||
_acl: dummyBucket._acl,
|
||||
_name: dummyBucket._name,
|
||||
_owner: dummyBucket._owner,
|
||||
_ownerDisplayName: dummyBucket._ownerDisplayName,
|
||||
_creationDate: dummyBucket._creationDate,
|
||||
_mdBucketModelVersion: dummyBucket._mdBucketModelVersion,
|
||||
_transient: dummyBucket._transient,
|
||||
_deleted: dummyBucket._deleted,
|
||||
_serverSideEncryption: dummyBucket._serverSideEncryption,
|
||||
_versioningConfiguration:
|
||||
dummyBucket._versioningConfiguration,
|
||||
_locationConstraint: dummyBucket._locationConstraint,
|
||||
_readLocationConstraint: dummyBucket._readLocationConstraint,
|
||||
_websiteConfiguration: testWebsiteConfiguration,
|
||||
_cors: dummyBucket._cors,
|
||||
_replicationConfiguration:
|
||||
dummyBucket._replicationConfiguration,
|
||||
_lifecycleConfiguration:
|
||||
dummyBucket._lifecycleConfiguration,
|
||||
_bucketPolicy: dummyBucket._bucketPolicy,
|
||||
_uid: dummyBucket._uid,
|
||||
_isNFS: dummyBucket._isNFS,
|
||||
_ingestion: dummyBucket._ingestion,
|
||||
_azureInfo: dummyBucket._azureInfo,
|
||||
_objectLockEnabled: dummyBucket._objectLockEnabled,
|
||||
_objectLockConfiguration:
|
||||
dummyBucket._objectLockConfiguration,
|
||||
_notificationConfiguration:
|
||||
dummyBucket._notificationConfiguration,
|
||||
};
|
||||
const fromObj = BucketInfo.fromObj(dataObj);
|
||||
assert(fromObj instanceof BucketInfo);
|
||||
assert.deepStrictEqual(fromObj, dummyBucket);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
describe('constructor', () => {
|
||||
it('this should have the right BucketInfo types',
|
||||
() => {
|
||||
assert.strictEqual(typeof dummyBucket.getName(), 'string');
|
||||
assert.strictEqual(typeof dummyBucket.getOwner(), 'string');
|
||||
assert.strictEqual(typeof dummyBucket.getOwnerDisplayName(),
|
||||
'string');
|
||||
assert.strictEqual(typeof dummyBucket.getCreationDate(),
|
||||
'string');
|
||||
assert.strictEqual(typeof dummyBucket.getUid(), 'string');
|
||||
});
|
||||
it('this should have the right BucketInfo types', () => {
|
||||
assert.strictEqual(typeof dummyBucket.getName(), 'string');
|
||||
assert.strictEqual(typeof dummyBucket.getOwner(), 'string');
|
||||
|
@ -309,6 +383,18 @@ Object.keys(acl).forEach(
|
|||
assert.deepStrictEqual(dummyBucket.getLocationConstraint(),
|
||||
testLocationConstraint);
|
||||
});
|
||||
it('getReadLocationConstraint should return locationConstraint ' +
|
||||
'if readLocationConstraint hasn\'t been set', () => {
|
||||
assert.deepStrictEqual(dummyBucket.getReadLocationConstraint(),
|
||||
testLocationConstraint);
|
||||
});
|
||||
it('getReadLocationConstraint should return readLocationConstraint',
|
||||
() => {
|
||||
dummyBucket._readLocationConstraint =
|
||||
testReadLocationConstraint;
|
||||
assert.deepStrictEqual(dummyBucket.getReadLocationConstraint(),
|
||||
testReadLocationConstraint);
|
||||
});
|
||||
it('getCors should return CORS configuration', () => {
|
||||
assert.deepStrictEqual(dummyBucket.getCors(),
|
||||
testCorsConfiguration);
|
||||
|
@ -324,6 +410,17 @@ Object.keys(acl).forEach(
|
|||
it('getUid should return unique id of bucket', () => {
|
||||
assert.deepStrictEqual(dummyBucket.getUid(), testUid);
|
||||
});
|
||||
it('isNFS should return whether bucket is on NFS', () => {
|
||||
assert.deepStrictEqual(dummyBucket.isNFS(), true);
|
||||
});
|
||||
it('setIsNFS should set whether bucket is on NFS', () => {
|
||||
dummyBucket.setIsNFS(false);
|
||||
assert.deepStrictEqual(dummyBucket.isNFS(), false);
|
||||
});
|
||||
it('getAzureInfo should return the expected structure', () => {
|
||||
const azureInfo = dummyBucket.getAzureInfo();
|
||||
assert.deepStrictEqual(azureInfo, testAzureInfo);
|
||||
});
|
||||
it('object lock should be disabled by default', () => {
|
||||
assert.deepStrictEqual(
|
||||
dummyBucket.isObjectLockEnabled(), false);
|
||||
|
@ -407,8 +504,7 @@ Object.keys(acl).forEach(
|
|||
protocol: 'https',
|
||||
},
|
||||
};
|
||||
dummyBucket
|
||||
.setWebsiteConfiguration(newWebsiteConfiguration);
|
||||
dummyBucket.setWebsiteConfiguration(newWebsiteConfiguration);
|
||||
assert.deepStrictEqual(dummyBucket.getWebsiteConfiguration(),
|
||||
newWebsiteConfiguration);
|
||||
});
|
||||
|
@@ -473,6 +569,22 @@ Object.keys(acl).forEach(
            assert.deepStrictEqual(
                dummyBucket.getBucketPolicy(), newBucketPolicy);
        });
        it('enableIngestion should set ingestion status to enabled', () => {
            dummyBucket.enableIngestion();
            assert.deepStrictEqual(dummyBucket.getIngestion(),
                { status: 'enabled' });
        });
        it('disableIngestion should set ingestion status to disabled', () => {
            dummyBucket.disableIngestion();
            assert.deepStrictEqual(dummyBucket.getIngestion(),
                { status: 'disabled' });
        });
        it('setAzureInfo should work', () => {
            const dummyAzureInfo = {};
            dummyBucket.setAzureInfo(dummyAzureInfo);
            const azureInfo = dummyBucket.getAzureInfo();
            assert.deepStrictEqual(azureInfo, dummyAzureInfo);
        });
        it('setObjectLockConfiguration should set object lock ' +
            'configuration', () => {
            const newObjectLockConfig = {
@ -519,5 +631,77 @@ Object.keys(acl).forEach(
|
|||
dummyBucket.getUid(), testUid);
|
||||
});
|
||||
});
|
||||
})
|
||||
}),
|
||||
);
|
||||
|
||||
describe('uid default', () => {
|
||||
it('should set uid if none is specified by constructor params', () => {
|
||||
const dummyBucket = new BucketInfo(
|
||||
bucketName, owner, ownerDisplayName, testDate,
|
||||
BucketInfo.currentModelVersion(), acl[emptyAcl],
|
||||
false, false, {
|
||||
cryptoScheme: 1,
|
||||
algorithm: 'sha1',
|
||||
masterKeyId: 'somekey',
|
||||
mandatory: true,
|
||||
}, testVersioningConfiguration,
|
||||
testLocationConstraint,
|
||||
testWebsiteConfiguration,
|
||||
testCorsConfiguration,
|
||||
testReplicationConfiguration,
|
||||
testLifecycleConfiguration);
|
||||
|
||||
const defaultUid = dummyBucket.getUid();
|
||||
assert(defaultUid);
|
||||
assert.strictEqual(defaultUid.length, 36);
|
||||
});
|
||||
});
|
||||
|
||||
describe('ingest', () => {
|
||||
it('should enable ingestion if ingestion param sent on bucket creation',
|
||||
() => {
|
||||
const dummyBucket = new BucketInfo(
|
||||
bucketName, owner, ownerDisplayName, testDate,
|
||||
BucketInfo.currentModelVersion(), acl[emptyAcl],
|
||||
false, false, {
|
||||
cryptoScheme: 1,
|
||||
algorithm: 'sha1',
|
||||
masterKeyId: 'somekey',
|
||||
mandatory: true,
|
||||
}, testVersioningConfiguration,
|
||||
testLocationConstraintIngest,
|
||||
testWebsiteConfiguration,
|
||||
testCorsConfiguration,
|
||||
testReplicationConfiguration,
|
||||
testLifecycleConfiguration,
|
||||
testBucketPolicy,
|
||||
testUid, undefined, true, testIngestionConfiguration);
|
||||
assert.deepStrictEqual(dummyBucket.getIngestion(),
|
||||
{ status: 'enabled' });
|
||||
assert.strictEqual(dummyBucket.isIngestionBucket(), true);
|
||||
assert.strictEqual(dummyBucket.isIngestionEnabled(), true);
|
||||
});
|
||||
|
||||
it('should have ingestion as null if no ingestion param was sent on' +
|
||||
' bucket creation', () => {
|
||||
const dummyBucket = new BucketInfo(
|
||||
bucketName, owner, ownerDisplayName, testDate,
|
||||
BucketInfo.currentModelVersion(), acl[emptyAcl],
|
||||
false, false, {
|
||||
cryptoScheme: 1,
|
||||
algorithm: 'sha1',
|
||||
masterKeyId: 'somekey',
|
||||
mandatory: true,
|
||||
}, testVersioningConfiguration,
|
||||
testLocationConstraintIngest,
|
||||
testWebsiteConfiguration,
|
||||
testCorsConfiguration,
|
||||
testReplicationConfiguration,
|
||||
testLifecycleConfiguration,
|
||||
testBucketPolicy,
|
||||
testUid, undefined, true);
|
||||
assert.deepStrictEqual(dummyBucket.getIngestion(), null);
|
||||
assert.strictEqual(dummyBucket.isIngestionBucket(), false);
|
||||
assert.strictEqual(dummyBucket.isIngestionEnabled(), false);
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -1,5 +1,6 @@
const assert = require('assert');
const { parseString } = require('xml2js');
const errors = require('../../../lib/errors');

const LifecycleConfiguration =
    require('../../../lib/models/LifecycleConfiguration.js');
@@ -10,6 +11,18 @@ const days = {
    Expiration: 'Days',
};

const mockConfig = {
    replicationEndpoints: [
        {
            site: 'a',
        },
        {
            site: 'b',
        },
    ],
};

const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
const date = new Date();
date.setUTCHours(0, 0, 0, 0);

@ -58,12 +71,6 @@ const requiredTags = [
|
|||
{ tag: 'Action', error: 'InvalidRequest',
|
||||
errMessage: 'Rule does not include valid action' }];
|
||||
|
||||
const notImplementedActions = [
|
||||
{ tag: 'Transition',
|
||||
errMessage: 'Transition lifecycle action not yet implemented' },
|
||||
{ tag: 'NoncurrentVersionTransition',
|
||||
errMessage: 'Transition lifecycle action not yet implemented' }];
|
||||
|
||||
const invalidActions = [
|
||||
{ tag: 'AbortIncompleteMultipartUpload', label: 'no-time',
|
||||
error: 'MalformedXML',
|
||||
|
@@ -274,8 +281,8 @@ function generateParsedXml(errorTag, tagObj, cb) {
}

function checkError(parsedXml, error, errMessage, cb) {
    const lcConfig = new LifecycleConfiguration(parsedXml).
        getLifecycleConfiguration();
    const lcConfig = new LifecycleConfiguration(parsedXml, mockConfig)
        .getLifecycleConfiguration();
    assert.strictEqual(lcConfig.error[error], true);
    assert.strictEqual(lcConfig.error.description, errMessage);
    cb();
@ -301,16 +308,6 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
|
|||
});
|
||||
});
|
||||
|
||||
notImplementedActions.forEach(action => {
|
||||
const expError = 'NotImplemented';
|
||||
it(`should return ${expError} error for ${action.tag} action`,
|
||||
done => {
|
||||
generateParsedXml('Action', action, parsedXml => {
|
||||
checkError(parsedXml, expError, action.errMessage, done);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
invalidActions.forEach(a => {
|
||||
it(`should return ${a.error} for ${a.label} action error`,
|
||||
done => {
|
||||
|
@@ -361,14 +358,14 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
        });
    });

    it('should use last listed Prefix if multiple Prefixes included', done => {
        tagObj.label = 'mult-prefixes';
        tagObj.lastPrefix = 'coco';
    it('should apply all unique Key tags if multiple tags included', done => {
        tagObj.label = 'mult-tags';
        generateParsedXml('Filter', tagObj, parsedXml => {
            const lcConfig = new LifecycleConfiguration(parsedXml).
                getLifecycleConfiguration();
            assert.strictEqual(tagObj.lastPrefix,
                lcConfig.rules[0].filter.rulePrefix);
            const lcConfig = new LifecycleConfiguration(parsedXml, mockConfig)
                .getLifecycleConfiguration();
            const expected = [{ key: 'color', val: 'blue' },
                { key: 'shape', val: 'circle' }];
            assert.deepStrictEqual(expected, lcConfig.rules[0].filter.tags);
            done();
        });
    });
@ -385,7 +382,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
|
|||
tagObj.label = 'empty-prefix';
|
||||
const expectedPrefix = '';
|
||||
generateParsedXml('Filter', tagObj, parsedXml => {
|
||||
const lcConfig = new LifecycleConfiguration(parsedXml).
|
||||
const lcConfig = new LifecycleConfiguration(parsedXml, mockConfig).
|
||||
getLifecycleConfiguration();
|
||||
assert.strictEqual(expectedPrefix,
|
||||
lcConfig.rules[0].filter.rulePrefix);
|
||||
|
@ -394,6 +391,472 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('LifecycleConfiguration', () => {
|
||||
const lifecycleConfiguration = new LifecycleConfiguration({}, mockConfig);
|
||||
function getParsedXML() {
|
||||
return {
|
||||
LifecycleConfiguration: {
|
||||
Rule: [{
|
||||
ID: ['test-id'],
|
||||
Prefix: [''],
|
||||
Status: ['Enabled'],
|
||||
Expiration: [{
|
||||
Days: 1,
|
||||
}],
|
||||
}],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
describe('::_getRuleFilterDesc', () => {
|
||||
it('should get Prefix', () => {
|
||||
const rule = getParsedXML().LifecycleConfiguration.Rule[0];
|
||||
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
|
||||
assert.strictEqual(ruleFilter, "prefix ''");
|
||||
});
|
||||
|
||||
it('should get Filter.Prefix', () => {
|
||||
const rule = getParsedXML().LifecycleConfiguration.Rule[0];
|
||||
delete rule.Prefix;
|
||||
rule.Filter = [{ Prefix: [''] }];
|
||||
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
|
||||
assert.strictEqual(ruleFilter, "filter '(prefix=)'");
|
||||
});
|
||||
|
||||
it('should get Filter.Tag', () => {
|
||||
const rule = getParsedXML().LifecycleConfiguration.Rule[0];
|
||||
delete rule.Prefix;
|
||||
rule.Filter = [{ Tag: [{ Key: ['a'], Value: [''] }] }];
|
||||
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
|
||||
assert.strictEqual(ruleFilter, "filter '(tag: key=a, value=)'");
|
||||
});
|
||||
|
||||
it('should get Filter.And', () => {
|
||||
const rule = getParsedXML().LifecycleConfiguration.Rule[0];
|
||||
delete rule.Prefix;
|
||||
rule.Filter = [{
|
||||
And: [{
|
||||
Prefix: [''],
|
||||
Tag: [{
|
||||
Key: ['a'],
|
||||
Value: ['b'],
|
||||
},
|
||||
{
|
||||
Key: ['c'],
|
||||
Value: ['d'],
|
||||
}],
|
||||
}],
|
||||
}];
|
||||
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
|
||||
assert.strictEqual(ruleFilter, 'filter ' +
|
||||
"'(prefix= and tag: key=a, value=b and tag: key=c, value=d)'");
|
||||
});
|
||||
|
||||
it('should get Filter.And without Prefix', () => {
|
||||
const rule = getParsedXML().LifecycleConfiguration.Rule[0];
|
||||
delete rule.Prefix;
|
||||
rule.Filter = [{
|
||||
And: [{
|
||||
Tag: [{
|
||||
Key: ['a'],
|
||||
Value: ['b'],
|
||||
},
|
||||
{
|
||||
Key: ['c'],
|
||||
Value: ['d'],
|
||||
}],
|
||||
}],
|
||||
}];
|
||||
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
|
||||
assert.strictEqual(ruleFilter,
|
||||
"filter '(tag: key=a, value=b and tag: key=c, value=d)'");
|
||||
});
|
||||
|
||||
it('should get Filter with empty object', () => {
|
||||
const rule = {
|
||||
ID: ['test-id'],
|
||||
Status: ['Enabled'],
|
||||
Expiration: [{
|
||||
Days: 1,
|
||||
}],
|
||||
};
|
||||
rule.Filter = [{}];
|
||||
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
|
||||
assert.strictEqual(ruleFilter, 'filter (all)');
|
||||
});
|
||||
|
||||
it('should get empty Filter', () => {
|
||||
const rule = {
|
||||
ID: ['test-id'],
|
||||
Status: ['Enabled'],
|
||||
Expiration: [{
|
||||
Days: 1,
|
||||
}],
|
||||
};
|
||||
rule.Filter = [];
|
||||
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
|
||||
assert.strictEqual(ruleFilter, 'filter (all)');
|
||||
});
|
||||
});
|
||||
|
||||
describe('::_checkDays', () => {
|
||||
it(`should return no error when days value is 0 - ${MAX_DAYS}`, () => {
|
||||
const error = lifecycleConfiguration._checkDays({
|
||||
days: 0,
|
||||
});
|
||||
assert.strictEqual(error, null);
|
||||
});
|
||||
|
||||
it('should return error when exceeding max value', () => {
|
||||
const error = lifecycleConfiguration._checkDays({
|
||||
days: MAX_DAYS + 1,
|
||||
field: 'a',
|
||||
ancestor: 'b',
|
||||
});
|
||||
const msg = "'a' in b action must not exceed 2147483647";
|
||||
const expected = errors.MalformedXML.customizeDescription(msg);
|
||||
assert.deepStrictEqual(error, expected);
|
||||
});
|
||||
|
||||
it('should return error when negative value', () => {
|
||||
const error = lifecycleConfiguration._checkDays({
|
||||
days: -1,
|
||||
field: 'a',
|
||||
ancestor: 'b',
|
||||
});
|
||||
const msg = "'a' in b action must be nonnegative";
|
||||
const expected = errors.InvalidArgument.customizeDescription(msg);
|
||||
assert.deepStrictEqual(error, expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('::_checkStorageClasses', () => {
|
||||
it('should return no error when StorageClass is first one used', () => {
|
||||
const error = lifecycleConfiguration._checkStorageClasses({
|
||||
usedStorageClasses: [],
|
||||
storageClass: 'a',
|
||||
});
|
||||
assert.strictEqual(error, null);
|
||||
});
|
||||
|
||||
it('should return no error when StorageClass has not been used', () => {
|
||||
const error = lifecycleConfiguration._checkStorageClasses({
|
||||
usedStorageClasses: ['a'],
|
||||
storageClass: 'b',
|
||||
});
|
||||
assert.strictEqual(error, null);
|
||||
});
|
||||
|
||||
it('should return error when unknown StorageClass is given', () => {
|
||||
const error = lifecycleConfiguration._checkStorageClasses({
|
||||
storageClass: 'c',
|
||||
});
|
||||
const msg = "'StorageClass' must be one of 'a', 'b'";
|
||||
const expected = errors.MalformedXML.customizeDescription(msg);
|
||||
assert.deepStrictEqual(error, expected);
|
||||
});
|
||||
|
||||
it('should return error when StorageClass has been used', () => {
|
||||
const error = lifecycleConfiguration._checkStorageClasses({
|
||||
usedStorageClasses: ['a'],
|
||||
storageClass: 'a',
|
||||
field: 'a',
|
||||
ancestor: 'b',
|
||||
rule: getParsedXML().LifecycleConfiguration.Rule[0],
|
||||
});
|
||||
const msg = "'StorageClass' must be different for 'b' actions " +
|
||||
"in same 'Rule' with prefix ''";
|
||||
const expected = errors.InvalidRequest.customizeDescription(msg);
|
||||
assert.deepStrictEqual(error, expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('::_checkTimeType', () => {
|
||||
it('should return no error when first time type in rule', () => {
|
||||
const error = lifecycleConfiguration._checkTimeType({
|
||||
usedTimeType: null,
|
||||
currentTimeType: 'Date',
|
||||
rule: {},
|
||||
});
|
||||
assert.strictEqual(error, null);
|
||||
});
|
||||
|
||||
it('should return no error when time type is same as others', () => {
|
||||
const error = lifecycleConfiguration._checkTimeType({
|
||||
usedTimeType: 'Date',
|
||||
currentTimeType: 'Date',
|
||||
rule: {},
|
||||
});
|
||||
assert.strictEqual(error, null);
|
||||
});
|
||||
|
||||
it('should return error when time type differs from others', () => {
|
||||
const error = lifecycleConfiguration._checkTimeType({
|
||||
usedTimeType: 'Date',
|
||||
currentTimeType: 'Days',
|
||||
rule: getParsedXML().LifecycleConfiguration.Rule[0],
|
||||
});
|
||||
const msg = "Found mixed 'Date' and 'Days' based Transition " +
|
||||
"actions in lifecycle rule for prefix ''";
|
||||
const expected = errors.InvalidRequest.customizeDescription(msg);
|
||||
assert.deepStrictEqual(error, expected);
|
||||
});
|
||||
|
||||
it('should return error when time type differs across expiration',
|
||||
() => {
|
||||
const error = lifecycleConfiguration._checkTimeType({
|
||||
usedTimeType: 'Date',
|
||||
currentTimeType: 'Date',
|
||||
rule: getParsedXML().LifecycleConfiguration.Rule[0],
|
||||
});
|
||||
const msg = "Found mixed 'Date' and 'Days' based Expiration and " +
|
||||
"Transition actions in lifecycle rule for prefix ''";
|
||||
const expected = errors.InvalidRequest.customizeDescription(msg);
|
||||
assert.deepStrictEqual(error, expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('::_checkDate', () => {
|
||||
it('should return no error when valid ISO date', () => {
|
||||
const date = '2016-01-01T00:00:00.000Z';
|
||||
const error = lifecycleConfiguration._checkDate(date);
|
||||
assert.strictEqual(error, null);
|
||||
});
|
||||
|
||||
it('should return error when invalid ISO date', () => {
|
||||
const date = 'Fri, 01 Jan 2016 00:00:00 GMT';
|
||||
const error = lifecycleConfiguration._checkDate(date);
|
||||
const msg = 'Date must be in ISO 8601 format';
|
||||
const expected = errors.InvalidArgument.customizeDescription(msg);
|
||||
assert.deepStrictEqual(error, expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('::_parseNoncurrentVersionTransition', () => {
|
||||
function getRule() {
|
||||
return {
|
||||
NoncurrentVersionTransition: [
|
||||
{
|
||||
NoncurrentDays: ['0'],
|
||||
StorageClass: ['a'],
|
||||
},
|
||||
{
|
||||
NoncurrentDays: ['1'],
|
||||
StorageClass: ['b'],
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
it('should return correctly parsed result object', () => {
|
||||
const rule = getRule();
|
||||
const result =
|
||||
lifecycleConfiguration._parseNoncurrentVersionTransition(rule);
|
||||
assert.deepStrictEqual(result, {
|
||||
nonCurrentVersionTransition: [
|
||||
{
|
||||
noncurrentDays: 0,
|
||||
storageClass: 'a',
|
||||
},
|
||||
{
|
||||
noncurrentDays: 1,
|
||||
storageClass: 'b',
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('should return parsed result object with error', () => {
|
||||
const rule = getRule();
|
||||
rule.NoncurrentVersionTransition[0].NoncurrentDays[0] = '-1';
|
||||
const result =
|
||||
lifecycleConfiguration._parseNoncurrentVersionTransition(rule);
|
||||
const msg = "'NoncurrentDays' in NoncurrentVersionTransition " +
|
||||
'action must be nonnegative';
|
||||
const error = errors.InvalidArgument.customizeDescription(msg);
|
||||
assert.deepStrictEqual(result.error, error);
|
||||
});
|
||||
});
|
||||
|
||||
describe('::_parseTransition with Days', () => {
|
||||
function getRule() {
|
||||
return {
|
||||
Transition: [
|
||||
{
|
||||
Days: ['0'],
|
||||
StorageClass: ['a'],
|
||||
},
|
||||
{
|
||||
Days: ['1'],
|
||||
StorageClass: ['b'],
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
it('should return correctly parsed result object', () => {
|
||||
const rule = getRule();
|
||||
const result = lifecycleConfiguration._parseTransition(rule);
|
||||
assert.deepStrictEqual(result, {
|
||||
transition: [
|
||||
{
|
||||
days: 0,
|
||||
storageClass: 'a',
|
||||
},
|
||||
{
|
||||
days: 1,
|
||||
storageClass: 'b',
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('should return parsed result object with error when days is ' +
|
||||
'negative', () => {
|
||||
const rule = getRule();
|
||||
rule.Transition[0].Days[0] = '-1';
|
||||
const result = lifecycleConfiguration._parseTransition(rule);
|
||||
const msg = "'Days' in Transition action must be nonnegative";
|
||||
const error = errors.InvalidArgument.customizeDescription(msg);
|
||||
assert.deepStrictEqual(result.error, error);
|
||||
});
|
||||
|
||||
it('should return parsed result object with error when two ' +
|
||||
'transition days are the same', () => {
|
||||
const rule = getRule();
|
||||
rule.Prefix = ['prefix'];
|
||||
rule.Transition[1].Days[0] = '0';
|
||||
const result = lifecycleConfiguration._parseTransition(rule);
|
||||
const msg = "'Days' in the 'Transition' action for StorageClass " +
|
||||
"'a' for prefix 'prefix' must be at least one day apart from " +
|
||||
"prefix 'prefix' in the 'Transition' action for StorageClass " +
|
||||
"'b'";
|
||||
const error = errors.InvalidArgument.customizeDescription(msg);
|
||||
assert.deepStrictEqual(result.error, error);
|
||||
});
|
||||
});
|
||||
|
||||
describe('::_parseTransition with Date', () => {
|
||||
it('should return parsed result object with error when dates are not ' +
|
||||
'more than one day apart', () => {
|
||||
const rule = {
|
||||
Prefix: ['prefix'],
|
||||
Transition: [
|
||||
{
|
||||
Date: ['2019-01-01T00:00:00.000Z'],
|
||||
StorageClass: ['a'],
|
||||
},
|
||||
{
|
||||
Date: ['2019-01-01T23:59:59.999Z'],
|
||||
StorageClass: ['b'],
|
||||
},
|
||||
],
|
||||
};
|
||||
const result = lifecycleConfiguration._parseTransition(rule);
|
||||
const msg = "'Date' in the 'Transition' action for StorageClass " +
|
||||
"'a' for prefix 'prefix' must be at least one day apart from " +
|
||||
"prefix 'prefix' in the 'Transition' action for StorageClass " +
|
||||
"'b'";
|
||||
const error = errors.InvalidArgument.customizeDescription(msg);
|
||||
assert.deepStrictEqual(result.error, error);
|
||||
});
|
||||
});
|
||||
|
||||
describe('::_checkTimeGap', () => {
|
||||
it('should not return error when only one transition', () => {
|
||||
const params = {
|
||||
rule: {
|
||||
Transition: [{
|
||||
Days: ['0'],
|
||||
StorageClass: ['a'],
|
||||
}],
|
||||
},
|
||||
days: 0,
|
||||
storageClass: 'a',
|
||||
};
|
||||
const error = lifecycleConfiguration._checkTimeGap(params);
|
||||
assert.strictEqual(error, undefined);
|
||||
});
|
||||
|
||||
it('should not return error when transitions have days greater than ' +
|
||||
'24 hours apart', () => {
|
||||
const params = {
|
||||
rule: {
|
||||
Transition: [{
|
||||
Days: ['0'],
|
||||
StorageClass: ['a'],
|
||||
}, {
|
||||
Days: ['1'],
|
||||
StorageClass: ['b'],
|
||||
}],
|
||||
},
|
||||
days: 0,
|
||||
storageClass: 'a',
|
||||
};
|
||||
const error = lifecycleConfiguration._checkTimeGap(params);
|
||||
assert.strictEqual(error, undefined);
|
||||
});
|
||||
|
||||
it('should return error when transitions have same day', () => {
|
||||
const params = {
|
||||
rule: {
|
||||
Prefix: 'prefix',
|
||||
Transition: [{
|
||||
Days: ['0'],
|
||||
StorageClass: ['a'],
|
||||
}, {
|
||||
Days: ['0'],
|
||||
StorageClass: ['b'],
|
||||
}],
|
||||
},
|
||||
days: 0,
|
||||
storageClass: 'a',
|
||||
};
|
||||
const error = lifecycleConfiguration._checkTimeGap(params);
|
||||
assert(error.InvalidArgument);
|
||||
});
|
||||
|
||||
it('should not return error when transitions have dates greater than ' +
|
||||
'24 hours apart', () => {
|
||||
const params = {
|
||||
rule: {
|
||||
Transition: [{
|
||||
Date: ['2019-01-01T00:00:00.000Z'],
|
||||
StorageClass: ['a'],
|
||||
}, {
|
||||
Date: ['2019-01-02T00:00:00.000Z'],
|
||||
StorageClass: ['b'],
|
||||
}],
|
||||
},
|
||||
date: '2019-01-01T00:00:00.000Z',
|
||||
storageClass: 'a',
|
||||
};
|
||||
const error = lifecycleConfiguration._checkTimeGap(params);
|
||||
assert.strictEqual(error, undefined);
|
||||
});
|
||||
|
||||
it('should return error when transitions have dates less than 24 ' +
|
||||
'hours apart', () => {
|
||||
const params = {
|
||||
rule: {
|
||||
Prefix: 'prefix',
|
||||
Transition: [{
|
||||
Date: ['2019-01-01T00:00:00.000Z'],
|
||||
StorageClass: ['a'],
|
||||
}, {
|
||||
Date: ['2019-01-01T23:59:59.999Z'],
|
||||
StorageClass: ['b'],
|
||||
}],
|
||||
},
|
||||
date: '2019-01-01T00:00:00.000Z',
|
||||
storageClass: 'a',
|
||||
};
|
||||
const error = lifecycleConfiguration._checkTimeGap(params);
|
||||
assert(error.InvalidArgument);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('LifecycleConfiguration::getConfigJson', () => {
|
||||
const tests = [
|
||||
[
|
||||
|
@ -744,7 +1207,7 @@ describe('LifecycleConfiguration::getConfigJson', () => {
|
|||
`should return correct configuration: ${msg}`, () => {
|
||||
assert.deepStrictEqual(
|
||||
LifecycleConfiguration.getConfigJson(input),
|
||||
expected
|
||||
expected,
|
||||
);
|
||||
}));
|
||||
});
|
||||
|
|
|
@ -35,6 +35,9 @@ describe('ObjectMD class setters/getters', () => {
|
|||
['LastModified', new Date().toJSON()],
|
||||
['ContentMd5', null, ''],
|
||||
['ContentMd5', 'content-md5'],
|
||||
['ContentLanguage', null, ''],
|
||||
['ContentLanguage', 'content-language', ''],
|
||||
['CreationTime', new Date().toJSON()],
|
||||
['AmzVersionId', null, 'null'],
|
||||
['AmzVersionId', 'version-id'],
|
||||
['AmzServerVersionId', null, ''],
|
||||
|
@ -91,6 +94,7 @@ describe('ObjectMD class setters/getters', () => {
|
|||
role: '',
|
||||
storageType: '',
|
||||
dataStoreVersionId: '',
|
||||
isNFS: null,
|
||||
}],
|
||||
['ReplicationInfo', {
|
||||
status: 'PENDING',
|
||||
|
@ -106,8 +110,24 @@ describe('ObjectMD class setters/getters', () => {
|
|||
'arn:aws:iam::account-id:role/dest-resource',
|
||||
storageType: 'aws_s3',
|
||||
dataStoreVersionId: '',
|
||||
isNFS: null,
|
||||
}],
|
||||
['DataStoreName', null, ''],
|
||||
['ReplicationIsNFS', null, null],
|
||||
['ReplicationIsNFS', true],
|
||||
['AzureInfo', {
|
||||
containerPublicAccess: 'container',
|
||||
containerStoredAccessPolicies: [],
|
||||
containerImmutabilityPolicy: {},
|
||||
containerLegalHoldStatus: false,
|
||||
containerDeletionInProgress: false,
|
||||
blobType: 'BlockBlob',
|
||||
blobContentMD5: 'ABCDEF==',
|
||||
blobCopyInfo: {},
|
||||
blobSequenceNumber: 42,
|
||||
blobAccessTierChangeTime: 'abcdef',
|
||||
blobUncommitted: false,
|
||||
}],
|
||||
['LegalHold', null, false],
|
||||
['LegalHold', true],
|
||||
['RetentionMode', 'GOVERNANCE'],
|
||||
|
@ -163,6 +183,11 @@ describe('ObjectMD class setters/getters', () => {
|
|||
}]);
|
||||
});
|
||||
|
||||
it('ObjectMD::setReplicationStorageType', () => {
|
||||
md.setReplicationStorageType('a');
|
||||
assert.strictEqual(md.getReplicationStorageType(), 'a');
|
||||
});
|
||||
|
||||
it('ObjectMD::setReplicationStorageClass', () => {
|
||||
md.setReplicationStorageClass('a');
|
||||
assert.strictEqual(md.getReplicationStorageClass(), 'a');
|
||||
|
@@ -207,6 +232,65 @@ describe('ObjectMD class setters/getters', () => {
            md.getReplicationSiteDataStoreVersionId('zenko'), 'a');
    });

    it('ObjectMd::isMultipartUpload', () => {
        md.setContentMd5('68b329da9893e34099c7d8ad5cb9c940');
        assert.strictEqual(md.isMultipartUpload(), false);
        md.setContentMd5('741e0f4bad5b093044dc54a74d911094-1');
        assert.strictEqual(md.isMultipartUpload(), true);
        md.setContentMd5('bda0c0bed89c8bdb9e409df7ae7073c5-9876');
        assert.strictEqual(md.isMultipartUpload(), true);
    });

    it('ObjectMD::getUserMetadata', () => {
        md.setUserMetadata({
            'x-amz-meta-foo': 'bar',
            'x-amz-meta-baz': 'qux',
            // This one should be filtered out
            'x-amz-storage-class': 'STANDARD_IA',
            // This one should be changed to 'x-amz-meta-foobar'
            'x-ms-meta-foobar': 'bar',
            // ACLs are updated
            'acl': {
                FULL_CONTROL: ['john'],
            },
        });
        assert.deepStrictEqual(JSON.parse(md.getUserMetadata()), {
            'x-amz-meta-foo': 'bar',
            'x-amz-meta-baz': 'qux',
            'x-amz-meta-foobar': 'bar',
        });
        assert.deepStrictEqual(md.getAcl(), {
            FULL_CONTROL: ['john'],
        });
    });

    it('ObjectMD:clearMetadataValues', () => {
        md.setUserMetadata({
            'x-amz-meta-foo': 'bar',
        });
        md.clearMetadataValues();
        assert.strictEqual(md.getUserMetadata(), undefined);
    });

    it('ObjectMD::microVersionId unset', () => {
        assert.strictEqual(md.getMicroVersionId(), null);
    });

    it('ObjectMD::microVersionId set', () => {
        const generatedIds = new Set();
        for (let i = 0; i < 100; ++i) {
            md.updateMicroVersionId();
            generatedIds.add(md.getMicroVersionId());
        }
        // all generated IDs should be different
        assert.strictEqual(generatedIds.size, 100);
        generatedIds.forEach(key => {
            // length is always 16 in hex because leading 0s are
            // also encoded in the 8-byte random buffer.
            assert.strictEqual(key.length, 16);
        });
    });

    it('ObjectMD::set/getRetentionMode', () => {
        md.setRetentionMode('COMPLIANCE');
        assert.deepStrictEqual(md.getRetentionMode(), 'COMPLIANCE');
@ -324,6 +408,8 @@ describe('getAttributes static method', () => {
|
|||
'content-length': true,
|
||||
'content-type': true,
|
||||
'content-md5': true,
|
||||
'content-language': true,
|
||||
'creation-time': true,
|
||||
'x-amz-version-id': true,
|
||||
'x-amz-server-version-id': true,
|
||||
'x-amz-storage-class': true,
|
||||
|
@ -334,6 +420,7 @@ describe('getAttributes static method', () => {
|
|||
'acl': true,
|
||||
'key': true,
|
||||
'location': true,
|
||||
'azureInfo': true,
|
||||
'isNull': true,
|
||||
'nullVersionId': true,
|
||||
'nullUploadId': true,
|
||||
|
@ -361,6 +448,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 0,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
|
||||
dataStoreVersionId: 'someversion1',
|
||||
blockId: 'someBlockId1',
|
||||
},
|
||||
{
|
||||
key: '4e67844b674b093a9e109d42172922ea1f32ec12',
|
||||
|
@ -368,6 +457,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 1,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
|
||||
dataStoreVersionId: 'someversion2',
|
||||
blockId: 'someBlockId2',
|
||||
},
|
||||
];
|
||||
md.setLocation(locations);
|
||||
|
@ -383,6 +474,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 0,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
|
||||
dataStoreVersionId: 'someversion',
|
||||
blockId: 'someBlockId',
|
||||
},
|
||||
{
|
||||
key: 'deebfb287cfcee1d137b0136562d2d776ba491e1',
|
||||
|
@ -390,6 +483,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 1,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
|
||||
dataStoreVersionId: 'someversion',
|
||||
blockId: 'someBlockId',
|
||||
},
|
||||
{
|
||||
key: '4e67844b674b093a9e109d42172922ea1f32ec12',
|
||||
|
@ -397,6 +492,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 2,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
|
||||
dataStoreVersionId: 'someversion2',
|
||||
blockId: 'someBlockId2',
|
||||
},
|
||||
]);
|
||||
assert.deepStrictEqual(md.getReducedLocations(), [
|
||||
|
@ -406,6 +503,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 0,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
|
||||
dataStoreVersionId: 'someversion',
|
||||
blockId: 'someBlockId',
|
||||
},
|
||||
{
|
||||
key: '4e67844b674b093a9e109d42172922ea1f32ec12',
|
||||
|
@ -413,6 +512,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 2,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
|
||||
dataStoreVersionId: 'someversion2',
|
||||
blockId: 'someBlockId2',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
@ -426,6 +527,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 0,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
|
||||
dataStoreVersionId: 'someversion',
|
||||
blockId: 'someBlockId',
|
||||
},
|
||||
{
|
||||
key: 'deebfb287cfcee1d137b0136562d2d776ba491e1',
|
||||
|
@ -433,6 +536,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 1,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
|
||||
dataStoreVersionId: 'someversion2',
|
||||
blockId: 'someBlockId2',
|
||||
},
|
||||
{
|
||||
key: '4e67844b674b093a9e109d42172922ea1f32ec12',
|
||||
|
@ -440,6 +545,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 2,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
|
||||
dataStoreVersionId: 'someversion2',
|
||||
blockId: 'someBlockId2',
|
||||
},
|
||||
]);
|
||||
assert.deepStrictEqual(md.getReducedLocations(), [
|
||||
|
@ -449,6 +556,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 0,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
|
||||
dataStoreVersionId: 'someversion',
|
||||
blockId: 'someBlockId',
|
||||
},
|
||||
{
|
||||
key: '4e67844b674b093a9e109d42172922ea1f32ec12',
|
||||
|
@ -456,6 +565,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 1,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
|
||||
dataStoreVersionId: 'someversion2',
|
||||
blockId: 'someBlockId2',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
@ -469,6 +580,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 0,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
|
||||
dataStoreVersionId: 'someversion',
|
||||
blockId: 'someBlockId',
|
||||
},
|
||||
{
|
||||
key: 'c1c1e055b19eb5a61adb8a665e626ff589cff234',
|
||||
|
@ -476,6 +589,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 1,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
|
||||
dataStoreVersionId: 'someversion',
|
||||
blockId: 'someBlockId',
|
||||
},
|
||||
{
|
||||
key: 'deebfb287cfcee1d137b0136562d2d776ba491e1',
|
||||
|
@ -483,6 +598,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 3,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
|
||||
dataStoreVersionId: 'someversion',
|
||||
blockId: 'someBlockId',
|
||||
},
|
||||
{
|
||||
key: '8e67844b674b093a9e109d42172922ea1f32ec14',
|
||||
|
@ -490,6 +607,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 4,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
|
||||
dataStoreVersionId: 'someversion2',
|
||||
blockId: 'someBlockId2',
|
||||
},
|
||||
{
|
||||
key: 'd1d1e055b19eb5a61adb8a665e626ff589cff233',
|
||||
|
@ -497,6 +616,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 7,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
|
||||
dataStoreVersionId: 'someversion2',
|
||||
blockId: 'someBlockId2',
|
||||
},
|
||||
{
|
||||
key: '0e67844b674b093a9e109d42172922ea1f32ec11',
|
||||
|
@ -504,6 +625,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 17,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
|
||||
dataStoreVersionId: 'someversion2',
|
||||
blockId: 'someBlockId2',
|
||||
},
|
||||
{
|
||||
key: '8e67844b674b093a9e109d42172922ea1f32ec14',
|
||||
|
@ -511,6 +634,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 27,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '3:1ca655158ca025aa00a818b6b81f9e4c',
|
||||
dataStoreVersionId: 'someversion3',
|
||||
blockId: 'someBlockId3',
|
||||
},
|
||||
{
|
||||
key: '7e67844b674b093a9e109d42172922ea1f32ec1f',
|
||||
|
@ -518,6 +643,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 42,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '3:1ca655158ca025aa00a818b6b81f9e4c',
|
||||
dataStoreVersionId: 'someversion3',
|
||||
blockId: 'someBlockId3',
|
||||
},
|
||||
{
|
||||
key: '1237844b674b093a9e109d42172922ea1f32ec19',
|
||||
|
@ -525,6 +652,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 44,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d',
|
||||
dataStoreVersionId: 'someversion4',
|
||||
blockId: 'someBlockId4',
|
||||
},
|
||||
{
|
||||
key: '4567844b674b093a9e109d42172922ea1f32ec00',
|
||||
|
@ -532,6 +661,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 50,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d',
|
||||
dataStoreVersionId: 'someversion4',
|
||||
blockId: 'someBlockId4',
|
||||
},
|
||||
{
|
||||
key: '53d7844b674b093a9e109d42172922ea1f32ec02',
|
||||
|
@ -539,6 +670,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 54,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d',
|
||||
dataStoreVersionId: 'someversion4',
|
||||
blockId: 'someBlockId4',
|
||||
},
|
||||
{
|
||||
key: '6f6d7844b674b093a9e109d42172922ea1f32ec01',
|
||||
|
@ -546,6 +679,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 63,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d',
|
||||
dataStoreVersionId: 'someversion4',
|
||||
blockId: 'someBlockId4',
|
||||
},
|
||||
]);
|
||||
assert.deepStrictEqual(md.getReducedLocations(), [
|
||||
|
@ -555,6 +690,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 0,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
|
||||
dataStoreVersionId: 'someversion',
|
||||
blockId: 'someBlockId',
|
||||
},
|
||||
{
|
||||
key: '0e67844b674b093a9e109d42172922ea1f32ec11',
|
||||
|
@ -562,6 +699,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 4,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
|
||||
dataStoreVersionId: 'someversion2',
|
||||
blockId: 'someBlockId2',
|
||||
},
|
||||
{
|
||||
key: '7e67844b674b093a9e109d42172922ea1f32ec1f',
|
||||
|
@ -569,6 +708,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 27,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '3:1ca655158ca025aa00a818b6b81f9e4c',
|
||||
dataStoreVersionId: 'someversion3',
|
||||
blockId: 'someBlockId3',
|
||||
},
|
||||
{
|
||||
key: '6f6d7844b674b093a9e109d42172922ea1f32ec01',
|
||||
|
@ -576,6 +717,8 @@ describe('ObjectMD::getReducedLocations', () => {
|
|||
start: 44,
|
||||
dataStoreName: 'file',
|
||||
dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d',
|
||||
dataStoreVersionId: 'someversion4',
|
||||
blockId: 'someBlockId4',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
|
@@ -0,0 +1,102 @@
const assert = require('assert');
const ObjectMDAzureInfo = require('../../../index').models.ObjectMDAzureInfo;

const testAzureInfoObj = {
    containerPublicAccess: 'container',
    containerStoredAccessPolicies: [],
    containerImmutabilityPolicy: {},
    containerLegalHoldStatus: false,
    containerDeletionInProgress: false,
    blobType: 'BlockBlob',
    blobContentMD5: 'ABCDEF==',
    blobIssuedETag: '0xabcdef',
    blobCopyInfo: {},
    blobSequenceNumber: 42,
    blobAccessTierChangeTime: 'abcdef',
    blobUncommitted: false,
};

const azureInfo = new ObjectMDAzureInfo(testAzureInfoObj);

describe('ObjectMDAzureInfo value', () => {
    it('should return the correct value', () => {
        const azureInfoObj = azureInfo.getValue();
        assert.deepStrictEqual(azureInfoObj, testAzureInfoObj);
    });
});

describe('ObjectMDAzureInfo setters/getters', () => {
    it('should control the containerPublicAccess attribute', () => {
        const containerPublicAccess = 'new public access value';
        azureInfo.setContainerPublicAccess(containerPublicAccess);
        assert.deepStrictEqual(azureInfo.getContainerPublicAccess(),
            containerPublicAccess);
    });
    it('should control the containerStoredAccessPolicies attribute', () => {
        const containerStoredAccessPolicies = [{}];
        azureInfo.setContainerStoredAccessPolicies(
            containerStoredAccessPolicies);
        assert.deepStrictEqual(azureInfo.getContainerStoredAccessPolicies(),
            containerStoredAccessPolicies);
    });
    it('should control the containerImmutabilityPolicy attribute', () => {
        const containerImmutabilityPolicy = { foo: 1 };
        azureInfo.setContainerImmutabilityPolicy(containerImmutabilityPolicy);
        assert.deepStrictEqual(azureInfo.getContainerImmutabilityPolicy(),
            containerImmutabilityPolicy);
    });
    it('should control the containerLegalHoldStatus attribute', () => {
        const containerLegalHoldStatus = true;
        azureInfo.setContainerLegalHoldStatus(containerLegalHoldStatus);
        assert.deepStrictEqual(azureInfo.getContainerLegalHoldStatus(),
            containerLegalHoldStatus);
    });
    it('should control the containerDeletionInProgress attribute', () => {
        const containerDeletionInProgress = true;
        azureInfo.setContainerDeletionInProgress(containerDeletionInProgress);
        assert.deepStrictEqual(azureInfo.getContainerDeletionInProgress(),
            containerDeletionInProgress);
    });
    it('should control the blobType attribute', () => {
        const blobType = 'PlopBlob';
        azureInfo.setBlobType(blobType);
        assert.deepStrictEqual(azureInfo.getBlobType(),
            blobType);
    });
    it('should control the blobContentMD5 attribute', () => {
        const blobContentMD5 = 'ABC';
        azureInfo.setBlobContentMD5(blobContentMD5);
        assert.deepStrictEqual(azureInfo.getBlobContentMD5(),
            blobContentMD5);
    });
    it('should control the blobIssuedETag attribute', () => {
        const blobIssuedETag = '0x123456';
        azureInfo.setBlobIssuedETag(blobIssuedETag);
        assert.deepStrictEqual(azureInfo.getBlobIssuedETag(),
            blobIssuedETag);
    });
    it('should control the blobCopyInfo attribute', () => {
        const blobCopyInfo = { meh: 46 };
        azureInfo.setBlobCopyInfo(blobCopyInfo);
        assert.deepStrictEqual(azureInfo.getBlobCopyInfo(),
            blobCopyInfo);
    });
    it('should control the blobSequenceNumber attribute', () => {
        const blobSequenceNumber = 8888;
        azureInfo.setBlobSequenceNumber(blobSequenceNumber);
        assert.deepStrictEqual(azureInfo.getBlobSequenceNumber(),
            blobSequenceNumber);
    });
    it('should control the blobAccessTierChangeTime attribute', () => {
        const blobAccessTierChangeTime = 'MMXIX';
        azureInfo.setBlobAccessTierChangeTime(blobAccessTierChangeTime);
        assert.deepStrictEqual(azureInfo.getBlobAccessTierChangeTime(),
            blobAccessTierChangeTime);
    });
    it('should control the blobUncommitted attribute', () => {
        const blobUncommitted = true;
        azureInfo.setBlobUncommitted(blobUncommitted);
        assert.deepStrictEqual(azureInfo.getBlobUncommitted(),
            blobUncommitted);
    });
});

@@ -9,6 +9,8 @@ describe('ObjectMDLocation', () => {
        size: 100,
        dataStoreName: 'awsbackend',
        dataStoreETag: '2:abcdefghi',
        dataStoreVersionId: 'someversion',
        blockId: 'someBlockId',
        cryptoScheme: 1,
        cipheredDataKey: 'CiPhErEdDaTaKeY',
    };
@@ -16,10 +18,12 @@ describe('ObjectMDLocation', () => {
        assert.strictEqual(location.getKey(), 'fookey');
        assert.strictEqual(location.getDataStoreName(), 'awsbackend');
        assert.strictEqual(location.getDataStoreETag(), '2:abcdefghi');
        assert.strictEqual(location.getDataStoreVersionId(), 'someversion');
        assert.strictEqual(location.getPartNumber(), 2);
        assert.strictEqual(location.getPartETag(), 'abcdefghi');
        assert.strictEqual(location.getPartStart(), 42);
        assert.strictEqual(location.getPartSize(), 100);
        assert.strictEqual(location.getBlockId(), 'someBlockId');
        assert.strictEqual(location.getCryptoScheme(), 1);
        assert.strictEqual(location.getCipheredDataKey(), 'CiPhErEdDaTaKeY');

@@ -36,6 +40,8 @@ describe('ObjectMDLocation', () => {
            size: 100,
            dataStoreName: 'awsbackend',
            dataStoreETag: '2:abcdefghi',
            dataStoreVersionId: 'someversion',
            blockId: 'someBlockId',
            cryptoScheme: 1,
            cipheredDataKey: 'CiPhErEdDaTaKeY',
        });
@@ -43,6 +49,7 @@ describe('ObjectMDLocation', () => {
            dataStoreName: 'gcpbackend' });
        assert.strictEqual(location.getKey(), 'secondkey');
        assert.strictEqual(location.getDataStoreName(), 'gcpbackend');
        assert.strictEqual(location.getDataStoreVersionId(), undefined);
        assert.strictEqual(location.getCryptoScheme(), undefined);
        assert.strictEqual(location.getCipheredDataKey(), undefined);
        assert.deepStrictEqual(location.getValue(), {
@@ -51,14 +58,19 @@ describe('ObjectMDLocation', () => {
            key: 'secondkey',
            size: 100,
            start: 42,
            blockId: 'someBlockId',
        });
        location.setDataLocation({ key: 'thirdkey',
            dataStoreName: 'azurebackend',
            dataStoreVersionId: 'newversion',
            cryptoScheme: 1,
            cipheredDataKey: 'NeWcIpHeReDdAtAkEy' });
        assert.strictEqual(location.getKey(), 'thirdkey');
        assert.strictEqual(location.getDataStoreName(), 'azurebackend');
        assert.strictEqual(location.getDataStoreVersionId(), 'newversion');
        assert.strictEqual(location.getCryptoScheme(), 1);
        assert.strictEqual(location.getCipheredDataKey(), 'NeWcIpHeReDdAtAkEy');
        location.setBlockId('otherBlockId');
        assert.strictEqual(location.getBlockId(), 'otherBlockId');
    });
});
|
|
@ -0,0 +1,74 @@
|
|||
const assert = require('assert');
|
||||
const { parseString } = require('xml2js');
|
||||
|
||||
const werelogs = require('werelogs');
|
||||
|
||||
const ReplicationConfiguration =
|
||||
require('../../../lib/models/ReplicationConfiguration');
|
||||
|
||||
const logger = new werelogs.Logger('test:ReplicationConfiguration');
|
||||
|
||||
const mockedConfig = {
|
||||
replicationEndpoints: [{
|
||||
type: 'scality',
|
||||
site: 'ring',
|
||||
default: true,
|
||||
}, {
|
||||
type: 'aws_s3',
|
||||
site: 'awsbackend',
|
||||
}, {
|
||||
type: 'gcp',
|
||||
site: 'gcpbackend',
|
||||
}, {
|
||||
type: 'azure',
|
||||
site: 'azurebackend',
|
||||
}],
|
||||
};
|
||||
|
||||
|
||||
function getXMLConfig(hasPreferredRead) {
|
||||
return `
|
||||
<ReplicationConfiguration>
|
||||
<Role>arn:aws:iam::root:role/s3-replication-role</Role>
|
||||
<Rule>
|
||||
<ID>Replication-Rule-1</ID>
|
||||
<Status>Enabled</Status>
|
||||
<Prefix>someprefix/</Prefix>
|
||||
<Destination>
|
||||
<Bucket>arn:aws:s3:::destbucket</Bucket>
|
||||
<StorageClass>awsbackend,` +
|
||||
`gcpbackend${hasPreferredRead ? ':preferred_read' : ''},azurebackend` +
|
||||
`</StorageClass>
|
||||
</Destination>
|
||||
</Rule>
|
||||
</ReplicationConfiguration>
|
||||
`;
|
||||
}
|
||||
|
||||
describe('ReplicationConfiguration class', () => {
|
||||
it('should parse replication config XML without preferred read', done => {
|
||||
const repConfigXML = getXMLConfig(false);
|
||||
parseString(repConfigXML, (err, parsedXml) => {
|
||||
assert.ifError(err);
|
||||
const repConf = new ReplicationConfiguration(
|
||||
parsedXml, logger, mockedConfig);
|
||||
const repConfErr = repConf.parseConfiguration();
|
||||
assert.ifError(repConfErr);
|
||||
assert.strictEqual(repConf.getPreferredReadLocation(), null);
|
||||
done();
|
||||
});
|
||||
});
|
||||
it('should parse replication config XML with preferred read', done => {
|
||||
const repConfigXML = getXMLConfig(true);
|
||||
parseString(repConfigXML, (err, parsedXml) => {
|
||||
assert.ifError(err);
|
||||
const repConf = new ReplicationConfiguration(
|
||||
parsedXml, logger, mockedConfig);
|
||||
const repConfErr = repConf.parseConfiguration();
|
||||
assert.ifError(repConfErr);
|
||||
assert.strictEqual(repConf.getPreferredReadLocation(),
|
||||
'gcpbackend');
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
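For reference, the ':preferred_read' modifier toggled by the two tests above travels inside the comma-separated <StorageClass> value. A minimal sketch of extracting it from that format (an illustration of the format only, not the ReplicationConfiguration parser itself):

// Illustration of the "<site>[:preferred_read]" entries in <StorageClass>.
const storageClass = 'awsbackend,gcpbackend:preferred_read,azurebackend';
const preferredRead = storageClass
    .split(',')
    .map(entry => entry.split(':'))
    .filter(parts => parts[1] === 'preferred_read')
    .map(parts => parts[0])[0] || null;
console.log(preferredRead); // 'gcpbackend'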
|
|
@@ -0,0 +1,212 @@
|
|||
const assert = require('assert');
|
||||
const HealthProbeServer =
|
||||
require('../../../../lib/network/probe/HealthProbeServer');
|
||||
const http = require('http');
|
||||
|
||||
function makeRequest(meth, uri) {
|
||||
const params = {
|
||||
hostname: 'localhost',
|
||||
port: 4042,
|
||||
method: meth,
|
||||
path: uri,
|
||||
};
|
||||
const req = http.request(params);
|
||||
req.setNoDelay(true);
|
||||
return req;
|
||||
}
|
||||
|
||||
const healthcheckEndpoints = [
|
||||
'/_/health/liveness',
|
||||
'/_/health/readiness',
|
||||
];
|
||||
|
||||
const badHealthcheckEndpoints = [
|
||||
'/_/health/liveness_thisiswrong',
|
||||
'/_/health/readiness_thisiswrong',
|
||||
];
|
||||
|
||||
describe('network.probe.HealthProbeServer', () => {
|
||||
describe('service is "up"', () => {
|
||||
let server;
|
||||
function setup(done) {
|
||||
server = new HealthProbeServer({ port: 4042 });
|
||||
server._cbOnListening = done;
|
||||
server.start();
|
||||
}
|
||||
|
||||
beforeAll(done => {
|
||||
setup(done);
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
server.stop();
|
||||
done();
|
||||
});
|
||||
healthcheckEndpoints.forEach(ep => {
|
||||
it('should perform a GET and ' +
|
||||
'return 200 OK', done => {
|
||||
makeRequest('GET', ep)
|
||||
.on('response', res => {
|
||||
assert(res.statusCode === 200);
|
||||
done();
|
||||
})
|
||||
.on('error', err => {
|
||||
assert.ifError(err);
|
||||
done();
|
||||
}).end();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('service is "down"', () => {
|
||||
let server;
|
||||
function setup(done) {
|
||||
function falseStub() {
|
||||
return false;
|
||||
}
|
||||
server = new HealthProbeServer({
|
||||
port: 4042,
|
||||
livenessCheck: falseStub,
|
||||
readinessCheck: falseStub,
|
||||
});
|
||||
server.start();
|
||||
done();
|
||||
}
|
||||
|
||||
beforeAll(done => {
|
||||
setup(done);
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
server.stop();
|
||||
done();
|
||||
});
|
||||
|
||||
healthcheckEndpoints.forEach(ep => {
|
||||
it('should perform a GET and ' +
|
||||
'return 503 ServiceUnavailable', done => {
|
||||
makeRequest('GET', ep)
|
||||
.on('response', res => {
|
||||
assert(res.statusCode === 503);
|
||||
done();
|
||||
})
|
||||
.on('error', err => {
|
||||
assert.ifError(err);
|
||||
done();
|
||||
}).end();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid Methods', () => {
|
||||
jest.setTimeout(10000);
|
||||
let server;
|
||||
function setup(done) {
|
||||
server = new HealthProbeServer({
|
||||
port: 4042,
|
||||
});
|
||||
server.start();
|
||||
done();
|
||||
}
|
||||
|
||||
beforeAll(done => {
|
||||
setup(done);
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
server.stop();
|
||||
done();
|
||||
});
|
||||
|
||||
healthcheckEndpoints.forEach(ep => {
|
||||
it('should perform a POST and ' +
|
||||
'return 405 MethodNotAllowed', done => {
|
||||
makeRequest('POST', ep)
|
||||
.on('response', res => {
|
||||
assert(res.statusCode === 405);
|
||||
done();
|
||||
})
|
||||
.on('error', err => {
|
||||
assert.ifError(err);
|
||||
done();
|
||||
}).end();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid URI', () => {
|
||||
let server;
|
||||
function setup(done) {
|
||||
server = new HealthProbeServer({
|
||||
port: 4042,
|
||||
});
|
||||
server.start();
|
||||
done();
|
||||
}
|
||||
|
||||
beforeAll(done => {
|
||||
setup(done);
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
server.stop();
|
||||
done();
|
||||
});
|
||||
|
||||
badHealthcheckEndpoints.forEach(ep => {
|
||||
it('should perform a GET and ' +
|
||||
'return 400 InvalidURI', done => {
|
||||
makeRequest('GET', ep)
|
||||
.on('response', res => {
|
||||
assert(res.statusCode === 400);
|
||||
done();
|
||||
})
|
||||
.on('error', err => {
|
||||
assert.ifError(err);
|
||||
done();
|
||||
}).end();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('metrics route', () => {
|
||||
let server;
|
||||
function setup(done) {
|
||||
server = new HealthProbeServer({ port: 4042 });
|
||||
server._cbOnListening = done;
|
||||
server.start();
|
||||
}
|
||||
|
||||
beforeAll(done => {
|
||||
setup(done);
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
server.stop();
|
||||
done();
|
||||
});
|
||||
it('should expose metrics', done => {
|
||||
makeRequest('GET', '/_/monitoring/metrics')
|
||||
.on('response', res => {
|
||||
assert(res.statusCode === 200);
|
||||
const respBufs = [];
|
||||
res.on('data', data => {
|
||||
respBufs.push(data);
|
||||
});
|
||||
res.on('end', () => {
|
||||
const respContents = respBufs.join('');
|
||||
assert(respContents.length > 0);
|
||||
done();
|
||||
});
|
||||
res.on('error', err => {
|
||||
assert.ifError(err);
|
||||
done();
|
||||
});
|
||||
})
|
||||
.on('error', err => {
|
||||
assert.ifError(err);
|
||||
done();
|
||||
}).end();
|
||||
});
|
||||
});
|
||||
});
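The suite above pins down a simple contract: a GET on a known health endpoint returns 200 when the configured check passes, 503 when it fails, 405 for other methods, and 400 for unknown URIs. A minimal sketch of that contract with plain Node http (not the HealthProbeServer implementation; isHealthy is an assumed application callback):

const http = require('http');

// Sketch of the 200/503/405 behaviour exercised above, with an assumed
// isHealthy() callback supplied by the application.
function startProbe(isHealthy, port) {
    return http.createServer((req, res) => {
        if (req.method !== 'GET') {
            res.writeHead(405);
            return res.end();
        }
        if (isHealthy()) {
            res.writeHead(200);
            return res.end('OK');
        }
        res.writeHead(503);
        return res.end();
    }).listen(port);
}

// e.g. startProbe(() => true, 4042);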
|
|
@@ -52,7 +52,7 @@ describe('network.probe.ProbeServer', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('does nothing if probe successful', done => {
|
||||
it('allows probe to handle requests', done => {
|
||||
server.addHandler('/check', res => {
|
||||
res.writeHead(200);
|
||||
res.end();
|
||||
|
@@ -87,20 +87,4 @@ describe('network.probe.ProbeServer', () => {
|
|||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('500 response on bad probe', done => {
|
||||
server.addHandler('/check', () => 'check failed');
|
||||
makeRequest('GET', '/check', (err, res) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(res.statusCode, 500);
|
||||
res.setEncoding('utf8');
|
||||
res.on('data', body => {
|
||||
assert.strictEqual(
|
||||
body,
|
||||
'{"errorType":"InternalError","errorMessage":"check failed"}',
|
||||
);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -0,0 +1,74 @@
|
|||
const assert = require('assert');
|
||||
const errors = require('../../../../lib/errors');
|
||||
const { sendError, sendSuccess } = require('../../../../lib/network/probe/Utils');
|
||||
const sinon = require('sinon');
|
||||
|
||||
describe('network.probe.Utils', () => {
|
||||
let mockLogger;
|
||||
|
||||
beforeEach(() => {
|
||||
mockLogger = {
|
||||
debug: sinon.fake(),
|
||||
};
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
sinon.restore();
|
||||
});
|
||||
|
||||
it('send success will return 200 OK', done => {
|
||||
const mockRes = {
|
||||
writeHead: sinon.fake(status => assert.strictEqual(200, status)),
|
||||
end: sinon.fake(msg => {
|
||||
assert.strictEqual(msg, 'OK');
|
||||
done();
|
||||
}),
|
||||
};
|
||||
sendSuccess(mockRes, mockLogger);
|
||||
});
|
||||
|
||||
it('send success will return 200 and optional message', done => {
|
||||
const mockRes = {
|
||||
writeHead: sinon.fake(status => assert.strictEqual(200, status)),
|
||||
end: sinon.fake(msg => {
|
||||
assert.strictEqual(msg, 'Granted');
|
||||
done();
|
||||
}),
|
||||
};
|
||||
sendSuccess(mockRes, mockLogger, 'Granted');
|
||||
});
|
||||
|
||||
it('send error will send an Arsenal Error and code', done => {
|
||||
const mockRes = {
|
||||
writeHead: sinon.fake(status => assert.strictEqual(405, status)),
|
||||
end: sinon.fake(msg => {
|
||||
assert.deepStrictEqual(
|
||||
JSON.parse(msg),
|
||||
{
|
||||
errorType: 'MethodNotAllowed',
|
||||
errorMessage: errors.MethodNotAllowed.description,
|
||||
},
|
||||
);
|
||||
done();
|
||||
}),
|
||||
};
|
||||
sendError(mockRes, mockLogger, errors.MethodNotAllowed);
|
||||
});
|
||||
|
||||
it('send error will send an Arsenal Error and code using optional message', done => {
|
||||
const mockRes = {
|
||||
writeHead: sinon.fake(status => assert.strictEqual(405, status)),
|
||||
end: sinon.fake(msg => {
|
||||
assert.deepStrictEqual(
|
||||
JSON.parse(msg),
|
||||
{
|
||||
errorType: 'MethodNotAllowed',
|
||||
errorMessage: 'Very much not allowed',
|
||||
},
|
||||
);
|
||||
done();
|
||||
}),
|
||||
};
|
||||
sendError(mockRes, mockLogger, errors.MethodNotAllowed, 'Very much not allowed');
|
||||
});
|
||||
});
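A hypothetical handler showing how sendSuccess and sendError would typically be paired in a probe route; the handler shape and the isReady callback are assumptions, only the two Utils functions and the Arsenal error objects come from the code under test (require paths mirror the test file above):

const errors = require('../../../../lib/errors');
const { sendError, sendSuccess } = require('../../../../lib/network/probe/Utils');

// Hypothetical readiness handler; wiring it into a server is left out.
function handleReadiness(req, res, log, isReady) {
    if (req.method !== 'GET') {
        return sendError(res, log, errors.MethodNotAllowed);
    }
    return isReady()
        ? sendSuccess(res, log, 'ready')
        : sendError(res, log, errors.ServiceUnavailable, 'not ready yet');
}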
|
|
@@ -0,0 +1,31 @@
|
|||
'use strict'; // eslint-disable-line
|
||||
|
||||
const assert = require('assert');
|
||||
const constants = require('../../../../lib/constants');
|
||||
const { parseURL } = require('../../../../lib/network/rest/utils');
|
||||
|
||||
describe('parseURL function', () => {
|
||||
[
|
||||
{
|
||||
inputUrl: `${constants.passthroughFileURL}/test`,
|
||||
expectedKey: 'test',
|
||||
},
|
||||
{
|
||||
inputUrl: `${constants.passthroughFileURL}/test with spaces`,
|
||||
expectedKey: 'test with spaces',
|
||||
},
|
||||
{
|
||||
inputUrl: `${constants.passthroughFileURL}` +
|
||||
'/test%20with%20encoded%20spaces',
|
||||
expectedKey: 'test with encoded spaces',
|
||||
},
|
||||
].forEach(testCase => {
|
||||
const { inputUrl, expectedKey } = testCase;
|
||||
|
||||
it(`should return ${expectedKey} with url "${inputUrl}"`,
|
||||
() => {
|
||||
const pathInfo = parseURL(inputUrl, true);
|
||||
assert.strictEqual(pathInfo.key, expectedKey);
|
||||
});
|
||||
});
|
||||
});
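The cases above all follow one rule: the key is the URL path that follows the passthrough prefix, percent-decoded. A sketch of that behaviour (not the parseURL implementation, which also validates the request):

const constants = require('../../../../lib/constants');

// Sketch only: extract and decode the key after the passthrough prefix.
function keyFromUrl(url) {
    return decodeURIComponent(url.slice(constants.passthroughFileURL.length + 1));
}

// keyFromUrl(`${constants.passthroughFileURL}/test%20with%20encoded%20spaces`)
// => 'test with encoded spaces'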
|
|
@@ -0,0 +1,9 @@
|
|||
{
|
||||
"privateKey": "-----BEGIN RSA PRIVATE KEY-----\r\nMIIEowIBAAKCAQEAj13sSYE40lAX2qpBvfdGfcSVNtBf8i5FH+E8FAhORwwPu+2S\r\n3yBQbgwHq30WWxunGb1NmZL1wkVZ+vf12DtxqFRnMA08LfO4oO6oC4V8XfKeuHyJ\r\n1qlaKRINz6r9yDkTHtwWoBnlAINurlcNKgGD5p7D+G26Chbr/Oo0ZwHula9DxXy6\r\neH8/bJ5/BynyNyyWRPoAO+UkUdY5utkFCUq2dbBIhovMgjjikf5p2oWqnRKXc+JK\r\nBegr6lSHkkhyqNhTmd8+wA+8Cace4sy1ajY1t5V4wfRZea5vwl/HlyyKodvHdxng\r\nJgg6H61JMYPkplY6Gr9OryBKEAgq02zYoYTDfwIDAQABAoIBAAuDYGlavkRteCzw\r\nRU1LIVcSRWVcgIgDXTu9K8T0Ec0008Kkxomyn6LmxmroJbZ1VwsDH8s4eRH73ckA\r\nxrZxt6Pr+0lplq6eBvKtl8MtGhq1VDe+kJczjHEF6SQHOFAu/TEaPZrn2XMcGvRX\r\nO1BnRL9tepFlxm3u/06VRFYNWqqchM+tFyzLu2AuiuKd5+slSX7KZvVgdkY1ErKH\r\ngB75lPyhPb77C/6ptqUisVMSO4JhLhsD0+ekDVY982Sb7KkI+szdWSbtMx9Ek2Wo\r\ntXwJz7I8T7IbODy9aW9G+ydyhMDFmaEYIaDVFKJj5+fluNza3oQ5PtFNVE50GQJA\r\nsisGqfECgYEAwpkwt0KpSamSEH6qknNYPOwxgEuXWoFVzibko7is2tFPvY+YJowb\r\n68MqHIYhf7gHLq2dc5Jg1TTbGqLECjVxp4xLU4c95KBy1J9CPAcuH4xQLDXmeLzP\r\nJ2YgznRocbzAMCDAwafCr3uY9FM7oGDHAi5bE5W11xWx+9MlFExL3JkCgYEAvJp5\r\nf+JGN1W037bQe2QLYUWGszewZsvplnNOeytGQa57w4YdF42lPhMz6Kc/zdzKZpN9\r\njrshiIDhAD5NCno6dwqafBAW9WZl0sn7EnlLhD4Lwm8E9bRHnC9H82yFuqmNrzww\r\nzxBCQogJISwHiVz4EkU48B283ecBn0wT/fAa19cCgYEApKWsnEHgrhy1IxOpCoRh\r\nUhqdv2k1xDPN/8DUjtnAFtwmVcLa/zJopU/Zn4y1ZzSzjwECSTi+iWZRQ/YXXHPf\r\nl92SFjhFW92Niuy8w8FnevXjF6T7PYiy1SkJ9OR1QlZrXc04iiGBDazLu115A7ce\r\nanACS03OLw+CKgl6Q/RR83ECgYBCUngDVoimkMcIHHt3yJiP3ikeAKlRnMdJlsa0\r\nXWVZV4hCG3lDfRXsnEgWuimftNKf+6GdfYSvQdLdiQsCcjT5A4uLsQTByv5nf4uA\r\n1ZKOsFrmRrARzxGXhLDikvj7yP//7USkq+0BBGFhfuAvl7fMhPceyPZPehqB7/jf\r\nxX1LBQKBgAn5GgSXzzS0e06ZlP/VrKxreOHa5Z8wOmqqYQ0QTeczAbNNmuITdwwB\r\nNkbRqpVXRIfuj0BQBegAiix8om1W4it0cwz54IXBwQULxJR1StWxj3jo4QtpMQ+z\r\npVPdB1Ilb9zPV1YvDwRfdS1xsobzznAx56ecsXduZjs9mF61db8Q\r\n-----END RSA PRIVATE KEY-----\r\n",
|
||||
"publicKey": "-----BEGIN PUBLIC KEY-----\r\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAj13sSYE40lAX2qpBvfdG\r\nfcSVNtBf8i5FH+E8FAhORwwPu+2S3yBQbgwHq30WWxunGb1NmZL1wkVZ+vf12Dtx\r\nqFRnMA08LfO4oO6oC4V8XfKeuHyJ1qlaKRINz6r9yDkTHtwWoBnlAINurlcNKgGD\r\n5p7D+G26Chbr/Oo0ZwHula9DxXy6eH8/bJ5/BynyNyyWRPoAO+UkUdY5utkFCUq2\r\ndbBIhovMgjjikf5p2oWqnRKXc+JKBegr6lSHkkhyqNhTmd8+wA+8Cace4sy1ajY1\r\nt5V4wfRZea5vwl/HlyyKodvHdxngJgg6H61JMYPkplY6Gr9OryBKEAgq02zYoYTD\r\nfwIDAQAB\r\n-----END PUBLIC KEY-----\r\n",
|
||||
"accessKey": "QXP3VDG3SALNBX2QBJ1C",
|
||||
"secretKey": "K5FyqZo5uFKfw9QBtn95o6vuPuD0zH/1seIrqPKqGnz8AxALNSx6EeRq7G1I6JJpS1XN13EhnwGn2ipsml3Uf2fQ00YgEmImG8wzGVZm8fWotpVO4ilN4JGyQCah81rNX4wZ9xHqDD7qYR5MyIERxR/osoXfctOwY7GGUjRKJfLOguNUlpaovejg6mZfTvYAiDF+PTO1sKUYqHt1IfKQtsK3dov1EFMBB5pWM7sVfncq/CthKN5M+VHx9Y87qdoP3+7AW+RCBbSDOfQgxvqtS7PIAf10mDl8k2kEURLz+RqChu4O4S0UzbEmtja7wa7WYhYKv/tM/QeW7kyNJMmnPg==",
|
||||
"decryptedSecretKey": "n7PSZ3U6SgerF9PCNhXYsq3S3fRKVGdZTicGV8Ur",
|
||||
"canonicalId": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be",
|
||||
"userName": "arsenal-0"
|
||||
}
|
|
@@ -0,0 +1,353 @@
|
|||
const assert = require('assert');
|
||||
const { patchLocations } = require('../../../lib/patches/locationConstraints');
|
||||
const {
|
||||
privateKey, accessKey, decryptedSecretKey, secretKey,
|
||||
} = require('./creds.json');
|
||||
|
||||
const tests = [
|
||||
{
|
||||
locationType: 'location-mem-v1',
|
||||
locations: {
|
||||
name: 'legacy',
|
||||
objectId: 'legacy',
|
||||
},
|
||||
expected: {
|
||||
details: {
|
||||
supportsVersioning: true,
|
||||
},
|
||||
isTransient: false,
|
||||
legacyAwsBehavior: false,
|
||||
name: 'mem-loc',
|
||||
objectId: 'legacy',
|
||||
sizeLimitGB: null,
|
||||
type: 'mem',
|
||||
},
|
||||
},
|
||||
{
|
||||
locationType: 'location-file-v1',
|
||||
locations: {
|
||||
objectId: 'us-east-1',
|
||||
legacyAwsBehavior: true,
|
||||
},
|
||||
expected: {
|
||||
details: {
|
||||
supportsVersioning: true,
|
||||
},
|
||||
isTransient: false,
|
||||
legacyAwsBehavior: true,
|
||||
objectId: 'us-east-1',
|
||||
sizeLimitGB: null,
|
||||
type: 'file',
|
||||
},
|
||||
},
|
||||
{
|
||||
locationType: 'location-azure-v1',
|
||||
locations: {
|
||||
objectId: 'azurebackendtest',
|
||||
details: {
|
||||
bucketMatch: 'azurebucketmatch',
|
||||
endpoint: 'azure.end.point',
|
||||
bucketName: 'azurebucketname',
|
||||
accessKey,
|
||||
secretKey,
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
details: {
|
||||
azureContainerName: 'azurebucketname',
|
||||
azureStorageAccessKey: decryptedSecretKey,
|
||||
azureStorageAccountName: accessKey,
|
||||
azureStorageEndpoint: 'azure.end.point',
|
||||
bucketMatch: 'azurebucketmatch',
|
||||
},
|
||||
isTransient: false,
|
||||
legacyAwsBehavior: false,
|
||||
objectId: 'azurebackendtest',
|
||||
sizeLimitGB: null,
|
||||
type: 'azure',
|
||||
},
|
||||
},
|
||||
{
|
||||
locationType: 'location-aws-s3-v1',
|
||||
locations: {
|
||||
objectId: 'awsbackendtest',
|
||||
details: {
|
||||
bucketMatch: 'awsbucketmatch',
|
||||
endpoint: 'aws.end.point',
|
||||
bucketName: 'awsbucketname',
|
||||
region: 'us-west-1',
|
||||
accessKey,
|
||||
secretKey,
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
details: {
|
||||
awsEndpoint: 'aws.end.point',
|
||||
bucketMatch: 'awsbucketmatch',
|
||||
bucketName: 'awsbucketname',
|
||||
https: true,
|
||||
pathStyle: false,
|
||||
serverSideEncryption: false,
|
||||
supportsVersioning: true,
|
||||
region: 'us-west-1',
|
||||
credentials: {
|
||||
accessKey,
|
||||
secretKey: decryptedSecretKey,
|
||||
},
|
||||
},
|
||||
isTransient: false,
|
||||
legacyAwsBehavior: false,
|
||||
objectId: 'awsbackendtest',
|
||||
sizeLimitGB: null,
|
||||
type: 'aws_s3',
|
||||
},
|
||||
},
|
||||
{
|
||||
locationType: 'location-gcp-v1',
|
||||
locations: {
|
||||
name: 'gcpbackendtest',
|
||||
objectId: 'gcpbackendtest',
|
||||
details: {
|
||||
bucketMatch: 'gcpbucketmatch',
|
||||
endpoint: 'gcp.end.point',
|
||||
accessKey: 'gcpaccesskey',
|
||||
secretKey,
|
||||
bucketName: 'gcpbucketname',
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
details: {
|
||||
bucketMatch: 'gcpbucketmatch',
|
||||
bucketName: 'gcpbucketname',
|
||||
credentials: {
|
||||
accessKey: 'gcpaccesskey',
|
||||
secretKey: decryptedSecretKey,
|
||||
},
|
||||
gcpEndpoint: 'gcp.end.point',
|
||||
mpuBucketName: undefined,
|
||||
https: true,
|
||||
},
|
||||
legacyAwsBehavior: false,
|
||||
isTransient: false,
|
||||
sizeLimitGB: null,
|
||||
type: 'gcp',
|
||||
objectId: 'gcpbackendtest',
|
||||
},
|
||||
},
|
||||
{
|
||||
locationType: 'location-scality-sproxyd-v1',
|
||||
locations: {
|
||||
name: 'sproxydbackendtest',
|
||||
objectId: 'sproxydbackendtest',
|
||||
details: {
|
||||
chordCos: 3,
|
||||
bootstrapList: ['localhost:8001', 'localhost:8002'],
|
||||
proxyPath: '/proxy/path',
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
details: {
|
||||
connector: {
|
||||
sproxyd: {
|
||||
chordCos: 3,
|
||||
bootstrap: [
|
||||
'localhost:8001',
|
||||
'localhost:8002',
|
||||
],
|
||||
path: '/proxy/path',
|
||||
},
|
||||
},
|
||||
supportsVersioning: true,
|
||||
},
|
||||
legacyAwsBehavior: false,
|
||||
isTransient: false,
|
||||
sizeLimitGB: null,
|
||||
type: 'scality',
|
||||
objectId: 'sproxydbackendtest',
|
||||
},
|
||||
},
|
||||
{
|
||||
locationType: 'location-scality-ring-s3-v1',
|
||||
locations: {
|
||||
objectId: 'httpsawsbackendtest',
|
||||
details: {
|
||||
bucketMatch: 'rings3bucketmatch',
|
||||
endpoint: 'https://secure.ring.end.point',
|
||||
accessKey: 'rings3accesskey',
|
||||
secretKey,
|
||||
bucketName: 'rings3bucketname',
|
||||
region: 'us-west-1',
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
details: {
|
||||
awsEndpoint: 'secure.ring.end.point',
|
||||
bucketMatch: 'rings3bucketmatch',
|
||||
bucketName: 'rings3bucketname',
|
||||
credentials: {
|
||||
accessKey: 'rings3accesskey',
|
||||
secretKey: decryptedSecretKey,
|
||||
},
|
||||
https: true,
|
||||
pathStyle: true,
|
||||
region: 'us-west-1',
|
||||
serverSideEncryption: false,
|
||||
supportsVersioning: true,
|
||||
},
|
||||
legacyAwsBehavior: false,
|
||||
isTransient: false,
|
||||
sizeLimitGB: null,
|
||||
type: 'aws_s3',
|
||||
objectId: 'httpsawsbackendtest',
|
||||
},
|
||||
},
|
||||
{
|
||||
locationType: 'location-ceph-radosgw-s3-v1',
|
||||
locations: {
|
||||
objectId: 'cephbackendtest',
|
||||
details: {
|
||||
bucketMatch: 'cephbucketmatch',
|
||||
endpoint: 'https://secure.ceph.end.point',
|
||||
accessKey: 'cephs3accesskey',
|
||||
secretKey,
|
||||
bucketName: 'cephbucketname',
|
||||
region: 'us-west-1',
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
details: {
|
||||
awsEndpoint: 'secure.ceph.end.point',
|
||||
bucketMatch: 'cephbucketmatch',
|
||||
bucketName: 'cephbucketname',
|
||||
credentials: {
|
||||
accessKey: 'cephs3accesskey',
|
||||
secretKey: decryptedSecretKey,
|
||||
},
|
||||
https: true,
|
||||
pathStyle: true,
|
||||
region: 'us-west-1',
|
||||
serverSideEncryption: false,
|
||||
supportsVersioning: true,
|
||||
},
|
||||
legacyAwsBehavior: false,
|
||||
isTransient: false,
|
||||
sizeLimitGB: null,
|
||||
type: 'aws_s3',
|
||||
objectId: 'cephbackendtest',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'transient enabled',
|
||||
locationType: 'location-file-v1',
|
||||
locations: {
|
||||
objectId: 'transienttest',
|
||||
isTransient: true,
|
||||
},
|
||||
expected: {
|
||||
type: 'file',
|
||||
objectId: 'transienttest',
|
||||
legacyAwsBehavior: false,
|
||||
isTransient: true,
|
||||
sizeLimitGB: null,
|
||||
details: {
|
||||
supportsVersioning: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'limited size',
|
||||
locationType: 'location-file-v1',
|
||||
locations: {
|
||||
objectId: 'sizelimitedtest',
|
||||
sizeLimitGB: 1024,
|
||||
},
|
||||
expected: {
|
||||
type: 'file',
|
||||
objectId: 'sizelimitedtest',
|
||||
legacyAwsBehavior: false,
|
||||
isTransient: false,
|
||||
sizeLimitGB: 1024,
|
||||
details: {
|
||||
supportsVersioning: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'zero size limit',
|
||||
locationType: 'location-file-v1',
|
||||
locations: {
|
||||
objectId: 'sizezerotest',
|
||||
sizeLimitGB: 0,
|
||||
},
|
||||
expected: {
|
||||
type: 'file',
|
||||
objectId: 'sizezerotest',
|
||||
legacyAwsBehavior: false,
|
||||
isTransient: false,
|
||||
sizeLimitGB: null,
|
||||
details: {
|
||||
supportsVersioning: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
describe('patch location constraints', () => {
|
||||
const mockLog = {
|
||||
info: () => {},
|
||||
};
|
||||
|
||||
tests.forEach(spec => {
|
||||
const testName = spec.name || `should patch ${spec.locationType}`;
|
||||
it(testName, () => {
|
||||
// copy specs to include extra attributes
|
||||
const locations = spec.locations;
|
||||
const expected = spec.expected;
|
||||
|
||||
// add a name to the locations and expected without having to include it
|
||||
const locationName = spec.name || `name-${spec.locationType}`;
|
||||
locations.name = locationName;
|
||||
expected.name = locationName;
|
||||
|
||||
// also add the location type
|
||||
locations.locationType = spec.locationType;
|
||||
expected.locationType = spec.locationType;
|
||||
|
||||
assert.deepStrictEqual(
|
||||
patchLocations(
|
||||
{ [locationName]: locations },
|
||||
{ privateKey },
|
||||
mockLog,
|
||||
),
|
||||
{ [locationName]: expected },
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('undefined location', () => {
|
||||
assert.deepStrictEqual(
|
||||
patchLocations(
|
||||
undefined,
|
||||
{ privateKey },
|
||||
mockLog,
|
||||
),
|
||||
{},
|
||||
);
|
||||
});
|
||||
|
||||
it('bad location type', () => {
|
||||
assert.deepStrictEqual(
|
||||
patchLocations(
|
||||
{
|
||||
name: {
|
||||
locationType: 'bad-location',
|
||||
},
|
||||
},
|
||||
{ privateKey },
|
||||
mockLog,
|
||||
),
|
||||
{},
|
||||
);
|
||||
});
|
||||
});
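In short, patchLocations maps an operator-style location description (keyed by location name and carrying a locationType) onto the internal location-constraint object, deciphering embedded credentials with the supplied privateKey. A shape-only illustration, with values copied from the 'limited size' case above:

// Shape illustration only; values taken from the 'limited size' test case.
const input = {
    'my-location': {
        name: 'my-location',
        locationType: 'location-file-v1',
        objectId: 'sizelimitedtest',
        sizeLimitGB: 1024,
    },
};
// patchLocations(input, { privateKey }, log) is expected to yield roughly:
const output = {
    'my-location': {
        name: 'my-location',
        locationType: 'location-file-v1',
        type: 'file',
        objectId: 'sizelimitedtest',
        legacyAwsBehavior: false,
        isTransient: false,
        sizeLimitGB: 1024,
        details: { supportsVersioning: true },
    },
};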
|
|
@@ -522,7 +522,7 @@ describe('LifecycleUtils::filterRules', () => {
|
|||
const expRes1 = getRuleIDs(mBucketRules.filter(rule =>
|
||||
(rule.Filter && rule.Filter.Tag &&
|
||||
rule.Filter.Tag.Key === 'tag1' &&
|
||||
rule.Filter.Tag.Value === 'val1')
|
||||
rule.Filter.Tag.Value === 'val1'),
|
||||
));
|
||||
assert.deepStrictEqual(expRes1, getRuleIDs(res1));
|
||||
|
||||
|
@@ -532,7 +532,7 @@ describe('LifecycleUtils::filterRules', () => {
|
|||
const expRes2 = getRuleIDs(mBucketRules.filter(rule =>
|
||||
rule.Filter && rule.Filter.Tag &&
|
||||
rule.Filter.Tag.Key === 'tag3-1' &&
|
||||
rule.Filter.Tag.Value === 'val3'
|
||||
rule.Filter.Tag.Value === 'val3',
|
||||
));
|
||||
assert.deepStrictEqual(expRes2, getRuleIDs(res2));
|
||||
});
|
||||
|
|
|
@@ -0,0 +1,56 @@
|
|||
const assert = require('assert');
|
||||
const crypto = require('crypto');
|
||||
|
||||
const { createAggregateETag } =
|
||||
require('../../../lib/s3middleware/processMpuParts');
|
||||
|
||||
describe('createAggregateETag', () => {
|
||||
[{
|
||||
partETags: ['3858f62230ac3c915f300c664312c63f'],
|
||||
aggregateETag: 'c4529dc85643bb0c5a96e46587377777-1',
|
||||
}, {
|
||||
partETags: ['ffc88b4ca90a355f8ddba6b2c3b2af5c',
|
||||
'd067a0fa9dc61a6e7195ca99696b5a89'],
|
||||
aggregateETag: '620e8b191a353bdc9189840bb3904928-2',
|
||||
}, {
|
||||
partETags: ['ffc88b4ca90a355f8ddba6b2c3b2af5c',
|
||||
'd067a0fa9dc61a6e7195ca99696b5a89',
|
||||
'49dcd91231f801159e893fb5c6674985',
|
||||
'1292a1f4afecfeb84e1b200389d1c904',
|
||||
'6b70b0751c98492074a7359f0f70d76d',
|
||||
'5c55c71b3b582f6b700f83bb834f2430',
|
||||
'84562b55618378a7ac5cfcbc7f3b2ceb',
|
||||
'b5693c44bad7a2cf51c82c6a2fe1a4b6',
|
||||
'628b37ac2dee9c123cd2e3e2e486eb27',
|
||||
'4cacc7e3b7933e54422243964db169af',
|
||||
'0add1fb9122cc9df84aee7c4bb86d658',
|
||||
'5887704d69ee209f32c9314c345c8084',
|
||||
'374e87eeee83bed471b78eefc8d7e28e',
|
||||
'4e2af9f5fa8b64b19f78ddfbcfcab148',
|
||||
'8e06231275f3afe7953fc7d57b65723f',
|
||||
'c972158cb957cf48e18b475b908d5d82',
|
||||
'311c2324dd756c9655129de049f69c9b',
|
||||
'0188a9df3e1c4ce18f81e4ba24c672a0',
|
||||
'1a15c4da6038a6626ad16473712eb358',
|
||||
'd13c52938d8e0f01192d16b0de17ea4c'],
|
||||
aggregateETag: 'd3d5a0ab698dd360e755a467f7899e7e-20',
|
||||
}].forEach(test => {
|
||||
it(`should compute aggregate ETag with ${test.partETags.length} parts`,
|
||||
() => {
|
||||
const aggregateETag = createAggregateETag(test.partETags);
|
||||
assert.strictEqual(aggregateETag, test.aggregateETag);
|
||||
});
|
||||
});
|
||||
|
||||
it('should compute aggregate ETag with 10000 parts', () => {
|
||||
const partETags = [];
|
||||
for (let i = 0; i < 10000; ++i) {
|
||||
const md5hash = crypto.createHash('md5');
|
||||
md5hash.update(`part${i}`, 'binary');
|
||||
partETags.push(md5hash.digest('hex'));
|
||||
}
|
||||
const aggregateETag = createAggregateETag(partETags);
|
||||
assert.strictEqual(
|
||||
aggregateETag, 'bff290751e485f06dcc0203c77ed2fd9-10000');
|
||||
});
|
||||
});
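The values checked here follow the standard S3 multipart ETag convention: MD5 over the concatenated binary MD5 digests of the parts, suffixed with '-<part count>'. A minimal sketch of that convention (an illustration, not the processMpuParts source):

const crypto = require('crypto');

// MD5 of the concatenated binary part digests, then "-<number of parts>".
function aggregateETagSketch(partETags) {
    const md5 = crypto.createHash('md5');
    partETags.forEach(etag => md5.update(Buffer.from(etag, 'hex')));
    return `${md5.digest('hex')}-${partETags.length}`;
}

// Should reproduce the two-part case above:
// aggregateETagSketch(['ffc88b4ca90a355f8ddba6b2c3b2af5c',
//                      'd067a0fa9dc61a6e7195ca99696b5a89'])
// => '620e8b191a353bdc9189840bb3904928-2'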
|
|
@@ -7,6 +7,7 @@ const {
|
|||
_checkEtagNoneMatch,
|
||||
_checkModifiedSince,
|
||||
_checkUnmodifiedSince,
|
||||
checkDateModifiedHeaders,
|
||||
validateConditionalHeaders,
|
||||
} = require('../../../lib/s3middleware/validateConditionalHeaders');
|
||||
|
||||
|
@@ -172,6 +173,59 @@ describe('validateConditionalHeaders util function ::', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('checkDateModifiedHeaders util function: ', () => {
|
||||
const expectedSuccess = {
|
||||
present: true,
|
||||
error: null,
|
||||
};
|
||||
|
||||
const expectedAbsense = {
|
||||
present: false,
|
||||
error: null,
|
||||
};
|
||||
|
||||
it('should return NotModified error for \'if-modified-since\' header',
|
||||
() => {
|
||||
const header = {};
|
||||
header['if-modified-since'] = afterLastModified;
|
||||
const { modifiedSinceRes, unmodifiedSinceRes } =
|
||||
checkDateModifiedHeaders(header, lastModified);
|
||||
assert.deepStrictEqual(modifiedSinceRes.error, errors.NotModified);
|
||||
assert.deepStrictEqual(unmodifiedSinceRes, expectedAbsense);
|
||||
});
|
||||
|
||||
it('should return PreconditionFailed error for \'if-unmodified-since\' ' +
|
||||
'header', () => {
|
||||
const header = {};
|
||||
header['if-unmodified-since'] = beforeLastModified;
|
||||
const { modifiedSinceRes, unmodifiedSinceRes } =
|
||||
checkDateModifiedHeaders(header, lastModified);
|
||||
assert.deepStrictEqual(unmodifiedSinceRes.error,
|
||||
errors.PreconditionFailed);
|
||||
assert.deepStrictEqual(modifiedSinceRes, expectedAbsense);
|
||||
});
|
||||
|
||||
it('should succeed if \'if-modified-since\' header value is earlier ' +
|
||||
'than last modified', () => {
|
||||
const header = {};
|
||||
header['if-modified-since'] = beforeLastModified;
|
||||
const { modifiedSinceRes, unmodifiedSinceRes } =
|
||||
checkDateModifiedHeaders(header, lastModified);
|
||||
assert.deepStrictEqual(modifiedSinceRes, expectedSuccess);
|
||||
assert.deepStrictEqual(unmodifiedSinceRes, expectedAbsense);
|
||||
});
|
||||
|
||||
it('should succeed if \'if-unmodified-since\' header value is later ' +
|
||||
'than last modified', () => {
|
||||
const header = {};
|
||||
header['if-unmodified-since'] = afterLastModified;
|
||||
const { modifiedSinceRes, unmodifiedSinceRes } =
|
||||
checkDateModifiedHeaders(header, lastModified);
|
||||
assert.deepStrictEqual(unmodifiedSinceRes, expectedSuccess);
|
||||
assert.deepStrictEqual(modifiedSinceRes, expectedAbsense);
|
||||
});
|
||||
});
|
||||
|
||||
describe('_checkEtagMatch function :', () => {
|
||||
const expectedSuccess = {
|
||||
present: true,
|
||||
|
|
|
@@ -0,0 +1,36 @@
|
|||
const assert = require('assert');
|
||||
const routesUtils = require('../../../../lib/s3routes/routesUtils.js');
|
||||
|
||||
const bannedStr = 'banned';
|
||||
const prefixBlacklist = [];
|
||||
|
||||
// byte size of 915
|
||||
const keyutf8 = '%EA%9D%8B崰㈌㒈保轖䳷䀰⺩ቆ楪秲ⴝ㿅鼎ꓜ퇬枅염곞召㸾⌙ꪊᆐ庍뉆䌗幐鸆䛃➟녩' +
|
||||
'ˍ뙪臅⠙≼绒벊냂詴 끴鹲萯⇂㭢䈊퉉楝舳㷖족痴䧫㾵ำꎆ꼵껪멷誕㳓腜쒃컹㑻鳃삚舿췈孨੦⮀NJ곓꺼꜈' +
|
||||
'嗼뫘悕錸瑺륒㜓垻ㆩꝿ詀펉ᆙ舑䜾힑藪碙ꀎꂰ췊Ᏻ 㘺幽醛잯ද汧Ꟑꛒⶨ쪸숞헹㭔ꡔᘼ뺓ᡆᑟ䅅퀭耓弧⢠⇙' +
|
||||
'폪ް蛧⃪Ἔ돫ꕢ븥ヲ캂䝄쟐颺ᓾ둾Ұ껗礞ᾰ瘹蒯硳풛瞋襎奺熝妒컚쉴⿂㽝㝳駵鈚䄖戭䌸ᇁ䙪鸮ᐴ稫ⶭ뀟ھ⦿' +
|
||||
'䴳稉ꉕ捈袿놾띐✯伤䃫⸧ꠏ瘌틳藔ˋ㫣敀䔩㭘식↴⧵佶痊牌ꪌ搒꾛æᤈべ쉴挜敗羥誜嘳ֶꫜ걵ࣀ묟ኋ拃秷膤䨸菥' +
|
||||
'䟆곘縧멀煣卲챸⧃⏶혣ਔ뙞밺㊑ک씌촃Ȅ頰ᖅ懚ホῐ꠷㯢먈㝹밷㮇䘖桲阥黾噘烻ᓧ鈠ᴥ徰穆ꘛ蹕綻表鯍裊' +
|
||||
'鮕漨踒ꠍ픸Ä☶莒浏钸목탬툖氭ˠٸ൪㤌ᶟ訧ᜒೳ揪Ⴛ摖㸣᳑걀ꢢ䏹ῖ"';
|
||||
|
||||
describe('routesUtils.isValidObjectKey', () => {
|
||||
it('should return isValid false if object key name starts with a ' +
|
||||
'blacklisted prefix', () => {
|
||||
const result = routesUtils.isValidObjectKey('bannedkey', [bannedStr]);
|
||||
// return { isValid: false, invalidPrefix };
|
||||
assert.strictEqual(result.isValid, false);
|
||||
assert.strictEqual(result.invalidPrefix, bannedStr);
|
||||
});
|
||||
|
||||
it('should return isValid false if object key name exceeds length of 915',
|
||||
() => {
|
||||
const key = 'a'.repeat(916);
|
||||
const result = routesUtils.isValidObjectKey(key, prefixBlacklist);
|
||||
assert.strictEqual(result.isValid, false);
|
||||
});
|
||||
|
||||
it('should return isValid true for a utf8 string of byte size 915', () => {
|
||||
const result = routesUtils.isValidObjectKey(keyutf8, prefixBlacklist);
|
||||
assert.strictEqual(result.isValid, true);
|
||||
});
|
||||
});
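Both checks the tests rely on are simple: reject keys that start with a blacklisted prefix, and reject keys whose UTF-8 byte length exceeds 915. A sketch mirroring the return shape asserted above (not the routesUtils source):

// Sketch only; the 915-byte limit and return shape come from the tests.
function isValidObjectKeySketch(objectKey, prefixBlacklist) {
    const invalidPrefix = prefixBlacklist.find(p => objectKey.startsWith(p));
    if (invalidPrefix) {
        return { isValid: false, invalidPrefix };
    }
    if (Buffer.byteLength(objectKey, 'utf8') > 915) {
        return { isValid: false };
    }
    return { isValid: true };
}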
|
|
@@ -0,0 +1,80 @@
|
|||
const assert = require('assert');
|
||||
const http = require('http');
|
||||
|
||||
const werelogs = require('werelogs');
|
||||
const logger = new werelogs.Logger('test:routesUtils.responseStreamData');
|
||||
|
||||
const { responseStreamData } = require('../../../../lib/s3routes/routesUtils.js');
|
||||
const AwsClient = require('../../../../lib/storage/data/external/AwsClient');
|
||||
const DummyObjectStream = require('../../storage/data/DummyObjectStream');
|
||||
|
||||
werelogs.configure({
|
||||
level: 'debug',
|
||||
dump: 'error',
|
||||
});
|
||||
|
||||
describe('routesUtils.responseStreamData', () => {
|
||||
const awsAgent = new http.Agent({
|
||||
keepAlive: true,
|
||||
});
|
||||
const awsConfig = {
|
||||
s3Params: {
|
||||
endpoint: 'http://localhost:8888',
|
||||
maxRetries: 0,
|
||||
s3ForcePathStyle: true,
|
||||
accessKeyId: 'accessKey',
|
||||
secretAccessKey: 'secretKey',
|
||||
httpOptions: {
|
||||
agent: awsAgent,
|
||||
},
|
||||
},
|
||||
bucketName: 'awsTestBucketName',
|
||||
dataStoreName: 'awsDataStore',
|
||||
serverSideEncryption: false,
|
||||
type: 'aws',
|
||||
};
|
||||
let httpServer;
|
||||
let awsClient;
|
||||
|
||||
beforeAll(done => {
|
||||
awsClient = new AwsClient(awsConfig);
|
||||
httpServer = http.createServer((req, res) => {
|
||||
const objStream = new DummyObjectStream(0, 10000000);
|
||||
res.setHeader('content-length', 10000000);
|
||||
objStream.pipe(res);
|
||||
}).listen(8888);
|
||||
httpServer.on('listening', done);
|
||||
httpServer.on('error', err => assert.ifError(err));
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
httpServer.close();
|
||||
});
|
||||
|
||||
it('should not leak socket if client closes the connection before ' +
|
||||
'data backend starts streaming', done => {
|
||||
responseStreamData(undefined, {}, {}, [{
|
||||
key: 'foo',
|
||||
size: 10000000,
|
||||
}], {
|
||||
client: awsClient,
|
||||
implName: 'impl',
|
||||
config: {},
|
||||
locStorageCheckFn: () => {},
|
||||
}, {
|
||||
setHeader: () => {},
|
||||
writeHead: () => {},
|
||||
on: () => {},
|
||||
once: () => {},
|
||||
emit: () => {},
|
||||
write: () => {},
|
||||
end: () => setTimeout(() => {
|
||||
const nOpenSockets = Object.keys(awsAgent.sockets).length;
|
||||
assert.strictEqual(nOpenSockets, 0);
|
||||
done();
|
||||
}, 1000),
|
||||
// fake a connection close from the S3 client by setting the "isclosed" flag
|
||||
isclosed: true,
|
||||
}, undefined, logger.newRequestLogger());
|
||||
});
|
||||
});
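The test fakes a client that disconnected before streaming started by setting isclosed: true on the mocked response, then asserts that no socket to the data backend stays open. A sketch of the kind of guard this implies (an assumption about the flow, not the routesUtils implementation):

// Assumed guard: do not contact the data backend once the client
// response is already closed, so no backend socket gets opened.
function streamIfClientStillThere(response, startBackendStream) {
    if (response.isclosed) {
        return undefined;
    }
    return startBackendStream();
}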
|
|
@@ -28,9 +28,7 @@ describe('test generating versionIds', () => {
|
|||
|
||||
// nodejs 10 no longer returns error for non-hex string versionIds
|
||||
it.skip('should return error decoding non-hex string versionIds', () => {
|
||||
const encoded = vids.map(vid => VID.hexEncode(vid));
|
||||
const decoded = encoded.map(vid => VID.hexDecode(`${vid}foo`));
|
||||
decoded.forEach(result => assert(result instanceof Error));
|
||||
assert(VID.hexDecode('foo') instanceof Error);
|
||||
});
|
||||
|
||||
it('should encode and decode versionIds', () => {
|
||||
|
|
|
@@ -0,0 +1,538 @@
|
|||
const async = require('async');
|
||||
const crypto = require('crypto');
|
||||
|
||||
const errors = require('../../lib/errors');
|
||||
const kms = [];
|
||||
let count = 1;
|
||||
|
||||
/** Class exposing common createDataKey,
|
||||
createDecipher and createCipher functions. */
|
||||
class Common {
|
||||
static _algorithm() {
|
||||
return 'aes-256-ctr';
|
||||
}
|
||||
|
||||
/* AES-256 Key */
|
||||
static _keySize() {
|
||||
return 32;
|
||||
}
|
||||
|
||||
/* IV is 128bit for AES-256-CTR */
|
||||
static _IVSize() {
|
||||
return 16;
|
||||
}
|
||||
|
||||
/* block size is 128bit for AES-256-CTR */
|
||||
static _aesBlockSize() {
|
||||
return 16;
|
||||
}
|
||||
|
||||
/**
|
||||
Creates data key to encrypt and decrypt the actual data (which data
|
||||
key is ciphered and deciphered by the appliance). The encrypted data key
|
||||
is stored in the object's metadata. We also use this function to create
|
||||
the bucket key for the file and in memory implementations.
|
||||
@return {buffer} - random key
|
||||
*/
|
||||
static createDataKey() {
|
||||
return Buffer.from(crypto.randomBytes(this._keySize()));
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {buffer} derivedIV - the stringified bucket
|
||||
* @param {number} counter - quotient of the offset and blocksize
|
||||
* @return {buffer} - the incremented IV
|
||||
*/
|
||||
static _incrementIV(derivedIV, counter) {
|
||||
const newIV = derivedIV;
|
||||
const len = derivedIV.length;
|
||||
let i = len - 1;
|
||||
let ctr = counter;
|
||||
while (ctr !== 0) {
|
||||
const mod = (ctr + newIV[i]) % 256;
|
||||
ctr = Math.floor((ctr + newIV[i]) / 256);
|
||||
newIV[i] = mod;
|
||||
i -= 1;
|
||||
if (i < 0) {
|
||||
i = len - 1;
|
||||
}
|
||||
}
|
||||
return newIV;
|
||||
}
|
||||
|
||||
/**
|
||||
* Derive key to use in cipher
|
||||
* @param {number} cryptoScheme - cryptoScheme being used
|
||||
* @param {buffer} dataKey - the unencrypted key (either from the
|
||||
* appliance on a get or originally generated by kms in the case of a put)
|
||||
* @param {object} log - logger object
|
||||
* @param {function} cb - cb from createDecipher
|
||||
* @returns {undefined}
|
||||
* @callback called with (err, derivedKey, derivedIV)
|
||||
*/
|
||||
static _deriveKey(cryptoScheme, dataKey, log, cb) {
|
||||
if (cryptoScheme <= 1) {
|
||||
/* we are not storing a hashed human password.
|
||||
* It's a random key, so 1 iteration and
|
||||
* a fixed salt is enough for our usecase.
|
||||
* don't change the salt, the iteration number
|
||||
* or the digest algorithm (sha1 here) without
|
||||
* bumping the cryptoScheme number saved in the object
|
||||
* metadata along with the dataKey.
|
||||
*/
|
||||
const salt = Buffer.from('ItsTasty', 'utf8');
|
||||
const iterations = 1;
|
||||
return crypto.pbkdf2(
|
||||
dataKey, salt, iterations,
|
||||
this._keySize(), 'sha1', (err, derivedKey) => {
|
||||
if (err) {
|
||||
log.error('pbkdf2 function failed on key derivation',
|
||||
{ error: err });
|
||||
cb(errors.InternalError);
|
||||
return;
|
||||
}
|
||||
crypto.pbkdf2(
|
||||
derivedKey, salt, iterations,
|
||||
this._IVSize(), 'sha1', (err, derivedIV) => {
|
||||
if (err) {
|
||||
log.error(
|
||||
'pbkdf2 function failed on IV derivation',
|
||||
{ error: err });
|
||||
return cb(errors.InternalError);
|
||||
}
|
||||
// derivedKey is the actual data encryption or
|
||||
// decryption key used in the AES ctr cipher
|
||||
return cb(null, derivedKey, derivedIV);
|
||||
});
|
||||
});
|
||||
}
|
||||
log.error('Unknown cryptographic scheme', { cryptoScheme });
|
||||
return cb(errors.InternalError);
|
||||
}
|
||||
|
||||
/**
|
||||
* createDecipher
|
||||
* @param {number} cryptoScheme - cryptoScheme being used
|
||||
* @param {buffer} dataKey - the unencrypted key (either from the
|
||||
* appliance on a get or originally generated by kms in the case of a put)
|
||||
* @param {number} offset - offset
|
||||
* @param {object} log - logger object
|
||||
* @param {function} cb - cb from external call
|
||||
* @returns {undefined}
|
||||
* @callback called with (err, decipher: ReadWritable.stream)
|
||||
*/
|
||||
static createDecipher(cryptoScheme, dataKey, offset, log, cb) {
|
||||
this._deriveKey(
|
||||
cryptoScheme, dataKey, log,
|
||||
(err, derivedKey, derivedIV) => {
|
||||
if (err) {
|
||||
log.debug('key derivation failed', { error: err });
|
||||
return cb(err);
|
||||
}
|
||||
const aesBlockSize = this._aesBlockSize();
|
||||
const blocks = Math.floor(offset / aesBlockSize);
|
||||
const toSkip = offset % aesBlockSize;
|
||||
const iv = this._incrementIV(derivedIV, blocks);
|
||||
const cipher = crypto.createDecipheriv(this._algorithm(),
|
||||
derivedKey, iv);
|
||||
if (toSkip) {
|
||||
/* Above, we advanced to the latest boundary not
|
||||
greater than the offset amount. Here we advance by
|
||||
the toSkip amount if necessary. */
|
||||
const dummyBuffer = Buffer.alloc(toSkip);
|
||||
cipher.write(dummyBuffer);
|
||||
cipher.read();
|
||||
}
|
||||
return cb(null, cipher);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* createCipher (currently same as createDecipher function above. this
|
||||
* wrapper is included to preserve flexibility)
|
||||
* @param {number} cryptoScheme - cryptoScheme being used
|
||||
* @param {buffer} dataKey - the unencrypted key (either from the
|
||||
* appliance on a get or originally generated by kms in the case of a put)
|
||||
* @param {number} offset - offset
|
||||
* @param {object} log - logger object
|
||||
* @param {function} cb - cb from external call
|
||||
* @returns {undefined}
|
||||
* @callback called with (err, cipher: ReadWritable.stream)
|
||||
*/
|
||||
static createCipher(cryptoScheme, dataKey, offset, log, cb) {
|
||||
/* aes-256-ctr decipher is both ways */
|
||||
this.createDecipher(cryptoScheme, dataKey, offset, log, cb);
|
||||
}
|
||||
}
|
||||
|
||||
const backend = {
|
||||
/*
|
||||
* Target implementation will be async. let's mimic it
|
||||
*/
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} bucketName - bucket name
|
||||
* @param {object} log - logger object
|
||||
* @param {function} cb - callback
|
||||
* @returns {undefined}
|
||||
* @callback called with (err, masterKeyId: string)
|
||||
*/
|
||||
createBucketKey: function createBucketKeyMem(bucketName, log, cb) {
|
||||
process.nextTick(() => {
|
||||
// Using createDataKey here for purposes of createBucketKeyMem
|
||||
// so that we do not need a separate function.
|
||||
kms[count] = Common.createDataKey();
|
||||
cb(null, (count++).toString());
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} bucketKeyId - the Id of the bucket key
|
||||
* @param {object} log - logger object
|
||||
* @param {function} cb - callback
|
||||
* @returns {undefined}
|
||||
* @callback called with (err)
|
||||
*/
|
||||
destroyBucketKey: function destroyBucketKeyMem(bucketKeyId, log, cb) {
|
||||
process.nextTick(() => {
|
||||
kms[bucketKeyId] = undefined;
|
||||
cb(null);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {number} cryptoScheme - crypto scheme version number
|
||||
* @param {string} masterKeyId - key to retrieve master key
|
||||
* @param {buffer} plainTextDataKey - data key
|
||||
* @param {object} log - logger object
|
||||
* @param {function} cb - callback
|
||||
* @returns {undefined}
|
||||
* @callback called with (err, cipheredDataKey: Buffer)
|
||||
*/
|
||||
cipherDataKey: function cipherDataKeyMem(cryptoScheme,
|
||||
masterKeyId,
|
||||
plainTextDataKey,
|
||||
log,
|
||||
cb) {
|
||||
process.nextTick(() => {
|
||||
Common.createCipher(
|
||||
cryptoScheme, kms[masterKeyId], 0, log,
|
||||
(err, cipher) => {
|
||||
if (err) {
|
||||
cb(err);
|
||||
return;
|
||||
}
|
||||
let cipheredDataKey =
|
||||
cipher.update(plainTextDataKey);
|
||||
// call final() to ensure that any bytes remaining in
|
||||
// the output of the stream are captured
|
||||
const final = cipher.final();
|
||||
if (final.length !== 0) {
|
||||
cipheredDataKey =
|
||||
Buffer.concat([cipheredDataKey,
|
||||
final]);
|
||||
}
|
||||
cb(null, cipheredDataKey);
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {number} cryptoScheme - crypto scheme version number
|
||||
* @param {string} masterKeyId - key to retrieve master key
|
||||
* @param {buffer} cipheredDataKey - data key
|
||||
* @param {object} log - logger object
|
||||
* @param {function} cb - callback
|
||||
* @returns {undefined}
|
||||
* @callback called with (err, plainTextDataKey: Buffer)
|
||||
*/
|
||||
decipherDataKey: function decipherDataKeyMem(cryptoScheme,
|
||||
masterKeyId,
|
||||
cipheredDataKey,
|
||||
log,
|
||||
cb) {
|
||||
process.nextTick(() => {
|
||||
Common.createDecipher(
|
||||
cryptoScheme, kms[masterKeyId], 0, log,
|
||||
(err, decipher) => {
|
||||
if (err) {
|
||||
cb(err);
|
||||
return;
|
||||
}
|
||||
let plainTextDataKey =
|
||||
decipher.update(cipheredDataKey);
|
||||
const final = decipher.final();
|
||||
if (final.length !== 0) {
|
||||
plainTextDataKey =
|
||||
Buffer.concat([plainTextDataKey,
|
||||
final]);
|
||||
}
|
||||
cb(null, plainTextDataKey);
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
};
|
||||
|
||||
const client = backend;
|
||||
const implName = 'mem';
|
||||
|
||||
class KMS {
|
||||
/**
|
||||
*
|
||||
* @param {string} bucketName - bucket name
|
||||
* @param {object} log - logger object
|
||||
* @param {function} cb - callback
|
||||
* @returns {undefined}
|
||||
* @callback called with (err, masterKeyId: string)
|
||||
*/
|
||||
static createBucketKey(bucketName, log, cb) {
|
||||
log.debug('creating a new bucket key');
|
||||
client.createBucketKey(bucketName, log, (err, masterKeyId) => {
|
||||
if (err) {
|
||||
log.debug('error from kms', { implName, error: err });
|
||||
return cb(err);
|
||||
}
|
||||
log.trace('bucket key created in kms');
|
||||
return cb(null, masterKeyId);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} bucketName - bucket name
|
||||
* @param {object} headers - request headers
|
||||
* @param {object} log - logger object
|
||||
* @param {function} cb - callback
|
||||
* @returns {undefined}
|
||||
* @callback called with (err, serverSideEncryptionInfo: object)
|
||||
*/
|
||||
static bucketLevelEncryption(bucketName, headers, log, cb) {
|
||||
const sseAlgorithm = headers['x-amz-scal-server-side-encryption'];
|
||||
const sseMasterKeyId =
|
||||
headers['x-amz-scal-server-side-encryption-aws-kms-key-id'];
|
||||
/*
|
||||
The purpose of bucket level encryption is so that the client does not
|
||||
have to send appropriate headers to trigger encryption on each object
|
||||
put in an "encrypted bucket". Customer provided keys are not
|
||||
feasible in this system because we do not want to store this key
|
||||
in the bucket metadata.
|
||||
*/
|
||||
if (sseAlgorithm === 'AES256' ||
|
||||
(sseAlgorithm === 'aws:kms' && sseMasterKeyId === undefined)) {
|
||||
this.createBucketKey(bucketName, log, (err, masterKeyId) => {
|
||||
if (err) {
|
||||
cb(err);
|
||||
return;
|
||||
}
|
||||
const serverSideEncryptionInfo = {
|
||||
cryptoScheme: 1,
|
||||
algorithm: sseAlgorithm,
|
||||
masterKeyId,
|
||||
mandatory: true,
|
||||
};
|
||||
cb(null, serverSideEncryptionInfo);
|
||||
});
|
||||
} else if (sseAlgorithm === 'aws:kms') {
|
||||
const serverSideEncryptionInfo = {
|
||||
cryptoScheme: 1,
|
||||
algorithm: sseAlgorithm,
|
||||
masterKeyId: sseMasterKeyId,
|
||||
mandatory: true,
|
||||
};
|
||||
cb(null, serverSideEncryptionInfo);
|
||||
} else {
|
||||
/*
|
||||
* no encryption
|
||||
*/
|
||||
cb(null, null);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {string} bucketKeyId - the Id of the bucket key
|
||||
* @param {object} log - logger object
|
||||
* @param {function} cb - callback
|
||||
* @returns {undefined}
|
||||
* @callback called with (err)
|
||||
*/
|
||||
static destroyBucketKey(bucketKeyId, log, cb) {
|
||||
log.debug('deleting bucket key', { bucketKeyId });
|
||||
client.destroyBucketKey(bucketKeyId, log, err => {
|
||||
if (err) {
|
||||
log.debug('error from kms', { implName, error: err });
|
||||
return cb(err);
|
||||
}
|
||||
log.trace('bucket key destroyed in kms');
|
||||
return cb(null);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {object} log - logger object
|
||||
* @returns {buffer} newKey - a data key
|
||||
*/
|
||||
static createDataKey(log) {
|
||||
log.debug('creating a new data key');
|
||||
const newKey = Common.createDataKey();
|
||||
log.trace('data key created by the kms');
|
||||
return newKey;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* createCipherBundle
|
||||
* @param {object} serverSideEncryptionInfo - info for encryption
|
||||
* @param {number} serverSideEncryptionInfo.cryptoScheme -
|
||||
* cryptoScheme used
|
||||
* @param {string} serverSideEncryptionInfo.algorithm -
|
||||
* algorithm to use
|
||||
* @param {string} serverSideEncryptionInfo.masterKeyId -
|
||||
* key to get master key
|
||||
* @param {boolean} serverSideEncryptionInfo.mandatory -
|
||||
* true for mandatory encryption
|
||||
* @param {object} log - logger object
|
||||
* @param {function} cb - cb from external call
|
||||
* @returns {undefined}
|
||||
* @callback called with (err, cipherBundle)
|
||||
*/
|
||||
static createCipherBundle(serverSideEncryptionInfo,
|
||||
log, cb) {
|
||||
const dataKey = this.createDataKey(log);
|
||||
const cipherBundle = {
|
||||
algorithm: serverSideEncryptionInfo.algorithm,
|
||||
masterKeyId: serverSideEncryptionInfo.masterKeyId,
|
||||
cryptoScheme: 1,
|
||||
cipheredDataKey: null,
|
||||
cipher: null,
|
||||
};
|
||||
|
||||
async.waterfall([
|
||||
function cipherDataKey(next) {
|
||||
log.debug('ciphering a data key');
|
||||
return client.cipherDataKey(cipherBundle.cryptoScheme,
|
||||
serverSideEncryptionInfo.masterKeyId,
|
||||
dataKey, log, (err, cipheredDataKey) => {
|
||||
if (err) {
|
||||
log.debug('error from kms',
|
||||
{ implName, error: err });
|
||||
return next(err);
|
||||
}
|
||||
log.trace('data key ciphered by the kms');
|
||||
return next(null, cipheredDataKey);
|
||||
});
|
||||
},
|
||||
function createCipher(cipheredDataKey, next) {
|
||||
log.debug('creating a cipher');
|
||||
cipherBundle.cipheredDataKey =
|
||||
cipheredDataKey.toString('base64');
|
||||
return Common.createCipher(cipherBundle.cryptoScheme,
|
||||
dataKey, 0, log, (err, cipher) => {
|
||||
dataKey.fill(0);
|
||||
if (err) {
|
||||
log.debug('error from kms',
|
||||
{ implName, error: err });
|
||||
return next(err);
|
||||
}
|
||||
log.trace('cipher created by the kms');
|
||||
return next(null, cipher);
|
||||
});
|
||||
},
|
||||
function finishCipherBundle(cipher, next) {
|
||||
cipherBundle.cipher = cipher;
|
||||
return next(null, cipherBundle);
|
||||
},
|
||||
], (err, cipherBundle) => {
|
||||
if (err) {
|
||||
log.error('error processing cipher bundle',
|
||||
{ implName, error: err });
|
||||
}
|
||||
return cb(err, cipherBundle);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* createDecipherBundle
|
||||
* @param {object} serverSideEncryptionInfo - info for decryption
|
||||
* @param {number} serverSideEncryptionInfo.cryptoScheme -
|
||||
* cryptoScheme used
|
||||
* @param {string} serverSideEncryptionInfo.algorithm -
|
||||
* algorithm to use
|
||||
* @param {string} serverSideEncryptionInfo.masterKeyId -
|
||||
* key to get master key
|
||||
* @param {boolean} serverSideEncryptionInfo.mandatory -
|
||||
* true for mandatory encryption
|
||||
* @param {buffer} serverSideEncryptionInfo.cipheredDataKey -
|
||||
* ciphered data key
|
||||
* @param {number} offset - offset for decryption
|
||||
* @param {object} log - logger object
|
||||
* @param {function} cb - cb from external call
|
||||
* @returns {undefined}
|
||||
* @callback called with (err, decipherBundle)
|
||||
*/
|
||||
static createDecipherBundle(serverSideEncryptionInfo, offset,
|
||||
log, cb) {
|
||||
if (!serverSideEncryptionInfo.masterKeyId ||
|
||||
!serverSideEncryptionInfo.cipheredDataKey ||
|
||||
!serverSideEncryptionInfo.cryptoScheme) {
|
||||
log.error('Invalid cryptographic information', { implName });
|
||||
return cb(errors.InternalError);
|
||||
}
|
||||
const decipherBundle = {
|
||||
cryptoScheme: serverSideEncryptionInfo.cryptoScheme,
|
||||
decipher: null,
|
||||
};
|
||||
return async.waterfall([
|
||||
function decipherDataKey(next) {
|
||||
return client.decipherDataKey(
|
||||
decipherBundle.cryptoScheme,
|
||||
serverSideEncryptionInfo.masterKeyId,
|
||||
serverSideEncryptionInfo.cipheredDataKey,
|
||||
log, (err, plainTextDataKey) => {
|
||||
log.debug('deciphering a data key');
|
||||
if (err) {
|
||||
log.debug('error from kms',
|
||||
{ implName, error: err });
|
||||
return next(err);
|
||||
}
|
||||
log.trace('data key deciphered by the kms');
|
||||
return next(null, plainTextDataKey);
|
||||
});
|
||||
},
|
||||
function createDecipher(plainTextDataKey, next) {
|
||||
log.debug('creating a decipher');
|
||||
return Common.createDecipher(decipherBundle.cryptoScheme,
|
||||
plainTextDataKey, offset, log, (err, decipher) => {
|
||||
plainTextDataKey.fill(0);
|
||||
if (err) {
|
||||
log.debug('error from kms',
|
||||
{ implName, error: err });
|
||||
return next(err);
|
||||
}
|
||||
log.trace('decipher created by the kms');
|
||||
return next(null, decipher);
|
||||
});
|
||||
},
|
||||
function finishDecipherBundle(decipher, next) {
|
||||
decipherBundle.decipher = decipher;
|
||||
return next(null, decipherBundle);
|
||||
},
|
||||
], (err, decipherBundle) => {
|
||||
if (err) {
|
||||
log.error('error processing decipher bundle',
|
||||
{ implName, error: err });
|
||||
return cb(err);
|
||||
}
|
||||
return cb(err, decipherBundle);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = KMS;
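A worked example of the offset arithmetic used by createDecipher above: AES-256-CTR operates on 16-byte blocks, so a read starting at an arbitrary byte offset first advances the IV by the whole-block count, then discards the remainder of the first decrypted block.

// For a read starting at byte offset 35 with 16-byte AES-CTR blocks:
const aesBlockSize = 16;
const offset = 35;
const blocks = Math.floor(offset / aesBlockSize); // 2 -> passed to _incrementIV
const toSkip = offset % aesBlockSize;             // 3 -> skipped via the dummy write/read
console.log(blocks, toSkip); // 2 3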
|
|
@@ -177,7 +177,7 @@ class LoopbackServerChannel extends EchoChannel {
|
|||
serverExtensions.map(extension =>
|
||||
this.KMIP.TextString(
|
||||
extension.name,
|
||||
extension.value)
|
||||
extension.value),
|
||||
)));
|
||||
}
|
||||
if (queryFunctions.includes('Query Extension Map')) {
|
||||
|
|
|
@@ -50,7 +50,6 @@ class EchoChannel extends EventEmitter {
|
|||
this.clogged = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
class MirrorChannel extends EchoChannel {
|
||||
|
|
|
@@ -0,0 +1,185 @@
|
|||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
|
||||
class DummyProxyResponse {
|
||||
/**
|
||||
* Create a new instance of this dummy class
|
||||
*
|
||||
* This dummy class implements the minimum feature set
|
||||
* of the class http.OutgoingMessage suitable for the
|
||||
* arsenal.storage.metadata.proxy.BucketdRoutes test
|
||||
* without using an actual http server.
|
||||
*
|
||||
* @param {function} doneCB - function called once the response is
|
||||
* ready to be consumed. (err, response, body)
|
||||
*/
|
||||
constructor(doneCB) {
|
||||
this.headers = {};
|
||||
this.body = null;
|
||||
this.endCalled = false;
|
||||
this.responseHead = null;
|
||||
this.doneCB = doneCB;
|
||||
}
|
||||
writeHead(statusCode, statusMessage, header) {
|
||||
this.responseHead = {
|
||||
statusCode,
|
||||
statusMessage,
|
||||
header,
|
||||
};
|
||||
}
|
||||
write(data) {
|
||||
this.body = data;
|
||||
}
|
||||
end(cb) {
|
||||
if (this.endCalled) {
|
||||
return;
|
||||
}
|
||||
this.endCalled = true;
|
||||
process.nextTick(() => {
|
||||
cb(null);
|
||||
this.doneCB(null, this, JSON.parse(this.body));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
class DummyProxyRequest {
|
||||
/**
|
||||
* Create a new instance of this dummy class
|
||||
*
|
||||
* This dummy class implements the minimum feature set
|
||||
* of the class http.IncomingMessage suitable for the
|
||||
* arsenal.storage.metadata.proxy.BucketdRoutes test
|
||||
* without using an actual http server.
|
||||
*
|
||||
* @param {object} params - parameter set describing the intended request
|
||||
* @param {string} params.method - http method to fake
|
||||
* @param {string} params.url - url to fake
|
||||
* @param {string} params.body - request body to fake
|
||||
* @param {boolean} params.json - if set, assume the body to be a JSON
|
||||
* value to be serialized
|
||||
* @param {object} params.headers - request headers to fake
|
||||
*/
|
||||
constructor(params) {
|
||||
this.method = params.method;
|
||||
this.url = params.url;
|
||||
this.json = params.json;
|
||||
this.body = Buffer.from(
|
||||
this.json ? JSON.stringify(params.body) : (params.body || ''));
|
||||
this.headers = params.headers;
|
||||
this.socket = {
|
||||
remoteAddress: '127.0.0.1',
|
||||
remotePort: 32769,
|
||||
};
|
||||
this.dataConsummed = false;
|
||||
this.endCB = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* bind a callback to a particular event on the request processing
|
||||
*
|
||||
* @param {string} event - one of 'data', 'end' or 'error'
|
||||
* @param {function} callback - a function suitable for the associated event
|
||||
* @returns {object} this
|
||||
*/
|
||||
on(event, callback) {
|
||||
switch (event) {
|
||||
case 'data':
|
||||
process.nextTick(() => {
|
||||
callback(this.body);
|
||||
this.dataConsummed = true;
|
||||
if (this.endCB) {
|
||||
this.endCB();
|
||||
}
|
||||
});
|
||||
break;
|
||||
case 'end':
|
||||
if (!this.dataConsummed) {
|
||||
this.endCB = callback;
|
||||
} else {
|
||||
process.nextTick(() => {
|
||||
callback();
|
||||
});
|
||||
}
|
||||
break;
|
||||
case 'error':
|
||||
// never happen with this mockup class
|
||||
break;
|
||||
default:
|
||||
process.nextTick(() => callback(new Error(
|
||||
`Unsupported DummyProxyRequest.on event '${event}'`)));
|
||||
}
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
class RequestDispatcher {
|
||||
/**
|
||||
* Construct a new RequestDispatcher object.
|
||||
*
|
||||
* This class connects the provided Routes class to a dummy interface
|
||||
* that enables tests to perform requests without using an actual http
|
||||
* server.
|
||||
*
|
||||
* @param {object} routes - an instance of a Routes dispatcher class
|
||||
*/
|
||||
constructor(routes) {
|
||||
this.routes = routes;
|
||||
}
|
||||
|
||||
/**
|
||||
* fake a POST request on the associated Routes dispatcher
|
||||
*
|
||||
* @param {string} path - the path of the object to be posted
|
||||
* @param {object} objectMD - the metadata to post for this object
|
||||
* @param {function} callback - called once the request has been processed
|
||||
* with these parameters (err)
|
||||
* @returns {undefined}
|
||||
*/
|
||||
post(path, objectMD, callback) {
|
||||
this.routes.dispatch(new DummyProxyRequest({
|
||||
method: 'POST',
|
||||
url: path,
|
||||
json: true,
|
||||
body: objectMD,
|
||||
headers: {},
|
||||
}), new DummyProxyResponse(callback));
|
||||
}
|
||||
|
||||
/**
|
||||
* fake a GET request on the associated Routes dispatcher
|
||||
*
|
||||
* @param {string} path - the path of the object to be retrieved
|
||||
* @param {function} callback - called once the request has been processed
|
||||
* with these parameters (err, response, body)
|
||||
* @returns {undefined}
|
||||
*/
|
||||
get(path, callback) {
|
||||
this.routes.dispatch(new DummyProxyRequest({
|
||||
method: 'GET',
|
||||
url: path,
|
||||
json: true,
|
||||
body: '',
|
||||
headers: {},
|
||||
}), new DummyProxyResponse(callback));
|
||||
}
|
||||
|
||||
/**
|
||||
* fake a DELETE request on the associated Routes dispatcher
|
||||
*
|
||||
* @param {string} path - the path of the object to be deleted
|
||||
* @param {function} callback - called once the request has been processed
|
||||
* with these parameters (err)
|
||||
* @returns {undefined}
|
||||
*/
|
||||
delete(path, callback) {
|
||||
this.routes.dispatch(new DummyProxyRequest({
|
||||
method: 'DELETE',
|
||||
url: path,
|
||||
json: true,
|
||||
body: '',
|
||||
headers: {},
|
||||
}), new DummyProxyResponse(callback));
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { RequestDispatcher };
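A hypothetical usage of the RequestDispatcher helper above; the bucketd-style path, the placeholder metadata, and the routes object are illustrative assumptions, not taken from the BucketdRoutes tests themselves.

// 'routes' is a Routes dispatcher instance built by the surrounding test;
// the path format and metadata below are placeholders.
const dispatcher = new RequestDispatcher(routes);

dispatcher.post('/default/bucket/mybucket/mykey', { some: 'objectMD' }, err => {
    if (err) {
        throw err;
    }
    dispatcher.get('/default/bucket/mybucket/mykey', (getErr, response, body) => {
        // body is the JSON-parsed object metadata echoed back by the routes
    });
});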
|