Compare commits

..

450 Commits

Author SHA1 Message Date
Vitaliy Filippov facc276e8b move away require libv2/config from libv2/redis 2024-08-13 02:17:04 +03:00
Vitaliy Filippov c8e3999fb3 Require defaults.json instead of fs.readFileSync 2024-08-13 01:14:02 +03:00
Vitaliy Filippov 9fa777cdba Split require utils to help webpack remove libV2 2024-08-13 01:10:22 +03:00
Vitaliy Filippov e6d48f3b47 Make vault client optional / support receiving its instance from outside 2024-07-23 19:22:54 +03:00
Vitaliy Filippov 0050625f81 Change git dependency URLs 2024-07-21 18:12:40 +03:00
Vitaliy Filippov 0a66c57a0a Remove yarn lock 2024-07-21 17:34:07 +03:00
Vitaliy Filippov 6711c4241a Forget LFS object 2024-07-21 17:34:07 +03:00
Jonathan Gramain 3800e4b185 Merge remote-tracking branch 'origin/w/7.70/bugfix/UTAPI-105-useListOfSentinelNodes' into w/8.1/bugfix/UTAPI-105-useListOfSentinelNodes 2024-06-27 10:09:15 -07:00
Jonathan Gramain 20667ff741 Merge remote-tracking branch 'origin/bugfix/UTAPI-105-useListOfSentinelNodes' into w/7.70/bugfix/UTAPI-105-useListOfSentinelNodes 2024-06-27 10:06:43 -07:00
Jonathan Gramain 88d18f3eb6 UTAPI-105 bump version 2024-06-25 15:10:02 -07:00
Jonathan Gramain 426dfd0860 bf: UTAPI-105 UtapiReindex: use list of redis sentinels
Use a list of Redis sentinels that are running on stateful nodes only,
instead of localhost.

Previously, a stateless-only node wouldn't have a local sentinel node
running, causing UtapiReindex to fail.

Added a failover mechanism in case of connection error on the current
sentinel, to try each other one in turn.
2024-06-25 15:10:02 -07:00
bert-e ac4fd2c5f5 Merge branch 'improvement/UTAPI-103/support_reindex_by_account' into tmp/octopus/w/8.1/improvement/UTAPI-103/support_reindex_by_account 2024-06-12 18:28:11 +00:00
Taylor McKinnon 69b94c57aa impr(UTAPI-103): Remove undeclared variable from log message 2024-06-12 11:27:16 -07:00
Taylor McKinnon f5262b7875 impr(UTAPI-103): Support reindexing by account 2024-06-12 11:27:16 -07:00
Taylor McKinnon ee1c0fcd1b impr(UTAPI-103): Support multiple specified buckets and prep for account support 2024-06-12 11:27:16 -07:00
Taylor McKinnon 5efb70dc63 impr(UTAPI-103): Add --dry-run option 2024-06-12 11:27:16 -07:00
Taylor McKinnon 210ba2fd82 impr(UTAPI-103): Add BucketDClient.get_bucket_md() 2024-06-06 12:10:40 -07:00
Taylor McKinnon 34af848b93 impr(UTAPI-103): Add BucketNotFound Exception for _get_bucket_attributes 2024-06-06 12:08:40 -07:00
Taylor McKinnon 402fd406e3 impr(UTAPI-103): Add small LRU cache to BucketDClient._get_bucket_attributes 2024-06-06 12:06:46 -07:00
bert-e f9ae694c0c Merge branch 'w/7.70/bugfix/UTAPI-101/fix_release_workflow' into tmp/octopus/w/8.1/bugfix/UTAPI-101/fix_release_workflow 2024-05-16 17:16:03 +00:00
bert-e 960d990e89 Merge branch 'bugfix/UTAPI-101/fix_release_workflow' into tmp/octopus/w/7.70/bugfix/UTAPI-101/fix_release_workflow 2024-05-16 17:16:03 +00:00
Taylor McKinnon 7fde3488b9 impr(UTAPI-101): Remove secrets: inherit from release workflow 2024-05-15 10:32:38 -07:00
Taylor McKinnon 79c2ff0c72 Merge remote-tracking branch 'origin/w/7.70/bugfix/UTAPI-100/utapi_python_version_fix' into w/8.1/bugfix/UTAPI-100/utapi_python_version_fix 2024-05-07 10:56:37 -07:00
Taylor McKinnon ae904b89bf Merge remote-tracking branch 'origin/bugfix/UTAPI-100/utapi_python_version_fix' into w/7.70/bugfix/UTAPI-100/utapi_python_version_fix 2024-05-07 10:55:23 -07:00
Taylor McKinnon 60db367054 bf(UTAPI-100): Bump version 2024-05-06 11:20:17 -07:00
Taylor McKinnon c9ba521b6d bf(UTAPI-100): Remove use of 3.7+ only parameter 2024-05-06 11:16:58 -07:00
Francois Ferrand ce89418788
Update Release.md for ghcr migration
Issue: UTAPI-99
2024-04-18 15:55:13 +02:00
Francois Ferrand 5faaf493a5
Merge branch 'w/7.70/improvement/VAULT-567' into w/8.1/improvement/VAULT-567 2024-04-18 15:54:58 +02:00
Francois Ferrand da143dba67
Merge branch 'w/7.10/improvement/VAULT-567' into w/7.70/improvement/VAULT-567 2024-04-18 15:54:35 +02:00
Francois Ferrand 6e0ec16f00
Fix caching of python packages
Issue: UTAPI-99
2024-04-18 15:54:04 +02:00
Francois Ferrand 4449f44c9a
Bump github actions
- docker-build@v2
- checkout@v4
- setup-buildx@v3
- setup-node@v4
- setup-python@v5
- login@v3
- build-push@v5
- gh-release@v2
- ssh-to-runner@1.7.0

Issue: UTAPI-99
2024-04-18 15:53:26 +02:00
Francois Ferrand c4e786d6cd
Migrate to ghcr
Issue: UTAPI-99
2024-04-18 15:53:20 +02:00
Francois Ferrand bdb483e6b4
Merge branch 'improvement/UTAPI-99' into w/7.10/improvement/VAULT-567 2024-04-18 15:52:47 +02:00
Francois Ferrand 20916c6f0e
Fix caching of python packages
Issue: UTAPI-99
2024-04-18 15:47:05 +02:00
Francois Ferrand 5976018d0e
Bump github actions
- checkout@v4
- setup-qemu@v3
- setup-buildx@v3
- setup-node@v4
- setup-python@v5
- login@v3
- build-push@v5
- gh-release@v2

Issue: UTAPI-99
2024-04-17 15:02:44 +02:00
Francois Ferrand 9e1f14ed17
Migrate to ghcr
Issue: UTAPI-99
2024-04-17 14:42:58 +02:00
bert-e 34699432ee Merge branch 'w/7.70/improvement/UTAPI-98/bump-redis' into tmp/octopus/w/8.1/improvement/UTAPI-98/bump-redis 2024-01-22 15:39:38 +00:00
bert-e 438a25982d Merge branch 'improvement/UTAPI-98/bump-redis' into tmp/octopus/w/7.70/improvement/UTAPI-98/bump-redis 2024-01-22 15:39:37 +00:00
Nicolas Humbert 8804e9ff69 UTAPI-98 Bump Redis version 2024-01-22 16:36:01 +01:00
Taylor McKinnon 27e1c44829 Merge remote-tracking branch 'origin/w/7.70/improvement/UTAPI-97/reindex_only_latest_for_olock_buckets_option' into w/8.1/improvement/UTAPI-97/reindex_only_latest_for_olock_buckets_option 2023-12-11 09:38:41 -08:00
Taylor McKinnon e8882a28cc Merge remote-tracking branch 'origin/improvement/UTAPI-97/reindex_only_latest_for_olock_buckets_option' into w/7.70/improvement/UTAPI-97/reindex_only_latest_for_olock_buckets_option 2023-12-11 09:37:25 -08:00
Taylor McKinnon b93998118c impr(UTAPI-97): Bump version 2023-12-11 09:25:01 -08:00
Taylor McKinnon 9195835f70 impr(UTAPI-97): Add config option to reindex only latest version in object locked buckets 2023-12-11 09:25:01 -08:00
bert-e 8dfb06cdbc Merge branch 'w/7.70/improvement/UTAPI-96/switch_to_scality_ssh_action' into tmp/octopus/w/8.1/improvement/UTAPI-96/switch_to_scality_ssh_action 2023-10-09 16:32:55 +00:00
bert-e 934136635e Merge branch 'improvement/UTAPI-96/switch_to_scality_ssh_action' into tmp/octopus/w/7.70/improvement/UTAPI-96/switch_to_scality_ssh_action 2023-10-09 16:32:55 +00:00
Taylor McKinnon 9f36624799 impr(UTAPI-96): Switch to scality/actions/action-ssh-to-runner 2023-10-09 09:30:34 -07:00
Taylor McKinnon 59aa9b9ab9 Merge remote-tracking branch 'origin/w/7.70/bugfix/UTAPI-92/bump_utapi_version' into w/8.1/bugfix/UTAPI-92/bump_utapi_version 2023-05-31 13:45:38 -07:00
Taylor McKinnon 9eecef0a24 Merge remote-tracking branch 'origin/bugfix/UTAPI-92/bump_utapi_version' into w/7.70/bugfix/UTAPI-92/bump_utapi_version 2023-05-31 13:44:34 -07:00
Taylor McKinnon c29af16e46 UTAPI-92: Bump version 2023-05-31 13:43:04 -07:00
bert-e 8757ac8bb0 Merge branch 'w/7.70/bugfix/UTAPI-92/fix_redis_password_config' into tmp/octopus/w/8.1/bugfix/UTAPI-92/fix_redis_password_config 2023-05-26 17:39:02 +00:00
bert-e 34ceac8563 Merge branch 'bugfix/UTAPI-92/fix_redis_password_config' into tmp/octopus/w/7.70/bugfix/UTAPI-92/fix_redis_password_config 2023-05-26 17:39:02 +00:00
Taylor McKinnon 7f9c9aa202 bf(UTAPI-92): Fix redis password loading 2023-05-25 15:03:36 -07:00
Taylor McKinnon 41b690aa5d Merge remote-tracking branch 'origin/w/7.70/bugfix/UTAPI-88/bump_version_7_10_13' into w/8.1/bugfix/UTAPI-88/bump_version_7_10_13 2023-04-11 16:10:46 -07:00
Taylor McKinnon 3f08327fe6 Merge remote-tracking branch 'origin/bugfix/UTAPI-88/bump_version_7_10_13' into w/7.70/bugfix/UTAPI-88/bump_version_7_10_13 2023-04-11 16:09:35 -07:00
Taylor McKinnon 84bc7e180f bf(UTAPI-88): Release 7.10.13 2023-04-11 16:07:23 -07:00
bert-e e328095606 Merge branches 'w/8.1/bugfix/UTAPI-88-do-not-error-500-in-case-of-negative-metric' and 'q/1279/7.70/bugfix/UTAPI-88-do-not-error-500-in-case-of-negative-metric' into tmp/octopus/q/8.1 2023-04-10 23:34:43 +00:00
bert-e cb9d2b8d2b Merge branches 'w/7.70/bugfix/UTAPI-88-do-not-error-500-in-case-of-negative-metric' and 'q/1279/7.10/bugfix/UTAPI-88-do-not-error-500-in-case-of-negative-metric' into tmp/octopus/q/7.70 2023-04-10 23:34:42 +00:00
bert-e de73fe9ee0 Merge branch 'bugfix/UTAPI-88-do-not-error-500-in-case-of-negative-metric' into q/7.10 2023-04-10 23:34:42 +00:00
bert-e 0d33f81e35 Merge branch 'w/7.70/bugfix/UTAPI-88-do-not-error-500-in-case-of-negative-metric' into tmp/octopus/w/8.1/bugfix/UTAPI-88-do-not-error-500-in-case-of-negative-metric 2023-04-10 23:28:07 +00:00
bert-e 13fb668d94 Merge branch 'bugfix/UTAPI-88-do-not-error-500-in-case-of-negative-metric' into tmp/octopus/w/7.70/bugfix/UTAPI-88-do-not-error-500-in-case-of-negative-metric 2023-04-10 23:28:07 +00:00
scality-gelbart 0fc08f3d7d bf(UTAPI-88): Replace transient state API error with info log message and 200 response 2023-04-10 16:27:21 -07:00
Naren 334c4c26a1 Merge remote-tracking branch 'origin/improvement/UTAPI-91-release-7-70-0' into w/8.1/improvement/UTAPI-91-release-7-70-0 2023-03-28 18:36:52 -07:00
Naren 5319a24704 impr: UTAPI-91 bump version to 7.70.0 2023-03-28 18:05:13 -07:00
Naren ed3628ef01 impr: UTAPI-90 bump version to 8.1.10 2023-03-15 11:20:42 -07:00
Naren 34e881f0e9 impr: UTAPI-90 upgrade bucketclient and vaultclient 2023-03-15 11:19:21 -07:00
Naren 13befbd535 Merge remote-tracking branch 'origin/improvement/UTAPI-90-upgrade-prom-client' into w/8.1/improvement/UTAPI-90-upgrade-prom-client 2023-03-15 11:12:08 -07:00
Naren 347cf3c1cb impr: UTAPI-90 bump version to 7.10.12 2023-03-15 11:03:06 -07:00
Naren 9b5fe56f48 impr: UTAPI-90 upgrade bucketclient and vaultclient 2023-03-15 11:02:22 -07:00
Naren 988f478957 impr: UTAPI-90 upgrade arsenal for prom-client upgrade 2023-03-14 18:54:16 -07:00
bert-e 5f24e749ea Merge branch 'improvement/UTAPI-89-update-metric-names' into tmp/octopus/w/8.1/improvement/UTAPI-89-update-metric-names 2023-02-28 16:56:12 +00:00
Naren 480bde079b impr UTAPI-89 update metric names 2023-02-28 08:54:25 -08:00
Taylor McKinnon 0ba5a02ba7
Bump version to 8.1.9 2022-10-26 11:47:44 -07:00
Taylor McKinnon e75ce33f35 Merge remote-tracking branch 'origin/bugfix/UTAPI-87/handle_zero_byte_objs_in_ver_susp_buck' into w/8.1/bugfix/UTAPI-87/handle_zero_byte_objs_in_ver_susp_buck 2022-10-25 13:36:06 -07:00
Taylor McKinnon 3ec818bca1 bf(UTAPI-87): Bump version to 7.10.11 2022-10-25 13:34:21 -07:00
Taylor McKinnon c3111dfadf bf(UTAPI-87): Handle deleting zero byte objects in version suspended buckets 2022-10-25 13:34:21 -07:00
Taylor McKinnon 451f88d27e Merge remote-tracking branch 'origin/bugfix/UTAPI-85/bump_version' into w/8.1/bugfix/UTAPI-85/bump_version 2022-10-17 14:47:06 -07:00
Taylor McKinnon 71f162169d bf(UTAPI-85): Bump version to 7.10.10 2022-10-17 14:45:26 -07:00
bert-e c0abf3e53f Merge branches 'w/8.1/bugfix/UTAPI-85/allow_host_port_override' and 'q/1271/7.10/bugfix/UTAPI-85/allow_host_port_override' into tmp/octopus/q/8.1 2022-10-17 21:24:40 +00:00
bert-e 3e740a2f6a Merge branch 'bugfix/UTAPI-85/allow_host_port_override' into q/7.10 2022-10-17 21:24:40 +00:00
bert-e 93134e6ccb Merge branches 'w/8.1/bugfix/UTAPI-82-v1-delete-inconsistency' and 'q/1267/7.10/bugfix/UTAPI-82-v1-delete-inconsistency' into tmp/octopus/q/8.1 2022-10-15 00:09:09 +00:00
bert-e b4b52c0de7 Merge branch 'bugfix/UTAPI-82-v1-delete-inconsistency' into q/7.10 2022-10-15 00:09:09 +00:00
Artem Bakalov 4faac178ef Merge remote-tracking branch 'origin/bugfix/UTAPI-82-v1-delete-inconsistency' into w/8.1/bugfix/UTAPI-82-v1-delete-inconsistency 2022-10-14 17:01:02 -07:00
Artem Bakalov 193d1a5d92 UTAPI-82 fix delete inconsistency 2022-10-14 16:55:16 -07:00
bert-e f90213d3d5 Merge branch 'bugfix/UTAPI-85/allow_host_port_override' into tmp/octopus/w/8.1/bugfix/UTAPI-85/allow_host_port_override 2022-10-13 18:24:33 +00:00
Taylor McKinnon 7eb35d51f4 bf(UTAPI-85): Allow host and port to be overridden 2022-10-13 11:02:31 -07:00
bert-e 2e04a5cc44 Merge branch 'improvement/UTAPI-83/provide_warp10_image' into tmp/octopus/w/8.1/improvement/UTAPI-83/provide_warp10_image 2022-10-05 20:17:37 +00:00
Taylor McKinnon 52520e4de1 impr(UTAPI-83): Add warp 10 release workflow 2022-10-05 13:17:05 -07:00
Taylor McKinnon 3391130d43 Merge remote-tracking branch 'origin/bugfix/UTAPI-84/fix_nodesvc_base_config' into w/8.1/bugfix/UTAPI-84/fix_nodesvc_base_config 2022-10-03 15:34:56 -07:00
Taylor McKinnon f3a9a57f58 bf(UTAPI-84): Fix nodesvc-base image config 2022-10-03 15:33:31 -07:00
Taylor McKinnon c0aa52beab Merge remote-tracking branch 'origin/feature/UTAPI-71/add_nodesvc_based_image_and_release_workflow' into w/8.1/feature/UTAPI-71/add_nodesvc_based_image_and_release_workflow 2022-09-23 10:46:41 -07:00
Taylor McKinnon 0ae108f15e ft(UTAPI-71): Rework release workflow to support S3C releases 2022-09-23 10:45:16 -07:00
Taylor McKinnon 2f99e1ddd5 ft(UTAPI-71): Split v2 tests into with/without sensision enabled 2022-09-23 10:45:16 -07:00
Taylor McKinnon cbeae49d47 ft(UTAPI-71): Fix sensision inside warp 10 image 2022-09-23 10:45:16 -07:00
Taylor McKinnon 64d3ecb10f ft(UTAPI-71): Call build-ci from tests 2022-09-23 10:45:16 -07:00
Taylor McKinnon df57f68b9a ft(UTAPI-71): Add build workflows 2022-09-23 10:45:16 -07:00
Taylor McKinnon db5a43f412 ft(UTAPI-71): Backport Dockerfile from development/8.1 branch 2022-09-22 10:52:20 -07:00
Taylor McKinnon 116a2108b0 ft(UTAPI-71): Add nodesvc-base based image 2022-09-22 10:52:20 -07:00
Taylor McKinnon 750cabc565 Merge remote-tracking branch 'origin/bugfix/UTAPI-81/add_bucket_tagging_methods' into w/8.1/bugfix/UTAPI-81/add_bucket_tagging_methods 2022-08-04 12:53:02 -07:00
Taylor McKinnon 469b862a69 bf(UTAPI-81): Add bucket tagging operations 2022-08-04 12:49:23 -07:00
Taylor McKinnon 62bf4d86e6 Merge remote-tracking branch 'origin/improvement/UTAPI-80/release_7_10_7' into w/8.1/improvement/UTAPI-80/release_7_10_7 2022-07-22 11:19:18 -07:00
Taylor McKinnon a072535050 impr(UTAPI-80): Release 7.10.7 2022-07-22 11:17:56 -07:00
bert-e 29b52a0346 Merge branch 'bugfix/UTAPI-78/fix_user_auth_with_no_resources' into tmp/octopus/w/8.1/bugfix/UTAPI-78/fix_user_auth_with_no_resources 2022-07-21 16:38:05 +00:00
Taylor McKinnon 1168720f98 bf(UTAPI-78): Fix second stage user auth with no resources 2022-07-20 09:37:34 -07:00
Jonathan Gramain ff5a75bb11 Merge remote-tracking branch 'origin/bugfix/UTAPI-77-bumpOasTools' into w/8.1/bugfix/UTAPI-77-bumpOasTools 2022-06-20 15:10:40 -07:00
bert-e 65726f6d0b Merge branches 'w/8.1/feature/UTAPI-76/breakout_leveldb_and_datalog' and 'q/1251/7.10/feature/UTAPI-76/breakout_leveldb_and_datalog' into tmp/octopus/q/8.1 2022-06-08 21:59:45 +00:00
bert-e eed137768d Merge branches 'w/8.1/feature/UTAPI-75/Add_metrics_for_latest_check_snapshot_timestamps' and 'q/1249/7.10/feature/UTAPI-75/Add_metrics_for_latest_check_snapshot_timestamps' into tmp/octopus/q/8.1 2022-06-07 22:32:44 +00:00
bert-e 0257e97bc2 Merge branch 'feature/UTAPI-76/breakout_leveldb_and_datalog' into tmp/octopus/w/8.1/feature/UTAPI-76/breakout_leveldb_and_datalog 2022-06-07 22:32:07 +00:00
bert-e 55b640faba Merge branch 'feature/UTAPI-75/Add_metrics_for_latest_check_snapshot_timestamps' into tmp/octopus/w/8.1/feature/UTAPI-75/Add_metrics_for_latest_check_snapshot_timestamps 2022-06-03 16:43:12 +00:00
bert-e 54516db267 Merge branch 'feature/UTAPI-70/add_metrics_to_http_server' into tmp/octopus/w/8.1/feature/UTAPI-70/add_metrics_to_http_server 2022-05-26 16:50:41 +00:00
bert-e 1c6c159423 Merge branches 'w/8.1/feature/UTAPI-69/Add_async_task_metrics' and 'q/1239/7.10/feature/UTAPI-69/Add_async_task_metrics' into tmp/octopus/q/8.1 2022-05-26 16:32:49 +00:00
bert-e 22805fe7e7 Merge branch 'feature/UTAPI-69/Add_async_task_metrics' into tmp/octopus/w/8.1/feature/UTAPI-69/Add_async_task_metrics 2022-05-26 16:23:04 +00:00
bert-e ca82189fd7 Merge branches 'w/8.1/feature/UTAPI-67/Add_base_prometheus_framework' and 'q/1235/7.10/feature/UTAPI-67/Add_base_prometheus_framework' into tmp/octopus/q/8.1 2022-05-24 17:46:30 +00:00
bert-e 2f26d380f6 Merge branch 'feature/UTAPI-67/Add_base_prometheus_framework' into tmp/octopus/w/8.1/feature/UTAPI-67/Add_base_prometheus_framework 2022-05-24 17:12:06 +00:00
Taylor McKinnon 9f1552488c impr(UTAPI-66): Update Dockerfile with --network-concurrency 2022-05-18 10:06:08 -07:00
bert-e bf366e9472 Merge branch 'improvement/UTAPI-66/migrate_to_arsenal_7_10_18' into tmp/octopus/w/8.1/improvement/UTAPI-66/migrate_to_arsenal_7_10_18 2022-05-18 16:34:32 +00:00
bert-e 002a7ad1ca Merge branch 'improvement/UTAPI-66/migrate_to_arsenal_7_10_18' into tmp/octopus/w/8.1/improvement/UTAPI-66/migrate_to_arsenal_7_10_18 2022-05-18 16:33:37 +00:00
bert-e c2bff35bc6 Merge branch 'improvement/UTAPI-66/migrate_to_arsenal_7_10_18' into tmp/octopus/w/8.1/improvement/UTAPI-66/migrate_to_arsenal_7_10_18 2022-05-18 16:30:07 +00:00
Taylor McKinnon 280c4bae3a Merge remote-tracking branch 'origin/improvement/UTAPI-66/migrate_to_arsenal_7_10_18' into w/8.1/improvement/UTAPI-66/migrate_to_arsenal_7_10_18 2022-05-17 13:39:36 -07:00
bert-e 3c09767315 Merge branch 'bugfix/UTAPI-72/add_missing_await_to_pushMetric' into tmp/octopus/w/8.1/bugfix/UTAPI-72/add_missing_await_to_pushMetric 2022-05-06 20:13:26 +00:00
Erwan Bernard 2c1d25a50e Merge remote-tracking branch 'origin/w/7.10/feature/RELENG-5645/patch-usage-of-action-gh-release' into w/8.1/feature/RELENG-5645/patch-usage-of-action-gh-release 2022-04-01 17:01:05 +02:00
Xin LI 3eed7b295d bugfix: UTAPI-64 update vaultclient, bucketclient, oas-tools to fix critical 2022-03-31 19:45:00 +02:00
bert-e c359ddee7e Merge branch 'w/7.10/bugfix/UTAPI-63/fix_arsenal_require_for_dhparam' into tmp/octopus/w/8.1/bugfix/UTAPI-63/fix_arsenal_require_for_dhparam 2022-03-11 18:09:51 +00:00
Naren 7d7b46bc5e feature: UTAPI-59 update yarn.lock 2022-02-07 16:18:03 -08:00
Naren b82bed39db Merge remote-tracking branch 'origin/w/7.10/feature/UTAPI-59-update-version-and-deps' into w/8.1/feature/UTAPI-59-update-version-and-deps 2022-02-07 16:15:18 -08:00
Naren cc3bceebcf Merge remote-tracking branch 'origin/w/7.10/feature/UTAPI-59/UpgradeToNode16' into w/8.1/feature/UTAPI-59/UpgradeToNode16 2022-02-07 14:22:20 -08:00
Nicolas Humbert 20479f0dfa Merge remote-tracking branch 'origin/development/8.1' into w/8.1/feature/UTAPI-59/UpgradeToNode16 2022-02-04 16:56:05 +01:00
bert-e 7179917edc Merge branches 'w/8.1/feature/UTAPI-44-migrate-github-actions' and 'q/1173/7.10/feature/UTAPI-44-migrate-github-actions' into tmp/octopus/q/8.1 2022-02-03 22:18:40 +00:00
Naren 92adb8c320 Merge remote-tracking branch 'origin/improvement/UTAPI-61-lock-bucket-client-version' into w/8.1/improvement/UTAPI-61-lock-bucket-client-version 2022-02-02 16:14:42 -08:00
Thomas Carmet b770331a12 Merge remote-tracking branch 'origin/w/7.10/feature/UTAPI-44-migrate-github-actions' into w/8.1/feature/UTAPI-44-migrate-github-actions 2022-02-02 11:09:09 -08:00
bert-e 765b149cbf Merge branch 'w/7.10/feature/UTAPI-59/UpgradeToNode16' into tmp/octopus/w/8.1/feature/UTAPI-59/UpgradeToNode16 2022-01-26 23:38:35 +00:00
Nicolas Humbert 1fc6c29864 update Docker node image 2022-01-26 15:50:19 -05:00
Nicolas Humbert 135581fa63 Merge remote-tracking branch 'origin/w/7.10/feature/UTAPI-59/UpgradeToNode16' into w/8.1/feature/UTAPI-59/UpgradeToNode16 2022-01-26 15:47:41 -05:00
Artem Bakalov c27442cc89 S3C-5397 - adds exponential backoff to metadata requests to prevent failures during leader elections 2022-01-26 11:02:26 -08:00
bert-e 89bb6c6e5d Merge branch 'improvement/UTAPI-58/limit_max_size_of_snapshots' into tmp/octopus/w/8.1/improvement/UTAPI-58/limit_max_size_of_snapshots 2022-01-07 19:45:00 +00:00
bert-e 792580c6d6 Merge branch 'improvement/UTAPI-58/limit_max_size_of_snapshots' into tmp/octopus/w/8.1/improvement/UTAPI-58/limit_max_size_of_snapshots 2022-01-06 20:00:26 +00:00
bert-e 29475f1b9a Merge branches 'w/8.1/improvement/UTAPI-55/warp10_request_error_logging' and 'q/1202/7.10/improvement/UTAPI-55/warp10_request_error_logging' into tmp/octopus/q/8.1 2021-12-06 21:47:28 +00:00
bert-e 255f428b84 Merge branch 'improvement/UTAPI-55/warp10_request_error_logging' into tmp/octopus/w/8.1/improvement/UTAPI-55/warp10_request_error_logging 2021-12-06 20:51:32 +00:00
bert-e 202dc39eb5 Merge branch 'feature/UTAPI-56/expose_warp10_request_timeouts_in_config' into tmp/octopus/w/8.1/feature/UTAPI-56/expose_warp10_request_timeouts_in_config 2021-12-03 19:07:23 +00:00
bert-e 25bd285d35 Merge branch 'bugfix/UTAPI-54/fix_service_user_test' into tmp/octopus/w/8.1/bugfix/UTAPI-54/fix_service_user_test 2021-11-29 20:44:52 +00:00
Taylor McKinnon 5a4ba9f72a bf: add yarn.lock to image 2021-11-23 10:19:41 -08:00
bert-e 494381beec Merge branch 'bugfix/UTAPI-53/handle_missing_content_length' into tmp/octopus/w/8.1/bugfix/UTAPI-53/handle_missing_content_length 2021-11-23 17:58:18 +00:00
Taylor McKinnon be10ca2ba8 Merge remote-tracking branch 'origin/feature/UTAPI-50/bump_version_to_7.10.5' into w/8.1/feature/UTAPI-50/bump_version_to_7.10.5 2021-11-18 10:05:37 -08:00
bert-e 1800407606 Merge branch 'bugfix/UTAPI-49/fix_config_file_schema_event_filter' into tmp/octopus/w/8.1/bugfix/UTAPI-49/fix_config_file_schema_event_filter 2021-11-17 22:39:27 +00:00
Taylor McKinnon 029fe17019 Merge remote-tracking branch 'origin/feature/UTAPI-48/bump_version_to_7.10.4' into w/8.1/feature/UTAPI-48/bump_version_to_7.10.4 2021-11-17 09:46:16 -08:00
bert-e ada9e2bf55 Merge branches 'w/8.1/bugfix/UTAPI-46-redisv2-backoff' and 'q/1180/7.10/bugfix/UTAPI-46-redisv2-backoff' into tmp/octopus/q/8.1 2021-11-16 23:27:56 +00:00
bert-e 260d2f83ef Merge branch 'bugfix/UTAPI-46-redisv2-backoff' into tmp/octopus/w/8.1/bugfix/UTAPI-46-redisv2-backoff 2021-11-16 22:55:55 +00:00
bert-e bbb6764aa7 Merge branches 'w/8.1/feature/UTAPI-43/event_allow_deny_filter' and 'q/1174/7.10/feature/UTAPI-43/event_allow_deny_filter' into tmp/octopus/q/8.1 2021-11-16 17:50:25 +00:00
bert-e e63f9c9009 Merge branch 'feature/UTAPI-43/event_allow_deny_filter' into tmp/octopus/w/8.1/feature/UTAPI-43/event_allow_deny_filter 2021-11-16 17:35:45 +00:00
Rached Ben Mustapha 5650b072ce Merge remote-tracking branch 'origin/bugfix/UTAPI-38-wait-for-redis-ready-main' into w/8.1/bugfix/UTAPI-38-wait-for-redis-ready-main 2021-11-08 18:25:43 +00:00
Rached Ben Mustapha 1ef954975e Merge remote-tracking branch 'origin/bugfix/UTAPI-38-wait-for-redis-ready-main' into w/8.1/bugfix/UTAPI-38-wait-for-redis-ready-main 2021-11-05 02:47:51 +00:00
bert-e 989715af95 Merge branches 'w/8.1/feature/UTAPI-41/release_7_10_2' and 'q/1169/7.10/feature/UTAPI-41/release_7_10_2' into tmp/octopus/q/8.1 2021-11-01 22:13:17 +00:00
Taylor McKinnon 87887e57a0 Merge remote-tracking branch 'origin/feature/UTAPI-41/release_7_10_2' into w/8.1/feature/UTAPI-41/release_7_10_2 2021-11-01 12:39:30 -07:00
bert-e a8bccfd261 Merge branches 'w/8.1/bugfix/UTAPI-42/change_upstream_warp10_repo' and 'q/1167/7.10/bugfix/UTAPI-42/change_upstream_warp10_repo' into tmp/octopus/q/8.1 2021-11-01 19:31:00 +00:00
bert-e 89bd9751e4 Merge branch 'bugfix/UTAPI-42/change_upstream_warp10_repo' into tmp/octopus/w/8.1/bugfix/UTAPI-42/change_upstream_warp10_repo 2021-11-01 19:24:49 +00:00
bert-e b1d55217a8 Merge branches 'w/8.1/bugfix/UTAPI-39/add_crr_metrics_for_v2' and 'q/1159/7.10/bugfix/UTAPI-39/add_crr_metrics_for_v2' into tmp/octopus/q/8.1 2021-11-01 19:17:43 +00:00
bert-e 0bb15ae1c6 Merge branch 'bugfix/UTAPI-39/add_crr_metrics_for_v2' into tmp/octopus/w/8.1/bugfix/UTAPI-39/add_crr_metrics_for_v2 2021-11-01 19:10:24 +00:00
bert-e cb9e79be48 Merge branch 'w/7.10/feature/UTAPI-40/release_7_4_11' into tmp/octopus/w/8.1/feature/UTAPI-40/release_7_4_11 2021-10-29 23:04:25 +00:00
bert-e c528f75d98 Merge branches 'w/8.1/bugfix/UTAPI-34-implementCRRActions' and 'q/1154/7.10/bugfix/UTAPI-34-implementCRRActions' into tmp/octopus/q/8.1 2021-10-29 18:40:12 +00:00
bert-e 14779a24ec Merge branch 'w/7.10/bugfix/UTAPI-34-implementCRRActions' into tmp/octopus/w/8.1/bugfix/UTAPI-34-implementCRRActions 2021-10-28 23:08:57 +00:00
bert-e df79f65abf Merge branches 'w/8.1/improvement/UTAPI-36/bump_vault_cpu_req' and 'q/1152/7.10/improvement/UTAPI-36/bump_vault_cpu_req' into tmp/octopus/q/8.1 2021-10-22 16:04:50 +00:00
bert-e be1be375f3 Merge branch 'improvement/UTAPI-36/bump_vault_cpu_req' into tmp/octopus/w/8.1/improvement/UTAPI-36/bump_vault_cpu_req 2021-10-21 22:00:49 +00:00
bert-e 3a5d379510 Merge branch 'w/7.10/bugfix/UTAPI-28/catch_all_listing_errors' into tmp/octopus/w/8.1/bugfix/UTAPI-28/catch_all_listing_errors 2021-10-20 23:41:11 +00:00
bert-e 3a80fb708e Merge branch 'w/7.10/bugfix/UTAPI-35/backport_fixes_for_74' into tmp/octopus/w/8.1/bugfix/UTAPI-35/backport_fixes_for_74 2021-10-19 20:46:32 +00:00
Taylor McKinnon 945aa9665f Merge remote-tracking branch 'origin/feature/UTAPI-33/add_ensure_service_user' into w/8.1/feature/UTAPI-33/add_ensure_service_user 2021-10-15 13:20:12 -07:00
bert-e 3b2d4a18d4 Merge branch 'improvement/UTAPI-32/change_service_user_arnPrefix_to_full_arn' into tmp/octopus/w/8.1/improvement/UTAPI-32/change_service_user_arnPrefix_to_full_arn 2021-10-12 19:27:41 +00:00
Thomas Carmet fa3fb82e5c Merge branch 'w/7.10/feature/UTAPI-30-align-package-version' into w/8.1/feature/UTAPI-30-align-package-version 2021-10-07 10:36:17 -07:00
bert-e 0001d2218a Merge branch 'bugfix/UTAPI-29/fix_bucketd_tls_config' into tmp/octopus/w/8.1/bugfix/UTAPI-29/fix_bucketd_tls_config 2021-10-06 00:30:05 +00:00
bert-e b38000c771 Merge branch 'bugfix/UTAPI-27/max_retries_for_bucketd_requests' into tmp/octopus/w/8.1/bugfix/UTAPI-27/max_retries_for_bucketd_requests 2021-10-04 20:41:14 +00:00
bert-e 34d38fb2b7 Merge branches 'w/8.1/feature/UTAPI-26/add_service_user' and 'q/1141/7.10/feature/UTAPI-26/add_service_user' into tmp/octopus/q/8.1 2021-10-01 17:43:00 +00:00
bert-e 6d99ac3dae Merge branch 'feature/UTAPI-26/add_service_user' into tmp/octopus/w/8.1/feature/UTAPI-26/add_service_user 2021-10-01 17:34:30 +00:00
bert-e 7755a3fa3d Merge branches 'w/8.1/feature/UTAPI-24/limit_user_credentials_via_filtering' and 'q/1138/7.10/feature/UTAPI-24/limit_user_credentials_via_filtering' into tmp/octopus/q/8.1 2021-10-01 17:24:19 +00:00
bert-e 9bca308db7 Merge branch 'feature/UTAPI-24/limit_user_credentials_via_filtering' into tmp/octopus/w/8.1/feature/UTAPI-24/limit_user_credentials_via_filtering 2021-10-01 17:18:46 +00:00
bert-e 0159387ba9 Merge branch 'q/1137/7.10/feature/UTAPI-23/limit_account_keys_via_filtering' into tmp/normal/q/8.1 2021-10-01 16:56:45 +00:00
Taylor McKinnon 05017af754 Merge remote-tracking branch 'origin/development/8.1' into w/8.1/feature/UTAPI-23/limit_account_keys_via_filtering 2021-10-01 09:48:47 -07:00
bert-e 6544f118c0 Merge branch 'feature/UTAPI-23/limit_account_keys_via_filtering' into tmp/octopus/w/8.1/feature/UTAPI-23/limit_account_keys_via_filtering 2021-10-01 16:04:37 +00:00
bert-e b970847ca3 Merge branches 'w/8.1/bugfix/UTAPI-25_add-bucket-lifecycle-operations' and 'q/1140/7.10/bugfix/UTAPI-25_add-bucket-lifecycle-operations' into tmp/octopus/q/8.1 2021-09-29 22:22:37 +00:00
bert-e f19f6c7ab6 Merge branch 'bugfix/UTAPI-25_add-bucket-lifecycle-operations' into tmp/octopus/w/8.1/bugfix/UTAPI-25_add-bucket-lifecycle-operations 2021-09-29 16:31:47 +00:00
bert-e 54362134de Merge branches 'w/8.1/feature/UTAPI-7-pin-arsenal' and 'q/1122/7.10/feature/UTAPI-7-pin-arsenal' into tmp/octopus/q/8.1 2021-09-14 20:03:52 +00:00
bert-e 959a26cc62 Merge branch 'bugfix/UTAPI-21__convert_reindex_workers_flag_to_int' into tmp/octopus/w/8.1/bugfix/UTAPI-21__convert_reindex_workers_flag_to_int 2021-09-13 16:54:30 +00:00
bert-e 96e2b2c731 Merge branches 'w/8.1/bugfix/S3C-4784_redis-connection-build-up' and 'q/1128/7.10/bugfix/S3C-4784_redis-connection-build-up' into tmp/octopus/q/8.1 2021-09-02 17:22:21 +00:00
bert-e 85182216e4 Merge branch 'bugfix/S3C-4784_redis-connection-build-up' into tmp/octopus/w/8.1/bugfix/S3C-4784_redis-connection-build-up 2021-09-02 17:15:50 +00:00
Thomas Carmet 9532aec058 Merge branch 'w/7.10/feature/UTAPI-7-pin-arsenal' into w/8.1/feature/UTAPI-7-pin-arsenal 2021-09-01 14:08:24 -07:00
bert-e 214bf4189f Merge branch 'w/7.10/bugfix/S3C-4784_redis-connection-buildup-stabilization' into tmp/octopus/w/8.1/bugfix/S3C-4784_redis-connection-buildup-stabilization 2021-08-27 16:27:36 +00:00
bert-e 2e4c2c66d5 Merge branch 'w/7.10/bugfix/UTAPI-6_warp10_leak_fix_jmx' into tmp/octopus/w/8.1/bugfix/UTAPI-6_warp10_leak_fix_jmx 2021-08-16 17:22:02 +00:00
bert-e b2e4683c5d Merge branch 'w/7.10/feature/UTAPI-5-bump-werelogs' into tmp/octopus/w/8.1/feature/UTAPI-5-bump-werelogs 2021-08-12 17:25:13 +00:00
bert-e 5487001cee Merge branches 'w/8.1/feature/UTAPI-1_prometheus_metrics' and 'q/1093/7.10/feature/UTAPI-1_prometheus_metrics' into tmp/octopus/q/8.1 2021-07-31 00:40:12 +00:00
bert-e 3779c8c144 Merge branch 'feature/UTAPI-1_prometheus_metrics' into tmp/octopus/w/8.1/feature/UTAPI-1_prometheus_metrics 2021-07-31 00:30:42 +00:00
bert-e 7f3f6bb753 Merge branches 'w/8.1/bugfix/S3C-4550_avoid_reindex_diff_flapping' and 'q/1082/7.10/bugfix/S3C-4550_avoid_reindex_diff_flapping' into tmp/octopus/q/8.1 2021-07-01 22:18:18 +00:00
bert-e 8715b0d096 Merge branch 'feature/S3C-4439_bucket-encryption-api-operations-to-utapi-v2' into tmp/octopus/w/8.1/feature/S3C-4439_bucket-encryption-api-operations-to-utapi-v2 2021-06-29 02:23:39 +00:00
bert-e 7e38de823a Merge branch 'bugfix/S3C-4550_avoid_reindex_diff_flapping' into tmp/octopus/w/8.1/bugfix/S3C-4550_avoid_reindex_diff_flapping 2021-06-24 19:47:21 +00:00
bert-e 1695a01f9e Merge branch 'feature/S3C-4240_specific_bucxket_reindex_flag' into tmp/octopus/w/8.1/feature/S3C-4240_specific_bucxket_reindex_flag 2021-06-24 18:28:13 +00:00
bert-e 77042591b6 Merge branch 'bugfix/S3C-4429_null_sizeD_during_reindex' into tmp/octopus/w/8.1/bugfix/S3C-4429_null_sizeD_during_reindex 2021-05-26 19:28:25 +00:00
bert-e a2d1c47451 Merge branches 'w/8.1/bugfix/S3C-3692_fix_accountid_support_in_list_metrics_js' and 'q/1068/7.10/bugfix/S3C-3692_fix_accountid_support_in_list_metrics_js' into tmp/octopus/q/8.1 2021-05-25 19:10:04 +00:00
bert-e 0adc018e10 Merge branch 'bugfix/S3C-3692_fix_accountid_support_in_list_metrics_js' into tmp/octopus/w/8.1/bugfix/S3C-3692_fix_accountid_support_in_list_metrics_js 2021-05-25 17:15:57 +00:00
bert-e 3af6c176b3 Merge branch 'improvement/S3C-4388_adjust_reindex_log_levels' into tmp/octopus/w/8.1/improvement/S3C-4388_adjust_reindex_log_levels 2021-05-25 00:14:37 +00:00
Taylor McKinnon edbae18a62 Merge remote-tracking branch 'origin/bugfix/S3C-4424_switch_protobuf_ext_to_git_lfs' into w/8.1/bugfix/S3C-4424_switch_protobuf_ext_to_git_lfs 2021-05-24 16:32:17 -07:00
bert-e 16c3782ca4 Merge branch 'bugfix/S3C-4151_fix_user_support' into tmp/octopus/w/8.1/bugfix/S3C-4151_fix_user_support 2021-04-15 21:12:11 +00:00
bert-e d5967bcee1 Merge branch 'w/7.10/bugfix/S3C-3996-backport-7.4' into tmp/octopus/w/8.1/bugfix/S3C-3996-backport-7.4 2021-04-01 17:08:52 +00:00
bert-e e0d816a759 Merge branch 'bugfix/S3C-3996/reduce-reindex-logging' into tmp/octopus/w/8.1/bugfix/S3C-3996/reduce-reindex-logging 2021-04-01 07:21:29 +00:00
Alexander Chan 3d0b92f319 bugfix: ZENKO-3300 fix incrby call 2021-03-18 15:18:38 -07:00
bert-e 9e85797380 Merge branches 'w/8.1/bugfix/S3C-4061-missing-content-length-workaround' and 'q/1030/7.10/bugfix/S3C-4061-missing-content-length-workaround' into tmp/octopus/q/8.1 2021-03-17 21:30:15 +00:00
bert-e 2cce109bd6 Merge branch 'w/7.10/bugfix/S3C-4061-missing-content-length-workaround' into tmp/octopus/w/8.1/bugfix/S3C-4061-missing-content-length-workaround 2021-03-17 21:18:52 +00:00
bert-e 7eba6d84a7 Merge branch 'w/7.10/bugfix/S3C-4119-stale-bucket-workaround' into tmp/octopus/w/8.1/bugfix/S3C-4119-stale-bucket-workaround 2021-03-17 21:17:29 +00:00
bert-e cecc13d63e Merge branch 'w/7.10/bugfix/S3C-4167_fix_typo_in_error_message' into tmp/octopus/w/8.1/bugfix/S3C-4167_fix_typo_in_error_message 2021-03-17 19:05:49 +00:00
bert-e 361bc24c79 Merge branches 'w/8.1/bugfix/S3C-4145_fix_error_responses' and 'q/1023/7.10/bugfix/S3C-4145_fix_error_responses' into tmp/octopus/q/8.1 2021-03-16 03:43:59 +00:00
bert-e 6e66608d0e Merge branches 'w/8.1/feature/S3C-4100_manual_adjustment_task' and 'q/998/7.10/feature/S3C-4100_manual_adjustment_task' into tmp/octopus/q/8.1 2021-03-16 03:43:45 +00:00
bert-e 00a3475bf6 Merge branch 'w/7.10/bugfix/S3C-4145_fix_error_responses' into tmp/octopus/w/8.1/bugfix/S3C-4145_fix_error_responses 2021-03-16 01:53:05 +00:00
bert-e 3fb8078677 Merge branches 'development/8.1' and 'w/7.10/feature/S3C-4100_manual_adjustment_task' into tmp/octopus/w/8.1/feature/S3C-4100_manual_adjustment_task 2021-03-16 01:33:13 +00:00
bert-e 9f6c56d682 Merge branches 'w/8.1/bugfix/S3C-4139_allow_only_start_timestamp_in_req' and 'q/1013/7.10/bugfix/S3C-4139_allow_only_start_timestamp_in_req' into tmp/octopus/q/8.1 2021-03-16 00:14:59 +00:00
bert-e 28618ac3d8 Merge branches 'w/8.1/bugfix/S3C-4137_add_opId_translation_to_ingest_route' and 'q/1002/7.10/bugfix/S3C-4137_add_opId_translation_to_ingest_route' into tmp/octopus/q/8.1 2021-03-16 00:14:27 +00:00
bert-e d467389474 Merge branch 'w/7.10/bugfix/S3C-4139_allow_only_start_timestamp_in_req' into tmp/octopus/w/8.1/bugfix/S3C-4139_allow_only_start_timestamp_in_req 2021-03-15 19:03:04 +00:00
bert-e 167c5e36fe Merge branch 'w/7.10/bugfix/S3C-4145_fix_error_responses' into tmp/octopus/w/8.1/bugfix/S3C-4145_fix_error_responses 2021-03-15 18:54:40 +00:00
bert-e 8bd5e56ee9 Merge branch 'w/7.10/bugfix/S3C-4139_allow_only_start_timestamp_in_req' into tmp/octopus/w/8.1/bugfix/S3C-4139_allow_only_start_timestamp_in_req 2021-03-15 18:51:35 +00:00
bert-e bd82e9ec8c Merge branch 'w/7.10/bugfix/S3C-4137_add_opId_translation_to_ingest_route' into tmp/octopus/w/8.1/bugfix/S3C-4137_add_opId_translation_to_ingest_route 2021-03-15 17:12:57 +00:00
bert-e 037b0f5d17 Merge branch 'w/7.10/feature/S3C-4100_manual_adjustment_task' into tmp/octopus/w/8.1/feature/S3C-4100_manual_adjustment_task 2021-03-13 00:55:12 +00:00
bert-e a1002dc126 Merge branches 'w/8.1/bugfix/S3C-4085_handle_unauthorized' and 'q/985/7.10/bugfix/S3C-4085_handle_unauthorized' into tmp/octopus/q/8.1 2021-03-09 18:47:12 +00:00
bert-e 6c02f3e109 Merge branches 'w/8.1/bugfix/S3C-4022-bump-warp10' and 'q/989/7.10/bugfix/S3C-4022-bump-warp10' into tmp/octopus/q/8.1 2021-03-09 16:21:55 +00:00
bert-e 594b34472f Merge branch 'w/7.10/bugfix/S3C-4085_handle_unauthorized' into tmp/octopus/w/8.1/bugfix/S3C-4085_handle_unauthorized 2021-03-09 01:51:19 +00:00
bert-e dbfac82feb Merge branch 'w/7.10/bugfix/S3C-4022-bump-warp10' into tmp/octopus/w/8.1/bugfix/S3C-4022-bump-warp10 2021-03-09 01:12:39 +00:00
bert-e a03af1f05f Merge branches 'w/8.1/bugfix/S3C-4067_handle_multiple_gts_during_checkpoint_creation' and 'q/978/7.10/bugfix/S3C-4067_handle_multiple_gts_during_checkpoint_creation' into tmp/octopus/q/8.1 2021-03-05 19:43:38 +00:00
bert-e 387f0f9a9b Merge branch 'w/7.10/bugfix/S3C-4067_handle_multiple_gts_during_checkpoint_creation' into tmp/octopus/w/8.1/bugfix/S3C-4067_handle_multiple_gts_during_checkpoint_creation 2021-03-05 00:37:08 +00:00
bert-e ec250c4df2 Merge branches 'w/8.1/bugfix/S3C-4049_call_delete_in_slices_rather_than_once' and 'q/968/7.10/bugfix/S3C-4049_call_delete_in_slices_rather_than_once' into tmp/octopus/q/8.1 2021-03-05 00:10:12 +00:00
bert-e 5a28fc992e Merge branch 'w/7.10/bugfix/S3C-4049_call_delete_in_slices_rather_than_once' into tmp/octopus/w/8.1/bugfix/S3C-4049_call_delete_in_slices_rather_than_once 2021-03-04 19:12:19 +00:00
bert-e b5a5f70d1c Merge branch 'w/7.10/bugfix/S3C-4057_handle_timestamp_conflicts' into tmp/octopus/w/8.1/bugfix/S3C-4057_handle_timestamp_conflicts 2021-03-03 19:21:01 +00:00
bert-e ee814772a7 Merge branch 'w/7.10/improvement/S3C-4034_simplify_soft_limit' into tmp/octopus/w/8.1/improvement/S3C-4034_simplify_soft_limit 2021-02-23 23:21:36 +00:00
bert-e 4c016f2838 Merge branch 'w/7.10/bugfix/S3C-4035-increasePodLimits' into tmp/octopus/w/8.1/bugfix/S3C-4035-increasePodLimits 2021-02-23 18:45:10 +00:00
bert-e 875b1fed30 Merge branch 'w/7.10/bugfix/S3C-3620-avoidCrashOnRedisError' into tmp/octopus/w/8.1/bugfix/S3C-3620-avoidCrashOnRedisError 2021-02-23 00:18:05 +00:00
bert-e bc58f13b8d Merge branches 'w/8.1/feature/S3C-4033-bump-version' and 'q/938/7.10/feature/S3C-4033-bump-version' into tmp/octopus/q/8.1 2021-02-23 00:02:08 +00:00
Thomas Carmet cae286b4ac Merge remote-tracking branch 'origin/feature/S3C-4033-bump-version' into w/8.1/feature/S3C-4033-bump-version 2021-02-22 15:52:39 -08:00
bert-e d862258cfd Merge branches 'w/8.1/bugfix/S3C-4023_add_missing_property' and 'q/918/7.10/bugfix/S3C-4023_add_missing_property' into tmp/octopus/q/8.1 2021-02-22 21:39:22 +00:00
bert-e 0fa9b0081c Merge branch 'w/7.10/bugfix/S3C-4030_fix_flaky_testGetStorage' into tmp/octopus/w/8.1/bugfix/S3C-4030_fix_flaky_testGetStorage 2021-02-22 19:29:00 +00:00
bert-e b257883e4b Merge branch 'w/7.10/bugfix/S3C-4023_add_missing_property' into tmp/octopus/w/8.1/bugfix/S3C-4023_add_missing_property 2021-02-22 17:53:59 +00:00
bert-e 459464a97f Merge branch 'w/7.10/bugfix/S3C-3970_reduce_scanning_of_event_during_query' into tmp/octopus/w/8.1/bugfix/S3C-3970_reduce_scanning_of_event_during_query 2021-02-19 06:44:01 +00:00
bert-e 357f8fe1e4 Merge branch 'w/7.10/improvement/S3C-3971_reduce_event_footprint' into tmp/octopus/w/8.1/improvement/S3C-3971_reduce_event_footprint 2021-02-11 00:17:20 +00:00
bert-e 2dee952d57 Merge branch 'w/7.9/bugfix/S3C-3940_prevent_negative_metrics' into tmp/octopus/w/8.1/bugfix/S3C-3940_prevent_negative_metrics 2021-02-09 23:02:18 +00:00
bert-e 71dac22a13 Merge branch 'feature/ZENKO-3110-add-release-stage' into q/8.1 2021-02-06 00:07:12 +00:00
Thomas Carmet 740951dc4b Switching to yarn instead of npm to run utapi 2021-02-05 15:12:20 -08:00
Thomas Carmet 5ef95cbc1e Upgrading nodejs to 10.22 2021-02-05 15:08:00 -08:00
Thomas Carmet 72463f72df ZENKO-3110 setting up release stage for utapi 2021-02-05 14:41:42 -08:00
bert-e e424a00bc6 Merge branches 'w/8.1/bugfix/S3C-3937_handle_phd_in_reindex' and 'q/898/7.9/bugfix/S3C-3937_handle_phd_in_reindex' into tmp/octopus/q/8.1 2021-02-05 22:03:02 +00:00
bert-e 0afb78e7ae Merge branch 'bugfix/S3C-3937_handle_phd_in_reindex' into tmp/octopus/w/8.1/bugfix/S3C-3937_handle_phd_in_reindex 2021-02-05 21:01:35 +00:00
bert-e df5e11eb9d Merge branch 'bugfix/S3C-3830-fetch-warp10-results' into tmp/octopus/w/8.1/bugfix/S3C-3830-fetch-warp10-results 2021-02-02 23:28:05 +00:00
bert-e 97406a81a2 Merge branch 'bugfix/S3C-3830-fetch-warp10-results' into tmp/octopus/w/8.1/bugfix/S3C-3830-fetch-warp10-results 2021-02-02 20:33:41 +00:00
bert-e 684c3389e9 Merge branch 'improvement/S3C-3800_rework_failover' into tmp/octopus/w/8.1/improvement/S3C-3800_rework_failover 2021-01-30 00:27:10 +00:00
bert-e 9b1f55ec76 Merge branch 'feature/S3C-3609_hard_disk_limit' into tmp/octopus/w/8.1/feature/S3C-3609_hard_disk_limit 2021-01-29 20:49:10 +00:00
bert-e 56a368d9a6 Merge branch 'feature/S3C-3707_soft_disk_limit' into tmp/octopus/w/8.1/feature/S3C-3707_soft_disk_limit 2021-01-28 22:14:07 +00:00
bert-e 32e14b5099 Merge branch 'feature/S3C-3707_fix_tests' into tmp/octopus/w/8.1/feature/S3C-3707_fix_tests 2021-01-27 22:28:15 +00:00
bert-e 2349cf3791 Merge branches 'w/8.1/feature/S3C-3812_warp10_deletion' and 'q/874/7.9/feature/S3C-3812_warp10_deletion' into tmp/octopus/q/8.1 2021-01-15 23:19:15 +00:00
bert-e 3ca3e7fd32 Merge branch 'feature/S3C-3812_warp10_deletion' into tmp/octopus/w/8.1/feature/S3C-3812_warp10_deletion 2021-01-15 23:15:51 +00:00
bert-e bb559f08c8 Merge branch 'bugfix/S3C-3764_upgrade_@senx/warp10' into tmp/octopus/w/8.1/bugfix/S3C-3764_upgrade_@senx/warp10 2021-01-13 23:38:57 +00:00
bert-e 0e83b9db55 Merge branches 'w/8.1/feature/S3C-3721_monitor_disk_usage_task' and 'q/871/7.9/feature/S3C-3721_monitor_disk_usage_task' into tmp/octopus/q/8.1 2021-01-12 18:36:40 +00:00
bert-e 3ff4fc9cc1 Merge branches 'w/8.1/bugfix/S3C-3725_translate_listBucketMulitpartUploads_in_migrate' and 'q/869/7.9/bugfix/S3C-3725_translate_listBucketMulitpartUploads_in_migrate' into tmp/octopus/q/8.1 2021-01-12 17:38:45 +00:00
bert-e 22cead971d Merge branch 'feature/S3C-3721_monitor_disk_usage_task' into tmp/octopus/w/8.1/feature/S3C-3721_monitor_disk_usage_task 2021-01-12 01:12:02 +00:00
bert-e 98f528cf62 Merge branch 'bugfix/S3C-3725_translate_listBucketMulitpartUploads_in_migrate' into tmp/octopus/w/8.1/bugfix/S3C-3725_translate_listBucketMulitpartUploads_in_migrate 2021-01-11 19:41:28 +00:00
bert-e daf0b2c8d9 Merge branch 'feature/S3C-3524_update_warp10' into tmp/octopus/w/8.1/feature/S3C-3524_update_warp10 2021-01-07 22:13:22 +00:00
Taylor McKinnon a8491a3241 Merge remote-tracking branch 'origin/feature/S3C-3771_add_client_tls_support' into w/8.1/feature/S3C-3771_add_client_tls_support 2021-01-05 09:54:52 -08:00
bert-e c8a36e45bb Merge branch 'bugfix/S3C-3767_fix_internal_tls_config' into tmp/octopus/w/8.1/bugfix/S3C-3767_fix_internal_tls_config 2020-12-30 23:09:19 +00:00
bert-e 97784eaa70 Merge branch 'bugfix/S3C-3763_batch_ingestion_of_shards' into tmp/octopus/w/8.1/bugfix/S3C-3763_batch_ingestion_of_shards 2020-12-28 19:22:40 +00:00
bert-e cd88e9f4bf Merge branches 'w/8.1/bugfix/S3C-3505_support_bucket_option_in_v1_reindex' and 'q/853/7.9/bugfix/S3C-3505_support_bucket_option_in_v1_reindex' into tmp/octopus/q/8.1 2020-12-15 19:06:43 +00:00
bert-e c784f09bc0 Merge branch 'bugfix/S3C-3505_support_bucket_option_in_v1_reindex' into tmp/octopus/w/8.1/bugfix/S3C-3505_support_bucket_option_in_v1_reindex 2020-12-15 18:51:54 +00:00
bert-e d61f9997c8 Merge branch 'bugfix/S3C-3680_fix_mpu_edgecase_in_migration' into tmp/octopus/w/8.1/bugfix/S3C-3680_fix_mpu_edgecase_in_migration 2020-12-11 21:58:07 +00:00
bert-e d9555e0038 Merge branches 'w/8.1/bugfix/S3C-3696_bump_default_java_max_heap' and 'q/849/7.9/bugfix/S3C-3696_bump_default_java_max_heap' into tmp/octopus/q/8.1 2020-12-11 21:41:28 +00:00
bert-e e5a814aa13 Merge branch 'bugfix/S3C-3696_bump_default_java_max_heap' into tmp/octopus/w/8.1/bugfix/S3C-3696_bump_default_java_max_heap 2020-12-11 20:15:43 +00:00
bert-e 1ed1b901c2 Merge branches 'w/8.1/bugfix/S3C-3689_fix_incorrect_Date.now' and 'q/845/7.9/bugfix/S3C-3689_fix_incorrect_Date.now' into tmp/octopus/q/8.1 2020-12-11 20:08:14 +00:00
bert-e 04a8021fe5 Merge branches 'w/8.1/bugfix/S3C-3679_cleanup_closed_redis_client' and 'q/841/7.9/bugfix/S3C-3679_cleanup_closed_redis_client' into tmp/octopus/q/8.1 2020-12-11 19:53:54 +00:00
bert-e d80bd66387 Merge branch 'bugfix/S3C-3679_cleanup_closed_redis_client' into tmp/octopus/w/8.1/bugfix/S3C-3679_cleanup_closed_redis_client 2020-12-11 19:49:20 +00:00
bert-e 4678fdae05 Merge branch 'bugfix/S3C-3689_fix_incorrect_Date.now' into tmp/octopus/w/8.1/bugfix/S3C-3689_fix_incorrect_Date.now 2020-12-08 19:46:35 +00:00
bert-e 3f7ea3e121 Merge branch 'w/7.9/bugfix/S3C-3553_improve_redis_reconnection_logic' into tmp/octopus/w/8.1/bugfix/S3C-3553_improve_redis_reconnection_logic 2020-11-18 23:53:16 +00:00
bert-e f136d7d994 Merge branches 'w/8.1/bugfix/S3C-3516_fix_authz_for_account_lvl_metrics' and 'q/823/7.9/bugfix/S3C-3516_fix_authz_for_account_lvl_metrics' into tmp/octopus/q/8.1 2020-11-12 18:16:20 +00:00
bert-e a1e6c4d11a Merge branches 'w/8.1/improvement/S3C-3520_drop_hex_encoding_for_warp10_data' and 'q/828/7.9/improvement/S3C-3520_drop_hex_encoding_for_warp10_data' into tmp/octopus/q/8.1 2020-11-11 23:50:04 +00:00
bert-e 97014cf67b Merge branch 'bugfix/S3C-3516_fix_authz_for_account_lvl_metrics' into tmp/octopus/w/8.1/bugfix/S3C-3516_fix_authz_for_account_lvl_metrics 2020-11-11 23:49:42 +00:00
bert-e c8ac4cf688 Merge branch 'improvement/S3C-3520_drop_hex_encoding_for_warp10_data' into tmp/octopus/w/8.1/improvement/S3C-3520_drop_hex_encoding_for_warp10_data 2020-11-11 22:08:42 +00:00
bert-e 4554828a52 Merge branch 'bugfix/S3C-3514_add_missing_logline' into tmp/octopus/w/8.1/bugfix/S3C-3514_add_missing_logline 2020-11-09 23:46:53 +00:00
bert-e 4caa7f5641 Merge branch 'w/7.9/feature/S3C-3484_extend_warp10_image' into tmp/octopus/w/8.1/feature/S3C-3484_extend_warp10_image 2020-10-28 22:06:32 +00:00
bert-e 92ffbcc3d7 Merge branch 'w/7.9/feature/S3C-3484_extend_warp10_image' into tmp/octopus/w/8.1/feature/S3C-3484_extend_warp10_image 2020-10-28 19:10:21 +00:00
bert-e 1197733b17 Merge branch 'w/7.9/bugfix/S3C-3485_fix_listMetrics_handler' into tmp/octopus/w/8.1/bugfix/S3C-3485_fix_listMetrics_handler 2020-10-27 23:39:12 +00:00
bert-e 78bc6290b2 Merge branch 'w/7.9/bugfix/S3C-3483_adjust_default_schedules' into tmp/octopus/w/8.1/bugfix/S3C-3483_adjust_default_schedules 2020-10-27 20:21:53 +00:00
bert-e 124744b562 Merge branches 'w/8.1/bugfix/S3C-3426_fix_user_creds_support' and 'q/785/7.9/bugfix/S3C-3426_fix_user_creds_support' into tmp/octopus/q/8.1 2020-10-23 05:47:28 +00:00
bert-e 4a806da678 Merge branches 'w/8.1/bugfix/S3C-3446_convert_account_to_canonical_id' and 'q/794/7.9/bugfix/S3C-3446_convert_account_to_canonical_id' into tmp/octopus/q/8.1 2020-10-23 05:33:40 +00:00
bert-e 3f4f34976c Merge branch 'w/7.9/bugfix/S3C-3446_convert_account_to_canonical_id' into tmp/octopus/w/8.1/bugfix/S3C-3446_convert_account_to_canonical_id 2020-10-23 00:37:32 +00:00
bert-e 74b8c91244 Merge branches 'w/8.1/feature/S3C-3010_add_migrate_task' and 'q/769/7.9/feature/S3C-3010_add_migrate_task' into tmp/octopus/q/8.1 2020-10-23 00:23:34 +00:00
bert-e df4e96132c Merge branch 'w/7.9/feature/S3C-3010_add_migrate_task' into tmp/octopus/w/8.1/feature/S3C-3010_add_migrate_task 2020-10-21 23:45:37 +00:00
bert-e ffe3ece284 Merge branch 'w/7.9/bugfix/S3C-3426_fix_user_creds_support' into tmp/octopus/w/8.1/bugfix/S3C-3426_fix_user_creds_support 2020-10-21 23:42:51 +00:00
bert-e d6d53eed8a Merge branch 'w/7.9/bugfix/S3C-3447_switch_to_node_schedule' into tmp/octopus/w/8.1/bugfix/S3C-3447_switch_to_node_schedule 2020-10-21 21:15:29 +00:00
bert-e e3a6844fc5 Merge branch 'w/7.9/bugfix/S3C-3438_add_missing_reindex_schema' into tmp/octopus/w/8.1/bugfix/S3C-3438_add_missing_reindex_schema 2020-10-19 22:30:06 +00:00
bert-e 58c73db7c3 Merge branch 'w/7.9/bugfix/S3C-2576-update-vaultclient' into tmp/octopus/w/8.1/bugfix/S3C-2576-update-vaultclient 2020-10-13 14:48:06 +00:00
bert-e 9a7ea1e564 Merge branch 'w/7.9/bugfix/S3C-3322_bump_vaultclient' into tmp/octopus/w/8.1/bugfix/S3C-3322_bump_vaultclient 2020-10-12 20:56:10 +00:00
bert-e 3a6e0a4c40 Merge branch 'w/7.9/bugfix/S3C-3424_remove_creds_from_client' into tmp/octopus/w/8.1/bugfix/S3C-3424_remove_creds_from_client 2020-10-08 23:17:10 +00:00
bert-e 62498fa330 Merge branches 'w/8.1/feature/S3C-3423_add_client_ip_limiting_middleware' and 'q/741/7.9/feature/S3C-3423_add_client_ip_limiting_middleware' into tmp/octopus/q/8.1 2020-10-08 20:54:35 +00:00
bert-e e9a252f3c4 Merge branch 'w/7.9/feature/S3C-3423_add_client_ip_limiting_middleware' into tmp/octopus/w/8.1/feature/S3C-3423_add_client_ip_limiting_middleware 2020-10-08 20:50:22 +00:00
bert-e 1b1d1ce35c Merge branch 'w/7.9/feature/S3C-3418_change_on_wire_ingestion_format_json' into tmp/octopus/w/8.1/feature/S3C-3418_change_on_wire_ingestion_format_json 2020-10-07 22:32:37 +00:00
bert-e 0b308adf07 Merge branches 'w/8.1/bugfix/S3C-3307_handle_deleted_buckets_in_reindex_port' and 'q/726/7.9/bugfix/S3C-3307_handle_deleted_buckets_in_reindex_port' into tmp/octopus/q/8.1 2020-10-06 22:55:41 +00:00
bert-e 73735f0f27 Merge branch 'w/7.9/bugfix/S3C-3307_handle_deleted_buckets_in_reindex_port' into tmp/octopus/w/8.1/bugfix/S3C-3307_handle_deleted_buckets_in_reindex_port 2020-10-06 21:19:03 +00:00
bert-e 7b1ed984e8 Merge branches 'w/8.1/bugfix/S3C-3307_handle_deleted_buckets_in_reindex' and 'q/695/7.9/bugfix/S3C-3307_handle_deleted_buckets_in_reindex' into tmp/octopus/q/8.1 2020-10-06 19:08:10 +00:00
bert-e 0adb775a1c Merge branch 'w/7.9/bugfix/S3C-3308_fix_mpuShadowBucket_counters_for_v2' into tmp/octopus/w/8.1/bugfix/S3C-3308_fix_mpuShadowBucket_counters_for_v2 2020-10-05 17:42:07 +00:00
bert-e a64b25c26e Merge branch 'w/7.9/bugfix/S3C-3307_handle_deleted_buckets_in_reindex' into tmp/octopus/w/8.1/bugfix/S3C-3307_handle_deleted_buckets_in_reindex 2020-10-05 17:36:57 +00:00
bert-e 392636df76 Merge branch 'w/7.8/bugfix/S3C-3376_remove_requirement_for_access_key' into tmp/octopus/w/8.1/bugfix/S3C-3376_remove_requirement_for_access_key 2020-10-01 18:14:21 +00:00
bert-e 32920a5a97 Merge branches 'w/8.1/feature/S3C-3382_add_configuration_for_ingestion_speed' and 'q/698/7.8/feature/S3C-3382_add_configuration_for_ingestion_speed' into tmp/octopus/q/8.1 2020-10-01 17:25:48 +00:00
bert-e b875c72451 Merge branches 'w/8.1/bugfix/S3C-3361_bump_v2_toggle_timeout' and 'q/702/7.8/bugfix/S3C-3361_bump_v2_toggle_timeout' into tmp/octopus/q/8.1 2020-09-30 00:11:03 +00:00
bert-e abbbad9f5f Merge branch 'bugfix/S3C-3361_bump_v2_toggle_timeout' into tmp/octopus/w/8.1/bugfix/S3C-3361_bump_v2_toggle_timeout 2020-09-29 21:43:20 +00:00
bert-e 4028b265f3 Merge branch 'feature/S3C-3382_add_configuration_for_ingestion_speed' into tmp/octopus/w/8.1/feature/S3C-3382_add_configuration_for_ingestion_speed 2020-09-29 21:33:15 +00:00
bert-e 6051cada33 Merge branch 'w/7.8/bugfix/S3C-3308_fix_mpuShadowBucket_counters' into tmp/octopus/w/8.1/bugfix/S3C-3308_fix_mpuShadowBucket_counters 2020-09-28 20:05:41 +00:00
bert-e dd1ef6860e Merge branch 'bugfix/S3C-3363_add_missing_logger_for_replay' into tmp/octopus/w/8.1/bugfix/S3C-3363_add_missing_logger_for_replay 2020-09-24 20:54:47 +00:00
bert-e 33c1af303d Merge branches 'w/8.1/bugfix/S3C-3362_fix_reindex_default_schedule' and 'q/683/7.8/bugfix/S3C-3362_fix_reindex_default_schedule' into tmp/octopus/q/8.1 2020-09-24 02:53:34 +00:00
bert-e 3d79444672 Merge branches 'w/8.1/feature/S3C-3324_redis_backed_routes' and 'q/668/7.8/feature/S3C-3324_redis_backed_routes' into tmp/octopus/q/8.1 2020-09-24 02:53:08 +00:00
bert-e 1bd45ffd68 Merge branch 'bugfix/S3C-3362_fix_reindex_default_schedule' into tmp/octopus/w/8.1/bugfix/S3C-3362_fix_reindex_default_schedule 2020-09-24 01:10:25 +00:00
bert-e f19018f9e7 Merge branch 'feature/S3C-3324_redis_backed_routes' into tmp/octopus/w/8.1/feature/S3C-3324_redis_backed_routes 2020-09-24 00:48:00 +00:00
bert-e 87658b4351 Merge branches 'w/8.1/bugfix/S3C-1997_fix_ioredis_failover' and 'q/671/7.8/bugfix/S3C-1997_fix_ioredis_failover' into tmp/octopus/q/8.1 2020-09-24 00:07:29 +00:00
bert-e 91eed09651 Merge branch 'bugfix/S3C-1997_fix_ioredis_failover' into tmp/octopus/w/8.1/bugfix/S3C-1997_fix_ioredis_failover 2020-09-24 00:03:21 +00:00
bert-e 728d54501b Merge branches 'w/8.1/bugfix/S3C-3358_fix_metrics_resp_for_single_resource' and 'q/674/7.8/bugfix/S3C-3358_fix_metrics_resp_for_single_resource' into tmp/octopus/q/8.1 2020-09-23 18:20:27 +00:00
bert-e 911cfb3c36 Merge branch 'bugfix/S3C-3358_fix_metrics_resp_for_single_resource' into tmp/octopus/w/8.1/bugfix/S3C-3358_fix_metrics_resp_for_single_resource 2020-09-23 18:06:30 +00:00
bert-e 5d7ed8520f Merge branches 'w/8.1/feature/S3C-3351_cache_node_deps_backport' and 'q/659/7.8/feature/S3C-3351_cache_node_deps_backport' into tmp/octopus/q/8.1 2020-09-23 17:23:14 +00:00
Taylor McKinnon dd118e4a44 Merge remote-tracking branch 'origin/feature/S3C-3286_add_reindex_task' into w/8.1/feature/S3C-3286_add_reindex_task 2020-09-17 11:57:37 -07:00
bert-e 3076eaf115 Merge branch 'w/7.8/feature/S3C-3351_cache_node_deps_backport' into tmp/octopus/w/8.1/feature/S3C-3351_cache_node_deps_backport 2020-09-17 18:47:55 +00:00
bert-e fcd74b5707 Merge branches 'w/8.1/feature/S3C-3351_cache_node_deps' and 'q/656/7.8/feature/S3C-3351_cache_node_deps' into tmp/octopus/q/8.1 2020-09-17 17:20:16 +00:00
bert-e ad52015f73 Merge branch 'feature/S3C-3351_cache_node_deps' into tmp/octopus/w/8.1/feature/S3C-3351_cache_node_deps 2020-09-17 17:11:39 +00:00
bert-e 60d0dc794d Merge branches 'w/8.1/feature/S3C-3324_add_counter_backend_to_cache' and 'q/651/7.8/feature/S3C-3324_add_counter_backend_to_cache' into tmp/octopus/q/8.1 2020-09-16 17:21:13 +00:00
bert-e 14db2c93ce Merge branch 'feature/S3C-3324_add_counter_backend_to_cache' into tmp/octopus/w/8.1/feature/S3C-3324_add_counter_backend_to_cache 2020-09-11 22:28:44 +00:00
bert-e 4d4906dc94 Merge branch 'improvement/S3C-3325-support-bucket-notification-apis-utapi' into tmp/octopus/w/8.1/improvement/S3C-3325-support-bucket-notification-apis-utapi 2020-09-09 18:59:27 +00:00
bert-e 7a9ce9bf2c Merge branches 'w/8.1/feature/S3C-3269_add_tls_support' and 'q/642/7.8/feature/S3C-3269_add_tls_support' into tmp/octopus/q/8.1 2020-09-04 22:24:54 +00:00
bert-e d203a94367 Merge branch 'feature/S3C-3269_add_tls_support' into tmp/octopus/w/8.1/feature/S3C-3269_add_tls_support 2020-09-04 21:52:16 +00:00
Taylor McKinnon bea0a4607a Merge remote-tracking branch 'origin/bugfix/S3C-3087_bump_version' into w/8.1/bugfix/S3C-3087_bump_version 2020-08-24 12:05:39 -07:00
bert-e 047aa4f8eb Merge branch 'feature/S3C-3265_warp10_failover' into tmp/octopus/w/8.1/feature/S3C-3265_warp10_failover 2020-08-15 02:37:51 +00:00
bert-e 4aae68208c Merge branch 'bugfix/S3C-3264_fix_sentinel_parsing' into tmp/octopus/w/8.1/bugfix/S3C-3264_fix_sentinel_parsing 2020-08-14 18:46:21 +00:00
bert-e 0b7781d3b8 Merge branches 'w/8.1/bugfix/S3C-3255_ensure_ingest_timestamp_precision' and 'q/624/7.8/bugfix/S3C-3255_ensure_ingest_timestamp_precision' into tmp/octopus/q/8.1 2020-08-13 21:46:34 +00:00
bert-e 2a2f818745 Merge branch 'bugfix/S3C-3255_ensure_ingest_timestamp_precision' into tmp/octopus/w/8.1/bugfix/S3C-3255_ensure_ingest_timestamp_precision 2020-08-13 20:57:20 +00:00
bert-e 8064180984 Merge branch 'feature/S3C-3003_add_repair_process' into tmp/octopus/w/8.1/feature/S3C-3003_add_repair_process 2020-08-13 19:55:43 +00:00
bert-e 6e096c9d39 Merge branches 'w/8.1/bugfix/S3C-3242_rework_redis_config' and 'q/614/7.8/bugfix/S3C-3242_rework_redis_config' into tmp/octopus/q/8.1 2020-08-11 17:30:55 +00:00
bert-e 86285d1e45 Merge branches 'w/8.1/bugfix/S3C-3240_fix_tasks_exports' and 'q/611/7.8/bugfix/S3C-3240_fix_tasks_exports' into tmp/octopus/q/8.1 2020-08-10 23:43:30 +00:00
bert-e 8ea9a5dc0a Merge branches 'w/8.1/bugfix/S3C-3243_fix_warp10_token_config' and 'q/609/7.8/bugfix/S3C-3243_fix_warp10_token_config' into tmp/octopus/q/8.1 2020-08-10 23:29:27 +00:00
bert-e 390f1bb3c1 Merge branch 'w/7.8/bugfix/S3C-3240_fix_tasks_exports' into tmp/octopus/w/8.1/bugfix/S3C-3240_fix_tasks_exports 2020-08-10 23:23:15 +00:00
bert-e 3b2fc1b045 Merge branch 'w/7.8/bugfix/S3C-3242_rework_redis_config' into tmp/octopus/w/8.1/bugfix/S3C-3242_rework_redis_config 2020-08-10 23:18:17 +00:00
bert-e 58f20049f3 Merge branch 'bugfix/S3C-3243_fix_warp10_token_config' into tmp/octopus/w/8.1/bugfix/S3C-3243_fix_warp10_token_config 2020-08-10 23:13:09 +00:00
bert-e db05aaf2a3 Merge branch 'bugfix/S3C-3241_remove_warp10_workaround' into tmp/octopus/w/8.1/bugfix/S3C-3241_remove_warp10_workaround 2020-08-10 23:11:48 +00:00
bert-e 16c43c202b Merge branch 'feature/S3C-3235_fix_getMetricsAt_with_no_data_81' into q/8.1 2020-08-07 19:08:16 +00:00
bert-e 90beac2fa7 Merge branch 'w/7.8/feature/S3C-3230_add_authv4_support' into tmp/octopus/w/8.1/feature/S3C-3230_add_authv4_support 2020-08-05 23:03:09 +00:00
bert-e 14446a10c2 Merge branch 'w/7.8/bugfix/S3C-3235_fix_getMetricsAt_with_no_data' into tmp/octopus/w/8.1/bugfix/S3C-3235_fix_getMetricsAt_with_no_data 2020-08-04 20:04:49 +00:00
bert-e 8e1417ad6b Merge branch 'w/7.8/feature/S3C-3020_add_functional_test_for_snapshot_task' into tmp/octopus/w/8.1/feature/S3C-3020_add_functional_test_for_snapshot_task 2020-08-03 21:49:18 +00:00
bert-e ae29a7d346 Merge branch 'w/7.8/feature/S3C-3020_add_functional_test_for_checkpoint_task' into tmp/octopus/w/8.1/feature/S3C-3020_add_functional_test_for_checkpoint_task 2020-08-03 21:42:48 +00:00
bert-e 646f921ded Merge branch 'w/7.8/feature/S3C-3020_add_functional_test_ingest_task' into tmp/octopus/w/8.1/feature/S3C-3020_add_functional_test_ingest_task 2020-08-03 19:39:54 +00:00
bert-e 31ff2aa63b Merge branch 'w/7.8/feature/S3C-3007_Add_listMetrics_handler' into tmp/octopus/w/8.1/feature/S3C-3007_Add_listMetrics_handler 2020-08-03 19:30:40 +00:00
bert-e 37f6b4ddc5 Merge branch 'w/7.8/feature/S3C-3132-utapi-v2-push-metric' into tmp/octopus/w/8.1/feature/S3C-3132-utapi-v2-push-metric 2020-07-31 20:37:59 +00:00
bert-e d043d8dcae Merge branch 'w/7.8/feature/S3C-3006_add_metric_calculation_macro' into tmp/octopus/w/8.1/feature/S3C-3006_add_metric_calculation_macro 2020-07-29 21:52:13 +00:00
bert-e 6dbc500fa9 Merge branch 'w/7.8/feature/S3C-3196-update-node' into tmp/octopus/w/8.1/feature/S3C-3196-update-node 2020-07-27 21:55:34 +00:00
bert-e 8e1550d61a Merge branch 'w/7.8/feature/S3C-3196-update-node' into tmp/octopus/w/8.1/feature/S3C-3196-update-node 2020-07-27 07:40:11 +00:00
bert-e 4a811d7e86 Merge branch 'w/7.8/feature/S3C-3196-update-node' into tmp/octopus/w/8.1/feature/S3C-3196-update-node 2020-07-25 03:28:19 +00:00
bert-e 1225f45805 Merge branch 'w/7.8/feature/S3C-3020_add_lag_flag_to_task' into tmp/octopus/w/8.1/feature/S3C-3020_add_lag_flag_to_task 2020-07-20 21:40:30 +00:00
bert-e 81e5c9e98c Merge branch 'w/7.8/feature/S3C-3020_add_snapshot_creation_task' into tmp/octopus/w/8.1/feature/S3C-3020_add_snapshot_creation_task 2020-07-20 20:21:06 +00:00
bert-e cdb9ef06d1 Merge branch 'w/7.8/feature/S3C-3002_add_checkpoint_creation_task' into tmp/octopus/w/8.1/feature/S3C-3002_add_checkpoint_creation_task 2020-07-20 19:21:38 +00:00
bert-e 186807e798 Merge branch 'w/7.8/feature/S3C-3001_add_redis_to_warp10_task' into tmp/octopus/w/8.1/feature/S3C-3001_add_redis_to_warp10_task 2020-07-16 17:21:16 +00:00
bert-e 25ffbe3bbc Merge branch 'w/7.8/feature/S3C-3001_Add_shard_tracking_to_redis_backend' into tmp/octopus/w/8.1/feature/S3C-3001_Add_shard_tracking_to_redis_backend 2020-07-13 19:06:58 +00:00
bert-e 77f8fc4b11 Merge branch 'w/7.8/feature/S3C-3001_usec_resolution_for_shards' into tmp/octopus/w/8.1/feature/S3C-3001_usec_resolution_for_shards 2020-07-13 00:25:33 +00:00
bert-e a02fd4830c Merge branch 'w/7.8/feature/S3C-3005_add_ingest_api' into tmp/octopus/w/8.1/feature/S3C-3005_add_ingest_api 2020-07-07 17:54:27 +00:00
bert-e aa314c5ed9 Merge branch 'w/7.8/feature/S3C-3008_Add_cloudserver_client' into tmp/octopus/w/8.1/feature/S3C-3008_Add_cloudserver_client 2020-07-01 18:58:57 +00:00
bert-e 51858fe41a Merge branch 'w/7.8/feature/S3C-3004_Add_http_server' into tmp/octopus/w/8.1/feature/S3C-3004_Add_http_server 2020-06-29 19:45:46 +00:00
Taylor McKinnon 70a79537fe Merge remote-tracking branch 'origin/w/7.8/feature/S3C-3004_Add_http_server' into w/8.1/feature/S3C-3004_Add_http_server 2020-06-28 21:10:12 -07:00
bert-e b335675a36 Merge branch 'w/7.8/feature/S3C-3004_Add_process_absraction' into tmp/octopus/w/8.1/feature/S3C-3004_Add_process_absraction 2020-06-19 19:36:52 +00:00
bert-e 707620acf7 Merge branch 'w/7.8/feature/S3C-3004_Add_config_loading' into tmp/octopus/w/8.1/feature/S3C-3004_Add_config_loading 2020-06-18 23:47:50 +00:00
bert-e 30306f3dce Merge branch 'w/7.8/feature/S3C-3004_Add_stub_openapi' into tmp/octopus/w/8.1/feature/S3C-3004_Add_stub_openapi 2020-06-18 22:26:29 +00:00
bert-e d82623014d Merge branch 'w/7.8/feature/S3C-3000_Add_warp10_client' into tmp/octopus/w/8.1/feature/S3C-3000_Add_warp10_client 2020-06-18 20:31:03 +00:00
bert-e 1c5c011699 Merge branch 'w/7.8/feature/S3C-2999_add_redis_cache_client' into tmp/octopus/w/8.1/feature/S3C-2999_add_redis_cache_client 2020-06-18 20:26:15 +00:00
bert-e 282a55c724 Merge branch 'w/7.8/feature/S3C-2960-object-lock-metrics' into tmp/octopus/w/8.1/feature/S3C-2960-object-lock-metrics 2020-06-16 17:33:22 +00:00
Taylor McKinnon 90c8f49222 Merge remote-tracking branch 'origin/w/7.8/feature/S3C-3041_Add_v2_toggle' into w/8.1/feature/S3C-3041_Add_v2_toggle 2020-06-10 17:40:37 -07:00
Taylor McKinnon 2d0c104cc2 Merge remote-tracking branch 'origin/w/7.8/bugfix/S3C-3043_bump_scality_guidelines' into w/8.1/bugfix/S3C-3043_bump_scality_guidelines 2020-06-10 15:10:04 -07:00
bert-e 72bda734bf Merge branch 'w/7.8/bugfix/S3C-3023_bump_warp10_version' into tmp/octopus/w/8.1/bugfix/S3C-3023_bump_warp10_version 2020-06-02 23:09:36 +00:00
bert-e a2e8fe51b4 Merge branch 'feature/S3C-2878_Add_warp10_Dockerfile' into tmp/octopus/w/8.1/feature/S3C-2878_Add_warp10_Dockerfile 2020-05-26 19:49:42 +00:00
bert-e b494f1e85c Merge branch 'w/7.7/bugfix/S3C-2408/mpu-overwrite' into tmp/octopus/w/8.1/bugfix/S3C-2408/mpu-overwrite 2020-04-22 02:20:17 +00:00
bert-e af1a01b692 Merge branch 'w/8.0/improvement/S3C-2808-clean-upstream' into tmp/octopus/w/8.1/improvement/S3C-2808-clean-upstream 2020-04-21 23:59:47 +00:00
Rahul Padigela 1f0f7d91ff Merge remote-tracking branch 'origin/w/7.7/improvement/S3C-2808-clean-upstream' into w/8.0/improvement/S3C-2808-clean-upstream 2020-04-21 16:59:04 -07:00
Taylor McKinnon 5c6386e33d Merge remote-tracking branch 'origin/w/8.0/bugfix/S3C-2603_Update_utapi_reindex' into w/8.1/bugfix/S3C-2603_Update_utapi_reindex 2020-04-06 12:34:17 -07:00
Taylor McKinnon 0bf2a533f5 Merge remote-tracking branch 'origin/w/7.7/bugfix/S3C-2603_Update_utapi_reindex' into w/8.0/bugfix/S3C-2603_Update_utapi_reindex 2020-04-06 12:33:13 -07:00
bert-e c59323952d Merge branch 'w/8.0/bugfix/S3C-2604-listMultipleBucketMetrics' into tmp/octopus/w/8.1/bugfix/S3C-2604-listMultipleBucketMetrics 2020-02-26 09:30:18 +00:00
bert-e b05d8c5528 Merge branch 'w/7.7/bugfix/S3C-2604-listMultipleBucketMetrics' into tmp/octopus/w/8.0/bugfix/S3C-2604-listMultipleBucketMetrics 2020-02-26 09:30:17 +00:00
bert-e a5430ba8a8 Merge branch 'w/8.0/bugfix/S3C-2604-list-multiple-bucket-metrics' into tmp/octopus/w/8.1/bugfix/S3C-2604-list-multiple-bucket-metrics 2020-02-25 19:24:23 +00:00
bert-e cc0087c3ba Merge branch 'w/7.7/bugfix/S3C-2604-list-multiple-bucket-metrics' into tmp/octopus/w/8.0/bugfix/S3C-2604-list-multiple-bucket-metrics 2020-02-25 19:24:23 +00:00
bert-e 6a45a13ab4 Merge branch 'w/8.0/bugfix/S3C-2475/utapi_response_correction' into tmp/octopus/w/8.1/bugfix/S3C-2475/utapi_response_correction 2020-02-05 19:34:07 +00:00
bert-e 3dd760835f Merge branch 'w/7.7/bugfix/S3C-2475/utapi_response_correction' into tmp/octopus/w/8.0/bugfix/S3C-2475/utapi_response_correction 2020-02-05 19:34:07 +00:00
Flavien Lebarbé 2720bdb096 Merge branch 'development/8.0' into development/8.1 2019-11-11 19:15:36 +01:00
Flavien Lebarbé 54390f82ba Merge branch 'development/7.6' into development/8.0 2019-11-11 19:15:09 +01:00
Katherine Laue 5ccb8d03be Update yarn.lock 2019-09-11 15:41:50 -07:00
Katherine Laue 9fdad30ca0 improvement/S3C-2365 update vaultclient dep 2019-09-11 15:41:27 -07:00
Katherine Laue 3180aa2d02 update yarn.lock 2019-09-11 15:40:50 -07:00
Katherine Laue 60e4ed7880 remove yarn.lock 2019-09-11 15:40:50 -07:00
Katherine Laue d77d15c7dd update yarn.lock 2019-09-11 15:40:50 -07:00
Katherine Laue dc34912298 improvement/S3C-2364 install yarn frozen lockfile 2019-09-11 15:40:50 -07:00
Katherine Laue 4a845e80cd improvement/S3C-2364 migrate package manager to yarn 2019-09-11 15:40:50 -07:00
bbuchanan9 b56405f031 improvement S3C-234 Operation counters config 2019-09-11 15:40:50 -07:00
bbuchanan9 4d6fd39693 bugfix: S3C-2342 BucketD listing functional tests 2019-09-11 15:38:11 -07:00
bbuchanan9 b3a3383289 bugfix: S3C-2317 Add uuid module as a dependency 2019-09-11 15:38:11 -07:00
bbuchanan9 196acf9fc8 bugfix: S3C-2342 Add bucket listing pagination 2019-09-11 15:38:11 -07:00
bbuchanan9 347ac8faf1 bugfix: S3C-2315 Support versioning with reindex 2019-09-11 15:38:11 -07:00
bbuchanan9 a62c22f06d improvement: S3C-2337 Parallelize tests 2019-09-11 15:38:11 -07:00
bbuchanan9 d65b9a65ee bugfix: S3C-2317 Append UUID to sorted set members 2019-09-11 15:38:11 -07:00
bert-e 9533009100 Merge branch 'w/7.5/improvement/S3C-2337-parallelize-tests' into tmp/octopus/w/8.0/improvement/S3C-2337-parallelize-tests 2019-07-19 17:12:55 +00:00
bert-e d336997813 Merge branch 'w/7.5/bugfix/S3C-2317/use-uuid' into tmp/octopus/w/8.0/bugfix/S3C-2317/use-uuid 2019-07-19 01:22:33 +00:00
Katherine Laue 166d2c06cf Merge remote-tracking branch 'origin/w/7.5/improvement/S3C-2332-update-vaultclient' into w/8.0/improvement/S3C-2332-update-vaultclient 2019-07-16 13:52:39 -07:00
bbuchanan9 9042956610 improvement: S3C-2314 Update Scality dependencies 2019-07-15 13:54:34 -07:00
bert-e 4f754e26f9 Merge branch 'improvement/S3C-2314/update-scality-dependencies' into tmp/octopus/w/8.0/improvement/S3C-2314/update-scality-dependencies 2019-07-15 20:29:07 +00:00
bbuchanan9 dfb7a83b2a Merge remote-tracking branch 'origin/w/7.5/bugfix/S3C-2322/incorrect-expire-TTL-config-field' into w/8.0/bugfix/S3C-2322/incorrect-expire-TTL-config-field 2019-07-12 17:22:14 -07:00
Katherine Laue e8ac66ff09 Merge remote-tracking branch 'origin/w/7.5/improvement/S3C-2290-upgrade-nodejs' into w/8.0/improvement/S3C-2290-upgrade-nodejs 2019-07-08 14:39:56 -07:00
bert-e 1919808c09 Merge branch 'w/7.5/feature/S3C-2273/maintenance-testing-for-utapi-reindexer' into tmp/octopus/w/8.0/feature/S3C-2273/maintenance-testing-for-utapi-reindexer 2019-06-25 20:24:37 +00:00
bert-e 46f62388cd Merge branch 'w/7.5/feature/S3C-2260-maintenance-testing-for-utapi-reindexer' into tmp/octopus/w/8.0/feature/S3C-2260-maintenance-testing-for-utapi-reindexer 2019-06-19 17:58:13 +00:00
bert-e 894f37750f Merge branch 'w/7.5/bugfix/S3C-2019-reindex-script-redis-authentication' into tmp/octopus/w/8.0/bugfix/S3C-2019-reindex-script-redis-authentication 2019-06-07 04:28:53 +00:00
bert-e a990c743af Merge branch 'w/7.5/bugfix/S3C-2019-redis-sentinel-password' into tmp/octopus/w/8.0/bugfix/S3C-2019-redis-sentinel-password 2019-06-06 05:34:54 +00:00
bert-e 3a3083c379 Merge branch 'w/7.5/bugfix/S3C-2076/update-default-reindex-schedule' into tmp/octopus/w/8.0/bugfix/S3C-2076/update-default-reindex-schedule 2019-06-05 18:48:28 +00:00
bert-e 39b4b8b623 Merge branch 'w/7.5/bugfix/S3C-2076/add-utapi-reindex' into tmp/octopus/w/8.0/bugfix/S3C-2076/add-utapi-reindex 2019-06-05 04:45:30 +00:00
bert-e c5165a0338 Merge branches 'w/8.0/improvement/S3C-2034-bump-ioredis-version' and 'q/249/7.5/improvement/S3C-2034-bump-ioredis-version' into tmp/octopus/q/8.0 2019-05-21 17:38:37 +00:00
bert-e ef56d39193 Merge branch 'w/7.5/improvement/S3C-2034-bump-ioredis-version' into tmp/octopus/w/8.0/improvement/S3C-2034-bump-ioredis-version 2019-05-20 21:49:40 +00:00
bert-e da7144389d Merge branch 'w/7.5/bugfix/S3C-2195/upload-copy-part-metrics' into tmp/octopus/w/8.0/bugfix/S3C-2195/upload-copy-part-metrics 2019-05-20 20:44:58 +00:00
bert-e d2020f8190 Merge branches 'w/8.0/bugfix/S3C-2155/allow-range-into-the-future' and 'q/242/7.5/bugfix/S3C-2155/allow-range-into-the-future' into tmp/octopus/q/8.0 2019-05-10 00:03:41 +00:00
bert-e 27ef9dfa33 Merge branch 'w/7.5/bugfix/S3C-2155/allow-range-into-the-future' into tmp/octopus/w/8.0/bugfix/S3C-2155/allow-range-into-the-future 2019-05-09 23:17:31 +00:00
bert-e fae26f0933 Merge branches 'w/8.0/bugfix/S3C-2105/push-backbeat-metrics' and 'q/236/7.5/bugfix/S3C-2105/push-backbeat-metrics' into tmp/octopus/q/8.0 2019-05-09 23:09:10 +00:00
bert-e 270591bf23 Merge branch 'w/7.5/bugfix/S3C-1506/start-end-reducer-values' into tmp/octopus/w/8.0/bugfix/S3C-1506/start-end-reducer-values 2019-05-09 17:01:16 +00:00
bert-e 12fa8b567c Merge branch 'w/7.5/bugfix/S3C-2105/push-backbeat-metrics' into tmp/octopus/w/8.0/bugfix/S3C-2105/push-backbeat-metrics 2019-05-08 23:04:06 +00:00
bbuchanan9 fac88a209f Merge remote-tracking branch 'origin/w/7.5/bugfix/S3C-1506/add-long-range-request-ft-tests' into w/8.0/bugfix/S3C-1506/add-long-range-request-ft-tests 2019-05-02 15:34:43 -07:00
bert-e ef2c350724 Merge branch 'w/7.5/bugfix/S3C-2155/time-range-validation' into tmp/octopus/w/8.0/bugfix/S3C-2155/time-range-validation 2019-05-01 23:15:42 +00:00
bert-e 46bb81e9f8 Merge branch 'w/7.5/bugfix/S3C-2155/time-range-validation' into tmp/octopus/w/8.0/bugfix/S3C-2155/time-range-validation 2019-05-01 23:08:25 +00:00
bert-e 829369d37b Merge branch 'w/7.5/bugfix/S3C-1506/prevent-heap-memory-issue' into tmp/octopus/w/8.0/bugfix/S3C-1506/prevent-heap-memory-issue 2019-04-30 20:07:06 +00:00
bert-e b5def9cb54 Merge branch 'w/7.5/improvement/S3C-2140/do-not-track-dump.rbd' into tmp/octopus/w/8.0/improvement/S3C-2140/do-not-track-dump.rbd 2019-04-26 20:13:05 +00:00
bert-e 2b514a618e Merge branch 'w/7.5/feature/S3C-2133/add-eve-support' into tmp/octopus/w/8.0/feature/S3C-2133/add-eve-support 2019-04-26 17:25:50 +00:00
bbuchanan9 4f119ea917 documentation: S3C-2070 Update README
* Remove outdated CI badge
* Update links
* Update component name
* Fix typos
* Redefine CLI input fields
2019-04-04 14:58:35 -07:00
anurag4dsb 608fddb4bd
Merge remote-tracking branch 'origin/feature/S3C-1561-getStorageUsedForAccountQuotas' into w/8.0/feature/S3C-1561-getStorageUsedForAccountQuotas 2019-01-24 11:25:17 -08:00
Rahul Padigela f2f1d0c742 improvement: reply arsenal errors to the client
Without replying Arsenal style errors, the lib breaks the contract and causes an
exception on the cloudserver
2018-08-31 16:27:33 -07:00
Dora Korpar 6d0c8dd1c0 bf: ZENKO 676 - only location metrics 2018-07-06 13:17:05 -07:00
bert-e cd3324df87 Merge branch 'bugfix/dependencies' into tmp/octopus/w/8.0/bugfix/dependencies 2018-06-29 14:07:22 +00:00
David Pineau 4664ee3cca Merge remote-tracking branch 'origin/development/7.4' into development/8.0 2018-06-28 19:45:42 +02:00
David Pineau a00aa6f05f Merge remote-tracking branch 'origin/development/7.4' into development/8.0 2018-06-28 14:58:45 +02:00
bert-e 4b646285d2 Merge branch 'feature/ZENKO-142-location-quota-metric' into q/8.0 2018-06-27 17:27:55 +00:00
bert-e e77bcc8e72 Merge branch 'feature/S3C-1212-expire-metrics' into tmp/octopus/w/8.0/feature/S3C-1212-expire-metrics 2018-06-26 22:10:37 +00:00
Rahul Padigela e3511ee7ef Merge remote-tracking branch 'origin/development/7.4' into improvement/port-7.4 2018-06-26 14:55:42 -07:00
Dora Korpar fc634ee028 ft: ZENKO 142 Location quota metrics 2018-06-26 14:44:35 -07:00
Rahul Padigela 4c776b3eb5
Merge pull request #177 from scality/ft/ZENKO-465-utapi-docker-image
ft: ZENKO 465 Utapi docker image
2018-06-07 14:10:00 -07:00
Dora Korpar 33024215e3 ft: ZENKO 465 Utapi docker image 2018-06-07 10:37:20 -07:00
Dora Korpar 4965d96f5c
Merge pull request #175 from scality/ft/ZENKO-386-utapi-service-accounts
ft: ZENKO 386 zenko utapi integration
2018-06-04 14:10:32 -07:00
Dora Korpar 0bfd8a66fb ft: ZENKO 386 zenko utapi integration 2018-05-31 11:59:40 -07:00
Rahul Padigela a8a8ad42ff chore: update version and dependencies 2018-05-31 11:23:56 -07:00
Rahul Padigela 8e11d15893
Merge pull request #174 from scality/fwdport/7.4-beta-master
Fwdport/7.4 beta master
2018-04-23 00:15:16 -07:00
Rahul Padigela bf1cbe4bf4 Merge remote-tracking branch 'origin/rel/7.4-beta' into fwdport/7.4-beta-master 2018-04-23 00:12:38 -07:00
Rahul Padigela a4ab00ad92
Merge pull request #173 from scality/fwdport/7.4-7.4-beta
Fwdport/7.4 7.4 beta
2018-04-19 11:04:48 -07:00
Rahul Padigela 6c4e7aedce Merge remote-tracking branch 'origin/rel/7.4' into fwdport/7.4-7.4-beta 2018-04-19 11:01:53 -07:00
Stefano Maffulli b27c57bcfc
Merge pull request #172 from scality/FT/addIssueTemplate
FT: ZNC-26: add issue template
2018-04-12 15:45:21 -07:00
LaureVergeron 1fda068967 FT: ZNC-26: add issue template 2018-04-11 11:11:17 +02:00
Rahul Padigela 18bf5bb00e
Merge pull request #170 from scality/fwdport/7.4-beta-master
Fwdport/7.4 beta master
2018-03-27 15:33:52 -07:00
Alexander Chan 6de529b8b4 fix dependencies 2018-03-20 08:15:38 -07:00
Alexander Chan ec3efcb9af Merge remote-tracking branch 'origin/rel/7.4-beta' into fwdport/7.4-beta-master 2018-03-19 16:05:52 -07:00
Rahul Padigela d77f8cc46c ft: update version 2018-03-14 13:30:18 -07:00
Rahul Padigela 7487555957
Merge pull request #141 from scality/ft/add-example-python-request-script
FT: Add python request example
2018-03-05 11:13:03 -08:00
Bennett Buchanan 7fbddc071b FT: Add python request example 2018-03-05 11:10:56 -08:00
ironman-machine 6d708d54d0 merge #160 2018-02-09 14:14:25 +00:00
Rayene Ben Rayana 6ab610b27f ft: add eve ci support 2018-02-01 16:48:06 -08:00
63 changed files with 1437 additions and 5776 deletions

87
.github/ISSUE_TEMPLATE.md vendored Normal file
View File

@ -0,0 +1,87 @@
# General support information
GitHub Issues are **reserved** for actionable bug reports (including
documentation inaccuracies), and feature requests.
**All questions** (regarding configuration, use cases, performance, community,
events, setup and usage recommendations, among other things) should be asked on
the **[Zenko Forum](http://forum.zenko.io/)**.
> Questions opened as GitHub issues will systematically be closed, and moved to
> the [Zenko Forum](http://forum.zenko.io/).
--------------------------------------------------------------------------------
## Avoiding duplicates
When reporting a new issue/requesting a feature, make sure that we do not have
any duplicates already open:
- search the issue list for this repository (use the search bar, select
"Issues" on the left pane after searching);
- if there is a duplicate, please do not open your issue, and add a comment
to the existing issue instead.
--------------------------------------------------------------------------------
## Bug report information
(delete this section (everything between the lines) if you're not reporting a
bug but requesting a feature)
### Description
Briefly describe the problem you are having in a few paragraphs.
### Steps to reproduce the issue
Please provide steps to reproduce, including full log output
### Actual result
Describe the results you received
### Expected result
Describe the results you expected
### Additional information
- Node.js version,
- Docker version,
- npm version,
- distribution/OS,
- optional: anything else you deem helpful to us.
--------------------------------------------------------------------------------
## Feature Request
(delete this section (everything between the lines) if you're not requesting
a feature but reporting a bug)
### Proposal
Describe the feature
### Current behavior
What currently happens
### Desired behavior
What you would like to happen
### Use case
Please provide use cases for changing the current behavior
### Additional information
- Is this request for your company? Y/N
- If Y: Company name:
- Are you using any Scality Enterprise Edition products (RING, Zenko EE)? Y/N
- Are you willing to contribute this feature yourself?
- Position/Title:
- How did you hear about us?
--------------------------------------------------------------------------------

View File

@ -1,8 +1,7 @@
FROM registry.scality.com/vault-dev/vault:c2607856
FROM ghcr.io/scality/vault:c2607856
ENV VAULT_DB_BACKEND LEVELDB
RUN chmod 400 tests/utils/keyfile
ENTRYPOINT yarn start

65
.github/workflows/build-ci.yaml vendored Normal file
View File

@ -0,0 +1,65 @@
# Reusable workflow that builds the CI helper images (warp10, redis,
# redis replica, vault) and pushes them to ghcr.io tagged with the
# commit SHA, so downstream test jobs can pull them as services.
# NOTE(review): extraction flattened the original indentation; structure
# reconstructed per the GitHub Actions workflow schema — verify against
# the repository copy.
name: build-ci-images

on:
  workflow_call:

jobs:
  # warp10 time-series DB image used by the functional test jobs.
  warp10-ci:
    uses: scality/workflows/.github/workflows/docker-build.yaml@v2
    secrets:
      REGISTRY_LOGIN: ${{ github.repository_owner }}
      REGISTRY_PASSWORD: ${{ github.token }}
    with:
      name: warp10-ci
      context: .
      file: images/warp10/Dockerfile
      lfs: true
  # Primary redis image for CI.
  redis-ci:
    uses: scality/workflows/.github/workflows/docker-build.yaml@v2
    secrets:
      REGISTRY_LOGIN: ${{ github.repository_owner }}
      REGISTRY_PASSWORD: ${{ github.token }}
    with:
      name: redis-ci
      context: .
      file: images/redis/Dockerfile
  # Replica image derives FROM the redis-ci image built above, hence the
  # job dependency and the REDIS_IMAGE build-arg.
  redis-replica-ci:
    uses: scality/workflows/.github/workflows/docker-build.yaml@v2
    needs:
      - redis-ci
    secrets:
      REGISTRY_LOGIN: ${{ github.repository_owner }}
      REGISTRY_PASSWORD: ${{ github.token }}
    with:
      name: redis-replica-ci
      context: .github/docker/redis-replica
      build-args: |
        REDIS_IMAGE=ghcr.io/${{ github.repository }}/redis-ci:${{ github.sha }}
  # vault image is built inline (not via the reusable workflow) so it can
  # use GHA layer caching with a dedicated scope.
  vault-ci:
    runs-on: ubuntu-20.04
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          lfs: true
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to GitHub Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ github.token }}
      - name: Build and push vault Image
        uses: docker/build-push-action@v5
        with:
          push: true
          context: .github/docker/vault
          tags: ghcr.io/${{ github.repository }}/vault-ci:${{ github.sha }}
          cache-from: type=gha,scope=vault
          cache-to: type=gha,mode=max,scope=vault

16
.github/workflows/build-dev.yaml vendored Normal file
View File

@ -0,0 +1,16 @@
# Builds a development image for every push to a non-development branch,
# delegating the build/push to the shared scality docker-build workflow.
# NOTE(review): extraction flattened the original indentation; structure
# reconstructed per the GitHub Actions workflow schema — verify against
# the repository copy.
name: build-dev-image

on:
  push:
    branches-ignore:
      - 'development/**'

jobs:
  build-dev:
    uses: scality/workflows/.github/workflows/docker-build.yaml@v2
    secrets:
      REGISTRY_LOGIN: ${{ github.repository_owner }}
      REGISTRY_PASSWORD: ${{ github.token }}
    with:
      namespace: ${{ github.repository_owner }}
      name: ${{ github.event.repository.name }}

39
.github/workflows/release-warp10.yaml vendored Normal file
View File

@ -0,0 +1,39 @@
# Manually-triggered release of the warp10 image: builds and pushes the
# image tagged "<tag>-warp10", then (optionally) creates a matching
# GitHub release.
# NOTE(review): extraction flattened the original indentation; structure
# reconstructed per the GitHub Actions workflow schema — verify against
# the repository copy.
name: release-warp10

on:
  workflow_dispatch:
    inputs:
      tag:
        type: string
        description: 'Tag to be released'
        required: true
      create-github-release:
        type: boolean
        description: Create a tag and matching Github release.
        required: false
        default: true

jobs:
  build:
    uses: scality/workflows/.github/workflows/docker-build.yaml@v2
    # inherit forwards all caller secrets to the reusable workflow.
    secrets: inherit
    with:
      name: warp10
      context: .
      file: images/warp10/Dockerfile
      tag: ${{ github.event.inputs.tag }}
      lfs: true
  # Only runs when the dispatcher asked for a GitHub release.
  release:
    if: ${{ inputs.create-github-release }}
    runs-on: ubuntu-latest
    needs: build
    steps:
      - uses: softprops/action-gh-release@v2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          name: Release utapi/warp10:${{ github.event.inputs.tag }}-warp10
          tag_name: ${{ github.event.inputs.tag }}-warp10
          generate_release_notes: false
          target_commitish: ${{ github.sha }}

View File

@ -3,44 +3,43 @@ name: release
on:
workflow_dispatch:
inputs:
dockerfile:
description: Dockerfile to build image from
type: choice
options:
- images/nodesvc-base/Dockerfile
- Dockerfile
required: true
tag:
type: string
description: 'Tag to be released'
required: true
create-github-release:
type: boolean
description: Create a tag and matching Github release.
required: false
default: false
jobs:
build:
uses: scality/workflows/.github/workflows/docker-build.yaml@v2
with:
namespace: ${{ github.repository_owner }}
name: ${{ github.event.repository.name }}
context: .
file: ${{ github.event.inputs.dockerfile}}
tag: ${{ github.event.inputs.tag }}
release:
if: ${{ inputs.create-github-release }}
runs-on: ubuntu-latest
needs: build
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildk
uses: docker/setup-buildx-action@v1
- name: Login to Registry
uses: docker/login-action@v1
with:
registry: registry.scality.com
username: ${{ secrets.REGISTRY_LOGIN }}
password: ${{ secrets.REGISTRY_PASSWORD }}
- name: Build and push utapi image
uses: docker/build-push-action@v2
with:
context: .
push: true
tags: "registry.scality.com/utapi/utapi:${{ github.event.inputs.tag }}"
- name: Create Release
uses: softprops/action-gh-release@v1
- uses: softprops/action-gh-release@v2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ github.token }}
with:
name: Release ${{ github.event.inputs.tag }}
tag_name: ${{ github.event.inputs.tag }}
generate_release_notes: true
target_commitish: ${{ github.sha }}

View File

@ -4,83 +4,32 @@ name: tests
on:
push:
branches-ignore:
- 'development/**'
- 'development/**'
workflow_dispatch:
inputs:
debug:
description: Debug (enable the ability to SSH to runners)
type: boolean
required: false
default: 'false'
connection-timeout-m:
type: number
required: false
description: Timeout for ssh connection to worker (minutes)
default: 30
jobs:
build:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@v2.3.4
with:
lfs: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1.6.0
- name: Login to GitHub Registry
uses: docker/login-action@v1.10.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Scality Registry
uses: docker/login-action@v1.10.0
with:
registry: registry.scality.com
username: ${{ secrets.REGISTRY_LOGIN }}
password: ${{ secrets.REGISTRY_PASSWORD }}
- name: Build and push redis CI image
uses: docker/build-push-action@v2.7.0
with:
push: true
file: images/redis/Dockerfile
context: '.'
tags: ghcr.io/${{ github.repository }}/redis-ci:${{ github.sha }}
cache-from: type=gha,scope=redis
cache-to: type=gha,mode=max,scope=redis
- name: Build and push redis replica CI image
uses: docker/build-push-action@v2.7.0
with:
push: true
context: .github/docker/redis-replica
build-args: |
REDIS_IMAGE=ghcr.io/${{ github.repository }}/redis-ci:${{ github.sha }}
tags: ghcr.io/${{ github.repository }}/redis-replica-ci:${{ github.sha }}
cache-from: type=gha,scope=redis-replica
cache-to: type=gha,mode=max,scope=redis-replica
- name: Build and push warp10 Image
uses: docker/build-push-action@v2.7.0
with:
push: true
file: images/warp10/Dockerfile
context: '.'
tags: ghcr.io/${{ github.repository }}/warp10-ci:${{ github.sha }}
cache-from: type=gha,scope=warp10
cache-to: type=gha,mode=max,scope=warp10
- name: Build and push vault Image
uses: docker/build-push-action@v2.7.0
with:
push: true
context: '.github/docker/vault'
tags: ghcr.io/${{ github.repository }}/vault-ci:${{ github.sha }}
cache-from: type=gha,scope=vault
cache-to: type=gha,mode=max,scope=vault
build-ci:
uses: ./.github/workflows/build-ci.yaml
lint:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
lfs: true
- uses: actions/setup-node@v2
- uses: actions/setup-node@v4
with:
node-version: '16.13.2'
cache: yarn
@ -91,8 +40,9 @@ jobs:
- name: run static analysis tools on code
run: yarn run lint
tests:
needs: build
tests-v1:
needs:
- build-ci
runs-on: ubuntu-latest
env:
REINDEX_PYTHON_INTERPRETER: python3
@ -105,16 +55,16 @@ jobs:
command: yarn test
env:
UTAPI_METRICS_ENABLED: 'true'
- name: run client tests
- name: run v1 client tests
command: bash ./.github/scripts/run_ft_tests.bash false ft_test:client
env: {}
- name: run server tests
- name: run v1 server tests
command: bash ./.github/scripts/run_ft_tests.bash false ft_test:server
env: {}
- name: run cron tests
- name: run v1 cron tests
command: bash ./.github/scripts/run_ft_tests.bash false ft_test:cron
env: {}
- name: run interval tests
- name: run v1 interval tests
command: bash ./.github/scripts/run_ft_tests.bash true ft_test:interval
env: {}
services:
@ -138,7 +88,7 @@ jobs:
--health-timeout 5s
--health-retries 5
redis-sentinel:
image: bitnami/redis-sentinel:6.2
image: bitnami/redis-sentinel:7.2.4
env:
REDIS_MASTER_SET: scality-s3
REDIS_SENTINEL_PORT_NUMBER: '16379'
@ -169,59 +119,31 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
lfs: true
- uses: actions/setup-node@v2
- uses: actions/setup-node@v4
with:
node-version: '16.13.2'
cache: yarn
- uses: actions/setup-python@v2
- uses: actions/setup-python@v5
with:
python-version: '3.9'
- uses: actions/cache@v2
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip
cache: pip
- name: Install python deps
run: |
pip install requests
pip install redis
run: pip install -r requirements.txt
- name: install dependencies
run: yarn install --frozen-lockfile --network-concurrency 1
- name: ${{ matrix.test.name }}
run: ${{ matrix.test.command }}
env: ${{ matrix.test.env }}
tests-with-vault:
needs: build
tests-v2-with-vault:
needs:
- build-ci
runs-on: ubuntu-latest
env:
REINDEX_PYTHON_INTERPRETER: python3
name: ${{ matrix.test.name }}
strategy:
fail-fast: false
matrix:
test:
- name: run v2 functional tests
command: bash ./.github/scripts/run_ft_tests.bash true ft_test:v2
env:
UTAPI_CACHE_BACKEND: redis
UTAPI_SERVICE_USER_ENABLED: 'true'
UTAPI_LOG_LEVEL: trace
SETUP_CMD: "run start_v2:server"
- name: run v2 soft limit test
command: bash ./.github/scripts/run_ft_tests.bash true ft_test:softLimit
env:
UTAPI_CACHE_BACKEND: redis
UTAPI_LOG_LEVEL: trace
SETUP_CMD: "run start_v2:server"
- name: run v2 hard limit test
command: bash ./.github/scripts/run_ft_tests.bash true ft_test:hardLimit
env:
UTAPI_CACHE_BACKEND: redis
UTAPI_LOG_LEVEL: trace
SETUP_CMD: "run start_v2:server"
services:
redis:
image: ghcr.io/${{ github.repository }}/redis-ci:${{ github.sha }}
@ -243,7 +165,7 @@ jobs:
--health-timeout 5s
--health-retries 5
redis-sentinel:
image: bitnami/redis-sentinel:6.2
image: bitnami/redis-sentinel:7.2.4
env:
REDIS_MASTER_SET: scality-s3
REDIS_SENTINEL_PORT_NUMBER: '16379'
@ -286,24 +208,140 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
lfs: true
- uses: actions/setup-node@v2
- uses: actions/setup-node@v4
with:
node-version: '16.13.2'
cache: yarn
- uses: actions/setup-python@v2
- uses: actions/setup-python@v5
with:
python-version: '3.9'
- uses: actions/cache@v2
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip
cache: pip
- name: Install python deps
run: |
pip install requests
pip install redis
run: pip install -r requirements.txt
- name: install dependencies
run: yarn install --frozen-lockfile --network-concurrency 1
- name: Wait for warp10 for 60 seconds
run: sleep 60
- name: run v2 functional tests
run: bash ./.github/scripts/run_ft_tests.bash true ft_test:v2
env:
UTAPI_CACHE_BACKEND: redis
UTAPI_SERVICE_USER_ENABLED: 'true'
UTAPI_LOG_LEVEL: trace
SETUP_CMD: "run start_v2:server"
- name: 'Debug: SSH to runner'
uses: scality/actions/action-ssh-to-runner@1.7.0
timeout-minutes: ${{ fromJSON(github.event.inputs.connection-timeout-m) }}
continue-on-error: true
with:
tmate-server-host: ${{ secrets.TMATE_SERVER_HOST }}
tmate-server-port: ${{ secrets.TMATE_SERVER_PORT }}
tmate-server-rsa-fingerprint: ${{ secrets.TMATE_SERVER_RSA_FINGERPRINT }}
tmate-server-ed25519-fingerprint: ${{ secrets.TMATE_SERVER_ED25519_FINGERPRINT }}
if: ${{ ( github.event.inputs.debug == true || github.event.inputs.debug == 'true' ) }}
tests-v2-without-sensision:
needs:
- build-ci
runs-on: ubuntu-latest
env:
REINDEX_PYTHON_INTERPRETER: python3
name: ${{ matrix.test.name }}
strategy:
fail-fast: false
matrix:
test:
- name: run v2 soft limit test
command: bash ./.github/scripts/run_ft_tests.bash true ft_test:softLimit
env:
UTAPI_CACHE_BACKEND: redis
UTAPI_LOG_LEVEL: trace
SETUP_CMD: "run start_v2:server"
- name: run v2 hard limit test
command: bash ./.github/scripts/run_ft_tests.bash true ft_test:hardLimit
env:
UTAPI_CACHE_BACKEND: redis
UTAPI_LOG_LEVEL: trace
SETUP_CMD: "run start_v2:server"
services:
redis:
image: ghcr.io/${{ github.repository }}/redis-ci:${{ github.sha }}
ports:
- 6379:6379
- 9121:9121
options: >-
--health-cmd "redis-cli ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
redis-replica:
image: ghcr.io/${{ github.repository }}/redis-replica-ci:${{ github.sha }}
ports:
- 6380:6380
options: >-
--health-cmd "redis-cli -p 6380 ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
redis-sentinel:
image: bitnami/redis-sentinel:7.2.4
env:
REDIS_MASTER_SET: scality-s3
REDIS_SENTINEL_PORT_NUMBER: '16379'
REDIS_SENTINEL_QUORUM: '1'
ports:
- 16379:16379
options: >-
--health-cmd "redis-cli -p 16379 ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
warp10:
image: ghcr.io/${{ github.repository }}/warp10-ci:${{ github.sha }}
env:
standalone.port: '4802'
warpscript.maxops: '10000000'
ports:
- 4802:4802
- 8082:8082
- 9718:9718
options: >-
--health-cmd "curl localhost:4802/api/v0/check"
--health-interval 10s
--health-timeout 5s
--health-retries 10
--health-start-period 60s
vault:
image: ghcr.io/${{ github.repository }}/vault-ci:${{ github.sha }}
ports:
- 8500:8500
- 8600:8600
- 8700:8700
- 8800:8800
options: >-
--health-cmd "curl http://localhost:8500/_/healthcheck"
--health-interval 10s
--health-timeout 5s
--health-retries 10
steps:
- name: Checkout
uses: actions/checkout@v4
with:
lfs: true
- uses: actions/setup-node@v4
with:
node-version: '16.13.2'
cache: yarn
- uses: actions/setup-python@v5
with:
python-version: '3.9'
cache: pip
- name: Install python deps
run: pip install -r requirements.txt
- name: install dependencies
run: yarn install --frozen-lockfile --network-concurrency 1
- name: Wait for warp10 a little bit
@ -311,6 +349,13 @@ jobs:
- name: ${{ matrix.test.name }}
run: ${{ matrix.test.command }}
env: ${{ matrix.test.env }}
- name: Setup tmate session
uses: mxschmitt/action-tmate@v3
if: failure()
- name: 'Debug: SSH to runner'
uses: scality/actions/action-ssh-to-runner@1.7.0
timeout-minutes: ${{ fromJSON(github.event.inputs.connection-timeout-m) }}
continue-on-error: true
with:
tmate-server-host: ${{ secrets.TMATE_SERVER_HOST }}
tmate-server-port: ${{ secrets.TMATE_SERVER_PORT }}
tmate-server-rsa-fingerprint: ${{ secrets.TMATE_SERVER_RSA_FINGERPRINT }}
tmate-server-ed25519-fingerprint: ${{ secrets.TMATE_SERVER_ED25519_FINGERPRINT }}
if: ${{ ( github.event.inputs.debug == true || github.event.inputs.debug == 'true' ) }}

31
Dockerfile Normal file
View File

@ -0,0 +1,31 @@
# Production image for utapi: installs yarn, builds production node_modules,
# then strips build toolchain and caches to keep the image small.
FROM node:16.13.2-buster-slim
WORKDIR /usr/src/app
# Copy only the dependency manifests first so the install layer is cached
# until package.json/yarn.lock change.
COPY package.json yarn.lock /usr/src/app/
# curl + gnupg2 are needed to add the yarn apt repository below.
RUN apt-get update \
&& apt-get install -y \
curl \
gnupg2
# Register the yarn apt repository and its signing key.
RUN curl -sS http://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
&& echo "deb http://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
# Install build deps, do a production-only install, then purge the build
# toolchain (python3/git/build-essential) and all caches in the same layer
# so none of it is baked into the final image.
RUN apt-get update \
&& apt-get install -y jq git python3 build-essential yarn --no-install-recommends \
&& yarn cache clean \
&& yarn install --frozen-lockfile --production --ignore-optional --network-concurrency=1 \
&& apt-get autoremove --purge -y python3 git build-essential \
&& rm -rf /var/lib/apt/lists/* \
&& yarn cache clean \
&& rm -rf ~/.node-gyp \
&& rm -rf /tmp/yarn-*
# Keep the .git directory in order to properly report version
COPY . /usr/src/app
# Entrypoint rewrites config.json from env vars, then runs CMD.
ENTRYPOINT ["/usr/src/app/docker-entrypoint.sh"]
CMD [ "yarn", "start" ]
EXPOSE 8100

View File

@ -3,9 +3,8 @@
![Utapi logo](res/utapi-logo.png)
[![Circle CI][badgepub]](https://circleci.com/gh/scality/utapi)
[![Scality CI][badgepriv]](http://ci.ironmann.io/gh/scality/utapi)
Service Utilization API for tracking resource usage and metrics reporting
Service Utilization API for tracking resource usage and metrics reporting.
## Design
@ -88,13 +87,13 @@ Server is running.
1. Create an IAM user
```
aws iam --endpoint-url <endpoint> create-user --user-name utapiuser
aws iam --endpoint-url <endpoint> create-user --user-name <user-name>
```
2. Create access key for the user
```
aws iam --endpoint-url <endpoint> create-access-key --user-name utapiuser
aws iam --endpoint-url <endpoint> create-access-key --user-name <user-name>
```
3. Define a managed IAM policy
@ -203,12 +202,11 @@ Server is running.
5. Attach user to the managed policy
```
aws --endpoint-url <endpoint> iam attach-user-policy --user-name utapiuser
--policy-arn <policy arn>
aws --endpoint-url <endpoint> iam attach-user-policy --user-name
<user-name> --policy-arn <policy arn>
```
Now the user `utapiuser` has access to ListMetrics request in Utapi on all
buckets.
Now the user has access to ListMetrics request in Utapi on all buckets.
### Signing request with Auth V4
@ -224,16 +222,18 @@ following urls for reference.
You may also view examples making a request with Auth V4 using various languages
and AWS SDKs [here](/examples).
Alternatively, you can use a nifty command line tool available in Scality's S3.
Alternatively, you can use a nifty command line tool available in Scality's
CloudServer.
You can git clone S3 repo from here https://github.com/scality/S3.git and follow
the instructions in README to install the dependencies.
You can git clone the CloudServer repo from here
https://github.com/scality/cloudserver and follow the instructions in the README
to install the dependencies.
If you have S3 running inside a docker container you can docker exec into the S3
container as
If you have CloudServer running inside a docker container you can docker exec
into the CloudServer container as
```
docker exec -it <container id> bash
docker exec -it <container-id> bash
```
and then run the command
@ -271,7 +271,7 @@ Usage: list_metrics [options]
-v, --verbose
```
A typical call to list metrics for a bucket `demo` to Utapi in a https enabled
An example call to list metrics for a bucket `demo` to Utapi in a https enabled
deployment would be
```
@ -283,7 +283,7 @@ Both start and end times are time expressed as UNIX epoch timestamps **expressed
in milliseconds**.
Keep in mind, since Utapi metrics are normalized to the nearest 15 min.
interval, so start time and end time need to be in specific format as follows.
interval, start time and end time need to be in the specific format as follows.
#### Start time
@ -297,7 +297,7 @@ Date: Tue Oct 11 2016 17:35:25 GMT-0700 (PDT)
Unix timestamp (milliseconds): 1476232525320
Here's a typical JS method to get start timestamp
Here's an example JS method to get a start timestamp
```javascript
function getStartTimestamp(t) {
@ -317,7 +317,7 @@ seconds and milliseconds set to 59 and 999 respectively. So valid end timestamps
would look something like `09:14:59:999`, `09:29:59:999`, `09:44:59:999` and
`09:59:59:999`.
Here's a typical JS method to get end timestamp
Here's an example JS method to get an end timestamp
```javascript
function getEndTimestamp(t) {
@ -342,4 +342,3 @@ In order to contribute, please follow the
https://github.com/scality/Guidelines/blob/master/CONTRIBUTING.md).
[badgepub]: http://circleci.com/gh/scality/utapi.svg?style=svg
[badgepriv]: http://ci.ironmann.io/gh/scality/utapi.svg?style=svg

View File

@ -27,7 +27,7 @@ x-models:
services:
redis-0:
image: redis:6
image: redis:7.2.4
command: redis-server --port 6379 --slave-announce-ip "${EXTERNAL_HOST}"
ports:
- 6379:6379
@ -35,7 +35,7 @@ services:
- HOST_IP="${EXTERNAL_HOST}"
redis-1:
image: redis:6
image: redis:7.2.4
command: redis-server --port 6380 --slaveof "${EXTERNAL_HOST}" 6379 --slave-announce-ip "${EXTERNAL_HOST}"
ports:
- 6380:6380
@ -43,7 +43,7 @@ services:
- HOST_IP="${EXTERNAL_HOST}"
redis-sentinel-0:
image: redis:6
image: redis:7.2.4
command: |-
bash -c 'cat > /tmp/sentinel.conf <<EOF
port 16379

47
docker-entrypoint.sh Executable file
View File

@ -0,0 +1,47 @@
#!/bin/bash
# Container entrypoint: builds a jq filter from environment variables,
# applies it to config.json in place, then execs the container command.
# set -e stops the execution of a script if a command or pipeline has an error
set -e
# modifying config.json
# Start with the identity filter; each recognized env var appends an
# assignment to the filter chain.
JQ_FILTERS_CONFIG="."
if [[ "$LOG_LEVEL" ]]; then
if [[ "$LOG_LEVEL" == "info" || "$LOG_LEVEL" == "debug" || "$LOG_LEVEL" == "trace" ]]; then
JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .log.logLevel=\"$LOG_LEVEL\""
echo "Log level has been modified to $LOG_LEVEL"
else
# Invalid level: warn but keep going with the config file's default.
echo "The log level you provided is incorrect (info/debug/trace)"
fi
fi
# NOTE(review): the following values are written as JSON strings (quoted),
# including ports — presumably the consumer coerces them; confirm.
if [[ "$WORKERS" ]]; then
JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .workers=\"$WORKERS\""
fi
if [[ "$REDIS_HOST" ]]; then
JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .redis.host=\"$REDIS_HOST\""
fi
if [[ "$REDIS_PORT" ]]; then
JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .redis.port=\"$REDIS_PORT\""
fi
if [[ "$VAULTD_HOST" ]]; then
JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .vaultd.host=\"$VAULTD_HOST\""
fi
if [[ "$VAULTD_PORT" ]]; then
JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .vaultd.port=\"$VAULTD_PORT\""
fi
# Single-element array; multiple allowFrom entries are not supported here.
if [[ "$HEALTHCHECKS_ALLOWFROM" ]]; then
JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .healthChecks.allowFrom=[\"$HEALTHCHECKS_ALLOWFROM\"]"
fi
# Only rewrite config.json when at least one filter was added; write to a
# temp file first because jq cannot edit its input in place.
if [[ $JQ_FILTERS_CONFIG != "." ]]; then
jq "$JQ_FILTERS_CONFIG" config.json > config.json.tmp
mv config.json.tmp config.json
fi
# Replace this shell with the container command (CMD / docker run args).
exec "$@"

42
docs/RELEASE.md Normal file
View File

@ -0,0 +1,42 @@
# Utapi Release Plan
## Docker Image Generation
Docker images are hosted on [ghcr.io](https://github.com/orgs/scality/packages).
Utapi has one namespace there:
* Namespace: ghcr.io/scality/utapi
With every CI build, the CI will push images, tagging the
content with the developer branch's short SHA-1 commit hash.
This allows those images to be used by developers, CI builds,
build chain and so on.
Tagged versions of utapi will be stored in the production namespace.
## How to Pull Docker Images
```sh
docker pull ghcr.io/scality/utapi:<commit hash>
docker pull ghcr.io/scality/utapi:<tag>
```
## Release Process
To release a production image:
* Name the tag for the repository and Docker image.
* Use the `yarn version` command with the same tag to update `package.json`.
* Create a PR and merge the `package.json` change.
* Tag the repository using the same tag.
* [Force a build] using:
* A given branch that ideally matches the tag.
* The `release` stage.
* An extra property with the name `tag` and its value being the actual tag.
[Force a build]:
https://eve.devsca.com/github/scality/utapi/#/builders/bootstrap/force/force

View File

@ -0,0 +1,90 @@
import sys, os, base64, datetime, hashlib, hmac, datetime, calendar, json
import requests # pip install requests
access_key = '9EQTVVVCLSSG6QBMNKO5'
secret_key = 'T5mK/skkkwJ/mTjXZnHyZ5UzgGIN=k9nl4dyTmDH'
method = 'POST'
service = 's3'
host = 'localhost:8100'
region = 'us-east-1'
canonical_uri = '/buckets'
canonical_querystring = 'Action=ListMetrics&Version=20160815'
content_type = 'application/x-amz-json-1.0'
algorithm = 'AWS4-HMAC-SHA256'
t = datetime.datetime.utcnow()
amz_date = t.strftime('%Y%m%dT%H%M%SZ')
date_stamp = t.strftime('%Y%m%d')
# Key derivation functions. See:
# http://docs.aws.amazon.com/general/latest/gr/signature-v4-examples.html#signature-v4-examples-python
def sign(key, msg):
    """Return the HMAC-SHA256 digest of ``msg`` (UTF-8 encoded) keyed with ``key`` bytes."""
    digest_maker = hmac.new(key, msg.encode("utf-8"), hashlib.sha256)
    return digest_maker.digest()
def getSignatureKey(key, date_stamp, regionName, serviceName):
    """Derive the AWS SigV4 signing key.

    Chain: 'AWS4' + secret -> date -> region -> service -> 'aws4_request',
    each step an HMAC-SHA256 of the next label keyed with the previous digest.
    """
    def hmac_step(k, label):
        return hmac.new(k, label.encode('utf-8'), hashlib.sha256).digest()

    signing_key = ('AWS4' + key).encode('utf-8')
    for label in (date_stamp, regionName, serviceName, 'aws4_request'):
        signing_key = hmac_step(signing_key, label)
    return signing_key
def get_start_time(t):
    """Floor ``t`` to its 15-minute boundary and return epoch milliseconds (UTC)."""
    floored = t.replace(minute=(t.minute // 15) * 15, second=0, microsecond=0)
    return calendar.timegm(floored.utctimetuple()) * 1000
def get_end_time(t):
    """Floor ``t`` to its 15-minute boundary and return the last millisecond
    before that boundary as epoch milliseconds (UTC)."""
    floored = t.replace(minute=(t.minute // 15) * 15, second=0, microsecond=0)
    return calendar.timegm(floored.utctimetuple()) * 1000 - 1
# Fixed example reporting window: all of January 2016.
start_time = get_start_time(datetime.datetime(2016, 1, 1, 0, 0, 0, 0))
end_time = get_end_time(datetime.datetime(2016, 2, 1, 0, 0, 0, 0))

# Request parameters for listing Utapi bucket metrics--passed in a JSON block.
bucketListing = {
    'buckets': ['utapi-test'],
    'timeRange': [start_time, end_time],
}

request_parameters = json.dumps(bucketListing)

# hashlib requires bytes under Python 3: encode the JSON payload before
# hashing (the original passed a str, which raises TypeError on Python 3).
payload_hash = hashlib.sha256(request_parameters.encode('utf-8')).hexdigest()

canonical_headers = \
    'content-type:{0}\nhost:{1}\nx-amz-content-sha256:{2}\nx-amz-date:{3}\n' \
    .format(content_type, host, payload_hash, amz_date)

signed_headers = 'content-type;host;x-amz-content-sha256;x-amz-date'

canonical_request = '{0}\n{1}\n{2}\n{3}\n{4}\n{5}' \
    .format(method, canonical_uri, canonical_querystring, canonical_headers,
            signed_headers, payload_hash)

credential_scope = '{0}/{1}/{2}/aws4_request' \
    .format(date_stamp, region, service)

# Same bytes requirement: encode the canonical request before hashing.
string_to_sign = '{0}\n{1}\n{2}\n{3}' \
    .format(algorithm, amz_date, credential_scope,
            hashlib.sha256(canonical_request.encode('utf-8')).hexdigest())

signing_key = getSignatureKey(secret_key, date_stamp, region, service)
signature = hmac.new(signing_key, string_to_sign.encode('utf-8'),
                     hashlib.sha256).hexdigest()

authorization_header = \
    '{0} Credential={1}/{2}, SignedHeaders={3}, Signature={4}' \
    .format(algorithm, access_key, credential_scope, signed_headers, signature)

# The 'host' header is added automatically by the Python 'requests' library.
headers = {
    'Content-Type': content_type,
    'X-Amz-Content-Sha256': payload_hash,
    'X-Amz-Date': amz_date,
    'Authorization': authorization_header
}

endpoint = 'http://' + host + canonical_uri + '?' + canonical_querystring

r = requests.post(endpoint, data=request_parameters, headers=headers)
print(r.text)

View File

@ -0,0 +1,20 @@
FROM ghcr.io/scality/federation/nodesvc-base:7.10.5.0

ENV UTAPI_CONFIG_FILE=${CONF_DIR}/config.json

WORKDIR ${HOME_DIR}/utapi

# Trailing slash required: COPY with multiple sources needs the destination
# to be a directory ending in '/'.
COPY ./package.json ./yarn.lock ${HOME_DIR}/utapi/

# Remove when gitcache is sorted out
RUN rm /root/.gitconfig

# Install production deps only; serialize git fetches to avoid flaky clones.
RUN yarn install --production --frozen-lockfile --network-concurrency 1

COPY . ${HOME_DIR}/utapi

RUN chown -R ${USER} ${HOME_DIR}/utapi

USER ${USER}

CMD bash -c "source ${CONF_DIR}/env && export && supervisord -c ${CONF_DIR}/${SUPERVISORD_CONF}"

View File

@ -1 +0,0 @@
*.jar filter=lfs diff=lfs merge=lfs -text

View File

@ -0,0 +1,2 @@
standalone.host = 0.0.0.0
standalone.port = 4802

View File

@ -13,7 +13,7 @@ RUN apk add zip unzip build-base \
&& cd .. \
&& go build -a -o /usr/local/go/warp10_sensision_exporter
FROM registry.scality.com/utapi/warp10:2.8.1-95-g73e7de80
FROM ghcr.io/scality/utapi/warp10:2.8.1-95-g73e7de80
# Override baked in version
# Remove when updating to a numbered release
@ -27,8 +27,6 @@ ENV SENSISION_DATA_DIR /data/sensision
ENV SENSISION_PORT 8082
# Modify Warp 10 default config
ENV standalone.host 0.0.0.0
ENV standalone.port 4802
ENV standalone.home /opt/warp10
ENV warpscript.repository.directory /usr/local/share/warpscript
ENV warp.token.file /static.tokens
@ -53,6 +51,6 @@ COPY --from=builder /usr/local/go/warp10_sensision_exporter /usr/local/bin/warp1
ADD ./images/warp10/s6 /etc
ADD ./warpscript /usr/local/share/warpscript
ADD ./images/warp10/static.tokens /
ADD ./images/warp10/90-default-host-port.conf $WARP10_CONF_TEMPLATES/90-default-host-port.conf
CMD /init

View File

@ -3,9 +3,8 @@
JAVA="/usr/bin/java"
JAVA_OPTS=""
VERSION=1.0.23
SENSISION_CONFIG=${SENSISION_DATA_DIR}/conf/sensision.conf
SENSISION_JAR=${SENSISION_HOME}/bin/sensision-${VERSION}.jar
SENSISION_JAR=${SENSISION_HOME}/bin/sensision-${SENSISION_VERSION}.jar
SENSISION_CP=${SENSISION_HOME}/etc:${SENSISION_JAR}
SENSISION_CLASS=io.warp10.sensision.Main
export MALLOC_ARENA_MAX=1

View File

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:389d2135867c399a389901460c5f2cc09f4857d0c6d08632c2638c25fb150c46
size 15468553

View File

@ -1,5 +1,4 @@
/* eslint-disable global-require */
// eslint-disable-line strict
let toExport;

View File

@ -1,35 +1,13 @@
/* eslint-disable no-bitwise */
const assert = require('assert');
const fs = require('fs');
const path = require('path');
/**
* Reads from a config file and returns the content as a config object
*/
class Config {
constructor() {
/*
* By default, the config file is "config.json" at the root.
* It can be overridden using the UTAPI_CONFIG_FILE environment var.
*/
this._basePath = path.resolve(__dirname, '..');
this.path = `${this._basePath}/config.json`;
if (process.env.UTAPI_CONFIG_FILE !== undefined) {
this.path = process.env.UTAPI_CONFIG_FILE;
}
// Read config automatically
this._getConfig();
}
_getConfig() {
let config;
try {
const data = fs.readFileSync(this.path, { encoding: 'utf-8' });
config = JSON.parse(data);
} catch (err) {
throw new Error(`could not parse config file: ${err.message}`);
}
constructor(config) {
this.component = config.component;
this.port = 9500;
if (config.port !== undefined) {
@ -115,18 +93,26 @@ class Config {
}
}
this.vaultd = {};
if (config.vaultd) {
if (config.vaultd.port !== undefined) {
assert(Number.isInteger(config.vaultd.port)
&& config.vaultd.port > 0,
'bad config: vaultd port must be a positive integer');
this.vaultd.port = config.vaultd.port;
}
if (config.vaultd.host !== undefined) {
assert.strictEqual(typeof config.vaultd.host, 'string',
'bad config: vaultd host must be a string');
this.vaultd.host = config.vaultd.host;
if (config.vaultclient) {
// Instance passed from outside
this.vaultclient = config.vaultclient;
this.vaultd = null;
} else {
// Connection data
this.vaultclient = null;
this.vaultd = {};
if (config.vaultd) {
if (config.vaultd.port !== undefined) {
assert(Number.isInteger(config.vaultd.port)
&& config.vaultd.port > 0,
'bad config: vaultd port must be a positive integer');
this.vaultd.port = config.vaultd.port;
}
if (config.vaultd.host !== undefined) {
assert.strictEqual(typeof config.vaultd.host, 'string',
'bad config: vaultd host must be a string');
this.vaultd.host = config.vaultd.host;
}
}
}
@ -141,12 +127,11 @@ class Config {
const { key, cert, ca } = config.certFilePaths
? config.certFilePaths : {};
if (key && cert) {
const keypath = (key[0] === '/') ? key : `${this._basePath}/${key}`;
const certpath = (cert[0] === '/')
? cert : `${this._basePath}/${cert}`;
const keypath = key;
const certpath = cert;
let capath;
if (ca) {
capath = (ca[0] === '/') ? ca : `${this._basePath}/${ca}`;
capath = ca;
assert.doesNotThrow(() => fs.accessSync(capath, fs.F_OK | fs.R_OK),
`File not found or unreachable: ${capath}`);
}
@ -172,8 +157,13 @@ class Config {
+ 'expireMetrics must be a boolean');
this.expireMetrics = config.expireMetrics;
}
return config;
if (config.onlyCountLatestWhenObjectLocked !== undefined) {
assert(typeof config.onlyCountLatestWhenObjectLocked === 'boolean',
'bad config: onlyCountLatestWhenObjectLocked must be a boolean');
this.onlyCountLatestWhenObjectLocked = config.onlyCountLatestWhenObjectLocked;
}
}
}
module.exports = new Config();
module.exports = Config;

View File

@ -81,6 +81,17 @@ class Datastore {
return this._client.call((backend, done) => backend.incr(key, done), cb);
}
/**
* Increment the value stored at a key by the provided amount (Redis INCRBY).
* @param {string} key - key holding the value
* @param {number} value - integer amount to increment by (passed straight to
* the backend, which presumably expects an integer -- see Redis INCRBY)
* @param {callback} cb - callback invoked with (err, newValue)
* @return {undefined}
*/
incrby(key, value, cb) {
return this._client.call((backend, done) => backend.incrby(key, value, done), cb);
}
/**
* decrement value of a key by 1
* @param {string} key - key holding the value

View File

@ -6,8 +6,6 @@ const async = require('async');
const { errors } = require('arsenal');
const { getMetricFromKey, getKeys, generateStateKey } = require('./schema');
const s3metricResponseJSON = require('../models/s3metricResponse');
const config = require('./Config');
const Vault = require('./Vault');
const MAX_RANGE_MS = (((1000 * 60) * 60) * 24) * 30; // One month.
@ -23,7 +21,6 @@ class ListMetrics {
constructor(metric, component) {
this.metric = metric;
this.service = component;
this.vault = new Vault(config);
}
/**
@ -83,9 +80,10 @@ class ListMetrics {
const resources = validator.get(this.metric);
const timeRange = validator.get('timeRange');
const datastore = utapiRequest.getDatastore();
const vault = utapiRequest.getVault();
// map account ids to canonical ids
if (this.metric === 'accounts') {
return this.vault.getCanonicalIds(resources, log, (err, list) => {
return vault.getCanonicalIds(resources, log, (err, list) => {
if (err) {
return cb(err);
}
@ -124,10 +122,11 @@ class ListMetrics {
const fifteenMinutes = 15 * 60 * 1000; // In milliseconds
const timeRange = [start - fifteenMinutes, end];
const datastore = utapiRequest.getDatastore();
const vault = utapiRequest.getVault();
// map account ids to canonical ids
if (this.metric === 'accounts') {
return this.vault.getCanonicalIds(resources, log, (err, list) => {
return vault.getCanonicalIds(resources, log, (err, list) => {
if (err) {
return cb(err);
}
@ -313,11 +312,10 @@ class ListMetrics {
});
if (!areMetricsPositive) {
return cb(errors.InternalError.customizeDescription(
'Utapi is in a transient state for this time period as '
+ 'metrics are being collected. Please try again in a few '
+ 'minutes.',
));
log.info('negative metric value found', {
error: resource,
method: 'ListMetrics.getMetrics',
});
}
/**
* Batch result is of the format

View File

@ -99,6 +99,7 @@ const metricObj = {
buckets: 'bucket',
accounts: 'accountId',
users: 'userId',
location: 'location',
};
class UtapiClient {
@ -122,13 +123,17 @@ class UtapiClient {
const api = (config || {}).logApi || werelogs;
this.log = new api.Logger('UtapiClient');
// By default, we push all resource types
this.metrics = ['buckets', 'accounts', 'users', 'service'];
this.metrics = ['buckets', 'accounts', 'users', 'service', 'location'];
this.service = 's3';
this.disableOperationCounters = false;
this.enabledOperationCounters = [];
this.disableClient = true;
if (config && !config.disableClient) {
this.disableClient = false;
this.expireMetrics = config.expireMetrics;
this.expireMetricsTTL = config.expireMetricsTTL || 0;
if (config.metrics) {
const message = 'invalid property in UtapiClient configuration';
assert(Array.isArray(config.metrics), `${message}: metrics `
@ -156,9 +161,6 @@ class UtapiClient {
if (config.enabledOperationCounters) {
this.enabledOperationCounters = config.enabledOperationCounters;
}
this.disableClient = false;
this.expireMetrics = config.expireMetrics;
this.expireMetricsTTL = config.expireMetricsTTL || 0;
}
}
@ -546,7 +548,9 @@ class UtapiClient {
if (this._isCounterEnabled(counterAction)) {
cmds.push(['incr', generateKey(p, counterAction, timestamp)]);
}
cmds.push(['zrangebyscore', generateStateKey(p, 'storageUtilized'), timestamp, timestamp]);
});
return this.ds.batch(cmds, (err, results) => {
if (err) {
log.error('error pushing metric', {
@ -580,13 +584,48 @@ class UtapiClient {
// empty.
actionCounter = Number.isNaN(actionCounter)
|| actionCounter < 0 ? 1 : actionCounter;
if (Number.isInteger(params.byteLength)) {
/* byteLength is passed in from cloudserver under the follow conditions:
* - bucket versioning is suspended
* - object version id is null
* - the content length of the object exists
* In this case, the master key is deleted and replaced with a delete marker.
* The decrement accounts for the deletion of the master key when utapi reports
* on the number of objects.
*/
actionCounter -= 1;
}
const key = generateStateKey(p, 'numberOfObjects');
const byteArr = results[index + commandsGroupSize - 1][1];
const oldByteLength = byteArr ? parseInt(byteArr[0], 10) : 0;
const newByteLength = member.serialize(Math.max(0, oldByteLength - params.byteLength));
cmds2.push(
['zremrangebyscore', key, timestamp, timestamp],
['zadd', key, timestamp, member.serialize(actionCounter)],
);
if (Number.isInteger(params.byteLength)) {
cmds2.push(
['decr', generateCounter(p, 'numberOfObjectsCounter')],
['decrby', generateCounter(p, 'storageUtilizedCounter'), params.byteLength],
);
}
if (byteArr) {
cmds2.push(
['zremrangebyscore', generateStateKey(p, 'storageUtilized'), timestamp, timestamp],
['zadd', generateStateKey(p, 'storageUtilized'), timestamp, newByteLength],
);
}
return true;
});
if (noErr) {
return this.ds.batch(cmds2, cb);
}
@ -1117,6 +1156,69 @@ class UtapiClient {
});
}
/**
*
* @param {string} location - name of data location
* @param {number} updateSize - size in bytes to update location metric by,
* could be negative, indicating deleted object
* @param {string} reqUid - Request Unique Identifier
* @param {function} callback - callback to call
* @return {undefined}
*/
pushLocationMetric(location, updateSize, reqUid, callback) {
const log = this.log.newRequestLoggerFromSerializedUids(reqUid);
const params = {
level: 'location',
service: 's3',
location,
};
this._checkMetricTypes(params);
const action = (updateSize < 0) ? 'decrby' : 'incrby';
const size = (updateSize < 0) ? -updateSize : updateSize;
return this.ds[action](generateKey(params, 'locationStorage'), size,
err => {
if (err) {
log.error('error pushing metric', {
method: 'UtapiClient.pushLocationMetric',
error: err,
});
return callback(errors.InternalError);
}
return callback();
});
}
/**
* Read the stored byte count for a data location's storage metric.
* @param {string} location - name of data backend to get metric for
* @param {string} reqUid - Request Unique Identifier
* @param {function} callback - called with (err) on failure, or
* (null, bytesStored) on success; 0 when the key does not exist yet
* @return {undefined}
*/
getLocationMetric(location, reqUid, callback) {
const log = this.log.newRequestLoggerFromSerializedUids(reqUid);
const params = {
level: 'location',
service: 's3',
location,
};
const redisKey = generateKey(params, 'locationStorage');
return this.ds.get(redisKey, (err, bytesStored) => {
if (err) {
log.error('error getting metric', {
method: 'UtapiClient: getLocationMetric',
error: err,
});
return callback(errors.InternalError);
}
// if err and bytesStored are null, key does not exist yet
if (bytesStored === null) {
return callback(null, 0);
}
// NOTE(review): bytesStored is returned as delivered by the datastore,
// presumably a string from Redis GET -- confirm callers parse it.
return callback(null, bytesStored);
});
}
/**
* Get storage used by bucket/account/user/service
* @param {object} params - params for the metrics

View File

@ -16,15 +16,19 @@ const REINDEX_PYTHON_INTERPRETER = process.env.REINDEX_PYTHON_INTERPRETER !== un
? process.env.REINDEX_PYTHON_INTERPRETER
: 'python3.7';
const EXIT_CODE_SENTINEL_CONNECTION = 100;
class UtapiReindex {
constructor(config) {
this._enabled = false;
this._schedule = REINDEX_SCHEDULE;
this._sentinel = {
host: '127.0.0.1',
port: 16379,
this._redis = {
name: 'scality-s3',
sentinelPassword: '',
sentinels: [{
host: '127.0.0.1',
port: 16379,
}],
};
this._bucketd = {
host: '127.0.0.1',
@ -42,14 +46,13 @@ class UtapiReindex {
if (config && config.password) {
this._password = config.password;
}
if (config && config.sentinel) {
if (config && config.redis) {
const {
host, port, name, sentinelPassword,
} = config.sentinel;
this._sentinel.host = host || this._sentinel.host;
this._sentinel.port = port || this._sentinel.port;
this._sentinel.name = name || this._sentinel.name;
this._sentinel.sentinelPassword = sentinelPassword || this._sentinel.sentinelPassword;
name, sentinelPassword, sentinels,
} = config.redis;
this._redis.name = name || this._redis.name;
this._redis.sentinelPassword = sentinelPassword || this._redis.sentinelPassword;
this._redis.sentinels = sentinels || this._redis.sentinels;
}
if (config && config.bucketd) {
const { host, port } = config.bucketd;
@ -61,17 +64,16 @@ class UtapiReindex {
this._log = new werelogs.Logger('UtapiReindex', { level, dump });
}
this._onlyCountLatestWhenObjectLocked = (config && config.onlyCountLatestWhenObjectLocked === true);
this._requestLogger = this._log.newRequestLogger();
}
_getRedisClient() {
const client = new RedisClient({
sentinels: [{
host: this._sentinel.host,
port: this._sentinel.port,
}],
name: this._sentinel.name,
sentinelPassword: this._sentinel.sentinelPassword,
sentinels: this._redis.sentinels,
name: this._redis.name,
sentinelPassword: this._redis.sentinelPassword,
password: this._password,
});
client.connect();
@ -86,17 +88,18 @@ class UtapiReindex {
return this.ds.del(REINDEX_LOCK_KEY);
}
_buildFlags() {
_buildFlags(sentinel) {
const flags = {
/* eslint-disable camelcase */
sentinel_ip: this._sentinel.host,
sentinel_port: this._sentinel.port,
sentinel_cluster_name: this._sentinel.name,
sentinel_ip: sentinel.host,
sentinel_port: sentinel.port,
sentinel_cluster_name: this._redis.name,
bucketd_addr: `http://${this._bucketd.host}:${this._bucketd.port}`,
};
if (this._sentinel.sentinelPassword) {
flags.redis_password = this._sentinel.sentinelPassword;
if (this._redis.sentinelPassword) {
flags.redis_password = this._redis.sentinelPassword;
}
/* eslint-enable camelcase */
const opts = [];
Object.keys(flags)
@ -105,11 +108,15 @@ class UtapiReindex {
opts.push(name);
opts.push(flags[flag]);
});
if (this._onlyCountLatestWhenObjectLocked) {
opts.push('--only-latest-when-locked');
}
return opts;
}
_runScript(path, done) {
const flags = this._buildFlags();
_runScriptWithSentinels(path, remainingSentinels, done) {
const flags = this._buildFlags(remainingSentinels.shift());
this._requestLogger.debug(`launching subprocess ${path} with flags: ${flags}`);
const process = childProcess.spawn(REINDEX_PYTHON_INTERPRETER, [path, ...flags]);
process.stdout.on('data', data => {
@ -136,6 +143,17 @@ class UtapiReindex {
statusCode: code,
script: path,
});
if (code === EXIT_CODE_SENTINEL_CONNECTION) {
if (remainingSentinels.length > 0) {
this._requestLogger.info('retrying with next sentinel host', {
script: path,
});
return this._runScriptWithSentinels(path, remainingSentinels, done);
}
this._requestLogger.error('no more sentinel host to try', {
script: path,
});
}
} else {
this._requestLogger.info('script exited successfully', {
statusCode: code,
@ -146,6 +164,11 @@ class UtapiReindex {
});
}
_runScript(path, done) {
const remainingSentinels = [...this._redis.sentinels];
this._runScriptWithSentinels(path, remainingSentinels, done);
}
_attemptLock(job) {
this._requestLogger.info('attempting to acquire the lock to begin job');
this._lock()

View File

@ -14,6 +14,15 @@ class UtapiRequest {
this._datastore = null;
this._requestQuery = null;
this._requestPath = null;
this._vault = null;
}
getVault() {
return this._vault;
}
setVault() {
return this._vault;
}
/**

View File

@ -1,16 +1,21 @@
import requests
import redis
import json
import argparse
import ast
import sys
import time
import urllib
from concurrent.futures import ThreadPoolExecutor
import json
import logging
import re
import redis
import requests
import sys
from threading import Thread
from concurrent.futures import ThreadPoolExecutor
import time
import urllib
import argparse
logging.basicConfig(level=logging.INFO)
_log = logging.getLogger('utapi-reindex:reporting')
SENTINEL_CONNECT_TIMEOUT_SECONDS = 10
EXIT_CODE_SENTINEL_CONNECTION_ERROR = 100
def get_options():
parser = argparse.ArgumentParser()
@ -29,8 +34,19 @@ class askRedis():
def __init__(self, ip="127.0.0.1", port="16379", sentinel_cluster_name="scality-s3", password=None):
self._password = password
r = redis.Redis(host=ip, port=port, db=0, password=password)
self._ip, self._port = r.sentinel_get_master_addr_by_name(sentinel_cluster_name)
r = redis.Redis(
host=ip,
port=port,
db=0,
password=password,
socket_connect_timeout=SENTINEL_CONNECT_TIMEOUT_SECONDS
)
try:
self._ip, self._port = r.sentinel_get_master_addr_by_name(sentinel_cluster_name)
except (redis.exceptions.ConnectionError, redis.exceptions.TimeoutError) as e:
_log.error(f'Failed to connect to redis sentinel at {ip}:{port}: {e}')
# use a specific error code to hint on retrying with another sentinel node
sys.exit(EXIT_CODE_SENTINEL_CONNECTION_ERROR)
def read(self, resource, name):
r = redis.Redis(host=self._ip, port=self._port, db=0, password=self._password)
@ -98,4 +114,4 @@ if __name__ == '__main__':
data = U.read('accounts', userid)
content = "Account:%s|NumberOFfiles:%s|StorageCapacity:%s " % (
userid, data["files"], data["total_size"])
executor.submit(safe_print, content)
executor.submit(safe_print, content)

View File

@ -1,5 +1,6 @@
import argparse
import concurrent.futures as futures
import functools
import itertools
import json
import logging
@ -8,9 +9,9 @@ import re
import sys
import time
import urllib
from pathlib import Path
from collections import defaultdict, namedtuple
from concurrent.futures import ThreadPoolExecutor
from pprint import pprint
import redis
import requests
@ -24,6 +25,9 @@ MPU_SHADOW_BUCKET_PREFIX = 'mpuShadowBucket'
ACCOUNT_UPDATE_CHUNKSIZE = 100
SENTINEL_CONNECT_TIMEOUT_SECONDS = 10
EXIT_CODE_SENTINEL_CONNECTION_ERROR = 100
def get_options():
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--sentinel-ip", default='127.0.0.1', help="Sentinel IP")
@ -32,9 +36,38 @@ def get_options():
parser.add_argument("-n", "--sentinel-cluster-name", default='scality-s3', help="Redis cluster name")
parser.add_argument("-s", "--bucketd-addr", default='http://127.0.0.1:9000', help="URL of the bucketd server")
parser.add_argument("-w", "--worker", default=10, type=int, help="Number of workers")
parser.add_argument("-b", "--bucket", default=None, help="Bucket to be processed")
parser.add_argument("-r", "--max-retries", default=2, type=int, help="Max retries before failing a bucketd request")
return parser.parse_args()
parser.add_argument("--only-latest-when-locked", action='store_true', help="Only index the latest version of a key when the bucket has a default object lock policy")
parser.add_argument("--debug", action='store_true', help="Enable debug logging")
parser.add_argument("--dry-run", action="store_true", help="Do not update redis")
group = parser.add_mutually_exclusive_group()
group.add_argument("-a", "--account", default=[], help="account canonical ID (all account buckets will be processed)", action="append", type=nonempty_string('account'))
group.add_argument("--account-file", default=None, help="file containing account canonical IDs, one ID per line", type=existing_file)
group.add_argument("-b", "--bucket", default=[], help="bucket name", action="append", type=nonempty_string('bucket'))
group.add_argument("--bucket-file", default=None, help="file containing bucket names, one bucket name per line", type=existing_file)
options = parser.parse_args()
if options.bucket_file:
with open(options.bucket_file) as f:
options.bucket = [line.strip() for line in f if line.strip()]
elif options.account_file:
with open(options.account_file) as f:
options.account = [line.strip() for line in f if line.strip()]
return options
def nonempty_string(flag):
def inner(value):
if not value.strip():
raise argparse.ArgumentTypeError("%s: value must not be empty"%flag)
return value
return inner
def existing_file(path):
path = Path(path).resolve()
if not path.exists():
raise argparse.ArgumentTypeError("File does not exist: %s"%path)
return path
def chunks(iterable, size):
it = iter(iterable)
@ -49,7 +82,7 @@ def _encoded(func):
return urllib.parse.quote(val.encode('utf-8'))
return inner
Bucket = namedtuple('Bucket', ['userid', 'name'])
Bucket = namedtuple('Bucket', ['userid', 'name', 'object_lock_enabled'])
MPU = namedtuple('MPU', ['bucket', 'key', 'upload_id'])
BucketContents = namedtuple('BucketContents', ['bucket', 'obj_count', 'total_size'])
@ -61,15 +94,21 @@ class InvalidListing(Exception):
def __init__(self, bucket):
super().__init__('Invalid contents found while listing bucket %s'%bucket)
class BucketNotFound(Exception):
def __init__(self, bucket):
super().__init__('Bucket %s not found'%bucket)
class BucketDClient:
'''Performs Listing calls against bucketd'''
__url_format = '{addr}/default/bucket/{bucket}'
__url_attribute_format = '{addr}/default/attributes/{bucket}'
__url_bucket_format = '{addr}/default/bucket/{bucket}'
__headers = {"x-scal-request-uids": "utapi-reindex-list-buckets"}
def __init__(self, bucketd_addr=None, max_retries=2):
def __init__(self, bucketd_addr=None, max_retries=2, only_latest_when_locked=False):
self._bucketd_addr = bucketd_addr
self._max_retries = max_retries
self._only_latest_when_locked = only_latest_when_locked
self._session = requests.Session()
def _do_req(self, url, check_500=True, **kwargs):
@ -101,7 +140,7 @@ class BucketDClient:
parameters value. On the first request the function will be called with
`None` and should return its initial value. Return `None` for the param to be excluded.
'''
url = self.__url_format.format(addr=self._bucketd_addr, bucket=bucket)
url = self.__url_bucket_format.format(addr=self._bucketd_addr, bucket=bucket)
static_params = {k: v for k, v in kwargs.items() if not callable(v)}
dynamic_params = {k: v for k, v in kwargs.items() if callable(v)}
is_truncated = True # Set to True for first loop
@ -114,6 +153,9 @@ class BucketDClient:
_log.debug('listing bucket bucket: %s params: %s'%(
bucket, ', '.join('%s=%s'%p for p in params.items())))
resp = self._do_req(url, params=params)
if resp.status_code == 404:
_log.debug('Bucket not found bucket: %s'%bucket)
return
if resp.status_code == 200:
payload = resp.json()
except ValueError as e:
@ -135,7 +177,37 @@ class BucketDClient:
else:
is_truncated = len(payload) > 0
def list_buckets(self, name = None):
@functools.lru_cache(maxsize=16)
def _get_bucket_attributes(self, name):
url = self.__url_attribute_format.format(addr=self._bucketd_addr, bucket=name)
try:
resp = self._do_req(url)
if resp.status_code == 200:
return resp.json()
else:
_log.error('Error getting bucket attributes bucket:%s status_code:%s'%(name, resp.status_code))
raise BucketNotFound(name)
except ValueError as e:
_log.exception(e)
_log.error('Invalid attributes response body! bucket:%s'%name)
raise
except MaxRetriesReached:
_log.error('Max retries reached getting bucket attributes bucket:%s'%name)
raise
except Exception as e:
_log.exception(e)
_log.error('Unhandled exception getting bucket attributes bucket:%s'%name)
raise
def get_bucket_md(self, name):
md = self._get_bucket_attributes(name)
canonId = md.get('owner')
if canonId is None:
_log.error('No owner found for bucket %s'%name)
raise InvalidListing(name)
return Bucket(canonId, name, md.get('objectLockEnabled', False))
def list_buckets(self, account=None):
def get_next_marker(p):
if p is None:
@ -147,19 +219,24 @@ class BucketDClient:
'maxKeys': 1000,
'marker': get_next_marker
}
if account is not None:
params['prefix'] = '%s..|..' % account
for _, payload in self._list_bucket(USERS_BUCKET, **params):
buckets = []
for result in payload['Contents']:
for result in payload.get('Contents', []):
match = re.match("(\w+)..\|..(\w+.*)", result['key'])
bucket = Bucket(*match.groups())
if name is None or bucket.name == name:
buckets.append(bucket)
bucket = Bucket(*match.groups(), False)
# We need to get the attributes for each bucket to determine if it is locked
if self._only_latest_when_locked:
bucket_attrs = self._get_bucket_attributes(bucket.name)
object_lock_enabled = bucket_attrs.get('objectLockEnabled', False)
bucket = bucket._replace(object_lock_enabled=object_lock_enabled)
buckets.append(bucket)
if buckets:
yield buckets
if name is not None:
# Break on the first matching bucket if a name is given
break
def list_mpus(self, bucket):
_bucket = MPU_SHADOW_BUCKET_PREFIX + bucket.name
@ -196,18 +273,12 @@ class BucketDClient:
upload_id=key['value']['UploadId']))
return keys
def _sum_objects(self, bucket, listing):
def _sum_objects(self, bucket, listing, only_latest_when_locked = False):
count = 0
total_size = 0
last_master = None
last_size = None
for status_code, payload in listing:
contents = payload['Contents'] if isinstance(payload, dict) else payload
if contents is None:
_log.error('Invalid contents in listing. bucket:%s status_code:%s'%(bucket, status_code))
raise InvalidListing(bucket)
for obj in contents:
count += 1
last_key = None
try:
for obj in listing:
if isinstance(obj['value'], dict):
# bucketd v6 returns a dict:
data = obj.get('value', {})
@ -216,39 +287,51 @@ class BucketDClient:
# bucketd v7 returns an encoded string
data = json.loads(obj['value'])
size = data.get('content-length', 0)
is_latest = obj['key'] != last_key
last_key = obj['key']
if only_latest_when_locked and bucket.object_lock_enabled and not is_latest:
_log.debug('Skipping versioned key: %s'%obj['key'])
continue
count += 1
total_size += size
# If versioned, subtract the size of the master to avoid double counting
if last_master is not None and obj['key'].startswith(last_master + '\x00'):
_log.debug('Detected versioned key: %s - subtracting master size: %i'% (
obj['key'],
last_size,
))
total_size -= last_size
count -= 1
last_master = None
# Only save master versions
elif '\x00' not in obj['key']:
last_master = obj['key']
last_size = size
except InvalidListing:
_log.error('Invalid contents in listing. bucket:%s'%bucket.name)
raise InvalidListing(bucket.name)
return count, total_size
def _extract_listing(self, key, listing):
for status_code, payload in listing:
contents = payload[key] if isinstance(payload, dict) else payload
if contents is None:
raise InvalidListing('')
for obj in contents:
yield obj
def count_bucket_contents(self, bucket):
def get_next_marker(p):
if p is None or len(p) == 0:
def get_key_marker(p):
if p is None:
return ''
return p[-1].get('key', '')
return p.get('NextKeyMarker', '')
def get_vid_marker(p):
if p is None:
return ''
return p.get('NextVersionIdMarker', '')
params = {
'listingType': 'Basic',
'listingType': 'DelimiterVersions',
'maxKeys': 1000,
'gt': get_next_marker,
'keyMarker': get_key_marker,
'versionIdMarker': get_vid_marker,
}
count, total_size = self._sum_objects(bucket.name, self._list_bucket(bucket.name, **params))
listing = self._list_bucket(bucket.name, **params)
count, total_size = self._sum_objects(bucket, self._extract_listing('Versions', listing), self._only_latest_when_locked)
return BucketContents(
bucket=bucket,
obj_count=count,
@ -256,7 +339,8 @@ class BucketDClient:
)
def count_mpu_parts(self, mpu):
_bucket = MPU_SHADOW_BUCKET_PREFIX + mpu.bucket.name
shadow_bucket_name = MPU_SHADOW_BUCKET_PREFIX + mpu.bucket.name
shadow_bucket = mpu.bucket._replace(name=shadow_bucket_name)
def get_prefix(p):
if p is None:
@ -276,13 +360,31 @@ class BucketDClient:
'listingType': 'Delimiter',
}
count, total_size = self._sum_objects(_bucket, self._list_bucket(_bucket, **params))
listing = self._list_bucket(shadow_bucket_name, **params)
count, total_size = self._sum_objects(shadow_bucket, self._extract_listing('Contents', listing))
return BucketContents(
bucket=mpu.bucket._replace(name=_bucket),
bucket=shadow_bucket,
obj_count=0, # MPU parts are not counted towards numberOfObjects
total_size=total_size
)
def list_all_buckets(bucket_client):
return bucket_client.list_buckets()
def list_specific_accounts(bucket_client, accounts):
for account in accounts:
yield from bucket_client.list_buckets(account=account)
def list_specific_buckets(bucket_client, buckets):
batch = []
for bucket in buckets:
try:
batch.append(bucket_client.get_bucket_md(bucket))
except BucketNotFound:
_log.error('Failed to list bucket %s. Removing from results.'%bucket)
continue
yield batch
def index_bucket(client, bucket):
'''
@ -322,9 +424,16 @@ def get_redis_client(options):
host=options.sentinel_ip,
port=options.sentinel_port,
db=0,
password=options.redis_password
password=options.redis_password,
socket_connect_timeout=SENTINEL_CONNECT_TIMEOUT_SECONDS
)
ip, port = sentinel.sentinel_get_master_addr_by_name(options.sentinel_cluster_name)
try:
ip, port = sentinel.sentinel_get_master_addr_by_name(options.sentinel_cluster_name)
except (redis.exceptions.ConnectionError, redis.exceptions.TimeoutError) as e:
_log.error(f'Failed to connect to redis sentinel at {options.sentinel_ip}:{options.sentinel_port}: {e}')
# use a specific error code to hint on retrying with another sentinel node
sys.exit(EXIT_CODE_SENTINEL_CONNECTION_ERROR)
return redis.Redis(
host=ip,
port=port,
@ -358,16 +467,24 @@ def log_report(resource, name, obj_count, total_size):
if __name__ == '__main__':
options = get_options()
if options.bucket is not None and not options.bucket.strip():
print('You must provide a bucket name with the --bucket flag')
sys.exit(1)
bucket_client = BucketDClient(options.bucketd_addr, options.max_retries)
if options.debug:
_log.setLevel(logging.DEBUG)
bucket_client = BucketDClient(options.bucketd_addr, options.max_retries, options.only_latest_when_locked)
redis_client = get_redis_client(options)
account_reports = {}
observed_buckets = set()
failed_accounts = set()
if options.account:
batch_generator = list_specific_accounts(bucket_client, options.account)
elif options.bucket:
batch_generator = list_specific_buckets(bucket_client, options.bucket)
else:
batch_generator = list_all_buckets(bucket_client)
with ThreadPoolExecutor(max_workers=options.worker) as executor:
for batch in bucket_client.list_buckets(options.bucket):
for batch in batch_generator:
bucket_reports = {}
jobs = { executor.submit(index_bucket, bucket_client, b): b for b in batch }
for job in futures.as_completed(jobs.keys()):
@ -386,51 +503,84 @@ if __name__ == '__main__':
update_report(account_reports, total.bucket.userid, total.obj_count, total.total_size)
# Bucket reports can be updated as we get them
pipeline = redis_client.pipeline(transaction=False) # No transaction to reduce redis load
for bucket, report in bucket_reports.items():
update_redis(pipeline, 'buckets', bucket, report['obj_count'], report['total_size'])
log_report('buckets', bucket, report['obj_count'], report['total_size'])
pipeline.execute()
if options.dry_run:
for bucket, report in bucket_reports.items():
_log.info(
"DryRun: resource buckets [%s] would be updated with obj_count %i and total_size %i" % (
bucket, report['obj_count'], report['total_size']
)
)
else:
pipeline = redis_client.pipeline(transaction=False) # No transaction to reduce redis load
for bucket, report in bucket_reports.items():
update_redis(pipeline, 'buckets', bucket, report['obj_count'], report['total_size'])
log_report('buckets', bucket, report['obj_count'], report['total_size'])
pipeline.execute()
stale_buckets = set()
recorded_buckets = set(get_resources_from_redis(redis_client, 'buckets'))
if options.bucket is None:
stale_buckets = recorded_buckets.difference(observed_buckets)
elif observed_buckets and options.bucket not in recorded_buckets:
# The provided bucket does not exist, so clean up any metrics
stale_buckets = { options.bucket }
if options.bucket:
stale_buckets = { b for b in options.bucket if b not in observed_buckets }
elif options.account:
_log.warning('Stale buckets will not be cleared when using the --account or --account-file flags')
else:
stale_buckets = set()
stale_buckets = recorded_buckets.difference(observed_buckets)
_log.info('Found %s stale buckets' % len(stale_buckets))
for chunk in chunks(stale_buckets, ACCOUNT_UPDATE_CHUNKSIZE):
pipeline = redis_client.pipeline(transaction=False) # No transaction to reduce redis load
for bucket in chunk:
update_redis(pipeline, 'buckets', bucket, 0, 0)
log_report('buckets', bucket, 0, 0)
pipeline.execute()
if options.dry_run:
_log.info("DryRun: not updating stale buckets")
else:
for chunk in chunks(stale_buckets, ACCOUNT_UPDATE_CHUNKSIZE):
pipeline = redis_client.pipeline(transaction=False) # No transaction to reduce redis load
for bucket in chunk:
update_redis(pipeline, 'buckets', bucket, 0, 0)
log_report('buckets', bucket, 0, 0)
pipeline.execute()
# Account metrics are not updated if a bucket is specified
if options.bucket is None:
if options.bucket:
_log.warning('Account metrics will not be updated when using the --bucket or --bucket-file flags')
else:
# Don't update any accounts with failed listings
without_failed = filter(lambda x: x[0] not in failed_accounts, account_reports.items())
# Update total account reports in chunks
for chunk in chunks(without_failed, ACCOUNT_UPDATE_CHUNKSIZE):
pipeline = redis_client.pipeline(transaction=False) # No transaction to reduce redis load
for userid, report in chunk:
update_redis(pipeline, 'accounts', userid, report['obj_count'], report['total_size'])
log_report('accounts', userid, report['obj_count'], report['total_size'])
pipeline.execute()
if options.dry_run:
for userid, report in account_reports.items():
_log.info(
"DryRun: resource account [%s] would be updated with obj_count %i and total_size %i" % (
userid, report['obj_count'], report['total_size']
)
)
else:
# Update total account reports in chunks
for chunk in chunks(without_failed, ACCOUNT_UPDATE_CHUNKSIZE):
pipeline = redis_client.pipeline(transaction=False) # No transaction to reduce redis load
for userid, report in chunk:
update_redis(pipeline, 'accounts', userid, report['obj_count'], report['total_size'])
log_report('accounts', userid, report['obj_count'], report['total_size'])
pipeline.execute()
if options.account:
for account in options.account:
if account in failed_accounts:
_log.error("No metrics updated for account %s, one or more buckets failed" % account)
# Include failed_accounts in observed_accounts to avoid clearing metrics
observed_accounts = failed_accounts.union(set(account_reports.keys()))
recorded_accounts = set(get_resources_from_redis(redis_client, 'accounts'))
# Stale accounts and buckets are ones that do not appear in the listing, but have recorded values
stale_accounts = recorded_accounts.difference(observed_accounts)
if options.account:
stale_accounts = { a for a in options.account if a not in observed_accounts }
else:
# Stale accounts and buckets are ones that do not appear in the listing, but have recorded values
stale_accounts = recorded_accounts.difference(observed_accounts)
_log.info('Found %s stale accounts' % len(stale_accounts))
for chunk in chunks(stale_accounts, ACCOUNT_UPDATE_CHUNKSIZE):
pipeline = redis_client.pipeline(transaction=False) # No transaction to reduce redis load
for account in chunk:
update_redis(pipeline, 'accounts', account, 0, 0)
log_report('accounts', account, 0, 0)
pipeline.execute()
if options.dry_run:
_log.info("DryRun: not updating stale accounts")
else:
for chunk in chunks(stale_accounts, ACCOUNT_UPDATE_CHUNKSIZE):
pipeline = redis_client.pipeline(transaction=False) # No transaction to reduce redis load
for account in chunk:
update_redis(pipeline, 'accounts', account, 0, 0)
log_report('accounts', account, 0, 0)
pipeline.execute()

View File

@ -68,10 +68,10 @@ const keys = {
*/
function getSchemaPrefix(params, timestamp) {
const {
bucket, accountId, userId, level, service,
bucket, accountId, userId, level, service, location,
} = params;
// `service` property must remain last because other objects also include it
const id = bucket || accountId || userId || service;
const id = bucket || accountId || userId || location || service;
const prefix = timestamp ? `${service}:${level}:${timestamp}:${id}:`
: `${service}:${level}:${id}:`;
return prefix;
@ -86,9 +86,13 @@ function getSchemaPrefix(params, timestamp) {
*/
function generateKey(params, metric, timestamp) {
const prefix = getSchemaPrefix(params, timestamp);
if (params.location) {
return `${prefix}locationStorage`;
}
return keys[metric](prefix);
}
/**
* Returns a list of the counters for a metric type
* @param {object} params - object with metric type and id as a property

View File

@ -7,7 +7,6 @@ const { Clustering, errors, ipCheck } = require('arsenal');
const arsenalHttps = require('arsenal').https;
const { Logger } = require('werelogs');
const config = require('./Config');
const routes = require('../router/routes');
const Route = require('../router/Route');
const Router = require('../router/Router');
@ -28,7 +27,12 @@ class UtapiServer {
constructor(worker, port, datastore, logger, config) {
this.worker = worker;
this.port = port;
this.router = new Router(config);
this.vault = config.vaultclient;
if (!this.vault) {
const Vault = require('./Vault');
this.vault = new Vault(config);
}
this.router = new Router(config, this.vault);
this.logger = logger;
this.datastore = datastore;
this.server = null;
@ -71,6 +75,7 @@ class UtapiServer {
req.socket.setNoDelay();
const { query, path, pathname } = url.parse(req.url, true);
const utapiRequest = new UtapiRequest()
.setVault(this.vault)
.setRequest(req)
.setLog(this.logger.newRequestLogger())
.setResponse(res)
@ -214,8 +219,7 @@ class UtapiServer {
* @property {object} params.log - logger configuration
* @return {undefined}
*/
function spawn(params) {
Object.assign(config, params);
function spawn(config) {
const {
workers, redis, log, port,
} = config;

View File

@ -23,10 +23,6 @@
"healthChecks": {
"allowFrom": ["127.0.0.1/8", "::1"]
},
"vaultd": {
"host": "127.0.0.1",
"port": 8500
},
"cacheBackend": "memory",
"development": false,
"nodeId": "single_node",

View File

@ -2,6 +2,8 @@ const fs = require('fs');
const path = require('path');
const Joi = require('@hapi/joi');
const assert = require('assert');
const defaults = require('./defaults.json');
const werelogs = require('werelogs');
const {
truthy, envNamespace, allowedFilterFields, allowedFilterStates,
@ -71,7 +73,6 @@ class Config {
constructor(overrides) {
this._basePath = path.join(__dirname, '../../');
this._configPath = _loadFromEnv('CONFIG_FILE', defaultConfigPath);
this._defaultsPath = path.join(__dirname, 'defaults.json');
this.host = undefined;
this.port = undefined;
@ -89,6 +90,11 @@ class Config {
parsedConfig = this._recursiveUpdate(parsedConfig, overrides);
}
Object.assign(this, parsedConfig);
werelogs.configure({
level: Config.logging.level,
dump: Config.logging.dumpLevel,
});
}
static _readFile(path, encoding = 'utf-8') {
@ -113,7 +119,7 @@ class Config {
}
_loadDefaults() {
return Config._readJSON(this._defaultsPath);
return defaults;
}
_loadUserConfig() {
@ -192,6 +198,10 @@ class Config {
`${prefix}_SENTINEL_PASSWORD`,
config.sentinelPassword,
);
redisConf.password = _loadFromEnv(
`${prefix}_PASSWORD`,
config.password,
);
} else {
redisConf.host = _loadFromEnv(
`${prefix}_HOST`,

View File

@ -22,6 +22,7 @@ const constants = {
'deleteBucketEncryption',
'deleteBucketLifecycle',
'deleteBucketReplication',
'deleteBucketTagging',
'deleteBucketWebsite',
'deleteObject',
'deleteObjectTagging',
@ -34,6 +35,7 @@ const constants = {
'getBucketObjectLock',
'getBucketReplication',
'getBucketVersioning',
'getBucketTagging',
'getBucketWebsite',
'getObject',
'getObjectAcl',
@ -55,6 +57,7 @@ const constants = {
'putBucketObjectLock',
'putBucketReplication',
'putBucketVersioning',
'putBucketTagging',
'putBucketWebsite',
'putDeleteMarkerObject',
'putObject',

View File

@ -1,6 +1,6 @@
/* eslint-disable no-restricted-syntax */
const arsenal = require('arsenal');
const async = require('async');
const metadata = require('./client');
const { LoggerContext, logger } = require('../utils');
const { keyVersionSplitter } = require('../constants');
@ -12,9 +12,14 @@ const moduleLogger = new LoggerContext({
module: 'metadata.client',
});
const ebConfig = {
times: 10,
interval: retryCount => 50 * (2 ** retryCount),
};
const PAGE_SIZE = 1000;
function _listingWrapper(bucket, params) {
async function _listingWrapper(bucket, params) {
return new Promise(
(resolve, reject) => metadata.listObject(
bucket,
@ -41,7 +46,7 @@ function _listObject(bucket, prefix, hydrateFunc) {
try {
// eslint-disable-next-line no-await-in-loop
res = await _listingWrapper(bucket, { ...listingParams, gt });
res = await async.retryable(ebConfig, _listingWrapper)(bucket, { ...listingParams, gt });
} catch (error) {
moduleLogger.error('Error during listing', { error });
throw error;

View File

@ -6,7 +6,8 @@ const BackOff = require('backo');
const { whilst } = require('async');
const errors = require('./errors');
const { LoggerContext, asyncOrCallback } = require('./utils');
const { LoggerContext } = require('./utils/log');
const { asyncOrCallback } = require('./utils/func');
const moduleLogger = new LoggerContext({
module: 'redis',

View File

@ -1,13 +1,13 @@
const promClient = require('prom-client');
const httpRequestsTotal = new promClient.Counter({
name: 'utapi_http_requests_total',
name: 's3_utapi_http_requests_total',
help: 'Total number of HTTP requests',
labelNames: ['action', 'code'],
});
const httpRequestDurationSeconds = new promClient.Histogram({
name: 'utapi_http_request_duration_seconds',
name: 's3_utapi_http_request_duration_seconds',
help: 'Duration of HTTP requests in seconds',
labelNames: ['action', 'code'],
// buckets for response time from 0.1ms to 60s

View File

@ -68,20 +68,20 @@ class BaseTask extends Process {
const taskNameSnake = taskName.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
const executionDuration = new promClient.Gauge({
name: `utapi${taskNameSnake}_duration_seconds`,
name: `s3_utapi${taskNameSnake}_duration_seconds`,
help: `Execution time of the ${taskName} task`,
labelNames: ['origin', 'containerName'],
});
const executionAttempts = new promClient.Counter({
name: `utapi${taskNameSnake}_attempts_total`,
help: `Number of attempts to execute the ${taskName} task`,
name: `s3_utapi${taskNameSnake}_attempts_total`,
help: `Total number of attempts to execute the ${taskName} task`,
labelNames: ['origin', 'containerName'],
});
const executionFailures = new promClient.Counter({
name: `utapi${taskNameSnake}_failures_total`,
help: `Number of failures executing the ${taskName} task`,
name: `s3_utapi${taskNameSnake}_failures_total`,
help: `Total number of failures executing the ${taskName} task`,
labelNames: ['origin', 'containerName'],
});

View File

@ -24,14 +24,14 @@ class CreateCheckpoint extends BaseTask {
// eslint-disable-next-line class-methods-use-this
_registerMetricHandlers() {
const created = new promClient.Counter({
name: 'utapi_create_checkpoint_created_total',
help: 'Number of checkpoints created',
name: 's3_utapi_create_checkpoint_created_total',
help: 'Total number of checkpoints created',
labelNames: ['origin', 'containerName'],
});
const getLastCheckpoint = this._getLastCheckpoint.bind(this);
const lastCheckpoint = new promClient.Gauge({
name: 'utapi_create_checkpoint_last_checkpoint_seconds',
name: 's3_utapi_create_checkpoint_last_checkpoint_seconds',
help: 'Timestamp of the last successfully created checkpoint',
labelNames: ['origin', 'containerName'],
async collect() {

View File

@ -24,14 +24,14 @@ class CreateSnapshot extends BaseTask {
// eslint-disable-next-line class-methods-use-this
_registerMetricHandlers() {
const created = new promClient.Counter({
name: 'utapi_create_snapshot_created_total',
help: 'Number of snapshots created',
name: 's3_utapi_create_snapshot_created_total',
help: 'Total number of snapshots created',
labelNames: ['origin', 'containerName'],
});
const getLastSnapshot = this._getLastSnapshot.bind(this);
const lastSnapshot = new promClient.Gauge({
name: 'utapi_create_snapshot_last_snapshot_seconds',
name: 's3_utapi_create_snapshot_last_snapshot_seconds',
help: 'Timestamp of the last successfully created snapshot',
labelNames: ['origin', 'containerName'],
async collect() {

View File

@ -52,31 +52,31 @@ class MonitorDiskUsage extends BaseTask {
// eslint-disable-next-line class-methods-use-this
_registerMetricHandlers() {
const isLocked = new promClient.Gauge({
name: 'utapi_monitor_disk_usage_is_locked',
name: 's3_utapi_monitor_disk_usage_is_locked',
help: 'Indicates whether the monitored warp 10 has had writes disabled',
labelNames: ['origin', 'containerName'],
});
const leveldbBytes = new promClient.Gauge({
name: 'utapi_monitor_disk_usage_leveldb_bytes',
name: 's3_utapi_monitor_disk_usage_leveldb_bytes',
help: 'Total bytes used by warp 10 leveldb',
labelNames: ['origin', 'containerName'],
});
const datalogBytes = new promClient.Gauge({
name: 'utapi_monitor_disk_usage_datalog_bytes',
name: 's3_utapi_monitor_disk_usage_datalog_bytes',
help: 'Total bytes used by warp 10 datalog',
labelNames: ['origin', 'containerName'],
});
const hardLimitRatio = new promClient.Gauge({
name: 'utapi_monitor_disk_usage_hard_limit_ratio',
name: 's3_utapi_monitor_disk_usage_hard_limit_ratio',
help: 'Percent of the hard limit used by warp 10',
labelNames: ['origin', 'containerName'],
});
const hardLimitSetting = new promClient.Gauge({
name: 'utapi_monitor_disk_usage_hard_limit_bytes',
name: 's3_utapi_monitor_disk_usage_hard_limit_bytes',
help: 'The hard limit setting in bytes',
labelNames: ['origin', 'containerName'],
});

View File

@ -32,26 +32,26 @@ class IngestShardTask extends BaseTask {
// eslint-disable-next-line class-methods-use-this
_registerMetricHandlers() {
const ingestedTotal = new promClient.Counter({
name: 'utapi_ingest_shard_task_ingest_total',
help: 'Number of metrics ingested',
name: 's3_utapi_ingest_shard_task_ingest_total',
help: 'Total number of metrics ingested',
labelNames: ['origin', 'containerName'],
});
const ingestedSlow = new promClient.Counter({
name: 'utapi_ingest_shard_task_slow_total',
help: 'Number of slow metrics ingested',
name: 's3_utapi_ingest_shard_task_slow_total',
help: 'Total number of slow metrics ingested',
labelNames: ['origin', 'containerName'],
});
const ingestedShards = new promClient.Counter({
name: 'utapi_ingest_shard_task_shard_ingest_total',
help: 'Number of metric shards ingested',
name: 's3_utapi_ingest_shard_task_shard_ingest_total',
help: 'Total number of metric shards ingested',
labelNames: ['origin', 'containerName'],
});
const shardAgeTotal = new promClient.Counter({
name: 'utapi_ingest_shard_task_shard_age_total',
help: 'Aggregated age of shards',
name: 's3_utapi_ingest_shard_task_shard_age_total',
help: 'Total aggregated age of shards',
labelNames: ['origin', 'containerName'],
});

View File

@ -31,6 +31,12 @@ class ReindexTask extends BaseTask {
this._defaultLag = 0;
const eventFilters = (config && config.filter) || {};
this._shouldReindex = buildFilterChain((config && config.filter) || {});
// exponential backoff: max wait = 50 * 2 ^ 10 milliseconds ~= 51 seconds
this.ebConfig = {
times: 10,
interval: retryCount => 50 * (2 ** retryCount),
};
if (Object.keys(eventFilters).length !== 0) {
logEventFilter((...args) => logger.info(...args), 'reindex resource filtering enabled', eventFilters);
}
@ -158,7 +164,6 @@ class ReindexTask extends BaseTask {
if (this._program.bucket.length) {
return this._program.bucket.map(name => ({ name }));
}
return metadata.listBuckets();
}
@ -180,8 +185,8 @@ class ReindexTask extends BaseTask {
let mpuTotal;
try {
bktTotal = await async.retryable(ReindexTask._indexBucket)(bucket.name);
mpuTotal = await async.retryable(ReindexTask._indexMpuBucket)(mpuBucket);
bktTotal = await async.retryable(this.ebConfig, ReindexTask._indexBucket)(bucket.name);
mpuTotal = await async.retryable(this.ebConfig, ReindexTask._indexMpuBucket)(mpuBucket);
} catch (error) {
logger.error(
'failed bucket reindex. any associated account will be skipped',

View File

@ -24,8 +24,8 @@ class RepairTask extends BaseTask {
// eslint-disable-next-line class-methods-use-this
_registerMetricHandlers() {
const created = new promClient.Counter({
name: 'utapi_repair_task_created_total',
help: 'Number of repair records created',
name: 's3_utapi_repair_task_created_total',
help: 'Total number of repair records created',
labelNames: ['origin', 'containerName'],
});

View File

@ -1,14 +1,6 @@
const werelogs = require('werelogs');
const config = require('../config');
const { comprehend } = require('./func');
const loggerConfig = {
level: config.logging.level,
dump: config.logging.dumpLevel,
};
werelogs.configure(loggerConfig);
const rootLogger = new werelogs.Logger('Utapi');
class LoggerContext {
@ -78,8 +70,6 @@ class LoggerContext {
}
}
rootLogger.debug('logger initialized', { loggerConfig });
function buildRequestLogger(req) {
let reqUids = [];
if (req.headers['x-scal-request-uids'] !== undefined) {

View File

@ -1,6 +1,5 @@
const assert = require('assert');
const { auth, policies } = require('arsenal');
const vaultclient = require('vaultclient');
const config = require('../config');
const errors = require('../errors');
/**
@ -9,9 +8,17 @@ const errors = require('../errors');
*/
class VaultWrapper extends auth.Vault {
create(config) {
if (config.vaultd.host) {
return new VaultWrapper(config);
}
return null;
}
constructor(options) {
let client;
const { host, port } = options.vaultd;
const vaultclient = require('vaultclient');
if (options.tls) {
const { key, cert, ca } = options.tls;
client = new vaultclient.Client(host, port, true, key, cert,
@ -119,7 +126,7 @@ class VaultWrapper extends auth.Vault {
}
}
const vault = new VaultWrapper(config);
const vault = VaultWrapper.create(config);
auth.setHandler(vault);
module.exports = {

View File

@ -3,7 +3,7 @@
"engines": {
"node": ">=16"
},
"version": "7.10.6",
"version": "8.1.15",
"description": "API for tracking resource utilization and reporting metrics",
"main": "index.js",
"repository": {
@ -19,13 +19,12 @@
"dependencies": {
"@hapi/joi": "^17.1.1",
"@senx/warp10": "^1.0.14",
"arsenal": "git+https://github.com/scality/Arsenal#7.10.28",
"arsenal": "git+https://git.yourcmc.ru/vitalif/zenko-arsenal.git#development/8.1",
"async": "^3.2.0",
"aws-sdk": "^2.1005.0",
"aws4": "^1.8.0",
"backo": "^1.1.0",
"body-parser": "^1.19.0",
"bucketclient": "scality/bucketclient#7.10.4",
"byte-size": "^7.0.0",
"commander": "^5.1.0",
"cron-parser": "^2.15.0",
@ -38,17 +37,16 @@
"needle": "^2.5.0",
"node-schedule": "^1.3.2",
"oas-tools": "^2.2.2",
"prom-client": "^13.1.0",
"prom-client": "14.2.0",
"uuid": "^3.3.2",
"vaultclient": "scality/vaultclient#7.10.8",
"werelogs": "scality/werelogs#8.1.0"
"werelogs": "git+https://git.yourcmc.ru/vitalif/zenko-werelogs.git#development/8.1"
},
"devDependencies": {
"eslint": "6.0.1",
"eslint-config-airbnb": "17.1.0",
"eslint-config-scality": "scality/Guidelines#7.10.2",
"eslint": "^8.14.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-config-scality": "git+https://git.yourcmc.ru/vitalif/zenko-eslint-config-scality.git",
"eslint-plugin-import": "^2.18.0",
"mocha": "^3.0.2",
"mocha": ">=3.1.2",
"nodemon": "^2.0.4",
"protobufjs": "^6.10.1",
"sinon": "^9.0.2"

2
requirements.txt Normal file
View File

@ -0,0 +1,2 @@
redis==5.0.3
requests==2.31.0

View File

@ -3,17 +3,16 @@ const assert = require('assert');
const url = require('url');
const { auth, errors, policies } = require('arsenal');
const safeJsonParse = require('../utils/safeJsonParse');
const Vault = require('../lib/Vault');
class Router {
/**
* @constructor
* @param {Config} config - Config instance
*/
constructor(config) {
constructor(config, vault) {
this._service = config.component;
this._routes = {};
this._vault = new Vault(config);
this._vault = vault;
}
/**
@ -266,6 +265,10 @@ class Router {
*/
_processSecurityChecks(utapiRequest, route, cb) {
const log = utapiRequest.getLog();
if (process.env.UTAPI_AUTH === 'false') {
// Zenko route request does not need to go through Vault
return this._startRequest(utapiRequest, route, cb);
}
return this._authSquared(utapiRequest, err => {
if (err) {
log.trace('error from vault', { errors: err });

View File

@ -1,4 +1,21 @@
const config = require('./lib/Config');
const fs = require('fs');
const path = require('path');
const Config = require('./lib/Config');
const server = require('./lib/server');
server(Object.assign({}, config, { component: 's3' }));
/*
* By default, the config file is "config.json" at the root.
* It can be overridden using the UTAPI_CONFIG_FILE environment var.
*/
const cfgpath = process.env.UTAPI_CONFIG_FILE || (__dirname+'/config.json');
let cfg;
try {
cfg = JSON.parse(fs.readFileSync(cfgpath, { encoding: 'utf-8' }));
} catch (err) {
throw new Error(`could not parse config file: ${err.message}`);
}
cfg.component = 's3';
server(new Config(cfg));

View File

@ -41,7 +41,7 @@ describe('Test MonitorDiskUsage hard limit', function () {
assert(lockSpy.calledOnce);
assert(unlockSpy.notCalled);
assert(execStub.calledOnce);
await assertMetricValue('utapi_monitor_disk_usage_hard_limit_bytes', 1);
await assertMetricValue('s3_utapi_monitor_disk_usage_hard_limit_bytes', 1);
});
it('should trigger a database unlock if below the limit', async () => {

View File

@ -15,7 +15,7 @@ class CustomTask extends BaseTask {
// eslint-disable-next-line class-methods-use-this
_registerMetricHandlers() {
const foo = new promClient.Gauge({
name: 'utapi_custom_task_foo_total',
name: 's3_utapi_custom_task_foo_total',
help: 'Count of foos',
labelNames: ['origin', 'containerName'],
});
@ -58,26 +58,26 @@ describe('Test BaseTask metrics', () => {
it('should push metrics for a task execution', async () => {
await task.execute();
const timeValues = await getMetricValues('utapi_custom_task_duration_seconds');
const timeValues = await getMetricValues('s3_utapi_custom_task_duration_seconds');
assert.strictEqual(timeValues.length, 1);
const attemptsValues = await getMetricValues('utapi_custom_task_attempts_total');
const attemptsValues = await getMetricValues('s3_utapi_custom_task_attempts_total');
assert.deepStrictEqual(attemptsValues, [{ value: 1, labels: {} }]);
const failuresValues = await getMetricValues('utapi_custom_task_failures_total');
const failuresValues = await getMetricValues('s3_utapi_custom_task_failures_total');
assert.deepStrictEqual(failuresValues, []);
});
it('should push metrics for a failed task execution', async () => {
sinon.replace(task, '_execute', sinon.fake.rejects('forced failure'));
await task.execute();
const failuresValues = await getMetricValues('utapi_custom_task_failures_total');
const failuresValues = await getMetricValues('s3_utapi_custom_task_failures_total');
assert.deepStrictEqual(failuresValues, [{ value: 1, labels: {} }]);
});
it('should allow custom handlers to be registered', async () => {
await task.execute();
const fooValues = await getMetricValues('utapi_custom_task_foo_total');
const fooValues = await getMetricValues('s3_utapi_custom_task_foo_total');
assert.deepStrictEqual(fooValues, [{ value: 1, labels: {} }]);
});
});

View File

@ -80,7 +80,7 @@ describe('Test CreateCheckpoint', function () {
assert.strictEqual(series.length, 3);
assertResults(totals, series);
await assertMetricValue('utapi_create_checkpoint_created_total', series.length);
await assertMetricValue('s3_utapi_create_checkpoint_created_total', series.length);
});
it('should only include events not in an existing checkpoint', async () => {

View File

@ -88,7 +88,7 @@ describe('Test CreateSnapshot', function () {
assert.strictEqual(series.length, 3);
assertResults(totals, series);
await assertMetricValue('utapi_create_snapshot_created_total', series.length);
await assertMetricValue('s3_utapi_create_snapshot_created_total', series.length);
});
it('should create a snapshot from more than one checkpoint', async () => {

View File

@ -69,8 +69,8 @@ describe('Test MonitorDiskUsage', () => {
const expectedTotalSize = expectedSingleSize * 2;
assert.strictEqual(task.usage, expectedTotalSize);
// Should equal the usage minus the empty datalog
await assertMetricValue('utapi_monitor_disk_usage_leveldb_bytes', expectedSingleSize);
await assertMetricValue('utapi_monitor_disk_usage_datalog_bytes', expectedSingleSize);
await assertMetricValue('s3_utapi_monitor_disk_usage_leveldb_bytes', expectedSingleSize);
await assertMetricValue('s3_utapi_monitor_disk_usage_datalog_bytes', expectedSingleSize);
});
});

View File

@ -92,9 +92,9 @@ describe('Test IngestShards', function () {
'@utapi/decodeEvent',
);
assertResults(events, series);
await assertMetricValue('utapi_ingest_shard_task_ingest_total', events.length);
await assertMetricValue('utapi_ingest_shard_task_shard_ingest_total', 1);
const metricValues = await getMetricValues('utapi_ingest_shard_task_shard_age_total');
await assertMetricValue('s3_utapi_ingest_shard_task_ingest_total', events.length);
await assertMetricValue('s3_utapi_ingest_shard_task_shard_ingest_total', 1);
const metricValues = await getMetricValues('s3_utapi_ingest_shard_task_shard_age_total');
assert.strictEqual(metricValues.length, 1);
const [metric] = metricValues;
assert(metric.value > 0);
@ -118,7 +118,7 @@ describe('Test IngestShards', function () {
);
assertResults(events, series);
await assertMetricValue('utapi_ingest_shard_task_ingest_total', events.length);
await assertMetricValue('s3_utapi_ingest_shard_task_ingest_total', events.length);
});
it('should ingest old metrics as repair', async () => {
@ -138,7 +138,7 @@ describe('Test IngestShards', function () {
'@utapi/decodeEvent',
);
assertResults(events, series);
await assertMetricValue('utapi_ingest_shard_task_slow_total', events.length);
await assertMetricValue('s3_utapi_ingest_shard_task_slow_total', events.length);
});
it('should strip the event uuid during ingestion', async () => {
@ -170,7 +170,6 @@ describe('Test IngestShards', function () {
const results = await warp10.fetch({
className: 'utapi.event', labels: { node: prefix }, start: start + 1, stop: -2,
});
const series = JSON.parse(results.result[0])[0];
const timestamps = series.v.map(ev => ev[0]);
assert.deepStrictEqual([
@ -179,7 +178,8 @@ describe('Test IngestShards', function () {
], timestamps);
});
it('should increment microseconds for several duplicate timestamps', async () => {
// please unskip this in https://scality.atlassian.net/browse/UTAPI-65
it.skip('should increment microseconds for several duplicate timestamps', async () => {
const start = shardFromTimestamp(getTs(-120));
const events = generateFakeEvents(start, start + 5, 5)
.map(ev => { ev.timestamp = start; return ev; });
@ -190,7 +190,6 @@ describe('Test IngestShards', function () {
const results = await warp10.fetch({
className: 'utapi.event', labels: { node: prefix }, start: start + 5, stop: -5,
});
const series = JSON.parse(results.result[0])[0];
const timestamps = series.v.map(ev => ev[0]);
assert.deepStrictEqual([

View File

@ -177,4 +177,49 @@ describe('Test ReindexTask', function () {
assert.strictEqual(series[0].values.length, 2);
series[0].values.map(value => assert.deepStrictEqual(value, bucketRecord));
});
describe('exponential backoff', () => {
it('should retry when bucketd is unreachable', done => {
// disable bucketd to simulate downtime
bucketd.end();
const bucketDStub = sinon.stub(bucketd, '_getBucketResponse');
bucketDStub.onFirstCall().callsFake(
// Once the timeout promise resolves, bucketd is able to be called.
// If we make a call after 10 seconds, this shows that retries
// have been occuring during bucketd downtime.
() => {
return {
key: 'foo',
value: 'bar',
};
},
);
const reindexPromise = new Promise((resolve, reject) => {
reindexTask._execute()
.then(() => {
resolve('reindexed');
})
.catch(err => {
reject(err);
});
});
const timeoutPromise = new Promise(resolve => {
const f = () => {
bucketd.start();
resolve();
};
setTimeout(f, 10000);
});
Promise.all([reindexPromise, timeoutPromise])
.then(values => {
assert.strictEqual(values[0], 'reindexed');
sinon.restore();
done();
});
});
});
});

View File

@ -80,7 +80,7 @@ describe('Test Repair', function () {
assert.strictEqual(series.length, 3);
assertResults(totals, series);
await assertMetricValue('utapi_repair_task_created_total', series.length);
await assertMetricValue('s3_utapi_repair_task_created_total', series.length);
});
it('should only include events not in an existing correction', async () => {

View File

@ -233,7 +233,8 @@ describe('Test UtapiClient', function () {
});
});
it('should get the current storage for an account using the cache', async () => {
// please unskip this in https://scality.atlassian.net/browse/UTAPI-65
it.skip('should get the current storage for an account using the cache', async () => {
await async.eachOf(totals.accounts, async (total, acc) => {
cacheClient.updateAccountCounterBase(acc, total.bytes);
});

View File

@ -39,7 +39,7 @@ function getMetricResponse(schemaKey) {
return response;
}
function assertMetrics(schemaKey, metricName, props, isNegativeValue, done) {
function assertMetrics(schemaKey, metricName, props, done) {
const timestamp = new Date().setMinutes(0, 0, 0);
const timeRange = [timestamp, timestamp];
const expectedRes = getMetricResponse(schemaKey);
@ -51,16 +51,6 @@ function assertMetrics(schemaKey, metricName, props, isNegativeValue, done) {
datastore,
logger,
(err, res) => {
if (isNegativeValue) {
assert(err.is.InternalError);
assert.strictEqual(
err.description,
'Utapi is in a transient state for this time period as '
+ 'metrics are being collected. Please try again in a few '
+ 'minutes.',
);
return done();
}
assert.strictEqual(err, null);
// overwrite operations metrics
if (expectedResProps.operations) {
@ -98,13 +88,12 @@ function testOps(schemaKey, keyIndex, metricindex, isNegativeValue, done) {
if (keyIndex === 'storageUtilized' || keyIndex === 'numberOfObjects') {
key = generateStateKey(schemaObject, keyIndex);
val = isNegativeValue ? -1024 : 1024;
props[metricindex] = [val, val];
props[metricindex] = isNegativeValue ? [0, 0] : [val, val];
memBackend.zadd(key, timestamp, val, () =>
assertMetrics(
schemaKey,
schemaObject[schemaKey],
props,
isNegativeValue,
done,
));
} else if (keyIndex === 'incomingBytes' || keyIndex === 'outgoingBytes') {
@ -116,7 +105,6 @@ function testOps(schemaKey, keyIndex, metricindex, isNegativeValue, done) {
schemaKey,
schemaObject[schemaKey],
props,
isNegativeValue,
done,
));
} else {
@ -129,7 +117,6 @@ function testOps(schemaKey, keyIndex, metricindex, isNegativeValue, done) {
schemaKey,
schemaObject[schemaKey],
props,
isNegativeValue,
done,
));
}
@ -145,7 +132,6 @@ Object.keys(metricLevels).forEach(schemaKey => {
schemaKey,
resourceNames[schemaKey],
null,
false,
done,
));

View File

@ -21,6 +21,9 @@ const config = {
localCache: redisLocal,
component: 's3',
};
const location = 'foo-backend';
const incrby = 100;
const decrby = -30;
function isSortedSetKey(key) {
return key.endsWith('storageUtilized') || key.endsWith('numberOfObjects');
@ -76,6 +79,29 @@ function setMockData(data, timestamp, cb) {
return cb();
}
// Builds the expected memory-backend payload for a location storage counter:
// a single `s3:location:<location>:locationStorage` key whose value is the
// byte count serialized as a string.
function getLocationObject(bytesValue) {
    return {
        [`s3:location:${location}:locationStorage`]: String(bytesValue),
    };
}
// Exercises the UtapiClient location-metric API.
// With `params.updateSize` set (truthy), pushes that delta for the location
// and checks the memory backend ends up equal to `expected`; without it,
// reads the location counter and checks the returned byte count instead.
// Calls `cb()` once the assertions have run.
function testLocationMetric(c, params, expected, cb) {
    const { location, updateSize } = params;
    if (!updateSize) {
        // Read path: fetch the counter and compare the stored byte count.
        c.getLocationMetric(location, REQUID, (err, bytesStored) => {
            assert.equal(err, null);
            assert.strictEqual(bytesStored, expected);
            return cb();
        });
        return;
    }
    // Write path: apply the delta, then compare the whole backend state.
    c.pushLocationMetric(location, updateSize, REQUID, err => {
        assert.equal(err, null);
        assert.deepStrictEqual(memoryBackend.data, expected);
        return cb();
    });
}
describe('UtapiClient:: enable/disable client', () => {
it('should disable client when no redis config is provided', () => {
const c = new UtapiClient();
@ -247,7 +273,11 @@ tests.forEach(test => {
c.setDataStore(ds);
c.pushMetric(metric, REQUID, params, () => {
deserializeMemoryBackend(memoryBackend.data);
assert.deepStrictEqual(memoryBackend.data, expected);
Object.keys(expected).forEach(key => {
if (memoryBackend.data[key]) {
assert.deepStrictEqual(memoryBackend.data[key], expected[key]);
}
});
return cb();
});
}
@ -490,6 +520,7 @@ tests.forEach(test => {
storageUtilized: '1024',
numberOfObjects: '1',
};
setMockData(data, timestamp, () => {
testMetric('deleteObject', params, expected, done);
});
@ -667,6 +698,40 @@ tests.forEach(test => {
testMetric('putDeleteMarkerObject', metricTypes, expected, done);
});
// Two-step scenario: push a PutObject for a 258-byte object, then push a
// putDeleteMarkerObject for the same bucket/key, and verify each step's
// expected metrics via testMetric.
it('should push putDeleteMarkerObject metrics and have correct bytes and number of objects', done => {
const expected = buildExpectedResult({
action: 'PutObject',
numberOfObjects: '1',
});
// Step 1: a fresh 258-byte object under key 'foo2' (no prior version,
// hence byteLength undefined / oldByteLength null).
const metrics = {
bucket: '5741-repro',
keys: ['foo2'],
byteLength: undefined,
newByteLength: 258,
oldByteLength: null,
numberOfObjects: 1,
accountId: '79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be',
userId: undefined,
};
testMetric('putObject', Object.assign(metrics, metricTypes), expected, () => {
// Step 2 expectation — deliberately shadows the outer `expected`;
// this one is for the DeleteObject action.
const expected = buildExpectedResult({
action: 'DeleteObject',
numberOfObjects: '1',
});
// Delete marker on the same key; byteLength carries the 258 bytes of
// the object being covered, the other size fields are unset.
const metrics2 = {
bucket: '5741-repro',
keys: ['foo2'],
byteLength: 258,
newByteLength: undefined,
oldByteLength: undefined,
numberOfObjects: undefined,
accountId: '79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be',
userId: undefined,
};
testMetric('putDeleteMarkerObject', Object.assign(metrics2, metricTypes), expected, done);
});
});
it('should push putBucketReplication metrics', done => {
const expected = buildExpectedResult({
action: 'PutBucketReplication',
@ -766,3 +831,27 @@ tests.forEach(test => {
});
});
});
// Suite for per-location storage counters (pushLocationMetric /
// getLocationMetric) against the in-memory backend.
describe('UtapiClient:: location quota metrics', () => {
// Named `function` expressions (not arrows) are required so mocha can bind
// `this` to the test context.
beforeEach(function beFn() {
// Fresh client per test, wired to the shared datastore stub.
this.currentTest.c = new UtapiClient(config);
this.currentTest.c.setDataStore(ds);
});
// Reset the in-memory backend so counters don't leak between tests.
afterEach(() => memoryBackend.flushDb());
it('should increment location metric', function itFn(done) {
const expected = getLocationObject(incrby);
testLocationMetric(this.test.c, { location, updateSize: incrby },
expected, done);
});
it('should decrement location metric', function itFn(done) {
// decrby is negative, so this exercises the decrement path.
const expected = getLocationObject(decrby);
testLocationMetric(this.test.c, { location, updateSize: decrby },
expected, done);
});
it('should list location metric', function itFn(done) {
// No updateSize -> read path; nothing was pushed, so expect 0 bytes.
const expected = 0;
testLocationMetric(this.test.c, { location }, expected, done);
});
});

View File

@ -114,20 +114,21 @@ describe('Test middleware', () => {
req.ctx = new RequestContext(req);
middleware.httpMetricsMiddleware(req, resp);
await assertMetricValue('utapi_http_requests_total', 1);
const duration = await getMetricValues('utapi_http_request_duration_seconds');
await assertMetricValue('s3_utapi_http_requests_total', 1);
const durationMetric = 's3_utapi_http_request_duration_seconds';
const duration = await getMetricValues(durationMetric);
// 14 defined buckets + 1 for Infinity
assert.strictEqual(
duration.filter(metric => metric.metricName === 'utapi_http_request_duration_seconds_bucket').length,
duration.filter(metric => metric.metricName === `${durationMetric}_bucket`).length,
15,
);
const count = duration.filter(metric => metric.metricName === 'utapi_http_request_duration_seconds_count');
const count = duration.filter(metric => metric.metricName === `${durationMetric}_count`);
assert.deepStrictEqual(count, [{
labels: {
action: 'listMetrics',
code: 200,
},
metricName: 'utapi_http_request_duration_seconds_count',
metricName: `${durationMetric}_count`,
value: 1,
}]);
assert.strictEqual(count[0].value, 1);
@ -137,7 +138,7 @@ describe('Test middleware', () => {
const req = templateRequest();
req.ctx = new RequestContext(req);
middleware.httpMetricsMiddleware(req, resp);
assert.rejects(() => getMetricValues('utapi_http_requests_total'));
assert.rejects(() => getMetricValues('s3_utapi_http_requests_total'));
});
});
});

View File

@ -112,6 +112,17 @@ class BucketD {
return body;
}
_getBucketVersionResponse(bucketName) {
const body = {
CommonPrefixes: [],
IsTruncated: false,
Versions: (this._bucketContent[bucketName] || [])
// patch in a versionId to more closely match the real response
.map(entry => ({ ...entry, versionId: 'null' })),
};
return body;
}
_getShadowBucketOverviewResponse(bucketName) {
const mpus = (this._bucketContent[bucketName] || []).map(o => ({
key: o.key,
@ -137,6 +148,8 @@ class BucketD {
|| req.query.listingType === 'Delimiter'
) {
req.body = this._getBucketResponse(bucketName);
} else if (req.query.listingType === 'DelimiterVersions') {
req.body = this._getBucketVersionResponse(bucketName);
}
// v2 reindex uses `Basic` listing type for everything

5290
yarn.lock

File diff suppressed because it is too large Load Diff