Compare commits
45 Commits
developmen ... ghivert/re
Author | SHA1 | Date |
---|---|---|
Guillaume Hivert | 8400204d95 | |
Guillaume Hivert | 0d057fb2d0 | |
Guillaume Hivert | 86c8c77dd2 | |
Guillaume Hivert | 52284a871a | |
Guillaume Hivert | a2d3dfeb21 | |
Guillaume Hivert | 73e0150612 | |
Guillaume Hivert | 89d5970aac | |
Guillaume Hivert | 8f4870f5a2 | |
Guillaume Hivert | 11a9032411 | |
Guillaume Hivert | 8e11405cf7 | |
Guillaume Hivert | 4b76407edd | |
Guillaume Hivert | 10a2a09a79 | |
Guillaume Hivert | a27379559c | |
Guillaume Hivert | 797f61d6c0 | |
Guillaume Hivert | 1a63e07297 | |
Guillaume Hivert | 403d6d3e79 | |
Guillaume Hivert | b5ed8b39d5 | |
Guillaume Hivert | f4fcc4b773 | |
Guillaume Hivert | d9d91596c0 | |
Guillaume Hivert | f2c343e8da | |
Guillaume Hivert | 952594945a | |
Guillaume Hivert | da48725ace | |
Guillaume Hivert | 13ecfd0c56 | |
Guillaume Hivert | 8ff5bf713a | |
Guillaume Hivert | bde858d8a7 | |
Guillaume Hivert | 841d4e4823 | |
Guillaume Hivert | 9069bd9c61 | |
Guillaume Hivert | 55b003c00f | |
Guillaume Hivert | 21fc1b7c64 | |
Guillaume Hivert | 8723084507 | |
Guillaume Hivert | 9f0a16c779 | |
Guillaume Hivert | d1cbd6b33f | |
Guillaume Hivert | 48cf48c8eb | |
Guillaume Hivert | b9a38fdbe0 | |
Guillaume Hivert | 5d5393bf4e | |
Guillaume Hivert | f05701d5b8 | |
Guillaume Hivert | df67f0ac35 | |
Guillaume Hivert | 109c3dc1f0 | |
Guillaume Hivert | 71da9c5ac6 | |
Guillaume Hivert | 6e02185ea0 | |
Guillaume Hivert | 948e724c31 | |
Guillaume Hivert | 377f52aad4 | |
Guillaume Hivert | db99440bce | |
Guillaume Hivert | cb2853a388 | |
Guillaume Hivert | df4b2e8eae | |
@@ -32,6 +32,7 @@ jobs:
           cache: 'yarn'
       - name: install dependencies
         run: yarn cache clean && yarn install --frozen-lockfile
+        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
       - name: lint yaml
         run: yarn --silent lint_yml
       - name: lint javascript
@@ -49,3 +50,31 @@ jobs:
       - name: run executables tests
         run: yarn install && yarn test
         working-directory: 'lib/executables/pensieveCreds/'
+
+  compile:
+    name: Compile and upload build artifacts
+    needs: test
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+      - name: Install NodeJS
+        uses: actions/setup-node@v2
+        with:
+          node-version: '16'
+          cache: yarn
+      - name: Install dependencies
+        run: yarn cache clean && yarn install --frozen-lockfile
+        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
+      - name: Compile
+        run: yarn build
+        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
+      - name: Upload artifacts
+        uses: scality/action-artifacts@v2
+        with:
+          url: https://artifacts.scality.net
+          user: ${{ secrets.ARTIFACTS_USER }}
+          password: ${{ secrets.ARTIFACTS_PASSWORD }}
+          source: ./build
+          method: upload
+        if: success()
@@ -13,3 +13,6 @@ node_modules/
 # Coverage
 coverage/
 .nyc_output/
+
+# TypeScript
+build/
@@ -0,0 +1,6 @@
+module.exports = {
+    presets: [
+        ['@babel/preset-env', { targets: { node: 'current' } }],
+        '@babel/preset-typescript',
+    ],
+};
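This Babel configuration is the usual pairing for running TypeScript sources through Babel (typically via babel-jest): @babel/preset-env lowers syntax for the current Node version, while @babel/preset-typescript only strips type annotations and performs no type checking, which is presumably why the workflow above keeps its continue-on-error escape hatches until ARSN-97 is resolved. A minimal illustration, with a made-up `double` function:

    // Input TypeScript: the annotations below are simply erased by
    // @babel/preset-typescript; a type error here would only be caught by tsc.
    const double = (n: number): number => n * 2;
    console.log(double(21)); // 42; Babel emits roughly: const double = n => n * 2;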
@@ -1,773 +0,0 @@
-{
-    "_comment": "------------------- Amazon errors ------------------",
-    "AccessDenied": { "code": 403, "description": "Access Denied" },
-    "AccessForbidden": { "code": 403, "description": "Access Forbidden" },
-    "AccountProblem": { "code": 403, "description": "There is a problem with your AWS account that prevents the operation from completing successfully. Please use Contact Us." },
-    "AmbiguousGrantByEmailAddress": { "code": 400, "description": "The email address you provided is associated with more than one account." },
-    "BadDigest": { "code": 400, "description": "The Content-MD5 you specified did not match what we received." },
-    "BucketAlreadyExists": { "code": 409, "description": "The requested bucket name is not available. The bucket namespace is shared by all users of the system. Please select a different name and try again." },
-    "BucketAlreadyOwnedByYou": { "code": 409, "description": "Your previous request to create the named bucket succeeded and you already own it. You get this error in all AWS regions except US Standard, us-east-1. In us-east-1 region, you will get 200 OK, but it is no-op (if bucket exists S3 will not do anything)." },
-    "BucketNotEmpty": { "code": 409, "description": "The bucket you tried to delete is not empty." },
-    "CredentialsNotSupported": { "code": 400, "description": "This request does not support credentials." },
-    "CrossLocationLoggingProhibited": { "code": 403, "description": "Cross-location logging not allowed. Buckets in one geographic location cannot log information to a bucket in another location." },
-    "DeleteConflict": { "code": 409, "description": "The request was rejected because it attempted to delete a resource that has attached subordinate entities. The error message describes these entities." },
-    "EntityTooSmall": { "code": 400, "description": "Your proposed upload is smaller than the minimum allowed object size." },
-    "EntityTooLarge": { "code": 400, "description": "Your proposed upload exceeds the maximum allowed object size." },
-    "ExpiredToken": { "code": 400, "description": "The provided token has expired." },
-    "HttpHeadersTooLarge": { "code": 400, "description": "Your http headers exceed the maximum allowed http headers size." },
-    "IllegalVersioningConfigurationException": { "code": 400, "description": "Indicates that the versioning configuration specified in the request is invalid." },
-    "IncompleteBody": { "code": 400, "description": "You did not provide the number of bytes specified by the Content-Length HTTP header." },
-    "IncorrectNumberOfFilesInPostRequest": { "code": 400, "description": "POST requires exactly one file upload per request." },
-    "InlineDataTooLarge": { "code": 400, "description": "Inline data exceeds the maximum allowed size." },
-    "InternalError": { "code": 500, "description": "We encountered an internal error. Please try again." },
-    "InvalidAccessKeyId": { "code": 403, "description": "The AWS access key Id you provided does not exist in our records." },
-    "InvalidAddressingHeader": { "code": 400, "description": "You must specify the Anonymous role." },
-    "InvalidArgument": { "code": 400, "description": "Invalid Argument" },
-    "InvalidBucketName": { "code": 400, "description": "The specified bucket is not valid." },
-    "InvalidBucketState": { "code": 409, "description": "The request is not valid with the current state of the bucket." },
-    "InvalidDigest": { "code": 400, "description": "The Content-MD5 you specified is not valid." },
-    "InvalidEncryptionAlgorithmError": { "code": 400, "description": "The encryption request you specified is not valid. The valid value is AES256." },
-    "InvalidLocationConstraint": { "code": 400, "description": "The specified location constraint is not valid." },
-    "InvalidObjectState": { "code": 403, "description": "The operation is not valid for the current state of the object." },
-    "InvalidPart": { "code": 400, "description": "One or more of the specified parts could not be found. The part might not have been uploaded, or the specified entity tag might not have matched the part's entity tag." },
-    "InvalidPartOrder": { "code": 400, "description": "The list of parts was not in ascending order.Parts list must specified in order by part number." },
-    "InvalidPartNumber": { "code": 416, "description": "The requested partnumber is not satisfiable." },
-    "InvalidPayer": { "code": 403, "description": "All access to this object has been disabled." },
-    "InvalidPolicyDocument": { "code": 400, "description": "The content of the form does not meet the conditions specified in the policy document." },
-    "InvalidRange": { "code": 416, "description": "The requested range cannot be satisfied." },
-    "InvalidRedirectLocation": { "code": 400, "description": "The website redirect location must have a prefix of 'http://' or 'https://' or '/'." },
-    "InvalidRequest": { "code": 400, "description": "SOAP requests must be made over an HTTPS connection." },
-    "InvalidSecurity": { "code": 403, "description": "The provided security credentials are not valid." },
-    "InvalidSOAPRequest": { "code": 400, "description": "The SOAP request body is invalid." },
-    "InvalidStorageClass": { "code": 400, "description": "The storage class you specified is not valid." },
-    "InvalidTag": { "code": 400, "description": "The Tag you have provided is invalid" },
-    "InvalidTargetBucketForLogging": { "code": 400, "description": "The target bucket for logging does not exist, is not owned by you, or does not have the appropriate grants for the log-delivery group." },
-    "InvalidToken": { "code": 400, "description": "The provided token is malformed or otherwise invalid." },
-    "InvalidURI": { "code": 400, "description": "Couldn't parse the specified URI." },
-    "KeyTooLong": { "code": 400, "description": "Your key is too long." },
-    "LimitExceeded": { "code": 409, "description": " The request was rejected because it attempted to create resources beyond the current AWS account limits. The error message describes the limit exceeded." },
-    "MalformedACLError": { "code": 400, "description": "The XML you provided was not well-formed or did not validate against our published schema." },
-    "MalformedPOSTRequest": { "code": 400, "description": "The body of your POST request is not well-formed multipart/form-data." },
-    "MalformedXML": { "code": 400, "description": "The XML you provided was not well-formed or did not validate against our published schema." },
-    "MaxMessageLengthExceeded": { "code": 400, "description": "Your request was too big." },
-    "MaxPostPreDataLengthExceededError": { "code": 400, "description": "Your POST request fields preceding the upload file were too large." },
-    "MetadataTooLarge": { "code": 400, "description": "Your metadata headers exceed the maximum allowed metadata size." },
-    "MethodNotAllowed": { "code": 405, "description": "The specified method is not allowed against this resource." },
-    "MissingAttachment": { "code": 400, "description": "A SOAP attachment was expected, but none were found." },
-    "MissingContentLength": { "code": 411, "description": "You must provide the Content-Length HTTP header." },
-    "MissingRequestBodyError": { "code": 400, "description": "Request body is empty" },
-    "MissingRequiredParameter": { "code": 400, "description": "Your request is missing a required parameter." },
-    "MissingSecurityElement": { "code": 400, "description": "The SOAP 1.1 request is missing a security element." },
-    "MissingSecurityHeader": { "code": 400, "description": "Your request is missing a required header." },
-    "NoLoggingStatusForKey": { "code": 400, "description": "There is no such thing as a logging status subresource for a key." },
-    "NoSuchBucket": { "code": 404, "description": "The specified bucket does not exist." },
-    "NoSuchCORSConfiguration": { "code": 404, "description": "The CORS configuration does not exist" },
-    "NoSuchKey": { "code": 404, "description": "The specified key does not exist." },
-    "NoSuchLifecycleConfiguration": { "code": 404, "description": "The lifecycle configuration does not exist." },
-    "NoSuchObjectLockConfiguration": { "code": 404, "description": "The specified object does not have a ObjectLock configuration." },
-    "NoSuchWebsiteConfiguration": { "code": 404, "description": "The specified bucket does not have a website configuration" },
-    "NoSuchUpload": { "code": 404, "description": "The specified multipart upload does not exist. The upload ID might be invalid, or the multipart upload might have been aborted or completed." },
-    "NoSuchVersion": { "code": 404, "description": "Indicates that the version ID specified in the request does not match an existing version." },
-    "ReplicationConfigurationNotFoundError": { "code": 404, "description": "The replication configuration was not found" },
-    "ObjectLockConfigurationNotFoundError": { "code": 404, "description": "The object lock configuration was not found" },
-    "ServerSideEncryptionConfigurationNotFoundError": { "code": 404, "description": "The server side encryption configuration was not found" },
-    "NotImplemented": { "code": 501, "description": "A header you provided implies functionality that is not implemented." },
-    "NotModified": { "code": 304, "description": "Not Modified." },
-    "NotSignedUp": { "code": 403, "description": "Your account is not signed up for the S3 service. You must sign up before you can use S3. " },
-    "NoSuchBucketPolicy": { "code": 404, "description": "The specified bucket does not have a bucket policy." },
-    "OperationAborted": { "code": 409, "description": "A conflicting conditional operation is currently in progress against this resource. Try again." },
-    "PermanentRedirect": { "code": 301, "description": "The bucket you are attempting to access must be addressed using the specified endpoint. Send all future requests to this endpoint." },
-    "PreconditionFailed": { "code": 412, "description": "At least one of the preconditions you specified did not hold." },
-    "Redirect": { "code": 307, "description": "Temporary redirect." },
-    "RestoreAlreadyInProgress": { "code": 409, "description": "Object restore is already in progress." },
-    "RequestIsNotMultiPartContent": { "code": 400, "description": "Bucket POST must be of the enclosure-type multipart/form-data." },
-    "RequestTimeout": { "code": 400, "description": "Your socket connection to the server was not read from or written to within the timeout period." },
-    "RequestTimeTooSkewed": { "code": 403, "description": "The difference between the request time and the server's time is too large." },
-    "RequestTorrentOfBucketError": { "code": 400, "description": "Requesting the torrent file of a bucket is not permitted." },
-    "SignatureDoesNotMatch": { "code": 403, "description": "The request signature we calculated does not match the signature you provided." },
-    "_comment": { "note": "This is an AWS S3 specific error. We are opting to use the more general 'ServiceUnavailable' error used throughout AWS (IAM/EC2) to have uniformity of error messages even though we are potentially compromising S3 compatibility.", "ServiceUnavailable": { "code": 503, "description": "Reduce your request rate." } },
-    "ServiceUnavailable": { "code": 503, "description": "The request has failed due to a temporary failure of the server." },
-    "SlowDown": { "code": 503, "description": "Reduce your request rate." },
-    "TemporaryRedirect": { "code": 307, "description": "You are being redirected to the bucket while DNS updates." },
-    "TokenRefreshRequired": { "code": 400, "description": "The provided token must be refreshed." },
-    "TooManyBuckets": { "code": 400, "description": "You have attempted to create more buckets than allowed." },
-    "TooManyParts": { "code": 400, "description": "You have attempted to upload more parts than allowed." },
-    "UnexpectedContent": { "code": 400, "description": "This request does not support content." },
-    "UnresolvableGrantByEmailAddress": { "code": 400, "description": "The email address you provided does not match any account on record." },
-    "UserKeyMustBeSpecified": { "code": 400, "description": "The bucket POST must contain the specified field name. If it is specified, check the order of the fields." },
-    "NoSuchEntity": { "code": 404, "description": "The request was rejected because it referenced an entity that does not exist. The error message describes the entity." },
-    "WrongFormat": { "code": 400, "description": "Data entered by the user has a wrong format." },
-    "Forbidden": { "code": 403, "description": "Authentication failed." },
-    "EntityDoesNotExist": { "code": 404, "description": "Not found." },
-    "EntityAlreadyExists": { "code": 409, "description": "The request was rejected because it attempted to create a resource that already exists." },
-    "KeyAlreadyExists": { "code": 409, "description": "The request was rejected because it attempted to create a resource that already exists." },
-    "ServiceFailure": { "code": 500, "description": "Server error: the request processing has failed because of an unknown error, exception or failure." },
-    "IncompleteSignature": { "code": 400, "description": "The request signature does not conform to AWS standards." },
-    "InternalFailure": { "code": 500, "description": "The request processing has failed because of an unknown error, exception or failure." },
-    "InvalidAction": { "code": 400, "description": "The action or operation requested is invalid. Verify that the action is typed correctly." },
-    "InvalidClientTokenId": { "code": 403, "description": "The X.509 certificate or AWS access key ID provided does not exist in our records." },
-    "InvalidParameterCombination": { "code": 400, "description": "Parameters that must not be used together were used together." },
-    "InvalidParameterValue": { "code": 400, "description": "An invalid or out-of-range value was supplied for the input parameter." },
-    "InvalidQueryParameter": { "code": 400, "description": "The AWS query string is malformed or does not adhere to AWS standards." },
-    "MalformedQueryString": { "code": 404, "description": "The query string contains a syntax error." },
-    "MissingAction": { "code": 400, "description": "The request is missing an action or a required parameter." },
-    "MissingAuthenticationToken": { "code": 403, "description": "The request must contain either a valid (registered) AWS access key ID or X.509 certificate." },
-    "MissingParameter": { "code": 400, "description": "A required parameter for the specified action is not supplied." },
-    "OptInRequired": { "code": 403, "description": "The AWS access key ID needs a subscription for the service." },
-    "RequestExpired": { "code": 400, "description": "The request reached the service more than 15 minutes after the date stamp on the request or more than 15 minutes after the request expiration date (such as for pre-signed URLs), or the date stamp on the request is more than 15 minutes in the future." },
-    "Throttling": { "code": 400, "description": "The request was denied due to request throttling." },
-    "AccountNotFound": { "code": 404, "description": "No account was found in Vault, please contact your system administrator." },
-    "ValidationError": { "code": 400, "description": "The specified value is invalid." },
-    "MalformedPolicyDocument": { "code": 400, "description": "Syntax errors in policy." },
-    "InvalidInput": { "code": 400, "description": "The request was rejected because an invalid or out-of-range value was supplied for an input parameter." },
-    "MalformedPolicy": { "code": 400, "description": "This policy contains invalid Json" },
-    "ReportExpired": { "code": 410, "description": "The request was rejected because the most recent credential report has expired. To generate a new credential report, use GenerateCredentialReport." },
-    "ReportInProgress": { "code": 404, "description": "The request was rejected because the credential report is still being generated." },
-    "ReportNotPresent": { "code": 410, "description": "The request was rejected because the credential report does not exist. To generate a credential report, use GenerateCredentialReport." },
-    "_comment": "-------------- Special non-AWS S3 errors --------------",
-    "MPUinProgress": { "code": 409, "description": "The bucket you tried to delete has an ongoing multipart upload." },
-    "LocationNotFound": { "code": 424, "description": "The object data location does not exist." },
-    "_comment": "-------------- Internal project errors --------------",
-    "_comment": "----------------------- Vault -----------------------",
-    "_comment": "#### formatErrors ####",
-    "BadName": { "description": "name not ok", "code": 5001 },
-    "BadAccount": { "description": "account not ok", "code": 5002 },
-    "BadGroup": { "description": "group not ok", "code": 5003 },
-    "BadId": { "description": "id not ok", "code": 5004 },
-    "BadAccountName": { "description": "accountName not ok", "code": 5005 },
-    "BadNameFriendly": { "description": "nameFriendly not ok", "code": 5006 },
-    "BadEmailAddress": { "description": "email address not ok", "code": 5007 },
-    "BadPath": { "description": "path not ok", "code": 5008 },
-    "BadArn": { "description": "arn not ok", "code": 5009 },
-    "BadCreateDate": { "description": "createDate not ok", "code": 5010 },
-    "BadLastUsedDate": { "description": "lastUsedDate not ok", "code": 5011 },
-    "BadNotBefore": { "description": "notBefore not ok", "code": 5012 },
-    "BadNotAfter": { "description": "notAfter not ok", "code": 5013 },
-    "BadSaltedPwd": { "description": "salted password not ok", "code": 5014 },
-    "ok": { "description": "No error", "code": 200 },
-    "BadUser": { "description": "user not ok", "code": 5016 },
-    "BadSaltedPasswd": { "description": "salted password not ok", "code": 5017 },
-    "BadPasswdDate": { "description": "password date not ok", "code": 5018 },
-    "BadCanonicalId": { "description": "canonicalId not ok", "code": 5019 },
-    "BadAlias": { "description": "alias not ok", "code": 5020 },
-    "_comment": "#### internalErrors ####",
-    "DBPutFailed": { "description": "DB put failed", "code": 5021 },
-    "_comment": "#### alreadyExistErrors ####",
-    "AccountEmailAlreadyUsed": { "description": "an other account already uses that email", "code": 5022 },
-    "AccountNameAlreadyUsed": { "description": "an other account already uses that name", "code": 5023 },
-    "UserEmailAlreadyUsed": { "description": "an other user already uses that email", "code": 5024 },
-    "UserNameAlreadyUsed": { "description": "an other user already uses that name", "code": 5025 },
-    "_comment": "#### doesntExistErrors ####",
-    "NoParentAccount": { "description": "parent account does not exist", "code": 5026 },
-    "_comment": "#### authErrors ####",
-    "BadStringToSign": { "description": "stringToSign not ok'", "code": 5027 },
-    "BadSignatureFromRequest": { "description": "signatureFromRequest not ok", "code": 5028 },
-    "BadAlgorithm": { "description": "hashAlgorithm not ok", "code": 5029 },
-    "SecretKeyDoesNotExist": { "description": "secret key does not exist", "code": 5030 },
-    "InvalidRegion": { "description": "Region was not provided or is not recognized by the system", "code": 5031 },
-    "ScopeDate": { "description": "scope date is missing, or format is invalid", "code": 5032 },
-    "BadAccessKey": { "description": "access key not ok", "code": 5033 },
-    "NoDict": { "description": "no dictionary of params provided for signature verification", "code": 5034 },
-    "BadSecretKey": { "description": "secretKey not ok", "code": 5035 },
-    "BadSecretKeyValue": { "description": "secretKey value not ok", "code": 5036 },
-    "BadSecretKeyStatus": { "description": "secretKey status not ok", "code": 5037 },
-    "_comment": "#### OidcpErrors ####",
-    "BadUrl": { "description": "url not ok", "code": 5038 },
-    "BadClientIdList": { "description": "client id list not ok'", "code": 5039 },
-    "BadThumbprintList": { "description": "thumbprint list not ok'", "code": 5040 },
-    "BadObject": { "description": "Object not ok'", "code": 5041 },
-    "_comment": "#### RoleErrors ####",
-    "BadRole": { "description": "role not ok", "code": 5042 },
-    "_comment": "#### SamlpErrors ####",
-    "BadSamlp": { "description": "samlp not ok", "code": 5043 },
-    "BadMetadataDocument": { "description": "metadata document not ok", "code": 5044 },
-    "BadSessionIndex": { "description": "session index not ok", "code": 5045 },
-    "Unauthorized": { "description": "not authenticated", "code": 401 },
-    "_comment": "--------------------- MetaData ---------------------",
-    "_comment": "#### formatErrors ####",
-    "CacheUpdated": { "description": "The cache has been updated", "code": 500 },
-    "DBNotFound": { "description": "This DB does not exist", "code": 404 },
-    "DBAlreadyExists": { "description": "This DB already exist", "code": 409 },
-    "ObjNotFound": { "description": "This object does not exist", "code": 404 },
-    "PermissionDenied": { "description": "Permission denied", "code": 403 },
-    "BadRequest": { "description": "BadRequest", "code": 400 },
-    "RaftSessionNotLeader": { "description": "NotLeader", "code": 500 },
-    "RaftSessionLeaderNotConnected": { "description": "RaftSessionLeaderNotConnected", "code": 400 },
-    "NoLeaderForDB": { "description": "NoLeaderForDB", "code": 400 },
-    "RouteNotFound": { "description": "RouteNotFound", "code": 404 },
-    "NoMapsInConfig": { "description": "NoMapsInConfig", "code": 404 },
-    "DBAPINotReady": { "message": "DBAPINotReady", "code": 500 },
-    "NotEnoughMapsInConfig:": { "description": "NotEnoughMapsInConfig", "code": 400 },
-    "TooManyRequests": { "description": "TooManyRequests", "code": 429 },
-    "_comment": "----------------------- cdmiclient -----------------------",
-    "ReadOnly": { "description": "trying to write to read only back-end", "code": 403 },
-    "_comment": "----------------------- authbackend -----------------------",
-    "AuthMethodNotImplemented": { "description": "AuthMethodNotImplemented", "code": 501 }
-}
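The JSON file removed above was the declarative catalogue of error codes and descriptions. Its replacement is not shown in this compare view, but the new index.ts below re-exports a default from './lib/errors', so the definitions presumably now live in TypeScript. A hypothetical sketch of what a typed equivalent of the entries above could look like (ErrorEntry and errorDefinitions are illustrative names, not the real module):

    // Hypothetical sketch only; the actual './lib/errors' implementation is
    // not part of this diff. Entries mirror the JSON removed above.
    type ErrorEntry = { code: number; description: string };

    const errorDefinitions: Record<string, ErrorEntry> = {
        AccessDenied: { code: 403, description: 'Access Denied' },
        NoSuchKey: { code: 404, description: 'The specified key does not exist.' },
    };

    export default errorDefinitions;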
index.js (202 lines removed)
@@ -1,202 +0,0 @@
-module.exports = {
-    auth: require('./lib/auth/auth'),
-    constants: require('./lib/constants'),
-    db: require('./lib/db'),
-    errors: require('./lib/errors.js'),
-    errorUtils: require('./lib/errorUtils'),
-    shuffle: require('./lib/shuffle'),
-    stringHash: require('./lib/stringHash'),
-    ipCheck: require('./lib/ipCheck'),
-    jsutil: require('./lib/jsutil'),
-    https: {
-        ciphers: require('./lib/https/ciphers.js'),
-        dhparam: require('./lib/https/dh2048.js'),
-    },
-    algorithms: {
-        list: require('./lib/algos/list/exportAlgos'),
-        listTools: {
-            DelimiterTools: require('./lib/algos/list/tools'),
-            Skip: require('./lib/algos/list/skip'),
-        },
-        cache: {
-            LRUCache: require('./lib/algos/cache/LRUCache'),
-        },
-        stream: {
-            MergeStream: require('./lib/algos/stream/MergeStream'),
-        },
-        SortedSet: require('./lib/algos/set/SortedSet'),
-    },
-    policies: {
-        evaluators: require('./lib/policyEvaluator/evaluator.js'),
-        validateUserPolicy: require('./lib/policy/policyValidator')
-            .validateUserPolicy,
-        evaluatePrincipal: require('./lib/policyEvaluator/principal'),
-        RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
-        requestUtils: require('./lib/policyEvaluator/requestUtils'),
-        actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
-    },
-    Clustering: require('./lib/Clustering'),
-    testing: {
-        matrix: require('./lib/testing/matrix.js'),
-    },
-    versioning: {
-        VersioningConstants: require('./lib/versioning/constants.js')
-            .VersioningConstants,
-        Version: require('./lib/versioning/Version.js').Version,
-        VersionID: require('./lib/versioning/VersionID.js'),
-        WriteGatheringManager: require('./lib/versioning/WriteGatheringManager.js'),
-        WriteCache: require('./lib/versioning/WriteCache.js'),
-        VersioningRequestProcessor: require('./lib/versioning/VersioningRequestProcessor.js'),
-    },
-    network: {
-        http: {
-            server: require('./lib/network/http/server'),
-            utils: require('./lib/network/http/utils'),
-        },
-        rpc: require('./lib/network/rpc/rpc'),
-        level: require('./lib/network/rpc/level-net'),
-        rest: {
-            RESTServer: require('./lib/network/rest/RESTServer'),
-            RESTClient: require('./lib/network/rest/RESTClient'),
-        },
-        RoundRobin: require('./lib/network/RoundRobin'),
-        probe: {
-            ProbeServer: require('./lib/network/probe/ProbeServer'),
-            HealthProbeServer:
-                require('./lib/network/probe/HealthProbeServer.js'),
-            Utils: require('./lib/network/probe/Utils.js'),
-        },
-        kmip: require('./lib/network/kmip'),
-        kmipClient: require('./lib/network/kmip/Client'),
-    },
-    s3routes: {
-        routes: require('./lib/s3routes/routes'),
-        routesUtils: require('./lib/s3routes/routesUtils'),
-    },
-    s3middleware: {
-        userMetadata: require('./lib/s3middleware/userMetadata'),
-        convertToXml: require('./lib/s3middleware/convertToXml'),
-        escapeForXml: require('./lib/s3middleware/escapeForXml'),
-        objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
-        tagging: require('./lib/s3middleware/tagging'),
-        checkDateModifiedHeaders:
-            require('./lib/s3middleware/validateConditionalHeaders')
-                .checkDateModifiedHeaders,
-        validateConditionalHeaders:
-            require('./lib/s3middleware/validateConditionalHeaders')
-                .validateConditionalHeaders,
-        MD5Sum: require('./lib/s3middleware/MD5Sum'),
-        NullStream: require('./lib/s3middleware/nullStream'),
-        objectUtils: require('./lib/s3middleware/objectUtils'),
-        azureHelper: {
-            mpuUtils:
-                require('./lib/s3middleware/azureHelpers/mpuUtils'),
-            ResultsCollector:
-                require('./lib/s3middleware/azureHelpers/ResultsCollector'),
-            SubStreamInterface:
-                require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
-        },
-        prepareStream: require('./lib/s3middleware/prepareStream'),
-        processMpuParts: require('./lib/s3middleware/processMpuParts'),
-        retention: require('./lib/s3middleware/objectRetention'),
-        lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
-    },
-    storage: {
-        metadata: {
-            MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
-            bucketclient: {
-                BucketClientInterface:
-                    require('./lib/storage/metadata/bucketclient/' +
-                        'BucketClientInterface'),
-                LogConsumer:
-                    require('./lib/storage/metadata/bucketclient/LogConsumer'),
-            },
-            file: {
-                BucketFileInterface:
-                    require('./lib/storage/metadata/file/BucketFileInterface'),
-                MetadataFileServer:
-                    require('./lib/storage/metadata/file/MetadataFileServer'),
-                MetadataFileClient:
-                    require('./lib/storage/metadata/file/MetadataFileClient'),
-            },
-            inMemory: {
-                metastore:
-                    require('./lib/storage/metadata/in_memory/metastore'),
-                metadata: require('./lib/storage/metadata/in_memory/metadata'),
-                bucketUtilities:
-                    require('./lib/storage/metadata/in_memory/bucket_utilities'),
-            },
-            mongoclient: {
-                MongoClientInterface:
-                    require('./lib/storage/metadata/mongoclient/' +
-                        'MongoClientInterface'),
-                LogConsumer:
-                    require('./lib/storage/metadata/mongoclient/LogConsumer'),
-            },
-            proxy: {
-                Server: require('./lib/storage/metadata/proxy/Server'),
-            },
-        },
-        data: {
-            DataWrapper: require('./lib/storage/data/DataWrapper'),
-            MultipleBackendGateway:
-                require('./lib/storage/data/MultipleBackendGateway'),
-            parseLC: require('./lib/storage/data/LocationConstraintParser'),
-            file: {
-                DataFileStore:
-                    require('./lib/storage/data/file/DataFileStore'),
-                DataFileInterface:
-                    require('./lib/storage/data/file/DataFileInterface'),
-            },
-            external: {
-                AwsClient: require('./lib/storage/data/external/AwsClient'),
-                AzureClient: require('./lib/storage/data/external/AzureClient'),
-                GcpClient: require('./lib/storage/data/external/GcpClient'),
-                GCP: require('./lib/storage/data/external/GCP/GcpService'),
-                GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
-                GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
-                PfsClient: require('./lib/storage/data/external/PfsClient'),
-                backendUtils: require('./lib/storage/data/external/utils'),
-            },
-            inMemory: {
-                datastore: require('./lib/storage/data/in_memory/datastore'),
-            },
-        },
-        utils: require('./lib/storage/utils'),
-    },
-    models: {
-        BackendInfo: require('./lib/models/BackendInfo'),
-        BucketInfo: require('./lib/models/BucketInfo'),
-        BucketAzureInfo: require('./lib/models/BucketAzureInfo'),
-        ObjectMD: require('./lib/models/ObjectMD'),
-        ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
-        ObjectMDAzureInfo: require('./lib/models/ObjectMDAzureInfo'),
-        ARN: require('./lib/models/ARN'),
-        WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
-        ReplicationConfiguration:
-            require('./lib/models/ReplicationConfiguration'),
-        LifecycleConfiguration:
-            require('./lib/models/LifecycleConfiguration'),
-        LifecycleRule: require('./lib/models/LifecycleRule'),
-        BucketPolicy: require('./lib/models/BucketPolicy'),
-        ObjectLockConfiguration:
-            require('./lib/models/ObjectLockConfiguration'),
-        NotificationConfiguration:
-            require('./lib/models/NotificationConfiguration'),
-    },
-    metrics: {
-        StatsClient: require('./lib/metrics/StatsClient'),
-        StatsModel: require('./lib/metrics/StatsModel'),
-        RedisClient: require('./lib/metrics/RedisClient'),
-        ZenkoMetrics: require('./lib/metrics/ZenkoMetrics'),
-    },
-    pensieve: {
-        credentialUtils: require('./lib/executables/pensieveCreds/utils'),
-    },
-    stream: {
-        readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
-    },
-    patches: {
-        locationConstraints: require('./lib/patches/locationConstraints'),
-    },
-};
@@ -0,0 +1,75 @@
+export * as auth from './lib/auth/auth';
+export * as constants from './lib/constants';
+export * as db from './lib/db';
+export { default as errors } from './lib/errors';
+export * as errorUtils from './lib/errorUtils';
+export { default as shuffle } from './lib/shuffle';
+export { default as stringHash } from './lib/stringHash';
+export * as ipCheck from './lib/ipCheck';
+export * as jsutil from './lib/jsutil';
+export * as https from './lib/https';
+export { default as Clustering } from './lib/Clustering';
+export * as algorithms from './lib/algos';
+export * as policies from './lib/policyEvaluator';
+export * as testing from './lib/testing';
+export * as versioning from './lib/versioning';
+export * as network from './lib/network';
+export * as s3routes from './lib/s3routes';
+export * as s3middleware from './lib/s3middleware';
+export * as models from './lib/models';
+export * as metrics from './lib/metrics';
+export * as stream from './lib/stream';
+
+export const pensieve = {
+    credentialUtils: require('./lib/executables/pensieveCreds/utils'),
+};
+
+export const storage = {
+    metadata: {
+        MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
+        bucketclient: {
+            BucketClientInterface: require('./lib/storage/metadata/bucketclient/BucketClientInterface'),
+            LogConsumer: require('./lib/storage/metadata/bucketclient/LogConsumer'),
+        },
+        file: {
+            BucketFileInterface: require('./lib/storage/metadata/file/BucketFileInterface'),
+            MetadataFileServer: require('./lib/storage/metadata/file/MetadataFileServer'),
+            MetadataFileClient: require('./lib/storage/metadata/file/MetadataFileClient'),
+        },
+        inMemory: {
+            metastore: require('./lib/storage/metadata/in_memory/metastore'),
+            metadata: require('./lib/storage/metadata/in_memory/metadata'),
+            bucketUtilities: require('./lib/storage/metadata/in_memory/bucket_utilities'),
+        },
+        mongoclient: {
+            MongoClientInterface: require('./lib/storage/metadata/mongoclient/MongoClientInterface'),
+            LogConsumer: require('./lib/storage/metadata/mongoclient/LogConsumer'),
+        },
+        proxy: {
+            Server: require('./lib/storage/metadata/proxy/Server'),
+        },
+    },
+    data: {
+        DataWrapper: require('./lib/storage/data/DataWrapper'),
+        MultipleBackendGateway: require('./lib/storage/data/MultipleBackendGateway'),
+        parseLC: require('./lib/storage/data/LocationConstraintParser'),
+        file: {
+            DataFileStore: require('./lib/storage/data/file/DataFileStore'),
+            DataFileInterface: require('./lib/storage/data/file/DataFileInterface'),
+        },
+        external: {
+            AwsClient: require('./lib/storage/data/external/AwsClient'),
+            AzureClient: require('./lib/storage/data/external/AzureClient'),
+            GcpClient: require('./lib/storage/data/external/GcpClient'),
+            GCP: require('./lib/storage/data/external/GCP/GcpService'),
+            GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
+            GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
+            PfsClient: require('./lib/storage/data/external/PfsClient'),
+            backendUtils: require('./lib/storage/data/external/utils'),
+        },
+        inMemory: {
+            datastore: require('./lib/storage/data/in_memory/datastore'),
+        },
+    },
+    utils: require('./lib/storage/utils'),
+};
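For consumers, the single module.exports tree of index.js becomes named ES module exports. A sketch of the call-site difference, assuming the package is imported under the name 'arsenal' (an assumption; the published name is not shown in this diff):

    // Before (CommonJS index.js):
    //   const { storage, algorithms } = require('arsenal');
    // After (TypeScript index.ts):
    import { storage, algorithms } from 'arsenal';

    // Nested shapes are preserved, so existing property paths keep working:
    const MetadataWrapper = storage.metadata.MetadataWrapper;
    const LRUCache = algorithms.cache.LRUCache;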
@@ -1,8 +1,6 @@
-'use strict'; // eslint-disable-line
-
-const cluster = require('cluster');
+import * as cluster from 'cluster';
 
-class Clustering {
+export default class Clustering {
     /**
      * Constructor
      *
@@ -259,5 +257,3 @@ class Clustering {
         });
     }
 }
-
-module.exports = Clustering;
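One side effect of moving from module.exports = Clustering to export default: plain CommonJS consumers of the compiled output receive the class on the .default property unless their build enables esModuleInterop. A small interop sketch (paths assume a local build):

    // TypeScript / ESM consumers are unaffected:
    import Clustering from './lib/Clustering';

    // Plain CommonJS consumers without esModuleInterop would need:
    // const Clustering = require('./lib/Clustering').default;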
@@ -1,4 +1,4 @@
-const assert = require('assert');
+import assert from 'assert';
 
 /**
  * @class
@@ -6,13 +6,19 @@ const assert = require('assert');
  * number of items and a Least Recently Used (LRU) strategy for
  * eviction.
  */
-class LRUCache {
+export default class LRUCache {
+    _maxEntries;
+    _entryMap;
+    _entryCount;
+    _lruTail;
+    _lruHead;
+
     /**
      * @constructor
-     * @param {number} maxEntries - maximum number of entries kept in
+     * @param maxEntries - maximum number of entries kept in
      * the cache
      */
-    constructor(maxEntries) {
+    constructor(maxEntries: number) {
         assert(maxEntries >= 1);
         this._maxEntries = maxEntries;
         this.clear();
@@ -22,12 +28,12 @@ class LRUCache {
      * Add or update the value associated to a key in the cache,
      * making it the most recently accessed for eviction purpose.
      *
-     * @param {string} key - key to add
-     * @param {object} value - associated value (can be of any type)
-     * @return {boolean} true if the cache contained an entry with
+     * @param key - key to add
+     * @param value - associated value (can be of any type)
+     * @return true if the cache contained an entry with
      * this key, false if it did not
      */
-    add(key, value) {
+    add(key: string, value): boolean {
         let entry = this._entryMap[key];
         if (entry) {
             entry.value = value;
@@ -54,12 +60,12 @@ class LRUCache {
      * Get the value associated to a key in the cache, making it the
      * most recently accessed for eviction purpose.
      *
-     * @param {string} key - key of which to fetch the associated value
-     * @return {object|undefined} - returns the associated value if
+     * @param key - key of which to fetch the associated value
+     * @return returns the associated value if
      * exists in the cache, or undefined if not found - either if the
      * key was never added or if it has been evicted from the cache.
      */
-    get(key) {
+    get(key: string) {
         const entry = this._entryMap[key];
         if (entry) {
             // make the entry the most recently used by re-pushing it
@@ -74,12 +80,12 @@ class LRUCache {
     /**
      * Remove an entry from the cache if exists
      *
-     * @param {string} key - key to remove
-     * @return {boolean} true if an entry has been removed, false if
+     * @param key - key to remove
+     * @return true if an entry has been removed, false if
      * there was no entry with this key in the cache - either if the
      * key was never added or if it has been evicted from the cache.
      */
-    remove(key) {
+    remove(key: string): boolean {
         const entry = this._entryMap[key];
         if (entry) {
             this._removeEntry(entry);
@@ -91,16 +97,14 @@ class LRUCache {
     /**
      * Get the current number of cached entries
      *
-     * @return {number} current number of cached entries
+     * @return current number of cached entries
      */
-    count() {
+    count(): number {
         return this._entryCount;
     }
 
     /**
      * Remove all entries from the cache
-     *
-     * @return {undefined}
      */
     clear() {
         this._entryMap = {};
@@ -113,8 +117,7 @@ class LRUCache {
      * Push an entry to the front of the LRU list, making it the most
      * recently accessed
      *
-     * @param {object} entry - entry to push
-     * @return {undefined}
+     * @param entry - entry to push
      */
     _lruPushEntry(entry) {
         /* eslint-disable no-param-reassign */
@@ -133,8 +136,7 @@ class LRUCache {
     /**
      * Remove an entry from the LRU list
      *
-     * @param {object} entry - entry to remove
-     * @return {undefined}
+     * @param entry - entry to remove
      */
     _lruRemoveEntry(entry) {
         /* eslint-disable no-param-reassign */
@@ -154,8 +156,7 @@ class LRUCache {
     /**
      * Helper function to remove an existing entry from the cache
      *
-     * @param {object} entry - cache entry to remove
-     * @return {undefined}
+     * @param entry - cache entry to remove
      */
     _removeEntry(entry) {
         this._lruRemoveEntry(entry);
@@ -163,5 +164,3 @@ class LRUCache {
         this._entryCount -= 1;
     }
 }
-
-module.exports = LRUCache;
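This file happens to show LRUCache's whole public surface (constructor, add, get, remove, count, clear), so a short usage sketch follows; the eviction order is the LRU strategy described in the class comment:

    import LRUCache from './lib/algos/cache/LRUCache';

    const cache = new LRUCache(2); // maxEntries must be >= 1 (asserted above)
    cache.add('a', 1);
    cache.add('b', 2);
    cache.get('a');                // 'a' becomes the most recently used
    cache.add('c', 3);             // evicts 'b', the least recently used
    console.log(cache.get('b'));   // undefined: evicted
    console.log(cache.count());    // 2
    cache.clear();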
@@ -0,0 +1,10 @@
+import * as DelimiterTools from './list/tools';
+import { Skip } from './list/skip';
+import LRUCache from './cache/LRUCache';
+import MergeStream from './stream/MergeStream';
+
+export * as list from './list/exportAlgos';
+export { default as SortedSet } from './set/SortedSet';
+export const listTools = { DelimiterTools, Skip };
+export const cache = { LRUCache };
+export const stream = { MergeStream };
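This barrel module reproduces the algorithms sub-tree that index.js used to assemble by hand, so consumers reaching it through the export * as algorithms in index.ts see the same shape. For example:

    import { cache, listTools } from './lib/algos';

    const lru = new cache.LRUCache(100);
    lru.add('last-listing', []);
    // listTools.Skip is re-exported unchanged; its constructor arguments are
    // not visible in this diff, so they are left out here.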
@@ -1,7 +1,4 @@
-'use strict'; // eslint-disable-line strict
-
-const { FILTER_SKIP, SKIP_NONE } = require('./tools');
-
+import { FILTER_SKIP, SKIP_NONE } from './tools';
 // Use a heuristic to amortize the cost of JSON
 // serialization/deserialization only on largest metadata where the
 // potential for size reduction is high, considering the bulk of the
@@ -22,7 +19,12 @@ const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
 /**
  * Base class of listing extensions.
  */
-class Extension {
+export default class Extension {
+    parameters;
+    logger;
+    res?: any[];
+    keys: number;
+
     /**
      * This takes a list of parameters and a logger as the inputs.
      * Derivatives should have their own format regarding parameters.
@@ -51,14 +53,14 @@ class Extension {
      * heavy unused fields, or left untouched (depending on size
      * heuristics)
      */
-    trimMetadata(value) {
-        let ret = undefined;
+    trimMetadata(value: string): string {
+        let ret: any = undefined;
         if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
             try {
                 ret = JSON.parse(value);
                 delete ret.location;
                 ret = JSON.stringify(ret);
-            } catch (e) {
+            } catch (e: any) {
                 // Prefer returning an unfiltered data rather than
                 // stopping the service in case of parsing failure.
                 // The risk of this approach is a potential
@@ -66,7 +68,8 @@ class Extension {
                 // used by repd.
                 this.logger.warn(
                     'Could not parse Object Metadata while listing',
-                    { err: e.toString() });
+                    { err: e.toString() }
+                );
             }
         }
         return ret || value;
@@ -96,7 +99,7 @@ class Extension {
      * = 0: entry is accepted but not included (skipping)
      * < 0: entry is not accepted, listing should finish
      */
-    filter(entry) {
+    filter(entry): number {
         return entry ? FILTER_SKIP : FILTER_SKIP;
     }
 
@@ -105,20 +108,18 @@ class Extension {
      * because it is skipping a range of delimited keys or a range of specific
      * version when doing master version listing.
      *
-     * @return {string} - the insight: a common prefix or a master key,
+     * @return the insight: a common prefix or a master key,
      *                    or SKIP_NONE if there is no insight
      */
-    skipping() {
+    skipping(): string | undefined {
         return SKIP_NONE;
     }
 
     /**
      * Get the listing resutls. Format depends on derivatives' specific logic.
-     * @return {Array} - The listed elements
+     * @return The listed elements
      */
     result() {
         return this.res;
     }
 }
-
-module.exports.default = Extension;
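
To make the base-class contract concrete, here is a minimal hypothetical derivative (not part of this changeset) that accepts every entry and collects only its key; the import paths are the relative ones used elsewhere in this diff.

    import Extension from './Extension';
    import { FILTER_ACCEPT } from './tools';

    class KeysOnly extends Extension {
        constructor(parameters, logger) {
            super(parameters, logger);
            this.res = [];
        }

        // Accept every entry (> 0) and keep only its key in the result.
        filter(entry): number {
            this.res!.push(entry.key);
            return FILTER_ACCEPT;
        }
    }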
@@ -1,9 +1,12 @@
-'use strict'; // eslint-disable-line strict
-
-const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
-    FILTER_END, FILTER_ACCEPT } = require('./tools');
+import {
+    inc,
+    checkLimit,
+    listingParamsMasterKeysV0ToV1,
+    FILTER_END,
+    FILTER_ACCEPT,
+} from './tools';
 const DEFAULT_MAX_KEYS = 1000;
-const VSConst = require('../../versioning/constants').VersioningConstants;
+import { VersioningConstants as VSConst } from '../../versioning/constants';
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
 
 function numberDefault(num, defaultNum) {
@@ -14,7 +17,22 @@ function numberDefault(num, defaultNum) {
 /**
  * Class for the MultipartUploads extension
  */
-class MultipartUploads {
+export class MultipartUploads {
+    params
+    vFormat
+    CommonPrefixes
+    Uploads
+    IsTruncated
+    NextKeyMarker
+    NextUploadIdMarker
+    prefixLength
+    queryPrefixLength
+    keys
+    maxKeys
+    delimiter
+    splitter
+    logger
+
     /**
      * Constructor of the extension
      * Init and check parameters
@@ -39,7 +57,9 @@ class MultipartUploads {
         this.splitter = params.splitter;
         this.logger = logger;
 
-        Object.assign(this, {
+        Object.assign(
+            this,
+            {
             [BucketVersioningKeyFormat.v0]: {
                 genMDParams: this.genMDParamsV0,
                 getObjectKey: this.getObjectKeyV0,
@@ -48,13 +68,15 @@ class MultipartUploads {
                 genMDParams: this.genMDParamsV1,
                 getObjectKey: this.getObjectKeyV1,
             },
-        }[this.vFormat]);
+            }[this.vFormat]
+        );
     }
 
     genMDParamsV0() {
         const params = {};
         if (this.params.keyMarker) {
-            params.gt = `overview${this.params.splitter}` +
+            params.gt =
+                `overview${this.params.splitter}` +
                 `${this.params.keyMarker}${this.params.splitter}`;
             if (this.params.uploadIdMarker) {
                 params.gt += `${this.params.uploadIdMarker}`;
@@ -147,14 +169,20 @@ class MultipartUploads {
         if (this.delimiter) {
             const mpuPrefixSlice = `overview${this.splitter}`.length;
             const mpuKey = key.slice(mpuPrefixSlice);
-            const commonPrefixIndex = mpuKey.indexOf(this.delimiter,
-                this.queryPrefixLength);
+            const commonPrefixIndex = mpuKey.indexOf(
+                this.delimiter,
+                this.queryPrefixLength
+            );
 
             if (commonPrefixIndex === -1) {
                 this.addUpload(value);
             } else {
-                this.addCommonPrefix(mpuKey.substring(0,
-                    commonPrefixIndex + this.delimiter.length));
+                this.addCommonPrefix(
+                    mpuKey.substring(
+                        0,
+                        commonPrefixIndex + this.delimiter.length
+                    )
+                );
             }
         } else {
             this.addUpload(value);
@@ -182,7 +210,3 @@ class MultipartUploads {
         };
     }
 }
-
-module.exports = {
-    MultipartUploads,
-};
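
A worked trace of genMDParamsV0 above, with hypothetical marker values; it shows how the two markers concatenate into the raw `gt` bound on the overview keys.

    const splitter = '..';            // hypothetical splitter value
    const keyMarker = 'photos';
    const uploadIdMarker = '42';
    let gt = `overview${splitter}` + `${keyMarker}${splitter}`;
    gt += `${uploadIdMarker}`;
    // gt === 'overview..photos..42': listing resumes strictly after this key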
@@ -1,14 +1,17 @@
-'use strict'; // eslint-disable-line strict
-
-const Extension = require('./Extension').default;
-
-const { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
+import Extension from './Extension';
+import { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } from './tools';
 
 const DEFAULT_MAX_KEYS = 10000;
 
 /**
  * Class of an extension doing the simple listing
  */
-class List extends Extension {
+export class List extends Extension {
+    maxKeys: number;
+    filterKey;
+    filterKeyStartsWith;
+    res: any[];
+
     /**
      * Constructor
      * Set the logger and the res
@@ -30,15 +33,17 @@ class List extends Extension {
     }
 
     genMDParams() {
-        const params = this.parameters ? {
+        const params = this.parameters
+            ? {
             gt: this.parameters.gt,
             gte: this.parameters.gte || this.parameters.start,
             lt: this.parameters.lt,
             lte: this.parameters.lte || this.parameters.end,
             keys: this.parameters.keys,
             values: this.parameters.values,
-        } : {};
-        Object.keys(params).forEach(key => {
+            }
+            : {};
+        Object.keys(params).forEach((key) => {
             if (params[key] === null || params[key] === undefined) {
                 delete params[key];
             }
@@ -53,29 +58,30 @@ class List extends Extension {
      *
      * @return {Boolean} Returns true if matches, else false.
      */
-    customFilter(value) {
-        let _value;
+    customFilter(value: string): boolean {
+        let _value: any;
         try {
             _value = JSON.parse(value);
-        } catch (e) {
+        } catch (e: any) {
             // Prefer returning an unfiltered data rather than
             // stopping the service in case of parsing failure.
             // The risk of this approach is a potential
             // reproduction of MD-692, where too much memory is
             // used by repd.
-            this.logger.warn(
-                'Could not parse Object Metadata while listing',
-                { err: e.toString() });
+            this.logger.warn('Could not parse Object Metadata while listing', {
+                err: e.toString(),
+            });
             return false;
         }
         if (_value.customAttributes !== undefined) {
             for (const key of Object.keys(_value.customAttributes)) {
-                if (this.filterKey !== undefined &&
-                    key === this.filterKey) {
+                if (this.filterKey !== undefined && key === this.filterKey) {
                     return true;
                 }
-                if (this.filterKeyStartsWith !== undefined &&
-                    key.startsWith(this.filterKeyStartsWith)) {
+                if (
+                    this.filterKeyStartsWith !== undefined &&
+                    key.startsWith(this.filterKeyStartsWith)
+                ) {
                     return true;
                 }
             }
@@ -90,15 +96,17 @@ class List extends Extension {
      * @return {number} - > 0 : continue listing
      *                    < 0 : listing done
      */
-    filter(elem) {
+    filter(elem): number {
         // Check first in case of maxkeys <= 0
         if (this.keys >= this.maxKeys) {
             return FILTER_END;
         }
-        if ((this.filterKey !== undefined ||
+        if (
+            (this.filterKey !== undefined ||
                 this.filterKeyStartsWith !== undefined) &&
             typeof elem === 'object' &&
-            !this.customFilter(elem.value)) {
+            !this.customFilter(elem.value)
+        ) {
             return FILTER_SKIP;
         }
         if (typeof elem === 'object') {
@@ -121,7 +129,3 @@ class List extends Extension {
         return this.res;
     }
 }
-
-module.exports = {
-    List,
-};
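
A usage sketch of customFilter above; the logger, the metadata blob and the attribute name are hypothetical, and it assumes the constructor wires filterKeyStartsWith from its parameters, as the class fields suggest.

    import { List } from './basic';

    const logger = console as any;    // stand-in logger for the sketch
    const lister = new List({ filterKeyStartsWith: 'x-amz-meta-' }, logger);
    const blob = JSON.stringify({
        customAttributes: { 'x-amz-meta-color': 'blue' },
    });
    lister.customFilter(blob); // true: one attribute key starts with the prefix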
@@ -1,9 +1,12 @@
-'use strict'; // eslint-disable-line strict
-
-const Extension = require('./Extension').default;
-const { inc, listingParamsMasterKeysV0ToV1,
-    FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
-const VSConst = require('../../versioning/constants').VersioningConstants;
+import Extension from './Extension';
+import {
+    inc,
+    listingParamsMasterKeysV0ToV1,
+    FILTER_END,
+    FILTER_ACCEPT,
+    FILTER_SKIP,
+} from './tools';
+import { VersioningConstants as VSConst } from '../../versioning/constants';
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
 
 /**
@@ -14,7 +17,11 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
  * @param {Number} delimiterIndex - 'folder' index in the path
  * @return {String} - CommonPrefix
  */
-function getCommonPrefix(key, delimiter, delimiterIndex) {
+function getCommonPrefix(
+    key: string,
+    delimiter: string,
+    delimiterIndex: number
+): string {
     return key.substring(0, delimiterIndex + delimiter.length);
 }
 
@@ -30,7 +37,25 @@ function getCommonPrefix(key, delimiter, delimiterIndex) {
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
-class Delimiter extends Extension {
+export class Delimiter extends Extension {
+    CommonPrefixes: string[];
+    Contents: string[];
+    IsTruncated: boolean;
+    NextMarker?: string;
+    keys: number;
+    delimiter?: string;
+    prefix?: string;
+    maxKeys: number;
+    marker;
+    startAfter;
+    continuationToken;
+    alphabeticalOrder;
+    vFormat;
+    NextContinuationToken;
+    startMarker;
+    continueMarker;
+    nextContinueMarker;
+
     /**
      * Create a new Delimiter instance
      * @constructor
@@ -58,6 +83,7 @@ class Delimiter extends Extension {
     constructor(parameters, logger, vFormat) {
         super(parameters, logger);
         // original listing parameters
+        this.keys = 0;
         this.delimiter = parameters.delimiter;
         this.prefix = parameters.prefix;
         this.marker = parameters.marker;
@@ -65,8 +91,9 @@ class Delimiter extends Extension {
         this.startAfter = parameters.startAfter;
         this.continuationToken = parameters.continuationToken;
         this.alphabeticalOrder =
-            typeof parameters.alphabeticalOrder !== 'undefined' ?
-                parameters.alphabeticalOrder : true;
+            typeof parameters.alphabeticalOrder !== 'undefined'
+                ? parameters.alphabeticalOrder
+                : true;
 
         this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
         // results
@@ -79,21 +106,28 @@ class Delimiter extends Extension {
 
         this.startMarker = parameters.v2 ? 'startAfter' : 'marker';
         this.continueMarker = parameters.v2 ? 'continuationToken' : 'marker';
-        this.nextContinueMarker = parameters.v2 ?
-            'NextContinuationToken' : 'NextMarker';
+        this.nextContinueMarker = parameters.v2
+            ? 'NextContinuationToken'
+            : 'NextMarker';
 
-        if (this.delimiter !== undefined &&
+        if (
+            this.delimiter !== undefined &&
             this[this.nextContinueMarker] !== undefined &&
-            this[this.nextContinueMarker].startsWith(this.prefix || '')) {
-            const nextDelimiterIndex =
-                this[this.nextContinueMarker].indexOf(this.delimiter,
-                    this.prefix ? this.prefix.length : 0);
-            this[this.nextContinueMarker] =
-                this[this.nextContinueMarker].slice(0, nextDelimiterIndex +
-                    this.delimiter.length);
+            this[this.nextContinueMarker].startsWith(this.prefix || '')
+        ) {
+            const nextDelimiterIndex = this[this.nextContinueMarker].indexOf(
+                this.delimiter,
+                this.prefix ? this.prefix.length : 0
+            );
+            this[this.nextContinueMarker] = this[this.nextContinueMarker].slice(
+                0,
+                nextDelimiterIndex + this.delimiter.length
+            );
         }
 
-        Object.assign(this, {
+        Object.assign(
+            this,
+            {
             [BucketVersioningKeyFormat.v0]: {
                 genMDParams: this.genMDParamsV0,
                 getObjectKey: this.getObjectKeyV0,
@@ -104,11 +138,12 @@ class Delimiter extends Extension {
                 getObjectKey: this.getObjectKeyV1,
                 skipping: this.skippingV1,
             },
-        }[this.vFormat]);
+            }[this.vFormat]
+        );
     }
 
     genMDParamsV0() {
-        const params = {};
+        const params: { gte?: string; lt?: string; gt?: string } = {};
         if (this.prefix) {
             params.gte = this.prefix;
             params.lt = inc(this.prefix);
@@ -134,7 +169,7 @@ class Delimiter extends Extension {
      * final state of the result if it is the case
      * @return {Boolean} - indicates if the iteration has to stop
      */
-    _reachedMaxKeys() {
+    _reachedMaxKeys(): boolean {
         if (this.keys >= this.maxKeys) {
             // In cases of maxKeys <= 0 -> IsTruncated = false
             this.IsTruncated = this.maxKeys > 0;
@@ -151,7 +186,7 @@ class Delimiter extends Extension {
      * @param {String} value - The value of the key
      * @return {number} - indicates if iteration should continue
      */
-    addContents(key, value) {
+    addContents(key: string, value: string): number {
         if (this._reachedMaxKeys()) {
             return FILTER_END;
         }
@@ -161,11 +196,11 @@ class Delimiter extends Extension {
         return FILTER_ACCEPT;
     }
 
-    getObjectKeyV0(obj) {
+    getObjectKeyV0(obj: { key: string }) {
         return obj.key;
     }
 
-    getObjectKeyV1(obj) {
+    getObjectKeyV1(obj: { key: string }) {
         return obj.key.slice(DbPrefixes.Master.length);
     }
 
@@ -180,13 +215,15 @@ class Delimiter extends Extension {
      * @param {String} obj.value - The value of the element
      * @return {number} - indicates if iteration should continue
      */
-    filter(obj) {
+    filter(obj: { key: string; value: string }): number {
         const key = this.getObjectKey(obj);
         const value = obj.value;
-        if ((this.prefix && !key.startsWith(this.prefix))
-            || (this.alphabeticalOrder
-                && typeof this[this.nextContinueMarker] === 'string'
-                && key <= this[this.nextContinueMarker])) {
+        if (
+            (this.prefix && !key.startsWith(this.prefix)) ||
+            (this.alphabeticalOrder &&
+                typeof this[this.nextContinueMarker] === 'string' &&
+                key <= this[this.nextContinueMarker])
+        ) {
             return FILTER_SKIP;
         }
         if (this.delimiter) {
@@ -206,10 +243,12 @@ class Delimiter extends Extension {
      * @param {Number} index - after prefix starting point
      * @return {Boolean} - indicates if iteration should continue
      */
-    addCommonPrefix(key, index) {
+    addCommonPrefix(key: string, index: number): boolean {
         const commonPrefix = getCommonPrefix(key, this.delimiter, index);
-        if (this.CommonPrefixes.indexOf(commonPrefix) === -1
-            && this[this.nextContinueMarker] !== commonPrefix) {
+        if (
+            this.CommonPrefixes.indexOf(commonPrefix) === -1 &&
+            this[this.nextContinueMarker] !== commonPrefix
+        ) {
             if (this._reachedMaxKeys()) {
                 return FILTER_END;
             }
@@ -228,7 +267,7 @@ class Delimiter extends Extension {
      * @return {string} - the present range (NextMarker) if repd believes
      *                    that it's enough and should move on
      */
-    skippingV0() {
+    skippingV0(): string {
         return this[this.nextContinueMarker];
     }
 
@@ -239,7 +278,7 @@ class Delimiter extends Extension {
      * @return {string} - the present range (NextMarker) if repd believes
      *                    that it's enough and should move on
      */
-    skippingV1() {
+    skippingV1(): string {
         return DbPrefixes.Master + this[this.nextContinueMarker];
     }
 
@@ -261,14 +300,17 @@ class Delimiter extends Extension {
             Delimiter: this.delimiter,
         };
         if (this.parameters.v2) {
+            //
             result.NextContinuationToken = this.IsTruncated
-                ? this.NextContinuationToken : undefined;
+                ? this.NextContinuationToken
+                : undefined;
         } else {
-            result.NextMarker = (this.IsTruncated && this.delimiter)
-                ? this.NextMarker : undefined;
+            //
+            result.NextMarker =
+                this.IsTruncated && this.delimiter
+                    ? this.NextMarker
+                    : undefined;
         }
         return result;
     }
 }
-
-module.exports = { Delimiter };
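
A worked trace of the NextMarker trimming in the constructor above (all values hypothetical): when the marker starts with the prefix and contains the delimiter, it is cut back to the enclosing common prefix so the whole subtree can be skipped at once.

    const prefix = 'photos/';
    const delimiter = '/';
    let nextMarker = 'photos/2021/img.jpg';
    const nextDelimiterIndex = nextMarker.indexOf(delimiter, prefix.length);
    nextMarker = nextMarker.slice(0, nextDelimiterIndex + delimiter.length);
    // nextMarker === 'photos/2021/'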
@@ -1,10 +1,8 @@
-'use strict'; // eslint-disable-line strict
-
-const Delimiter = require('./delimiter').Delimiter;
-const Version = require('../../versioning/Version').Version;
-const VSConst = require('../../versioning/constants').VersioningConstants;
+import { Delimiter } from './delimiter';
+import { Version } from '../../versioning/Version';
+import { VersioningConstants as VSConst } from '../../versioning/constants';
 const { BucketVersioningKeyFormat } = VSConst;
-const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
+import { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } from './tools';
 
 const VID_SEP = VSConst.VersionId.Separator;
 const { DbPrefixes } = VSConst;
@@ -13,7 +11,11 @@ const { DbPrefixes } = VSConst;
 * Handle object listing with parameters. This extends the base class Delimiter
 * to return the raw master versions of existing objects.
 */
-class DelimiterMaster extends Delimiter {
+export class DelimiterMaster extends Delimiter {
+    prvKey;
+    prvPHDKey;
+    inReplayPrefix;
+
     /**
      * Delimiter listing of master versions.
      * @param {Object} parameters - listing parameters
@@ -34,7 +36,9 @@ class DelimiterMaster extends Delimiter {
         this.prvPHDKey = undefined;
         this.inReplayPrefix = false;
 
-        Object.assign(this, {
+        Object.assign(
+            this,
+            {
             [BucketVersioningKeyFormat.v0]: {
                 filter: this.filterV0,
                 skipping: this.skippingV0,
@@ -43,7 +47,8 @@ class DelimiterMaster extends Delimiter {
                 filter: this.filterV1,
                 skipping: this.skippingV1,
             },
-        }[this.vFormat]);
+            }[this.vFormat]
+        );
     }
 
     /**
@@ -58,7 +63,7 @@ class DelimiterMaster extends Delimiter {
      * @param {String} obj.value - The value of the element
      * @return {number} - indicates if iteration should continue
      */
-    filterV0(obj) {
+    filterV0(obj: { key: string; value: string }): number {
         let key = obj.key;
         const value = obj.value;
 
@@ -70,9 +75,11 @@ class DelimiterMaster extends Delimiter {
 
         /* Skip keys not starting with the prefix or not alphabetically
         * ordered. */
-        if ((this.prefix && !key.startsWith(this.prefix))
-            || (typeof this[this.nextContinueMarker] === 'string' &&
-                key <= this[this.nextContinueMarker])) {
+        if (
+            (this.prefix && !key.startsWith(this.prefix)) ||
+            (typeof this[this.nextContinueMarker] === 'string' &&
+                key <= this[this.nextContinueMarker])
+        ) {
             return FILTER_SKIP;
         }
 
@@ -95,9 +102,12 @@ class DelimiterMaster extends Delimiter {
          * NextMarker to the common prefix instead of the whole key
          * value. (TODO: remove this test once ZENKO-1048 is fixed)
          * */
-        if (key === this.prvKey || key === this[this.nextContinueMarker] ||
+        if (
+            key === this.prvKey ||
+            key === this[this.nextContinueMarker] ||
             (this.delimiter &&
-                key.startsWith(this[this.nextContinueMarker]))) {
+                key.startsWith(this[this.nextContinueMarker]))
+        ) {
             /* master version already filtered */
             return FILTER_SKIP;
         }
@@ -155,7 +165,7 @@ class DelimiterMaster extends Delimiter {
      * @param {String} obj.value - The value of the element
      * @return {number} - indicates if iteration should continue
      */
-    filterV1(obj) {
+    filterV1(obj: { key: string; value: string }): number {
         // Filtering master keys in v1 is simply listing the master
         // keys, as the state of version keys do not change the
         // result, so we can use Delimiter method directly.
@@ -167,8 +177,9 @@ class DelimiterMaster extends Delimiter {
         // next marker or next continuation token:
         // - foo/ : skipping foo/
         // - foo  : skipping foo.
-        const index = this[this.nextContinueMarker].
-            lastIndexOf(this.delimiter);
+        const index = this[this.nextContinueMarker].lastIndexOf(
+            this.delimiter
+        );
         if (index === this[this.nextContinueMarker].length - 1) {
             return this[this.nextContinueMarker];
         }
@@ -192,5 +203,3 @@ class DelimiterMaster extends Delimiter {
         return DbPrefixes.Master + skipTo;
     }
 }
-
-module.exports = { DelimiterMaster };
@@ -1,10 +1,13 @@
-'use strict'; // eslint-disable-line strict
-
-const Delimiter = require('./delimiter').Delimiter;
-const Version = require('../../versioning/Version').Version;
-const VSConst = require('../../versioning/constants').VersioningConstants;
-const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
-    require('./tools');
+import { Delimiter } from './delimiter';
+import { Version } from '../../versioning/Version';
+import { VersioningConstants as VSConst } from '../../versioning/constants';
+import {
+    inc,
+    FILTER_END,
+    FILTER_ACCEPT,
+    FILTER_SKIP,
+    SKIP_NONE,
+} from './tools';
 
 const VID_SEP = VSConst.VersionId.Separator;
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
@@ -21,7 +24,16 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
-class DelimiterVersions extends Delimiter {
+export class DelimiterVersions extends Delimiter {
+    CommonPrefixes: string[];
+    Contents: string[];
+    IsTruncated: boolean;
+    NextMarker?: string;
+    keys: number;
+    delimiter?: string;
+    prefix?: string;
+    maxKeys: number;
+
     constructor(parameters, logger, vFormat) {
         super(parameters, logger, vFormat);
         // specific to version listing
@@ -35,7 +47,9 @@ class DelimiterVersions extends Delimiter {
         this.NextVersionIdMarker = undefined;
         this.inReplayPrefix = false;
 
-        Object.assign(this, {
+        Object.assign(
+            this,
+            {
             [BucketVersioningKeyFormat.v0]: {
                 genMDParams: this.genMDParamsV0,
                 filter: this.filterV0,
@@ -46,7 +60,8 @@ class DelimiterVersions extends Delimiter {
                 filter: this.filterV1,
                 skipping: this.skippingV1,
             },
-        }[this.vFormat]);
+            }[this.vFormat]
+        );
     }
 
     genMDParamsV0() {
@@ -63,9 +78,10 @@ class DelimiterVersions extends Delimiter {
         if (this.parameters.versionIdMarker) {
             // versionIdMarker should always come with keyMarker
             // but may not be the other way around
-            params.gt = this.parameters.keyMarker
-                + VID_SEP
-                + this.parameters.versionIdMarker;
+            params.gt =
+                this.parameters.keyMarker +
+                VID_SEP +
+                this.parameters.versionIdMarker;
         } else {
             params.gt = inc(this.parameters.keyMarker + VID_SEP);
         }
@@ -89,20 +105,27 @@ class DelimiterVersions extends Delimiter {
             params[1].lt = inc(DbPrefixes.Version); // stop after the last version key
         }
         if (this.parameters.keyMarker) {
-            if (params[1].gte <= DbPrefixes.Version + this.parameters.keyMarker) {
+            if (
+                params[1].gte <=
+                DbPrefixes.Version + this.parameters.keyMarker
+            ) {
                 delete params[0].gte;
                 delete params[1].gte;
-                params[0].gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP);
+                params[0].gt =
+                    DbPrefixes.Master +
+                    inc(this.parameters.keyMarker + VID_SEP);
                 if (this.parameters.versionIdMarker) {
                     // versionIdMarker should always come with keyMarker
                     // but may not be the other way around
-                    params[1].gt = DbPrefixes.Version
-                        + this.parameters.keyMarker
-                        + VID_SEP
-                        + this.parameters.versionIdMarker;
+                    params[1].gt =
+                        DbPrefixes.Version +
+                        this.parameters.keyMarker +
+                        VID_SEP +
+                        this.parameters.versionIdMarker;
                 } else {
-                    params[1].gt = DbPrefixes.Version
-                        + inc(this.parameters.keyMarker + VID_SEP);
+                    params[1].gt =
+                        DbPrefixes.Version +
+                        inc(this.parameters.keyMarker + VID_SEP);
                 }
             }
         }
@@ -120,7 +143,7 @@ class DelimiterVersions extends Delimiter {
      *   * -1 if master key < version key
      *   * 1 if master key > version key
      */
-    compareObjects(masterObj, versionObj) {
+    compareObjects(masterObj, versionObj): number {
         const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
         const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
         return masterKey < versionKey ? -1 : 1;
@@ -136,7 +159,11 @@ class DelimiterVersions extends Delimiter {
      * @param {String} obj.value - The value of the key
      * @return {Boolean} - indicates if iteration should continue
      */
-    addContents(obj) {
+    addContents(obj: {
+        key: string;
+        versionId: string;
+        value: string;
+    }): boolean {
         if (this._reachedMaxKeys()) {
             return FILTER_END;
         }
@@ -163,7 +190,7 @@ class DelimiterVersions extends Delimiter {
      * @param {String} obj.value - The value of the element
      * @return {number} - indicates if iteration should continue
      */
-    filterV0(obj) {
+    filterV0(obj: { key: string; value: string }): number {
         if (obj.key.startsWith(DbPrefixes.Replay)) {
             this.inReplayPrefix = true;
             return FILTER_SKIP;
@@ -189,12 +216,14 @@ class DelimiterVersions extends Delimiter {
      * @param {String} obj.value - The value of the element
      * @return {number} - indicates if iteration should continue
      */
-    filterV1(obj) {
+    filterV1(obj: { key: string; value: string }): number {
         // this function receives both M and V keys, but their prefix
         // length is the same so we can remove their prefix without
         // looking at the type of key
-        return this.filterCommon(obj.key.slice(DbPrefixes.Master.length),
-            obj.value);
+        return this.filterCommon(
+            obj.key.slice(DbPrefixes.Master.length),
+            obj.value
+        );
     }
 
     filterCommon(key, value) {
@@ -207,14 +236,16 @@ class DelimiterVersions extends Delimiter {
         if (versionIdIndex < 0) {
             nonversionedKey = key;
             this.masterKey = key;
-            this.masterVersionId =
-                Version.from(value).getVersionId() || 'null';
+            this.masterVersionId = Version.from(value).getVersionId() || 'null';
             versionId = this.masterVersionId;
         } else {
             nonversionedKey = key.slice(0, versionIdIndex);
             versionId = key.slice(versionIdIndex + 1);
             // skip a version key if it is the master version
-            if (this.masterKey === nonversionedKey && this.masterVersionId === versionId) {
+            if (
+                this.masterKey === nonversionedKey &&
+                this.masterVersionId === versionId
+            ) {
                 return FILTER_SKIP;
             }
             this.masterKey = undefined;
@@ -222,7 +253,10 @@ class DelimiterVersions extends Delimiter {
         }
         if (this.delimiter) {
             const baseIndex = this.prefix ? this.prefix.length : 0;
-            const delimiterIndex = nonversionedKey.indexOf(this.delimiter, baseIndex);
+            const delimiterIndex = nonversionedKey.indexOf(
+                this.delimiter,
+                baseIndex
+            );
             if (delimiterIndex >= 0) {
                 return this.addCommonPrefix(nonversionedKey, delimiterIndex);
             }
@@ -249,8 +283,7 @@ class DelimiterVersions extends Delimiter {
             return SKIP_NONE;
         }
         // skip to the same object key in both M and V range listings
-        return [DbPrefixes.Master + skipV0,
-            DbPrefixes.Version + skipV0];
+        return [DbPrefixes.Master + skipV0, DbPrefixes.Version + skipV0];
     }
 
     /**
@@ -269,11 +302,10 @@ class DelimiterVersions extends Delimiter {
             Versions: this.Contents,
             IsTruncated: this.IsTruncated,
             NextKeyMarker: this.IsTruncated ? this.NextMarker : undefined,
-            NextVersionIdMarker: this.IsTruncated ?
-                this.NextVersionIdMarker : undefined,
+            NextVersionIdMarker: this.IsTruncated
+                ? this.NextVersionIdMarker
+                : undefined,
             Delimiter: this.delimiter,
         };
     }
 }
-
-module.exports = { DelimiterVersions };
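
A worked trace of the V0 marker handling in genMDParamsV0 above; the separator and marker values are hypothetical stand-ins (the real separator comes from VSConst.VersionId.Separator).

    import { inc } from './tools';

    const VID_SEP = '\u0000';         // assumed separator, for illustration
    const keyMarker = 'doc';
    const versionIdMarker = '0123';
    // With a versionIdMarker, resume right after that exact version:
    const gtExact = keyMarker + VID_SEP + versionIdMarker;
    // Without one, resume past every version of the marker key:
    const gtAll = inc(keyMarker + VID_SEP);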
@@ -1,9 +0,0 @@
-module.exports = {
-    Basic: require('./basic').List,
-    Delimiter: require('./delimiter').Delimiter,
-    DelimiterVersions: require('./delimiterVersions')
-        .DelimiterVersions,
-    DelimiterMaster: require('./delimiterMaster')
-        .DelimiterMaster,
-    MPU: require('./MPU').MultipartUploads,
-};
@@ -0,0 +1,5 @@
+export { List as Basic } from './basic';
+export { Delimiter } from './delimiter';
+export { DelimiterVersions } from './delimiterVersions';
+export { DelimiterMaster } from './delimiterMaster';
+export { MultipartUploads as MPU } from './MPU';
@@ -1,21 +1,25 @@
-const assert = require('assert');
-
-const { FILTER_END, FILTER_SKIP, SKIP_NONE } = require('./tools');
+import assert from 'assert';
+import { FILTER_END, FILTER_SKIP, SKIP_NONE } from './tools';
 
 const MAX_STREAK_LENGTH = 100;
 
 /**
  * Handle the filtering and the skip mechanism of a listing result.
  */
-class Skip {
+export class Skip {
+    extension;
+    gteParams;
+    listingEndCb;
+    skipRangeCb;
+    streakLength;
+
     /**
      * @param {Object} params - skip parameters
      * @param {Object} params.extension - delimiter extension used (required)
      * @param {String} params.gte - current range gte (greater than or
      *                              equal) used by the client code
      */
-    constructor(params) {
+    constructor(params: { extension: any; gte: string }) {
         assert(params.extension);
 
         this.extension = params.extension;
@@ -47,7 +51,7 @@ class Skip {
      * This function calls the listing end or the skip range callbacks if
      * needed.
      */
-    filter(entry) {
+    filter(entry): void {
         assert(this.listingEndCb);
         assert(this.skipRangeCb);
 
@@ -56,8 +60,10 @@ class Skip {
 
         if (filteringResult === FILTER_END) {
             this.listingEndCb();
-        } else if (filteringResult === FILTER_SKIP
-            && skippingRange !== SKIP_NONE) {
+        } else if (
+            filteringResult === FILTER_SKIP &&
+            skippingRange !== SKIP_NONE
+        ) {
             if (++this.streakLength >= MAX_STREAK_LENGTH) {
                 const newRange = this._inc(skippingRange);
 
@@ -73,7 +79,7 @@ class Skip {
         }
     }
 
-    _inc(str) {
+    _inc(str: string) {
         if (!str) {
             return str;
         }
@@ -83,6 +89,3 @@ class Skip {
         return `${str.slice(0, str.length - 1)}${lastCharNewValue}`;
     }
 }
-
-
-module.exports = Skip;
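
A wiring sketch for Skip above; the extension instance, the gte value and the callback bodies are hypothetical. The point shown is the streak heuristic: after MAX_STREAK_LENGTH consecutive skips, the listing is restarted at an incremented range instead of filtering entry by entry.

    import { Skip } from './skip';
    import { Delimiter } from './delimiter';

    const extension = new Delimiter({ delimiter: '/' }, console as any);
    const skip = new Skip({ extension, gte: 'photos/' });
    // The callbacks are plain class fields above; assign them before filtering.
    skip.listingEndCb = () => { /* stop the ongoing listing */ };
    skip.skipRangeCb = (newRange) => { /* restart the listing at newRange */ };
    const entries: any[] = [];        // raw listing entries would go here
    entries.forEach((entry) => skip.filter(entry));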
@@ -1,10 +1,11 @@
-const { DbPrefixes } = require('../../versioning/constants').VersioningConstants;
+import { VersioningConstants as VSConst } from '../../versioning/constants';
+const { DbPrefixes } = VSConst
 
 // constants for extensions
-const SKIP_NONE = undefined; // to be inline with the values of NextMarker
-const FILTER_ACCEPT = 1;
-const FILTER_SKIP = 0;
-const FILTER_END = -1;
+export const SKIP_NONE = undefined; // to be inline with the values of NextMarker
+export const FILTER_ACCEPT = 1;
+export const FILTER_SKIP = 0;
+export const FILTER_END = -1;
 
 /**
  * This function check if number is valid
@@ -15,8 +16,8 @@ const FILTER_END = -1;
 * @param {Number} limit - The limit to respect
 * @return {Number} - The parsed number || limit
 */
-function checkLimit(number, limit) {
-    const parsed = Number.parseInt(number, 10);
+export function checkLimit(number: number, limit: number): number {
+    const parsed = Number.parseInt(number, 10)
     const valid = !Number.isNaN(parsed) && (!limit || parsed <= limit);
     return valid ? parsed : limit;
 }
@@ -28,7 +29,7 @@ function checkLimit(number, limit) {
 * @return {string} - the incremented string
 *                    or the input if it is not valid
 */
-function inc(str) {
+export function inc(str: string): string {
     return str ? (str.slice(0, str.length - 1) +
         String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str;
 }
@@ -40,7 +41,7 @@ function inc(str) {
 * @param {object} v0params - listing parameters for v0 format
 * @return {object} - listing parameters for v1 format
 */
-function listingParamsMasterKeysV0ToV1(v0params) {
+export function listingParamsMasterKeysV0ToV1(v0params: any): any {
    const v1params = Object.assign({}, v0params);
    if (v0params.gt !== undefined) {
        v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;
@@ -58,13 +59,3 @@ function listingParamsMasterKeysV0ToV1(v0params) {
     }
     return v1params;
 }
-
-module.exports = {
-    checkLimit,
-    inc,
-    listingParamsMasterKeysV0ToV1,
-    SKIP_NONE,
-    FILTER_END,
-    FILTER_SKIP,
-    FILTER_ACCEPT,
-};
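
Worked examples for the helpers above (inputs hypothetical):

    import { inc, checkLimit } from './tools';

    inc('foo');             // 'fop': last character code bumped by one
    inc('');                // '': falsy input comes back unchanged
    checkLimit(500, 1000);  // 500: parsed and within the limit
    checkLimit(5000, 1000); // 1000: over the limit, capped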
@@ -1,4 +1,4 @@
-function indexOf(arr, value) {
+export function indexOf<T>(arr: T[], value: T) {
     if (!arr.length) {
         return -1;
     }
@@ -22,10 +22,10 @@ function indexOf(arr, value) {
     return -1;
 }
 
-function indexAtOrBelow(arr, value) {
-    let i;
-    let lo;
-    let hi;
+export function indexAtOrBelow<T>(arr: T[], value: T) {
+    let i: number;
+    let lo: number;
+    let hi: number;
 
     if (!arr.length || arr[0] > value) {
         return -1;
@@ -52,7 +52,7 @@ function indexAtOrBelow(arr, value) {
 /*
  * perform symmetric diff in O(m + n)
  */
-function symDiff(k1, k2, v1, v2, cb) {
+export function symDiff(k1, k2, v1, v2, cb) {
     let i = 0;
     let j = 0;
     const n = k1.length;
@@ -79,9 +79,3 @@ function symDiff(k1, k2, v1, v2, cb) {
         j++;
     }
 }
-
-module.exports = {
-    indexOf,
-    indexAtOrBelow,
-    symDiff,
-};
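
Usage of the binary-search helpers above; both assume the input array is already sorted (note the arr[0] > value guard):

    import { indexOf, indexAtOrBelow } from './ArrayUtils';

    const keys = [2, 5, 9];
    indexOf(keys, 5);        // 1: exact match found by bisection
    indexOf(keys, 6);        // -1: value absent
    indexAtOrBelow(keys, 6); // 1: index of the largest key <= 6
    indexAtOrBelow(keys, 1); // -1: below the smallest key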
@@ -1,13 +1,12 @@
-const ArrayUtils = require('./ArrayUtils');
+import * as ArrayUtils from './ArrayUtils';
 
-class SortedSet {
-    constructor(obj) {
-        if (obj) {
-            this.keys = obj.keys;
-            this.values = obj.values;
-        } else {
-            this.clear();
-        }
+export default class SortedSet<Key, Value> {
+    keys: Key[];
+    values: Value[];
+
+    constructor(obj?: { keys: Key[]; values: Value[] }) {
+        this.keys = obj?.keys ?? [];
+        this.values = obj?.values ?? [];
     }
 
     clear() {
@@ -19,7 +18,7 @@ class SortedSet {
         return this.keys.length;
     }
 
-    set(key, value) {
+    set(key: Key, value: Value) {
         const index = ArrayUtils.indexAtOrBelow(this.keys, key);
         if (this.keys[index] === key) {
             this.values[index] = value;
@@ -29,17 +28,17 @@ class SortedSet {
         this.values.splice(index + 1, 0, value);
     }
 
-    isSet(key) {
+    isSet(key: Key) {
         const index = ArrayUtils.indexOf(this.keys, key);
         return index >= 0;
     }
 
-    get(key) {
+    get(key: Key) {
         const index = ArrayUtils.indexOf(this.keys, key);
         return index >= 0 ? this.values[index] : undefined;
     }
 
-    del(key) {
+    del(key: Key) {
         const index = ArrayUtils.indexOf(this.keys, key);
         if (index >= 0) {
             this.keys.splice(index, 1);
@@ -47,5 +46,3 @@ class SortedSet {
         }
     }
 }
-
-module.exports = SortedSet;
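
A usage sketch for the now-generic SortedSet above:

    import SortedSet from './SortedSet';

    const set = new SortedSet<string, number>();
    set.set('b', 2);
    set.set('a', 1);   // keys stay sorted on insert
    set.set('a', 10);  // an existing key has its value overwritten
    set.get('a');      // 10
    set.isSet('c');    // false
    set.del('b');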
@@ -1,7 +1,17 @@
-const stream = require('stream');
+import stream from 'stream';
 
-class MergeStream extends stream.Readable {
-    constructor(stream1, stream2, compare) {
+export default class MergeStream extends stream.Readable {
+    _compare: (a: any, b: any) => number;
+    _streams: [stream.Readable, stream.Readable];
+    _peekItems: [undefined | null, undefined | null];
+    _streamEof: [boolean, boolean];
+    _streamToResume: stream.Readable | null;
+
+    constructor(
+        stream1: stream.Readable,
+        stream2: stream.Readable,
+        compare: (a: any, b: any) => number
+    ) {
         super({ objectMode: true });
 
         this._compare = compare;
@@ -16,13 +26,13 @@ class MergeStream extends stream.Readable {
         this._streamEof = [false, false];
         this._streamToResume = null;
 
-        stream1.on('data', item => this._onItem(stream1, item, 0, 1));
+        stream1.on('data', (item) => this._onItem(stream1, item, 0, 1));
         stream1.once('end', () => this._onEnd(stream1, 0, 1));
-        stream1.once('error', err => this._onError(stream1, err, 0, 1));
+        stream1.once('error', (err) => this._onError(stream1, err, 0, 1));
 
-        stream2.on('data', item => this._onItem(stream2, item, 1, 0));
+        stream2.on('data', (item) => this._onItem(stream2, item, 1, 0));
         stream2.once('end', () => this._onEnd(stream2, 1, 0));
-        stream2.once('error', err => this._onError(stream2, err, 1, 0));
+        stream2.once('error', (err) => this._onError(stream2, err, 1, 0));
     }
 
     _read() {
@@ -41,7 +51,7 @@ class MergeStream extends stream.Readable {
         callback();
     }
 
-    _onItem(myStream, myItem, myIndex, otherIndex) {
+    _onItem(myStream: stream.Readable, myItem, myIndex, otherIndex) {
         this._peekItems[myIndex] = myItem;
         const otherItem = this._peekItems[otherIndex];
         if (otherItem === undefined) {
@@ -69,7 +79,7 @@ class MergeStream extends stream.Readable {
         return undefined;
     }
 
-    _onEnd(myStream, myIndex, otherIndex) {
+    _onEnd(myStream: stream.Readable, myIndex, otherIndex) {
         this._streamEof[myIndex] = true;
         if (this._peekItems[myIndex] === undefined) {
             this._peekItems[myIndex] = null;
@@ -94,7 +104,7 @@ class MergeStream extends stream.Readable {
         return otherStream.resume();
     }
 
-    _onError(myStream, err, myIndex, otherIndex) {
+    _onError(myStream: stream.Readable, err, myIndex, otherIndex) {
         myStream.destroy();
         if (this._streams[otherIndex]) {
             this._streams[otherIndex].destroy();
@@ -102,5 +112,3 @@ class MergeStream extends stream.Readable {
         this.emit('error', err);
     }
 }
-
-module.exports = MergeStream;
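
A usage sketch for MergeStream above; each input must already be sorted under the comparator, which the class applies to the two peeked heads:

    import stream from 'stream';
    import MergeStream from './MergeStream';

    const s1 = stream.Readable.from([1, 4, 7]);
    const s2 = stream.Readable.from([2, 3, 9]);
    const merged = new MergeStream(s1, s2, (a, b) => a - b);
    merged.on('data', (item) => console.log(item)); // 1 2 3 4 7 9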
@@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-
-const constants = require('../constants');
+import * as constants from '../constants';
 
 /**
  * Class containing requester's information received from Vault
@@ -8,9 +6,15 @@ const constants = require('../constants');
 * shortid, email, accountDisplayName and IAMdisplayName (if applicable)
 * @return {AuthInfo} an AuthInfo instance
 */
+export default class AuthInfo {
+    arn: string;
+    canonicalID: string;
+    shortid: string;
+    email: string;
+    accountDisplayName: string;
+    IAMdisplayName: string;
 
-class AuthInfo {
-    constructor(objectFromVault) {
+    constructor(objectFromVault: any) {
         // amazon resource name for IAM user (if applicable)
         this.arn = objectFromVault.arn;
         // account canonicalID
@@ -50,13 +54,10 @@ class AuthInfo {
         return this.canonicalID === constants.publicId;
     }
     isRequesterAServiceAccount() {
-        return this.canonicalID.startsWith(
-            `${constants.zenkoServiceAccount}/`);
+        return this.canonicalID.startsWith(`${constants.zenkoServiceAccount}/`);
     }
-    isRequesterThisServiceAccount(serviceName) {
-        return this.canonicalID ===
-            `${constants.zenkoServiceAccount}/${serviceName}`;
+    isRequesterThisServiceAccount(serviceName: string) {
+        const computedCanonicalID = `${constants.zenkoServiceAccount}/${serviceName}`;
+        return this.canonicalID === computedCanonicalID;
     }
 }
-
-module.exports = AuthInfo;
lib/auth/Vault.js → lib/auth/Vault.ts

@@ -1,16 +1,22 @@
-const errors = require('../errors');
-const AuthInfo = require('./AuthInfo');
+import { Logger } from 'werelogs';
+import errors from '../errors';
+import AuthInfo from './AuthInfo';

 /** vaultSignatureCb parses message from Vault and instantiates
- * @param {object} err - error from vault
- * @param {object} authInfo - info from vault
- * @param {object} log - log for request
- * @param {function} callback - callback to authCheck functions
- * @param {object} [streamingV4Params] - present if v4 signature;
+ * @param err - error from vault
+ * @param authInfo - info from vault
+ * @param log - log for request
+ * @param callback - callback to authCheck functions
+ * @param [streamingV4Params] - present if v4 signature;
  * items used to calculate signature on chunks if streaming auth
- * @return {undefined}
  */
-function vaultSignatureCb(err, authInfo, log, callback, streamingV4Params) {
+function vaultSignatureCb(
+    err: Error | null,
+    authInfo: { message: { body: any } },
+    log: Logger,
+    callback: (err: Error | null, data?: any, results?: any, params?: any) => void,
+    streamingV4Params?: any
+) {
     // vaultclient API guarantees that it returns:
     // - either `err`, an Error object with `code` and `message` properties set
     // - or `err == null` and `info` is an object with `message.code` and

@@ -24,11 +30,13 @@ function vaultSignatureCb(err, authInfo, log, callback, streamingV4Params) {
     const info = authInfo.message.body;
     const userInfo = new AuthInfo(info.userInfo);
     const authorizationResults = info.authorizationResults;
-    const auditLog = { accountDisplayName: userInfo.getAccountDisplayName() };
+    const auditLog: { accountDisplayName: string, IAMdisplayName?: string } =
+        { accountDisplayName: userInfo.getAccountDisplayName() };
     const iamDisplayName = userInfo.getIAMdisplayName();
     if (iamDisplayName) {
         auditLog.IAMdisplayName = iamDisplayName;
     }
+    // @ts-ignore
     log.addDefaultFields(auditLog);
     return callback(null, userInfo, authorizationResults, streamingV4Params);
 }

@@ -39,43 +47,63 @@
  * authentication backends.
  * @class Vault
  */
-class Vault {
+export default class Vault {
+    client: any;
+    implName: string;
+
     /**
      * @constructor
      * @param {object} client - authentication backend or vault client
      * @param {string} implName - implementation name for auth backend
      */
-    constructor(client, implName) {
+    constructor(client: any, implName: string) {
         this.client = client;
         this.implName = implName;
     }
     /**
      * authenticateV2Request
      *
-     * @param {string} params - the authentication parameters as returned by
+     * @param params - the authentication parameters as returned by
      * auth.extractParams
-     * @param {number} params.version - shall equal 2
-     * @param {string} params.data.accessKey - the user's accessKey
-     * @param {string} params.data.signatureFromRequest - the signature read
+     * @param params.version - shall equal 2
+     * @param params.data.accessKey - the user's accessKey
+     * @param params.data.signatureFromRequest - the signature read
      * from the request
-     * @param {string} params.data.stringToSign - the stringToSign
-     * @param {string} params.data.algo - the hashing algorithm used for the
+     * @param params.data.stringToSign - the stringToSign
+     * @param params.data.algo - the hashing algorithm used for the
      * signature
-     * @param {string} params.data.authType - the type of authentication (query
+     * @param params.data.authType - the type of authentication (query
      * or header)
-     * @param {string} params.data.signatureVersion - the version of the
+     * @param params.data.signatureVersion - the version of the
      * signature (AWS or AWS4)
-     * @param {number} [params.data.signatureAge] - the age of the signature in
+     * @param [params.data.signatureAge] - the age of the signature in
      * ms
-     * @param {string} params.data.log - the logger object
+     * @param params.data.log - the logger object
      * @param {RequestContext []} requestContexts - an array of RequestContext
      * instances which contain information for policy authorization check
-     * @param {function} callback - callback with either error or user info
-     * @returns {undefined}
+     * @param callback - callback with either error or user info
      */
-    authenticateV2Request(params, requestContexts, callback) {
+    authenticateV2Request(
+        params: {
+            version: 2;
+            log: Logger;
+            data: {
+                securityToken: string;
+                accessKey: string;
+                signatureFromRequest: string;
+                stringToSign: string;
+                algo: string;
+                authType: 'query' | 'header';
+                signatureVersion: string;
+                signatureAge?: number;
+                log: Logger;
+            };
+        },
+        requestContexts: any[],
+        callback: (err: Error | null, data?: any) => void
+    ) {
         params.log.debug('authenticating V2 request');
-        let serializedRCsArr;
+        let serializedRCsArr: any;
         if (requestContexts) {
             serializedRCsArr = requestContexts.map(rc => rc.serialize());
         }

@@ -85,44 +113,66 @@ class Vault {
             params.data.accessKey,
             {
                 algo: params.data.algo,
+                // @ts-ignore
                 reqUid: params.log.getSerializedUids(),
                 logger: params.log,
                 securityToken: params.data.securityToken,
                 requestContext: serializedRCsArr,
             },
-            (err, userInfo) => vaultSignatureCb(err, userInfo,
+            (err: Error | null, userInfo?: any) => vaultSignatureCb(err, userInfo,
                 params.log, callback),
         );
     }

     /** authenticateV4Request
-     * @param {object} params - the authentication parameters as returned by
+     * @param params - the authentication parameters as returned by
      * auth.extractParams
-     * @param {number} params.version - shall equal 4
-     * @param {string} params.data.log - the logger object
-     * @param {string} params.data.accessKey - the user's accessKey
-     * @param {string} params.data.signatureFromRequest - the signature read
+     * @param params.version - shall equal 4
+     * @param params.data.log - the logger object
+     * @param params.data.accessKey - the user's accessKey
+     * @param params.data.signatureFromRequest - the signature read
      * from the request
-     * @param {string} params.data.region - the AWS region
-     * @param {string} params.data.stringToSign - the stringToSign
-     * @param {string} params.data.scopeDate - the timespan to allow the request
-     * @param {string} params.data.authType - the type of authentication (query
+     * @param params.data.region - the AWS region
+     * @param params.data.stringToSign - the stringToSign
+     * @param params.data.scopeDate - the timespan to allow the request
+     * @param params.data.authType - the type of authentication (query
      * or header)
-     * @param {string} params.data.signatureVersion - the version of the
+     * @param params.data.signatureVersion - the version of the
      * signature (AWS or AWS4)
-     * @param {number} params.data.signatureAge - the age of the signature in ms
-     * @param {number} params.data.timestamp - signaure timestamp
-     * @param {string} params.credentialScope - credentialScope for signature
+     * @param params.data.signatureAge - the age of the signature in ms
+     * @param params.data.timestamp - signaure timestamp
+     * @param params.credentialScope - credentialScope for signature
      * @param {RequestContext [] | null} requestContexts -
      * an array of RequestContext or null if authenticaiton of a chunk
      * in streamingv4 auth
      * instances which contain information for policy authorization check
-     * @param {function} callback - callback with either error or user info
-     * @return {undefined}
+     * @param callback - callback with either error or user info
      */
-    authenticateV4Request(params, requestContexts, callback) {
+    authenticateV4Request(
+        params: {
+            version: 4;
+            log: Logger;
+            data: {
+                accessKey: string;
+                signatureFromRequest: string;
+                region: string;
+                stringToSign: string;
+                scopeDate: string;
+                authType: 'query' | 'header';
+                signatureVersion: string;
+                signatureAge?: number;
+                timestamp: number;
+                credentialScope: string;
+                securityToken: string;
+                algo: string;
+                log: Logger;
+            };
+        },
+        requestContexts: any[],
+        callback: (err: Error | null, data?: any) => void
+    ) {
         params.log.debug('authenticating V4 request');
-        let serializedRCs;
+        let serializedRCs: any;
         if (requestContexts) {
             serializedRCs = requestContexts.map(rc => rc.serialize());
         }

@@ -140,31 +190,39 @@ class Vault {
             params.data.region,
             params.data.scopeDate,
             {
+                // @ts-ignore
                 reqUid: params.log.getSerializedUids(),
                 logger: params.log,
                 securityToken: params.data.securityToken,
                 requestContext: serializedRCs,
             },
-            (err, userInfo) => vaultSignatureCb(err, userInfo,
+            (err: Error | null, userInfo?: any) => vaultSignatureCb(err, userInfo,
                 params.log, callback, streamingV4Params),
         );
     }

     /** getCanonicalIds -- call Vault to get canonicalIDs based on email
      * addresses
-     * @param {array} emailAddresses - list of emailAddresses
-     * @param {object} log - log object
-     * @param {function} callback - callback with either error or an array
+     * @param emailAddresses - list of emailAddresses
+     * @param log - log object
+     * @param callback - callback with either error or an array
      * of objects with each object containing the canonicalID and emailAddress
      * of an account as properties
-     * @return {undefined}
      */
-    getCanonicalIds(emailAddresses, log, callback) {
+    getCanonicalIds(
+        emailAddresses: string[],
+        log: Logger,
+        callback: (
+            err: Error | null,
+            data?: { canonicalID: string; email: string }[]
+        ) => void
+    ) {
         log.trace('getting canonicalIDs from Vault based on emailAddresses',
             { emailAddresses });
         this.client.getCanonicalIds(emailAddresses,
+            // @ts-ignore
             { reqUid: log.getSerializedUids() },
-            (err, info) => {
+            (err: Error | null, info?: any) => {
                 if (err) {
                     log.debug('received error message from auth provider',
                         { errorMessage: err });

@@ -172,17 +230,17 @@ class Vault {
                 }
                 const infoFromVault = info.message.body;
                 log.trace('info received from vault', { infoFromVault });
-                const foundIds = [];
+                const foundIds: { canonicalID: string; email: string }[] = [];
                 for (let i = 0; i < Object.keys(infoFromVault).length; i++) {
                     const key = Object.keys(infoFromVault)[i];
                     if (infoFromVault[key] === 'WrongFormat'
                         || infoFromVault[key] === 'NotFound') {
                         return callback(errors.UnresolvableGrantByEmailAddress);
                     }
-                    const obj = {};
-                    obj.email = key;
-                    obj.canonicalID = infoFromVault[key];
-                    foundIds.push(obj);
+                    foundIds.push({
+                        email: key,
+                        canonicalID: infoFromVault[key],
+                    })
                 }
                 return callback(null, foundIds);
             });

@@ -190,18 +248,22 @@ class Vault {
     /** getEmailAddresses -- call Vault to get email addresses based on
      * canonicalIDs
-     * @param {array} canonicalIDs - list of canonicalIDs
-     * @param {object} log - log object
-     * @param {function} callback - callback with either error or an object
+     * @param canonicalIDs - list of canonicalIDs
+     * @param log - log object
+     * @param callback - callback with either error or an object
      * with canonicalID keys and email address values
-     * @return {undefined}
      */
-    getEmailAddresses(canonicalIDs, log, callback) {
+    getEmailAddresses(
+        canonicalIDs: string[],
+        log: Logger,
+        callback: (err: Error | null, data?: { [key: string]: any }) => void
+    ) {
         log.trace('getting emailAddresses from Vault based on canonicalIDs',
             { canonicalIDs });
         this.client.getEmailAddresses(canonicalIDs,
+            // @ts-ignore
             { reqUid: log.getSerializedUids() },
-            (err, info) => {
+            (err: Error | null, info?: any) => {
                 if (err) {
                     log.debug('received error message from vault',
                         { errorMessage: err });

@@ -224,18 +286,22 @@ class Vault {
     /** getAccountIds -- call Vault to get accountIds based on
      * canonicalIDs
-     * @param {array} canonicalIDs - list of canonicalIDs
-     * @param {object} log - log object
-     * @param {function} callback - callback with either error or an object
+     * @param canonicalIDs - list of canonicalIDs
+     * @param log - log object
+     * @param callback - callback with either error or an object
      * with canonicalID keys and accountId values
-     * @return {undefined}
      */
-    getAccountIds(canonicalIDs, log, callback) {
+    getAccountIds(
+        canonicalIDs: string[],
+        log: Logger,
+        callback: (err: Error | null, data?: { [key: string]: string }) => void
+    ) {
         log.trace('getting accountIds from Vault based on canonicalIDs',
             { canonicalIDs });
         this.client.getAccountIds(canonicalIDs,
+            // @ts-ignore
             { reqUid: log.getSerializedUids() },
-            (err, info) => {
+            (err: Error | null, info?: any) => {
                 if (err) {
                     log.debug('received error message from vault',
                         { errorMessage: err });

@@ -268,14 +334,19 @@ class Vault {
      * @param {object} log - log object
      * @param {function} callback - callback with either error or an array
      * of authorization results
-     * @return {undefined}
      */
-    checkPolicies(requestContextParams, userArn, log, callback) {
+    checkPolicies(
+        requestContextParams: any[],
+        userArn: string,
+        log: Logger,
+        callback: (err: Error | null, data?: any[]) => void
+    ) {
         log.trace('sending request context params to vault to evaluate' +
             'policies');
         this.client.checkPolicies(requestContextParams, userArn, {
+            // @ts-ignore
             reqUid: log.getSerializedUids(),
-        }, (err, info) => {
+        }, (err: Error | null, info?: any) => {
             if (err) {
                 log.debug('received error message from auth provider',
                     { error: err });

@@ -286,13 +357,14 @@ class Vault {
         });
     }

-    checkHealth(log, callback) {
+    checkHealth(log: Logger, callback: (err: Error | null, data?: any) => void) {
         if (!this.client.healthcheck) {
             const defResp = {};
             defResp[this.implName] = { code: 200, message: 'OK' };
             return callback(null, defResp);
         }
-        return this.client.healthcheck(log.getSerializedUids(), (err, obj) => {
+        // @ts-ignore
+        return this.client.healthcheck(log.getSerializedUids(), (err: Error | null, obj?: any) => {
             const respBody = {};
             if (err) {
                 log.debug(`error from ${this.implName}`, { error: err });

@@ -312,5 +384,3 @@ class Vault {
         });
     }
 }
-
-module.exports = Vault;
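
To see the typed surface in use, a sketch with a stub client; the stub and the request-logger wiring are assumptions, while the `getCanonicalIds` contract is taken from the hunks above:

    import * as werelogs from 'werelogs';
    import Vault from './Vault';

    // Stub standing in for a real vaultclient; only the one method is faked.
    const client = {
        getCanonicalIds(_emails: string[], _opts: any,
            cb: (err: Error | null, info?: any) => void) {
            cb(null, { message: { body: { 'alice@example.com': 'abcd1234' } } });
        },
    };

    const vault = new Vault(client, 'exampleBackend');
    // In practice the log is a werelogs request logger, which carries the
    // serialized UIDs the class forwards as `reqUid`.
    const log: any = new werelogs.Logger('Example').newRequestLogger();

    vault.getCanonicalIds(['alice@example.com'], log, (err, ids) => {
        if (!err) {
            // ids: [{ email: 'alice@example.com', canonicalID: 'abcd1234' }]
        }
    });
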
lib/auth/auth.js (deleted, 237 lines)

@@ -1,237 +0,0 @@
-'use strict'; // eslint-disable-line strict
-
-const crypto = require('crypto');
-const errors = require('../errors');
-const queryString = require('querystring');
-const AuthInfo = require('./AuthInfo');
-const v2 = require('./v2/authV2');
-const v4 = require('./v4/authV4');
-const constants = require('../constants');
-const constructStringToSignV2 = require('./v2/constructStringToSign');
-const constructStringToSignV4 = require('./v4/constructStringToSign');
-const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601;
-const vaultUtilities = require('./backends/in_memory/vaultUtilities');
-const inMemoryBackend = require('./backends/in_memory/Backend');
-const validateAuthConfig = require('./backends/in_memory/validateAuthConfig');
-const AuthLoader = require('./backends/in_memory/AuthLoader');
-const Vault = require('./Vault');
-const baseBackend = require('./backends/base');
-const chainBackend = require('./backends/ChainBackend');
-
-let vault = null;
-const auth = {};
-const checkFunctions = {
-    v2: {
-        headers: v2.header.check,
-        query: v2.query.check,
-    },
-    v4: {
-        headers: v4.header.check,
-        query: v4.query.check,
-    },
-};
-
-// If no auth information is provided in request, then user is part of
-// 'All Users Group' so use this group as the canonicalID for the publicUser
-const publicUserInfo = new AuthInfo({ canonicalID: constants.publicId });
-
-function setAuthHandler(handler) {
-    vault = handler;
-    return auth;
-}
-
-/**
- * This function will check validity of request parameters to authenticate
- *
- * @param {Http.Request} request - Http request object
- * @param {object} log - Logger object
- * @param {string} awsService - Aws service related
- * @param {object} data - Parameters from queryString parsing or body of
- *      POST request
- *
- * @return {object} ret
- * @return {object} ret.err - arsenal.errors object if any error was found
- * @return {object} ret.params - auth parameters to use later on for signature
- *                               computation and check
- * @return {object} ret.params.version - the auth scheme version
- *                                       (undefined, 2, 4)
- * @return {object} ret.params.data - the auth scheme's specific data
- */
-function extractParams(request, log, awsService, data) {
-    log.trace('entered', { method: 'Arsenal.auth.server.extractParams' });
-    const authHeader = request.headers.authorization;
-    let version = null;
-    let method = null;
-
-    // Identify auth version and method to dispatch to the right check function
-    if (authHeader) {
-        method = 'headers';
-        // TODO: Check for security token header to handle temporary security
-        // credentials
-        if (authHeader.startsWith('AWS ')) {
-            version = 'v2';
-        } else if (authHeader.startsWith('AWS4')) {
-            version = 'v4';
-        } else {
-            log.trace('invalid authorization security header',
-                { header: authHeader });
-            return { err: errors.AccessDenied };
-        }
-    } else if (data.Signature) {
-        method = 'query';
-        version = 'v2';
-    } else if (data['X-Amz-Algorithm']) {
-        method = 'query';
-        version = 'v4';
-    }
-
-    // Here, either both values are set, or none is set
-    if (version !== null && method !== null) {
-        if (!checkFunctions[version] || !checkFunctions[version][method]) {
-            log.trace('invalid auth version or method',
-                { version, authMethod: method });
-            return { err: errors.NotImplemented };
-        }
-        log.trace('identified auth method', { version, authMethod: method });
-        return checkFunctions[version][method](request, log, data, awsService);
-    }
-
-    // no auth info identified
-    log.debug('assuming public user');
-    return { err: null, params: publicUserInfo };
-}
-
-/**
- * This function will check validity of request parameters to authenticate
- *
- * @param {Http.Request} request - Http request object
- * @param {object} log - Logger object
- * @param {function} cb - the callback
- * @param {string} awsService - Aws service related
- * @param {RequestContext[] | null} requestContexts - array of RequestContext
- * or null if no requestContexts to be sent to Vault (for instance,
- * in multi-object delete request)
- * @return {undefined}
- */
-function doAuth(request, log, cb, awsService, requestContexts) {
-    const res = extractParams(request, log, awsService, request.query);
-    if (res.err) {
-        return cb(res.err);
-    } else if (res.params instanceof AuthInfo) {
-        return cb(null, res.params);
-    }
-    if (requestContexts) {
-        requestContexts.forEach(requestContext => {
-            requestContext.setAuthType(res.params.data.authType);
-            requestContext.setSignatureVersion(res.params
-                .data.signatureVersion);
-            requestContext.setSignatureAge(res.params.data.signatureAge);
-            requestContext.setSecurityToken(res.params.data.securityToken);
-        });
-    }
-
-    // Corner cases managed, we're left with normal auth
-    res.params.log = log;
-    if (res.params.version === 2) {
-        return vault.authenticateV2Request(res.params, requestContexts, cb);
-    }
-    if (res.params.version === 4) {
-        return vault.authenticateV4Request(res.params, requestContexts, cb,
-            awsService);
-    }
-
-    log.error('authentication method not found', {
-        method: 'Arsenal.auth.doAuth',
-    });
-    return cb(errors.InternalError);
-}
-
-/**
- * This function will generate a version 4 header
- *
- * @param {Http.Request} request - Http request object
- * @param {object} data - Parameters from queryString parsing or body of
- *      POST request
- * @param {string} accessKey - the accessKey
- * @param {string} secretKeyValue - the secretKey
- * @param {string} awsService - Aws service related
- * @param {sting} [proxyPath] - path that gets proxied by reverse proxy
- * @param {string} [sessionToken] - security token if the access/secret keys
- * are temporary credentials from STS
- * @return {undefined}
- */
-function generateV4Headers(request, data, accessKey, secretKeyValue,
-    awsService, proxyPath, sessionToken) {
-    Object.assign(request, { headers: {} });
-    const amzDate = convertUTCtoISO8601(Date.now());
-    // get date without time
-    const scopeDate = amzDate.slice(0, amzDate.indexOf('T'));
-    const region = 'us-east-1';
-    const service = awsService || 'iam';
-    const credentialScope =
-        `${scopeDate}/${region}/${service}/aws4_request`;
-    const timestamp = amzDate;
-    const algorithm = 'AWS4-HMAC-SHA256';
-
-    let payload = '';
-    if (request.method === 'POST') {
-        payload = queryString.stringify(data, null, null, {
-            encodeURIComponent,
-        });
-    }
-    const payloadChecksum = crypto.createHash('sha256')
-        .update(payload, 'binary').digest('hex');
-    request.setHeader('host', request._headers.host);
-    request.setHeader('x-amz-date', amzDate);
-    request.setHeader('x-amz-content-sha256', payloadChecksum);
-
-    if (sessionToken) {
-        request.setHeader('x-amz-security-token', sessionToken);
-    }
-
-    Object.assign(request.headers, request._headers);
-    const signedHeaders = Object.keys(request._headers)
-        .filter(headerName =>
-            headerName.startsWith('x-amz-')
-            || headerName.startsWith('x-scal-')
-            || headerName === 'host',
-        ).sort().join(';');
-    const params = { request, signedHeaders, payloadChecksum,
-        credentialScope, timestamp, query: data,
-        awsService: service, proxyPath };
-    const stringToSign = constructStringToSignV4(params);
-    const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
-        region,
-        scopeDate,
-        service);
-    const signature = crypto.createHmac('sha256', signingKey)
-        .update(stringToSign, 'binary').digest('hex');
-    const authorizationHeader = `${algorithm} Credential=${accessKey}` +
-        `/${credentialScope}, SignedHeaders=${signedHeaders}, ` +
-        `Signature=${signature}`;
-    request.setHeader('authorization', authorizationHeader);
-    Object.assign(request, { headers: {} });
-}
-
-module.exports = {
-    setHandler: setAuthHandler,
-    server: {
-        extractParams,
-        doAuth,
-    },
-    client: {
-        generateV4Headers,
-        constructStringToSignV2,
-    },
-    inMemory: {
-        backend: inMemoryBackend,
-        validateAuthConfig,
-        AuthLoader,
-    },
-    backends: {
-        baseBackend,
-        chainBackend,
-    },
-    AuthInfo,
-    Vault,
-};
lib/auth/auth.ts (new file, 266 lines)

@@ -0,0 +1,266 @@
+import * as crypto from 'crypto';
+import { Logger } from 'werelogs';
+import errors from '../errors';
+import * as queryString from 'querystring';
+import AuthInfo from './AuthInfo';
+import * as v2 from './v2/authV2';
+import * as v4 from './v4/authV4';
+import * as constants from '../constants';
+import constructStringToSignV2 from './v2/constructStringToSign';
+import constructStringToSignV4 from './v4/constructStringToSign';
+import { convertUTCtoISO8601 } from './v4/timeUtils';
+import * as vaultUtilities from './backends/in_memory/vaultUtilities';
+import * as inMemoryBackend from './backends/in_memory/Backend';
+import validateAuthConfig from './backends/in_memory/validateAuthConfig';
+import AuthLoader from './backends/in_memory/AuthLoader';
+import Vault from './Vault';
+import baseBackend from './backends/BaseBackend';
+import chainBackend from './backends/ChainBackend';
+
+let vault: Vault | null = null;
+const auth = {};
+const checkFunctions = {
+    v2: {
+        headers: v2.header.check,
+        query: v2.query.check,
+    },
+    v4: {
+        headers: v4.header.check,
+        query: v4.query.check,
+    },
+};
+
+// If no auth information is provided in request, then user is part of
+// 'All Users Group' so use this group as the canonicalID for the publicUser
+const publicUserInfo = new AuthInfo({ canonicalID: constants.publicId });
+
+function setAuthHandler(handler: Vault) {
+    vault = handler;
+    return auth;
+}
+
+/**
+ * This function will check validity of request parameters to authenticate
+ *
+ * @param request - Http request object
+ * @param log - Logger object
+ * @param awsService - Aws service related
+ * @param data - Parameters from queryString parsing or body of
+ *      POST request
+ *
+ * @return ret
+ * @return ret.err - arsenal.errors object if any error was found
+ * @return ret.params - auth parameters to use later on for signature
+ *                      computation and check
+ * @return ret.params.version - the auth scheme version
+ *                              (undefined, 2, 4)
+ * @return ret.params.data - the auth scheme's specific data
+ */
+function extractParams(
+    request: any,
+    log: Logger,
+    awsService: string,
+    data: { [key: string]: string }
+) {
+    log.trace('entered', { method: 'Arsenal.auth.server.extractParams' });
+    const authHeader = request.headers.authorization;
+    let version: 'v2' |'v4' | null = null;
+    let method: 'query' | 'headers' | null = null;
+
+    // Identify auth version and method to dispatch to the right check function
+    if (authHeader) {
+        method = 'headers';
+        // TODO: Check for security token header to handle temporary security
+        // credentials
+        if (authHeader.startsWith('AWS ')) {
+            version = 'v2';
+        } else if (authHeader.startsWith('AWS4')) {
+            version = 'v4';
+        } else {
+            log.trace('invalid authorization security header', {
+                header: authHeader,
+            });
+            return { err: errors.AccessDenied };
+        }
+    } else if (data.Signature) {
+        method = 'query';
+        version = 'v2';
+    } else if (data['X-Amz-Algorithm']) {
+        method = 'query';
+        version = 'v4';
+    }
+
+    // Here, either both values are set, or none is set
+    if (version !== null && method !== null) {
+        if (!checkFunctions[version] || !checkFunctions[version][method]) {
+            log.trace('invalid auth version or method', {
+                version,
+                authMethod: method,
+            });
+            return { err: errors.NotImplemented };
+        }
+        log.trace('identified auth method', { version, authMethod: method });
+        return checkFunctions[version][method](request, log, data, awsService);
+    }
+
+    // no auth info identified
+    log.debug('assuming public user');
+    return { err: null, params: publicUserInfo };
+}
+
+/**
+ * This function will check validity of request parameters to authenticate
+ *
+ * @param request - Http request object
+ * @param log - Logger object
+ * @param cb - the callback
+ * @param awsService - Aws service related
+ * @param {RequestContext[] | null} requestContexts - array of RequestContext
+ * or null if no requestContexts to be sent to Vault (for instance,
+ * in multi-object delete request)
+ */
+function doAuth(
+    request: any,
+    log: Logger,
+    cb: (err: Error | null, data?: any) => void,
+    awsService: string,
+    requestContexts: any[] | null
+) {
+    const res = extractParams(request, log, awsService, request.query);
+    if (res.err) {
+        return cb(res.err);
+    } else if (res.params instanceof AuthInfo) {
+        return cb(null, res.params);
+    }
+    if (requestContexts) {
+        requestContexts.forEach((requestContext) => {
+            const { params } = res
+            if ('data' in params) {
+                const { data } = params
+                requestContext.setAuthType(data.authType);
+                requestContext.setSignatureVersion(data.signatureVersion);
+                requestContext.setSecurityToken(data.securityToken);
+                if ('signatureAge' in data) {
+                    requestContext.setSignatureAge(data.signatureAge);
+                }
+            }
+        });
+    }
+
+    // Corner cases managed, we're left with normal auth
+    // TODO What's happening here?
+    // @ts-ignore
+    res.params.log = log;
+    if (res.params.version === 2) {
+        // @ts-ignore
+        return vault!.authenticateV2Request(res.params, requestContexts, cb);
+    }
+    if (res.params.version === 4) {
+        // @ts-ignore
+        return vault!.authenticateV4Request(res.params, requestContexts, cb);
+    }
+
+    log.error('authentication method not found', {
+        method: 'Arsenal.auth.doAuth',
+    });
+    return cb(errors.InternalError);
+}
+
+/**
+ * This function will generate a version 4 header
+ *
+ * @param request - Http request object
+ * @param data - Parameters from queryString parsing or body of
+ *      POST request
+ * @param accessKey - the accessKey
+ * @param secretKeyValue - the secretKey
+ * @param awsService - Aws service related
+ * @param [proxyPath] - path that gets proxied by reverse proxy
+ * @param [sessionToken] - security token if the access/secret keys
+ * are temporary credentials from STS
+ */
+function generateV4Headers(
+    request: any,
+    data: { [key: string]: string },
+    accessKey: string,
+    secretKeyValue: string,
+    awsService: string,
+    proxyPath: string,
+    sessionToken: string
+) {
+    Object.assign(request, { headers: {} });
+    const amzDate = convertUTCtoISO8601(Date.now());
+    // get date without time
+    const scopeDate = amzDate.slice(0, amzDate.indexOf('T'));
+    const region = 'us-east-1';
+    const service = awsService || 'iam';
+    const credentialScope = `${scopeDate}/${region}/${service}/aws4_request`;
+    const timestamp = amzDate;
+    const algorithm = 'AWS4-HMAC-SHA256';
+
+    let payload = '';
+    if (request.method === 'POST') {
+        payload = queryString.stringify(data, undefined, undefined, {
+            encodeURIComponent,
+        });
+    }
+    const payloadChecksum = crypto
+        .createHash('sha256')
+        .update(payload, 'binary')
+        .digest('hex');
+    request.setHeader('host', request._headers.host);
+    request.setHeader('x-amz-date', amzDate);
+    request.setHeader('x-amz-content-sha256', payloadChecksum);
+
+    if (sessionToken) {
+        request.setHeader('x-amz-security-token', sessionToken);
+    }
+
+    Object.assign(request.headers, request._headers);
+    const signedHeaders = Object.keys(request._headers)
+        .filter(
+            (headerName) =>
+                headerName.startsWith('x-amz-') ||
+                headerName.startsWith('x-scal-') ||
+                headerName === 'host'
+        )
+        .sort()
+        .join(';');
+    const params = {
+        request,
+        signedHeaders,
+        payloadChecksum,
+        credentialScope,
+        timestamp,
+        query: data,
+        awsService: service,
+        proxyPath,
+    };
+    const stringToSign = constructStringToSignV4(params);
+    const signingKey = vaultUtilities.calculateSigningKey(
+        secretKeyValue,
+        region,
+        scopeDate,
+        service
+    );
+    const signature = crypto
+        .createHmac('sha256', signingKey)
+        .update(stringToSign, 'binary')
+        .digest('hex');
+    const authorizationHeader =
+        `${algorithm} Credential=${accessKey}` +
+        `/${credentialScope}, SignedHeaders=${signedHeaders}, ` +
+        `Signature=${signature}`;
+    request.setHeader('authorization', authorizationHeader);
+    Object.assign(request, { headers: {} });
+}
+
+export const server = { extractParams, doAuth };
+export const client = { generateV4Headers, constructStringToSignV2 };
+export const inMemory = {
+    backend: inMemoryBackend,
+    validateAuthConfig,
+    AuthLoader,
+};
+export const backends = { baseBackend, chainBackend };
+export { setAuthHandler as setHandler, AuthInfo, Vault };
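
A sketch of the intended call pattern for the new module; only `setHandler`, `server.doAuth`, and the exports above come from the diff, and the handler construction is elided:

    import * as auth from './auth';

    declare const vaultHandler: auth.Vault; // built elsewhere from a real client
    auth.setHandler(vaultHandler);

    function authenticate(request: any, log: any) {
        auth.server.doAuth(request, log, (err, userInfo) => {
            if (err) {
                return; // AccessDenied, NotImplemented or InternalError
            }
            // userInfo is an AuthInfo instance for the requester
        }, 's3', null);
    }
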
lib/auth/backends/BaseBackend.ts (new file, 70 lines)

@@ -0,0 +1,70 @@
+import errors from '../../errors';
+import { Callback } from './in_memory/types';
+
+/** Base backend class */
+export default class BaseBackend {
+    service: string;
+
+    constructor(service: string) {
+        this.service = service;
+    }
+
+    verifySignatureV2(
+        _stringToSign: string,
+        _signatureFromRequest: string,
+        _accessKey: string,
+        _options: { algo: 'SHA1' | 'SHA256' },
+        callback: Callback
+    ) {
+        return callback(errors.AuthMethodNotImplemented);
+    }
+
+    verifySignatureV4(
+        _stringToSign: string,
+        _signatureFromRequest: string,
+        _accessKey: string,
+        _region: string,
+        _scopeDate: string,
+        _options: any,
+        callback: Callback
+    ) {
+        return callback(errors.AuthMethodNotImplemented);
+    }
+
+    /**
+     * Gets canonical ID's for a list of accounts based on email associated
+     * with account. The callback will be called with either error or object
+     * with email addresses as keys and canonical IDs as values.
+     */
+    getCanonicalIds(_emails: string[], _options: any, callback: Callback) {
+        return callback(errors.AuthMethodNotImplemented);
+    }
+
+    /**
+     * Gets email addresses (referred to as diplay names for getACL's) for a
+     * list of accounts based on canonical IDs associated with account.
+     * The callback will be called with either error or an object from Vault
+     * containing account canonicalID as each object key and an email address
+     * as the value (or "NotFound").
+     */
+    getEmailAddresses(
+        _canonicalIDs: string[],
+        _options: any,
+        callback: Callback
+    ) {
+        return callback(errors.AuthMethodNotImplemented);
+    }
+
+    checkPolicies(
+        _requestContextParams: any,
+        _userArn: string,
+        _options: any,
+        callback: Callback
+    ) {
+        return callback(null, { message: { body: [] } });
+    }
+
+    healthcheck(_reqUid: string, callback: Callback) {
+        return callback(null, { code: 200, message: 'OK' });
+    }
+}
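
A minimal subclass sketch showing how a concrete backend overrides the not-implemented defaults; the hard-coded credential lookup is illustrative only (AWS v2 signatures are base64 HMACs over the string to sign):

    import * as crypto from 'crypto';
    import errors from '../../errors';
    import BaseBackend from './BaseBackend';

    // Toy backend with a single hard-coded secret; a real backend would look
    // the secret up from the access key.
    class StaticBackend extends BaseBackend {
        verifySignatureV2(
            stringToSign: string,
            signatureFromRequest: string,
            accessKey: string,
            options: { algo: 'SHA1' | 'SHA256' },
            callback: (err: Error | null, data?: any) => void,
        ) {
            const secret = 'verySecretKey1'; // assumed lookup result
            const algo = options.algo === 'SHA1' ? 'sha1' : 'sha256';
            const computed = crypto.createHmac(algo, secret)
                .update(stringToSign, 'binary').digest('base64');
            if (computed !== signatureFromRequest) {
                return callback(errors.SignatureDoesNotMatch);
            }
            return callback(null, { accessKey });
        }
    }
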
lib/auth/backends/ChainBackend.js → lib/auth/backends/ChainBackend.ts

@@ -1,25 +1,31 @@
-'use strict'; // eslint-disable-line strict
-
-const assert = require('assert');
-const async = require('async');
-
-const errors = require('../../errors');
-const BaseBackend = require('./base');
+import assert from 'assert';
+import async from 'async';
+
+import { Callback } from './in_memory/types';
+import errors from '../../errors';
+import BaseBackend from './BaseBackend';
+
+export type Policy = {
+    [key: string]: any;
+    arn?: string;
+    versionId?: string;
+    isAllowed: boolean;
+}

 /**
  * Class that provides an authentication backend that will verify signatures
  * and retrieve emails and canonical ids associated with an account using a
  * given list of authentication backends and vault clients.
- *
- * @class ChainBackend
  */
-class ChainBackend extends BaseBackend {
+export default class ChainBackend extends BaseBackend {
+    #clients: BaseBackend[];
+
     /**
      * @constructor
      * @param {string} service - service id
      * @param {object[]} clients - list of authentication backends or vault clients
      */
-    constructor(service, clients) {
+    constructor(service: string, clients: BaseBackend[]) {
         super(service);

         assert(Array.isArray(clients) && clients.length > 0, 'invalid client list');

@@ -29,56 +35,71 @@ class ChainBackend extends BaseBackend {
             typeof client.getCanonicalIds === 'function' &&
             typeof client.getEmailAddresses === 'function' &&
             typeof client.checkPolicies === 'function' &&
-            typeof client.healthcheck === 'function',
+            typeof client.healthcheck === 'function'
         ), 'invalid client: missing required auth backend methods');
-        this._clients = clients;
+        this.#clients = clients;
     }

-    /*
-     * try task against each client for one to be successful
-     */
-    _tryEachClient(task, cb) {
-        async.tryEach(this._clients.map(client => done => task(client, done)), cb);
+    /** try task against each client for one to be successful */
+    #tryEachClient(task: (client: BaseBackend, done?: any) => void, cb: Callback) {
+        // @ts-ignore
+        async.tryEach(this.#clients.map(client => (done: any) => task(client, done)), cb);
     }

-    /*
-     * apply task to all clients
-     */
-    _forEachClient(task, cb) {
-        async.map(this._clients, task, cb);
+    /** apply task to all clients */
+    #forEachClient(task: (client: BaseBackend, done?: any) => void, cb: Callback) {
+        async.map(this.#clients, task, cb);
     }

-    verifySignatureV2(stringToSign, signatureFromRequest, accessKey, options, callback) {
-        this._tryEachClient((client, done) => client.verifySignatureV2(
+    verifySignatureV2(
+        stringToSign: string,
+        signatureFromRequest: string,
+        accessKey: string,
+        options: any,
+        callback: Callback
+    ) {
+        this.#tryEachClient((client, done) => client.verifySignatureV2(
             stringToSign,
             signatureFromRequest,
             accessKey,
             options,
-            done,
+            done
         ), callback);
     }

-    verifySignatureV4(stringToSign, signatureFromRequest, accessKey, region, scopeDate, options, callback) {
-        this._tryEachClient((client, done) => client.verifySignatureV4(
+    verifySignatureV4(
+        stringToSign: string,
+        signatureFromRequest: string,
+        accessKey: string,
+        region: string,
+        scopeDate: string,
+        options: any,
+        callback: Callback
+    ) {
+        this.#tryEachClient((client, done) => client.verifySignatureV4(
             stringToSign,
             signatureFromRequest,
             accessKey,
             region,
             scopeDate,
             options,
-            done,
+            done
        ), callback);
     }

-    static _mergeObjects(objectResponses) {
+    static _mergeObjects(objectResponses: any[]) {
         return objectResponses.reduce(
             (retObj, resObj) => Object.assign(retObj, resObj.message.body),
-            {});
+            {}
+        );
     }

-    getCanonicalIds(emailAddresses, options, callback) {
-        this._forEachClient(
+    getCanonicalIds(
+        emailAddresses: string[],
+        options: any,
+        callback: Callback<{ message: { body: any } }>
+    ) {
+        this.#forEachClient(
             (client, done) => client.getCanonicalIds(emailAddresses, options, done),
             (err, res) => {
                 if (err) {

@@ -90,11 +111,16 @@ class ChainBackend extends BaseBackend {
                         body: ChainBackend._mergeObjects(res),
                     },
                 });
-            });
+            }
+        );
     }

-    getEmailAddresses(canonicalIDs, options, callback) {
-        this._forEachClient(
+    getEmailAddresses(
+        canonicalIDs: string[],
+        options: any,
+        callback: Callback<{ message: { body: any } }>
+    ) {
+        this.#forEachClient(
             (client, done) => client.getEmailAddresses(canonicalIDs, options, done),
             (err, res) => {
                 if (err) {

@@ -105,14 +131,13 @@ class ChainBackend extends BaseBackend {
                         body: ChainBackend._mergeObjects(res),
                     },
                 });
-            });
+            }
+        );
     }

-    /*
-     * merge policy responses into a single message
-     */
-    static _mergePolicies(policyResponses) {
-        const policyMap = {};
-
+    /** merge policy responses into a single message */
+    static _mergePolicies(policyResponses: { message: { body: any[] } }[]) {
+        const policyMap: { [key: string]: Policy } = {};

         policyResponses.forEach(resp => {
             if (!resp.message || !Array.isArray(resp.message.body)) {

@@ -128,9 +153,9 @@ class ChainBackend extends BaseBackend {
             });
         });

-        return Object.keys(policyMap).map(key => {
-            const policyRes = { isAllowed: policyMap[key].isAllowed };
-            if (policyMap[key].arn !== '') {
+        return Object.keys(policyMap).map((key) => {
+            const policyRes: Policy = { isAllowed: policyMap[key].isAllowed };
+            if (policyMap[key].arn && policyMap[key].arn !== '') {
                 policyRes.arn = policyMap[key].arn;
             }
             if (policyMap[key].versionId) {

@@ -140,7 +165,7 @@ class ChainBackend extends BaseBackend {
         });
     }

-    /*
+    /**
         response format:
             { message: {
                 body: [{}],

@@ -148,12 +173,17 @@ class ChainBackend extends BaseBackend {
                 message: string,
             } }
      */
-    checkPolicies(requestContextParams, userArn, options, callback) {
-        this._forEachClient((client, done) => client.checkPolicies(
+    checkPolicies(
+        requestContextParams: any,
+        userArn: string,
+        options: any,
+        callback: Callback<{ message: { body: any } }>
+    ) {
+        this.#forEachClient((client, done) => client.checkPolicies(
             requestContextParams,
             userArn,
             options,
-            done,
+            done
         ), (err, res) => {
             if (err) {
                 return callback(err);

@@ -166,18 +196,18 @@ class ChainBackend extends BaseBackend {
         });
     }

-    healthcheck(reqUid, callback) {
-        this._forEachClient((client, done) =>
+    healthcheck(reqUid: string, callback: Callback) {
+        this.#forEachClient((client, done) =>
             client.healthcheck(reqUid, (err, res) => done(null, {
                 error: !!err ? err : null,
                 status: res,
-            }),
+            })
         ), (err, res) => {
             if (err) {
                 return callback(err);
             }

-            const isError = res.some(results => !!results.error);
+            const isError = res.some((results: any) => !!results.error);
             if (isError) {
                 return callback(errors.InternalError, res);
             }

@@ -185,5 +215,3 @@ class ChainBackend extends BaseBackend {
         });
     }
 }
-
-module.exports = ChainBackend;
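
A composition sketch; the two backends are placeholders for any objects implementing the BaseBackend surface:

    import BaseBackend from './BaseBackend';
    import ChainBackend from './ChainBackend';

    declare const primary: BaseBackend;   // e.g. an in-memory backend
    declare const fallback: BaseBackend;  // e.g. a vault client wrapper

    const chain = new ChainBackend('s3', [primary, fallback]);

    // Signature checks try each client in turn until one succeeds (async.tryEach)...
    chain.verifySignatureV2('string-to-sign', 'request-signature', 'ACCESSKEY',
        { algo: 'SHA256' }, (err, userInfo) => { /* first success wins */ });

    // ...while lookups fan out to every client and merge the bodies (async.map).
    chain.getCanonicalIds(['alice@example.com'], {}, (err, res) => {
        // res.message.body merges each backend's answers
    });
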
@@ -1,86 +0,0 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../errors');
-
-/**
- * Base backend class
- *
- * @class BaseBackend
- */
-class BaseBackend {
-    /**
-     * @constructor
-     * @param {string} service - service identifier for construction arn
-     */
-    constructor(service) {
-        this.service = service;
-    }
-
-    /** verifySignatureV2
-     * @param {string} stringToSign - string to sign built per AWS rules
-     * @param {string} signatureFromRequest - signature sent with request
-     * @param {string} accessKey - account accessKey
-     * @param {object} options - contains algorithm (SHA1 or SHA256)
-     * @param {function} callback - callback with either error or user info
-     * @return {function} calls callback
-     */
-    verifySignatureV2(stringToSign, signatureFromRequest,
-        accessKey, options, callback) {
-        return callback(errors.AuthMethodNotImplemented);
-    }
-
-    /** verifySignatureV4
-     * @param {string} stringToSign - string to sign built per AWS rules
-     * @param {string} signatureFromRequest - signature sent with request
-     * @param {string} accessKey - account accessKey
-     * @param {string} region - region specified in request credential
-     * @param {string} scopeDate - date specified in request credential
-     * @param {object} options - options to send to Vault
-     * (just contains reqUid for logging in Vault)
-     * @param {function} callback - callback with either error or user info
-     * @return {function} calls callback
-     */
-    verifySignatureV4(stringToSign, signatureFromRequest, accessKey,
-        region, scopeDate, options, callback) {
-        return callback(errors.AuthMethodNotImplemented);
-    }
-
-    /**
-     * Gets canonical ID's for a list of accounts
-     * based on email associated with account
-     * @param {array} emails - list of email addresses
-     * @param {object} options - to send log id to vault
-     * @param {function} callback - callback to calling function
-     * @returns {function} callback with either error or
-     * object with email addresses as keys and canonical IDs
-     * as values
-     */
-    getCanonicalIds(emails, options, callback) {
-        return callback(errors.AuthMethodNotImplemented);
-    }
-
-    /**
-     * Gets email addresses (referred to as display names for getACL's)
-     * for a list of accounts based on canonical IDs associated with account
-     * @param {array} canonicalIDs - list of canonicalIDs
-     * @param {object} options - to send log id to vault
-     * @param {function} callback - callback to calling function
-     * @returns {function} callback with either error or
-     * an object from Vault containing account canonicalID
-     * as each object key and an email address as the value (or "NotFound")
-     */
-    getEmailAddresses(canonicalIDs, options, callback) {
-        return callback(errors.AuthMethodNotImplemented);
-    }
-
-    checkPolicies(requestContextParams, userArn, options, callback) {
-        return callback(null, { message: { body: [] } });
-    }
-
-    healthcheck(reqUid, callback) {
-        return callback(null, { code: 200, message: 'OK' });
-    }
-}
-
-module.exports = BaseBackend;
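The JavaScript base class deleted above is replaced by a TypeScript `BaseBackend` (imported as `../BaseBackend` in later hunks) whose body is not part of this excerpt. A rough sketch of what an equivalent typed skeleton could look like, assuming the same method surface and default behaviours:

```ts
// Sketch only: the real BaseBackend.ts is not shown in this diff.
type Callback<Data = any> = (err: Error | null | undefined, data?: Data) => void;

declare const errors: { AuthMethodNotImplemented: Error };

export default class BaseBackend {
    service: string;

    constructor(service: string) {
        this.service = service;
    }

    // Every auth method defaults to "not implemented" so concrete
    // backends only override what they actually support.
    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: { algo: 'SHA256' | 'SHA1' },
        callback: Callback
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }

    healthcheck(reqUid: string, callback: Callback) {
        return callback(null, { code: 200, message: 'OK' });
    }
}
```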
@@ -1,223 +0,0 @@
-const fs = require('fs');
-
-const glob = require('simple-glob');
-const joi = require('@hapi/joi');
-const werelogs = require('werelogs');
-
-const ARN = require('../../../models/ARN');
-
-/**
- * Load authentication information from files or pre-loaded account
- * objects
- *
- * @class AuthLoader
- */
-class AuthLoader {
-    constructor(logApi) {
-        this._log = new (logApi || werelogs).Logger('S3');
-        this._authData = { accounts: [] };
-        // null: unknown validity, true/false: valid or invalid
-        this._isValid = null;
-
-        this._joiKeysValidator = joi.array()
-            .items({
-                access: joi.string().required(),
-                secret: joi.string().required(),
-            })
-            .required();
-
-        const accountsJoi = joi.array()
-            .items({
-                name: joi.string().required(),
-                email: joi.string().email().required(),
-                arn: joi.string().required(),
-                canonicalID: joi.string().required(),
-                shortid: joi.string().regex(/^[0-9]{12}$/).required(),
-                keys: this._joiKeysValidator,
-                // backward-compat
-                users: joi.array(),
-            })
-            .required()
-            .unique('arn')
-            .unique('email')
-            .unique('canonicalID');
-        this._joiValidator = joi.object({ accounts: accountsJoi });
-    }
-
-    /**
-     * add one or more accounts to the authentication info
-     *
-     * @param {object} authData - authentication data
-     * @param {object[]} authData.accounts - array of account data
-     * @param {string} authData.accounts[].name - account name
-     * @param {string} authData.accounts[].email: email address
-     * @param {string} authData.accounts[].arn: account ARN,
-     * e.g. 'arn:aws:iam::123456789012:root'
-     * @param {string} authData.accounts[].canonicalID account
-     * canonical ID
-     * @param {string} authData.accounts[].shortid account ID number,
-     * e.g. '123456789012'
-     * @param {object[]} authData.accounts[].keys array of
-     * access/secret keys
-     * @param {object[]} authData.accounts[].keys[].access access key
-     * @param {object[]} authData.accounts[].keys[].secret secret key
-     * @param {string} [filePath] - optional file path info for
-     * logging purpose
-     * @return {undefined}
-     */
-    addAccounts(authData, filePath) {
-        const isValid = this._validateData(authData, filePath);
-        if (isValid) {
-            this._authData.accounts =
-                this._authData.accounts.concat(authData.accounts);
-            // defer validity checking when getting data to avoid
-            // logging multiple times the errors (we need to validate
-            // all accounts at once to detect duplicate values)
-            if (this._isValid) {
-                this._isValid = null;
-            }
-        } else {
-            this._isValid = false;
-        }
-    }
-
-    /**
-     * add account information from a file
-     *
-     * @param {string} filePath - file path containing JSON
-     * authentication info (see {@link addAccounts()} for format)
-     * @return {undefined}
-     */
-    addFile(filePath) {
-        const authData = JSON.parse(fs.readFileSync(filePath));
-        this.addAccounts(authData, filePath);
-    }
-
-    /**
-     * add account information from a filesystem path
-     *
-     * @param {string|string[]} globPattern - filesystem glob pattern,
-     * can be a single string or an array of glob patterns. Globs
-     * can be simple file paths or can contain glob matching
-     * characters, like '/a/b/*.json'. The matching files are
-     * individually loaded as JSON and accounts are added. See
-     * {@link addAccounts()} for JSON format.
-     * @return {undefined}
-     */
-    addFilesByGlob(globPattern) {
-        const files = glob(globPattern);
-        files.forEach(filePath => this.addFile(filePath));
-    }
-
-    /**
-     * perform validation on authentication info previously
-     * loaded. Note that it has to be done on the entire set after an
-     * update to catch duplicate account IDs or access keys.
-     *
-     * @return {boolean} true if authentication info is valid
-     * false otherwise
-     */
-    validate() {
-        if (this._isValid === null) {
-            this._isValid = this._validateData(this._authData);
-        }
-        return this._isValid;
-    }
-
-    /**
-     * get authentication info as a plain JS object containing all accounts
-     * under the "accounts" attribute, with validation.
-     *
-     * @return {object|null} the validated authentication data
-     * null if invalid
-     */
-    getData() {
-        return this.validate() ? this._authData : null;
-    }
-
-    _validateData(authData, filePath) {
-        const res = joi.validate(authData, this._joiValidator,
-            { abortEarly: false });
-        if (res.error) {
-            this._dumpJoiErrors(res.error.details, filePath);
-            return false;
-        }
-        let allKeys = [];
-        let arnError = false;
-        const validatedAuth = res.value;
-        validatedAuth.accounts.forEach(account => {
-            // backward-compat: ignore arn if starts with 'aws:' and log a
-            // warning
-            if (account.arn.startsWith('aws:')) {
-                this._log.error(
-                    'account must have a valid AWS ARN, legacy examples ' +
-                    'starting with \'aws:\' are not supported anymore. ' +
-                    'Please convert to a proper account entry (see ' +
-                    'examples at https://github.com/scality/S3/blob/' +
-                    'master/conf/authdata.json). Also note that support ' +
-                    'for account users has been dropped.',
-                    { accountName: account.name, accountArn: account.arn,
-                        filePath });
-                arnError = true;
-                return;
-            }
-            if (account.users) {
-                this._log.error(
-                    'support for account users has been dropped, consider ' +
-                    'turning users into account entries (see examples at ' +
-                    'https://github.com/scality/S3/blob/master/conf/' +
-                    'authdata.json)',
-                    { accountName: account.name, accountArn: account.arn,
-                        filePath });
-                arnError = true;
-                return;
-            }
-            const arnObj = ARN.createFromString(account.arn);
-            if (arnObj.error) {
-                this._log.error(
-                    'authentication config validation error',
-                    { reason: arnObj.error.description,
-                        accountName: account.name, accountArn: account.arn,
-                        filePath });
-                arnError = true;
-                return;
-            }
-            if (!arnObj.isIAMAccount()) {
-                this._log.error(
-                    'authentication config validation error',
-                    { reason: 'not an IAM account ARN',
-                        accountName: account.name, accountArn: account.arn,
-                        filePath });
-                arnError = true;
-                return;
-            }
-            allKeys = allKeys.concat(account.keys);
-        });
-        if (arnError) {
-            return false;
-        }
-        const uniqueKeysRes = joi.validate(
-            allKeys, this._joiKeysValidator.unique('access'));
-        if (uniqueKeysRes.error) {
-            this._dumpJoiErrors(uniqueKeysRes.error.details, filePath);
-            return false;
-        }
-        return true;
-    }
-
-    _dumpJoiErrors(errors, filePath) {
-        errors.forEach(err => {
-            const logInfo = { item: err.path, filePath };
-            if (err.type === 'array.unique') {
-                logInfo.reason = `duplicate value '${err.context.path}'`;
-                logInfo.dupValue = err.context.value[err.context.path];
-            } else {
-                logInfo.reason = err.message;
-                logInfo.context = err.context;
-            }
-            this._log.error('authentication config validation error',
-                logInfo);
-        });
-    }
-}
-
-module.exports = AuthLoader;
@@ -0,0 +1,205 @@
+import * as fs from 'fs';
+import glob from 'simple-glob';
+import joi from 'joi';
+import werelogs from 'werelogs';
+import * as types from './types';
+import { Account, Accounts } from './types';
+
+import ARN from '../../../models/ARN';
+
+/** Load authentication information from files or pre-loaded account objects */
+export default class AuthLoader {
+    #log: werelogs.Logger;
+    #authData: Accounts;
+    #isValid: 'waiting-for-validation' | 'valid' | 'invalid';
+
+    constructor(logApi: { Logger: typeof werelogs.Logger } = werelogs) {
+        this.#log = new logApi.Logger('S3');
+        this.#authData = { accounts: [] };
+        this.#isValid = 'waiting-for-validation';
+    }
+
+    /** Add one or more accounts to the authentication info */
+    addAccounts(authData: Accounts, filePath?: string) {
+        const isValid = this.#isAuthDataValid(authData, filePath);
+        if (isValid) {
+            this.#authData.accounts = [
+                ...this.#authData.accounts,
+                ...authData.accounts,
+            ];
+            // defer validity checking when getting data to avoid
+            // logging multiple times the errors (we need to validate
+            // all accounts at once to detect duplicate values)
+            if (this.#isValid === 'valid') {
+                this.#isValid = 'waiting-for-validation';
+            }
+        } else {
+            this.#isValid = 'invalid';
+        }
+    }
+
+    /**
+     * Add account information from a file. Use { legacy: false } as an option
+     * to use the new, Promise-based version.
+     *
+     * @param filePath - file path containing JSON
+     * authentication info (see {@link addAccounts()} for format)
+     */
+    addFile(filePath: string, options: { legacy: false }): Promise<void>;
+    /** @deprecated Please use Promise-version instead. */
+    addFile(filePath: string, options?: { legacy: true }): void;
+    addFile(filePath: string, options = { legacy: true }) {
+        // On deprecation, remove the legacy part and keep the promises.
+        const fn: any = options.legacy ? fs.readFileSync : fs.promises.readFile;
+        const temp = fn(filePath, 'utf8') as Promise<string> | string;
+        const prom = Promise.resolve(temp).then((data) => {
+            const authData = JSON.parse(data);
+            this.addAccounts(authData, filePath);
+        });
+        return options.legacy ? undefined : prom;
+    }
+
+    /**
+     * Add account information from a filesystem path
+     *
+     * @param globPattern - filesystem glob pattern,
+     * can be a single string or an array of glob patterns. Globs
+     * can be simple file paths or can contain glob matching
+     * characters, like '/a/b/*.json'. The matching files are
+     * individually loaded as JSON and accounts are added. See
+     * {@link addAccounts()} for JSON format.
+     */
+    addFilesByGlob(globPattern: string | string[]) {
+        // FIXME switch glob to async version
+        const files = glob(globPattern);
+        files.forEach((filePath) => this.addFile(filePath));
+    }
+
+    /**
+     * Perform validation on authentication info previously
+     * loaded. Note that it has to be done on the entire set after an
+     * update to catch duplicate account IDs or access keys.
+     */
+    validate() {
+        if (this.#isValid === 'waiting-for-validation') {
+            const isValid = this.#isAuthDataValid(this.#authData);
+            this.#isValid = isValid ? 'valid' : 'invalid';
+        }
+        return this.#isValid === 'valid';
+    }
+
+    /**
+     * Get authentication info as a plain JS object containing all accounts
+     * under the "accounts" attribute, with validation.
+     */
+    get data() {
+        return this.validate() ? this.#authData : null;
+    }
+
+    /** backward-compat: ignore arn if starts with 'aws:' and log a warning */
+    #isNotLegacyAWSARN(account: Account, filePath?: string) {
+        if (account.arn.startsWith('aws:')) {
+            const { name: accountName, arn: accountArn } = account;
+            this.#log.error(
+                'account must have a valid AWS ARN, legacy examples ' +
+                    "starting with 'aws:' are not supported anymore. " +
+                    'Please convert to a proper account entry (see ' +
+                    'examples at https://github.com/scality/S3/blob/' +
+                    'master/conf/authdata.json). Also note that support ' +
+                    'for account users has been dropped.',
+                { accountName, accountArn, filePath }
+            );
+            return false;
+        }
+        return true;
+    }
+
+    #isValidUsers(account: Account, filePath?: string) {
+        if (account.users) {
+            const { name: accountName, arn: accountArn } = account;
+            this.#log.error(
+                'support for account users has been dropped, consider ' +
+                    'turning users into account entries (see examples at ' +
+                    'https://github.com/scality/S3/blob/master/conf/' +
+                    'authdata.json)',
+                { accountName, accountArn, filePath }
+            );
+            return false;
+        }
+        return true;
+    }
+
+    #isValidARN(account: Account, filePath?: string) {
+        const arnObj = ARN.createFromString(account.arn);
+        const { name: accountName, arn: accountArn } = account;
+        if (arnObj instanceof ARN) {
+            if (!arnObj.isIAMAccount()) {
+                this.#log.error('authentication config validation error', {
+                    reason: 'not an IAM account ARN',
+                    accountName,
+                    accountArn,
+                    filePath,
+                });
+                return false;
+            }
+        } else {
+            this.#log.error('authentication config validation error', {
+                reason: arnObj.error.description,
+                accountName,
+                accountArn,
+                filePath,
+            });
+            return false;
+        }
+        return true;
+    }
+
+    #isAuthDataValid(authData: any, filePath?: string) {
+        const options = { abortEarly: true };
+        const response = types.validators.accounts.validate(authData, options);
+        if (response.error) {
+            this.#dumpJoiErrors(response.error.details, filePath);
+            return false;
+        }
+        const validAccounts = response.value.accounts.filter(
+            (account: Account) =>
+                this.#isNotLegacyAWSARN(account, filePath) &&
+                this.#isValidUsers(account, filePath) &&
+                this.#isValidARN(account, filePath)
+        );
+        const areSomeInvalidAccounts =
+            validAccounts.length !== response.value.accounts.length;
+        if (areSomeInvalidAccounts) {
+            return false;
+        }
+        const keys = validAccounts.flatMap((account) => account.keys);
+        const uniqueKeysValidator = types.validators.keys.unique('access');
+        const areKeysUnique = uniqueKeysValidator.validate(keys);
+        if (areKeysUnique.error) {
+            this.#dumpJoiErrors(areKeysUnique.error.details, filePath);
+            return false;
+        }
+        return true;
+    }
+
+    #dumpJoiErrors(errors: joi.ValidationErrorItem[], filePath?: string) {
+        errors.forEach((err) => {
+            const baseLogInfo = { item: err.path, filePath };
+            const logInfo = () => {
+                if (err.type === 'array.unique') {
+                    const reason = `duplicate value '${err.context?.path}'`;
+                    const dupValue = err.context?.value[err.context.path];
+                    return { ...baseLogInfo, reason, dupValue };
+                } else {
+                    const reason = err.message;
+                    const context = err.context;
+                    return { ...baseLogInfo, reason, context };
                }
+            };
+            this.#log.error(
+                'authentication config validation error',
+                logInfo()
+            );
+        });
+    }
+}
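The overloads above keep the deprecated synchronous `addFile` while steering callers toward the Promise form. A usage sketch (the config path is illustrative):

```ts
import AuthLoader from './AuthLoader';

async function loadAuth() {
    const loader = new AuthLoader();

    // Deprecated synchronous form, kept for compatibility:
    loader.addFile('conf/authdata.json');

    // Promise-based form, selected by the { legacy: false } overload:
    await loader.addFile('conf/authdata.json', { legacy: false });

    // Validation runs over the whole accumulated set, so duplicate
    // ARNs, emails, canonical IDs and access keys are caught:
    if (!loader.validate()) {
        throw new Error('invalid authentication config');
    }
    return loader.data; // null when validation failed
}
```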
@@ -1,14 +1,11 @@
-'use strict'; // eslint-disable-line strict
-
-const crypto = require('crypto');
-
-const errors = require('../../../errors');
-const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
-const hashSignature = require('./vaultUtilities').hashSignature;
-const Indexer = require('./Indexer');
-const BaseBackend = require('../base');
-
-function _formatResponse(userInfoToSend) {
+import * as crypto from 'crypto';
+import errors from '../../../errors';
+import { calculateSigningKey, hashSignature } from './vaultUtilities';
+import Indexer from './Indexer';
+import BaseBackend from '../BaseBackend';
+import { Accounts } from './types';
+
+function _formatResponse(userInfoToSend: any) {
     return {
         message: {
             body: { userInfo: userInfoToSend },
@@ -19,32 +16,41 @@ function _formatResponse(userInfoToSend) {
 /**
  * Class that provides a memory backend for verifying signatures and getting
  * emails and canonical ids associated with an account.
- *
- * @class InMemoryBackend
  */
 class InMemoryBackend extends BaseBackend {
+    indexer: Indexer;
+
     /**
      * @constructor
-     * @param {string} service - service identifier for construction arn
-     * @param {Indexer} indexer - indexer instance for retrieving account info
-     * @param {function} formatter - function which accepts user info to send
-     * back and returns it in an object
+     * @param service - service identifier for construction arn
+     * @param indexer - indexer instance for retrieving account info
      */
-    constructor(service, indexer, formatter) {
+    constructor(service: string, indexer: Indexer) {
         super(service);
         this.indexer = indexer;
-        this.formatResponse = formatter;
     }
 
-    verifySignatureV2(stringToSign, signatureFromRequest,
-        accessKey, options, callback) {
+    // CODEQUALITY-TODO-SYNC Should be synchronous
+    verifySignatureV2(
+        stringToSign: string,
+        signatureFromRequest: string,
+        accessKey: string,
+        options: { algo: 'SHA256' | 'SHA1' },
+        callback: (
+            error: Error | null,
+            data?: ReturnType<typeof _formatResponse>
+        ) => void
+    ) {
         const entity = this.indexer.getEntityByKey(accessKey);
         if (!entity) {
             return callback(errors.InvalidAccessKeyId);
         }
         const secretKey = this.indexer.getSecretKey(entity, accessKey);
-        const reconstructedSig =
-            hashSignature(stringToSign, secretKey, options.algo);
+        const reconstructedSig = hashSignature(
+            stringToSign,
+            secretKey,
+            options.algo
+        );
         if (signatureFromRequest !== reconstructedSig) {
             return callback(errors.SignatureDoesNotMatch);
         }
@@ -52,22 +58,37 @@ class InMemoryBackend extends BaseBackend {
             accountDisplayName: this.indexer.getAcctDisplayName(entity),
             canonicalID: entity.canonicalID,
             arn: entity.arn,
+            // @ts-ignore TODO why ?
             IAMdisplayName: entity.IAMdisplayName,
         };
-        const vaultReturnObject = this.formatResponse(userInfoToSend);
+        const vaultReturnObject = _formatResponse(userInfoToSend);
         return callback(null, vaultReturnObject);
     }
 
-    verifySignatureV4(stringToSign, signatureFromRequest, accessKey,
-        region, scopeDate, options, callback) {
+    // TODO Options not used. Why ?
+    // CODEQUALITY-TODO-SYNC Should be synchronous
+    verifySignatureV4(
+        stringToSign: string,
+        signatureFromRequest: string,
+        accessKey: string,
+        region: string,
+        scopeDate: string,
+        _options: { algo: 'SHA256' | 'SHA1' },
+        callback: (
+            err: Error | null,
+            data?: ReturnType<typeof _formatResponse>
+        ) => void
+    ) {
         const entity = this.indexer.getEntityByKey(accessKey);
         if (!entity) {
             return callback(errors.InvalidAccessKeyId);
         }
         const secretKey = this.indexer.getSecretKey(entity, accessKey);
         const signingKey = calculateSigningKey(secretKey, region, scopeDate);
-        const reconstructedSig = crypto.createHmac('sha256', signingKey)
-            .update(stringToSign, 'binary').digest('hex');
+        const reconstructedSig = crypto
+            .createHmac('sha256', signingKey)
+            .update(stringToSign, 'binary')
+            .digest('hex');
         if (signatureFromRequest !== reconstructedSig) {
             return callback(errors.SignatureDoesNotMatch);
         }
@@ -75,22 +96,28 @@ class InMemoryBackend extends BaseBackend {
             accountDisplayName: this.indexer.getAcctDisplayName(entity),
             canonicalID: entity.canonicalID,
             arn: entity.arn,
+            // @ts-ignore TODO why ?
             IAMdisplayName: entity.IAMdisplayName,
         };
-        const vaultReturnObject = this.formatResponse(userInfoToSend);
+        const vaultReturnObject = _formatResponse(userInfoToSend);
        return callback(null, vaultReturnObject);
     }
 
-    getCanonicalIds(emails, log, cb) {
+    // TODO log not used. Why ?
+    // CODEQUALITY-TODO-SYNC Should be synchronous
+    getCanonicalIds(
+        emails: string[],
+        _log: any,
+        cb: (err: null, data: { message: { body: any } }) => void
+    ) {
         const results = {};
-        emails.forEach(email => {
+        emails.forEach((email) => {
             const lowercasedEmail = email.toLowerCase();
             const entity = this.indexer.getEntityByEmail(lowercasedEmail);
             if (!entity) {
                 results[email] = 'NotFound';
             } else {
-                results[email] =
-                    entity.canonicalID;
+                results[email] = entity.canonicalID;
             }
         });
         const vaultReturnObject = {
@@ -101,9 +128,15 @@ class InMemoryBackend extends BaseBackend {
         return cb(null, vaultReturnObject);
     }
 
-    getEmailAddresses(canonicalIDs, options, cb) {
+    // TODO options not used. Why ?
+    // CODEQUALITY-TODO-SYNC Should be synchronous
+    getEmailAddresses(
+        canonicalIDs: string[],
+        _options: any,
+        cb: (err: null, data: { message: { body: any } }) => void
+    ) {
         const results = {};
-        canonicalIDs.forEach(canonicalId => {
+        canonicalIDs.forEach((canonicalId) => {
             const foundEntity = this.indexer.getEntityByCanId(canonicalId);
             if (!foundEntity || !foundEntity.email) {
                 results[canonicalId] = 'NotFound';
@@ -119,19 +152,26 @@ class InMemoryBackend extends BaseBackend {
         return cb(null, vaultReturnObject);
     }
 
+    // TODO options not used. Why ?
+    // CODEQUALITY-TODO-SYNC Should be synchronous
     /**
      * Gets accountIds for a list of accounts based on
      * the canonical IDs associated with the account
-     * @param {array} canonicalIDs - list of canonicalIDs
-     * @param {object} options - to send log id to vault
-     * @param {function} cb - callback to calling function
-     * @returns {function} callback with either error or
+     * @param canonicalIDs - list of canonicalIDs
+     * @param options - to send log id to vault
+     * @param cb - callback to calling function
+     * @return The next is wrong. Here to keep archives.
+     * callback with either error or
      * an object from Vault containing account canonicalID
      * as each object key and an accountId as the value (or "NotFound")
      */
-    getAccountIds(canonicalIDs, options, cb) {
+    getAccountIds(
+        canonicalIDs: string[],
+        _options: any,
+        cb: (err: null, data: { message: { body: any } }) => void
+    ) {
         const results = {};
-        canonicalIDs.forEach(canonicalID => {
+        canonicalIDs.forEach((canonicalID) => {
             const foundEntity = this.indexer.getEntityByCanId(canonicalID);
             if (!foundEntity || !foundEntity.shortid) {
                 results[canonicalID] = 'Not Found';
@@ -148,31 +188,14 @@
     }
 }
 
 class S3AuthBackend extends InMemoryBackend {
-    /**
-     * @constructor
-     * @param {object} authdata - the authentication config file's data
-     * @param {object[]} authdata.accounts - array of account objects
-     * @param {string=} authdata.accounts[].name - account name
-     * @param {string} authdata.accounts[].email - account email
-     * @param {string} authdata.accounts[].arn - IAM resource name
-     * @param {string} authdata.accounts[].canonicalID - account canonical ID
-     * @param {string} authdata.accounts[].shortid - short account ID
-     * @param {object[]=} authdata.accounts[].keys - array of key objects
-     * @param {string} authdata.accounts[].keys[].access - access key
-     * @param {string} authdata.accounts[].keys[].secret - secret key
-     * @return {undefined}
-     */
-    constructor(authdata) {
-        super('s3', new Indexer(authdata), _formatResponse);
+    constructor(authdata: Accounts) {
+        super('s3', new Indexer(authdata));
     }
 
-    refreshAuthData(authData) {
+    refreshAuthData(authData: Accounts) {
         this.indexer = new Indexer(authData);
     }
 }
 
-module.exports = {
-    s3: S3AuthBackend,
-};
+export { S3AuthBackend as s3 };
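Putting the converted backend together: verification recomputes the signature from the indexed secret key and compares. A sketch, assuming the module is imported as `./backend` and using made-up credentials:

```ts
import { s3 as S3AuthBackend } from './backend'; // module name assumed
import { hashSignature } from './vaultUtilities';

declare const authdata: import('./types').Accounts;

const backend = new S3AuthBackend(authdata);

// Recompute the signature the way a client would, then verify it:
const stringToSign = 'GET\n\n\nThu, 17 Nov 2005 18:49:58 GMT\n/bucket';
const signature = hashSignature(stringToSign, 'verySecretKey1', 'SHA256');

backend.verifySignatureV2(
    stringToSign,
    signature,
    'accessKey1', // must resolve through the Indexer to 'verySecretKey1'
    { algo: 'SHA256' },
    (err, data) => {
        if (err) return console.error('denied', err);
        return console.log('authenticated as', data?.message.body.userInfo.arn);
    }
);
```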
@@ -1,145 +0,0 @@
-/**
- * Class that provides an internal indexing over the simple data provided by
- * the authentication configuration file for the memory backend. This allows
- * accessing the different authentication entities through various types of
- * keys.
- *
- * @class Indexer
- */
-class Indexer {
-    /**
-     * @constructor
-     * @param {object} authdata - the authentication config file's data
-     * @param {object[]} authdata.accounts - array of account objects
-     * @param {string=} authdata.accounts[].name - account name
-     * @param {string} authdata.accounts[].email - account email
-     * @param {string} authdata.accounts[].arn - IAM resource name
-     * @param {string} authdata.accounts[].canonicalID - account canonical ID
-     * @param {string} authdata.accounts[].shortid - short account ID
-     * @param {object[]=} authdata.accounts[].keys - array of key objects
-     * @param {string} authdata.accounts[].keys[].access - access key
-     * @param {string} authdata.accounts[].keys[].secret - secret key
-     * @return {undefined}
-     */
-    constructor(authdata) {
-        this.accountsBy = {
-            canId: {},
-            accessKey: {},
-            email: {},
-        };
-
-        /*
-         * This may happen if the application is configured to use another
-         * authentication backend than in-memory.
-         * As such, we're managing the error here to avoid screwing up there.
-         */
-        if (!authdata) {
-            return;
-        }
-
-        this._build(authdata);
-    }
-
-    _indexAccount(account) {
-        const accountData = {
-            arn: account.arn,
-            canonicalID: account.canonicalID,
-            shortid: account.shortid,
-            accountDisplayName: account.name,
-            email: account.email.toLowerCase(),
-            keys: [],
-        };
-        this.accountsBy.canId[accountData.canonicalID] = accountData;
-        this.accountsBy.email[accountData.email] = accountData;
-        if (account.keys !== undefined) {
-            account.keys.forEach(key => {
-                accountData.keys.push(key);
-                this.accountsBy.accessKey[key.access] = accountData;
-            });
-        }
-    }
-
-    _build(authdata) {
-        authdata.accounts.forEach(account => {
-            this._indexAccount(account);
-        });
-    }
-
-    /**
-     * This method returns the account associated to a canonical ID.
-     *
-     * @param {string} canId - The canonicalId of the account
-     * @return {Object} account - The account object
-     * @return {Object} account.arn - The account's ARN
-     * @return {Object} account.canonicalID - The account's canonical ID
-     * @return {Object} account.shortid - The account's internal shortid
-     * @return {Object} account.accountDisplayName - The account's display name
-     * @return {Object} account.email - The account's lowercased email
-     */
-    getEntityByCanId(canId) {
-        return this.accountsBy.canId[canId];
-    }
-
-    /**
-     * This method returns the entity (either an account or a user) associated
-     * to a canonical ID.
-     *
-     * @param {string} key - The accessKey of the entity
-     * @return {Object} entity - The entity object
-     * @return {Object} entity.arn - The entity's ARN
-     * @return {Object} entity.canonicalID - The canonical ID for the entity's
-     * account
-     * @return {Object} entity.shortid - The entity's internal shortid
-     * @return {Object} entity.accountDisplayName - The entity's account
-     * display name
-     * @return {Object} entity.IAMDisplayName - The user's display name
-     * (if the entity is a user)
-     * @return {Object} entity.email - The entity's lowercased email
-     */
-    getEntityByKey(key) {
-        return this.accountsBy.accessKey[key];
-    }
-
-    /**
-     * This method returns the entity (either an account or a user) associated
-     * to an email address.
-     *
-     * @param {string} email - The email address
-     * @return {Object} entity - The entity object
-     * @return {Object} entity.arn - The entity's ARN
-     * @return {Object} entity.canonicalID - The canonical ID for the entity's
-     * account
-     * @return {Object} entity.shortid - The entity's internal shortid
-     * @return {Object} entity.accountDisplayName - The entity's account
-     * display name
-     * @return {Object} entity.IAMDisplayName - The user's display name
-     * (if the entity is a user)
-     * @return {Object} entity.email - The entity's lowercased email
-     */
-    getEntityByEmail(email) {
-        const lowerCasedEmail = email.toLowerCase();
-        return this.accountsBy.email[lowerCasedEmail];
-    }
-
-    /**
-     * This method returns the secret key associated with the entity.
-     * @param {Object} entity - the entity object
-     * @param {string} accessKey - access key
-     * @returns {string} secret key
-     */
-    getSecretKey(entity, accessKey) {
-        return entity.keys
-            .filter(kv => kv.access === accessKey)[0].secret;
-    }
-
-    /**
-     * This method returns the account display name associated with the entity.
-     * @param {Object} entity - the entity object
-     * @returns {string} account display name
-     */
-    getAcctDisplayName(entity) {
-        return entity.accountDisplayName;
-    }
-}
-
-module.exports = Indexer;
@@ -0,0 +1,93 @@
+import { Accounts, Account, Entity } from './types';
+
+/**
+ * Class that provides an internal indexing over the simple data provided by
+ * the authentication configuration file for the memory backend. This allows
+ * accessing the different authentication entities through various types of
+ * keys.
+ */
+export default class Indexer {
+    accountsBy: {
+        canId: { [id: string]: Entity | undefined },
+        accessKey: { [id: string]: Entity | undefined },
+        email: { [id: string]: Entity | undefined },
+    }
+
+    constructor(authdata?: Accounts) {
+        this.accountsBy = {
+            canId: {},
+            accessKey: {},
+            email: {},
+        };
+
+        /*
+         * This may happen if the application is configured to use another
+         * authentication backend than in-memory.
+         * As such, we're managing the error here to avoid screwing up there.
+         */
+        if (!authdata) {
+            return;
+        }
+
+        this.#build(authdata);
+    }
+
+    #indexAccount(account: Account) {
+        const accountData: Entity = {
+            arn: account.arn,
+            canonicalID: account.canonicalID,
+            shortid: account.shortid,
+            accountDisplayName: account.name,
+            email: account.email.toLowerCase(),
+            keys: [],
+        };
+        this.accountsBy.canId[accountData.canonicalID] = accountData;
+        this.accountsBy.email[accountData.email] = accountData;
+        if (account.keys !== undefined) {
+            account.keys.forEach(key => {
+                accountData.keys.push(key);
+                this.accountsBy.accessKey[key.access] = accountData;
+            });
+        }
+    }
+
+    #build(authdata: Accounts) {
+        authdata.accounts.forEach(account => {
+            this.#indexAccount(account);
+        });
+    }
+
+    /** This method returns the account associated to a canonical ID. */
+    getEntityByCanId(canId: string): Entity | undefined {
+        return this.accountsBy.canId[canId];
+    }
+
+    /**
+     * This method returns the entity (either an account or a user) associated
+     * to a canonical ID.
+     * @param {string} key - The accessKey of the entity
+     */
+    getEntityByKey(key: string): Entity | undefined {
+        return this.accountsBy.accessKey[key];
+    }
+
+    /**
+     * This method returns the entity (either an account or a user) associated
+     * to an email address.
+     */
+    getEntityByEmail(email: string): Entity | undefined {
+        const lowerCasedEmail = email.toLowerCase();
+        return this.accountsBy.email[lowerCasedEmail];
+    }
+
+    /** This method returns the secret key associated with the entity. */
+    getSecretKey(entity: Entity, accessKey: string) {
+        const keys = entity.keys.filter(kv => kv.access === accessKey);
+        return keys[0].secret;
+    }
+
+    /** This method returns the account display name associated with the entity. */
+    getAcctDisplayName(entity: Entity) {
+        return entity.accountDisplayName;
+    }
+}
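A usage sketch for the converted indexer; the account entry mirrors the shape validated by `types.ts`, and the canonical ID is the sample value from AWS documentation:

```ts
import Indexer from './Indexer';

// Illustrative account entry; real data comes from authdata.json.
const indexer = new Indexer({
    accounts: [{
        name: 'Bart',
        email: 'sampleaccount1@sampling.com',
        arn: 'arn:aws:iam::123456789012:root',
        canonicalID:
            '79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be',
        shortid: '123456789012',
        keys: [{ access: 'accessKey1', secret: 'verySecretKey1' }],
        users: [],
    }],
});

// Entities are reachable by access key, canonical ID, or email:
const entity = indexer.getEntityByKey('accessKey1');
if (entity) {
    const secret = indexer.getSecretKey(entity, 'accessKey1');
    const displayName = indexer.getAcctDisplayName(entity);
    console.log(displayName, secret);
}
```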
@@ -0,0 +1,51 @@
+import joi from 'joi';
+
+export type Callback<Data = any> = (err: Error | null | undefined, data?: Data) => void;
+
+export type Key = { access: string; secret: string };
+export type Base = {
+    arn: string;
+    canonicalID: string;
+    shortid: string;
+    email: string;
+    keys: Key[];
+};
+export type Account = Base & { name: string; users: any[] };
+export type Accounts = { accounts: Account[] };
+export type Entity = Base & { accountDisplayName: string };
+
+const keys = ((): joi.ArraySchema => {
+    const str = joi.string().required();
+    const items = { access: str, secret: str };
+    return joi.array().items(items).required();
+})();
+
+const account = (() => {
+    return joi.object<Account>({
+        name: joi.string().required(),
+        email: joi.string().email().required(),
+        arn: joi.string().required(),
+        canonicalID: joi.string().required(),
+        shortid: joi
+            .string()
+            .regex(/^[0-9]{12}$/)
+            .required(),
+        keys: keys,
+        // backward-compat
+        users: joi.array(),
+    });
+})();
+
+const accounts = (() => {
+    return joi.object<Accounts>({
+        accounts: joi
+            .array()
+            .items(account)
+            .required()
+            .unique('arn')
+            .unique('email')
+            .unique('canonicalID'),
+    });
+})();
+
+export const validators = { keys, account, accounts };
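The exported `validators` make the same joi schemas reusable outside `AuthLoader`. A sketch of validating a raw config, assuming `rawConfig` holds the JSON text:

```ts
import { validators, Accounts } from './types';

declare const rawConfig: string;

const candidate: unknown = JSON.parse(rawConfig);
const { error, value } = validators.accounts.validate(candidate, {
    abortEarly: true,
});
if (error) {
    // error.details is the joi report that AuthLoader's
    // #dumpJoiErrors consumes above
    throw error;
}
const accounts = value as Accounts;
```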
@@ -1,18 +0,0 @@
-const AuthLoader = require('./AuthLoader');
-
-/**
- * @deprecated please use {@link AuthLoader} class instead
- *
- * @param {object} authdata - the authentication config file's data
- * @param {werelogs.API} logApi - object providing a constructor function
- * for the Logger object
- * @return {boolean} true on erroneous data
- * false on success
- */
-function validateAuthConfig(authdata, logApi) {
-    const authLoader = new AuthLoader(logApi);
-    authLoader.addAccounts(authdata);
-    return !authLoader.validate();
-}
-
-module.exports = validateAuthConfig;
@@ -0,0 +1,16 @@
+import { Logger } from 'werelogs';
+import AuthLoader from './AuthLoader';
+import { Accounts } from './types';
+
+/**
+ * @deprecated please use {@link AuthLoader} class instead
+ * @return true on erroneous data, false on success
+ */
+export default function validateAuthConfig(
+    authdata: Accounts,
+    logApi?: { Logger: typeof Logger }
+) {
+    const authLoader = new AuthLoader(logApi);
+    authLoader.addAccounts(authdata);
+    return !authLoader.validate();
+}
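The wrapper keeps the old inverted return convention. A sketch:

```ts
import validateAuthConfig from './validateAuthConfig';

declare const authdata: import('./types').Accounts;

// Note the convention carried over from the JS version:
// true means the data is erroneous.
const hasErrors = validateAuthConfig(authdata);
if (hasErrors) {
    process.exit(1);
}
```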
@@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-
-const crypto = require('crypto');
+import * as crypto from 'crypto';
 
 /** hashSignature for v2 Auth
  * @param {string} stringToSign - built string to sign per AWS rules
@@ -8,11 +6,19 @@ const crypto = require('crypto');
  * @param {string} algorithm - either SHA256 or SHA1
  * @return {string} reconstructed signature
  */
-function hashSignature(stringToSign, secretKey, algorithm) {
+export function hashSignature(
+    stringToSign: string,
+    secretKey: string,
+    algorithm: 'SHA256' | 'SHA1'
+): string {
     const hmacObject = crypto.createHmac(algorithm, secretKey);
     return hmacObject.update(stringToSign, 'binary').digest('base64');
 }
 
+const sha256 = (key: string | Buffer, data: string) => {
+    return crypto.createHmac('sha256', key).update(data, 'binary').digest();
+};
+
 /** calculateSigningKey for v4 Auth
  * @param {string} secretKey - requester's secretKey
  * @param {string} region - region included in request
@@ -20,16 +26,15 @@ function hashSignature(stringToSign, secretKey, algorithm) {
  * @param {string} [service] - To specify another service than s3
  * @return {string} signingKey - signingKey to calculate signature
  */
-function calculateSigningKey(secretKey, region, scopeDate, service) {
-    const dateKey = crypto.createHmac('sha256', `AWS4${secretKey}`)
-        .update(scopeDate, 'binary').digest();
-    const dateRegionKey = crypto.createHmac('sha256', dateKey)
-        .update(region, 'binary').digest();
-    const dateRegionServiceKey = crypto.createHmac('sha256', dateRegionKey)
-        .update(service || 's3', 'binary').digest();
-    const signingKey = crypto.createHmac('sha256', dateRegionServiceKey)
-        .update('aws4_request', 'binary').digest();
+export function calculateSigningKey(
+    secretKey: string,
+    region: string,
+    scopeDate: string,
+    service?: string
+): Buffer {
+    const dateKey = sha256(`AWS4${secretKey}`, scopeDate);
+    const dateRegionKey = sha256(dateKey, region);
+    const dateRegionServiceKey = sha256(dateRegionKey, service || 's3');
+    const signingKey = sha256(dateRegionServiceKey, 'aws4_request');
     return signingKey;
 }
-
-module.exports = { hashSignature, calculateSigningKey };
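The `sha256` helper collapses the four-step AWS Signature Version 4 key derivation (date, region, service, then the fixed `aws4_request` literal) into one readable chain. An illustrative call with made-up credentials:

```ts
import { calculateSigningKey, hashSignature } from './vaultUtilities';

// SigV4 derivation:
//   HMAC(HMAC(HMAC(HMAC('AWS4' + secret, date), region), service), 'aws4_request')
const signingKey = calculateSigningKey(
    'verySecretKey1', // secret key (illustrative)
    'us-east-1',      // region from the credential scope
    '20240101'        // scope date, YYYYMMDD
);

// The v2 helper, by contrast, is a single HMAC over the string-to-sign:
const v2sig = hashSignature('GET\n\n\n1369353600\n/bucket', 'verySecretKey1', 'SHA256');
```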
@@ -1,7 +1,5 @@
-'use strict'; // eslint-disable-line strict
-
-function algoCheck(signatureLength) {
-    let algo;
+export default function algoCheck(signatureLength: number) {
+    let algo: 'sha256' | 'sha1';
     // If the signature sent is 44 characters,
     // this means that sha256 was used:
     // 44 characters in base64
@@ -13,7 +11,6 @@ function algoCheck(signatureLength) {
     if (signatureLength === SHA1LEN) {
         algo = 'sha1';
     }
+    // @ts-ignore
     return algo;
 }
-
-module.exports = algoCheck;
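The length check works because base64 encodes a 32-byte SHA-256 HMAC as 44 characters and a 20-byte SHA-1 HMAC as 28 (presumably the value of `SHA1LEN`, which this hunk does not show). Illustrative calls:

```ts
import algoCheck from './algoCheck';

algoCheck(44); // 'sha256' — 32-byte digest => 44 base64 chars
algoCheck(28); // 'sha1'   — 20-byte digest => 28 base64 chars
algoCheck(30); // undefined at runtime, which is what the @ts-ignore papers over
```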
@@ -1,11 +0,0 @@
-'use strict'; // eslint-disable-line strict
-
-const headerAuthCheck = require('./headerAuthCheck');
-const queryAuthCheck = require('./queryAuthCheck');
-
-const authV2 = {
-    header: headerAuthCheck,
-    query: queryAuthCheck,
-};
-
-module.exports = authV2;

@@ -0,0 +1,2 @@
+export * as header from './headerAuthCheck';
+export * as query from './queryAuthCheck';
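The two-line namespace re-export preserves the `{ header, query }` shape of the deleted index. A consumer sketch (the parent module path is assumed):

```ts
import * as authV2 from '../v2'; // path assumed

declare const request: any;
declare const log: import('werelogs').Logger;

const headerResult = authV2.header.check(request, log, {});
const queryResult = authV2.query.check(request, log, {});
```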
@@ -1,9 +1,9 @@
-'use strict'; // eslint-disable-line strict
-const errors = require('../../errors');
+import { Logger } from 'werelogs';
+import errors from '../../errors';
 
 const epochTime = new Date('1970-01-01').getTime();
 
-function checkRequestExpiry(timestamp, log) {
+export default function checkRequestExpiry(timestamp: number, log: Logger) {
     // If timestamp is before epochTime, the request is invalid and return
     // errors.AccessDenied
     if (timestamp < epochTime) {
@@ -17,7 +17,7 @@ function checkRequestExpiry(timestamp, log) {
     log.trace('request timestamp', { requestTimestamp: timestamp });
     log.trace('current timestamp', { currentTimestamp: currentTime });
 
-    const fifteenMinutes = (15 * 60 * 1000);
+    const fifteenMinutes = 15 * 60 * 1000;
     if (currentTime - timestamp > fifteenMinutes) {
         log.trace('request timestamp is not within 15 minutes of current time');
         log.debug('request time too skewed', { timestamp });
@@ -32,5 +32,3 @@ function checkRequestExpiry(timestamp, log) {
 
     return undefined;
 }
-
-module.exports = checkRequestExpiry;
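A sketch of the two outcomes of the converted expiry check (the exact error object returned for skewed clocks is outside this excerpt):

```ts
import checkRequestExpiry from './checkRequestExpiry';

declare const log: import('werelogs').Logger;

// Inside the 15-minute skew window: returns undefined.
checkRequestExpiry(Date.now(), log);

// Pre-epoch timestamps yield errors.AccessDenied; timestamps more than
// 15 minutes old yield the "request time too skewed" error path.
const err = checkRequestExpiry(Date.now() - 16 * 60 * 1000, log);
```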
@@ -1,11 +1,14 @@
-'use strict'; // eslint-disable-line strict
-
-const utf8 = require('utf8');
-
-const getCanonicalizedAmzHeaders = require('./getCanonicalizedAmzHeaders');
-const getCanonicalizedResource = require('./getCanonicalizedResource');
-
-function constructStringToSign(request, data, log, clientType) {
+import { Logger } from 'werelogs';
+import utf8 from 'utf8';
+import getCanonicalizedAmzHeaders from './getCanonicalizedAmzHeaders';
+import getCanonicalizedResource from './getCanonicalizedResource';
+
+export default function constructStringToSign(
+    request: any,
+    data: { [key: string]: string },
+    log: Logger,
+    clientType?: any
+) {
     /*
     Build signature per AWS requirements:
     StringToSign = HTTP-Verb + '\n' +
@@ -23,11 +26,11 @@ function constructStringToSign(request, data, log, clientType) {
 
     const contentMD5 = headers['content-md5'] ?
         headers['content-md5'] : query['Content-MD5'];
-    stringToSign += (contentMD5 ? `${contentMD5}\n` : '\n');
+    stringToSign += contentMD5 ? `${contentMD5}\n` : '\n';
 
     const contentType = headers['content-type'] ?
         headers['content-type'] : query['Content-Type'];
-    stringToSign += (contentType ? `${contentType}\n` : '\n');
+    stringToSign += contentType ? `${contentType}\n` : '\n';
 
     /*
     AWS docs are conflicting on whether to include x-amz-date header here
@@ -42,5 +45,3 @@ function constructStringToSign(request, data, log, clientType) {
         + getCanonicalizedResource(request, clientType);
     return utf8.encode(stringToSign);
 }
-
-module.exports = constructStringToSign;
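A sketch of calling the converted builder; `request` stands in for an `http.IncomingMessage`-like object, and the resulting layout follows the AWS v2 rules quoted inside the function:

```ts
import { Logger } from 'werelogs';
import constructStringToSign from './constructStringToSign';

declare const request: any; // object with headers and a parsed query
declare const log: Logger;

// data carries the parsed query string, used as a fallback for
// Content-MD5 / Content-Type when the headers are absent.
const stringToSign = constructStringToSign(request, {}, log);
// Layout: HTTP-Verb \n Content-MD5 \n Content-Type \n Date \n
//         CanonicalizedAmzHeaders + CanonicalizedResource
```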
@@ -1,14 +1,12 @@
-'use strict'; // eslint-disable-line strict
-
-function getCanonicalizedAmzHeaders(headers, clientType) {
+export default function getCanonicalizedAmzHeaders(headers: Headers, clientType: string) {
     /*
     Iterate through headers and pull any headers that are x-amz headers.
     Need to include 'x-amz-date' here even though AWS docs
     ambiguous on this.
     */
     const filterFn = clientType === 'GCP' ?
-        val => val.substr(0, 7) === 'x-goog-' :
-        val => val.substr(0, 6) === 'x-amz-';
+        (val: string) => val.substr(0, 7) === 'x-goog-' :
+        (val: string) => val.substr(0, 6) === 'x-amz-';
     const amzHeaders = Object.keys(headers)
         .filter(filterFn)
         .map(val => [val.trim(), headers[val].trim()]);
@@ -43,5 +41,3 @@ function getCanonicalizedAmzHeaders(headers, clientType) {
         `${headerStr}${current[0]}:${current[1]}\n`,
     '');
 }
-
-module.exports = getCanonicalizedAmzHeaders;
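An illustrative input/output pair for the canonicalization above; the sorting and duplicate-merging steps live in the part of the function this hunk does not show:

```ts
// Hypothetical input headers:
const headers = {
    'x-amz-meta-b': ' 2 ',
    'x-amz-meta-a': '1',
    'content-type': 'text/plain', // filtered out: not an x-amz- header
};
// Expected canonical form after filtering, trimming and the final reducer:
//   'x-amz-meta-a:1\nx-amz-meta-b:2\n'
```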
@@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-
-const url = require('url');
+import * as url from 'url';
 
 const gcpSubresources = [
     'acl',
@@ -41,7 +39,7 @@ const awsSubresources = [
     'website',
 ];
 
-function getCanonicalizedResource(request, clientType) {
+export default function getCanonicalizedResource(request: any, clientType: string) {
     /*
     This variable is used to determine whether to insert
     a '?' or '&'. Once a query parameter is added to the resourceString,
@@ -117,5 +115,3 @@ function getCanonicalizedResource(request, clientType) {
     }
     return resourceString;
 }
-
-module.exports = getCanonicalizedResource;
@@ -1,12 +1,11 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../errors');
-const constants = require('../../constants');
-const constructStringToSign = require('./constructStringToSign');
-const checkRequestExpiry = require('./checkRequestExpiry');
-const algoCheck = require('./algoCheck');
-
-function check(request, log, data) {
+import { Logger } from 'werelogs';
+import errors from '../../errors';
+import * as constants from '../../constants';
+import constructStringToSign from './constructStringToSign';
+import checkRequestExpiry from './checkRequestExpiry';
+import algoCheck from './algoCheck';
+
+export function check(request: any, log: Logger, data: { [key: string]: string }) {
     log.trace('running header auth check');
     const headers = request.headers;
 
@@ -17,15 +16,19 @@ function check(request, log, data) {
     }
 
     // Check to make sure timestamp is within 15 minutes of current time
-    let timestamp = headers['x-amz-date'] ?
-        headers['x-amz-date'] : headers.date;
+    let timestamp = headers['x-amz-date']
+        ? headers['x-amz-date']
+        : headers.date;
     timestamp = Date.parse(timestamp);
     if (!timestamp) {
-        log.debug('missing or invalid date header',
-            { method: 'auth/v2/headerAuthCheck.check' });
-        return { err: errors.AccessDenied.
-            customizeDescription('Authentication requires a valid Date or ' +
-            'x-amz-date header') };
+        log.debug('missing or invalid date header', {
+            method: 'auth/v2/headerAuthCheck.check',
+        });
+        return {
+            err: errors.AccessDenied.customizeDescription(
+                'Authentication requires a valid Date or ' + 'x-amz-date header'
+            ),
+        };
     }
 
     const err = checkRequestExpiry(timestamp, log);
@@ -46,12 +49,15 @@ function check(request, log, data) {
         log.debug('invalid authorization header', { authInfo });
         return { err: errors.InvalidArgument };
     }
-    const accessKey = semicolonIndex > 4 ?
-        authInfo.substring(4, semicolonIndex).trim() : undefined;
+    const accessKey =
+        semicolonIndex > 4
+            ? authInfo.substring(4, semicolonIndex).trim()
+            : undefined;
     if (typeof accessKey !== 'string' || accessKey.length === 0) {
         log.trace('invalid authorization header', { authInfo });
         return { err: errors.MissingSecurityHeader };
     }
+    // @ts-ignore
     log.addDefaultFields({ accessKey });
 
     const signatureFromRequest = authInfo.substring(semicolonIndex + 1).trim();
@@ -80,5 +86,3 @@ function check(request, log, data) {
         },
     };
 }
-
-module.exports = { check };

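For reference, a minimal sketch of the V2 Authorization header that this `check` parses (the key and signature are illustrative, not real credentials); `substring(4, semicolonIndex)` skips the `AWS ` prefix, and everything after the separator is the base64 signature:

    Authorization: AWS AKIAIOSFODNN7EXAMPLE:frJIUN8DYpKDtOLCwo//yllqDzg=
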
@@ -1,11 +1,10 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../errors');
-const constants = require('../../constants');
-const algoCheck = require('./algoCheck');
-const constructStringToSign = require('./constructStringToSign');
-
-function check(request, log, data) {
+import { Logger } from 'werelogs';
+import errors from '../../errors';
+import * as constants from '../../constants';
+import algoCheck from './algoCheck';
+import constructStringToSign from './constructStringToSign';
+
+export function check(request: any, log: Logger, data: { [key: string]: string }) {
     log.trace('running query auth check');
     if (request.method === 'POST') {
         log.debug('query string auth not supported for post requests');
@@ -28,29 +27,32 @@ function check(request, log, data) {
     */
    const expirationTime = parseInt(data.Expires, 10) * 1000;
    if (Number.isNaN(expirationTime)) {
-        log.debug('invalid expires parameter',
-            { expires: data.Expires });
+        log.debug('invalid expires parameter', { expires: data.Expires });
        return { err: errors.MissingSecurityHeader };
    }
 
    const currentTime = Date.now();
 
-    const preSignedURLExpiry = process.env.PRE_SIGN_URL_EXPIRY
-        && !Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY)
+    const preSignedURLExpiry =
+        process.env.PRE_SIGN_URL_EXPIRY &&
+        !Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY)
        ? Number.parseInt(process.env.PRE_SIGN_URL_EXPIRY, 10)
        : constants.defaultPreSignedURLExpiry * 1000;
 
    if (expirationTime > currentTime + preSignedURLExpiry) {
-        log.debug('expires parameter too far in future',
-            { expires: request.query.Expires });
+        log.debug('expires parameter too far in future', {
+            expires: request.query.Expires,
+        });
        return { err: errors.AccessDenied };
    }
    if (currentTime > expirationTime) {
-        log.debug('current time exceeds expires time',
-            { expires: request.query.Expires });
+        log.debug('current time exceeds expires time', {
+            expires: request.query.Expires,
+        });
        return { err: errors.RequestTimeTooSkewed };
    }
    const accessKey = data.AWSAccessKeyId;
+    // @ts-ignore
    log.addDefaultFields({ accessKey });
 
    const signatureFromRequest = decodeURIComponent(data.Signature);
@@ -82,5 +84,3 @@ function check(request, log, data) {
         },
     };
 }
-
-module.exports = { check };

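Likewise, the query-string variant above consumes the classic V2 presigned-URL parameters; an illustrative (made-up) request that would flow through `check`:

    GET /examplebucket/photo.jpg
        ?AWSAccessKeyId=AKIAIOSFODNN7EXAMPLE
        &Expires=1454450650
        &Signature=vjbyPxybd...

`Expires` arrives as seconds since the epoch, hence the `* 1000`; a NaN value is rejected as MissingSecurityHeader, an expiry beyond the pre-signed-URL window as AccessDenied, and an already-past expiry as RequestTimeTooSkewed.
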
@@ -1,11 +0,0 @@
-'use strict'; // eslint-disable-line strict
-
-const headerAuthCheck = require('./headerAuthCheck');
-const queryAuthCheck = require('./queryAuthCheck');
-
-const authV4 = {
-    header: headerAuthCheck,
-    query: queryAuthCheck,
-};
-
-module.exports = authV4;

@@ -0,0 +1,2 @@
+export * as header from './headerAuthCheck';
+export * as query from './queryAuthCheck';

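A hedged sketch of how a caller consumes the two-line module above (the relative path is hypothetical):

    import * as authV4 from './authV4';
    // authV4.header.check(...) and authV4.query.check(...) replace the
    // object previously assembled by hand and assigned to module.exports.
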
@@ -1,5 +1,3 @@
-'use strict'; // eslint-disable-line strict
-
 /*
    AWS's URI encoding rules:
       URI encode every byte. Uri-Encode() must enforce the following rules:
@@ -19,7 +17,7 @@ See http://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-header-based-auth.html
 */
 
 // converts utf8 character to hex and pads "%" before every two hex digits
-function _toHexUTF8(char) {
+function _toHexUTF8(char: string) {
     const hexRep = Buffer.from(char, 'utf8').toString('hex').toUpperCase();
     let res = '';
     hexRep.split('').forEach((v, n) => {
@@ -32,7 +30,11 @@ function _toHexUTF8(char) {
     return res;
 }
 
-function awsURIencode(input, encodeSlash, noEncodeStar) {
+export default function awsURIencode(
+    input: string,
+    encodeSlash?: boolean,
+    noEncodeStar?: boolean
+) {
     const encSlash = encodeSlash === undefined ? true : encodeSlash;
     let encoded = '';
     /**
@@ -44,11 +46,15 @@ function awsURIencode(input, encodeSlash, noEncodeStar) {
     }
     for (let i = 0; i < input.length; i++) {
         let ch = input.charAt(i);
-        if ((ch >= 'A' && ch <= 'Z') ||
+        if (
+            (ch >= 'A' && ch <= 'Z') ||
             (ch >= 'a' && ch <= 'z') ||
             (ch >= '0' && ch <= '9') ||
-            ch === '_' || ch === '-' ||
-            ch === '~' || ch === '.') {
+            ch === '_' ||
+            ch === '-' ||
+            ch === '~' ||
+            ch === '.'
+        ) {
             encoded = encoded.concat(ch);
         } else if (ch === ' ') {
             encoded = encoded.concat('%20');
@@ -76,5 +82,3 @@ function awsURIencode(input, encodeSlash, noEncodeStar) {
     }
     return encoded;
 }
-
-module.exports = awsURIencode;

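A few illustrative calls against the new signature (expected outputs follow the AWS rules quoted at the top of the file; treat these as a sketch rather than recorded test vectors):

    awsURIencode('foo bar');           // 'foo%20bar' — space becomes %20, never '+'
    awsURIencode('a/b');               // 'a%2Fb' — '/' is encoded since encodeSlash defaults to true
    awsURIencode('a/b', false);        // 'a/b'
    awsURIencode('doc*', true, true);  // 'doc*' — noEncodeStar is the aws-sdk-java workaround
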
@@ -1,44 +0,0 @@
-'use strict'; // eslint-disable-line strict
-
-const crypto = require('crypto');
-
-const createCanonicalRequest = require('./createCanonicalRequest');
-
-/**
- * constructStringToSign - creates V4 stringToSign
- * @param {object} params - params object
- * @returns {string} - stringToSign
- */
-function constructStringToSign(params) {
-    const { request, signedHeaders, payloadChecksum, credentialScope, timestamp,
-        query, log, proxyPath } = params;
-    const path = proxyPath || request.path;
-
-    const canonicalReqResult = createCanonicalRequest({
-        pHttpVerb: request.method,
-        pResource: path,
-        pQuery: query,
-        pHeaders: request.headers,
-        pSignedHeaders: signedHeaders,
-        payloadChecksum,
-        service: params.awsService,
-    });
-
-    if (canonicalReqResult instanceof Error) {
-        if (log) {
-            log.error('error creating canonicalRequest');
-        }
-        return canonicalReqResult;
-    }
-    if (log) {
-        log.debug('constructed canonicalRequest', { canonicalReqResult });
-    }
-    const sha256 = crypto.createHash('sha256');
-    const canonicalHex = sha256.update(canonicalReqResult, 'binary')
-        .digest('hex');
-    const stringToSign = `AWS4-HMAC-SHA256\n${timestamp}\n` +
-        `${credentialScope}\n${canonicalHex}`;
-    return stringToSign;
-}
-
-module.exports = constructStringToSign;

@@ -0,0 +1,62 @@
+import * as crypto from 'crypto';
+import { Logger } from 'werelogs';
+import createCanonicalRequest from './createCanonicalRequest';
+
+/**
+ * constructStringToSign - creates V4 stringToSign
+ * @param {object} params - params object
+ * @returns {string} - stringToSign
+ */
+export default function constructStringToSign(params: {
+    request: any;
+    signedHeaders: any;
+    payloadChecksum: any;
+    credentialScope: string;
+    timestamp: string;
+    query: { [key: string]: string };
+    log?: Logger;
+    proxyPath: string;
+    awsService: string;
+}): string {
+    const {
+        request,
+        signedHeaders,
+        payloadChecksum,
+        credentialScope,
+        timestamp,
+        query,
+        log,
+        proxyPath,
+    } = params;
+    const path = proxyPath || request.path;
+
+    const canonicalReqResult = createCanonicalRequest({
+        pHttpVerb: request.method,
+        pResource: path,
+        pQuery: query,
+        pHeaders: request.headers,
+        pSignedHeaders: signedHeaders,
+        payloadChecksum,
+        service: params.awsService,
+    });
+
+    // TODO Why that line?
+    // @ts-ignore
+    if (canonicalReqResult instanceof Error) {
+        if (log) {
+            log.error('error creating canonicalRequest');
+        }
+        return canonicalReqResult;
+    }
+    if (log) {
+        log.debug('constructed canonicalRequest', { canonicalReqResult });
+    }
+    const sha256 = crypto.createHash('sha256');
+    const canonicalHex = sha256
+        .update(canonicalReqResult, 'binary')
+        .digest('hex');
+    const stringToSign =
+        `AWS4-HMAC-SHA256\n${timestamp}\n` +
+        `${credentialScope}\n${canonicalHex}`;
+    return stringToSign;
+}

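Spelled out, the template at the end of the new module yields the standard V4 string-to-sign; with illustrative values:

    AWS4-HMAC-SHA256
    20160202T220410Z
    20160202/us-east-1/s3/aws4_request
    <hex-encoded SHA-256 of the canonical request>
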
@@ -1,27 +1,33 @@
-'use strict'; // eslint-disable-line strict
-
-const awsURIencode = require('./awsURIencode');
-const crypto = require('crypto');
-const queryString = require('querystring');
+import awsURIencode from './awsURIencode';
+import * as crypto from 'crypto';
+import * as queryString from 'querystring';
 
 /**
  * createCanonicalRequest - creates V4 canonical request
- * @param {object} params - contains pHttpVerb (request type),
+ * @param params - contains pHttpVerb (request type),
  * pResource (parsed from URL), pQuery (request query),
  * pHeaders (request headers), pSignedHeaders (signed headers from request),
  * payloadChecksum (from request)
- * @returns {string} - canonicalRequest
+ * @returns - canonicalRequest
  */
-function createCanonicalRequest(params) {
+export default function createCanonicalRequest(
+    params: {
+        pHttpVerb: string;
+        pResource: string;
+        pQuery: { [key: string]: string };
+        pHeaders: any;
+        pSignedHeaders: any;
+        service: string;
+        payloadChecksum: string;
+    }
+) {
     const pHttpVerb = params.pHttpVerb;
     const pResource = params.pResource;
     const pQuery = params.pQuery;
     const pHeaders = params.pHeaders;
     const pSignedHeaders = params.pSignedHeaders;
     const service = params.service;
 
     let payloadChecksum = params.payloadChecksum;
 
     if (!payloadChecksum) {
         if (pHttpVerb === 'GET') {
             payloadChecksum = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b' +
@@ -34,7 +40,7 @@ function createCanonicalRequest(params) {
         if (/aws-sdk-java\/[0-9.]+/.test(pHeaders['user-agent'])) {
             notEncodeStar = true;
         }
-        let payload = queryString.stringify(pQuery, null, null, {
+        let payload = queryString.stringify(pQuery, undefined, undefined, {
             encodeURIComponent: input => awsURIencode(input, true,
                 notEncodeStar),
         });
@@ -61,11 +67,11 @@ function createCanonicalRequest(params) {
 
     // signed headers
     const signedHeadersList = pSignedHeaders.split(';');
-    signedHeadersList.sort((a, b) => a.localeCompare(b));
+    signedHeadersList.sort((a: any, b: any) => a.localeCompare(b));
     const signedHeaders = signedHeadersList.join(';');
 
     // canonical headers
-    const canonicalHeadersList = signedHeadersList.map(signedHeader => {
+    const canonicalHeadersList = signedHeadersList.map((signedHeader: any) => {
         if (pHeaders[signedHeader] !== undefined) {
             const trimmedHeader = pHeaders[signedHeader]
                 .trim().replace(/\s+/g, ' ');
@@ -87,5 +93,3 @@ function createCanonicalRequest(params) {
         `${signedHeaders}\n${payloadChecksum}`;
     return canonicalRequest;
 }
-
-module.exports = createCanonicalRequest;

@@ -1,27 +1,32 @@
-'use strict'; // eslint-disable-line strict
-
-const errors = require('../../../lib/errors');
-const constants = require('../../constants');
-
-const constructStringToSign = require('./constructStringToSign');
-const checkTimeSkew = require('./timeUtils').checkTimeSkew;
-const convertUTCtoISO8601 = require('./timeUtils').convertUTCtoISO8601;
-const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs;
-const extractAuthItems = require('./validateInputs').extractAuthItems;
-const validateCredentials = require('./validateInputs').validateCredentials;
-const areSignedHeadersComplete =
-    require('./validateInputs').areSignedHeadersComplete;
+import { Logger } from 'werelogs';
+import errors from '../../../lib/errors';
+import * as constants from '../../constants';
+import constructStringToSign from './constructStringToSign';
+import {
+    checkTimeSkew,
+    convertUTCtoISO8601,
+    convertAmzTimeToMs,
+} from './timeUtils';
+import {
+    extractAuthItems,
+    validateCredentials,
+    areSignedHeadersComplete,
+} from './validateInputs';
 
 /**
  * V4 header auth check
- * @param {object} request - HTTP request object
- * @param {object} log - logging object
- * @param {object} data - Parameters from queryString parsing or body of
+ * @param request - HTTP request object
+ * @param log - logging object
+ * @param data - Parameters from queryString parsing or body of
  * POST request
- * @param {string} awsService - Aws service ('iam' or 's3')
- * @return {callback} calls callback
+ * @param awsService - Aws service ('iam' or 's3')
  */
-function check(request, log, data, awsService) {
+export function check(
+    request: any,
+    log: Logger,
+    data: { [key: string]: string },
+    awsService: string
+) {
     log.trace('running header auth check');
 
     const token = request.headers['x-amz-security-token'];
@@ -51,8 +56,9 @@ function check(request, log, data, awsService) {
     if (payloadChecksum === 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
         log.trace('requesting streaming v4 auth');
         if (request.method !== 'PUT') {
-            log.debug('streaming v4 auth for put only',
-                { method: 'auth/v4/headerAuthCheck.check' });
+            log.debug('streaming v4 auth for put only', {
+                method: 'auth/v4/headerAuthCheck.check',
+            });
             return { err: errors.InvalidArgument };
         }
         if (!request.headers['x-amz-decoded-content-length']) {
@@ -62,24 +68,27 @@ function check(request, log, data, awsService) {
 
     log.trace('authorization header from request', { authHeader });
 
-    const signatureFromRequest = authHeaderItems.signatureFromRequest;
-    const credentialsArr = authHeaderItems.credentialsArr;
-    const signedHeaders = authHeaderItems.signedHeaders;
+    const signatureFromRequest = authHeaderItems.signatureFromRequest!;
+    const credentialsArr = authHeaderItems.credentialsArr!;
+    const signedHeaders = authHeaderItems.signedHeaders!;
 
     if (!areSignedHeadersComplete(signedHeaders, request.headers)) {
         log.debug('signedHeaders are incomplete', { signedHeaders });
         return { err: errors.AccessDenied };
     }
 
-    let timestamp;
+    let timestamp: string | undefined;
     // check request timestamp
     const xAmzDate = request.headers['x-amz-date'];
     if (xAmzDate) {
         const xAmzDateArr = xAmzDate.split('T');
         // check that x-amz- date has the correct format and after epochTime
-        if (xAmzDateArr.length === 2 && xAmzDateArr[0].length === 8
-            && xAmzDateArr[1].length === 7
-            && Number.parseInt(xAmzDateArr[0], 10) > 19700101) {
+        if (
+            xAmzDateArr.length === 2 &&
+            xAmzDateArr[0].length === 8 &&
+            xAmzDateArr[1].length === 7 &&
+            Number.parseInt(xAmzDateArr[0], 10) > 19700101
+        ) {
             // format of x-amz- date is ISO 8601: YYYYMMDDTHHMMSSZ
             timestamp = request.headers['x-amz-date'];
         }
@@ -87,18 +96,27 @@ function check(request, log, data, awsService) {
         timestamp = convertUTCtoISO8601(request.headers.date);
     }
     if (!timestamp) {
-        log.debug('missing or invalid date header',
-            { method: 'auth/v4/headerAuthCheck.check' });
-        return { err: errors.AccessDenied.
-            customizeDescription('Authentication requires a valid Date or ' +
-            'x-amz-date header') };
+        log.debug('missing or invalid date header', {
+            method: 'auth/v4/headerAuthCheck.check',
+        });
+        return {
+            err: errors.AccessDenied.customizeDescription(
+                'Authentication requires a valid Date or ' + 'x-amz-date header'
+            ),
+        };
     }
 
-    const validationResult = validateCredentials(credentialsArr, timestamp,
-        log);
+    const validationResult = validateCredentials(
+        credentialsArr,
+        timestamp,
+        log
+    );
     if (validationResult instanceof Error) {
-        log.debug('credentials in improper format', { credentialsArr,
-            timestamp, validationResult });
+        log.debug('credentials in improper format', {
+            credentialsArr,
+            timestamp,
+            validationResult,
+        });
         return { err: validationResult };
     }
     // credentialsArr is [accessKey, date, region, aws-service, aws4_request]
@@ -121,20 +139,23 @@ function check(request, log, data, awsService) {
     // expiry is as set out in the policy.
 
     // 15 minutes in seconds
-    const expiry = (15 * 60);
+    const expiry = 15 * 60;
     const isTimeSkewed = checkTimeSkew(timestamp, expiry, log);
     if (isTimeSkewed) {
         return { err: errors.RequestTimeTooSkewed };
     }
 
-    let proxyPath = null;
+    let proxyPath: string | null = null;
     if (request.headers.proxy_path) {
         try {
             proxyPath = decodeURIComponent(request.headers.proxy_path);
         } catch (err) {
             log.debug('invalid proxy_path header', { proxyPath, err });
-            return { err: errors.InvalidArgument.customizeDescription(
-                'invalid proxy_path header') };
+            return {
+                err: errors.InvalidArgument.customizeDescription(
+                    'invalid proxy_path header'
+                ),
+            };
         }
     }
 
@@ -147,14 +168,15 @@ function check(request, log, data, awsService) {
         timestamp,
         payloadChecksum,
         awsService: service,
-        proxyPath,
+        proxyPath: proxyPath!,
     });
     log.trace('constructed stringToSign', { stringToSign });
+    // TODO Why?
+    // @ts-ignore
     if (stringToSign instanceof Error) {
         return { err: stringToSign };
     }
 
-
     return {
         err: null,
         params: {
@@ -178,5 +200,3 @@ function check(request, log, data, awsService) {
         },
     };
 }
-
-module.exports = { check };

@@ -1,24 +1,19 @@
-'use strict'; // eslint-disable-line strict
-
-const constants = require('../../constants');
-const errors = require('../../errors');
-
-const constructStringToSign = require('./constructStringToSign');
-const checkTimeSkew = require('./timeUtils').checkTimeSkew;
-const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs;
-const validateCredentials = require('./validateInputs').validateCredentials;
-const extractQueryParams = require('./validateInputs').extractQueryParams;
-const areSignedHeadersComplete =
-    require('./validateInputs').areSignedHeadersComplete;
+import { Logger } from 'werelogs';
+import * as constants from '../../constants';
+import errors from '../../errors';
+
+import constructStringToSign from './constructStringToSign';
+import { checkTimeSkew, convertAmzTimeToMs } from './timeUtils';
+import { validateCredentials, extractQueryParams } from './validateInputs';
+import { areSignedHeadersComplete } from './validateInputs';
 
 /**
  * V4 query auth check
- * @param {object} request - HTTP request object
- * @param {object} log - logging object
- * @param {object} data - Contain authentification params (GET or POST data)
- * @return {callback} calls callback
+ * @param request - HTTP request object
+ * @param log - logging object
+ * @param data - Contain authentification params (GET or POST data)
  */
-function check(request, log, data) {
+export function check(request: any, log: Logger, data: { [key: string]: string }) {
     const authParams = extractQueryParams(data, log);
 
     if (Object.keys(authParams).length !== 5) {
@@ -33,22 +28,24 @@ function check(request, log, data) {
         return { err: errors.InvalidToken };
     }
 
-    const signedHeaders = authParams.signedHeaders;
-    const signatureFromRequest = authParams.signatureFromRequest;
-    const timestamp = authParams.timestamp;
-    const expiry = authParams.expiry;
-    const credential = authParams.credential;
+    const signedHeaders = authParams.signedHeaders!;
+    const signatureFromRequest = authParams.signatureFromRequest!;
+    const timestamp = authParams.timestamp!;
+    const expiry = authParams.expiry!;
+    const credential = authParams.credential!;
 
     if (!areSignedHeadersComplete(signedHeaders, request.headers)) {
         log.debug('signedHeaders are incomplete', { signedHeaders });
         return { err: errors.AccessDenied };
     }
 
-    const validationResult = validateCredentials(credential, timestamp,
-        log);
+    const validationResult = validateCredentials(credential, timestamp, log);
     if (validationResult instanceof Error) {
-        log.debug('credentials in improper format', { credential,
-            timestamp, validationResult });
+        log.debug('credentials in improper format', {
+            credential,
+            timestamp,
+            validationResult,
+        });
         return { err: validationResult };
     }
     const accessKey = credential[0];
@@ -62,14 +59,17 @@ function check(request, log, data) {
         return { err: errors.RequestTimeTooSkewed };
     }
 
-    let proxyPath = null;
+    let proxyPath: string | null = null;
     if (request.headers.proxy_path) {
         try {
             proxyPath = decodeURIComponent(request.headers.proxy_path);
         } catch (err) {
             log.debug('invalid proxy_path header', { proxyPath });
-            return { err: errors.InvalidArgument.customizeDescription(
-                'invalid proxy_path header') };
+            return {
+                err: errors.InvalidArgument.customizeDescription(
+                    'invalid proxy_path header'
+                ),
+            };
         }
     }
 
@@ -95,11 +95,12 @@ function check(request, log, data) {
         signedHeaders,
         payloadChecksum,
         timestamp,
-        credentialScope:
-            `${scopeDate}/${region}/${service}/${requestType}`,
+        credentialScope: `${scopeDate}/${region}/${service}/${requestType}`,
         awsService: service,
-        proxyPath,
+        proxyPath: proxyPath!,
     });
+    // TODO Why?
+    // @ts-ignore
     if (stringToSign instanceof Error) {
         return { err: stringToSign };
     }
@@ -122,5 +123,3 @@ function check(request, log, data) {
         },
     };
 }
-
-module.exports = { check };

@@ -1,17 +1,38 @@
-const { Transform } = require('stream');
-
-const async = require('async');
-const errors = require('../../../errors');
-
-const constructChunkStringToSign = require('./constructChunkStringToSign');
+import { Transform } from 'stream';
+import async from 'async';
+import { Logger } from 'werelogs';
+import { Callback } from '../../backends/in_memory/types';
+import Vault from '../../Vault';
+import errors from '../../../errors';
+import constructChunkStringToSign from './constructChunkStringToSign';
 
 /**
  * This class is designed to handle the chunks sent in a streaming
  * v4 Auth request
  */
-class V4Transform extends Transform {
+export default class V4Transform extends Transform {
+    log: Logger;
+    cb: Callback;
+    accessKey: string;
+    region: string;
+    /** Date parsed from headers in ISO8601. */
+    scopeDate: string;
+    /** Date parsed from headers in ISO8601. */
+    timestamp: string;
+    /** Items from auth header, plus the string 'aws4_request' joined with '/': timestamp/region/aws-service/aws4_request */
+    credentialScope: string;
+    lastSignature?: string;
+    currentSignature?: string;
+    haveMetadata: boolean;
+    seekingDataSize: number;
+    currentData?: any;
+    dataCursor: number;
+    currentMetadata: Buffer[];
+    lastPieceDone: boolean;
+    lastChunk: boolean;
+    vault: Vault;
+
     /**
-     * @constructor
      * @param {object} streamingV4Params - info for chunk authentication
     * @param {string} streamingV4Params.accessKey - requester's accessKey
     * @param {string} streamingV4Params.signatureFromRequest - signature
@@ -27,9 +48,27 @@ class V4Transform extends Transform {
      * @param {object} log - logger object
      * @param {function} cb - callback to api
      */
-    constructor(streamingV4Params, vault, log, cb) {
-        const { accessKey, signatureFromRequest, region, scopeDate, timestamp,
-            credentialScope } = streamingV4Params;
+    constructor(
+        streamingV4Params: {
+            accessKey: string,
+            signatureFromRequest: string,
+            region: string,
+            scopeDate: string,
+            timestamp: string,
+            credentialScope: string
+        },
+        vault: Vault,
+        log: Logger,
+        cb: Callback
+    ) {
+        const {
+            accessKey,
+            signatureFromRequest,
+            region,
+            scopeDate,
+            timestamp,
+            credentialScope,
+        } = streamingV4Params;
         super({});
         this.log = log;
         this.cb = cb;
@@ -55,8 +94,8 @@ class V4Transform extends Transform {
 
     /**
      * This function will parse the metadata portion of the chunk
-     * @param {Buffer} remainingChunk - chunk sent from _transform
-     * @return {object} response - if error, will return 'err' key with
+     * @param remainingChunk - chunk sent from _transform
+     * @return response - if error, will return 'err' key with
      * arsenal error value.
     * if incomplete metadata, will return 'completeMetadata' key with
     * value false
@@ -64,7 +103,7 @@ class V4Transform extends Transform {
      * value true and the key 'unparsedChunk' with the remaining chunk without
     * the parsed metadata piece
     */
-    _parseMetadata(remainingChunk) {
+    _parseMetadata(remainingChunk: Buffer) {
        let remainingPlusStoredMetadata = remainingChunk;
        // have metadata pieces so need to add to the front of
        // remainingChunk
@@ -79,33 +118,34 @@ class V4Transform extends Transform {
             this.currentMetadata.push(remainingPlusStoredMetadata);
             return { completeMetadata: false };
         }
-        let fullMetadata = remainingPlusStoredMetadata.slice(0,
-            lineBreakIndex);
+        let fullMetadata = remainingPlusStoredMetadata.slice(0, lineBreakIndex);
 
         // handle extra line break on end of data chunk
         if (fullMetadata.length === 0) {
-            const chunkWithoutLeadingLineBreak = remainingPlusStoredMetadata
-                .slice(2);
+            const chunkWithoutLeadingLineBreak =
+                remainingPlusStoredMetadata.slice(2);
             // find second line break
             lineBreakIndex = chunkWithoutLeadingLineBreak.indexOf('\r\n');
             if (lineBreakIndex < 0) {
                 this.currentMetadata.push(chunkWithoutLeadingLineBreak);
                 return { completeMetadata: false };
             }
-            fullMetadata = chunkWithoutLeadingLineBreak.slice(0,
-                lineBreakIndex);
+            fullMetadata = chunkWithoutLeadingLineBreak.slice(
+                0,
+                lineBreakIndex
+            );
         }
 
         const splitMeta = fullMetadata.toString().split(';');
         this.log.trace('parsed full metadata for chunk', { splitMeta });
         if (splitMeta.length !== 2) {
-            this.log.trace('chunk body did not contain correct ' +
-                'metadata format');
+            this.log.trace(
+                'chunk body did not contain correct ' + 'metadata format'
+            );
             return { err: errors.InvalidArgument };
         }
-        let dataSize = splitMeta[0];
         // chunk-size is sent in hex
-        dataSize = Number.parseInt(dataSize, 16);
+        let dataSize = Number.parseInt(splitMeta[0], 16);
         if (Number.isNaN(dataSize)) {
             this.log.trace('chunk body did not contain valid size');
             return { err: errors.InvalidArgument };
@@ -132,24 +172,28 @@ class V4Transform extends Transform {
             completeMetadata: true,
             // start slice at lineBreak plus 2 to remove line break at end of
             // metadata piece since length of '\r\n' is 2
-            unparsedChunk: remainingPlusStoredMetadata
-                .slice(lineBreakIndex + 2),
+            unparsedChunk: remainingPlusStoredMetadata.slice(
+                lineBreakIndex + 2
+            ),
         };
     }
 
     /**
      * Build the stringToSign and authenticate the chunk
-     * @param {Buffer} dataToSend - chunk sent from _transform or null
+     * @param dataToSend - chunk sent from _transform or null
      * if last chunk without data
-     * @param {function} done - callback to _transform
-     * @return {function} executes callback with err if applicable
+     * @param done - callback to _transform
+     * @return executes callback with err if applicable
      */
-    _authenticate(dataToSend, done) {
+    _authenticate(dataToSend: Buffer | null, done: (err?: Error) => void) {
         // use prior sig to construct new string to sign
-        const stringToSign = constructChunkStringToSign(this.timestamp,
-            this.credentialScope, this.lastSignature, dataToSend);
-        this.log.trace('constructed chunk string to sign',
-            { stringToSign });
+        const stringToSign = constructChunkStringToSign(
+            this.timestamp,
+            this.credentialScope,
+            this.lastSignature!,
+            dataToSend
+        );
+        this.log.trace('constructed chunk string to sign', { stringToSign });
         // once used prior sig to construct string to sign, reassign
         // lastSignature to current signature
         this.lastSignature = this.currentSignature;
@@ -165,28 +209,30 @@ class V4Transform extends Transform {
                 credentialScope: this.credentialScope,
             },
         };
-        return this.vault.authenticateV4Request(vaultParams, null, err => {
+        return this.vault.authenticateV4Request(vaultParams, null, (err: Error) => {
             if (err) {
-                this.log.trace('err from vault on streaming v4 auth',
-                    { error: err, paramsSentToVault: vaultParams.data });
+                this.log.trace('err from vault on streaming v4 auth', {
+                    error: err,
+                    paramsSentToVault: vaultParams.data,
+                });
                 return done(err);
             }
             return done();
         });
     }
 
+    // TODO encoding unused. Why?
     /**
      * This function will parse the chunk into metadata and data,
     * use the metadata to authenticate with vault and send the
     * data on to be stored if authentication passes
     *
-     * @param {Buffer} chunk - chunk from request body
-     * @param {string} encoding - Data encoding
-     * @param {function} callback - Callback(err, justDataChunk, encoding)
-     * @return {function }executes callback with err if applicable
+     * @param chunk - chunk from request body
+     * @param encoding - Data encoding
+     * @param callback - Callback(err, justDataChunk, encoding)
+     * @return executes callback with err if applicable
     */
-    _transform(chunk, encoding, callback) {
+    _transform(chunk: Buffer, _encoding: string, callback: (err?: Error) => void) {
        // 'chunk' here is the node streaming chunk
        // transfer-encoding chunks should be of the format:
        // string(IntHexBase(chunk-size)) + ";chunk-signature=" +
@@ -195,9 +241,10 @@ class V4Transform extends Transform {
 
         if (this.lastPieceDone) {
             const slice = chunk.slice(0, 10);
-            this.log.trace('received chunk after end.' +
-                'See first 10 bytes of chunk',
-                { chunk: slice.toString() });
+            this.log.trace(
+                'received chunk after end.' + 'See first 10 bytes of chunk',
+                { chunk: slice.toString() }
+            );
             return callback();
         }
         let unparsedChunk = chunk;
@@ -206,10 +253,11 @@ class V4Transform extends Transform {
             // test function
             () => chunkLeftToEvaluate,
             // async function
-            done => {
+            (done) => {
                 if (!this.haveMetadata) {
-                    this.log.trace('do not have metadata so calling ' +
-                        '_parseMetadata');
+                    this.log.trace(
+                        'do not have metadata so calling ' + '_parseMetadata'
+                    );
                     // need to parse our metadata
                     const parsedMetadataResults =
                         this._parseMetadata(unparsedChunk);
@@ -223,11 +271,11 @@ class V4Transform extends Transform {
                     }
                     // have metadata so reset unparsedChunk to remaining
                     // without metadata piece
-                    unparsedChunk = parsedMetadataResults.unparsedChunk;
+                    unparsedChunk = parsedMetadataResults.unparsedChunk!;
                 }
                 if (this.lastChunk) {
                     this.log.trace('authenticating final chunk with no data');
-                    return this._authenticate(null, err => {
+                    return this._authenticate(null, (err) => {
                         if (err) {
                             return done(err);
                         }
@@ -246,17 +294,18 @@ class V4Transform extends Transform {
                 }
                 // parse just the next data piece without \r\n at the end
                 // (therefore, minus 2)
-                const nextDataPiece =
-                    unparsedChunk.slice(0, this.seekingDataSize - 2);
+                const nextDataPiece = unparsedChunk.slice(
+                    0,
+                    this.seekingDataSize - 2
+                );
                 // add parsed data piece to other currentData pieces
                 // so that this.currentData is the full data piece
                 nextDataPiece.copy(this.currentData, this.dataCursor);
-                return this._authenticate(this.currentData, err => {
+                return this._authenticate(this.currentData, (err) => {
                     if (err) {
                         return done(err);
                     }
-                    unparsedChunk =
-                        unparsedChunk.slice(this.seekingDataSize);
+                    unparsedChunk = unparsedChunk.slice(this.seekingDataSize);
                     this.push(this.currentData);
                     this.haveMetadata = false;
                     this.seekingDataSize = -1;
@@ -267,15 +316,13 @@ class V4Transform extends Transform {
                 });
             },
             // final callback
-            err => {
+            (err) => {
                 if (err) {
-                    return this.cb(err);
+                    return this.cb(err as any);
                 }
                 // get next chunk
                 return callback();
-            },
+            }
         );
     }
 }
-
-module.exports = V4Transform;

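For orientation, one aws-chunked piece as described by the `_transform` comment looks like this on the wire (the signature value is illustrative):

    400;chunk-signature=0055627c9e194cb4542bae2aa5492e3c1575bbb81b612b7d234b86a503ef5497\r\n
    <0x400 bytes of payload>\r\n

`_parseMetadata` splits on the first `\r\n` and then on `;` to recover the hex size and the signature, and `_authenticate` chains each chunk's signature onto the previous one via `constructChunkStringToSign`.
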
@@ -1,32 +0,0 @@
-const crypto = require('crypto');
-
-const constants = require('../../../constants');
-
-/**
- * Constructs stringToSign for chunk
- * @param {string} timestamp - date parsed from headers
- * in ISO 8601 format: YYYYMMDDTHHMMSSZ
- * @param {string} credentialScope - items from auth
- * header plus the string 'aws4_request' joined with '/':
- * timestamp/region/aws-service/aws4_request
- * @param {string} lastSignature - signature from headers or prior chunk
- * @param {string} justDataChunk - data portion of chunk
- * @returns {string} stringToSign
- */
-function constructChunkStringToSign(timestamp,
-    credentialScope, lastSignature, justDataChunk) {
-    let currentChunkHash;
-    // for last chunk, there will be no data, so use emptyStringHash
-    if (!justDataChunk) {
-        currentChunkHash = constants.emptyStringHash;
-    } else {
-        currentChunkHash = crypto.createHash('sha256');
-        currentChunkHash = currentChunkHash
-            .update(justDataChunk, 'binary').digest('hex');
-    }
-    return `AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
-        `${credentialScope}\n${lastSignature}\n` +
-        `${constants.emptyStringHash}\n${currentChunkHash}`;
-}
-
-module.exports = constructChunkStringToSign;

@@ -0,0 +1,36 @@
+import * as crypto from 'crypto';
+import * as constants from '../../../constants';
+
+/**
+ * Constructs stringToSign for chunk
+ * @param timestamp - date parsed from headers in ISO 8601 format: YYYYMMDDTHHMMSSZ
+ * @param credentialScope - items from auth header plus the string
+ * 'aws4_request' joined with '/':
+ * timestamp/region/aws-service/aws4_request
+ * @param lastSignature - signature from headers or prior chunk
+ * @param justDataChunk - data portion of chunk
+ */
+export default function constructChunkStringToSign(
+    timestamp: string,
+    credentialScope: string,
+    lastSignature: string,
+    justDataChunk: string | Buffer | null
+): string {
+    let currentChunkHash: string;
+    // for last chunk, there will be no data, so use emptyStringHash
+    if (!justDataChunk) {
+        currentChunkHash = constants.emptyStringHash;
+    } else {
+        let hash = crypto.createHash('sha256');
+        currentChunkHash = (
+            typeof justDataChunk === 'string'
+                ? hash.update(justDataChunk, 'binary')
+                : hash.update(justDataChunk)
+        ).digest('hex');
+    }
+    return (
+        `AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
+        `${credentialScope}\n${lastSignature}\n` +
+        `${constants.emptyStringHash}\n${currentChunkHash}`
+    );
+}

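With illustrative values filled in, the string returned above is:

    AWS4-HMAC-SHA256-PAYLOAD
    20160202T220410Z
    20160202/us-east-1/s3/aws4_request
    <signature of the previous chunk>
    <SHA-256 of the empty string>
    <SHA-256 of this chunk's data>
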
@@ -1,12 +1,11 @@
-'use strict'; // eslint-disable-line strict
+import { Logger } from 'werelogs';
 
 /**
  * Convert timestamp to milliseconds since Unix Epoch
- * @param {string} timestamp of ISO8601Timestamp format without
+ * @param timestamp of ISO8601Timestamp format without
  * dashes or colons, e.g. 20160202T220410Z
- * @return {number} number of milliseconds since Unix Epoch
 */
-function convertAmzTimeToMs(timestamp) {
+export function convertAmzTimeToMs(timestamp: string) {
     const arr = timestamp.split('');
     // Convert to YYYY-MM-DDTHH:mm:ss.sssZ
     const ISO8601time = `${arr.slice(0, 4).join('')}-${arr[4]}${arr[5]}` +
@@ -15,13 +14,12 @@ function convertAmzTimeToMs(timestamp) {
     return Date.parse(ISO8601time);
 }
 
-
 /**
  * Convert UTC timestamp to ISO 8601 timestamp
- * @param {string} timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT
- * @return {string} ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ
+ * @param timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT
+ * @return ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ
  */
-function convertUTCtoISO8601(timestamp) {
+export function convertUTCtoISO8601(timestamp: string | number) {
     // convert to ISO string: YYYY-MM-DDTHH:mm:ss.sssZ.
     const converted = new Date(timestamp).toISOString();
     // Remove "-"s and "."s and milliseconds
@@ -30,13 +28,13 @@ function convertUTCtoISO8601(timestamp) {
 
 /**
  * Check whether timestamp predates request or is too old
- * @param {string} timestamp of ISO8601Timestamp format without
+ * @param timestamp of ISO8601Timestamp format without
  * dashes or colons, e.g. 20160202T220410Z
- * @param {number} expiry - number of seconds signature should be valid
- * @param {object} log - log for request
- * @return {boolean} true if there is a time problem
+ * @param expiry - number of seconds signature should be valid
+ * @param log - log for request
+ * @return true if there is a time problem
  */
-function checkTimeSkew(timestamp, expiry, log) {
+export function checkTimeSkew(timestamp: string, expiry: number, log: Logger) {
     const currentTime = Date.now();
     const fifteenMinutes = (15 * 60 * 1000);
     const parsedTimestamp = convertAmzTimeToMs(timestamp);
@@ -56,5 +54,3 @@ function checkTimeSkew(timestamp, expiry, log) {
     }
     return false;
 }
-
-module.exports = { convertAmzTimeToMs, convertUTCtoISO8601, checkTimeSkew };

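A quick sketch of the conversions above (the first result is simply what `Date.parse` yields for that instant):

    convertAmzTimeToMs('20160202T220410Z');
    // => 1454450650000, i.e. Date.parse('2016-02-02T22:04:10Z')
    convertUTCtoISO8601('Fri, 10 Feb 2012 21:34:55 GMT');
    // => '20120210T213455Z'
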
@ -1,17 +1,19 @@
|
||||||
'use strict'; // eslint-disable-line strict
|
import { Logger } from 'werelogs';
|
||||||
|
import errors from '../../../lib/errors';
|
||||||
const errors = require('../../../lib/errors');
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Validate Credentials
|
* Validate Credentials
|
||||||
* @param {array} credentials - contains accessKey, scopeDate,
|
* @param credentials - contains accessKey, scopeDate,
|
||||||
* region, service, requestType
|
* region, service, requestType
|
||||||
* @param {string} timestamp - timestamp from request in
|
* @param timestamp - timestamp from request in
|
||||||
* the format of ISO 8601: YYYYMMDDTHHMMSSZ
|
* the format of ISO 8601: YYYYMMDDTHHMMSSZ
|
||||||
* @param {object} log - logging object
|
* @param log - logging object
|
||||||
* @return {boolean} true if credentials are correct format, false if not
|
|
||||||
*/
|
*/
|
||||||
function validateCredentials(credentials, timestamp, log) {
|
export function validateCredentials(
|
||||||
|
credentials: [string, string, string, string, string],
|
||||||
|
timestamp: string,
|
||||||
|
log: Logger
|
||||||
|
): Error | {} {
|
||||||
if (!Array.isArray(credentials) || credentials.length !== 5) {
|
if (!Array.isArray(credentials) || credentials.length !== 5) {
|
||||||
log.warn('credentials in improper format', { credentials });
|
log.warn('credentials in improper format', { credentials });
|
||||||
return errors.InvalidArgument;
|
return errors.InvalidArgument;
|
||||||
|
@ -37,20 +39,27 @@ function validateCredentials(credentials, timestamp, log) {
|
||||||
// convert timestamp to format of scopeDate YYYYMMDD
|
// convert timestamp to format of scopeDate YYYYMMDD
|
||||||
const timestampDate = timestamp.split('T')[0];
|
const timestampDate = timestamp.split('T')[0];
|
||||||
if (scopeDate.length !== 8 || scopeDate !== timestampDate) {
|
if (scopeDate.length !== 8 || scopeDate !== timestampDate) {
|
||||||
log.warn('scope date must be the same date as the timestamp date',
|
log.warn('scope date must be the same date as the timestamp date', {
|
||||||
{ scopeDate, timestampDate });
|
scopeDate,
|
||||||
|
timestampDate,
|
||||||
|
});
|
||||||
return errors.RequestTimeTooSkewed;
|
return errors.RequestTimeTooSkewed;
|
||||||
}
|
}
|
||||||
if (service !== 's3' && service !== 'iam' && service !== 'ring' &&
|
if (
|
||||||
service !== 'sts') {
|
service !== 's3' &&
|
||||||
|
service !== 'iam' &&
|
||||||
|
service !== 'ring' &&
|
||||||
|
service !== 'sts'
|
||||||
|
) {
|
||||||
log.warn('service in credentials is not one of s3/iam/ring/sts', {
|
log.warn('service in credentials is not one of s3/iam/ring/sts', {
|
||||||
service,
|
service,
|
||||||
});
|
});
|
||||||
return errors.InvalidArgument;
|
return errors.InvalidArgument;
|
||||||
}
|
}
|
||||||
if (requestType !== 'aws4_request') {
|
if (requestType !== 'aws4_request') {
|
||||||
log.warn('requestType contained in params is not aws4_request',
|
log.warn('requestType contained in params is not aws4_request', {
|
||||||
{ requestType });
|
requestType,
|
||||||
|
});
|
||||||
return errors.InvalidArgument;
|
return errors.InvalidArgument;
|
||||||
}
|
}
|
||||||
return {};
|
return {};
|
||||||
@@ -58,17 +67,27 @@ function validateCredentials(credentials, timestamp, log) {

 /**
  * Extract and validate components from query object
- * @param {object} queryObj - query object from request
+ * @param queryObj - query object from request
- * @param {object} log - logging object
+ * @param log - logging object
- * @return {object} object containing extracted query params for authV4
+ * @return object containing extracted query params for authV4
  */
-function extractQueryParams(queryObj, log) {
-    const authParams = {};
+export function extractQueryParams(
+    queryObj: { [key: string]: string | undefined },
+    log: Logger
+) {
+    const authParams: {
+        signedHeaders?: string;
+        signatureFromRequest?: string;
+        timestamp?: string;
+        expiry?: number;
+        credential?: [string, string, string, string, string];
+    } = {};

     // Do not need the algorithm sent back
     if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') {
-        log.warn('algorithm param incorrect',
-            { algo: queryObj['X-Amz-Algorithm'] });
+        log.warn('algorithm param incorrect', {
+            algo: queryObj['X-Amz-Algorithm'],
+        });
         return authParams;
     }

@@ -81,7 +100,6 @@ function extractQueryParams(queryObj, log) {
         return authParams;
     }

     const signature = queryObj['X-Amz-Signature'];
     if (signature && signature.length === 64) {
         authParams.signatureFromRequest = signature;
@@ -94,14 +112,15 @@ function extractQueryParams(queryObj, log) {
     if (timestamp && timestamp.length === 16) {
         authParams.timestamp = timestamp;
     } else {
-        log.warn('missing or invalid timestamp',
-            { timestamp: queryObj['X-Amz-Date'] });
+        log.warn('missing or invalid timestamp', {
+            timestamp: queryObj['X-Amz-Date'],
+        });
         return authParams;
     }

-    const expiry = Number.parseInt(queryObj['X-Amz-Expires'], 10);
+    const expiry = Number.parseInt(queryObj['X-Amz-Expires'] ?? 'nope', 10);
     const sevenDays = 604800;
-    if (expiry && (expiry > 0 && expiry <= sevenDays)) {
+    if (expiry && expiry > 0 && expiry <= sevenDays) {
         authParams.expiry = expiry;
     } else {
         log.warn('invalid expiry', { expiry });
@@ -110,6 +129,7 @@ function extractQueryParams(queryObj, log) {

     const credential = queryObj['X-Amz-Credential'];
     if (credential && credential.length > 28 && credential.indexOf('/') > -1) {
+        // @ts-ignore
         authParams.credential = credential.split('/');
     } else {
         log.warn('invalid credential param', { credential });
@@ -118,17 +138,19 @@ function extractQueryParams(queryObj, log) {
     return authParams;
 }

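Note the `?? 'nope'` fallback above: under the stricter types, parseInt must be fed a string even when the param is absent, and a non-numeric fallback preserves the old NaN behavior. A hedged usage sketch (query values are placeholders):

    import { extractQueryParams } from './validateInputs';

    const log = { warn: console.warn } as any;

    const authParams = extractQueryParams({
        'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
        'X-Amz-Credential':
            'AKIAIOSFODNN7EXAMPLE/20220315/us-east-1/s3/aws4_request',
        'X-Amz-Date': '20220315T120000Z',
        'X-Amz-Expires': '86400',
        'X-Amz-Signature': 'f'.repeat(64),
        'X-Amz-SignedHeaders': 'host',
    }, log);
    // authParams.expiry === 86400; authParams.credential is the 5-part scope.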
 /**
  * Extract and validate components from auth header
- * @param {string} authHeader - authorization header from request
+ * @param authHeader - authorization header from request
- * @param {object} log - logging object
+ * @param log - logging object
- * @return {object} object containing extracted auth header items for authV4
+ * @return object containing extracted auth header items for authV4
  */
-function extractAuthItems(authHeader, log) {
-    const authItems = {};
-    const authArray = authHeader
-        .replace('AWS4-HMAC-SHA256 ', '').split(',');
+export function extractAuthItems(authHeader: string, log: Logger) {
+    const authItems: {
+        credentialsArr?: [string, string, string, string, string];
+        signedHeaders?: string;
+        signatureFromRequest?: string;
+    } = {};
+    const authArray = authHeader.replace('AWS4-HMAC-SHA256 ', '').split(',');

     if (authArray.length < 3) {
         return authItems;
@@ -138,25 +160,35 @@ function extractAuthItems(authHeader, log) {
     const signedHeadersStr = authArray[1];
     const signatureStr = authArray[2];
     log.trace('credentials from request', { credentialStr });
-    if (credentialStr && credentialStr.trim().startsWith('Credential=')
-        && credentialStr.indexOf('/') > -1) {
+    if (
+        credentialStr &&
+        credentialStr.trim().startsWith('Credential=') &&
+        credentialStr.indexOf('/') > -1
+    ) {
+        // @ts-ignore
         authItems.credentialsArr = credentialStr
-            .trim().replace('Credential=', '').split('/');
+            .trim()
+            .replace('Credential=', '')
+            .split('/');
     } else {
         log.warn('missing credentials');
     }
     log.trace('signed headers from request', { signedHeadersStr });
-    if (signedHeadersStr && signedHeadersStr.trim()
-        .startsWith('SignedHeaders=')) {
+    if (
+        signedHeadersStr &&
+        signedHeadersStr.trim().startsWith('SignedHeaders=')
+    ) {
         authItems.signedHeaders = signedHeadersStr
-            .trim().replace('SignedHeaders=', '');
+            .trim()
+            .replace('SignedHeaders=', '');
     } else {
         log.warn('missing signed headers');
     }
     log.trace('signature from request', { signatureStr });
     if (signatureStr && signatureStr.trim().startsWith('Signature=')) {
         authItems.signatureFromRequest = signatureStr
-            .trim().replace('Signature=', '');
+            .trim()
+            .replace('Signature=', '');
     } else {
         log.warn('missing signature');
     }
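For reference, a sketch of the Authorization header shape this parser expects (values are placeholders; the import path is assumed):

    import { extractAuthItems } from './validateInputs';

    const log = { trace: console.log, warn: console.warn } as any;
    const authHeader =
        'AWS4-HMAC-SHA256 ' +
        'Credential=AKIAIOSFODNN7EXAMPLE/20220315/us-east-1/s3/aws4_request,' +
        'SignedHeaders=host;x-amz-date,' +
        'Signature=' + 'f'.repeat(64);

    const items = extractAuthItems(authHeader, log);
    // items.credentialsArr is the 5-part scope; items.signedHeaders is
    // 'host;x-amz-date'; items.signatureFromRequest is the hex signature.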
@@ -166,25 +198,24 @@ function extractAuthItems(authHeader, log) {
 /**
  * Checks whether the signed headers include the host header
  * and all x-amz- and x-scal- headers in request
- * @param {string} signedHeaders - signed headers sent with request
+ * @param signedHeaders - signed headers sent with request
- * @param {object} allHeaders - request.headers
+ * @param allHeaders - request.headers
- * @return {boolean} true if all x-amz-headers included and false if not
+ * @return true if all x-amz-headers included and false if not
  */
-function areSignedHeadersComplete(signedHeaders, allHeaders) {
+export function areSignedHeadersComplete(signedHeaders: string, allHeaders: Headers) {
     const signedHeadersList = signedHeaders.split(';');
     if (signedHeadersList.indexOf('host') === -1) {
         return false;
     }
     const headers = Object.keys(allHeaders);
     for (let i = 0; i < headers.length; i++) {
-        if ((headers[i].startsWith('x-amz-')
-            || headers[i].startsWith('x-scal-'))
-            && signedHeadersList.indexOf(headers[i]) === -1) {
+        if (
+            (headers[i].startsWith('x-amz-') ||
+                headers[i].startsWith('x-scal-')) &&
+            signedHeadersList.indexOf(headers[i]) === -1
+        ) {
             return false;
         }
     }
     return true;
 }

-module.exports = { validateCredentials, extractQueryParams,
-    areSignedHeadersComplete, extractAuthItems };
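And a sketch of the completeness check; plain objects stand in for the Headers type here, hence the casts:

    import { areSignedHeadersComplete } from './validateInputs';

    // 'host' must be signed, and every x-amz-/x-scal- request header must
    // appear in the signed list.
    areSignedHeadersComplete('host;x-amz-date', {
        host: 'example.com',
        'x-amz-date': '20220315T120000Z',
    } as any); // -> true

    areSignedHeadersComplete('host', {
        host: 'example.com',
        'x-amz-meta-color': 'blue', // x-amz- header missing from signed list
    } as any); // -> false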
lib/constants.js (151 lines, file deleted)
@@ -1,151 +0,0 @@
-'use strict'; // eslint-disable-line strict
-
-const crypto = require('crypto');
-
-// The min value here is to manage further backward compat if we
-// need it
-// Default value
-const vaultGeneratedIamSecurityTokenSizeMin = 128;
-// Safe to assume that a typical token size is less than 8192 bytes
-const vaultGeneratedIamSecurityTokenSizeMax = 8192;
-// Base-64
-const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/;
-
-module.exports = {
-    // info about the iam security token
-    iamSecurityToken: {
-        min: vaultGeneratedIamSecurityTokenSizeMin,
-        max: vaultGeneratedIamSecurityTokenSizeMax,
-        pattern: vaultGeneratedIamSecurityTokenPattern,
-    },
-    // PublicId is used as the canonicalID for a request that contains
-    // no authentication information. Requestor can access
-    // only public resources
-    publicId: 'http://acs.amazonaws.com/groups/global/AllUsers',
-    zenkoServiceAccount: 'http://acs.zenko.io/accounts/service',
-    metadataFileNamespace: '/MDFile',
-    dataFileURL: '/DataFile',
-    passthroughFileURL: '/PassthroughFile',
-    // AWS states max size for user-defined metadata
-    // (x-amz-meta- headers) is 2 KB:
-    // http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
-    // In testing, AWS seems to allow up to 88 more bytes,
-    // so we do the same.
-    maximumMetaHeadersSize: 2136,
-    emptyFileMd5: 'd41d8cd98f00b204e9800998ecf8427e',
-    // Version 2 changes the format of the data location property
-    // Version 3 adds the dataStoreName attribute
-    // Version 4 add the Creation-Time and Content-Language attributes,
-    // and add support for x-ms-meta-* headers in UserMetadata
-    // Version 5 adds the azureInfo structure
-    mdModelVersion: 5,
-    /*
-     * Splitter is used to build the object name for the overview of a
-     * multipart upload and to build the object names for each part of a
-     * multipart upload. These objects with large names are then stored in
-     * metadata in a "shadow bucket" to a real bucket. The shadow bucket
-     * contains all ongoing multipart uploads. We include in the object
-     * name some of the info we might need to pull about an open multipart
-     * upload or about an individual part with each piece of info separated
-     * by the splitter. We can then extract each piece of info by splitting
-     * the object name string with this splitter.
-     * For instance, assuming a splitter of '...!*!',
-     * the name of the upload overview would be:
-     * overview...!*!objectKey...!*!uploadId
-     * For instance, the name of a part would be:
-     * uploadId...!*!partNumber
-     *
-     * The sequence of characters used in the splitter should not occur
-     * elsewhere in the pieces of info to avoid splitting where not
-     * intended.
-     *
-     * Splitter is also used in adding bucketnames to the
-     * namespacerusersbucket. The object names added to the
-     * namespaceusersbucket are of the form:
-     * canonicalID...!*!bucketname
-     */
-
-    splitter: '..|..',
-    usersBucket: 'users..bucket',
-    // MPU Bucket Prefix is used to create the name of the shadow
-    // bucket used for multipart uploads. There is one shadow mpu
-    // bucket per bucket and its name is the mpuBucketPrefix followed
-    // by the name of the final destination bucket for the object
-    // once the multipart upload is complete.
-    mpuBucketPrefix: 'mpuShadowBucket',
-    // since aws s3 does not allow capitalized buckets, these may be
-    // used for special internal purposes
-    permittedCapitalizedBuckets: {
-        METADATA: true,
-    },
-    // Setting a lower object key limit to account for:
-    // - Mongo key limit of 1012 bytes
-    // - Version ID in Mongo Key if versioned of 33
-    // - Max bucket name length if bucket match false of 63
-    // - Extra prefix slash for bucket prefix if bucket match of 1
-    objectKeyByteLimit: 915,
-    /* delimiter for location-constraint. The location constraint will be able
-     * to include the ingestion flag
-     */
-    zenkoSeparator: ':',
-    /* eslint-disable camelcase */
-    externalBackends: { aws_s3: true, azure: true, gcp: true, pfs: true },
-    replicationBackends: { aws_s3: true, azure: true, gcp: true },
-
-    // hex digest of sha256 hash of empty string:
-    emptyStringHash: crypto.createHash('sha256')
-        .update('', 'binary').digest('hex'),
-    mpuMDStoredExternallyBackend: { aws_s3: true, gcp: true },
-    // AWS sets a minimum size limit for parts except for the last part.
-    // http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
-    minimumAllowedPartSize: 5242880,
-    gcpMaximumAllowedPartCount: 1024,
-    // GCP Object Tagging Prefix
-    gcpTaggingPrefix: 'aws-tag-',
-    productName: 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko',
-    legacyLocations: ['sproxyd', 'legacy'],
-    // healthcheck default call from nginx is every 2 seconds
-    // for external backends, don't call unless at least 1 minute
-    // (60,000 milliseconds) since last call
-    externalBackendHealthCheckInterval: 60000,
-    // some of the available data backends (if called directly rather
-    // than through the multiple backend gateway) need a key provided
-    // as a string as first parameter of the get/delete methods.
-    clientsRequireStringKey: { sproxyd: true, cdmi: true },
-    hasCopyPartBackends: { aws_s3: true, gcp: true },
-    versioningNotImplBackends: { azure: true, gcp: true },
-    // user metadata applied on zenko-created objects
-    zenkoIDHeader: 'x-amz-meta-zenko-instance-id',
-    // Default expiration value of the S3 pre-signed URL duration
-    // 604800 seconds (seven days).
-    defaultPreSignedURLExpiry: 7 * 24 * 60 * 60,
-    // Regex for ISO-8601 formatted date
-    shortIso8601Regex: /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z/,
-    longIso8601Regex: /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/,
-    supportedNotificationEvents: new Set([
-        's3:ObjectCreated:*',
-        's3:ObjectCreated:Put',
-        's3:ObjectCreated:Copy',
-        's3:ObjectCreated:CompleteMultipartUpload',
-        's3:ObjectRemoved:*',
-        's3:ObjectRemoved:Delete',
-        's3:ObjectRemoved:DeleteMarkerCreated',
-    ]),
-    notificationArnPrefix: 'arn:scality:bucketnotif',
-    // HTTP server keep-alive timeout is set to a higher value than
-    // client's free sockets timeout to avoid the risk of triggering
-    // ECONNRESET errors if the server closes the connection at the
-    // exact moment clients attempt to reuse an established connection
-    // for a new request.
-    //
-    // Note: the ability to close inactive connections on the client
-    // after httpClientFreeSocketsTimeout milliseconds requires the
-    // use of "agentkeepalive" module instead of the regular node.js
-    // http.Agent.
-    httpServerKeepAliveTimeout: 60000,
-    httpClientFreeSocketTimeout: 55000,
-    supportedLifecycleRules: [
-        'expiration',
-        'noncurrentVersionExpiration',
-        'abortIncompleteMultipartUpload',
-    ],
-};
@@ -0,0 +1,147 @@ (new file: constants rewritten as named TypeScript exports)
+import * as crypto from 'crypto';
+
+// The min value here is to manage further backward compat if we need it
+// Default value
+const vaultGeneratedIamSecurityTokenSizeMin = 128;
+// Safe to assume that a typical token size is less than 8192 bytes
+const vaultGeneratedIamSecurityTokenSizeMax = 8192;
+// Base-64
+const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/;
+
+// info about the iam security token
+export const iamSecurityToken = {
+    min: vaultGeneratedIamSecurityTokenSizeMin,
+    max: vaultGeneratedIamSecurityTokenSizeMax,
+    pattern: vaultGeneratedIamSecurityTokenPattern,
+};
+// PublicId is used as the canonicalID for a request that contains
+// no authentication information. Requestor can access
+// only public resources
+export const publicId = 'http://acs.amazonaws.com/groups/global/AllUsers';
+export const zenkoServiceAccount = 'http://acs.zenko.io/accounts/service';
+export const metadataFileNamespace = '/MDFile';
+export const dataFileURL = '/DataFile';
+export const passthroughFileURL = '/PassthroughFile';
+// AWS states max size for user-defined metadata
+// (x-amz-meta- headers) is 2 KB:
+// http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
+// In testing, AWS seems to allow up to 88 more bytes,
+// so we do the same.
+export const maximumMetaHeadersSize = 2136;
+export const emptyFileMd5 = 'd41d8cd98f00b204e9800998ecf8427e';
+// Version 2 changes the format of the data location property
+// Version 3 adds the dataStoreName attribute
+// Version 4 add the Creation-Time and Content-Language attributes,
+// and add support for x-ms-meta-* headers in UserMetadata
+// Version 5 adds the azureInfo structure
+export const mdModelVersion = 5;
+/*
+ * Splitter is used to build the object name for the overview of a
+ * multipart upload and to build the object names for each part of a
+ * multipart upload. These objects with large names are then stored in
+ * metadata in a "shadow bucket" to a real bucket. The shadow bucket
+ * contains all ongoing multipart uploads. We include in the object
+ * name some of the info we might need to pull about an open multipart
+ * upload or about an individual part with each piece of info separated
+ * by the splitter. We can then extract each piece of info by splitting
+ * the object name string with this splitter.
+ * For instance, assuming a splitter of '...!*!',
+ * the name of the upload overview would be:
+ * overview...!*!objectKey...!*!uploadId
+ * For instance, the name of a part would be:
+ * uploadId...!*!partNumber
+ *
+ * The sequence of characters used in the splitter should not occur
+ * elsewhere in the pieces of info to avoid splitting where not
+ * intended.
+ *
+ * Splitter is also used in adding bucketnames to the
+ * namespacerusersbucket. The object names added to the
+ * namespaceusersbucket are of the form:
+ * canonicalID...!*!bucketname
+ */
+
+export const splitter = '..|..';
+export const usersBucket = 'users..bucket';
+// MPU Bucket Prefix is used to create the name of the shadow
+// bucket used for multipart uploads. There is one shadow mpu
+// bucket per bucket and its name is the mpuBucketPrefix followed
+// by the name of the final destination bucket for the object
+// once the multipart upload is complete.
+export const mpuBucketPrefix = 'mpuShadowBucket';
+// since aws s3 does not allow capitalized buckets, these may be
+// used for special internal purposes
+export const permittedCapitalizedBuckets = {
+    METADATA: true,
+};
+// Setting a lower object key limit to account for:
+// - Mongo key limit of 1012 bytes
+// - Version ID in Mongo Key if versioned of 33
+// - Max bucket name length if bucket match false of 63
+// - Extra prefix slash for bucket prefix if bucket match of 1
+export const objectKeyByteLimit = 915;
+/* delimiter for location-constraint. The location constraint will be able
+ * to include the ingestion flag
+ */
+export const zenkoSeparator = ':';
+/* eslint-disable camelcase */
+export const externalBackends = { aws_s3: true, azure: true, gcp: true, pfs: true };
+export const replicationBackends = { aws_s3: true, azure: true, gcp: true };
+
+// hex digest of sha256 hash of empty string:
+export const emptyStringHash = crypto.createHash('sha256')
+    .update('', 'binary').digest('hex');
+export const mpuMDStoredExternallyBackend = { aws_s3: true, gcp: true };
+// AWS sets a minimum size limit for parts except for the last part.
+// http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
+export const minimumAllowedPartSize = 5242880;
+export const gcpMaximumAllowedPartCount = 1024;
+// GCP Object Tagging Prefix
+export const gcpTaggingPrefix = 'aws-tag-';
+export const productName = 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko';
+export const legacyLocations = ['sproxyd', 'legacy'];
+// healthcheck default call from nginx is every 2 seconds
+// for external backends, don't call unless at least 1 minute
+// (60,000 milliseconds) since last call
+export const externalBackendHealthCheckInterval = 60000;
+// some of the available data backends (if called directly rather
+// than through the multiple backend gateway) need a key provided
+// as a string as first parameter of the get/delete methods.
+export const clientsRequireStringKey = { sproxyd: true, cdmi: true };
+export const hasCopyPartBackends = { aws_s3: true, gcp: true };
+export const versioningNotImplBackends = { azure: true, gcp: true };
+// user metadata applied on zenko-created objects
+export const zenkoIDHeader = 'x-amz-meta-zenko-instance-id';
+// Default expiration value of the S3 pre-signed URL duration
+// 604800 seconds (seven days).
+export const defaultPreSignedURLExpiry = 7 * 24 * 60 * 60;
+// Regex for ISO-8601 formatted date
+export const shortIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z/;
+export const longIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/;
+export const supportedNotificationEvents = new Set([
+    's3:ObjectCreated:*',
+    's3:ObjectCreated:Put',
+    's3:ObjectCreated:Copy',
+    's3:ObjectCreated:CompleteMultipartUpload',
+    's3:ObjectRemoved:*',
+    's3:ObjectRemoved:Delete',
+    's3:ObjectRemoved:DeleteMarkerCreated',
+]);
+export const notificationArnPrefix = 'arn:scality:bucketnotif';
+// HTTP server keep-alive timeout is set to a higher value than
+// client's free sockets timeout to avoid the risk of triggering
+// ECONNRESET errors if the server closes the connection at the
+// exact moment clients attempt to reuse an established connection
+// for a new request.
+//
+// Note: the ability to close inactive connections on the client
+// after httpClientFreeSocketsTimeout milliseconds requires the
+// use of "agentkeepalive" module instead of the regular node.js
+// http.Agent.
+export const httpServerKeepAliveTimeout = 60000;
+export const httpClientFreeSocketTimeout = 55000;
+export const supportedLifecycleRules = [
+    'expiration',
+    'noncurrentVersionExpiration',
+    'abortIncompleteMultipartUpload',
+];
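The splitter comment above describes how shadow-bucket keys are assembled; a small sketch (key names are illustrative, import path assumed):

    import { splitter, mpuBucketPrefix } from './constants';

    const objectKey = 'photos/cat.png';
    const uploadId = '0123456789abcdef';
    // Overview key for an ongoing multipart upload:
    const overviewKey = `overview${splitter}${objectKey}${splitter}${uploadId}`;
    // -> 'overview..|..photos/cat.png..|..0123456789abcdef'

    // Shadow bucket holding MPU metadata for bucket 'my-bucket':
    const shadowBucket = `${mpuBucketPrefix}my-bucket`; // 'mpuShadowBucketmy-bucket'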
@@ -1,4 +1,4 @@
-'use strict'; // eslint-disable-line strict
+import { LevelDB } from 'level';

 const writeOptions = { sync: true };

@@ -18,7 +18,7 @@ const writeOptions = { sync: true };
  * @param {String} message - the Error message.
  * @returns {Error} the Error object.
  */
-function propError(propName, message) {
+function propError(propName: string, message: string): Error {
     const err = new Error(message);
     err[propName] = true;
     return err;
@@ -27,7 +27,7 @@ function propError(propName, message) {
 /**
  * Running transaction with multiple updates to be committed atomically
  */
-class IndexTransaction {
+export class IndexTransaction {
     /**
      * Builds a new transaction
      *
@@ -36,7 +36,7 @@ class IndexTransaction {
      *
     * @returns {IndexTransaction} a new empty transaction
     */
-    constructor(db) {
+    constructor(db: LevelDB) {
        this.operations = [];
        this.db = db;
        this.closed = false;
@@ -63,13 +63,17 @@ class IndexTransaction {
      */
    push(op) {
        if (this.closed) {
-            throw propError('pushOnCommittedTransaction',
-                'can not add ops to already committed transaction');
+            throw propError(
+                'pushOnCommittedTransaction',
+                'can not add ops to already committed transaction'
+            );
        }

        if (op.type !== 'put' && op.type !== 'del') {
-            throw propError('invalidTransactionVerb',
-                `unknown action type: ${op.type}`);
+            throw propError(
+                'invalidTransactionVerb',
+                `unknown action type: ${op.type}`
+            );
        }

        if (op.key === undefined) {
@@ -136,14 +140,22 @@ class IndexTransaction {
     */
    addCondition(condition) {
        if (this.closed) {
-            throw propError('pushOnCommittedTransaction',
-                'can not add conditions to already committed transaction');
+            throw propError(
+                'pushOnCommittedTransaction',
+                'can not add conditions to already committed transaction'
+            );
        }
        if (condition === undefined || Object.keys(condition).length === 0) {
-            throw propError('missingCondition', 'missing condition for conditional put');
+            throw propError(
+                'missingCondition',
+                'missing condition for conditional put'
+            );
        }
-        if (typeof (condition.notExists) !== 'string') {
+        if (typeof condition.notExists !== 'string') {
-            throw propError('unsupportedConditionalOperation', 'missing key or supported condition');
+            throw propError(
+                'unsupportedConditionalOperation',
+                'missing key or supported condition'
+            );
        }
        this.conditions.push(condition);
    }
@@ -158,13 +170,21 @@ class IndexTransaction {
     */
    commit(cb) {
        if (this.closed) {
-            return cb(propError('alreadyCommitted',
-                'transaction was already committed'));
+            return cb(
+                propError(
+                    'alreadyCommitted',
+                    'transaction was already committed'
+                )
+            );
        }

        if (this.operations.length === 0) {
-            return cb(propError('emptyTransaction',
-                'tried to commit an empty transaction'));
+            return cb(
+                propError(
+                    'emptyTransaction',
+                    'tried to commit an empty transaction'
+                )
+            );
        }

        this.closed = true;
@@ -176,7 +196,3 @@ class IndexTransaction {
        return this.db.batch(this.operations, writeOptions, cb);
    }
 }

-module.exports = {
-    IndexTransaction,
-};
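A usage sketch for the exported transaction class; the LevelDB handle is assumed to be already open:

    import { LevelDB } from 'level';
    import { IndexTransaction } from './db';

    declare const db: LevelDB; // an already-open database handle (assumed)

    const transaction = new IndexTransaction(db);
    transaction.push({ type: 'put', key: 'user:1', value: '{"name":"ada"}' });
    transaction.push({ type: 'del', key: 'user:2' });
    transaction.commit((err) => {
        // propError sets a named boolean flag on each error, so callers can
        // test err.alreadyCommitted, err.emptyTransaction, and so on.
        if (err) console.error('commit failed', err);
    });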
@@ -1,4 +1,4 @@
-function reshapeExceptionError(error) {
+export function reshapeExceptionError(error) {
     const { message, code, stack, name } = error;
     return {
         message,
@@ -7,7 +7,3 @@ function reshapeExceptionError(error) {
         name,
     };
 }

-module.exports = {
-    reshapeExceptionError,
-};
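Typical call site for the re-exported helper, as a sketch:

    import { reshapeExceptionError } from './errorUtils';

    try {
        JSON.parse('not json');
    } catch (err) {
        // Yields a plain { message, code, stack, name } object that a
        // structured logger can serialize safely.
        console.error('parse failed', reshapeExceptionError(err));
    }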
@@ -1,87 +0,0 @@ (file deleted: old ArsenalError implementation)
-'use strict'; // eslint-disable-line strict
-
-/**
- * ArsenalError
- *
- * @extends {Error}
- */
-class ArsenalError extends Error {
-    /**
-     * constructor.
-     *
-     * @param {string} type - Type of error or message
-     * @param {number} code - HTTP status code
-     * @param {string} desc - Verbose description of error
-     */
-    constructor(type, code, desc) {
-        super(type);
-
-        /**
-         * HTTP status code of error
-         * @type {number}
-         */
-        this.code = code;
-
-        /**
-         * Description of error
-         * @type {string}
-         */
-        this.description = desc;
-
-        this[type] = true;
-    }
-
-    /**
-     * Output the error as a JSON string
-     * @returns {string} Error as JSON string
-     */
-    toString() {
-        return JSON.stringify({
-            errorType: this.message,
-            errorMessage: this.description,
-        });
-    }
-
-    /**
-     * Write the error in an HTTP response
-     *
-     * @param { http.ServerResponse } res - Response we are responding to
-     * @returns {undefined}
-     */
-    writeResponse(res) {
-        res.writeHead(this.code);
-        res.end(this.toString());
-    }
-
-    /**
-     * customizeDescription returns a new ArsenalError with a new description
-     * with the same HTTP code and message.
-     *
-     * @param {string} description - New error description
-     * @returns {ArsenalError} New error
-     */
-    customizeDescription(description) {
-        return new ArsenalError(this.message, this.code, description);
-    }
-}
-
-/**
- * Generate an Errors instances object.
- *
- * @returns {Object.<string, ArsenalError>} - object field by arsenalError
- * instances
- */
-function errorsGen() {
-    const errors = {};
-    const errorsObj = require('../errors/arsenalErrors.json');
-
-    Object.keys(errorsObj)
-        .filter(index => index !== '_comment')
-        .forEach(index => {
-            errors[index] = new ArsenalError(index, errorsObj[index].code,
-                errorsObj[index].description);
-        });
-    return errors;
-}
-
-module.exports = errorsGen();
File diff suppressed because it is too large
@@ -0,0 +1,94 @@ (new file: TypeScript ArsenalError)
+import * as http from 'http';
+import * as rawErrors from './arsenal-errors';
+import * as types from './types';
+
+export * from './types';
+
+/** Mapping used to determine an error type. */
+export type Is = { [Name in types.Name]: boolean };
+/** Mapping of all possible Errors */
+export type Errors = { [Property in keyof types.Names]: ArsenalError };
+
+const entries = Object.entries(rawErrors);
+
+// This contains some metaprog. Be careful.
+// Proxy can be found on MDN.
+// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy
+const createIs = (type: types.Name) => {
+    const get = (_: {}, value: string | symbol) => type === value;
+    return new Proxy({}, { get }) as Is;
+};
+
+export class ArsenalError extends Error {
+    /** HTTP status code. Example: 401, 403, 500, ... */
+    #code: number;
+    /** Text description of the error. */
+    #description: string;
+    /** Type of the error. Belongs to errors.types. */
+    #type: types.Name;
+    /** Object used to determine the error type.
+     * Example: error.is.InternalError */
+    #is: Is;
+
+    private constructor(type: types.Name, code: number, description: string) {
+        super(type);
+        this.#code = code;
+        this.#description = description;
+        this.#type = type;
+        this.#is = createIs(type);
+    }
+
+    /** Output the error as a JSON string */
+    toString() {
+        const errorType = this.message;
+        const errorMessage = this.#description;
+        return JSON.stringify({ errorType, errorMessage });
+    }
+
+    /** Write the error in an HTTP response */
+    writeResponse(res: http.ServerResponse) {
+        res.writeHead(this.#code);
+        const asStr = this.toString();
+        res.end(asStr);
+    }
+
+    /** Clone the error with a new description. */
+    customizeDescription(description: string): ArsenalError {
+        const type = this.#type;
+        const code = this.#code;
+        return new ArsenalError(type, code, description);
+    }
+
+    get is() {
+        return this.#is;
+    }
+
+    get code() {
+        return this.#code;
+    }
+
+    get description() {
+        return this.#description;
+    }
+
+    get type() {
+        return this.#type;
+    }
+
+    /** Generate all possible errors. An instance is created by default. */
+    static errors() {
+        return entries.reduce((acc, value) => {
+            const name = value[0] as types.Name;
+            const error = value[1];
+            const { code, description } = error;
+            const err = new ArsenalError(name, code, description);
+            return { ...acc, [name]: err };
+        }, {} as Errors);
+    }
+}
+
+/** Mapping of all possible Errors.
+ * Use them with errors[error].customizeDescription for any customization. */
+const errors = ArsenalError.errors();
+
+export default errors;
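The `is` member is the interesting part of the rewrite: it is a Proxy whose property reads compare the requested name against the error's own type, so no per-error flag object is ever allocated. A sketch:

    import errors from './errors';

    const err = errors.InternalError.customizeDescription('disk failure on md-1');

    err.is.InternalError;        // true
    err.is.RequestTimeTooSkewed; // false — only the error's own type reads true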
@@ -0,0 +1,11 @@ (new file: error name types)
+import * as rawErrors from './arsenal-errors';
+
+const entries = Object.keys(rawErrors).map((v) => [v, v]);
+
+/** All possible errors. */
+export type Name = keyof typeof rawErrors
+/** Object containing all errors. It has the format [Name]: "Name" */
+export type Names = { [Name_ in Name]: Name_ };
+
+/** Use types with error.is(types.InternalError) to have nice autocomplete */
+export const types: Names = Object.fromEntries(entries);
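`types` maps every error name to itself, so call sites get autocompleted string literals instead of bare strings. For instance:

    import { types } from './types';

    types.InternalError; // 'InternalError'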
@@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-
-const ciphers = [
+export const ciphers = [
     'DHE-RSA-AES128-GCM-SHA256',
     'ECDHE-ECDSA-AES128-GCM-SHA256',
     'ECDHE-RSA-AES256-GCM-SHA384',
@@ -28,7 +26,3 @@ const ciphers = [
     '!EDH-RSA-DES-CBC3-SHA',
     '!KRB5-DES-CBC3-SHA',
 ].join(':');

-module.exports = {
-    ciphers,
-};
@@ -29,16 +29,11 @@ c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe
 bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg==
 -----END DH PARAMETERS-----
 */
-'use strict'; // eslint-disable-line strict
-
-const dhparam =
+export const dhparam =
     'MIIBCAKCAQEAh99T77KGNuiY9N6xrCJ3QNv4SFADTa3CD+1VMTAdRJLHUNpglB+i' +
     'AoTYiLDFZgtTCpx0ZZUD+JM3qiCZy0OK5/ZGlVD7sZmxjRtdpVK4qIPtwav8t0J7' +
     'c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe' +
     '23PLGZulKg8H3eRBxHamHkmyuEVDtoNhMIoJONsdXSpo5GgcD4EQMM8xb/qsnCxn' +
     '6QIGTBvcHskxtlTZOfUPk4XQ6Yb3tQi2TurzkQHLln4U7p/GZs+D+6D3SgSPqr6P' +
     'bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg==';

-module.exports = {
-    dhparam,
-};
@@ -0,0 +1,2 @@ (new file: https barrel module)
+export * as ciphers from './ciphers'
+export * as dhparam from './dh2048'
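A sketch of how these TLS settings would plug into Node's https server. Node expects PEM for dhparam, and the module exports only the base64 body, so the rewrap below is an assumption of this sketch (mirroring the PEM block quoted in the dh2048 comment); certificate paths are placeholders:

    import * as fs from 'fs';
    import * as https from 'https';
    import { ciphers } from './ciphers';
    import { dhparam } from './dh2048';

    const dhparamPem =
        `-----BEGIN DH PARAMETERS-----\n${dhparam}\n-----END DH PARAMETERS-----\n`;

    https.createServer(
        {
            cert: fs.readFileSync('cert.pem'),
            key: fs.readFileSync('key.pem'),
            ciphers,             // colon-joined allow/deny cipher list
            dhparam: dhparamPem, // enables DHE key exchange
        },
        (req, res) => res.end('ok'),
    ).listen(8443);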
@@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-
-const ipaddr = require('ipaddr.js');
+import ipaddr from 'ipaddr.js';

 /**
  * checkIPinRangeOrMatch checks whether a given ip address is in an ip address
@@ -9,7 +7,7 @@ const ipaddr = require('ipaddr.js');
  * @param {object} ip - parsed ip address
  * @return {boolean} true if in range, false if not
  */
-function checkIPinRangeOrMatch(cidr, ip) {
+export function checkIPinRangeOrMatch(cidr, ip) {
     // If there is an exact match of the ip address, no need to check ranges
     if (ip.toString() === cidr) {
         return true;
@@ -39,7 +37,7 @@ function checkIPinRangeOrMatch(cidr, ip) {
  * @param {string} ip - IPV4/IPV6/IPV4-mapped IPV6 address
  * @return {object} parsedIp - Object representation of parsed IP
  */
-function parseIp(ip) {
+export function parseIp(ip) {
     if (ipaddr.IPv4.isValid(ip)) {
         return ipaddr.parse(ip);
     }
@@ -60,7 +58,7 @@ function parseIp(ip) {
  * @param {string} ip - IP address
  * @return {boolean} - true if there is match or false for no match
  */
-function ipMatchCidrList(cidrList, ip) {
+export function ipMatchCidrList(cidrList, ip) {
     const parsedIp = parseIp(ip);
     return cidrList.some(item => {
         let cidr;
@@ -75,9 +73,3 @@ function ipMatchCidrList(cidrList, ip) {
         return checkIPinRangeOrMatch(cidr || item, parsedIp);
     });
 }

-module.exports = {
-    checkIPinRangeOrMatch,
-    ipMatchCidrList,
-    parseIp,
-};
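Usage sketch (module path is an assumption):

    import { ipMatchCidrList } from './ipCheck';

    // CIDR ranges and exact addresses can be mixed in one allow-list.
    ipMatchCidrList(['10.0.0.0/8', '192.168.1.7'], '10.42.0.1');  // true
    ipMatchCidrList(['10.0.0.0/8', '192.168.1.7'], '172.16.0.1'); // false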
@@ -1,6 +1,5 @@
-'use strict'; // eslint-disable-line
-
-const debug = require('util').debuglog('jsutil');
+import { debuglog } from 'util';
+const debug = debuglog('jsutil');

 // JavaScript utility functions

@@ -17,7 +16,7 @@ const debug = require('util').debuglog('jsutil');
  * @return {function} a callable wrapper mirroring <tt>func</tt> but
  * only calls <tt>func</tt> at first invocation.
  */
-module.exports.once = function once(func) {
+export function once(func) {
     const state = { called: false, res: undefined };
     return function wrapper(...args) {
         if (!state.called) {
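once() guards a callback that several code paths could fire; a sketch (module path assumed):

    import { once } from './jsutil';

    const done = once((err?: Error) => console.log('finished', err));
    done();                  // invokes the wrapped function
    done(new Error('late')); // ignored: only the first call runs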
@@ -1,6 +1,8 @@
-const Redis = require('ioredis');
+import Redis from 'ioredis';

-class RedisClient {
+export default class RedisClient {
+    _client: Redis
+
     /**
      * @constructor
      * @param {Object} config - config
@@ -11,13 +13,13 @@ class RedisClient {
      */
     constructor(config, logger) {
         this._client = new Redis(config);
-        this._client.on('error', err =>
+        this._client.on('error', (err) =>
             logger.trace('error from redis', {
                 error: err,
                 method: 'RedisClient.constructor',
                 redisHost: config.host,
                 redisPort: config.port,
-            }),
+            })
         );
         return this;
     }
@@ -29,12 +31,12 @@ class RedisClient {
      * @param {callback} cb - callback (error, result)
      * @return {undefined}
      */
-    scan(pattern, count = 10, cb) {
+    scan(pattern: string, count = 10, cb) {
        const params = { match: pattern, count };
        const keys = [];

        const stream = this._client.scanStream(params);
-        stream.on('data', resultKeys => {
+        stream.on('data', (resultKeys) => {
            for (let i = 0; i < resultKeys.length; i++) {
                keys.push(resultKeys[i]);
            }
@@ -51,9 +53,12 @@ class RedisClient {
     * @param {callback} cb - callback
     * @return {undefined}
     */
-    incrEx(key, expiry, cb) {
+    incrEx(key: string, expiry: number, cb) {
        return this._client
-            .multi([['incr', key], ['expire', key, expiry]])
+            .multi([
+                ['incr', key],
+                ['expire', key, expiry],
+            ])
            .exec(cb);
    }

@@ -64,7 +69,7 @@ class RedisClient {
     * @param {callback} cb - callback
     * @return {undefined}
     */
-    incrby(key, amount, cb) {
+    incrby(key: string, amount: number, cb) {
        return this._client.incrby(key, amount, cb);
    }

@@ -76,9 +81,12 @@ class RedisClient {
     * @param {callback} cb - callback
     * @return {undefined}
     */
-    incrbyEx(key, amount, expiry, cb) {
+    incrbyEx(key: string, amount: number, expiry: number, cb) {
        return this._client
-            .multi([['incrby', key, amount], ['expire', key, expiry]])
+            .multi([
+                ['incrby', key, amount],
+                ['expire', key, expiry],
+            ])
            .exec(cb);
    }

@@ -89,7 +97,7 @@ class RedisClient {
     * @param {callback} cb - callback
     * @return {undefined}
     */
-    decrby(key, amount, cb) {
+    decrby(key: string, amount: number, cb) {
        return this._client.decrby(key, amount, cb);
    }

@@ -99,7 +107,7 @@ class RedisClient {
     * @param {callback} cb - callback
     * @return {undefined}
     */
-    get(key, cb) {
+    get(key: string, cb) {
        return this._client.get(key, cb);
    }

@@ -111,7 +119,7 @@ class RedisClient {
     * If cb response returns 1, key exists.
     * @return {undefined}
     */
-    exists(key, cb) {
+    exists(key: string, cb) {
        return this._client.exists(key, cb);
    }

@@ -121,7 +129,7 @@ class RedisClient {
     * @param {callback} cb - callback
     * @return {undefined}
     */
-    batch(cmds, cb) {
+    batch(cmds: string[], cb) {
        return this._client.pipeline(cmds).exec(cb);
    }

@@ -134,7 +142,7 @@ class RedisClient {
     * @param {callback} cb - callback
     * @return {undefined}
     */
-    zadd(key, score, value, cb) {
+    zadd(key: string, score: number, value: string, cb) {
        return this._client.zadd(key, score, value, cb);
    }

@@ -147,7 +155,7 @@ class RedisClient {
     * @param {function} cb - callback
     * @return {undefined}
     */
-    zcard(key, cb) {
+    zcard(key: string, cb) {
        return this._client.zcard(key, cb);
    }

@@ -161,7 +169,7 @@ class RedisClient {
     * @param {function} cb - callback
     * @return {undefined}
     */
-    zscore(key, value, cb) {
+    zscore(key: string, value: string, cb) {
        return this._client.zscore(key, value, cb);
    }

@@ -174,7 +182,7 @@ class RedisClient {
     * The cb response returns number of values removed
     * @return {undefined}
     */
-    zrem(key, value, cb) {
+    zrem(key: string, value: string | any[], cb) {
        return this._client.zrem(key, value, cb);
    }

@@ -186,7 +194,7 @@ class RedisClient {
     * @param {function} cb - callback
     * @return {undefined}
     */
-    zrange(key, start, end, cb) {
+    zrange(key: string, start: number, end: number, cb) {
        return this._client.zrange(key, start, end, cb);
    }

@@ -200,7 +208,7 @@ class RedisClient {
     * @param {function} cb - callback
     * @return {undefined}
     */
-    zrangebyscore(key, min, max, cb) {
+    zrangebyscore(key: string, min: number | string, max: number | string, cb) {
        return this._client.zrangebyscore(key, min, max, cb);
    }

@@ -210,7 +218,7 @@ class RedisClient {
     * @param {function} cb - callback
     * @return {undefined}
     */
-    ttl(key, cb) {
+    ttl(key: string, cb) {
        return this._client.ttl(key, cb);
    }

@@ -226,5 +234,3 @@ class RedisClient {
        return this._client.client('list', cb);
    }
 }

-module.exports = RedisClient;
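Sketch of the typed client in use; the config and logger shapes are assumptions beyond what this diff shows:

    import RedisClient from './RedisClient';

    const logger = { trace: (..._args: any[]) => {} }; // stub logger
    const client = new RedisClient({ host: '127.0.0.1', port: 6379 }, logger);

    // incrEx pairs INCR with EXPIRE in one MULTI/EXEC, so the counter and
    // its TTL are applied atomically — useful for per-interval counters.
    client.incrEx('requests:12:34', 300, (err, results) => {
        // results[0] holds the INCR reply, results[1] the EXPIRE reply.
    });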
@@ -1,13 +1,18 @@
-const async = require('async');
+import async from 'async';
+import RedisClient from './RedisClient';

-class StatsClient {
+export default class StatsClient {
+    _redis?: RedisClient;
+    _interval: number;
+    _expiry: number;
+
     /**
      * @constructor
      * @param {object} redisClient - RedisClient instance
      * @param {number} interval - sampling interval by seconds
      * @param {number} expiry - sampling duration by seconds
      */
-    constructor(redisClient, interval, expiry) {
+    constructor(redisClient: RedisClient, interval: number, expiry: number) {
        this._redis = redisClient;
        this._interval = interval;
        this._expiry = expiry;
@@ -24,9 +29,9 @@ class StatsClient {
     * @param {object} d - Date instance
     * @return {number} timestamp - normalized to the nearest interval
     */
-    _normalizeTimestamp(d) {
+    _normalizeTimestamp(d: Date): number {
        const s = d.getSeconds();
-        return d.setSeconds(s - s % this._interval, 0);
+        return d.setSeconds(s - (s % this._interval), 0);
    }

    /**
@@ -34,7 +39,7 @@ class StatsClient {
     * @param {object} d - Date instance
     * @return {number} timestamp - set to the previous interval
     */
-    _setPrevInterval(d) {
+    _setPrevInterval(d: Date): number {
        return d.setSeconds(d.getSeconds() - this._interval);
    }

@@ -44,7 +49,7 @@ class StatsClient {
     * @param {Date} date - Date instance
     * @return {string} key - key for redis
     */
-    buildKey(name, date) {
+    buildKey(name: string, date: Date): string {
        return `${name}:${this._normalizeTimestamp(date)}`;
    }

@@ -54,7 +59,7 @@ class StatsClient {
     * @param {array} arr - Date instance
     * @return {string} key - key for redis
     */
-    _getCount(arr) {
+    _getCount(arr: any[]): string {
        return arr.reduce((prev, a) => {
            let num = parseInt(a[1], 10);
            num = Number.isNaN(num) ? 0 : num;
@@ -69,7 +74,7 @@ class StatsClient {
     * @param {function} cb - callback
     * @return {undefined}
     */
-    reportNewRequest(id, incr, cb) {
+    reportNewRequest(id: string, incr: number, cb) {
        if (!this._redis) {
            return undefined;
        }
@@ -81,8 +86,8 @@ class StatsClient {
            callback = incr;
            amount = 1;
        } else {
-            callback = (cb && typeof cb === 'function') ? cb : this._noop;
-            amount = (typeof incr === 'number') ? incr : 1;
+            callback = cb && typeof cb === 'function' ? cb : this._noop;
+            amount = typeof incr === 'number' ? incr : 1;
        }

        const key = this.buildKey(`${id}:requests`, new Date());
@@ -97,7 +102,7 @@ class StatsClient {
     * @param {function} [cb] - callback
     * @return {undefined}
     */
-    incrementKey(key, incr, cb) {
+    incrementKey(key: string, incr: number, cb) {
        const callback = cb || this._noop;
        return this._redis.incrby(key, incr, callback);
    }
@@ -109,7 +114,7 @@ class StatsClient {
     * @param {function} [cb] - callback
     * @return {undefined}
     */
-    decrementKey(key, decr, cb) {
+    decrementKey(key: string, decr: number, cb) {
        const callback = cb || this._noop;
        return this._redis.decrby(key, decr, callback);
    }
@@ -120,7 +125,7 @@ class StatsClient {
     * @param {callback} cb - callback
     * @return {undefined}
     */
-    report500(id, cb) {
+    report500(id: string, cb) {
        if (!this._redis) {
            return undefined;
        }
@@ -136,21 +141,24 @@ class StatsClient {
     * @param {callback} cb - callback to call with the err/result
     * @return {undefined}
     */
-    getAllStats(log, ids, cb) {
+    getAllStats(log, ids: any[], cb) {
        if (!this._redis) {
            return cb(null, {});
        }

        const statsRes = {
-            'requests': 0,
+            requests: 0,
            '500s': 0,
-            'sampleDuration': this._expiry,
+            sampleDuration: this._expiry,
        };
        let requests = 0;
        let errors = 0;

        // for now set concurrency to default of 10
-        return async.eachLimit(ids, 10, (id, done) => {
+        return async.eachLimit(
+            ids,
+            10,
+            (id, done) => {
                this.getStats(log, id, (err, res) => {
                    if (err) {
                        return done(err);
@@ -159,7 +167,8 @@ class StatsClient {
                    errors += res['500s'];
                    return done();
                });
-        }, error => {
+            },
+            (error) => {
                if (error) {
                    log.error('error getting stats', {
                        error,
@@ -170,7 +179,8 @@ class StatsClient {
                statsRes.requests = requests;
                statsRes['500s'] = errors;
                return cb(null, statsRes);
-        });
+            }
+        );
    }

    /**
@@ -180,7 +190,7 @@ class StatsClient {
     * @param {callback} cb - callback to call with the err/result
     * @return {undefined}
     */
-    getStats(log, id, cb) {
+    getStats(log, id: string, cb) {
        if (!this._redis) {
            return cb(null, {});
        }
@@ -205,9 +215,9 @@ class StatsClient {
     * index 1 contains the result
     */
        const statsRes = {
-            'requests': 0,
+            requests: 0,
            '500s': 0,
-            'sampleDuration': this._expiry,
+            sampleDuration: this._expiry,
        };
        if (err) {
            log.error('error getting stats', {
@@ -227,5 +237,3 @@ class StatsClient {
        });
    }
 }

-module.exports = StatsClient;
|
|
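The getAllStats hunks above only reflow the async.eachLimit call; the accumulation logic is untouched. For reviewers, a minimal sketch of that accumulation in isolation, with fetchStats standing in for this.getStats (a stub, not part of the class) and sequential iteration in place of eachLimit's concurrency of 10:

type StatsRes = { requests: number; '500s': number; sampleDuration: number };

async function aggregateStats(
    ids: string[],
    fetchStats: (id: string) => Promise<StatsRes>, // stub for this.getStats
    expiry: number, // plays the role of this._expiry
): Promise<StatsRes> {
    const statsRes: StatsRes = { requests: 0, '500s': 0, sampleDuration: expiry };
    for (const id of ids) {
        const res = await fetchStats(id);
        statsRes.requests += res.requests; // same accumulation as in the diff
        statsRes['500s'] += res['500s'];
    }
    return statsRes;
}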
@@ -1,6 +1,5 @@
-const async = require('async');
-
-const StatsClient = require('./StatsClient');
+import async from 'async';
+import StatsClient from './StatsClient';

 /**
  * @class StatsModel
@@ -8,16 +7,16 @@ const StatsClient = require('./StatsClient');
  * @classdesc Extend and overwrite how timestamps are normalized by minutes
  * rather than by seconds
  */
-class StatsModel extends StatsClient {
+export default class StatsModel extends StatsClient {
     /**
      * Utility method to convert 2d array rows to columns, and vice versa
      * See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip
      * @param {array} arrays - 2d array of integers
      * @return {array} converted array
      */
-    _zip(arrays) {
-        if (arrays.length > 0 && arrays.every(a => Array.isArray(a))) {
-            return arrays[0].map((_, i) => arrays.map(a => a[i]));
+    _zip(arrays: number[][]) {
+        if (arrays.length > 0 && arrays.every((a) => Array.isArray(a))) {
+            return arrays[0].map((_, i) => arrays.map((a) => a[i]));
         }
         return [];
     }
@@ -27,9 +26,9 @@ class StatsModel extends StatsClient {
      * @param {object} d - Date instance
      * @return {number} timestamp - normalized to the nearest interval
      */
-    _normalizeTimestamp(d) {
+    _normalizeTimestamp(d: Date) {
         const m = d.getMinutes();
-        return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
+        return d.setMinutes(m - (m % Math.floor(this._interval / 60)), 0, 0);
     }

     /**
@@ -64,16 +63,16 @@ class StatsModel extends StatsClient {
      * @param {callback} cb - callback to call with the err/result
      * @return {undefined}
      */
-    getAllStats(log, ids, cb) {
+    getAllStats(log, ids: string[], cb) {
         if (!this._redis) {
             return cb(null, {});
         }

         const size = Math.floor(this._expiry / this._interval);
         const statsRes = {
-            'requests': Array(size).fill(0),
+            requests: Array(size).fill(0),
             '500s': Array(size).fill(0),
-            'sampleDuration': this._expiry,
+            sampleDuration: this._expiry,
         };
         const requests = [];
         const errors = [];
@@ -117,9 +116,9 @@ class StatsModel extends StatsClient {
      * @param {function} cb - Callback
      * @return {undefined}
      */
-    getAllGlobalStats(ids, log, cb) {
-        const reqsKeys = ids.map(key => (['get', key]));
-        return this._redis.batch(reqsKeys, (err, res) => {
+    getAllGlobalStats(ids: string[], log, cb) {
+        const reqsKeys = ids.map((key) => ['get', key]);
+        return this._redis!.batch(reqsKeys, (err, res) => {
             const statsRes = { requests: 0 };
             if (err) {
                 log.error('error getting metrics', {
@@ -148,7 +147,7 @@ class StatsModel extends StatsClient {
      * @param {Date} d - Date instance
      * @return {number} timestamp - normalized to the nearest hour
      */
-    normalizeTimestampByHour(d) {
+    normalizeTimestampByHour(d: Date) {
         return d.setMinutes(0, 0, 0);
     }

@@ -157,7 +156,7 @@ class StatsModel extends StatsClient {
      * @param {Date} d - Date instance
      * @return {number} timestamp - one hour prior to date passed
      */
-    _getDatePreviousHour(d) {
+    _getDatePreviousHour(d: Date) {
         return d.setHours(d.getHours() - 1);
     }

@@ -166,8 +165,8 @@ class StatsModel extends StatsClient {
      * @param {number} epoch - epoch time
      * @return {array} array of sorted set key timestamps
      */
-    getSortedSetHours(epoch) {
-        const timestamps = [];
+    getSortedSetHours(epoch: number) {
+        const timestamps: number[] = [];
         let date = this.normalizeTimestampByHour(new Date(epoch));
         while (timestamps.length < 24) {
             timestamps.push(date);
@@ -181,7 +180,7 @@ class StatsModel extends StatsClient {
      * @param {number} epoch - epoch time
      * @return {string} normalized hour timestamp for given time
      */
-    getSortedSetCurrentHour(epoch) {
+    getSortedSetCurrentHour(epoch: number) {
         return this.normalizeTimestampByHour(new Date(epoch));
     }

@@ -193,8 +192,8 @@ class StatsModel extends StatsClient {
      * @param {callback} cb - callback
      * @return {undefined}
      */
-    addToSortedSet(key, score, value, cb) {
-        this._redis.exists(key, (err, resCode) => {
+    addToSortedSet(key: string, score: number, value: string, cb) {
+        this._redis!.exists(key, (err, resCode) => {
             if (err) {
                 return cb(err);
             }
@@ -203,8 +202,7 @@ class StatsModel extends StatsClient {
                 const msInADay = 24 * 60 * 60 * 1000;
                 const nearestHour = this.normalizeTimestampByHour(new Date());
                 // in seconds
-                const ttl = Math.ceil(
-                    (msInADay - (Date.now() - nearestHour)) / 1000);
+                const ttl = Math.ceil((msInADay - (Date.now() - nearestHour)) / 1000);
                 const cmds = [
                     ['zadd', key, score, value],
                     ['expire', key, ttl],
@@ -213,7 +211,7 @@ class StatsModel extends StatsClient {
                     if (err) {
                         return cb(err);
                     }
-                    const cmdErr = res.find(r => r[0] !== null);
+                    const cmdErr = res.find((r) => r[0] !== null);
                     if (cmdErr) {
                         return cb(cmdErr);
                     }
@@ -221,9 +219,7 @@ class StatsModel extends StatsClient {
                     return cb(null, successResponse);
                 });
             }
-            return this._redis.zadd(key, score, value, cb);
+            return this._redis!.zadd(key, score, value, cb);
         });
     }
 }
-
-module.exports = StatsModel;
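The two helpers touched at the top of StatsModel are easiest to read standalone. A sketch of both, with `interval` standing in for this._interval (in seconds); the worked values in the comments follow directly from the arithmetic:

// Transpose rows and columns, as _zip does:
// zip([[1, 2, 3], [4, 5, 6]]) -> [[1, 4], [2, 5], [3, 6]]
function zip(arrays: number[][]): number[][] {
    if (arrays.length > 0 && arrays.every((a) => Array.isArray(a))) {
        return arrays[0].map((_, i) => arrays.map((a) => a[i]));
    }
    return [];
}

// Round a Date down to the interval boundary, as _normalizeTimestamp does:
// with interval = 300 (5 minutes), 12:07:42 normalizes to 12:05:00.
function normalizeTimestamp(d: Date, interval: number): number {
    const m = d.getMinutes();
    return d.setMinutes(m - (m % Math.floor(interval / 60)), 0, 0);
}

The addToSortedSet TTL follows the same style of clock arithmetic: it keeps the key alive for the remainder of a 24-hour window anchored at the nearest hour.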
@@ -1,13 +1,13 @@
-const promClient = require('prom-client');
+import promClient from 'prom-client';

 const collectDefaultMetricsIntervalMs =
-    process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined ?
-    Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10) :
-    10000;
+    process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined
+        ? Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10)
+        : 10000;

 promClient.collectDefaultMetrics({ timeout: collectDefaultMetricsIntervalMs });

-class ZenkoMetrics {
+export default class ZenkoMetrics {
     static createCounter(params) {
         return new promClient.Counter(params);
     }
@@ -36,5 +36,3 @@ class ZenkoMetrics {
         return promClient.register.contentType;
     }
 }
-
-module.exports = ZenkoMetrics;
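createCounter simply forwards its params to prom-client, so callers get a regular prom-client Counter back. A minimal sketch of the result (the metric name and help text are invented for illustration):

import promClient from 'prom-client';

const counter = new promClient.Counter({
    name: 'example_requests_total', // hypothetical metric name
    help: 'Total requests seen by the example service',
});
counter.inc(); // increments by 1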
@@ -0,0 +1,4 @@
+export { default as StatsClient } from './StatsClient';
+export { default as StatsModel } from './StatsModel';
+export { default as RedisClient } from './RedisClient';
+export { default as ZenkoMetrics } from './ZenkoMetrics';
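This new index file turns each module's default export into a named export, so consumers can import everything from one place (the relative path here is assumed):

import { StatsClient, StatsModel, ZenkoMetrics } from './metrics';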
@@ -1,11 +1,17 @@
-const errors = require('../errors');
+import errors from '../errors';

 const validServices = {
     aws: ['s3', 'iam', 'sts', 'ring'],
     scality: ['utapi', 'sso'],
 };

-class ARN {
+export default class ARN {
+    _partition: string;
+    _service: string;
+    _region: string | null;
+    _accountId: string | null;
+    _resource: string;
+
     /**
      *
      * Create an ARN object from its individual components
@@ -17,7 +23,7 @@ class ARN {
      * @param {string} [accountId] - AWS 12-digit account ID
      * @param {string} resource - AWS resource path (e.g. 'foo/bar')
      */
-    constructor(partition, service, region, accountId, resource) {
+    constructor(partition: string, service: string, region: string, accountId: string, resource: string) {
         this._partition = partition;
         this._service = service;
         this._region = region || null;
@@ -25,7 +31,7 @@ class ARN {
         this._resource = resource;
     }

-    static createFromString(arnStr) {
+    static createFromString(arnStr: string) {
         const [arn, partition, service, region, accountId,
             resourceType, resource] = arnStr.split(':');

@@ -57,8 +63,8 @@ class ARN {
                 `bad ARN: bad account ID "${accountId}": ` +
                 'must be a 12-digit number or "*"') };
         }
-        const fullResource = (resource !== undefined ?
-            `${resourceType}:${resource}` : resourceType);
+        const fullResource = resource !== undefined ?
+            `${resourceType}:${resource}` : resourceType;
         return new ARN(partition, service, region, accountId, fullResource);
     }

@@ -79,21 +85,21 @@ class ARN {
     }

     isIAMAccount() {
-        return this.getService() === 'iam'
-            && this.getAccountId() !== null
-            && this.getAccountId() !== '*'
-            && this.getResource() === 'root';
+        return this.getService() === 'iam' &&
+            this.getAccountId() !== null &&
+            this.getAccountId() !== '*' &&
+            this.getResource() === 'root';
     }
     isIAMUser() {
-        return this.getService() === 'iam'
-            && this.getAccountId() !== null
-            && this.getAccountId() !== '*'
-            && this.getResource().startsWith('user/');
+        return this.getService() === 'iam' &&
+            this.getAccountId() !== null &&
+            this.getAccountId() !== '*' &&
+            this.getResource().startsWith('user/');
     }
     isIAMRole() {
-        return this.getService() === 'iam'
-            && this.getAccountId() !== null
-            && this.getResource().startsWith('role');
+        return this.getService() === 'iam' &&
+            this.getAccountId() !== null &&
+            this.getResource().startsWith('role');
     }

     toString() {
@@ -102,5 +108,3 @@ class ARN {
             .join(':');
     }
 }
-
-module.exports = ARN;
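How createFromString's destructuring plays out on a concrete ARN (the example value is invented; the field layout is the one shown in the diff):

// 'arn:aws:iam::123456789012:user/alice'.split(':') yields six parts, so
// `resource` is undefined and fullResource falls back to resourceType:
const [arn, partition, service, region, accountId, resourceType, resource] =
    'arn:aws:iam::123456789012:user/alice'.split(':');
// partition = 'aws', service = 'iam', region = '' (stored as null),
// accountId = '123456789012', resourceType = 'user/alice', resource = undefined
// => isIAMUser() is true; isIAMAccount() is false (resource is not 'root').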
@@ -1,7 +1,13 @@
-const { legacyLocations } = require('../constants');
-const escapeForXml = require('../s3middleware/escapeForXml');
+import { legacyLocations } from '../constants';
+import escapeForXml from '../s3middleware/escapeForXml';

-class BackendInfo {
+export default class BackendInfo {
+    _config;
+    _objectLocationConstraint;
+    _bucketLocationConstraint;
+    _requestEndpoint;
+    _legacyLocationConstraint;
+
     /**
      * Represents the info necessary to evaluate which data backend to use
      * on a data put call.
@@ -236,5 +242,3 @@ class BackendInfo {
         return this._config.backends.data;
     }
 }
-
-module.exports = BackendInfo;
@@ -2,7 +2,9 @@
  * Helper class to ease access to the Azure specific information for
  * storage accounts mapped to buckets.
  */
-class BucketAzureInfo {
+export default class BucketAzureInfo {
+    _data
+
     /**
      * @constructor
      * @param {object} obj - Raw structure for the Azure info on storage account
@@ -233,5 +235,3 @@ class BucketAzureInfo {
         return this._data;
     }
 }
-
-module.exports = BucketAzureInfo;
@@ -1,19 +1,44 @@
-const assert = require('assert');
-const uuid = require('uuid/v4');
+import assert from 'assert';
+import uuid from 'uuid/v4';

-const { WebsiteConfiguration } = require('./WebsiteConfiguration');
-const ReplicationConfiguration = require('./ReplicationConfiguration');
-const LifecycleConfiguration = require('./LifecycleConfiguration');
-const ObjectLockConfiguration = require('./ObjectLockConfiguration');
-const BucketPolicy = require('./BucketPolicy');
-const NotificationConfiguration = require('./NotificationConfiguration');
+import { WebsiteConfiguration } from './WebsiteConfiguration';
+import ReplicationConfiguration from './ReplicationConfiguration';
+import LifecycleConfiguration from './LifecycleConfiguration';
+import ObjectLockConfiguration from './ObjectLockConfiguration';
+import BucketPolicy from './BucketPolicy';
+import NotificationConfiguration from './NotificationConfiguration';

 // WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG
 // BucketInfoModelVersion.md can be found in documentation/ at the root
 // of this repository
 const modelVersion = 14;

-class BucketInfo {
+export default class BucketInfo {
+    _acl;
+    _name;
+    _owner;
+    _ownerDisplayName;
+    _creationDate;
+    _mdBucketModelVersion;
+    _transient;
+    _deleted;
+    _serverSideEncryption;
+    _versioningConfiguration;
+    _locationConstraint;
+    _readLocationConstraint;
+    _websiteConfiguration;
+    _replicationConfiguration;
+    _cors;
+    _lifecycleConfiguration;
+    _bucketPolicy;
+    _uid;
+    _isNFS;
+    _ingestion;
+    _azureInfo;
+    _objectLockEnabled;
+    _objectLockConfiguration;
+    _notificationConfiguration;
+
     /**
      * Represents all bucket information.
      * @constructor
@@ -768,5 +793,3 @@ class BucketInfo {
         return this;
     }
 }
-
-module.exports = BucketInfo;
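The long block of bare field declarations added to BucketInfo (and to BackendInfo and BucketAzureInfo above) is the standard first step of a TypeScript migration: properties assigned in the constructor must be declared on the class. A minimal sketch of the pattern, with an invented class name:

class Example {
    _name; // declared so TypeScript knows the property exists;
           // its type is inferred from the constructor assignment

    constructor(name: string) {
        this._name = name; // a TS error without the declaration above
    }
}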
@@ -1,7 +1,7 @@
-const assert = require('assert');
+import assert from 'assert';

-const errors = require('../errors');
-const { validateResourcePolicy } = require('../policy/policyValidator');
+import errors from '../errors';
+import { validateResourcePolicy } from '../policy/policyValidator';

 /**
  * Format of json policy:
@@ -49,7 +49,10 @@ const objectActions = [
     's3:PutObjectTagging',
 ];

-class BucketPolicy {
+export default class BucketPolicy {
+    _json
+    _policy
+
     /**
      * Create a Bucket Policy instance
      * @param {string} json - the json policy
@@ -75,8 +78,11 @@ class BucketPolicy {
      */
     _getPolicy() {
         if (!this._json || this._json === '') {
-            return { error: errors.MalformedPolicy.customizeDescription(
-                'request json is empty or undefined') };
+            return {
+                error: errors.MalformedPolicy.customizeDescription(
+                    'request json is empty or undefined'
+                ),
+            };
         }
         const validSchema = validateResourcePolicy(this._json);
         if (validSchema.error) {
@@ -104,25 +110,32 @@ class BucketPolicy {
      * @return {error} - contains error or empty obj
      */
     _validateActionResource() {
-        const invalid = this._policy.Statement.every(s => {
-            const actions = typeof s.Action === 'string' ?
-                [s.Action] : s.Action;
-            const resources = typeof s.Resource === 'string' ?
-                [s.Resource] : s.Resource;
-            const objectAction = actions.some(a =>
-                a.includes('Object') || objectActions.includes(a));
+        const invalid = this._policy.Statement.every((s) => {
+            const actions =
+                typeof s.Action === 'string' ? [s.Action] : s.Action;
+            const resources =
+                typeof s.Resource === 'string' ? [s.Resource] : s.Resource;
+            const objectAction = actions.some(
+                (a) => a.includes('Object') || objectActions.includes(a)
+            );
             // wildcardObjectAction checks for actions such as 's3:*' or
             // 's3:Put*' but will return false for actions such as
             // 's3:PutBucket*'
             const wildcardObjectAction = actions.some(
-                a => a.includes('*') && !a.includes('Bucket'));
-            const objectResource = resources.some(r => r.includes('/'));
-            return ((objectAction && !objectResource) ||
-                (objectResource && !objectAction && !wildcardObjectAction));
+                (a) => a.includes('*') && !a.includes('Bucket')
+            );
+            const objectResource = resources.some((r) => r.includes('/'));
+            return (
+                (objectAction && !objectResource) ||
+                (objectResource && !objectAction && !wildcardObjectAction)
+            );
         });
         if (invalid) {
-            return { error: errors.MalformedPolicy.customizeDescription(
-                'Action does not apply to any resource(s) in statement') };
+            return {
+                error: errors.MalformedPolicy.customizeDescription(
+                    'Action does not apply to any resource(s) in statement'
+                ),
+            };
         }
         return {};
     }
@@ -139,5 +152,3 @@ class BucketPolicy {
         assert.deepStrictEqual(validated, { error: null, valid: true });
     }
 }
-
-module.exports = BucketPolicy;
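A worked example of the mismatch that _validateActionResource rejects (the statement values are invented, and the objectActions membership check is omitted for brevity):

const statement = {
    Action: 's3:GetObject',                 // object-level action
    Resource: 'arn:aws:s3:::examplebucket', // bucket-level: contains no '/'
};
const actions = [statement.Action];
const resources = [statement.Resource];
const objectAction = actions.some((a) => a.includes('Object'));
const objectResource = resources.some((r) => r.includes('/'));
console.log(objectAction && !objectResource); // true -> MalformedPolicy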
@@ -1,9 +1,9 @@
-const assert = require('assert');
-const UUID = require('uuid');
+import assert from 'assert';
+import UUID from 'uuid';

-const errors = require('../errors');
-const LifecycleRule = require('./LifecycleRule');
-const escapeForXml = require('../s3middleware/escapeForXml');
+import errors from '../errors';
+import LifecycleRule from './LifecycleRule';
+import escapeForXml from '../s3middleware/escapeForXml';

 const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.

@@ -83,7 +83,7 @@ const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
     };
  */

-class LifecycleConfiguration {
+export default class LifecycleConfiguration {
     /**
      * Create a Lifecycle Configuration instance
      * @param {string} xml - the parsed xml
@@ -1220,5 +1220,3 @@ class LifecycleConfiguration {
         return { Rules: rulesJSON };
     }
 }
-
-module.exports = LifecycleConfiguration;
@@ -1,11 +1,11 @@
-const uuid = require('uuid/v4');
+import uuid from 'uuid/v4';

 /**
  * @class LifecycleRule
  *
  * @classdesc Simple get/set class to build a single Rule
  */
-class LifecycleRule {
+export default class LifecycleRule {
     constructor(id, status) {
         // defaults
         this.id = id || uuid();
@@ -134,5 +134,3 @@ class LifecycleRule {
         return this;
     }
 }
-
-module.exports = LifecycleRule;
@@ -1,11 +1,11 @@
-const assert = require('assert');
-const UUID = require('uuid');
+import assert from 'assert';
+import UUID from 'uuid';

-const {
+import {
     supportedNotificationEvents,
     notificationArnPrefix,
-} = require('../constants');
-const errors = require('../errors');
+} from '../constants';
+import errors from '../errors';

 /**
  * Format of xml request:
@@ -51,7 +51,7 @@ const errors = require('../errors');
  * }
  */

-class NotificationConfiguration {
+export default class NotificationConfiguration {
     /**
      * Create a Notification Configuration instance
      * @param {string} xml - parsed configuration xml
@@ -307,5 +307,3 @@ class NotificationConfiguration {
         return;
     }
 }
-
-module.exports = NotificationConfiguration;
@@ -1,6 +1,5 @@
-const assert = require('assert');
-
-const errors = require('../errors');
+import assert from 'assert';
+import errors from '../errors';

 /**
  * Format of xml request:
@@ -27,7 +26,7 @@ const errors = require('../errors');
  *    }
  * }
  */
-class ObjectLockConfiguration {
+export default class ObjectLockConfiguration {
     /**
      * Create an Object Lock Configuration instance
      * @param {string} xml - the parsed configuration xml
@@ -234,5 +233,3 @@ class ObjectLockConfiguration {
             '</ObjectLockConfiguration>';
     }
 }
-
-module.exports = ObjectLockConfiguration;
@@ -1,15 +1,15 @@
-const crypto = require('crypto');
+import * as crypto from 'crypto';

-const constants = require('../constants');
-const VersionIDUtils = require('../versioning/VersionID');
+import * as constants from '../constants';
+import * as VersionIDUtils from '../versioning/VersionID';

-const ObjectMDLocation = require('./ObjectMDLocation');
+import ObjectMDLocation from './ObjectMDLocation';

 /**
  * Class to manage metadata object for regular s3 objects (instead of
  * mpuPart metadata for example)
  */
-class ObjectMD {
+export default class ObjectMD {
     /**
      * Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
      * reserved for internal use, users should call
@@ -1189,5 +1189,3 @@ class ObjectMD {
         return this._data;
     }
 }
-
-module.exports = ObjectMD;
@@ -2,7 +2,7 @@
  * Helper class to ease access to the Azure specific information for
  * Blob and Container objects.
  */
-class ObjectMDAzureInfo {
+export default class ObjectMDAzureInfo {
     /**
      * @constructor
      * @param {object} obj - Raw structure for the Azure info on Blob/Container
@@ -158,5 +158,3 @@ class ObjectMDAzureInfo {
         return this._data;
     }
 }
-
-module.exports = ObjectMDAzureInfo;
@@ -2,7 +2,7 @@
  * Helper class to ease access to a single data location in metadata
  * 'location' array
  */
-class ObjectMDLocation {
+export default class ObjectMDLocation {
     /**
      * @constructor
      * @param {object} locationObj - single data location info
@@ -126,5 +126,3 @@ class ObjectMDLocation {
         return this._data;
     }
 }
-
-module.exports = ObjectMDLocation;
@@ -1,9 +1,9 @@
-const assert = require('assert');
-const UUID = require('uuid');
+import assert from 'assert';
+import UUID from 'uuid';

-const escapeForXml = require('../s3middleware/escapeForXml');
-const errors = require('../errors');
-const { isValidBucketName } = require('../s3routes/routesUtils');
+import escapeForXml from '../s3middleware/escapeForXml';
+import errors from '../errors';
+import { isValidBucketName } from '../s3routes/routesUtils';

 const MAX_RULES = 1000;
 const RULE_ID_LIMIT = 255;
@@ -37,7 +37,19 @@ const validStorageClasses = [
     </ReplicationConfiguration>
 */

-class ReplicationConfiguration {
+export default class ReplicationConfiguration {
+    _parsedXML
+    _log
+    _config
+    _configPrefixes
+    _configIDs
+    _role
+    _destination
+    _rules
+    _prevStorageClass
+    _hasScalityDestination
+    _preferredReadLocation
+
     /**
      * Create a ReplicationConfiguration instance
      * @param {string} xml - The parsed XML
@@ -469,5 +481,3 @@ class ReplicationConfiguration {
         });
     }
 }
-
-module.exports = ReplicationConfiguration;
@@ -1,4 +1,7 @@
-class RoutingRule {
+export class RoutingRule {
+    _redirect;
+    _condition;
+
     /**
      * Represents a routing rule in a website configuration.
      * @constructor
@@ -52,7 +55,12 @@ class RoutingRule {
         }
     }
 }

-class WebsiteConfiguration {
+export class WebsiteConfiguration {
+    _indexDocument;
+    _errorDocument;
+    _redirectAllRequestsTo;
+    _routingRules;
+
     /**
      * Object that represents website configuration
      * @constructor
@@ -188,8 +196,3 @@ class WebsiteConfiguration {
         return this._routingRules;
     }
 }
-
-module.exports = {
-    RoutingRule,
-    WebsiteConfiguration,
-};
@@ -0,0 +1,14 @@
+export { default as BackendInfo } from './BackendInfo';
+export { default as BucketInfo } from './BucketInfo';
+export { default as BucketAzureInfo } from './BucketAzureInfo';
+export { default as ObjectMD } from './ObjectMD';
+export { default as ObjectMDLocation } from './ObjectMDLocation';
+export { default as ObjectMDAzureInfo } from './ObjectMDAzureInfo';
+export { default as ARN } from './ARN';
+export * as WebsiteConfiguration from './WebsiteConfiguration';
+export { default as ReplicationConfiguration } from './ReplicationConfiguration';
+export { default as LifecycleConfiguration } from './LifecycleConfiguration';
+export { default as LifecycleRule } from './LifecycleRule';
+export { default as BucketPolicy } from './BucketPolicy';
+export { default as ObjectLockConfiguration } from './ObjectLockConfiguration';
+export { default as NotificationConfiguration } from './NotificationConfiguration';
@@ -17,7 +17,7 @@ function shuffle(array) {
     }
 }

-class RoundRobin {
+export default class RoundRobin {
     /**
      * @constructor
      * @param {object[]|string[]} hostsList - list of hosts to query
@@ -167,5 +167,3 @@ class RoundRobin {
         }
     }
 }
-
-module.exports = RoundRobin;
@@ -1,15 +1,13 @@
-'use strict'; // eslint-disable-line
-
-const http = require('http');
-const https = require('https');
-const assert = require('assert');
-const dhparam = require('../../https/dh2048').dhparam;
-const ciphers = require('../../https/ciphers').ciphers;
-const errors = require('../../errors');
-const { checkSupportIPv6 } = require('./utils');
+import * as http from 'http';
+import * as https from 'https';
+import assert from 'assert';
+import { dhparam } from '../../https/dh2048';
+import { ciphers } from '../../https/ciphers';
+import errors from '../../errors';
+import { checkSupportIPv6 } from './utils';


-class Server {
+export default class Server {
     /**
      * @constructor
      *
@@ -458,5 +456,3 @@ class Server {
         return this;
     }
 }
-
-module.exports = Server;
@@ -1,7 +1,5 @@
-'use strict'; // eslint-disable-line
-
-const os = require('os');
-const errors = require('../../errors');
+import * as os from 'os';
+import errors from '../../errors';

 /**
  * Parse the Range header into an object
@@ -16,7 +14,7 @@ const errors = require('../../errors');
  * - an error attribute of type errors.InvalidArgument if the range
  *   syntax is invalid
  */
-function parseRangeSpec(rangeHeader) {
+export function parseRangeSpec(rangeHeader) {
     const rangeMatch = /^bytes=([0-9]+)?-([0-9]+)?$/.exec(rangeHeader);
     if (rangeMatch) {
         const rangeValues = rangeMatch.slice(1, 3);
@@ -55,7 +53,7 @@ function parseRangeSpec(rangeHeader) {
  * - or an 'error' attribute of type errors.InvalidRange if the
  *   requested range is out of object's boundaries.
  */
-function getByteRangeFromSpec(rangeSpec, objectSize) {
+export function getByteRangeFromSpec(rangeSpec, objectSize) {
     if (rangeSpec.suffix !== undefined) {
         if (rangeSpec.suffix === 0) {
             // 0-byte suffix is always invalid (even on empty objects)
@@ -95,7 +93,7 @@ function getByteRangeFromSpec(rangeSpec, objectSize) {
  * - or an 'error' attribute instead of type errors.InvalidRange if
  *   the requested range is out of object's boundaries.
  */
-function parseRange(rangeHeader, objectSize) {
+export function parseRange(rangeHeader, objectSize) {
     const rangeSpec = parseRangeSpec(rangeHeader);
     if (rangeSpec.error) {
         // invalid range syntax is silently ignored in HTTP spec,
@@ -105,15 +103,8 @@ function parseRange(rangeHeader, objectSize) {
     return getByteRangeFromSpec(rangeSpec, objectSize);
 }

-function checkSupportIPv6() {
+export function checkSupportIPv6() {
     const niList = os.networkInterfaces();
     return Object.keys(niList).some(network =>
         niList[network].some(intfc => intfc.family === 'IPv6'));
 }
-
-module.exports = {
-    parseRangeSpec,
-    getByteRangeFromSpec,
-    parseRange,
-    checkSupportIPv6,
-};
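A condensed sketch of the three range shapes the regex above distinguishes (the real module returns arsenal error objects where this sketch returns null):

function parseRangeSpecSketch(header: string):
    { start?: number; end?: number; suffix?: number } | null {
    const m = /^bytes=([0-9]+)?-([0-9]+)?$/.exec(header);
    if (!m || (m[1] === undefined && m[2] === undefined)) return null;
    if (m[1] === undefined) return { suffix: Number(m[2]) }; // 'bytes=-10': last 10 bytes
    if (m[2] === undefined) return { start: Number(m[1]) };  // 'bytes=90-': open-ended
    return { start: Number(m[1]), end: Number(m[2]) };       // 'bytes=0-9': explicit range
}
// getByteRangeFromSpec then clamps against the object size: for a
// 100-byte object, { suffix: 10 } resolves to bytes [90, 99].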
@@ -0,0 +1,15 @@
+import server from './http/server';
+export * as rpc from './rpc/rpc';
+export * as level from './rpc/level-net';
+import RESTServer from './rest/RESTServer';
+import RESTClient from './rest/RESTClient';
+export { default as RoundRobin } from './RoundRobin';
+import * as ProbeServer from './probe/ProbeServer';
+import HealthProbeServer from './probe/HealthProbeServer';
+import * as Utils from './probe/Utils';
+export * as kmip from './kmip';
+export { default as kmipClient } from './kmip/Client';
+
+export const http = { server };
+export const rest = { RESTServer, RESTClient };
+export const probe = { ProbeServer, HealthProbeServer, Utils };
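From the consumer side, this index groups the HTTP, REST, and probe pieces under namespace-like objects while re-exporting the rest by name (the relative path is assumed):

import { http, rest, probe, RoundRobin, kmip } from './network';
// http.server, rest.RESTServer, rest.RESTClient,
// probe.ProbeServer, probe.HealthProbeServer, probe.Utils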
@@ -1,12 +1,8 @@
-'use strict'; // eslint-disable-line
-/* eslint new-cap: "off" */
-
-const async = require('async');
-
-const errors = require('../../errors');
-const TTLVCodec = require('./codec/ttlv.js');
-const TlsTransport = require('./transport/tls.js');
-const KMIP = require('.');
+import async from 'async';
+import errors from '../../errors';
+import TTLVCodec from './codec/ttlv';
+import TlsTransport from './transport/tls';
+import KMIP from '.';

 const CRYPTOGRAPHIC_OBJECT_TYPE = 'Symmetric Key';
 const CRYPTOGRAPHIC_ALGORITHM = 'AES';
@@ -241,8 +237,7 @@ function _queryOperationsAndObjects(client, logger, cb) {
     });
 }

-class Client {
+export default class Client {
     /**
      * Construct a high level KMIP driver suitable for cloudserver
      * @param {Object} options - Instance options
@@ -604,5 +599,3 @@ class Client {
         });
     }
 }
-
-module.exports = Client;
@@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line
-
-const assert = require('assert');
+import assert from 'assert';


 function _lookup(decodedTTLV, path) {
@@ -31,7 +29,7 @@ function _lookup(decodedTTLV, path) {
     return res;
 }

-class Message {
+export default class Message {
     /**
      * Construct a new abstract Message
      * @param {Object} content - the content of the message
@@ -50,5 +48,3 @@ class Message {
         return _lookup(this.content, path);
     }
 }
-
-module.exports = Message;
Some files were not shown because too many files have changed in this diff.