Compare commits

...

8 Commits

Author SHA1 Message Date
Thomas Carmet 2f124d03f7 S3C-2582 Setting up maven repo on https instead of http
(cherry picked from commit 46a304d35d)
2020-02-25 16:30:40 -08:00
Stephane-Scality 1b7aa4f7c3
Merge pull request #2334 from scality/hotfix/S3C-2503-allow-same-email-acl
bugfix: ZENKO-2503 allow ACLs with same email in single request
2020-02-05 19:27:41 +01:00
Jianqin Wang 6a74b03d4d bugfix: ZENKO-2503 allow ACLs with same email in single request
(cherry picked from commit 8d26509382)
2020-02-04 11:31:21 -08:00
Stephane-Scality 951a245e04
Merge pull request #2292 from scality/hotfix/S3C-2504-reworkChunkedUploadStreamHandling
BACKPORT 7.4.5.1 bugfix: S3C-2504 rework chunked upload stream handling
2020-01-09 14:05:45 +01:00
Jonathan Gramain bb23e658b1 bugfix: S3C-2504 cleanup test bucket
Delete the bucket after the streaming V4 test to avoid side effects on
other tests (observed on 8.1+ branches with the mongodb
reportHandler::countItems test).
2019-12-27 17:03:28 -08:00
Jonathan Gramain 77a0601dd2 bugfix: S3C-2504 revert changes in UTAPI dependencies
Revert the changes introduced to the UTAPI dependencies and to the "joi"
dependency in yarn.lock in PR #2247.

Those changes are suspicious and not needed, so it is safer to revert them.
2019-12-27 17:03:28 -08:00
Jonathan Gramain fa1885b128 bugfix: S3C-2504 rework chunked upload stream handling
- make sure the V4Transform._transform() callback is always called
  even in case of error. When there is a client error (like a chunked
  stream signature mismatch), continue consuming the data but do not
  send it to sproxyd. Do not emit an 'error' event as this confuses
  the HTTP stack and retains bad client connections,

- instead of an 'error' event, emit a custom 'clientError' event on
  V4Transform in case of client error to allow a cleanup of sproxyd
  connections,

- depend on a fix of sproxydclient that aborts sproxyd requests when
  the input stream is closed.
2019-12-27 17:02:52 -08:00
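
The pattern described above can be distilled into a minimal sketch (hypothetical names; the real implementation is the V4Transform change shown in the diffs below): the transform keeps consuming input after a client error, reports the error through a callback, and emits a custom 'clientError' event instead of 'error'.

const { Transform } = require('stream');

class GuardedTransform extends Transform {
    constructor(errCb) {
        super();
        this.errCb = errCb;       // reports the error to the API layer
        this.clientError = false; // once set, remaining data is dropped
    }

    _transform(chunk, encoding, callback) {
        if (this.clientError) {
            // keep consuming so the HTTP stack can read the request to
            // its end, but drop the data instead of forwarding it
            return callback();
        }
        if (!this.checkChunk(chunk)) {
            this.clientError = true;
            // an 'error' event would confuse the HTTP stack and retain
            // bad client connections; emit a custom event instead
            this.emit('clientError');
            this.errCb(new Error('signature mismatch'));
            return callback();
        }
        return callback(null, chunk);
    }

    // stand-in for the real chunk signature verification
    checkChunk(chunk) {
        return chunk.length > 0;
    }
}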
Jonathan Gramain 6af6f8287e bugfix: S3C-2517 fix crash with invalid chunked-upload
A Cloudserver worker can crash if a client provides the
"x-amz-content-sha256" header with a value of
"STREAMING-AWS4-HMAC-SHA256-PAYLOAD" to use chunked upload, but
provides a valid AWS V2 signature (chunked-upload requires a V4
signature).

This fix makes Cloudserver respond with a 400 Bad Request instead.
2019-12-10 12:24:48 -08:00
14 changed files with 536 additions and 27 deletions

View File

@@ -7,13 +7,22 @@ const V4Transform = require('../../../auth/streamingV4/V4Transform');
  * accessKey, signatureFromRequest, region, scopeDate, timestamp, and
  * credentialScope (to be used for streaming v4 auth if applicable)
  * @param {RequestLogger} log - the current request logger
- * @param {function} cb - callback containing the result for V4Transform
- * @return {object} - V4Transform object if v4 Auth request, or else the stream
+ * @param {function} errCb - callback called if an error occurs
+ * @return {object|null} - V4Transform object if v4 Auth request, or
+ * the original stream, or null if the request has no V4 params but
+ * the type of request requires them
  */
-function prepareStream(stream, streamingV4Params, log, cb) {
+function prepareStream(stream, streamingV4Params, log, errCb) {
     if (stream.headers['x-amz-content-sha256'] ===
         'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
-        const v4Transform = new V4Transform(streamingV4Params, log, cb);
+        if (typeof streamingV4Params !== 'object') {
+            // this might happen if the user provided a valid V2
+            // Authentication header, while the chunked upload method
+            // requires V4: in such case we don't get any V4 params
+            // and we should return an error to the client.
+            return null;
+        }
+        const v4Transform = new V4Transform(streamingV4Params, log, errCb);
         stream.pipe(v4Transform);
         return v4Transform;
     }
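
The null return value defines the caller contract: a request that carries the streaming V4 payload marker without V4 auth params must be rejected as a client error. A sketch of a caller honoring that contract (it mirrors the dataStore change in the next file):

const dataStream = prepareStream(stream, streamingV4Params, log, errCb);
if (!dataStream) {
    // chunked upload requested without V4 auth params: client error
    return process.nextTick(() => cb(errors.InvalidArgument));
}
// otherwise, dataStream is safe to pipe to the storage backend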

View File

@@ -1,4 +1,4 @@
-const { errors } = require('arsenal');
+const { errors, jsutil } = require('arsenal');

 const data = require('../../../data/wrapper');
 const { prepareStream } = require('./prepareStream');
@@ -57,26 +57,31 @@ function checkHashMatchMD5(stream, hashedStream, dataRetrievalInfo, log, cb) {
  */
 function dataStore(objectContext, cipherBundle, stream, size,
     streamingV4Params, backendInfo, log, cb) {
-    const dataStream = prepareStream(stream, streamingV4Params, log, cb);
-    data.put(cipherBundle, dataStream, size, objectContext, backendInfo, log,
+    const cbOnce = jsutil.once(cb);
+    const dataStream = prepareStream(stream, streamingV4Params, log, cbOnce);
+    if (!dataStream) {
+        return process.nextTick(() => cb(errors.InvalidArgument));
+    }
+    return data.put(
+        cipherBundle, dataStream, size, objectContext, backendInfo, log,
         (err, dataRetrievalInfo, hashedStream) => {
             if (err) {
                 log.error('error in datastore', {
                     error: err,
                 });
-                return cb(err);
+                return cbOnce(err);
             }
             if (!dataRetrievalInfo) {
                 log.fatal('data put returned neither an error nor a key', {
                     method: 'storeObject::dataStore',
                 });
-                return cb(errors.InternalError);
+                return cbOnce(errors.InternalError);
             }
             log.trace('dataStore: backend stored key', {
                 dataRetrievalInfo,
             });
             return checkHashMatchMD5(stream, hashedStream,
-                dataRetrievalInfo, log, cb);
+                dataRetrievalInfo, log, cbOnce);
         });
 }
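
jsutil.once() comes from arsenal and guards the API callback against double invocation: errCb may already have fired on a client error by the time data.put() reports its own failure. A sketch of the semantics (an assumption about the helper, not arsenal's actual code):

function once(cb) {
    let called = false;
    return (...args) => {
        if (called) {
            return undefined; // subsequent calls are ignored
        }
        called = true;
        return cb(...args);
    };
}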

View File

@@ -25,14 +25,14 @@ class V4Transform extends Transform {
      * header plus the string 'aws4_request' joined with '/':
      * timestamp/region/aws-service/aws4_request
      * @param {object} log - logger object
-     * @param {function} cb - callback to api
+     * @param {function} errCb - callback called if an error occurs
      */
-    constructor(streamingV4Params, log, cb) {
+    constructor(streamingV4Params, log, errCb) {
         const { accessKey, signatureFromRequest, region, scopeDate, timestamp,
             credentialScope } = streamingV4Params;
         super({});
         this.log = log;
-        this.cb = cb;
+        this.errCb = errCb;
         this.accessKey = accessKey;
         this.region = region;
         this.scopeDate = scopeDate;
@@ -50,6 +50,7 @@ class V4Transform extends Transform {
         this.currentMetadata = [];
         this.lastPieceDone = false;
         this.lastChunk = false;
+        this.clientError = false;
     }

     /**
@@ -192,6 +193,10 @@ class V4Transform extends Transform {
         // signature + \r\n + chunk-data + \r\n
         // Last transfer-encoding chunk will have size 0 and no chunk-data.

+        // if there was an error earlier, ignore the remaining data
+        if (this.clientError) {
+            return callback();
+        }
         if (this.lastPieceDone) {
             const slice = chunk.slice(0, 10);
             this.log.trace('received chunk after end.' +
@@ -268,7 +273,13 @@ class V4Transform extends Transform {
             // final callback
             err => {
                 if (err) {
-                    return this.cb(err);
+                    this.clientError = true;
+                    // Emit the 'clientError' event to notify
+                    // listeners that the client did not provide a
+                    // valid stream of content, e.g. due to a wrong
+                    // signature.
+                    this.emit('clientError');
+                    this.errCb(err);
                 }
                 // get next chunk
                 return callback();

View File

@@ -102,6 +102,10 @@ function _put(cipherBundle, value, valueSize,
     if (value) {
         hashedStream = new MD5Sum();
         value.pipe(hashedStream);
+        value.once('clientError', () => {
+            log.trace('destroying hashed stream');
+            hashedStream.destroy();
+        });
     }
     if (implName === 'multipleBackends') {

View File

@@ -193,14 +193,17 @@ aclUtils.isValidCanonicalId = function isValidCanonicalId(canonicalID) {
 aclUtils.reconstructUsersIdentifiedByEmail =
     function reconstruct(userInfofromVault, userGrantInfo) {
-        return userInfofromVault.map(item => {
-            const userEmail = item.email.toLowerCase();
-            // Find the full user grant info based on email
-            const user = userGrantInfo
-                .find(elem => elem.identifier.toLowerCase() === userEmail);
+        return userGrantInfo.map(item => {
+            const userEmail = item.identifier.toLowerCase();
+            const user = {};
+            const userId = userInfofromVault
+                .find(elem => elem.email.toLowerCase() === userEmail);
             // Set the identifier to be the canonicalID instead of email
-            user.identifier = item.canonicalID;
+            user.identifier = userId.canonicalID;
             user.userIDType = 'id';
+            // copy over ACL grant type: i.e. READ/WRITE...
+            user.grantType = item.grantType;
             return user;
         });
     };
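
A hypothetical example of the fixed behavior with two grants sharing one email (sample values, not from the test suite): mapping over userGrantInfo keeps one output entry per grant, whereas the old code mapped over userInfofromVault and produced at most one entry per distinct email, losing the second grant.

const userInfofromVault = [
    { email: 'sampleAccount1@sampling.com', canonicalID: 'canonical-id-1' },
];
const userGrantInfo = [
    { identifier: 'sampleaccount1@sampling.com', grantType: 'READ' },
    { identifier: 'sampleaccount1@sampling.com', grantType: 'WRITE' },
];
aclUtils.reconstructUsersIdentifiedByEmail(userInfofromVault, userGrantInfo);
// => [
//     { identifier: 'canonical-id-1', userIDType: 'id', grantType: 'READ' },
//     { identifier: 'canonical-id-1', userIDType: 'id', grantType: 'WRITE' },
// ]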

View File

@@ -32,7 +32,7 @@
     "mongodb": "^2.2.31",
     "node-uuid": "^1.4.3",
     "npm-run-all": "~4.1.5",
-    "sproxydclient": "scality/sproxydclient#0e17e27",
+    "sproxydclient": "scality/sproxydclient#5dc4903",
     "utapi": "scality/utapi#b522b3d",
     "utf8": "~2.1.1",
     "uuid": "^3.0.1",

View File

@@ -80,6 +80,25 @@ describe('PUT Bucket ACL', () => {
         });
     });

+    it('should set multiple ACL permissions with same grantee specified ' +
+    'using email', done => {
+        s3.putBucketAcl({
+            Bucket: bucketName,
+            GrantRead: 'emailAddress=sampleaccount1@sampling.com',
+            GrantWrite: 'emailAddress=sampleaccount1@sampling.com',
+        }, err => {
+            assert(!err);
+            s3.getBucketAcl({
+                Bucket: bucketName,
+            }, (err, res) => {
+                assert(!err);
+                // expect both READ and WRITE grants to exist
+                assert.strictEqual(res.Grants.length, 2);
+                return done();
+            });
+        });
+    });
+
     it('should return InvalidArgument if invalid grantee ' +
     'user ID provided in ACL header request', done => {
         s3.putBucketAcl({

View File

@@ -7,7 +7,7 @@
       <id>central</id>
       <name>Maven Repository Switchboard</name>
       <layout>default</layout>
-      <url>http://repo1.maven.org/maven2</url>
+      <url>https://repo1.maven.org/maven2</url>
       <snapshots>
         <enabled>false</enabled>
       </snapshots>
@@ -18,7 +18,7 @@
     <pluginRepository>
       <id>central</id>
       <name>Maven Plugin Repository</name>
-      <url>http://repo1.maven.org/maven2</url>
+      <url>https://repo1.maven.org/maven2</url>
       <layout>default</layout>
       <snapshots>
         <enabled>false</enabled>

View File

@@ -0,0 +1,125 @@
const http = require('http');

const async = require('async');
const assert = require('assert');

const BucketUtility =
    require('../../aws-node-sdk/lib/utility/bucket-util');
const HttpRequestAuthV4 = require('../utils/HttpRequestAuthV4');

const config = require('../../config.json');

const DUMMY_SIGNATURE =
    'baadc0debaadc0debaadc0debaadc0debaadc0debaadc0debaadc0debaadc0de';

http.globalAgent.keepAlive = true;

const PORT = 8000;
const BUCKET = 'bad-chunk-signature-v4';

const N_PUTS = 100;
const N_DATA_CHUNKS = 20;
const DATA_CHUNK_SIZE = 128 * 1024;
const ALTER_CHUNK_SIGNATURE = true;

const CHUNK_DATA = Buffer.alloc(DATA_CHUNK_SIZE).fill('0').toString();

function createBucket(bucketUtil, cb) {
    const createBucket = async.asyncify(bucketUtil.createOne.bind(bucketUtil));
    createBucket(BUCKET, cb);
}

function cleanupBucket(bucketUtil, cb) {
    const emptyBucket = async.asyncify(bucketUtil.empty.bind(bucketUtil));
    const deleteBucket = async.asyncify(bucketUtil.deleteOne.bind(bucketUtil));
    async.series([
        done => emptyBucket(BUCKET, done),
        done => deleteBucket(BUCKET, done),
    ], cb);
}

class HttpChunkedUploadWithBadSignature extends HttpRequestAuthV4 {
    constructor(url, params, callback) {
        super(url, params, callback);
        this._chunkId = 0;
        this._alterSignatureChunkId = params.alterSignatureChunkId;
    }

    getChunkSignature(chunkData) {
        let signature;
        if (this._chunkId === this._alterSignatureChunkId) {
            // console.log(
            //     `ALTERING SIGNATURE OF DATA CHUNK #${this._chunkId}`);
            signature = DUMMY_SIGNATURE;
        } else {
            signature = super.getChunkSignature(chunkData);
        }
        ++this._chunkId;
        return signature;
    }
}

function testChunkedPutWithBadSignature(n, alterSignatureChunkId, cb) {
    const req = new HttpChunkedUploadWithBadSignature(
        `http://${config.ipAddress}:${PORT}/${BUCKET}/obj-${n}`, {
            accessKey: config.accessKey,
            secretKey: config.secretKey,
            method: 'PUT',
            headers: {
                'content-length': N_DATA_CHUNKS * DATA_CHUNK_SIZE,
                'connection': 'keep-alive',
            },
            alterSignatureChunkId,
        }, res => {
            if (alterSignatureChunkId >= 0 &&
                alterSignatureChunkId <= N_DATA_CHUNKS) {
                assert.strictEqual(res.statusCode, 403);
            } else {
                assert.strictEqual(res.statusCode, 200);
            }
            res.on('data', () => {});
            res.on('end', cb);
        });
    req.on('error', err => {
        assert.ifError(err);
    });
    async.timesSeries(N_DATA_CHUNKS, (chunkIndex, done) => {
        // console.log(`SENDING NEXT CHUNK OF LENGTH ${CHUNK_DATA.length}`);
        if (req.write(CHUNK_DATA)) {
            process.nextTick(done);
        } else {
            req.once('drain', done);
        }
    }, () => {
        req.end();
    });
}

describe('streaming V4 signature with bad chunk signature', () => {
    const bucketUtil = new BucketUtility('default', {});

    before(done => createBucket(bucketUtil, done));
    after(done => cleanupBucket(bucketUtil, done));

    it('Cloudserver should be robust against bad signature in streaming ' +
    'payload', function badSignatureInStreamingPayload(cb) {
        this.timeout(120000);
        async.timesLimit(N_PUTS, 10, (n, done) => {
            // multiple test cases depend on the value of
            // alterSignatureChunkId:
            // alterSignatureChunkId >= 0 &&
            // alterSignatureChunkId < N_DATA_CHUNKS
            // <=> alter the signature of the target data chunk
            // alterSignatureChunkId == N_DATA_CHUNKS
            // <=> alter the signature of the last empty chunk that
            // carries the last payload signature
            // alterSignatureChunkId > N_DATA_CHUNKS
            // <=> no signature is altered (regular test case)
            // By making n go from 0 to nDatachunks+1, we cover all
            // above cases.
            const alterSignatureChunkId = ALTER_CHUNK_SIGNATURE ?
                (n % (N_DATA_CHUNKS + 2)) : null;
            testChunkedPutWithBadSignature(n, alterSignatureChunkId, done);
        }, err => cb(err));
    });
});

View File

@@ -0,0 +1,255 @@
const crypto = require('crypto');
const http = require('http');
const stream = require('stream');
const url = require('url');

const SERVICE = 's3';
const REGION = 'us-east-1';

const EMPTY_STRING_HASH = crypto.createHash('sha256').digest('hex');

/**
 * Execute and sign HTTP requests with AWS signature v4 scheme
 *
 * The purpose of this class is primarily testing, where the various
 * functions used to generate the signing content can be overridden for
 * specific test needs, like altering signatures or hashes.
 *
 * It provides a writable stream interface like the request object
 * returned by http.request().
 */
class HttpRequestAuthV4 extends stream.Writable {
    /**
     * @constructor
     * @param {string} url - HTTP URL to the S3 server
     * @param {object} params - request parameters
     * @param {string} params.accessKey - AWS access key
     * @param {string} params.secretKey - AWS secret key
     * @param {string} [params.method="GET"] - HTTP method
     * @param {object} [params.headers] - HTTP request headers
     *     example: {
     *         'connection': 'keep-alive',
     *         'content-length': 1000, // mandatory for PUT object requests
     *         'x-amz-content-sha256': '...' // streaming V4 encoding is used
     *                                       // if not provided
     *     }
     * @param {function} callback - called when a response arrives:
     *     callback(res) (see http.request())
     */
    constructor(url, params, callback) {
        super();

        this._url = url;
        this._accessKey = params.accessKey;
        this._secretKey = params.secretKey;
        this._httpParams = params;
        this._callback = callback;

        this._httpRequest = null;
        this._timestamp = null;
        this._signingKey = null;
        this._chunkedUpload = false;
        this._lastSignature = null;

        this.once('finish', () => {
            if (!this._httpRequest) {
                this._initiateRequest(false);
            }
            if (this._chunkedUpload) {
                this._httpRequest.end(this.constructChunkPayload(''));
            } else {
                this._httpRequest.end();
            }
        });
    }

    getCredentialScope() {
        const signingDate = this._timestamp.slice(0, 8);
        const credentialScope =
            `${signingDate}/${REGION}/${SERVICE}/aws4_request`;
        // console.log(`CREDENTIAL SCOPE: "${credentialScope}"`);
        return credentialScope;
    }

    getSigningKey() {
        const signingDate = this._timestamp.slice(0, 8);
        const dateKey = crypto.createHmac('sha256', `AWS4${this._secretKey}`)
            .update(signingDate, 'binary').digest();
        const dateRegionKey = crypto.createHmac('sha256', dateKey)
            .update(REGION, 'binary').digest();
        const dateRegionServiceKey = crypto.createHmac('sha256', dateRegionKey)
            .update(SERVICE, 'binary').digest();
        this._signingKey = crypto.createHmac('sha256', dateRegionServiceKey)
            .update('aws4_request', 'binary').digest();
    }

    createSignature(stringToSign) {
        if (!this._signingKey) {
            this.getSigningKey();
        }
        return crypto.createHmac('sha256', this._signingKey)
            .update(stringToSign).digest('hex');
    }

    getCanonicalRequest(urlObj, signedHeaders) {
        const method = this._httpParams.method || 'GET';
        const signedHeadersList = Object.keys(signedHeaders).sort();
        const qsParams = [];
        urlObj.searchParams.forEach((value, key) => {
            qsParams.push({ key, value });
        });
        const canonicalQueryString = qsParams
            .sort((a, b) => {
                if (a.key !== b.key) {
                    return a.key < b.key ? -1 : 1;
                }
                return a.value < b.value ? -1 : 1;
            })
            .map(param => `${encodeURI(param.key)}=${encodeURI(param.value)}`)
            .join('&');
        const canonicalSignedHeaders = signedHeadersList
            .map(header => `${header}:${signedHeaders[header]}\n`)
            .join('');
        const canonicalRequest = [
            method,
            urlObj.pathname,
            canonicalQueryString,
            canonicalSignedHeaders,
            signedHeadersList.join(';'),
            signedHeaders['x-amz-content-sha256'],
        ].join('\n');
        // console.log(`CANONICAL REQUEST: "${canonicalRequest}"`);
        return canonicalRequest;
    }

    constructRequestStringToSign(canonicalReq) {
        const canonicalReqHash =
            crypto.createHash('sha256').update(canonicalReq).digest('hex');
        const stringToSign = `AWS4-HMAC-SHA256\n${this._timestamp}\n` +
            `${this.getCredentialScope()}\n${canonicalReqHash}`;
        // console.log(`STRING TO SIGN: "${stringToSign}"`);
        return stringToSign;
    }

    getAuthorizationSignature(urlObj, signedHeaders) {
        const canonicalRequest =
            this.getCanonicalRequest(urlObj, signedHeaders);
        this._lastSignature = this.createSignature(
            this.constructRequestStringToSign(canonicalRequest));
        return this._lastSignature;
    }

    getAuthorizationHeader(urlObj, signedHeaders) {
        const authorizationSignature =
            this.getAuthorizationSignature(urlObj, signedHeaders);
        const signedHeadersList = Object.keys(signedHeaders).sort();
        return ['AWS4-HMAC-SHA256',
            `Credential=${this._accessKey}/${this.getCredentialScope()},`,
            `SignedHeaders=${signedHeadersList.join(';')},`,
            `Signature=${authorizationSignature}`,
        ].join(' ');
    }

    constructChunkStringToSign(chunkData) {
        const currentChunkHash =
            crypto.createHash('sha256').update(chunkData.toString())
                .digest('hex');
        const stringToSign = `AWS4-HMAC-SHA256-PAYLOAD\n${this._timestamp}\n` +
            `${this.getCredentialScope()}\n${this._lastSignature}\n` +
            `${EMPTY_STRING_HASH}\n${currentChunkHash}`;
        // console.log(`CHUNK STRING TO SIGN: "${stringToSign}"`);
        return stringToSign;
    }

    getChunkSignature(chunkData) {
        const stringToSign = this.constructChunkStringToSign(chunkData);
        this._lastSignature = this.createSignature(stringToSign);
        return this._lastSignature;
    }

    constructChunkPayload(chunkData) {
        if (!this._chunkedUpload) {
            return chunkData;
        }
        const chunkSignature = this.getChunkSignature(chunkData);
        return [chunkData.length.toString(16),
            ';chunk-signature=',
            chunkSignature,
            '\r\n',
            chunkData,
            '\r\n',
        ].join('');
    }

    _constructRequest(hasDataToSend) {
        const dateObj = new Date();
        const isoDate = dateObj.toISOString();
        this._timestamp = [
            isoDate.slice(0, 4),
            isoDate.slice(5, 7),
            isoDate.slice(8, 13),
            isoDate.slice(14, 16),
            isoDate.slice(17, 19),
            'Z',
        ].join('');

        const urlObj = new url.URL(this._url);
        const signedHeaders = {
            'host': urlObj.host,
            'x-amz-date': this._timestamp,
        };
        const httpHeaders = Object.assign({}, this._httpParams.headers);
        let contentLengthHeader;
        Object.keys(httpHeaders).forEach(header => {
            const lowerHeader = header.toLowerCase();
            if (lowerHeader === 'content-length') {
                contentLengthHeader = header;
            }
            if (!['connection',
                'transfer-encoding'].includes(lowerHeader)) {
                signedHeaders[lowerHeader] = httpHeaders[header];
            }
        });
        if (!signedHeaders['x-amz-content-sha256']) {
            if (hasDataToSend) {
                signedHeaders['x-amz-content-sha256'] =
                    'STREAMING-AWS4-HMAC-SHA256-PAYLOAD';
                signedHeaders['content-encoding'] = 'aws-chunked';
                this._chunkedUpload = true;
                if (contentLengthHeader !== undefined) {
                    signedHeaders['x-amz-decoded-content-length'] =
                        httpHeaders[contentLengthHeader];
                    delete signedHeaders['content-length'];
                    delete httpHeaders[contentLengthHeader];
                    httpHeaders['transfer-encoding'] = 'chunked';
                }
            } else {
                signedHeaders['x-amz-content-sha256'] = EMPTY_STRING_HASH;
            }
        }
        httpHeaders.Authorization =
            this.getAuthorizationHeader(urlObj, signedHeaders);
        return Object.assign(httpHeaders, signedHeaders);
    }

    _initiateRequest(hasDataToSend) {
        const httpParams = Object.assign({}, this._httpParams);
        httpParams.headers = this._constructRequest(hasDataToSend);
        this._httpRequest = http.request(this._url, httpParams, this._callback);
    }

    _write(chunk, encoding, callback) {
        if (!this._httpRequest) {
            this._initiateRequest(true);
        }
        const payload = this.constructChunkPayload(chunk);
        if (this._httpRequest.write(payload)) {
            return callback();
        }
        return this._httpRequest.once('drain', callback);
    }
}

module.exports = HttpRequestAuthV4;
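
Typical usage mirrors http.request(): when no 'x-amz-content-sha256' header is provided and data is written, the class switches to aws-chunked streaming V4 signing on its own. A usage sketch (placeholder endpoint and credentials):

const HttpRequestAuthV4 = require('./HttpRequestAuthV4');

const req = new HttpRequestAuthV4('http://127.0.0.1:8000/my-bucket/my-key', {
    accessKey: 'accessKey1', // placeholder credentials
    secretKey: 'verySecretKey1',
    method: 'PUT',
    headers: { 'content-length': 9 }, // length of the decoded payload
}, res => {
    console.log('response status:', res.statusCode);
});
// each write() is framed as one aws-chunked chunk:
// <hex-size>;chunk-signature=<sig>\r\n<data>\r\n
req.write('some data');
// end() appends the final zero-length chunk carrying the last signature
req.end();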

View File

@@ -478,6 +478,23 @@ describe('s3curl putObject', () => {
         });
     });

+    it('should not be able to put an object if using streaming ' +
+    'chunked-upload with a valid V2 signature',
+    done => {
+        provideRawOutput([
+            '--debug',
+            `--put=${upload}`,
+            '--',
+            '-H',
+            'x-amz-content-sha256: STREAMING-AWS4-HMAC-SHA256-PAYLOAD',
+            `${endpoint}/${bucket}/${prefix}${delimiter}${upload}1`,
+            '-v'],
+        (httpCode, rawOutput) => {
+            assert.strictEqual(httpCode, '400 BAD REQUEST');
+            assertError(rawOutput.stdout, 'InvalidArgument', done);
+        });
+    });
+
     it('should not be able to put an object in a bucket with an invalid name',
         done => {
             provideRawOutput([

View File

@@ -203,6 +203,50 @@ describe('putBucketACL API', () => {
         });
     });

+    it('should set all ACLs sharing the same email in request headers',
+    done => {
+        const testACLRequest = {
+            bucketName,
+            namespace,
+            headers: {
+                'host': `${bucketName}.s3.amazonaws.com`,
+                'x-amz-grant-full-control':
+                    'emailaddress="sampleaccount1@sampling.com"' +
+                    ',emailaddress="sampleaccount2@sampling.com"',
+                'x-amz-grant-read':
+                    'emailaddress="sampleaccount1@sampling.com"',
+                'x-amz-grant-write':
+                    'emailaddress="sampleaccount1@sampling.com"',
+                'x-amz-grant-read-acp':
+                    'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac' +
+                    'f8f8d5218e7cd47ef2be',
+                'x-amz-grant-write-acp':
+                    'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac' +
+                    'f8f8d5218e7cd47ef2bf',
+            },
+            url: '/?acl',
+            query: { acl: '' },
+        };
+        bucketPutACL(authInfo, testACLRequest, log, err => {
+            assert.strictEqual(err, undefined);
+            metadata.getBucket(bucketName, log, (err, md) => {
+                assert(md.getAcl().WRITE.indexOf(canonicalIDforSample1)
+                    > -1);
+                assert(md.getAcl().READ.indexOf(canonicalIDforSample1)
+                    > -1);
+                assert(md.getAcl().FULL_CONTROL
+                    .indexOf(canonicalIDforSample1) > -1);
+                assert(md.getAcl().FULL_CONTROL
+                    .indexOf(canonicalIDforSample2) > -1);
+                assert(md.getAcl().READ_ACP
+                    .indexOf(canonicalIDforSample1) > -1);
+                assert(md.getAcl().WRITE_ACP
+                    .indexOf(canonicalIDforSample2) > -1);
+                done();
+            });
+        });
+    });
+
     it('should return an error if invalid grantee user ID ' +
     'provided in ACL header request', done => {
         // Canonical ID should be a 64-digit hex string

View File

@@ -49,13 +49,30 @@ describe('V4Transform class', () => {
         });
     });

-    it('should ignore data sent after final chunk', done => {
+    it('should raise an error if signature is wrong', done => {
         const v4Transform = new V4Transform(streamingV4Params, log, err => {
-            assert.strictEqual(err, null);
+            assert(err);
             done();
         });
         const filler1 = '8;chunk-signature=51d2511f7c6887907dff20474d8db6' +
             '7d557e5f515a6fa6a8466bb12f8833bcca\r\ncontents\r\n';
+        const filler2 = '0;chunk-signature=baadc0debaadc0debaadc0debaadc0de' +
+            'baadc0debaadc0debaadc0debaadc0de\r\n';
+        const chunks = [
+            Buffer.from(filler1),
+            Buffer.from(filler2),
+            null,
+        ];
+        const authMe = new AuthMe(chunks);
+        authMe.pipe(v4Transform);
+    });
+
+    it('should ignore data sent after final chunk', done => {
+        const v4Transform = new V4Transform(streamingV4Params, log, () => {
+            assert(false);
+        });
+        const filler1 = '8;chunk-signature=51d2511f7c6887907dff20474d8db6' +
+            '7d557e5f515a6fa6a8466bb12f8833bcca\r\ncontents\r\n';
         const filler2 = '0;chunk-signature=c0eac24b7ce72141ec077df9753db' +
             '4cc8b7991491806689da0395c8bd0231e48\r\n';
         const filler3 = '\r\n';
View File

@@ -3387,9 +3387,9 @@ sprintf-js@~1.0.2:
   resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
   integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=

-sproxydclient@scality/sproxydclient#0e17e27:
-  version "7.4.1"
-  resolved "https://codeload.github.com/scality/sproxydclient/tar.gz/0e17e27b35971aab4bc9a6ce40f7eab2f054314e"
+sproxydclient@scality/sproxydclient#5dc4903:
+  version "7.4.6"
+  resolved "https://codeload.github.com/scality/sproxydclient/tar.gz/5dc4903d3766891d8ec9aa0eb2568c84a93340e9"
   dependencies:
     async "^3.1.0"
     werelogs "scality/werelogs#4e0d97c"