Compare commits

...

1 Commit

Author            SHA1         Message         Date
alexandremerle    6fe661f7ef   wip [ci skip]    2016-11-03 14:08:42 +01:00
4 changed files with 60 additions and 41 deletions

Changed file 1 of 4

@@ -22,9 +22,10 @@ const routeMap = {
 const utapi = new UtapiClient(_config.utapi);
 
 function checkUnsuportedRoutes(req, res, log) {
-    if (req.query.policy !== undefined ||
-        req.query.cors !== undefined ||
-        req.query.tagging !== undefined) {
+    const query = req.query;
+    if (query.policy !== undefined ||
+        query.cors !== undefined ||
+        query.tagging !== undefined) {
         return routesUtils.responseXMLBody(
             errors.NotImplemented, null, res, log);
     }
@@ -72,6 +73,16 @@ function checkIP(clientIP) {
         _config.healthChecks.allowFrom, clientIP);
 }
 
+function safeNormalize(req, log) {
+    try {
+        utils.normalizeRequest(req);
+        return true;
+    } catch (err) {
+        log.trace('could not normalize request', { error: err.stack || err });
+        return false;
+    }
+}
+
 export default function routes(req, res, logger) {
     const clientInfo = {
         clientIP: req.socket.remoteAddress,
@@ -91,30 +102,26 @@ export default function routes(req, res, logger) {
         }
         return healthcheckRouteHandler(req, res, log);
     }
-    try {
-        utils.normalizeRequest(req);
-    } catch (err) {
-        log.trace('could not normalize request', { error: err });
-        return routesUtils.responseXMLBody(
-            errors.InvalidURI, undefined, res, log);
+    if (!safeNormalize(req, log)) {
+        return routesUtils.responseXMLBody(errors.InvalidURI, undefined, res,
+            log);
     }
     log.addDefaultFields({
         bucketName: req.bucketName,
         objectKey: req.objectKey,
     });
 
+    const bucketName = req.bucketName;
     // if empty name and request not a list Buckets
-    if (!req.bucketName &&
+    if (!bucketName &&
         !(req.method.toUpperCase() === 'GET' && !req.objectKey)) {
         log.warn('empty bucket name', { method: 'routes' });
         return routesUtils.responseXMLBody(errors.MethodNotAllowed,
             undefined, res, log);
     }
-    if (req.bucketName !== undefined &&
-        utils.isValidBucketName(req.bucketName) === false) {
-        log.warn('invalid bucket name', { bucketName: req.bucketName });
+    if (bucketName !== undefined &&
+        utils.isValidBucketName(bucketName) === false) {
+        log.warn('invalid bucket name', { bucketName });
         return routesUtils.responseXMLBody(errors.InvalidBucketName,
             undefined, res, log);
     }
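
Note on the hunks above: repeated property lookups (req.query, req.bucketName) are cached in locals, and the throwing utils.normalizeRequest call moves behind a boolean helper so the route handler can return early. A minimal standalone sketch of that shape, with an illustrative tryNormalize helper and fake request/logger objects rather than the project's real utils and logger:

    // Sketch only: normalize and log are illustrative stand-ins.
    function tryNormalize(req, normalize, log) {
        try {
            normalize(req);
            return true;
        } catch (err) {
            log.trace('could not normalize request', { error: err.stack || err });
            return false;
        }
    }

    const log = { trace: (msg, ctx) => console.log(msg, ctx) };
    const req = { query: { tagging: 'x' }, bucketName: 'demo' };
    const query = req.query;                  // one property lookup, reused below
    const unsupported = query.policy !== undefined ||
        query.cors !== undefined ||
        query.tagging !== undefined;
    console.log(unsupported);                                                // true
    console.log(tryNormalize(req, () => {}, log));                           // true
    console.log(tryNormalize(req, () => { throw new Error('bad'); }, log));  // false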

Changed file 2 of 4

@@ -13,6 +13,7 @@ const encryptionHeaders = [
     'x-amz-server-side-encryption-customer-key',
     'x-amz-server-side-encryption-customer-key-md5',
 ];
+const encryptionHeadersLen = encryptionHeaders.length;
 
 const validStatuses = ['Enabled', 'Suspended'];
 const validMfaDeletes = [undefined, 'Enabled', 'Disabled'];
@@ -145,10 +146,13 @@ export default function routePUT(request, response, log, utapi) {
                 log);
         }
     }
+    const headers = request.headers;
     // object level encryption
-    if (encryptionHeaders.some(i => request.headers[i] !== undefined)) {
-        return routesUtils.responseXMLBody(errors.NotImplemented, null,
-            response, log);
+    for (let i = 0; i < encryptionHeadersLen; i++) {
+        if (headers[encryptionHeaders[i]] !== undefined) {
+            return routesUtils.responseXMLBody(errors.NotImplemented, null,
+                response, log);
+        }
     }
     if (request.query.partNumber) {
         if (request.headers['x-amz-copy-source']) {
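
Note on the hunk above: Array.prototype.some with a closure is replaced by an indexed for loop over a length computed once at module load. A minimal sketch of the same pattern, with a shortened header list and made-up inputs purely for illustration:

    // Sketch: a cached-length indexed loop standing in for .some().
    const encryptionHeaders = [
        'x-amz-server-side-encryption',
        'x-amz-server-side-encryption-customer-key',
    ];
    const encryptionHeadersLen = encryptionHeaders.length;

    function hasEncryptionHeader(headers) {
        for (let i = 0; i < encryptionHeadersLen; i++) {
            if (headers[encryptionHeaders[i]] !== undefined) {
                return true;
            }
        }
        return false;
    }

    console.log(hasEncryptionHeader({ 'x-amz-server-side-encryption': 'AES256' })); // true
    console.log(hasEncryptionHeader({ 'content-length': '0' }));                    // false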

Changed file 3 of 4

@@ -12,16 +12,21 @@ import data from '../data/wrapper';
 function setCommonResponseHeaders(headers, response, log) {
     if (headers && typeof headers === 'object') {
         log.trace('setting response headers', { headers });
-        Object.keys(headers).forEach(key => {
-            if (headers[key] !== undefined) {
-                response.setHeader(key, headers[key]);
+        const keys = Object.keys(headers);
+        const keysLen = keys.length;
+        for (let i = 0; i < keysLen; i++) {
+            const key = keys[i];
+            const header = headers[key];
+            if (header !== undefined) {
+                response.setHeader(key, header);
             }
-        });
+        }
     }
+    const uids = log.getSerializedUids();
     response.setHeader('server', 'AmazonS3');
     // to be expanded in further implementation of logging of requests
-    response.setHeader('x-amz-id-2', log.getSerializedUids());
-    response.setHeader('x-amz-request-id', log.getSerializedUids());
+    response.setHeader('x-amz-id-2', uids);
+    response.setHeader('x-amz-request-id', uids);
     return response;
 }
 
 /**
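
Note on the hunk above: the forEach over Object.keys becomes an indexed loop, each header value is read once, and log.getSerializedUids() is called once instead of twice. A minimal sketch with a fake response object and a plain string standing in for the serialized uids, not the real http response and request logger:

    // Sketch: fakeResponse and uids are illustrative stand-ins.
    function setHeaders(headers, response, uids) {
        const keys = Object.keys(headers);
        const keysLen = keys.length;
        for (let i = 0; i < keysLen; i++) {
            const key = keys[i];
            const value = headers[key];
            if (value !== undefined) {
                response.setHeader(key, value);
            }
        }
        // serialized uids computed once, reused for both headers
        response.setHeader('x-amz-id-2', uids);
        response.setHeader('x-amz-request-id', uids);
    }

    const fakeResponse = { setHeader: (k, v) => console.log(k, v) };
    setHeaders({ 'Content-Length': '0', skipped: undefined }, fakeResponse, 'req:1');
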
@@ -33,9 +38,10 @@ function setCommonResponseHeaders(headers, response, log) {
  * @return {object} response - response object with additional headers
  */
 function okHeaderResponse(headers, response, httpCode, log) {
-    log.trace('sending success header response');
+    log.debug('sending success header response', {
+        httpCode,
+    });
     setCommonResponseHeaders(headers, response, log);
-    log.debug('response http code', { httpCode });
     response.writeHead(httpCode);
     return response.end(() => {
         log.end().info('responded to request', {
@@ -53,11 +59,12 @@ function okHeaderResponse(headers, response, httpCode, log) {
  * @return {object} response - response object with additional headers
  */
 function okXMLResponse(xml, response, log, additionalHeaders) {
-    log.trace('sending success xml response');
+    log.debug('sending success xml response', {
+        xml,
+        httpCode: 200,
+    });
     setCommonResponseHeaders(additionalHeaders, response, log);
     response.writeHead(200, { 'Content-type': 'application/xml' });
-    log.debug('response http code', { httpCode: 200 });
-    log.trace('xml response', { xml });
     return response.end(xml, 'utf8', () => {
         log.end().info('responded with XML', {
             httpCode: response.statusCode,
@@ -76,8 +83,7 @@ function errorXMLResponse(errCode, response, log) {
    <RequestId>4442587FB7D0A2F9</RequestId>
    </Error>
    */
-    const xml = [];
-    xml.push(
+    const xml = [
         '<?xml version="1.0" encoding="UTF-8"?>',
         '<Error>',
         `<Code>${errCode.message}</Code>`,
@@ -85,7 +91,7 @@ function errorXMLResponse(errCode, response, log) {
         '<Resource></Resource>',
         `<RequestId>${log.getSerializedUids()}</RequestId>`,
         '</Error>'
-    );
+    ];
     setCommonResponseHeaders(null, response, log);
     response.writeHead(errCode.code, { 'Content-type': 'application/xml' });
     return response.end(xml.join(''), 'utf8', () => {
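
Note on the two hunks above: the error body is now built from a single array literal joined once, instead of creating an empty array and pushing into it. A reduced sketch with hard-coded values standing in for errCode and log.getSerializedUids():

    // Sketch: errCode fields and the request id are illustrative values.
    const errCode = { message: 'NotImplemented', description: 'Not implemented.' };
    const requestId = '4442587FB7D0A2F9';
    const xml = [
        '<?xml version="1.0" encoding="UTF-8"?>',
        '<Error>',
        `<Code>${errCode.message}</Code>`,
        `<Message>${errCode.description}</Message>`,
        '<Resource></Resource>',
        `<RequestId>${requestId}</RequestId>`,
        '</Error>',
    ].join('');
    console.log(xml);
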
@@ -238,38 +244,40 @@ const routesUtils = {
         // Range is inclusive of endpoint so need plus 1
         const max = end - begin + 1;
         let total = 0;
-        for (let i = 0; i < dataLocations.length; i++) {
+        const dataLocationsLen = dataLocations.length;
+        for (let i = 0; i < dataLocationsLen; i++) {
             if (total >= max) {
                 break;
             }
-            const partStart = parseInt(dataLocations[i].start, 10);
-            const partSize = parseInt(dataLocations[i].size, 10);
+            const current = dataLocations[i];
+            const partStart = parseInt(current.start, 10);
+            const partSize = parseInt(current.size, 10);
             if (partStart + partSize < begin) {
                 continue;
             }
             if (partStart >= begin) {
                 // If the whole part is in the range, just include it
                 if (partSize + total <= max) {
-                    const partWithoutRange = dataLocations[i];
+                    const partWithoutRange = current;
                     partWithoutRange.size = partSize.toString();
                     parsedLocations.push(partWithoutRange);
                     total += partSize;
                 // Otherwise set a range limit on the part end
                 // and we're done
                 } else {
-                    const partWithRange = dataLocations[i];
+                    const partWithRange = current;
                     // Need to subtract one from endPart since range
                     // includes endPart in byte count
                     const endPart = Math.min(partSize - 1, max - total - 1);
                     partWithRange.range = [0, endPart];
                     // modify size to be stored for object put part copy
                     partWithRange.size = (endPart + 1).toString();
-                    parsedLocations.push(dataLocations[i]);
+                    parsedLocations.push(current);
                     break;
                 }
             } else {
                 // Offset start (and end if necessary)
-                const partWithRange = dataLocations[i];
+                const partWithRange = current;
                 const startOffset = begin - partStart;
                 // Use full remaining part if remaining partSize is less
                 // than byte range we need to satisfy. Or use byte range
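
Note on the hunk above: dataLocations.length and dataLocations[i] are each read once per iteration instead of repeatedly; the part and range logic itself is unchanged. A reduced sketch of that caching pattern only, summing made-up part sizes rather than building parsedLocations:

    // Sketch: made-up part descriptors; only the caching pattern is shown.
    const dataLocations = [
        { start: '0', size: '5' },
        { start: '5', size: '10' },
    ];
    const dataLocationsLen = dataLocations.length;
    let total = 0;
    for (let i = 0; i < dataLocationsLen; i++) {
        const current = dataLocations[i]; // single lookup, reused below
        total += parseInt(current.size, 10);
    }
    console.log(total); // 15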

Changed file 4 of 4

@@ -48,7 +48,7 @@ class S3Server {
             rejectUnauthorized: true,
         }, (req, res) => {
             // disable nagle algorithm
-            req.socket.setNoDelay();
+            req.socket.setNoDelay(true);
             routes(req, res, logger);
         });
         logger.info('Https server configuration', {
@@ -57,7 +57,7 @@ class S3Server {
         } else {
             this.server = http.createServer((req, res) => {
                 // disable nagle algorithm
-                req.socket.setNoDelay(true);
+                req.socket.setNoDelay(true);
                 routes(req, res, logger);
             });
             logger.info('Http server configuration', {
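
Note on the two hunks above: socket.setNoDelay() already defaults to true in Node, so passing true only makes the intent explicit; behaviour is unchanged. A bare-bones sketch outside the S3Server class, using a plain http server rather than the project's wrapper:

    // Sketch: a plain http server, not the project's S3Server class.
    const http = require('http');

    const server = http.createServer((req, res) => {
        req.socket.setNoDelay(true); // disable Nagle's algorithm explicitly
        res.end('ok');
    });

    server.listen(0, () => {
        console.log('listening on', server.address().port);
        server.close();
    });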