Compare commits


1 Commit

Author       SHA1         Message                                 Date
bbuchanan9   72d1d258cc   bugfix: S3C-2052 Delete orphaned data   2019-08-13 18:56:03 -07:00
7 changed files with 183 additions and 4 deletions

View File

@@ -0,0 +1,16 @@
+const data = require('../../../data/wrapper');
+
+function dataDelete(objectGetInfo, log, cb) {
+    data.delete(objectGetInfo, log, err => {
+        if (err) {
+            log.error('error deleting object data', {
+                error: err,
+                method: 'dataDelete',
+            });
+            return cb(err);
+        }
+        return cb();
+    });
+}
+
+module.exports = { dataDelete };
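
For orientation, here is a usage sketch of the new helper (not part of the commit; the require path, logger setup, and the `{ key }` locator shape are assumptions based on how the unit tests below exercise it):

// Hypothetical caller of the new dataDelete helper.
const assert = require('assert');
const { Logger } = require('werelogs');
const { dataDelete } =
    require('./lib/api/apiUtils/object/deleteObject'); // path assumed

const log = new Logger('example').newRequestLogger();

// `objectGetInfo` is whatever locator the data layer returned for the
// stored object; the in-memory backend accepts `{ key }` or a bare key.
dataDelete({ key: 1 }, log, err => {
    if (err) {
        // dataDelete has already logged the failure; callers only propagate it.
        console.error('delete failed:', err);
        return;
    }
    console.log('orphaned data removed');
});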

View File

@@ -7,6 +7,7 @@ const { BackendInfo } = require('./apiUtils/object/BackendInfo');
 const constants = require('../../constants');
 const data = require('../data/wrapper');
 const { dataStore } = require('./apiUtils/object/storeObject');
+const { dataDelete } = require('./apiUtils/object/deleteObject');
 const { isBucketAuthorized } = require('./apiUtils/authorization/aclChecks');
 const kms = require('../kms/wrapper');
 const metadata = require('../metadata/wrapper');
@@ -322,7 +323,12 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
                     error: err,
                     method: 'objectPutPart::metadata.putObjectMD',
                 });
-                return next(err, destinationBucket);
+                // Make a best-effort single attempt to clean up the
+                // orphaned data while returning the original error from
+                // the metadata layer.
+                return dataDelete(dataGetInfo, log, () => {
+                    next(err, destinationBucket);
+                });
             }
             return next(null, oldLocations, objectLocationConstraint,
                 destinationBucket, hexDigest, prevObjectSize);
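
The shape of this change is worth calling out: cleanup is attempted exactly once, and whatever happens to it, the caller still sees the original metadata error, never a secondary cleanup failure. A minimal generic sketch of the pattern (names hypothetical, not part of the commit):

// Generic best-effort cleanup: run `cleanup`, swallow its error, and
// always report the original failure to the continuation.
function failAfterCleanup(originalErr, cleanup, next) {
    cleanup(() => {
        // Any cleanup error was already logged inside `cleanup` and is
        // intentionally dropped so it cannot mask `originalErr`.
        next(originalErr);
    });
}

In the hunk above, `dataDelete` plays the role of `cleanup` and `next(err, destinationBucket)` is the continuation.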

View File

@@ -18,6 +18,8 @@ function resetCount() {
 }
 
 const backend = {
+    errors: {}, // Used for simulation of data errors.
+
     put: function putMem(request, size, keyContext, reqUids, callback) {
         const log = createLogger(reqUids);
         const value = Buffer.alloc(size);
@@ -78,8 +80,11 @@
     },
     delete: function delMem(objectGetInfo, reqUids, callback) {
+        if (backend.errors.delete) {
+            return process.nextTick(() => callback(backend.errors.delete));
+        }
         const key = objectGetInfo.key ? objectGetInfo.key : objectGetInfo;
-        process.nextTick(() => {
+        return process.nextTick(() => {
             delete ds[key];
             return callback(null);
         });

View File

@@ -23,6 +23,8 @@ function inc(str) {
 }
 
 const metastore = {
+    errors: {}, // Used for simulation of metadata errors.
+
     createBucket: (bucketName, bucketMD, log, cb) => {
         process.nextTick(() => {
             metastore.getBucketAttributes(bucketName, log, (err, bucket) => {
@@ -78,7 +80,10 @@
     },
     putObject: (bucketName, objName, objVal, params, log, cb) => {
-        process.nextTick(() => {
+        if (metastore.errors.putObject) {
+            return process.nextTick(() => cb(metastore.errors.putObject));
+        }
+        return process.nextTick(() => {
             metastore.getBucketAttributes(bucketName, log, err => {
                 if (err) {
                     return cb(err);
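
Both in-memory backends now expose the same test seam: setting `backend.errors.<method>` or `metastore.errors.<method>` makes the next call to that method fail with the given error, with no module stubbing. A sketch of the intended use, assuming arsenal's error set and a require path like the one the new tests below use:

// Force the next putObject to fail, then remove the hook so later
// tests see normal behavior again.
const assert = require('assert');
const { errors } = require('arsenal');
const { Logger } = require('werelogs');
const metastore = require('./lib/metadata/in_memory/backend'); // path assumed

const log = new Logger('example').newRequestLogger();

metastore.errors.putObject = errors.InternalError;
metastore.putObject('bucket', 'key', {}, {}, log, err => {
    delete metastore.errors.putObject;
    // The injected error comes back before any metadata work is done.
    assert.deepStrictEqual(err, errors.InternalError);
});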

View File

@@ -30,7 +30,7 @@ const log = new DummyRequestLogger();
 const canonicalID = 'accessKey1';
 const authInfo = makeAuthInfo(canonicalID);
 const namespace = 'default';
-const bucketName = `bucketname-${Date.now}`;
+const bucketName = `bucketname-${Date.now()}`;
 const body1 = Buffer.from('I am a body', 'utf8');
 const body2 = Buffer.from('I am a body with a different ETag', 'utf8');
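
The one-character fix above is easy to miss: without the call parentheses, the template literal interpolates the function object itself, so every run produced the same (and invalid) bucket name. For illustration:

// What each template literal actually produces:
const bad = `bucketname-${Date.now}`;    // "bucketname-function now() { [native code] }"
const good = `bucketname-${Date.now()}`; // e.g. "bucketname-1565747763000", unique per run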

View File

@@ -0,0 +1,42 @@
+const assert = require('assert');
+const { Logger } = require('werelogs');
+const { errors } = require('arsenal');
+const helpers = require('../../helpers');
+const { ds, backend } = require('../../../../lib/data/in_memory/backend');
+const { dataDelete } =
+    require('../../../../lib/api/apiUtils/object/deleteObject');
+
+const log = new Logger('_').newRequestLogger();
+
+describe('dataDelete utility', () => {
+    const key = 1;
+    const value = Buffer.from('_');
+
+    beforeEach(() => helpers.cleanup());
+
+    describe('success case', () => {
+        beforeEach(done => {
+            ds[key] = { value };
+            dataDelete({ key }, log, done);
+        });
+
+        it('should delete the key', () => {
+            assert.strictEqual(ds[key], undefined);
+        });
+    });
+
+    describe('error case', () => {
+        beforeEach(done => {
+            ds[key] = { value };
+            backend.errors.delete = errors.InternalError;
+            dataDelete({ key }, log, err => {
+                delete backend.errors.delete;
+                assert.deepStrictEqual(err, errors.InternalError);
+                done();
+            });
+        });
+
+        it('should not delete the key', () => {
+            assert.deepStrictEqual(ds[key], { value });
+        });
+    });
+});

View File

@@ -0,0 +1,105 @@
+const crypto = require('crypto');
+const assert = require('assert');
+const async = require('async');
+const { errors } = require('arsenal');
+const { parseString } = require('xml2js');
+const helpers = require('../helpers');
+const DummyRequest = require('../DummyRequest');
+const bucketPut = require('../../../lib/api/bucketPut').bucketPut;
+const initiateMultipartUpload =
+    require('../../../lib/api/initiateMultipartUpload');
+const objectPutPart = require('../../../lib/api/objectPutPart');
+const { ds } = require('../../../lib/data/in_memory/backend');
+const metastore = require('../../../lib/metadata/in_memory/backend');
+
+function createBucket(authInfo, log, cb) {
+    const request = {
+        namespace: 'default',
+        bucketName: 'bucketname',
+        url: '/',
+        headers: {
+            host: 'localhost',
+        },
+        post:
+            '<CreateBucketConfiguration>' +
+            '<LocationConstraint>scality-internal-mem</LocationConstraint>' +
+            '</CreateBucketConfiguration>',
+    };
+    bucketPut(authInfo, request, log, cb);
+}
+
+function initiateMPU(authInfo, log, cb) {
+    const request = {
+        namespace: 'default',
+        bucketName: 'bucketname',
+        objectKey: 'objectKey',
+        url: '/objectKey?uploads',
+        headers: {
+            host: 'localhost',
+        },
+    };
+    initiateMultipartUpload(authInfo, request, log, cb);
+}
+
+function parseUploadID(res, cb) {
+    parseString(res, (err, json) => {
+        if (err) {
+            return cb(err);
+        }
+        const uploadId = json.InitiateMultipartUploadResult.UploadId[0];
+        return cb(null, uploadId);
+    });
+}
+
+function putMPUPart(uploadId, authInfo, log, cb) {
+    const body = Buffer.from('_', 'utf8');
+    const request = new DummyRequest({
+        namespace: 'default',
+        bucketName: 'bucketname',
+        objectKey: 'objectKey',
+        url: `/objectKey?partNumber=1&uploadId=${uploadId}`,
+        headers: {
+            host: 'localhost',
+        },
+        query: {
+            partNumber: '1',
+            uploadId,
+        },
+        calculatedHash: crypto
+            .createHash('md5')
+            .update(body)
+            .digest('hex'),
+    }, body);
+    objectPutPart(authInfo, request, undefined, log, cb);
+}
+
+describe('Multipart Upload API', () => {
+    beforeEach(() => helpers.cleanup());
+
+    describe('when metadata layer fails', () => {
+        const authInfo = helpers.makeAuthInfo();
+        const log = new helpers.DummyRequestLogger();
+
+        beforeEach(done => {
+            async.waterfall([
+                next => createBucket(authInfo, log, next),
+                (_, next) => initiateMPU(authInfo, log, next),
+                (res, _, next) => parseUploadID(res, next),
+                (uploadId, next) => {
+                    metastore.errors.putObject = errors.InternalError;
+                    putMPUPart(uploadId, authInfo, log, err => {
+                        delete metastore.errors.putObject;
+                        assert(err === errors.InternalError);
+                        return next();
+                    });
+                },
+            ], done);
+        });
+
+        it('should cleanup orphaned data', () => {
+            assert.strictEqual(ds.length, 2);
+            assert.strictEqual(ds[0], undefined);
+            assert.strictEqual(ds[1], undefined);
+        });
+    });
+});
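
The final assertions rely on two details of the in-memory data store: keys appear to be sequential integers starting at 1 (inferred from `resetCount` above, so index 0 of the backing array is never used), and `delete ds[key]` leaves a hole rather than shrinking the array. A plain-array illustration of why `ds.length` stays 2 even though both slots read as undefined:

// Plain JS array with the same semantics as the in-memory `ds` store.
const ds = [];
ds[1] = { value: Buffer.from('_') }; // the part's data lands at key 1
console.log(ds.length);              // 2 (indices 0 and 1)
delete ds[1];                        // cleanup removes the entry...
console.log(ds.length);              // ...but length is still 2
console.log(ds[0], ds[1]);           // undefined undefined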