Compare commits

...

10 Commits

Author SHA1 Message Date
Vianney Rancurel c55c4abe6c ft: ARSN-114 in_memory backend
Minor changes.
Create a basic test
2022-03-15 09:42:30 -07:00
Taylor McKinnon 13f33a81a6 ft(CLDSRV-102): Add Aborted MPU PUT 2022-02-03 15:29:35 -08:00
bbuchanan9 08c1a2046d Revert "bugfix: S3C-2052 Delete orphaned data"
This reverts commit d45fbdbf25e93fbc7fded81a718f29d9adca0bbd.
2019-08-28 17:02:26 -07:00
bbuchanan9 ab239ffa54 Revert "bugfix: S3C-2052 Delete orphaned data in APIs"
This reverts commit 5bf5fc861cab38e24209f86055f525aa627be67b.
2019-08-28 14:34:40 -07:00
bbuchanan9 ad1ee70c4e bugfix: S3C-2052 Delete orphaned data in APIs
Cleanup orphaned data in error cases for
the following APIs:

* objectPut
* objectCopy
* objectCopyPart
2019-08-13 16:08:23 -07:00
bbuchanan9 61bb75b276 bugfix: S3C-2052 Delete orphaned data 2019-08-09 10:31:56 -07:00
Dora Korpar 52bbe85463 ft: S3C-1171 list objects v2 2018-09-04 18:00:10 -07:00
Rahul Padigela 8921a0d9c7 fix: update mem versioning to reflect other backends
This commit fixes the memory backend implementation of versioning to be inline with
Versioning implementation for other backends - When versionId is specified, update
existing version and also update master if put is newer or same version than master.
If master is not available, create master.
2018-01-08 23:47:56 -08:00
Electra Chong 10a99a5f44 chore: require modules instead of import 2017-05-09 17:12:09 -07:00
Electra Chong d053da3f6c rf: clean up listing types, parsing
No longer use 'Basic' listing type.
Move JSON parsing for 'DelimiterVersions' to metadata wrapper to be consistent with what we do for other listing types.
Add some more assertions to listing tests.
2017-05-04 11:27:08 -07:00
12 changed files with 180 additions and 54 deletions

View File

@ -106,6 +106,13 @@ module.exports = {
require('./lib/storage/metadata/file/MetadataFileClient'), require('./lib/storage/metadata/file/MetadataFileClient'),
LogConsumer: LogConsumer:
require('./lib/storage/metadata/bucketclient/LogConsumer'), require('./lib/storage/metadata/bucketclient/LogConsumer'),
inMemory: {
metastore:
require('./lib/storage/metadata/in_memory/metastore'),
metadata: require('./lib/storage/metadata/in_memory/metadata'),
bucketUtilities:
require('./lib/storage/metadata/in_memory/bucket_utilities'),
},
}, },
data: { data: {
file: { file: {

View File

@ -0,0 +1,9 @@
module.exports = {
Basic: require('./basic').List,
Delimiter: require('./delimiter').Delimiter,
DelimiterVersions: require('./delimiterVersions')
.DelimiterVersions,
DelimiterMaster: require('./delimiterMaster')
.DelimiterMaster,
MPU: require('./MPU').MultipartUploads,
};

View File

@ -3,8 +3,6 @@ const errors = require('../../errors');
const BucketInfo = require('../../models/BucketInfo'); const BucketInfo = require('../../models/BucketInfo');
const BucketClientInterface = require('./bucketclient/BucketClientInterface'); const BucketClientInterface = require('./bucketclient/BucketClientInterface');
const BucketFileInterface = require('./file/BucketFileInterface');
const MongoClientInterface = require('./mongoclient/MongoClientInterface');
const metastore = require('./in_memory/metastore'); const metastore = require('./in_memory/metastore');
let CdmiMetadata; let CdmiMetadata;
@ -71,25 +69,10 @@ class MetadataWrapper {
if (clientName === 'mem') { if (clientName === 'mem') {
this.client = metastore; this.client = metastore;
this.implName = 'memorybucket'; this.implName = 'memorybucket';
} else if (clientName === 'file') {
this.client = new BucketFileInterface(params, logger);
this.implName = 'bucketfile';
} else if (clientName === 'scality') { } else if (clientName === 'scality') {
this.client = new BucketClientInterface(params, bucketclient, this.client = new BucketClientInterface(params, bucketclient,
logger); logger);
this.implName = 'bucketclient'; this.implName = 'bucketclient';
} else if (clientName === 'mongodb') {
this.client = new MongoClientInterface({
replicaSetHosts: params.mongodb.replicaSetHosts,
writeConcern: params.mongodb.writeConcern,
replicaSet: params.mongodb.replicaSet,
readPreference: params.mongodb.readPreference,
database: params.mongodb.database,
replicationGroupId: params.replicationGroupId,
path: params.mongodb.path,
logger,
});
this.implName = 'mongoclient';
} else if (clientName === 'cdmi') { } else if (clientName === 'cdmi') {
if (!CdmiMetadata) { if (!CdmiMetadata) {
throw new Error('Unauthorized backend'); throw new Error('Unauthorized backend');

View File

@ -1,6 +1,6 @@
import ListResult from './ListResult'; const ListResult = require('./ListResult');
export class ListMultipartUploadsResult extends ListResult { class ListMultipartUploadsResult extends ListResult {
constructor() { constructor() {
super(); super();
this.Uploads = []; this.Uploads = [];
@ -28,3 +28,7 @@ export class ListMultipartUploadsResult extends ListResult {
this.MaxKeys += 1; this.MaxKeys += 1;
} }
} }
module.exports = {
ListMultipartUploadsResult,
};

View File

@ -24,4 +24,4 @@ class ListResult {
} }
} }
export default ListResult; module.exports = ListResult;

View File

@ -1,4 +1,4 @@
export function markerFilterMPU(allMarkers, array) { function markerFilterMPU(allMarkers, array) {
const { keyMarker, uploadIdMarker } = allMarkers; const { keyMarker, uploadIdMarker } = allMarkers;
for (let i = 0; i < array.length; i++) { for (let i = 0; i < array.length; i++) {
// If the keyMarker is the same as the key, // If the keyMarker is the same as the key,
@ -30,7 +30,7 @@ export function markerFilterMPU(allMarkers, array) {
return array; return array;
} }
export function prefixFilter(prefix, array) { function prefixFilter(prefix, array) {
for (let i = 0; i < array.length; i++) { for (let i = 0; i < array.length; i++) {
if (array[i].indexOf(prefix) !== 0) { if (array[i].indexOf(prefix) !== 0) {
array.splice(i, 1); array.splice(i, 1);
@ -40,6 +40,12 @@ export function prefixFilter(prefix, array) {
return array; return array;
} }
export function isKeyInContents(responseObject, key) { function isKeyInContents(responseObject, key) {
return responseObject.Contents.some(val => val.key === key); return responseObject.Contents.some(val => val.key === key);
} }
module.exports = {
markerFilterMPU,
prefixFilter,
isKeyInContents,
};

View File

@ -1,12 +1,11 @@
import { errors } from 'arsenal'; const errors = require('../../../errors');
import { markerFilterMPU, prefixFilter } const { markerFilterMPU, prefixFilter } = require('./bucket_utilities');
from './bucket_utilities'; const { ListMultipartUploadsResult } = require('./ListMultipartUploadsResult');
import { ListMultipartUploadsResult } from './ListMultipartUploadsResult'; const { metadata } = require('./metadata');
import { metadata } from './metadata';
const defaultMaxKeys = 1000; const defaultMaxKeys = 1000;
export default function getMultipartUploadListing(bucket, params, callback) { function getMultipartUploadListing(bucket, params, callback) {
const { delimiter, keyMarker, const { delimiter, keyMarker,
uploadIdMarker, prefix, queryPrefixLength, splitter } = params; uploadIdMarker, prefix, queryPrefixLength, splitter } = params;
const splitterLen = splitter.length; const splitterLen = splitter.length;
@ -145,3 +144,5 @@ export default function getMultipartUploadListing(bucket, params, callback) {
response.IsTruncated = maxKeys === 0 ? false : response.IsTruncated; response.IsTruncated = maxKeys === 0 ? false : response.IsTruncated;
return callback(null, response); return callback(null, response);
} }
module.exports = getMultipartUploadListing;

View File

@ -3,4 +3,6 @@ const metadata = {
keyMaps: new Map, keyMaps: new Map,
}; };
export { metadata }; module.exports = {
metadata,
};

View File

@ -1,16 +1,15 @@
import { errors, algorithms, versioning } from 'arsenal'; const errors = require('../../../errors');
const list = require('../../../algos/list/exportAlgos');
import getMultipartUploadListing from './getMultipartUploadListing'; const genVID =
import { metadata } from './metadata'; require('../../../versioning/VersionID').generateVersionId;
import config from '../../Config'; const getMultipartUploadListing = require('./getMultipartUploadListing');
const { metadata } = require('./metadata');
const genVID = versioning.VersionID.generateVersionId;
const defaultMaxKeys = 1000; const defaultMaxKeys = 1000;
let uidCounter = 0; let uidCounter = 0;
function generateVersionId() { function generateVersionId() {
return genVID(uidCounter++, config.replicationGroupId); return genVID(uidCounter++, undefined);
} }
function formatVersionKey(key, versionId) { function formatVersionKey(key, versionId) {
@ -79,10 +78,35 @@ const metastore = {
putObject: (bucketName, objName, objVal, params, log, cb) => { putObject: (bucketName, objName, objVal, params, log, cb) => {
process.nextTick(() => { process.nextTick(() => {
metastore.getBucketAttributes(bucketName, log, err => { // Ignore the PUT done by AbortMPU
if (params && params.isAbort) {
return cb(null);
}
return metastore.getBucketAttributes(bucketName, log, err => {
if (err) { if (err) {
return cb(err); return cb(err);
} }
/*
valid combinations of versioning options:
- !versioning && !versionId: normal non-versioning put
- versioning && !versionId: create a new version
- versionId: update (PUT/DELETE) an existing version,
and also update master version in case the put
version is newer or same version than master.
if versionId === '' update master version
*/
if (params && params.versionId) {
objVal.versionId = params.versionId; // eslint-disable-line
const mst = metadata.keyMaps.get(bucketName).get(objName);
if (mst && mst.versionId === params.versionId || !mst) {
metadata.keyMaps.get(bucketName).set(objName, objVal);
}
// eslint-disable-next-line
objName = formatVersionKey(objName, params.versionId);
metadata.keyMaps.get(bucketName).set(objName, objVal);
return cb(null, `{"versionId":"${objVal.versionId}"}`);
}
if (params && params.versioning) { if (params && params.versioning) {
const versionId = generateVersionId(); const versionId = generateVersionId();
objVal.versionId = versionId; // eslint-disable-line objVal.versionId = versionId; // eslint-disable-line
@ -97,16 +121,6 @@ const metastore = {
objVal.versionId = versionId; // eslint-disable-line objVal.versionId = versionId; // eslint-disable-line
metadata.keyMaps.get(bucketName).set(objName, objVal); metadata.keyMaps.get(bucketName).set(objName, objVal);
return cb(null, `{"versionId":"${objVal.versionId}"}`); return cb(null, `{"versionId":"${objVal.versionId}"}`);
} else if (params && params.versionId) {
objVal.versionId = params.versionId; // eslint-disable-line
const mst = metadata.keyMaps.get(bucketName).get(objName);
if (mst && mst.versionId === params.versionId) {
metadata.keyMaps.get(bucketName).set(objName, objVal);
}
// eslint-disable-next-line
objName = formatVersionKey(objName, params.versionId);
metadata.keyMaps.get(bucketName).set(objName, objVal);
return cb(null, `{"versionId":"${objVal.versionId}"}`);
} }
metadata.keyMaps.get(bucketName).set(objName, objVal); metadata.keyMaps.get(bucketName).set(objName, objVal);
return cb(null); return cb(null);
@ -205,7 +219,14 @@ const metastore = {
listObject(bucketName, params, log, cb) { listObject(bucketName, params, log, cb) {
process.nextTick(() => { process.nextTick(() => {
const { prefix, marker, delimiter, maxKeys } = params; const {
prefix,
marker,
delimiter,
maxKeys,
continuationToken,
startAfter,
} = params;
if (prefix && typeof prefix !== 'string') { if (prefix && typeof prefix !== 'string') {
return cb(errors.InvalidArgument); return cb(errors.InvalidArgument);
} }
@ -222,6 +243,14 @@ const metastore = {
return cb(errors.InvalidArgument); return cb(errors.InvalidArgument);
} }
if (continuationToken && typeof continuationToken !== 'string') {
return cb(errors.InvalidArgument);
}
if (startAfter && typeof startAfter !== 'string') {
return cb(errors.InvalidArgument);
}
// If paramMaxKeys is undefined, the default parameter will set it. // If paramMaxKeys is undefined, the default parameter will set it.
// However, if it is null, the default parameter will not set it. // However, if it is null, the default parameter will not set it.
let numKeys = maxKeys; let numKeys = maxKeys;
@ -235,8 +264,8 @@ const metastore = {
// If marker specified, edit the keys array so it // If marker specified, edit the keys array so it
// only contains keys that occur alphabetically after the marker // only contains keys that occur alphabetically after the marker
const listingType = params.listingType || 'Delimiter'; const listingType = params.listingType;
const extension = new algorithms.list[listingType](params, log); const extension = new list[listingType](params, log);
const listingParams = extension.genMDParams(); const listingParams = extension.genMDParams();
const keys = []; const keys = [];
@ -302,4 +331,4 @@ const metastore = {
}, },
}; };
export default metastore; module.exports = metastore;

View File

@ -3,7 +3,7 @@
"engines": { "engines": {
"node": ">=16" "node": ">=16"
}, },
"version": "7.10.13", "version": "7.10.14",
"description": "Common utilities for the S3 project components", "description": "Common utilities for the S3 project components",
"main": "index.js", "main": "index.js",
"repository": { "repository": {

View File

@ -0,0 +1,26 @@
{
"acl": {
"Canned": "private",
"FULL_CONTROL": [],
"WRITE": [],
"WRITE_ACP": [],
"READ": [],
"READ_ACP": []
},
"name": "BucketName",
"owner": "9d8fe19a78974c56dceb2ea4a8f01ed0f5fecb9d29f80e9e3b84104e4a3ea520",
"ownerDisplayName": "anonymousCoward",
"creationDate": "2018-06-04T17:45:42.592Z",
"mdBucketModelVersion": 8,
"transient": false,
"deleted": false,
"serverSideEncryption": null,
"versioningConfiguration": null,
"websiteConfiguration": null,
"locationConstraint": "us-east-1",
"readLocationConstraint": "us-east-1",
"cors": null,
"replicationConfiguration": null,
"lifecycleConfiguration": null,
"uid": "fea97818-6a9a-11e8-9777-e311618cc5d4"
}

View File

@ -0,0 +1,59 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const MetadataWrapper = require('../../../../../lib/storage/metadata/MetadataWrapper');
const fakeBucketInfo = require('./FakeBucketInfo.json');

// End-to-end exercise of the in-memory metadata backend through the
// MetadataWrapper facade: put, get, delete, then verify the key is gone.
describe('InMemory', () => {
    const fakeBucket = 'fake';
    const logger = new werelogs.Logger('Injector');
    const memBackend = new MetadataWrapper(
        'mem', {}, null, logger);

    // Create the bucket the test operates on; torn down in after().
    before(done => {
        memBackend.createBucket(fakeBucket, fakeBucketInfo, logger, done);
    });

    after(done => {
        memBackend.deleteBucket(fakeBucket, logger, done);
    });

    it('basic', done => {
        async.waterfall([
            // Store object metadata under key 'foo'.
            next => memBackend.putObjectMD(fakeBucket, 'foo', 'bar', {},
                logger, err => next(err)),
            // Read it back and check the stored value round-trips.
            next => memBackend.getObjectMD(fakeBucket, 'foo', {}, logger,
                (err, data) => {
                    if (err) {
                        return next(err);
                    }
                    assert.deepEqual(data, 'bar');
                    return next();
                }),
            // Remove the key.
            next => memBackend.deleteObjectMD(fakeBucket, 'foo', {}, logger,
                err => next(err)),
            // A second read must now fail with NoSuchKey.
            next => memBackend.getObjectMD(fakeBucket, 'foo', {}, logger,
                err => {
                    if (!err) {
                        return next(new Error('unexpected success'));
                    }
                    assert.deepEqual(err.message, 'NoSuchKey');
                    return next();
                }),
        ], done);
    });
});