Compare commits

2 Commits

Author      SHA1        Message             Date
vrancurel   049ee02cc1  small changes       2018-10-30 17:18:19 -07:00
vrancurel   4b17ed1b3d  fs type location    2018-09-20 10:53:23 -07:00
  This specific location allows writing 0-byte objects with size and
  md5sum passed as properties. Those objects can be re-read from a local
  mountPath. The mountPath is a property of the fs location. For now it
  only supports reading objects.
7 changed files with 191 additions and 5 deletions
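In practice, this change set lets a client register a 0-byte placeholder object whose real size and MD5 travel in user metadata, and then read the actual bytes back from the fs location's mountPath. A minimal sketch with the Node aws-sdk follows; the endpoint, credentials, bucket name, key, size and file contents are illustrative assumptions, and the bucket is assumed to be pinned to an fs location such as the fs1 entry further down (see the createBucket sketch near the end of this compare).

// Sketch only, not part of this change set: write a 0-byte object whose real
// size and MD5 are passed as metadata, then stream the bytes back from the
// fs location's mountPath. Endpoint, credentials, bucket and key are assumed.
const crypto = require('crypto');
const AWS = require('aws-sdk');

const s3 = new AWS.S3({
    endpoint: 'http://localhost:8000',   // assumed CloudServer endpoint
    accessKeyId: 'accessKey1',           // assumed test credentials
    secretAccessKey: 'verySecretKey1',
    s3ForcePathStyle: true,
});

// The real file is assumed to already live under the location's mountPath,
// e.g. /home/vr/local-fs/fs-bucket/video.mp4; only its size and MD5 are sent.
const realSize = 1048576;                 // assumed size of the on-disk file
const realMd5 = crypto.createHash('md5')
    .update('...file contents...')        // placeholder for the real bytes
    .digest('base64');                    // FsClient expects a base64 digest

s3.putObject({
    Bucket: 'fs-bucket',
    Key: 'video.mp4',
    Body: '',                             // 0-byte payload
    Metadata: {
        size: String(realSize),           // becomes x-amz-meta-size
        md5chksum: realMd5,               // becomes x-amz-meta-md5chksum
    },
}, err => {
    if (err) {
        throw err;
    }
    // Reading the object streams the bytes back from the local mountPath.
    s3.getObject({ Bucket: 'fs-bucket', Key: 'video.mp4' })
        .createReadStream()
        .pipe(process.stdout);
});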

@@ -118,8 +118,8 @@ const constants = {
    objectLocationConstraintHeader: 'x-amz-meta-scal-location-constraint',
    legacyLocations: ['sproxyd', 'legacy'],
    /* eslint-disable camelcase */
-    externalBackends: { aws_s3: true, azure: true, gcp: true },
-    replicationBackends: { aws_s3: true, azure: true, gcp: true },
+    externalBackends: { aws_s3: true, azure: true, gcp: true, fs: true },
+    replicationBackends: { aws_s3: true, azure: true, gcp: true, fs: true },
    // some of the available data backends (if called directly rather
    // than through the multiple backend gateway) need a key provided
    // as a string as first parameter of the get/delete methods.

@@ -181,7 +181,8 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
            // regular puts are stored in the same data structure,
            // place the retrieval info here into a single element array
            const { key, dataStoreName, dataStoreType, dataStoreETag,
-                dataStoreVersionId } = dataGetInfo;
+                dataStoreVersionId, dataStoreSize,
+                dataStoreMd5 } = dataGetInfo;
            const prefixedDataStoreETag = dataStoreETag
                ? `1:${dataStoreETag}`
                : `1:${calculatedHash}`;
@@ -194,6 +195,12 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
                    cipherBundle.cipheredDataKey;
            }
            metadataStoreParams.contentMD5 = calculatedHash;
+            if (dataStoreSize !== undefined) {
+                metadataStoreParams.size = dataStoreSize;
+            }
+            if (dataStoreMd5 !== undefined) {
+                metadataStoreParams.contentMD5 = dataStoreMd5;
+            }
            return next(null, dataGetInfoArr);
        },
        function getVersioningInfo(infoArr, next) {

lib/data/external/FsClient.js  (new file, 156 lines added)

@@ -0,0 +1,156 @@
const { errors, s3middleware } = require('arsenal');
const werelogs = require('werelogs');
const MD5Sum = s3middleware.MD5Sum;
const getMetaHeaders = s3middleware.userMetadata.getMetaHeaders;
const createLogger = require('../multipleBackendLogger');
const { logHelper } = require('./utils');
const { config } = require('../../Config');
const fs = require('fs');
class FsClient {
    constructor(config) {
        this.clientType = 'fs';
        this.type = 'FS';
        this._bucketName = config.bucketName;
        this._bucketMatch = config.bucketMatch;
        this._serverSideEncryption = config.serverSideEncryption;
        this._dataStoreName = config.dataStoreName;
        this._supportsVersioning = config.supportsVersioning;
        this._mountPath = config.mountPath;
        this._logger = new werelogs.Logger('FsClient');
    }
    setup(cb) {
        return cb();
    }
    _createFsKey(requestBucketName, requestObjectKey,
        bucketMatch) {
        if (bucketMatch) {
            return requestObjectKey;
        }
        return `${requestBucketName}/${requestObjectKey}`;
    }
    toObjectGetInfo(objectKey, bucketName) {
        return {
            key: this._createFsKey(bucketName, objectKey, this._bucketMatch),
            dataStoreName: this._dataStoreName,
        };
    }
    put(stream, size, keyContext, reqUids, callback) {
        const log = createLogger(reqUids);
        if (size === 0) {
            const b64 = keyContext.metaHeaders['x-amz-meta-md5chksum'];
            let md5 = null;
            if (b64 !== undefined) {
                md5 = Buffer.from(b64, 'base64').toString('hex');
            }
            return callback(null, keyContext.objectKey, '',
                keyContext.metaHeaders['x-amz-meta-size'],
                md5
            );
        }
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }
    get(objectGetInfo, range, reqUids, callback) {
        const log = createLogger(reqUids);
        const filePath = `${this._mountPath}/${objectGetInfo.key}`;
        const readStreamOptions = {
            flags: 'r',
            encoding: null,
            fd: null,
            autoClose: false,
        };
        const rs = fs.createReadStream(filePath, readStreamOptions)
            .on('error', err => {
                logHelper(log, 'error', 'Error reading file', err,
                    this._dataStoreName, this.clientType);
                return callback(err);
            })
            .on('open', () => {
                return callback(null, rs);
            });
    }
    delete(objectGetInfo, reqUids, callback) {
        const log = createLogger(reqUids);
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }
    healthcheck(location, callback) {
        const fsResp = {};
        return callback(null, fsResp);
    }
    createMPU(key, metaHeaders, bucketName, websiteRedirectHeader, contentType,
        cacheControl, contentDisposition, contentEncoding, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }
    uploadPart(request, streamingV4Params, stream, size, key, uploadId,
        partNumber, bucketName, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }
    listParts(key, uploadId, bucketName, partNumberMarker, maxParts, log,
        callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }
    completeMPU(jsonList, mdInfo, key, uploadId, bucketName, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }
    abortMPU(key, uploadId, bucketName, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }
    objectPutTagging(key, bucket, objectMD, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }
    objectDeleteTagging(key, bucket, objectMD, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }
    copyObject(request, destLocationConstraintName, sourceKey,
        sourceLocationConstraintName, storeMetadataParams, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }
    uploadPartCopy(request, awsSourceKey, sourceLocationConstraintName,
        log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }
}
module.exports = FsClient;
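For operators, the read path in FsClient.get() is simply the location's mountPath joined with the key produced by _createFsKey(), so the on-disk layout has to match. A small sketch of that mapping, runnable from the repository root; the bucket name, object key and mountPath are illustrative assumptions:

// Illustrative mapping only; values are made up to show the on-disk layout.
const FsClient = require('./lib/data/external/FsClient');

// Hypothetical location settings, mirroring the fs1 entry further down.
const client = new FsClient({
    dataStoreName: 'fs1',
    bucketMatch: false,
    mountPath: '/home/vr/local-fs',
});

// With bucketMatch: false the bucket name becomes a directory prefix:
// logs { key: 'my-bucket/photos/cat.jpg', dataStoreName: 'fs1' }
console.log(client.toObjectGetInfo('photos/cat.jpg', 'my-bucket'));
// get() would then stream /home/vr/local-fs/my-bucket/photos/cat.jpg.
// With bucketMatch: true the key would just be 'photos/cat.jpg'.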

@@ -11,6 +11,7 @@ const inMemory = require('./in_memory/backend').backend;
const AwsClient = require('./external/AwsClient');
const GcpClient = require('./external/GcpClient');
const AzureClient = require('./external/AzureClient');
+const FsClient = require('./external/FsClient');
const proxyCompareUrl = require('./proxyCompareUrl');
const { config } = require('../Config');
@@ -128,6 +129,17 @@ function parseLC() {
            });
            clients[location].clientType = 'azure';
        }
+        if (locationObj.type === 'fs') {
+            clients[location] = new FsClient({
+                bucketName: locationObj.details.bucketName,
+                bucketMatch: locationObj.details.bucketMatch,
+                serverSideEncryption: locationObj.details.serverSideEncryption,
+                dataStoreName: location,
+                supportsVersioning: locationObj.details.supportsVersioning,
+                mountPath: locationObj.details.mountPath,
+            });
+            clients[location].clientType = 'fs';
+        }
    });
    return clients;
}

@@ -60,7 +60,8 @@ const multipleBackendGateway = {
            }
        }
        return client.put(writeStream, size, keyContext,
-            reqUids, (err, key, dataStoreVersionId) => {
+            reqUids, (err, key, dataStoreVersionId,
+            dataStoreSize, dataStoreMd5) => {
            const log = createLogger(reqUids);
            log.debug('put to location', { controllingLocationConstraint });
            if (err) {
@@ -73,6 +74,8 @@ const multipleBackendGateway = {
                dataStoreName: controllingLocationConstraint,
                dataStoreType: client.clientType,
                dataStoreVersionId,
+                dataStoreSize,
+                dataStoreMd5,
            };
            return callback(null, dataRetrievalInfo);
        });

@@ -94,5 +94,13 @@
        "objectId": "sa-east-1",
        "legacyAwsBehavior": false,
        "details": {}
-    }
+    },
+    "fs1": {
+        "type": "fs",
+        "objectId": "fs",
+        "legacyAwsBehavior": false,
+        "details": {
+            "mountPath": "/home/vr/local-fs"
+        }
+    }
}
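To exercise the fs1 location above, a bucket would typically be created with fs1 as its location constraint, and the corresponding files placed under /home/vr/local-fs before they are registered as 0-byte objects. A hedged aws-sdk sketch, reusing the illustrative endpoint and credentials from the earlier example:

// Sketch only: pin a bucket to the fs1 location defined above. Bucket name,
// endpoint and credentials are illustrative assumptions.
const AWS = require('aws-sdk');

const s3 = new AWS.S3({
    endpoint: 'http://localhost:8000',   // assumed CloudServer endpoint
    accessKeyId: 'accessKey1',           // assumed test credentials
    secretAccessKey: 'verySecretKey1',
    s3ForcePathStyle: true,
});

// Objects put into this bucket are resolved under
// /home/vr/local-fs/<bucket>/<key> on reads (unless bucketMatch is set
// on the location, in which case the bucket prefix is dropped).
s3.createBucket({
    Bucket: 'fs-bucket',
    CreateBucketConfiguration: { LocationConstraint: 'fs1' },
}, err => {
    if (err) {
        throw err;
    }
    console.log('bucket fs-bucket pinned to location fs1');
});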

@@ -67,7 +67,7 @@
        "cdmiclient": "scality/cdmiclient#8f0c2e6"
    },
    "scripts": {
-        "cloudserver": "S3METADATA=mongodb npm-run-all --parallel start_dataserver start_s3server",
+        "cloudserver": "npm-run-all --parallel start_dataserver start_s3server",
        "ft_awssdk": "cd tests/functional/aws-node-sdk && mocha test/",
        "ft_awssdk_aws": "cd tests/functional/aws-node-sdk && AWS_ON_AIR=true mocha test/",
        "ft_awssdk_buckets": "cd tests/functional/aws-node-sdk && mocha test/bucket",