Compare commits
1 commit
developmen ... temp/poc-o

Author | SHA1 | Date
---|---|---
williamlardier | 0d09ea6b47 |
@@ -17,7 +17,7 @@ const { preprocessingVersioningDelete }
     = require('./apiUtils/object/versioning');
 const createAndStoreObject = require('./apiUtils/object/createAndStoreObject');
 const monitoring = require('../utilities/monitoringHandler');
-const { metadataGetObject } = require('../metadata/metadataUtils');
+const { metadataGetObject, metadataGetObjects } = require('../metadata/metadataUtils');
 const { config } = require('../Config');
 const { isRequesterNonAccountUser } = require('./apiUtils/authorization/permissionChecks');
 const { hasGovernanceBypassHeader, checkUserGovernanceBypass, ObjectLockInfo }
@@ -356,6 +356,70 @@ function getObjMetadataAndDelete(authInfo, canonicalID, request,
     });
 }

+function getObjMetadataAndDeleteNover(authInfo, canonicalID, request,
+    bucketName, bucket, quietSetting, errorResults, inPlay, log, next) {
+    const successfullyDeleted = [];
+    let totalContentLengthDeleted = 0;
+    let numOfObjectsRemoved = 0;
+
+    // Get all objects metadata at once
+    return metadataGetObjects(bucketName, inPlay.map(entry => entry.key), log, (err, objMDs) => {
+        if (err) {
+            monitoring.promMetrics('DELETE', bucketName, err.code,
+                'multiObjectDelete');
+            return next(err);
+        }
+        // if no objects exist, return success
+        if (objMDs.length === 0) {
+            return next(null, quietSetting, errorResults, numOfObjectsRemoved,
+                successfullyDeleted, totalContentLengthDeleted, bucket);
+        }
+        const arrayOfDeletes = [];
+        objMDs.forEach(entry => {
+            entry.options = {};
+            const deleteInfo = {};
+            deleteInfo.deleted = true;
+            if (entry.uploadId) {
+                // eslint-disable-next-line
+                entry.options.replayId = entry.uploadId;
+            }
+            arrayOfDeletes.push({
+                bucketName,
+                objMD: entry,
+                key: entry.key,
+                options: entry.options,
+                op: 's3:ObjectRemoved:Delete',
+                deleteInfo,
+                versionId: undefined,
+            });
+            deleteInfo.newDeleteMarker = true;
+        });
+
+        return services.deleteObjects(bucketName, arrayOfDeletes, log, err => {
+            if (err) {
+                return next(err);
+            }
+            arrayOfDeletes.forEach(entry => {
+                let isDeleteMarker;
+                let deleteMarkerVersionId;
+                if (entry.deleteInfo.deleted && entry.objMD['content-length']) {
+                    numOfObjectsRemoved++;
+                    totalContentLengthDeleted += entry.objMD['content-length'];
+                }
+                if (entry.deleteInfo.deleted) {
+                    successfullyDeleted.push({
+                        entry, isDeleteMarker,
+                        deleteMarkerVersionId
+                    });
+                }
+            });
+
+            return next(err, quietSetting, errorResults, numOfObjectsRemoved,
+                successfullyDeleted, totalContentLengthDeleted, bucket);
+        });
+    });
+}
+
 /**
  * multiObjectDelete - Delete multiple objects
  * @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
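
Note on the hunk above: each element pushed to arrayOfDeletes, and later handed to services.deleteObjects, has the shape sketched below. The field names come straight from the diff; the bucket name, key and content-length are made-up sample values.

// Illustrative only: one entry as built by getObjMetadataAndDeleteNover.
// objMD is the metadata record returned by metadataGetObjects; the values
// shown here are samples, not data from the branch.
const sampleDeleteEntry = {
    bucketName: 'my-bucket',
    objMD: { key: 'my-key', 'content-length': 1024 },
    key: 'my-key',
    options: {},                        // replayId is set here when an MPU is in progress
    op: 's3:ObjectRemoved:Delete',
    deleteInfo: { deleted: true, newDeleteMarker: true },
    versionId: undefined,               // this path only runs on non-versioned buckets
};
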
@@ -549,6 +613,11 @@ function multiObjectDelete(authInfo, request, log, callback) {
         },
         function getObjMetadataAndDeleteStep(quietSetting, errorResults, inPlay,
             bucket, next) {
+            if (!bucket._versioningConfiguration || bucket._versioningConfiguration.Status !== 'Enabled') {
+                return getObjMetadataAndDeleteNover(authInfo, canonicalID, request,
+                    bucketName, bucket, quietSetting, errorResults, inPlay,
+                    log, next);
+            }
             return getObjMetadataAndDelete(authInfo, canonicalID, request,
                 bucketName, bucket, quietSetting, errorResults, inPlay,
                 log, next);
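
The new step only takes the original per-object path when versioning is explicitly 'Enabled'; buckets with no versioning configuration, or with versioning suspended, go through the batched getObjMetadataAndDeleteNover path. A hypothetical helper, not present in this branch, expressing the same check:

// Hypothetical helper (not in the branch): the check inlined in the hunk above.
function isVersioningEnabled(bucket) {
    const configuration = bucket._versioningConfiguration;
    return Boolean(configuration && configuration.Status === 'Enabled');
}
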
@@ -84,6 +84,32 @@ function metadataGetObject(bucketName, objectKey, versionId, log, cb) {
     });
 }

+/** metadataGetObjects - retrieves specified objects or versions from metadata
+ * @param {string} bucketName - name of bucket
+ * @param {string[]} objectKeys - keys of the objects to retrieve
+ * @param {string} [versionId] - version of object to retrieve
+ * @param {RequestLogger} log - request logger
+ * @param {function} cb - callback
+ * @return {undefined} - and call callback with err and object metadata
+ */
+function metadataGetObjects(bucketName, objectKeys, log, cb) {
+    // versionId may be 'null', which asks metadata to fetch the null key specifically
+    const options = { versionId: undefined, getDeleteMarker: true };
+    console.log(objectKeys)
+    return metadata.getObjectsMD(bucketName, objectKeys, options, log,
+        (err, objMDs) => {
+            if (err) {
+                if (err.is && err.is.NoSuchKey) {
+                    log.debug('objects do not exist in metadata');
+                    return cb();
+                }
+                log.debug('err getting objects MD from metadata', { error: err });
+                return cb(err);
+            }
+            return cb(null, objMDs);
+        });
+}
+
 /**
  * Validate that a bucket is accessible and authorized to the user,
  * return a specific error code otherwise
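
A possible caller of the new helper, assuming this file is lib/metadata/metadataUtils.js (the first hunk requires it as '../metadata/metadataUtils') and that log is an existing werelogs request logger; the bucket and key names are illustrative:

// Sketch only: exercising metadataGetObjects from elsewhere in the code base.
const { metadataGetObjects } = require('./lib/metadata/metadataUtils');

metadataGetObjects('my-bucket', ['key-1', 'key-2'], log, (err, objMDs) => {
    if (err) {
        return log.error('batched metadata fetch failed', { error: err });
    }
    // A NoSuchKey error is swallowed above and surfaces as a callback with no
    // arguments, so objMDs may be undefined rather than an array.
    return log.info('fetched metadata', { count: objMDs ? objMDs.length : 0 });
});
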
@@ -217,4 +243,5 @@ module.exports = {
     metadataGetObject,
     metadataValidateBucketAndObj,
     metadataValidateBucket,
+    metadataGetObjects,
 };

@@ -23,7 +23,10 @@ const {
     initManagementClient,
     isManagementAgentUsed,
 } = require('./management/agentClient');
+const v8 = require('v8');
+const heapStats = v8.getHeapStatistics();

+const memwatch = require('@airbnb/node-memwatch');
 const HttpAgent = require('agentkeepalive');
 const routes = arsenal.s3routes.routes;
 const { parseLC, MultipleBackendGateway } = arsenal.storage.data;
@@ -53,9 +56,10 @@ if (_config.localCache) {
 // stats client
 const STATS_INTERVAL = 5; // 5 seconds
 const STATS_EXPIRY = 30; // 30 seconds
+console.log('aaaaaaaaaaaaaaaaaaaaaa', localCacheClient)
 const statsClient = new StatsClient(localCacheClient, STATS_INTERVAL,
     STATS_EXPIRY);
-const enableRemoteManagement = true;
+const enableRemoteManagement = false;

 class S3Server {
     /**
@@ -67,10 +71,10 @@ class S3Server {
         this.worker = worker;
         this.cluster = true;
         this.servers = [];
-        http.globalAgent = new HttpAgent({
+        /*http.globalAgent = new HttpAgent({
             keepAlive: true,
             freeSocketTimeout: arsenal.constants.httpClientFreeSocketTimeout,
-        });
+        });*/

         process.on('SIGINT', this.cleanUp.bind(this));
         process.on('SIGHUP', this.cleanUp.bind(this));
@@ -89,6 +93,52 @@ class S3Server {
             });
             this.caughtExceptionShutdown();
         });
+
+        // var hd = new memwatch.HeapDiff();
+        var lastmemoryUsage = 0;
+        const scheduleGc = () => {
+            if (!global.gc) {
+                console.log('Garbage collection is not exposed');
+                return;
+            }
+
+            var nextMinutes = Math.random() + 1;
+            setTimeout(() => {
+                console.log('Start manual gc');
+                global.gc();
+                /*var memoryUsage = process.memoryUsage().heapTotal;
+                if (lastmemoryUsage === 0) {
+                    lastmemoryUsage = memoryUsage;
+                }
+                console.log('Manual gc', process.memoryUsage(), 'diff', memoryUsage - lastmemoryUsage);
+                // detect if memory increase is above 2MB
+                if (memoryUsage - lastmemoryUsage > 2 * 1024 * 1024) {
+                    var diff = hd.end();
+                    console.log('Memory leak?', JSON.stringify(diff));
+                    hd = new memwatch.HeapDiff();
+                    lastmemoryUsage = process.memoryUsage().heapTotal;
+                }*/
+                scheduleGc();
+            }, nextMinutes * 60 * 1000);
+        }
+
+        // every 30s monitor the heap size growth
+        /*const lastHeapSize = process.memoryUsage().heapUsed;
+        setInterval(() => {
+            const heapSize = process.memoryUsage().heapUsed;
+            const diff = heapSize - lastHeapSize;
+            // display the diff in MB
+            console.log('Heap statistics:');
+            console.log('Heap size diff', diff / 1024 / 1024);
+            console.log('Total heap size (bytes):', heapStats.total_heap_size);
+            console.log('Total heap size (MB):', heapStats.total_heap_size / (1024 * 1024));
+
+            console.log('Heap size limit (bytes):', heapStats.heap_size_limit);
+            console.log('Heap size limit (MB):', heapStats.heap_size_limit / (1024 * 1024));
+
+        }, 30 * 1000);*/
+
+        // scheduleGc();
         this.started = false;
     }

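
One caveat in the heap instrumentation above: heapStats is computed once at require time (see the -23,7 hunk), so the commented-out 30-second monitor would print the same total_heap_size and heap_size_limit on every tick. A standalone sketch of the same idea that re-reads the statistics each interval, runnable with plain Node and no CloudServer dependencies:

// Standalone sketch (not part of the branch): re-read heap statistics on
// every tick instead of reusing the snapshot taken at module load.
const v8 = require('v8');

let lastHeapUsed = process.memoryUsage().heapUsed;
setInterval(() => {
    const { heapUsed } = process.memoryUsage();
    const stats = v8.getHeapStatistics();
    console.log('heap used diff (MB):', (heapUsed - lastHeapUsed) / (1024 * 1024));
    console.log('total heap size (MB):', stats.total_heap_size / (1024 * 1024));
    console.log('heap size limit (MB):', stats.heap_size_limit / (1024 * 1024));
    lastHeapUsed = heapUsed;
}, 30 * 1000);
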
@@ -317,6 +317,7 @@ const services = {
         assert.strictEqual(typeof objectMD, 'object');

         function deleteMDandData() {
+
             return metadata.deleteObjectMD(bucketName, objectKey, options, log,
                 (err, res) => {
                     if (err) {
@@ -354,6 +355,60 @@ const services = {
             });
     },

+    deleteObjects(bucketName, arrayOfDeletes, log, cb) {
+        log.trace('deleting objects from bucket');
+        assert.strictEqual(typeof bucketName, 'string');
+        assert(Array.isArray(arrayOfDeletes));
+
+
+        function deleteMDandData(entries) {
+            // measure the time it takes to delete the objects
+            const startTime = Date.now();
+
+            return metadata.batchDeleteObjectMD(bucketName, entries, log, (err, res) => {
+                const delta = Date.now() - startTime;
+                console.log(`[METRIC] s3.batchDeleteObjectMD.time = ${delta} ms`);
+                if (err) {
+                    return cb(err, res);
+                }
+                log.trace('deleteObjects: metadata delete OK');
+                const deleteLog = logger.newRequestLoggerFromSerializedUids(log.getSerializedUids());
+                // also measure the storage duration
+                const startTime2 = Date.now();
+
+                // extract all .objMD.location from entries
+                // and delete them in batch
+                const entriesForBatch = arrayOfDeletes.map(entry => entry.objMD);
+
+                data.batchDelete(entriesForBatch, null, null, deleteLog, err => {
+                    if (err) {
+                        return cb(err);
+                    }
+                });
+                const delta2 = Date.now() - startTime2;
+                console.log(`[METRIC] s3.batchDeleteObjectMD.storageDuration = ${delta2} ms`);
+                return cb(null, res);
+            });
+        }
+
+        async.each(arrayOfDeletes, (entry, next) => {
+            const objGetInfo = entry.objMD.location;
+            // special case that prevents azure blocks from unnecessary deletion
+            // will return null if no need
+            data.protectAzureBlocks(bucketName, entry.key, objGetInfo, log, err => {
+                if (err) {
+                    return next(err);
+                }
+                next();
+            });
+        }, err => {
+            if (err) {
+                return cb(err);
+            }
+            return deleteMDandData(arrayOfDeletes);
+        });
+    },
+
     /**
      * Gets list of objects in bucket
      * @param {object} bucketName - bucket in which objectMetadata is stored
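
As written, deleteMDandData calls cb(null, res) right after scheduling data.batchDelete, so the storageDuration metric only measures the synchronous call and cb can fire a second time if the data delete later fails. A sketch of the same flow with the callback deferred until the data delete completes; it assumes the same in-scope metadata, data and logger helpers as the surrounding file, and the function name is illustrative:

// Sketch only: same calls as the hunk above, but the final callback waits
// for data.batchDelete to finish so cb cannot be invoked twice.
function deleteMDandDataSketch(bucketName, arrayOfDeletes, entries, log, cb) {
    const startTime = Date.now();
    return metadata.batchDeleteObjectMD(bucketName, entries, log, (err, res) => {
        log.debug('batchDeleteObjectMD time', { ms: Date.now() - startTime });
        if (err) {
            return cb(err, res);
        }
        const deleteLog = logger.newRequestLoggerFromSerializedUids(log.getSerializedUids());
        const entriesForBatch = arrayOfDeletes.map(entry => entry.objMD);
        const storageStart = Date.now();
        return data.batchDelete(entriesForBatch, null, null, deleteLog, dataErr => {
            log.debug('batchDelete storage time', { ms: Date.now() - storageStart });
            if (dataErr) {
                return cb(dataErr);
            }
            return cb(null, res);
        });
    });
}
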
@@ -133,8 +133,16 @@ function healthcheckHandler(clientIP, req, res, log, statsClient) {
     if (!checkIP(clientIP)) {
         return healthcheckEndHandler(errors.AccessDenied, []);
     }
-    return routeHandler(deep, req, res, log, statsClient,
-        healthcheckEndHandler);
+    let err = null;
+    let results = null;
+    writeResponse(res, err, log, results, error => {
+        if (error) {
+            return log.end().warn('healthcheck error', { err: error });
+        }
+        return log.end();
+    });
+    /*return routeHandler(deep, req, res, log, statsClient,
+        healthcheckEndHandler);*/
 }

 module.exports = {

@@ -19,9 +19,10 @@
   },
   "homepage": "https://github.com/scality/S3#readme",
   "dependencies": {
+    "@airbnb/node-memwatch": "^2.0.0",
     "@azure/storage-blob": "^12.12.0",
     "@hapi/joi": "^17.1.0",
-    "arsenal": "git+https://github.com/scality/arsenal#8.1.98",
+    "arsenal": "git+https://github.com/scality/arsenal#020387b3003838640eb9cd8fdeeac28bdde0db32",
     "async": "~2.5.0",
     "aws-sdk": "2.905.0",
     "bucketclient": "scality/bucketclient#8.1.9",
@@ -38,6 +39,7 @@
     "mongodb": "^5.2.0",
     "node-fetch": "^2.6.0",
     "node-forge": "^0.7.1",
+    "node-memwatch-new": "^0.0.3",
     "npm-run-all": "~4.1.5",
     "prom-client": "14.2.0",
     "request": "^2.81.0",
@@ -99,7 +101,7 @@
     "start_mdserver": "node mdserver.js",
     "start_dataserver": "node dataserver.js",
     "start_pfsserver": "node pfsserver.js",
-    "start_s3server": "node index.js",
+    "start_s3server": "node --expose-gc index.js",
     "start_dmd": "npm-run-all --parallel start_mdserver start_dataserver",
     "start_utapi": "node lib/utapi/utapi.js",
    "start_secure_channel_proxy": "node bin/secure_channel_proxy.js",
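
The start_s3server script now passes --expose-gc, which is what makes global.gc available to the scheduleGc helper added in the server hunk. A minimal standalone check (the file name is illustrative):

// Minimal sketch: run with `node --expose-gc check-gc.js`.
if (typeof global.gc === 'function') {
    console.log('global.gc is available; manual collection can be triggered');
    global.gc();
} else {
    console.log('start node with --expose-gc to enable manual garbage collection');
}
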
yarn.lock (24 changed lines)
@@ -2,6 +2,14 @@
 # yarn lockfile v1


+"@airbnb/node-memwatch@^2.0.0":
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/@airbnb/node-memwatch/-/node-memwatch-2.0.0.tgz#473756b9e078fca923bda536debaf519e867fae1"
+  integrity sha512-4DMP5GQz9ZYklB/FXiE1+yNffzjdiSerpr10QGxBQF56xcZsKLE0PnL/Pq6yC1sLGT0IHgG4UXgz/a5Yd463gw==
+  dependencies:
+    bindings "^1.5.0"
+    nan "^2.14.1"
+
 "@azure/abort-controller@^1.0.0":
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/@azure/abort-controller/-/abort-controller-1.1.0.tgz#788ee78457a55af8a1ad342acb182383d2119249"
@@ -716,9 +724,9 @@ arraybuffer.slice@~0.0.7:
   optionalDependencies:
     ioctl "^2.0.2"

-"arsenal@git+https://github.com/scality/arsenal#8.1.98":
-  version "8.1.97"
-  resolved "git+https://github.com/scality/arsenal#3f7229eebe378a0f4852d2c25b9ac33c027fa7eb"
+"arsenal@git+https://github.com/scality/arsenal#020387b3003838640eb9cd8fdeeac28bdde0db32":
+  version "8.1.96"
+  resolved "git+https://github.com/scality/arsenal#020387b3003838640eb9cd8fdeeac28bdde0db32"
   dependencies:
     "@azure/identity" "^3.1.1"
     "@azure/storage-blob" "^12.12.0"
@@ -4004,7 +4012,7 @@ ms@2.1.3, ms@^2.0.0, ms@^2.1.1:
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
   integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==

-nan@^2.14.0, nan@^2.3.2:
+nan@^2.14.0, nan@^2.14.1, nan@^2.3.2:
   version "2.17.0"
   resolved "https://registry.yarnpkg.com/nan/-/nan-2.17.0.tgz#c0150a2368a182f033e9aa5195ec76ea41a199cb"
   integrity sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ==
@@ -4107,6 +4115,14 @@ node-gyp@^8.0.0:
     tar "^6.1.2"
     which "^2.0.2"

+node-memwatch-new@^0.0.3:
+  version "0.0.3"
+  resolved "https://registry.yarnpkg.com/node-memwatch-new/-/node-memwatch-new-0.0.3.tgz#ad7ae6053f84e88101165ebdcc2a948dc8b64fe3"
+  integrity sha512-2k6ZK8994yYBOJgPQ/IiuhJuQylRKOuMsmWj1EpNJOjo3gdxIJ9fRrsn6MYl2/VX65A9BKApHOb+PHHqygHYYQ==
+  dependencies:
+    bindings "^1.5.0"
+    nan "^2.14.1"
+
 node-mocks-http@1.5.2:
   version "1.5.2"
   resolved "https://registry.yarnpkg.com/node-mocks-http/-/node-mocks-http-1.5.2.tgz#5378c6ecbc0e077219a8f0986f2c19475b2ae3c3"