Compare commits

...

4 Commits

Author SHA1 Message Date
philipyoo 13b5e64e6b [squash] update circle.yml node version 2018-03-20 12:14:34 -07:00
philipyoo f37d5b7906 [squash] update tests.bash 2018-03-20 11:48:22 -07:00
VinceMaestro 8191911965 B2Client function 'put' & functionnal test 'putB2' 2018-03-20 11:48:22 -07:00
VinceMaestro 527f82f3ea Setup config, B2Client class & elementary client library + DOC 2018-03-20 11:48:22 -07:00
19 changed files with 624 additions and 15 deletions

View File

@@ -8,7 +8,7 @@ general:
machine:
  node:
-   version: 6.9.5
+   version: 8.9.0
  services:
    - redis
    - docker

View File

@@ -106,7 +106,7 @@ const constants = {
objectLocationConstraintHeader: 'x-amz-meta-scal-location-constraint',
legacyLocations: ['sproxyd', 'legacy'],
/* eslint-disable camelcase */
-   externalBackends: { aws_s3: true, azure: true, gcp: true },
+   externalBackends: { aws_s3: true, azure: true, gcp: true, b2: true },
// some of the available data backends (if called directly rather
// than through the multiple backend gateway) need a key provided
// as a string as first parameter of the get/delete methods.
@@ -115,7 +115,7 @@ const constants = {
// for external backends, don't call unless at least 1 minute
// (60,000 milliseconds) since last call
externalBackendHealthCheckInterval: 60000,
-   versioningNotImplBackends: { azure: true },
+   versioningNotImplBackends: { azure: true, b2: true },
mpuMDStoredExternallyBackend: { aws_s3: true },
/* eslint-enable camelcase */
mpuMDStoredOnS3Backend: { azure: true },

View File

@@ -394,3 +394,113 @@ endpoint :code:`zenkotos3.com`:
},
(...)
Backblaze as a data backend
---------------------------

From the Backblaze B2 web page
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

From your Backblaze B2 dashboard, create a bucket where you will host your
data for this new location constraint.

You will also need one of your B2 application keys, which you must provide to
CloudServer. You can find it in the B2 Cloud Storage Buckets dashboard: click
on 'Show Account ID and Application Key', then select your account ID and
click on 'Create Application Key'.

Important: when you create a new application key, the old one stops working.

In this example, our bucket will be named ``b2bucket``.
From the CloudServer repository
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
locationConfig.json
^^^^^^^^^^^^^^^^^^^
Edit :code:`locationConfig.json` to add a new location constraint. This
location constraint will contain the information for the Backblaze B2 bucket
to which you will be writing your data whenever you create a CloudServer
bucket in this location. There are a few configurable options here:

- :code:`type`: set to :code:`b2` to indicate this location constraint writes
  data to Backblaze B2;
- :code:`legacyAwsBehavior`: always set to :code:`false`;
- :code:`b2StorageEndpoint`: always set to :code:`api.backblazeb2.com`;
- :code:`b2AccountId`: the B2 account ID to which your bucket belongs;
- :code:`b2ApplicationKey`: the application key associated with your account;
- :code:`b2BucketName`: set to an existing bucket in your Backblaze B2
  storage account (if the bucket does not exist yet, it is created
  automatically).

.. code:: json

    (...)
    "b2-test": {
        "type": "b2",
        "legacyAwsBehavior": false,
        "details": {
            "b2StorageEndpoint": "api.backblazeb2.com",
            "b2AccountId": "123456789",
            "b2ApplicationKey": "9019bdaa70e062f442218ae806ec2a22dd7b2dc0",
            "b2BucketName": "b2bucket"
        }
    },
    (...)
You should now be able to start the server and start writing data to Backblaze through CloudServer.
Start the server with the ability to write to Backblaze
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Inside the repository, once all the files have been edited, you should be able
to start the server and start writing data to Backblaze B2 through
CloudServer.

.. code:: shell

    # Start the server locally
    $> S3DATA=multiple npm start
Testing: put an object to Backblaze using CloudServer
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To start testing pushes to Backblaze, you will need to create a local bucket
in the Backblaze region. This local bucket will only store the metadata
locally, while both the data and any user metadata (``x-amz-meta`` headers
sent with a PUT object, and tags) will be stored on Backblaze.

.. code:: shell

    # Create a local bucket storing data in Backblaze
    $> s3cmd --host=127.0.0.1:8000 mb s3://b2bucket --region=b2-test

    # Put an object to Backblaze, and store the metadata locally
    $> s3cmd --host=127.0.0.1:8000 put /etc/hosts s3://b2bucket/testput
    upload: '/etc/hosts' -> 's3://b2bucket/testput'  [1 of 1]
     330 of 330   100% in    0s   380.87 B/s  done

    # List locally to check you have the metadata
    $> s3cmd --host=127.0.0.1:8000 ls s3://b2bucket
    2017-10-24 14:38       330   s3://b2bucket/testput
Then, from your Backblaze account, if you go into your bucket, you should see
your newly uploaded object.

.. WARNING::
   You may experience some latency in the web interface after updating your
   data on Backblaze, due to Backblaze's caching system; it may take several
   minutes for changes to appear. The refresh button (the small arrow next to
   'Buckets/``b2bucket``' on the Browse Files page) helps display updated
   information faster.
Troubleshooting
~~~~~~~~~~~~~~~
Make sure your :code:`~/.s3cfg` file has credentials matching your local
CloudServer credentials defined in :code:`conf/authdata.json`. By default,
the access key is :code:`accessKey1` and the secret key is
:code:`verySecretKey1`. For more information, refer to our template
:code:`~/.s3cfg`.
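A minimal ``~/.s3cfg`` matching those defaults could look like the following
sketch (the host values assume a CloudServer listening locally on port 8000):

.. code:: ini

    [default]
    access_key = accessKey1
    secret_key = verySecretKey1
    host_base = 127.0.0.1:8000
    host_bucket = 127.0.0.1:8000
    use_https = False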
Unfortunately, pre-existing objects in your Backblaze bucket cannot be
accessed by CloudServer at this time.
Getting Started
Testing
Running functional tests locally:
For the AWS backend, Azure backend and Backblaze backend tests to pass
locally, you must modify :code:`tests/locationConfigTests.json` so that
:code:`"awsbackend"` specifies a bucket name of a bucket you have access to
based on your credentials profile, and modify :code:`"azurebackend"` with
details for your Azure account.

For Backblaze, modify :code:`"b2backend"`:

.. code:: json

    (...)
    "b2backend": {
        "type": "b2",
        "legacyAwsBehavior": false,
        "details": {
            "b2StorageEndpoint": "api.backblazeb2.com",
            "b2AccountId": "123456789",
            "b2ApplicationKey": "9019bdaa70e062f442218ae806ec2a22dd7b2dc0",
            "b2BucketName": "b2bucket"
        }
    },
    (...)
.. WARNING::
   The location must be named :code:`"b2backend"` for the tests to work
   properly.

View File

@@ -1007,6 +1007,38 @@ class Config extends EventEmitter {
/* eslint-enable camelcase */
};
}
    getB2Endpoint(locationConstraint) {
        let b2StorageEndpoint =
            process.env[`${locationConstraint}_B2_STORAGE_ENDPOINT`] ||
            this.locationConstraints[locationConstraint]
                .details.b2StorageEndpoint;
        // strip any trailing slash so callers can append paths safely
        if (b2StorageEndpoint.endsWith('/')) {
            b2StorageEndpoint = b2StorageEndpoint.substr(0,
                b2StorageEndpoint.length - 1);
        }
        return b2StorageEndpoint;
    }

    getB2StorageCredentials(locationConstraint) {
        const { b2ApplicationKey } =
            this.locationConstraints[locationConstraint].details;
        const b2ApplicationKeyFromEnv =
            process.env[`${locationConstraint}_B2_STORAGE_ACCESS_KEY`];
        return {
            accountId: this.getB2AccountId(locationConstraint),
            b2ApplicationKey: b2ApplicationKeyFromEnv || b2ApplicationKey,
        };
    }

    getB2AccountId(locationConstraint) {
        const { b2AccountId } =
            this.locationConstraints[locationConstraint].details;
        const b2AccountIdFromEnv =
            process.env[`${locationConstraint}_B2_ACCOUNT_ID`];
        return b2AccountIdFromEnv || b2AccountId;
    }
}
module.exports = {
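The three getters above let per-location environment variables take precedence
over locationConfig.json. A minimal sketch of that lookup order, assuming a
CloudServer checkout whose locationConfig.json defines a location constraint
named b2-test as in the documentation above (both names are illustrative):

// Hypothetical: the environment variable wins over the JSON value
process.env['b2-test_B2_ACCOUNT_ID'] = '123456789';
const { config } = require('./lib/Config');
console.log(config.getB2AccountId('b2-test')); // '123456789'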

lib/data/external/B2Client.js vendored Normal file
View File

@@ -0,0 +1,60 @@
// External imports
const { s3middleware } = require('arsenal');
const async = require('async');
const MD5Sum = s3middleware.MD5Sum;
// path = /lib/
const { config } = require('../../Config');
const { prepareStream } = require('../../api/apiUtils/object/prepareStream');
const { validateAndFilterMpuParts } =
require('../../api/apiUtils/object/processMpuParts');
// path = /lib/data/
const createLogger = require('../multipleBackendLogger');
// path = /lib/data/external/
const { logHelper } = require('./utils');
// path = /lib/data/external/b2_lib/
const get_upload_url = require('./b2_lib/b2_get_upload_url');
const upload_file = require('./b2_lib/b2_upload_file');
const SHA1Sum = require('./b2_lib/b2_sha1sum');
const set_auth_and_bucket_id_once =
    require('./b2_lib/b2_set_auth_and_bucket_id_once');

// Methods not implemented because B2 has no equivalent:
// objectPutTagging, objectDeleteTagging, copyObject, uploadPartCopy
class B2Client {
    constructor(config) {
        this.b2StorageEndpoint = config.b2StorageEndpoint;
        this.b2StorageCredentials = config.b2StorageCredentials;
        this._b2BucketName = config.b2BucketName;
        this._dataStoreName = config.dataStoreName;
        this._bucketMatch = config.bucketMatch;
    }

    async put(stream, size, keyContext, reqUids, callback) {
        const log = createLogger(reqUids);
        let err = null;
        let final_result = [];
        try {
            await set_auth_and_bucket_id_once(this);
            let result = await get_upload_url(this.auth, this.bucketId);
            const fileName = keyContext.objectKey;
            const hashedStream = new SHA1Sum();
            stream.pipe(hashedStream);
            // When sending the SHA1 checksum at the end, the declared size
            // should be the size of the file plus the 40 hex characters of
            // the checksum.
            result = await upload_file(result, hashedStream, fileName,
                size + 40);
            final_result = [fileName, result.fileId];
        } catch (e) {
            err = e;
            logHelper(log, 'error', 'err from data backend',
                err, this._dataStoreName);
        } finally {
            callback(err, final_result[0], final_result[1]);
        }
    }
}

module.exports = B2Client;
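A usage sketch for the class above; all values are illustrative, and
keyContext and reqUids normally come from CloudServer's request pipeline
(the constructor options mirror what parseLC() passes further below):

const B2Client = require('./B2Client');
const { PassThrough } = require('stream');

const client = new B2Client({
    b2StorageEndpoint: 'api.backblazeb2.com',
    b2StorageCredentials: {
        accountId: '123456789',             // hypothetical account id
        b2ApplicationKey: 'someApplicationKey', // hypothetical key
    },
    b2BucketName: 'b2bucket',
    dataStoreName: 'b2-test',
    bucketMatch: false,
});

const body = new PassThrough();
body.end('I am a body'); // 11 bytes
client.put(body, 11, { objectKey: 'testput' }, 'req-uid',
    (err, key, fileId) => {
        if (err) {
            return console.error(err);
        }
        return console.log(key, fileId);
    });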

View File

@@ -0,0 +1,55 @@
const https = require('https');

function async_https_request(options, data, returns_stream) {
    const errHandler = {};
    // The User-Agent must be sent as a header; setting it on the options
    // root object would be ignored by https.request.
    options.headers = Object.assign({ 'User-Agent': 'Zenko' },
        options.headers);
    return new Promise((resolve, reject) => {
        const req = https.request(options, res => {
            if (returns_stream) {
                resolve(res);
            } else {
                res.setEncoding('utf8');
                res.on('data', chunk => {
                    if (res.statusCode !== 200) {
                        const err = JSON.parse(chunk);
                        errHandler.code = err.status;
                        errHandler.description = err.code;
                        errHandler.customizeDescription = err.message;
                        reject(errHandler);
                    } else {
                        resolve(JSON.parse(chunk));
                    }
                });
            }
        });
        req.on('error', err => {
            if (err.code && err.status && err.message) {
                errHandler.code = err.status;
                errHandler.description = err.code;
                errHandler.customizeDescription = err.message;
            } else {
                errHandler.code = 500;
                errHandler.description = 'Internal Error.';
                errHandler.customizeDescription =
                    'An unexpected error has occurred.';
            }
            reject(errHandler);
        });
        if (data !== undefined && data !== null) {
            if (typeof data.pipe === 'function') {
                // piping ends the request once the source stream ends
                data.pipe(req);
            } else {
                req.write(data);
                req.end();
            }
        } else {
            req.end();
        }
    });
}

module.exports = async_https_request;
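Both HTTP-level failures and socket errors are surfaced through the same
normalized { code, description, customizeDescription } shape, so callers only
need one rejection handler. A sketch with deliberately bad credentials
(illustrative values):

const async_https_request = require('./async_https_request');

async_https_request({
    host: 'api.backblazeb2.com',
    path: '/b2api/v1/b2_authorize_account',
    auth: 'badAccountId:badApplicationKey', // hypothetical bad credentials
}, null, false)
    .catch(e => console.error(e.code, e.description, e.customizeDescription));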

View File

@@ -0,0 +1,13 @@
const async_https_request = require('./async_https_request');

// B2 authorization uses HTTP basic auth with accountId:applicationKey
function authorize_account(accountId, applicationKey, host) {
    const options = {
        host,
        path: '/b2api/v1/b2_authorize_account',
        auth: `${accountId}:${applicationKey}`,
    };
    return async_https_request(options, null, false);
}

module.exports = authorize_account;

View File

@@ -0,0 +1,22 @@
const url = require('url');
const async_https_request = require('./async_https_request');

function get_upload_url(data, bucketId) {
    // "data" is the response of b2_authorize_account
    const host = url.parse(data.apiUrl).hostname;
    const postData = JSON.stringify({
        bucketId,
    });
    const options = {
        host,
        path: '/b2api/v1/b2_get_upload_url',
        method: 'POST',
        headers: {
            'Authorization': data.authorizationToken,
            'Content-Type': 'application/x-www-form-urlencoded',
            'Content-Length': Buffer.byteLength(postData),
        },
    };
    return async_https_request(options, postData, false);
}

module.exports = get_upload_url;

View File

@@ -0,0 +1,21 @@
const async_https_request = require('./async_https_request');

function list_buckets(accountId, token, host) {
    const postData = JSON.stringify({
        accountId,
    });
    const options = {
        host,
        path: '/b2api/v1/b2_list_buckets',
        method: 'POST',
        headers: {
            'Authorization': token,
            'Content-Type': 'application/x-www-form-urlencoded',
            'Content-Length': Buffer.byteLength(postData),
        },
    };
    return async_https_request(options, postData, false);
}

module.exports = list_buckets;

View File

@@ -0,0 +1,26 @@
const url = require('url');
const authorize_account = require('./b2_authorize_account');
const list_buckets = require('./b2_list_buckets');

// Lazily fetch, then cache on the client object, the authorization
// response and the target bucket id, so each is requested only once.
async function set_auth_and_bucket_id_once(that) {
    if (that.auth === undefined) {
        that.auth = await authorize_account(
            that.b2StorageCredentials.accountId,
            that.b2StorageCredentials.b2ApplicationKey,
            that.b2StorageEndpoint);
    }
    if (that.bucketId === undefined) {
        const { buckets } = await list_buckets(
            that.b2StorageCredentials.accountId,
            that.auth.authorizationToken,
            url.parse(that.auth.apiUrl).hostname);
        that.bucketId = buckets.find(
            bucket => bucket.bucketName === that._b2BucketName).bucketId;
    }
}

module.exports = set_auth_and_bucket_id_once;
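Because both lookups cache their result on the client object, only the first
call performs network requests; later calls are cheap no-ops. A sketch with
illustrative credentials and bucket name:

const set_auth_and_bucket_id_once =
    require('./b2_set_auth_and_bucket_id_once');

const client = {
    b2StorageEndpoint: 'api.backblazeb2.com',
    b2StorageCredentials: {
        accountId: '123456789',                 // hypothetical account id
        b2ApplicationKey: 'someApplicationKey', // hypothetical key
    },
    _b2BucketName: 'b2bucket',
};

set_auth_and_bucket_id_once(client)
    .then(() => set_auth_and_bucket_id_once(client)) // no-op: already cached
    .then(() => console.log(client.bucketId));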

lib/data/external/b2_lib/b2_sha1sum.js vendored Normal file
View File

@@ -0,0 +1,47 @@
const Transform = require('stream').Transform;
const crypto = require('crypto');

/**
 * This class is designed to compute a sha1 hash at the same time as sending
 * data through a stream. The sha1 hash is then appended to the stream.
 */
class SHA1Sum extends Transform {
    /**
     * @constructor
     */
    constructor() {
        super({});
        this.hash = crypto.createHash('sha1');
        this.completedHash = undefined;
    }

    /**
     * This function will update the current sha1 hash with the next chunk
     *
     * @param {Buffer|string} chunk - Chunk to compute
     * @param {string} encoding - Data encoding
     * @param {function} callback - Callback(err, chunk, encoding)
     * @return {undefined}
     */
    _transform(chunk, encoding, callback) {
        this.hash.update(chunk, encoding);
        callback(null, chunk, encoding);
    }

    /**
     * This function will end the hash computation and append the hex
     * digest to the stream
     *
     * @param {function} callback - Callback(err)
     * @return {undefined}
     */
    _flush(callback) {
        this.completedHash = this.hash.digest('hex');
        this.push(this.completedHash);
        // emit 'hashed' only once completedHash is available
        this.emit('hashed');
        callback(null);
    }
}

module.exports = SHA1Sum;
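A usage sketch: piping a payload through SHA1Sum yields the original bytes
followed by the 40-character hex digest, which is what B2's
'hex_digits_at_end' upload mode expects (and why B2Client declares size + 40):

const SHA1Sum = require('./b2_sha1sum');
const { PassThrough } = require('stream');

const hashed = new SHA1Sum();
const src = new PassThrough();
src.pipe(hashed);

let out = '';
hashed.on('data', chunk => { out += chunk; });
hashed.on('end', () => {
    // 11 payload bytes + 40 hex characters of sha1
    console.log(out.length); // 51
});
src.end('I am a body');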

View File

@@ -0,0 +1,20 @@
const url = require('url');
const async_https_request = require('./async_https_request');

function upload_file(auth, stream, fileName, size) {
    // "auth" is the response of b2_get_upload_url; the special value
    // 'hex_digits_at_end' tells B2 that the sha1 checksum is appended
    // to the request body.
    const options = {
        host: url.parse(auth.uploadUrl).hostname,
        path: url.parse(auth.uploadUrl).pathname,
        method: 'POST',
        headers: {
            'Authorization': auth.authorizationToken,
            'X-Bz-File-Name': fileName,
            'Content-Type': 'text/plain',
            'Content-Length': size,
            'X-Bz-Content-Sha1': 'hex_digits_at_end',
        },
    };
    return async_https_request(options, stream, false);
}

module.exports = upload_file;
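Taken together, the helpers in b2_lib implement the whole B2 upload handshake
that B2Client.put() relies on. An end-to-end sketch, with illustrative
credentials and bucket name:

const url = require('url');
const { PassThrough } = require('stream');
const authorize_account = require('./b2_authorize_account');
const list_buckets = require('./b2_list_buckets');
const get_upload_url = require('./b2_get_upload_url');
const upload_file = require('./b2_upload_file');
const SHA1Sum = require('./b2_sha1sum');

async function demo() {
    // hypothetical account id and application key
    const auth = await authorize_account('123456789', 'someApplicationKey',
        'api.backblazeb2.com');
    const apiHost = url.parse(auth.apiUrl).hostname;
    const { buckets } = await list_buckets('123456789',
        auth.authorizationToken, apiHost);
    const { bucketId } =
        buckets.find(bucket => bucket.bucketName === 'b2bucket');
    const target = await get_upload_url(auth, bucketId);
    const hashedStream = new SHA1Sum();
    const src = new PassThrough();
    src.pipe(hashedStream);
    src.end('I am a body'); // 11 bytes, hashed on the fly
    const res = await upload_file(target, hashedStream, 'testput', 11 + 40);
    console.log(res.fileId);
}

demo().catch(console.error);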

View File

@@ -92,6 +92,7 @@ const utils = {
config.getLocationConstraintType(locationConstraintDest);
return locationTypeMatch && (isSameBucket || bucketsNotEncrypted) &&
(sourceLocationConstraintType === 'aws_s3' ||
sourceLocationConstraintType === 'b2' ||
(sourceLocationConstraintType === 'azure' &&
config.isSameAzureAccount(locationConstraintSrc,
locationConstraintDest)));

View File

@@ -8,6 +8,7 @@ const inMemory = require('./in_memory/backend').backend;
const AwsClient = require('./external/AwsClient');
const GcpClient = require('./external/GcpClient');
const AzureClient = require('./external/AzureClient');
const B2Client = require('./external/B2Client');
const { config } = require('../Config');
@@ -119,6 +120,19 @@ function parseLC() {
});
clients[location].clientType = 'azure';
}
if (locationObj.type === 'b2') {
const b2StorageEndpoint = config.getB2Endpoint(location);
const b2StorageCredentials =
config.getB2StorageCredentials(location);
clients[location] = new B2Client({
b2StorageEndpoint,
b2StorageCredentials,
b2BucketName: locationObj.details.b2BucketName,
bucketMatch: locationObj.details.bucketMatch,
dataStoreName: location,
});
clients[location].clientType = 'b2';
}
});
return clients;
}

View File

@@ -26,8 +26,11 @@
"azure-storage": "^2.1.0",
"bucketclient": "scality/bucketclient",
"commander": "^2.9.0",
"md5": "^2.2.1",
"node-uuid": "^1.4.3",
"npm-run-all": "~4.0.2",
"sha1": "^1.1.1",
"sha1-file": "^1.0.0",
"sproxydclient": "scality/sproxydclient",
"utapi": "scality/utapi",
"utf8": "~2.1.1",

View File

@@ -2,6 +2,9 @@
set -x #echo on
set -e #exit at the first error
mkdir -p ~/.aws
touch ~/.aws/credentials
cat >>~/.aws/credentials <<EOF
[default]
aws_access_key_id = $AWS_ACCESS_KEY_ID_DEFAULT
@@ -201,7 +204,6 @@ then
S3BACKEND=mem ENABLE_LOCAL_CACHE=true npm start > $CIRCLE_ARTIFACTS/server_mem_healthchecks.txt & bash wait_for_local_port.bash 8000 40 && ENABLE_LOCAL_CACHE=true npm run ft_healthchecks
killandsleep 8000
fi
exit 0

View File

@@ -0,0 +1,147 @@
const assert = require('assert');
const BucketUtility = require('../../../lib/utility/bucket-util');
const withV4 = require('../../support/withV4');
const {
describeSkipIfNotMultiple,
getB2Keys,
b2Location,
} = require('../utils');
const keys = getB2Keys();
const b2Timeout = 10000;
describeSkipIfNotMultiple('Multiple backend PUT object to B2',
function testSuite() {
this.timeout(30000);
withV4(sigCfg => {
let bucketUtil;
let s3;
before(() => {
process.stdout.write('Creating bucket\n');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketAsync({ Bucket: b2Location })
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
});
});
after(() => {
process.stdout.write('Emptying bucket\n');
return bucketUtil.empty(b2Location)
.then(() => {
process.stdout.write('Deleting bucket\n');
return bucketUtil.deleteOne(b2Location);
})
.catch(err => {
process.stdout.write('Error emptying/deleting bucket: ' +
`${err}\n`);
throw err;
});
});
keys.forEach(key => {
describe(`${key.describe} size`, () => {
const testKey = `${key.name}-${Date.now()}`;
it('should return no error when testing PUT with valid params', done => {
s3.putObject({
Bucket: b2Location,
Key: testKey,
Body: key.body,
Metadata: { 'scal-location-constraint': b2Location }
}, (err, res) => {
assert.equal(err, null, `Expected success but got error ${err}`);
done();
});
});
it('should return no error && same MD5 when testing GET with valid params', done => {
s3.getObject({ Bucket: b2Location, Key: testKey }, (err, res) => {
assert.equal(err, null, `Expected success but got error ${err}`);
assert.strictEqual(res.ETag, `"${key.MD5}"`, `Expected identical MD5: got ${res.ETag}, expected ${key.MD5}`);
done();
});
});
it('should return MissingRequiredParameter when testing PUT with no key', done => {
let tmpKey = null;
s3.putObject({
Bucket: b2Location,
Key: tmpKey,
Body: key.body,
Metadata: { 'scal-location-constraint': b2Location }
}, (err, res) => {
assert.notEqual(err, null, 'Expected error but got success, PUT with no key should always throw, please run test again');
assert.equal(err, 'MissingRequiredParameter: Missing required key \'Key\' in params', `Expected error MissingRequiredParameter but got error ${err}`);
done();
});
});
it('should return MissingRequiredParameter when testing PUT with no location', done => {
let tmpLoc = null;
s3.putObject({
Bucket: tmpLoc,
Key: testKey,
Body: key.body,
Metadata: { 'scal-location-constraint': tmpLoc }
}, (err, res) => {
assert.notEqual(err, null, 'Expected error but got success, PUT with empty location should always throw, please run test again');
assert.equal(err, 'MissingRequiredParameter: Missing required key \'Bucket\' in params', `Expected error MissingRequiredParameter but got error ${err}`);
done();
});
});
it('should return code 400 when testing PUT with non existing location', done => {
let tmpLoc = 'PleaseDontCreateALocationWithThisNameOrThisTestWillFail-' + Date.now();
s3.putObject({
Bucket: tmpLoc,
Key: testKey,
Body: key.body,
Metadata: { 'scal-location-constraint': tmpLoc }
}, (err, res) => {
assert.notEqual(err, null, 'Expected error but got success, this location seems to exist already, please run test again');
assert.equal(err.statusCode, 400, `Expected error 400 but got error ${err.statusCode}`);
done();
});
});
it('should return a different MD5 (if non-empty size) when testing GET after a PUT with a corrupted (empty) body', done => {
const tmpBody = null;
s3.putObject({
Bucket: b2Location,
Key: testKey,
Body: tmpBody,
Metadata: { 'scal-location-constraint': b2Location }
}, (err, res) => {
assert.equal(err, null, `Expected success but got error ${err}`);
s3.getObject({ Bucket: b2Location, Key: testKey }, (err, res) => {
assert.equal(err, null, `Expected success but got error ${err}`);
if (key.describe === 'empty') {
assert.equal(res.ETag, `"${key.MD5}"`, `Expected identical MD5 but got ${res.ETag}, expected ${key.MD5}`);
} else {
assert.notEqual(res.ETag, `"${key.MD5}"`, `Expected different MD5 but got identical: ${res.ETag}`);
}
done();
});
});
});
it('should return a different MD5 when testing GET after a PUT with a corrupted body', done => {
let tmpBody = 'PleaseDontCreateABodyWithThisContentOrThisTestWillFail-' + Date.now();
s3.putObject({
Bucket: b2Location,
Key: testKey,
Body: tmpBody,
Metadata: { 'scal-location-constraint': b2Location }
}, (err, res) => {
assert.equal(err, null, `Expected success but got error ${err}`);
s3.getObject({ Bucket: b2Location, Key: testKey }, (err, res) => {
assert.equal(err, null, `Expected success but got error ${err}`);
assert.notEqual(res.ETag, `"${key.MD5}"`, `Expected different MD5 but got identical: ${res.ETag}`);
done();
});
});
});
});
});
});
});

View File

@@ -16,6 +16,7 @@ const awsLocation = 'awsbackend';
const awsLocation2 = 'awsbackend2';
const awsLocationMismatch = 'awsbackendmismatch';
const awsLocationEncryption = 'awsbackendencryption';
const b2Location = 'b2backend';
const azureLocation = 'azurebackend';
const azureLocation2 = 'azurebackend2';
const azureLocationMismatch = 'azurebackendmismatch';
@@ -55,6 +56,7 @@ const utils = {
awsLocation2,
awsLocationMismatch,
awsLocationEncryption,
b2Location,
azureLocation,
azureLocation2,
azureLocationMismatch,
@@ -153,6 +155,30 @@ utils.getAzureKeys = () => {
return keys;
};
utils.getB2Keys = () => {
const keys = [
{
describe: 'empty',
name: `emptykey-${Date.now()}`,
body: '',
MD5: 'd41d8cd98f00b204e9800998ecf8427e',
},
{
describe: 'normal',
name: `somekey-${Date.now()}`,
body: Buffer.from('I am a body', 'utf8'),
MD5: 'be747eb4b75517bf6b3cf7c5fbb62f3a',
},
{
describe: 'big',
name: `bigkey-${Date.now()}`,
body: Buffer.alloc(10485760),
MD5: 'f1c9645dbc14efddc7d8a322685f26eb',
},
];
return keys;
};
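The MD5 constants above are standard values that can be reproduced with
node's crypto module, for instance:

const crypto = require('crypto');
// empty body
console.log(crypto.createHash('md5').update('').digest('hex'));
// d41d8cd98f00b204e9800998ecf8427e
// 10 MiB of zero bytes
console.log(crypto.createHash('md5').update(Buffer.alloc(10485760))
    .digest('hex'));
// f1c9645dbc14efddc7d8a322685f26eb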
// For contentMD5, Azure requires base64 but AWS requires hex, so convert
// from base64 to hex
utils.convertMD5 = contentMD5 =>

View File

@@ -159,5 +159,15 @@
"servieKey": "fake001"
}
}
},
"b2backend": {
"type": "b2",
"legacyAwsBehavior": false,
"details": {
"b2StorageEndpoint": "api.backblazeb2.com",
"b2AccountId": "fakeccountid",
"b2ApplicationKey": "Fake00key123",
"b2BucketName":"zenkotester"
}
}
}