Compare commits

...

38 Commits

Author SHA1 Message Date
Lauren Spiegel 26054af9b0 SQUASH - GGIM's whichcraft 2017-10-31 16:57:14 -07:00
Lauren Spiegel 1e00fefa98 SQUASH -- remove console.log in tool 2017-10-31 16:57:14 -07:00
Cloud User f257c70a8d add java option for sessionconfig 2017-10-31 16:57:14 -07:00
Lauren Spiegel 486b899f67 Validate search params 2017-10-31 16:57:14 -07:00
Cloud User 38e780ce96 reduce cores 2017-10-31 16:57:14 -07:00
Cloud User d21112cf0e add alluxio config for livy 2017-10-31 16:57:14 -07:00
Lauren Spiegel 941acf8246 Update spark settings (based on Alex's changes on swarm 1) 2017-10-31 16:57:14 -07:00
Lauren Spiegel f9f1bca84f Validate sql where clause 2017-10-31 16:57:14 -07:00
Cloud User a5a9738993 configuration updates 2017-10-31 16:54:52 -07:00
Lauren Spiegel 2841ff31bc Expose spark metrics 2017-10-31 16:54:52 -07:00
Lauren Spiegel 0cf2d7a8d9 parse livy responses 2017-10-31 16:54:52 -07:00
Lauren Spiegel bcbd13bfa9 WIP -- session handling 2017-10-31 16:54:23 -07:00
Lauren Spiegel 71cae881fe CLEAN THIS UP -- send full livy response out as json string 2017-10-31 16:53:10 -07:00
Lauren Spiegel f26ff323b2 SQUASH -- remove unnecesary session config params 2017-10-31 16:53:10 -07:00
Lauren Spiegel f3c3bc221d DROP ME -- logs and add test search option 2017-10-31 16:53:10 -07:00
Lauren Spiegel c7d3c9b8cc SQUASH -- add missing parens 2017-10-31 16:53:10 -07:00
Lauren Spiegel 41c55ba940 SQUASH - update search 2017-10-31 16:53:10 -07:00
Lauren Spiegel 2d92e0752a SQUASH -- clean up search code concat 2017-10-31 16:53:10 -07:00
Lauren Spiegel 0b9ee1fb45 FIX: put session if any error on getting 2017-10-31 16:53:10 -07:00
Lauren Spiegel 9c5fdc87ff SQUASH -- remove http 2017-10-31 16:53:10 -07:00
Lauren Spiegel 1e0f1fda5c SQUASH -- http 2017-10-31 16:53:10 -07:00
Lauren Spiegel 9a9724c2f8 SQUASH -- force config 2017-10-31 16:53:10 -07:00
Lauren Spiegel a5167d07d8 SQUASH -- add session config options 2017-10-31 16:53:10 -07:00
Lauren Spiegel 51469633fb SQUASH -- fix query code and s3 endpoint 2017-10-31 16:53:10 -07:00
Lauren Spiegel 3a40cd996a SQUASH - update search code 2017-10-31 16:53:10 -07:00
Lauren Spiegel ab39d1c772 change livy endpoint to livy for env and put query args in quotes 2017-10-31 16:53:10 -07:00
Lauren Spiegel 46dd5d61d7 DROP ME: add lb as endpoint to not mess with test environment for now 2017-10-31 16:53:10 -07:00
Lauren Spiegel 18663d0173 deal with dead session 2017-10-31 16:53:10 -07:00
Lauren Spiegel eb93c5c5be SQUASH -- configure session 2017-10-31 16:53:10 -07:00
Lauren Spiegel 805c85eed5 SQUASH -- begin integration with scala code 2017-10-31 16:53:10 -07:00
Lauren Spiegel 975165ec96 FT: Add search feature to bucketGet 2017-10-31 16:53:10 -07:00
Lauren Spiegel 810a3cd2de FT: Create tool for search listing call 2017-10-31 16:50:36 -07:00
Lauren Spiegel 35f578f599 DROP ME: enable recordlog (zenko will do this itself so just temp for local testing) 2017-10-31 16:50:36 -07:00
Lauren Spiegel 273b14c7a9 Add s3-front-lb endpoint for zenko/clueso 2017-10-31 16:50:36 -07:00
Lauren Spiegel 5ff4db0497 fix whitelist 2017-10-31 16:50:36 -07:00
Lauren Spiegel bd45bd73f3 DROP ME: arsenal dependency 2017-10-31 16:50:36 -07:00
Lauren Spiegel 526c75e260 temporary way of allowing METADATA bucket access 2017-10-31 16:49:10 -07:00
Cloud User 77bde1f2c3 dockerfile to change s3 in docker stack 2017-10-31 16:49:10 -07:00
9 changed files with 595 additions and 9 deletions

97
bin/search_bucket.js Executable file
View File

@ -0,0 +1,97 @@
#!/bin/sh
// 2>/dev/null ; exec "$(which nodejs 2>/dev/null || which node)" "$0" "$@"
'use strict'; // eslint-disable-line strict
const { auth } = require('arsenal');
const commander = require('commander');
const http = require('http');
const https = require('https');
const logger = require('../lib/utilities/logger');
// Send a signed GET ?search= request to the S3 endpoint and stream the
// raw response body to stdout. Exits the process: 0 on a 2xx response,
// 1 on any other status.
function _performSearch(host, port, bucketName, query, accessKey, secretKey,
    verbose, ssl) {
    const transport = ssl ? https : http;
    const requestOptions = {
        host,
        port,
        method: 'GET',
        path: `/${bucketName}/?search=${encodeURIComponent(query)}`,
        headers: {
            'Content-Length': 0,
        },
        // self-signed certificates are expected in test deployments
        rejectUnauthorized: false,
    };
    const request = transport.request(requestOptions, response => {
        if (verbose) {
            logger.info('response status code', {
                statusCode: response.statusCode,
            });
            logger.info('response headers', { headers: response.headers });
        }
        const chunks = [];
        response.setEncoding('utf8');
        response.on('data', chunk => chunks.push(chunk));
        response.on('end', () => {
            const responseBody = chunks.join('');
            if (response.statusCode >= 200 && response.statusCode < 300) {
                logger.info('Success');
                process.stdout.write(responseBody);
                process.exit(0);
            } else {
                logger.error('request failed with HTTP Status ', {
                    statusCode: response.statusCode,
                    body: responseBody,
                });
                process.exit(1);
            }
        });
    });
    // sign the request with AWS v4 auth (arsenal helper)
    auth.client.generateV4Headers(request, { search: query },
        accessKey, secretKey, 's3');
    if (verbose) {
        // NOTE(review): request._headers is a private node http field —
        // confirm it is still populated on the node version in use
        logger.info('request headers', { headers: request._headers });
    }
    request.end();
}
/**
 * This function is used as a binary to send a request to S3 to perform a
 * search on the objects in a bucket
 *
 * @return {undefined}
 */
function searchBucket() {
    // TODO: Include other bucket listing possible query params?
    commander
        .version('0.0.1')
        .option('-a, --access-key <accessKey>', 'Access key id')
        .option('-k, --secret-key <secretKey>', 'Secret access key')
        .option('-b, --bucket <bucket>', 'Name of the bucket')
        .option('-q, --query <query>', 'Search query')
        .option('-h, --host <host>', 'Host of the server')
        .option('-p, --port <port>', 'Port of the server')
        // FIX: both flags must be in a single string; previously '-s' and
        // '--ssl' were passed as two arguments, so commander treated
        // '--ssl' as the description and `commander.ssl` was never set.
        .option('-s, --ssl', 'Enable ssl')
        .option('-v, --verbose')
        .parse(process.argv);
    const { host, port, accessKey, secretKey, bucket, query, verbose, ssl } =
        commander;
    // all options except verbose/ssl are mandatory
    if (!host || !port || !accessKey || !secretKey || !bucket || !query) {
        logger.error('missing parameter');
        commander.outputHelp();
        process.exit(1);
    }
    _performSearch(host, port, bucket, query, accessKey, secretKey, verbose,
        ssl);
}
searchBucket();

View File

@ -8,7 +8,9 @@
"cloudserver-front": "us-east-1",
"s3.docker.test": "us-east-1",
"127.0.0.2": "us-east-1",
"s3.amazonaws.com": "us-east-1"
"s3.amazonaws.com": "us-east-1",
"s3-front-lb": "us-east-1",
"lb": "us-east-1"
},
"websiteEndpoints": ["s3-website-us-east-1.amazonaws.com",
"s3-website.us-east-2.amazonaws.com",
@ -44,6 +46,11 @@
"healthChecks": {
"allowFrom": ["127.0.0.1/8", "::1"]
},
"livy": {
"host": "livy",
"port": 8998,
"transport": "http"
},
"metadataClient": {
"host": "localhost",
"port": 9990
@ -61,7 +68,8 @@
"port": 9991
},
"recordLog": {
"enabled": false,
"enabled": true,
"recordLogName": "s3-recordlog"
}
},
"whiteListedIps": ["127.0.0.1/8", "::1"]
}

8
docker-compose.yml Normal file
View File

@ -0,0 +1,8 @@
# Minimal compose file to build the s3server image and run it locally,
# exposing the S3 API on port 8000.
version: '3'
services:
  s3-front:
    # tag targets a local registry (127.0.0.1:5000) so a docker
    # stack/swarm can pull the freshly built image
    image: 127.0.0.1:5000/s3server
    build: .
    ports:
      - "8000:8000"

View File

@ -414,6 +414,24 @@ class Config extends EventEmitter {
}
}
// Validate and copy the optional livy (spark REST gateway) settings.
if (config.livy) {
    this.livy = {};
    assert.strictEqual(typeof config.livy.host, 'string',
        'bad config: livy host must be ' +
        'a string');
    this.livy.host = config.livy.host;
    assert(Number.isInteger(config.livy.port)
        && config.livy.port > 0,
        'bad config: livy port must be a positive ' +
        'integer');
    this.livy.port = config.livy.port;
    // FIX: validate the incoming config value — the original checked
    // this.livy.transport (still unset at this point) and the condition
    // was inverted (it asserted transport was NEITHER 'http' nor 'https',
    // contradicting its own error message).
    assert(config.livy.transport === 'http' ||
        config.livy.transport === 'https', 'bad config: livy ' +
        'transport must be either "http" or "https"');
    this.livy.transport = config.livy.transport;
}
if (config.dataClient) {
this.dataClient = {};
assert.strictEqual(typeof config.dataClient.host, 'string',
@ -649,6 +667,17 @@ class Config extends EventEmitter {
.concat(config.healthChecks.allowFrom);
}
// IPs allowed privileged access (the config examples use CIDR notation,
// e.g. "127.0.0.1/8"); defaults to an empty whitelist when absent.
this.whiteListedIps = [];
if (config.whiteListedIps) {
    assert(Array.isArray(config.whiteListedIps), 'config: invalid ' +
        'whiteListedIps. whiteListedIps must be array');
    // every entry must be a string
    config.whiteListedIps.forEach(ip => {
        assert.strictEqual(typeof ip, 'string', 'config: invalid ' +
            'whiteListedIps. each item in whiteListedIps must be a string');
    });
    this.whiteListedIps = config.whiteListedIps;
}
if (config.certFilePaths) {
assert(typeof config.certFilePaths === 'object' &&
typeof config.certFilePaths.key === 'string' &&

View File

@ -0,0 +1,72 @@
const Parser = require('node-sql-parser').Parser;
const parser = new Parser();
const { errors } = require('arsenal');
const objModel = require('arsenal').models.ObjectMD;
// Walk the parsed WHERE-clause AST and return the first attribute name
// that is not in possibleAttributes, or undefined if all are known.
function _validateTree(whereClause, possibleAttributes) {
    let unknownAttribute;
    function inspect(node) {
        // stop descending once a bad attribute has been found
        if (unknownAttribute) {
            return;
        }
        // a dotted attribute (e.g. userMd.`x-amz-meta-foo`) parses with
        // the prefix in node.table; validate that prefix
        if (node.table && !possibleAttributes[node.table]) {
            unknownAttribute = node.table;
        }
        // a bare attribute (e.g. content-length) has no table and its
        // name lands in node.column
        if (!node.table && node.column &&
            !possibleAttributes[node.column]) {
            unknownAttribute = node.column;
        }
        // attributes always sit on the left of a comparison; the value
        // being searched for is on the right
        if (node.left) {
            inspect(node.left);
        }
        if (node.right && node.right.left) {
            inspect(node.right.left);
        }
    }
    inspect(whereClause);
    return unknownAttribute;
}
/**
 * validateSearchParams - validate value of ?search= in request
 * @param {string} searchParams - value of search params in request
 * which should be just a sql where clause
 * For metadata: userMd.`x-amz-meta-color`=\"blue\"
 * For tags: tags.`x-amz-meta-color`=\"blue\"
 * For any other attribute: `content-length`=5
 * @return {undefined | error} undefined if validates or arsenal error if not
 */
function validateSearchParams(searchParams) {
    let ast;
    try {
        // wrap the clause in a dummy SELECT so the parser accepts it
        ast = parser.parse(`SELECT * FROM t WHERE ${searchParams}`);
    } catch (e) {
        // FIX: always return on a parse failure; the previous `if (e)`
        // guard was redundant and, had it ever been false, the code would
        // have fallen through and crashed on `ast.where` below.
        return errors.InvalidArgument
            .customizeDescription('Invalid sql where clause ' +
            'sent as search query');
    }
    // For search we aggregate the user metadata (x-amz-meta)
    // under the userMd attribute so add to possibilities.
    // Copy into a fresh object so we never mutate whatever object
    // ObjectMD.getAttributes() hands back.
    const possibleAttributes = Object.assign({}, objModel.getAttributes(),
        { userMd: true });
    const invalidAttribute = _validateTree(ast.where, possibleAttributes);
    if (invalidAttribute) {
        return errors.InvalidArgument.customizeDescription('Search param ' +
            `contains unknown attribute: ${invalidAttribute}`);
    }
    return undefined;
}
module.exports = validateSearchParams;

View File

@ -1,5 +1,5 @@
const querystring = require('querystring');
const { errors, versioning, s3middleware } = require('arsenal');
const { errors, versioning, s3middleware, LivyClient } = require('arsenal');
const constants = require('../../constants');
const services = require('../services');
@ -7,11 +7,99 @@ const { metadataValidateBucket } = require('../metadata/metadataUtils');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const escapeForXml = s3middleware.escapeForXml;
const { pushMetric } = require('../utapi/utilities');
const validateSearchParams = require('../api/apiUtils/bucket/validateSearch');
const versionIdUtils = versioning.VersionID;
// Sample XML response for GET bucket objects:
/*
const config = require('../Config.js').config;
const werelogs = require('werelogs');
werelogs.configure({ level: config.log.logLevel,
    dump: config.log.dumpLevel });
const log = new werelogs.Logger('LivyClient');
// FIX: config.livy.transport is the string 'http' or 'https'; the
// previous `!!config.livy.transport.https` read a `.https` property off
// that string, which is always undefined, so https was never enabled.
const useHttps = config.livy.transport === 'https';
const livyClient = new LivyClient(config.livy.host,
    config.livy.port, log, useHttps);
// Scala code posted once per fresh livy session; it builds the clueso
// query executor that every subsequent search statement reuses.
const setUpSessionCode = 'import com.scality.clueso._\n' +
    'import com.scality.clueso.query._\n' +
    'val config = com.scality.clueso.SparkUtils.' +
    'loadCluesoConfig("/apps/spark-modules/application.conf"); \n' +
    'SparkUtils.confSparkSession(spark,config); \n' +
    'val queryExecutor = MetadataQueryExecutor(spark, config); \n';
// Settings sent to livy when creating a new spark session.
const sessionConfig = {
    kind: 'spark',
    driverMemory: '3g',
    numExecutors: 3,
    executorMemory: '8g',
    jars: ['/apps/spark-modules/clueso-1.0-SNAPSHOT-all.jar'],
    conf: { 'spark.hadoop.fs.s3a.impl':
        'org.apache.hadoop.fs.s3a.S3AFileSystem',
        'spark.hadoop.fs.s3a.connection.ssl.enabled': 'false',
        // TODO: We need to figure out how to configure this directly in spark
        // or could use the restEndpoints from the config
        'spark.hadoop.fs.s3a.endpoint': 'lb',
        'spark.driver.extraJavaOptions': '-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/apps/spark-modules',
        'spark.driver.extraClassPath': '/opt/alluxio/core/client/runtime/target/alluxio-core-client-runtime-1.5.0-jar-with-dependencies.jar',
        'spark.executor.extraClassPath': '/opt/alluxio/core/client/runtime/target/alluxio-core-client-runtime-1.5.0-jar-with-dependencies.jar',
        // TODO: For Zenko, we can send admin keys but for enterprise version,
        // s3 will not have access to keys. So, this should be set
        // in spark config directly on deployment.
        'spark.hadoop.fs.s3a.access.key': 'accessKey1',
        'spark.hadoop.fs.s3a.secret.key': 'verySecretKey1',
        'spark.hadoop.fs.s3a.path.style.access': 'true',
        'spark.cores.max': '6',
        'spark.metrics.namespace': 'clueso_searcher',
        'spark.driver.port': '38600',
        'spark.metrics.conf': '/apps/spark-modules/metrics.properties',
    },
};
// parse JSON safely: return the parsed value, or the caught Error on
// malformed input, instead of throwing
function _safeJSONParse(s) {
    let parsed;
    try {
        parsed = JSON.parse(s);
    } catch (parseErr) {
        return parseErr;
    }
    return parsed;
}
/**
 * @typedef {Object} availableSession
 * @property {number} [sessionId] sessionId to use
 * @property {boolean} [SlowDown] whether to return SlowDown error
 */
/**
 * findAvailableSession - pick an idle livy session, or signal SlowDown
 * when 4 or more sessions are already busy/starting
 * @param {availableSession[]} sessions - array of session objects
 * @return {object} availableSession - empty object when no idle session
 * exists but capacity remains to create one
 */
function findAvailableSession(sessions) {
    const result = {};
    const idle = sessions.filter(s => s.state === 'idle');
    if (idle.length > 0) {
        // spread load by choosing a random idle session
        const chosen = idle[Math.floor(Math.random() * idle.length)];
        result.sessionId = chosen.id;
        return result;
    }
    const active = sessions.filter(s =>
        s.state === 'busy' || s.state === 'starting');
    if (active.length >= 4) {
        result.SlowDown = true;
    }
    return result;
}
/* Sample XML response for GET bucket objects:
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Name>example-bucket</Name>
<Prefix></Prefix>
@ -203,6 +291,93 @@ function processMasterVersions(bucketName, listParams, list) {
return xml.join('');
}
/**
 * handleStatement - run search code in a livy session and translate the
 * statement result into bucket-listing XML
 * @param {number} sessionId - id of the livy session to use
 * @param {string} codeToExecute - scala code to run in the session
 * @param {object} corsHeaders - CORS headers to pass back to the caller
 * @param {string} bucketName - name of the bucket being listed
 * @param {object} listParams - listing params (maxKeys, marker, ...)
 * @param {object} log - werelogs request logger
 * @param {function} callback - callback(err, xml, corsHeaders)
 * @return {undefined} undefined
 */
function handleStatement(sessionId, codeToExecute, corsHeaders, bucketName,
    listParams, log, callback) {
    log.info('about to postStatement with codeToExecute', { codeToExecute });
    return livyClient.postStatement(sessionId,
        codeToExecute, (err, res) => {
            // FIX: removed leftover debug console.log calls
            if (err) {
                log.info('error from livy posting ' +
                    'statement', { error: err.message });
                return callback(errors.InternalError
                    .customizeDescription('Error ' +
                    'performing search'),
                    null, corsHeaders);
            }
            if (!res || !Number.isInteger(res.id)) {
                log.error('posting statement did not result ' +
                    'in valid statement id', { resFromLivy: res });
                return callback(errors.InternalError
                    .customizeDescription('Error ' +
                    'performing search'),
                    null, corsHeaders);
            }
            // distinct names so the outer err/res are not shadowed
            return livyClient.getStatement(sessionId, res.id,
                (stmtErr, stmtRes) => {
                    if (stmtErr) {
                        log.info('error from livy getting ' +
                            'statement', { error: stmtErr.message });
                        return callback(errors.InternalError
                            .customizeDescription('Error ' +
                            'performing search'),
                            null, corsHeaders);
                    }
                    // FIX: the original tested `!res.status === 'ok'`,
                    // which is always false; reject any non-ok status
                    if (!stmtRes || !stmtRes.data ||
                        !stmtRes.data['text/plain'] ||
                        stmtRes.status !== 'ok') {
                        log.error('getting statement did not result ' +
                            'in valid result', { resFromLivy: stmtRes });
                        return callback(errors.InternalError
                            .customizeDescription('Error ' +
                            'performing search'),
                            null, corsHeaders);
                    }
                    const parsedRes =
                        _safeJSONParse(stmtRes.data['text/plain']);
                    if (parsedRes instanceof Error) {
                        log.error('livy returned invalid json',
                            { resFromLivy: stmtRes });
                        return callback(errors.InternalError
                            .customizeDescription('Error ' +
                            'performing search'),
                            null, corsHeaders);
                    }
                    // Not grouping searched keys by common prefix so just
                    // set CommonPrefixes to an empty array
                    const list = { CommonPrefixes: [] };
                    list.Contents = parsedRes.map(entry => ({
                        key: entry.key,
                        value: {
                            LastModified: entry['last-modified'],
                            ETag: entry['content-md5'],
                            Size: entry['content-length'],
                            StorageClass: entry['x-amz-storage-class'],
                            Owner: {
                                ID: entry['owner-id'],
                                DisplayName: entry['owner-display-name'],
                            },
                        },
                    }));
                    if (listParams.maxKeys < list.Contents.length) {
                        // If received one more key than the max, the
                        // last item is to send back a next marker
                        // so remove from contents and send as NextMarker
                        list.NextMarker = list.Contents.pop().key;
                        list.isTruncated = 'true';
                    }
                    // TODO: (1) handle versioning,
                    // (2) TEST nextMarkers -- nextMarker should be
                    // the last key returned if the number of keys is more
                    // than the max keys (since we request max plus 1)
                    // (3) TEST sending nextMarker and max keys
                    const xml = processMasterVersions(bucketName,
                        listParams, list);
                    return callback(null, xml, corsHeaders);
                });
        });
}
/**
* bucketGet - Return list of objects in bucket
* @param {AuthInfo} authInfo - Instance of AuthInfo class with
@ -227,6 +402,12 @@ function bucketGet(authInfo, request, log, callback) {
if (Number.isNaN(requestMaxKeys) || requestMaxKeys < 0) {
return callback(errors.InvalidArgument);
}
if (params.search !== undefined) {
const validation = validateSearchParams(params.search);
if (validation instanceof Error) {
return callback(validation);
}
}
// AWS only returns 1000 keys even if max keys are greater.
// Max keys stated in response xml can be greater than actual
// keys returned.
@ -259,6 +440,75 @@ function bucketGet(authInfo, request, log, callback) {
listParams.versionIdMarker = params['version-id-marker'] ?
versionIdUtils.decode(params['version-id-marker']) : undefined;
}
if (params.search !== undefined) {
log.info('performaing search listing', { search: params.search });
// Add escape character to quotes since enclosing where clause
// in quotes when sending to livy
const whereClause = params.search.replace(/"/g, '\\"');
console.log("whereClause!!", whereClause);
// spark should return keys starting AFTER marker alphabetically
// spark should return up to maxKeys
const start = listParams.marker ? `Some("${listParams.marker}")` :
'None';
const searchCodeToExecute =
'queryExecutor.executeAndPrint(MetadataQuery' +
`("${bucketName}", "${whereClause}", ${start}, ` +
// Add one to the keys requested so we can use the last key
// as a next marker if needed
// Might just need the last one???!!!
`${listParams.maxKeys + 1}));\n`;
// List sessions to find available.
// If at least 4 active and busy/starting, return SlowDown error
// (don't want to create too many since holding dataframes
// in mem within a session)
// If idle sessions, use random available one
return livyClient.getSessions(null, null,
(err, res) => {
if (err || !res) {
log.info('err from livy listing sessions',
{ error: err });
return callback(errors.InternalError
.customizeDescription('Error contacting spark ' +
'for search'), null, corsHeaders);
}
const availableSession = findAvailableSession(res.sessions);
if (availableSession.SlowDown) {
return callback(errors.SlowDown, null, corsHeaders);
}
if (availableSession.sessionId === undefined) {
return livyClient.postSession(sessionConfig,
(err, res) => {
if (err) {
log.info('error from livy creating session',
{ error: err.message });
return callback(errors.InternalError
.customizeDescription('Error ' +
'performing search'),
null, corsHeaders);
}
if (!res || !Number.isInteger(res.id)) {
log.error('posting session did not ' +
'result in valid session id',
{ resFromLivy: res });
return callback(errors.InternalError
.customizeDescription('Error ' +
'performing search'),
null, corsHeaders);
}
const codeToExecute = `${setUpSessionCode} ` +
`${searchCodeToExecute};`;
return handleStatement(res.id, codeToExecute,
corsHeaders, bucketName, listParams, log,
callback);
});
}
// no need to create session
return handleStatement(availableSession.sessionId,
searchCodeToExecute, corsHeaders, bucketName,
listParams, log, callback);
});
}
return services.getObjectListing(bucketName, listParams, log,
(err, list) => {
if (err) {

View File

@ -14,6 +14,7 @@ const data = require('./data/wrapper');
const routes = arsenal.s3routes.routes;
const websiteEndpoints = _config.websiteEndpoints;
const whiteListedIps = _config.whiteListedIps;
let allEndpoints;
function updateAllEndpoints() {
@ -77,6 +78,7 @@ class S3Server {
websiteEndpoints,
blacklistedPrefixes,
dataRetrievalFn: data.get,
whiteListedIps,
};
routes(req, res, params, logger);
}

View File

@ -19,12 +19,14 @@
},
"homepage": "https://github.com/scality/S3#readme",
"dependencies": {
"aws-sdk": "2.28.0",
"arsenal": "scality/Arsenal",
"arsenal": "scality/Arsenal#ft/clueso",
"async": "~2.5.0",
"aws-sdk": "2.28.0",
"azure-storage": "^2.1.0",
"bucketclient": "scality/bucketclient",
"commander": "^2.9.0",
"ioredis": "2.4.0",
"node-sql-parser": "0.0.1",
"node-uuid": "^1.4.3",
"npm-run-all": "~4.0.2",
"sproxydclient": "scality/sproxydclient",

View File

@ -0,0 +1,118 @@
const assert = require('assert');
const { errors } = require('arsenal');
const validateSearch =
    require('../../../lib/api/apiUtils/bucket/validateSearch');

// Table-driven unit tests for validateSearch: each entry provides a
// ?search= where clause and the expected result — undefined when the
// clause is valid, an arsenal InvalidArgument error naming the first
// unknown attribute otherwise.
describe('validate search where clause', () => {
    const tests = [
        {
            it: 'should allow a valid simple search with table attribute',
            searchParams: 'userMd.`x-amz-meta-dog`="labrador"',
            result: undefined,
        },
        {
            it: 'should allow a simple search with known ' +
                'column attribute',
            searchParams: '`content-length`="10"',
            result: undefined,
        },
        {
            it: 'should allow valid search with AND',
            searchParams: 'userMd.`x-amz-meta-dog`="labrador" ' +
                'AND userMd.`x-amz-meta-age`="5"',
            result: undefined,
        },
        {
            it: 'should allow valid search with OR',
            searchParams: 'userMd.`x-amz-meta-dog`="labrador" ' +
                'OR userMd.`x-amz-meta-age`="5"',
            result: undefined,
        },
        {
            it: 'should allow valid search with double AND',
            searchParams: 'userMd.`x-amz-meta-dog`="labrador" ' +
                'AND userMd.`x-amz-meta-age`="5" ' +
                'AND userMd.`x-amz-meta-whatever`="ok"',
            result: undefined,
        },
        {
            it: 'should allow valid chained search with tables and columns',
            searchParams: 'userMd.`x-amz-meta-dog`="labrador" ' +
                'AND userMd.`x-amz-meta-age`="5" ' +
                'AND `content-length`="10"' +
                'OR isDeleteMarker="true"' +
                'AND userMd.`x-amz-meta-whatever`="ok"',
            result: undefined,
        },
        {
            it: 'should allow valid LIKE search',
            searchParams: 'userMd.`x-amz-meta-dog` LIKE "lab%" ' +
                'AND userMd.`x-amz-meta-age` LIKE "5%" ' +
                'AND `content-length`="10"',
            result: undefined,
        },
        // invalid clauses: the returned error message must name the
        // first unknown attribute encountered during the tree walk
        {
            it: 'should disallow a LIKE search with invalid attribute',
            searchParams: 'userNotMd.`x-amz-meta-dog` LIKE "labrador"',
            result: errors.InvalidArgument.customizeDescription('Search ' +
                'param contains unknown attribute: userNotMd'),
        },
        {
            it: 'should disallow a simple search with unknown attribute',
            searchParams: 'userNotMd.`x-amz-meta-dog`="labrador"',
            result: errors.InvalidArgument.customizeDescription('Search ' +
                'param contains unknown attribute: userNotMd'),
        },
        {
            it: 'should disallow a compound search with unknown ' +
                'attribute on right',
            searchParams: 'userMd.`x-amz-meta-dog`="labrador" AND ' +
                'userNotMd.`x-amz-meta-dog`="labrador"',
            result: errors.InvalidArgument.customizeDescription('Search ' +
                'param contains unknown attribute: userNotMd'),
        },
        {
            it: 'should disallow a compound search with unknown ' +
                'attribute on left',
            searchParams: 'userNotMd.`x-amz-meta-dog`="labrador" AND ' +
                'userMd.`x-amz-meta-dog`="labrador"',
            result: errors.InvalidArgument.customizeDescription('Search ' +
                'param contains unknown attribute: userNotMd'),
        },
        {
            it: 'should disallow a chained search with one invalid ' +
                'table attribute',
            searchParams: 'userMd.`x-amz-meta-dog`="labrador" ' +
                'AND userMd.`x-amz-meta-age`="5" ' +
                'OR userNotMd.`x-amz-meta-whatever`="ok"',
            result: errors.InvalidArgument.customizeDescription('Search ' +
                'param contains unknown attribute: userNotMd'),
        },
        {
            it: 'should disallow a simple search with unknown ' +
                'column attribute',
            searchParams: 'whatever="labrador"',
            result: errors.InvalidArgument.customizeDescription('Search ' +
                'param contains unknown attribute: whatever'),
        },
        {
            it: 'should disallow a chained search with one invalid ' +
                'column attribute',
            searchParams: 'userMd.`x-amz-meta-dog`="labrador" ' +
                'AND userMd.`x-amz-meta-age`="5" ' +
                'OR madeUp="something"' +
                'OR userMd.`x-amz-meta-whatever`="ok"',
            result: errors.InvalidArgument.customizeDescription('Search ' +
                'param contains unknown attribute: madeUp'),
        },
    ];
    // run every table entry as its own mocha test case
    tests.forEach(test => {
        it(test.it, () => {
            const actualResult =
                validateSearch(test.searchParams);
            assert.deepStrictEqual(actualResult, test.result);
        });
    });
});