diff --git a/circle.yml b/circle.yml
index 384b5cb7d4..f97e8f3a91 100644
--- a/circle.yml
+++ b/circle.yml
@@ -41,7 +41,7 @@ dependencies:
- gem install mime-types -v 3.1
- gem install rspec -v 3.5
- gem install json
- - gem install digest
+ # - gem install digest
# java sdk dependencies
- sudo apt-get install -y -q default-jdk
diff --git a/dataserver.js b/dataserver.js
index e1c87ee09f..f6da50f1f9 100644
--- a/dataserver.js
+++ b/dataserver.js
@@ -6,7 +6,9 @@ const logger = require('./lib/utilities/logger');
if (config.backends.data === 'file' ||
(config.backends.data === 'multiple' &&
- config.backends.metadata !== 'scality')) {
+ config.backends.metadata !== 'scality') &&
+ (config.backends.auth !== 'scality' &&
+ config.backends.metadata !== 'mongodb')) {
const dataServer = new arsenal.network.rest.RESTServer(
{ bindAddress: config.dataDaemon.bindAddress,
port: config.dataDaemon.port,
diff --git a/docs/MD_SEARCH.md b/docs/MD_SEARCH.md
new file mode 100644
index 0000000000..6c01c52d7c
--- /dev/null
+++ b/docs/MD_SEARCH.md
@@ -0,0 +1,229 @@
+# Metadata Search Documentation
+
+## Description
+
+This feature enables search to be performed on the metadata of objects
+stored in Zenko.
+
+## Requirements
+
++ MongoDB
+
+## Design
+
+The MD Search feature expands on the existing `GET Bucket` S3 API. It allows
+users to conduct metadata searches by adding the custom Zenko querystring
+parameter, `search`. The `search` parameter is structured as a pseudo SQL
+WHERE clause and supports basic SQL operators, for example
+`"A=1 AND B=2 OR C=3"` (more complex queries can also be achieved with the
+nesting operators, `(` and `)`).
+
+The search process is as follows:
+
++ Zenko receives a `GET` request.
+
+ ```
+ # regular getBucket request
+ GET /bucketname HTTP/1.1
+ Host: 127.0.0.1:8000
+ Date: Wed, 18 Oct 2018 17:50:00 GMT
+ Authorization: authorization string
+
+ # getBucket versions request
+ GET /bucketname?versions HTTP/1.1
+ Host: 127.0.0.1:8000
+ Date: Wed, 18 Oct 2018 17:50:00 GMT
+ Authorization: authorization string
+
+ # search getBucket request
+ GET /bucketname?search=key%3Dsearch-item HTTP/1.1
+ Host: 127.0.0.1:8000
+ Date: Wed, 18 Oct 2018 17:50:00 GMT
+ Authorization: authorization string
+ ```
+
++ If the request does not contain the query parameter `search`, a normal
+  bucket listing is performed and an XML result containing the list of
+  objects is returned as the response.
++ If the request does contain the query parameter `search`, the search string
+  is parsed and validated.
+
+  + If the search string is invalid, an `InvalidArgument` error is returned
+    as the response.
+  + If the search string is valid, an abstract syntax tree (AST) is
+    generated.
+
++ The AST is then passed to the MongoDB backend and used as the query filter
+  for retrieving objects in a bucket that satisfy the requested search
+  conditions (see the sketch below).
++ The filtered results are then parsed and returned as the response.
+
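+As an illustration, the query `x-amz-meta-color=blue AND size>0` translates
+into roughly the following MongoDB filter (a sketch based on the `parseWhere`
+utility modified in this change, which maps each SQL operator to its MongoDB
+equivalent and prefixes fields with `value.`):
+
+```javascript
+// rough shape of the filter generated for "x-amz-meta-color=blue AND size>0"
+{
+    $and: [
+        { 'value.x-amz-meta-color': { $eq: 'blue' } },
+        { 'value.size': { $gt: 0 } },
+    ],
+}
+```
+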
+The results of an MD search have the same structure as `GET Bucket`
+results:
+
+```xml
+<?xml version="1.0" encoding="UTF-8"?>
+<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
+    <Name>bucketname</Name>
+    <Prefix/>
+    <Marker/>
+    <MaxKeys>1000</MaxKeys>
+    <IsTruncated>false</IsTruncated>
+    <Contents>
+        <Key>objectKey</Key>
+        <LastModified>2018-04-19T18:31:49.426Z</LastModified>
+        <ETag>&quot;d41d8cd98f00b204e9800998ecf8427e&quot;</ETag>
+        <Size>0</Size>
+        <Owner>
+            <ID>79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be</ID>
+            <DisplayName>Bart</DisplayName>
+        </Owner>
+        <StorageClass>STANDARD</StorageClass>
+    </Contents>
+    <Contents>
+        ...
+    </Contents>
+</ListBucketResult>
+```
+
+## Performing MD Search with Zenko
+
+To make a successful request to Zenko, you need:
+
++ Zenko credentials
++ A request signed with AWS Signature Version 4 (Auth V4)
+
+With these requirements met, you can perform metadata searches by:
+
++ using the `search_bucket` tool in the
+ [Scality/S3](https://github.com/scality/S3) GitHub repository.
++ creating an Auth V4 signed HTTP request to Zenko in the programming
+  language of your choice
+
+### Using the S3 Tool
+
+After cloning the [Scality/S3](https://github.com/scality/S3) GitHub repository
+and installing the necessary dependencies, you can run the following command
+in the S3 project root directory to access the search tool.
+
+```
+node bin/search_bucket
+```
+
+This will generate the following output:
+
+```
+Usage: search_bucket [options]
+
+Options:
+
+ -V, --version output the version number
+ -a, --access-key Access key id
+ -k, --secret-key Secret access key
+ -b, --bucket Name of the bucket
+ -q, --query Search query
+ -h, --host Host of the server
+ -p, --port Port of the server
+ -s --ssl
+ -v, --verbose
+ -h, --help output usage information
+```
+
+In the following examples, the Zenko server is accessible at the endpoint
+`http://127.0.0.1:8000` and contains the bucket `zenkobucket`.
+
+```
+# search for objects with metadata "color" set to "blue"
+node bin/search_bucket -a accessKey1 -k verySecretKey1 -b zenkobucket \
+ -q "x-amz-meta-color=blue" -h 127.0.0.1 -p 8000
+
+# search for objects tagged with "type=color"
+node bin/search_bucket -a accessKey1 -k verySecretKey1 -b zenkobucket \
+ -q "tags.type=color" -h 127.0.0.1 -p 8000
+```
+
+### Coding Examples
+
+Search requests can also be performed by making HTTP requests authenticated
+with the `AWS Signature Version 4` scheme.\
+See the following URLs for more information about the V4 authentication
+scheme:
+
++ http://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html
++ http://docs.aws.amazon.com/general/latest/gr/sigv4-signed-request-examples.html
+
+You can also view examples for making requests with Auth V4 in various
+ languages [here](../examples).
+
+### Specifying Metadata Fields
+
+To search common metadata headers:
+
+```
+ {metadata-key}{supported SQL op}{search value}
+ # example
+ key = blueObject
+ size > 0
+ key LIKE "blue.*"
+```
+
+To search custom user metadata:
+
+```
+ # metadata must be prefixed with "x-amz-meta-"
+ x-amz-meta-{usermetadata-key}{supported SQL op}{search value}
+ # example
+ x-amz-meta-color = blue
+ x-amz-meta-color != red
+ x-amz-meta-color LIKE "b.*"
+```
+
+To search tags:
+
+```
+ # tag searches must be prefixed with "tags."
+ tags.{tag-key}{supported SQL op}{search value}
+ # example
+ tags.type = color
+```
+
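+Conditions on different fields can be combined with `AND`, `OR`, and
+parentheses:
+
+```
+ # both conditions must hold
+ x-amz-meta-color = blue AND tags.type = color
+ # nesting with parentheses
+ tags.type = color AND (x-amz-meta-color = blue OR x-amz-meta-color = red)
+```
+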
+### Differences from SQL
+
+The MD search queries are similar to the `WHERE` clauses of SQL queries, but
+they differ in that:
+
++ MD search queries follow the `PCRE` format
++ Search queries do not require values with hyphens to be enclosed in
+ backticks (`` ` ``)
+
+ ```
+ # SQL query
+ `x-amz-meta-search-item` = `ice-cream-cone`
+
+ # MD Search query
+ x-amz-meta-search-item = ice-cream-cone
+ ```
+
++ Search queries do not support all of the SQL operators.
+
+ + Supported SQL Operators: `=`, `<`, `>`, `<=`, `>=`, `!=`,
+ `AND`, `OR`, `LIKE`, `<>`
+ + Unsupported SQL Operators: `NOT`, `BETWEEN`, `IN`, `IS`, `+`,
+ `-`, `%`, `^`, `/`, `*`, `!`
+
+#### Using Regular Expressions in MD Search
+
++ Regular expressions used in MD search differ from SQL in that wildcards are
+ represented with `.*` instead of `%`.
++ Regex patterns must be wrapped in quotes, as not doing so can lead to
+ misinterpretation of patterns.
++ Regex patterns can be written in the `/pattern/` syntax, or as just the
+ pattern if regex options are not required, similar to `PCRE`.
+
+Example regular expressions:
+
+ ```
+ # search for strings containing the substring "helloworld"
+ ".*helloworld.*"
+ "/.*helloworld.*/"
+ "/.*helloworld.*/i"
+ ```
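+
+As a sketch of how these patterns are interpreted, the `parseLikeExpression`
+helper added in this change converts a quoted pattern into a MongoDB `$regex`
+query (return values shown as comments):
+
+```javascript
+// a bare pattern is passed through as a string
+parseLikeExpression('.*helloworld.*');
+// => { $regex: '.*helloworld.*' }
+
+// the "/pattern/options" form is compiled to a RegExp with options
+parseLikeExpression('/.*helloworld.*/i');
+// => { $regex: /.*helloworld.*/, $options: 'i' }
+```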
diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml
index 74232a1a82..8c1c853fe1 100644
--- a/docs/mkdocs.yml
+++ b/docs/mkdocs.yml
@@ -1,3 +1,4 @@
+---
# http://www.mkdocs.org/user-guide/configuration/
# https://github.com/mkdocs/mkdocs/wiki/MkDocs-Themes
diff --git a/examples/go-md-search.go b/examples/go-md-search.go
new file mode 100644
index 0000000000..d61272cbe9
--- /dev/null
+++ b/examples/go-md-search.go
@@ -0,0 +1,46 @@
+package main
+
+import (
+ "fmt"
+ "time"
+ "bytes"
+ "net/http"
+ "net/url"
+ "io/ioutil"
+ "github.com/aws/aws-sdk-go/aws/credentials"
+ "github.com/aws/aws-sdk-go/aws/signer/v4"
+)
+
+func main() {
+ // Input AWS access key, secret key
+ aws_access_key_id := "accessKey1"
+ aws_secret_access_key := "verySecretKey1"
+ endpoint := "http://localhost:8000"
+ bucket_name := "bucketname"
+ searchQuery := url.QueryEscape("x-amz-meta-color=blue")
+ buf := bytes.NewBuffer([]byte{})
+
+ requestUrl := fmt.Sprintf("%s/%s?search=%s",
+ endpoint, bucket_name, searchQuery)
+
+ request, err := http.NewRequest("GET", requestUrl, buf)
+ if err != nil {
+ panic(err)
+ }
+ reader := bytes.NewReader(buf.Bytes())
+ credentials := credentials.NewStaticCredentials(aws_access_key_id,
+ aws_secret_access_key, "")
+ signer := v4.NewSigner(credentials)
+ signer.Sign(request, reader, "s3", "us-east-1", time.Now())
+ client := &http.Client{}
+ resp, err := client.Do(request)
+ if err != nil {
+ panic(err)
+ }
+ defer resp.Body.Close()
+ body, err := ioutil.ReadAll(resp.Body)
+ if err != nil {
+ panic(err)
+ }
+ fmt.Println(string(body))
+}
diff --git a/examples/node-md-search.js b/examples/node-md-search.js
new file mode 100644
index 0000000000..a3f29bb12b
--- /dev/null
+++ b/examples/node-md-search.js
@@ -0,0 +1,28 @@
+const { S3 } = require('aws-sdk');
+const config = {
+ sslEnabled: false,
+ endpoint: 'http://127.0.0.1:8000',
+ signatureCache: false,
+ signatureVersion: 'v4',
+ region: 'us-east-1',
+ s3ForcePathStyle: true,
+ accessKeyId: 'accessKey1',
+ secretAccessKey: 'verySecretKey1',
+};
+const s3Client = new S3(config);
+
+const encodedSearch =
+ encodeURIComponent('x-amz-meta-color="blue"');
+const req = s3Client.listObjects({ Bucket: 'bucketname' });
+
+// the build event
+req.on('build', () => {
+ req.httpRequest.path = `${req.httpRequest.path}?search=${encodedSearch}`;
+});
+req.on('success', res => {
+ process.stdout.write(`Result ${res.data}`);
+});
+req.on('error', err => {
+ process.stdout.write(`Error ${err}`);
+});
+req.send();
diff --git a/examples/python-md-search.py b/examples/python-md-search.py
new file mode 100644
index 0000000000..edbcf11b5f
--- /dev/null
+++ b/examples/python-md-search.py
@@ -0,0 +1,79 @@
+import datetime
+import hashlib
+import hmac
+import urllib
+# pip install requests
+import requests
+
+access_key = 'accessKey1'
+secret_key = 'verySecretKey1'
+
+method = 'GET'
+service = 's3'
+host = 'localhost:8000'
+region = 'us-east-1'
+canonical_uri = '/bucketname'
+query = 'x-amz-meta-color=blue'
+canonical_querystring = 'search=%s' % (urllib.quote(query))
+algorithm = 'AWS4-HMAC-SHA256'
+
+t = datetime.datetime.utcnow()
+amz_date = t.strftime('%Y%m%dT%H%M%SZ')
+date_stamp = t.strftime('%Y%m%d')
+
+# Key derivation functions. See:
+# http://docs.aws.amazon.com/general/latest/gr/signature-v4-examples.html#signature-v4-examples-python
+
+
+def sign(key, msg):
+ return hmac.new(key, msg.encode("utf-8"), hashlib.sha256).digest()
+
+
+def getSignatureKey(key, date_stamp, regionName, serviceName):
+ kDate = sign(('AWS4' + key).encode('utf-8'), date_stamp)
+ kRegion = sign(kDate, regionName)
+ kService = sign(kRegion, serviceName)
+ kSigning = sign(kService, 'aws4_request')
+ return kSigning
+
+
+payload_hash = hashlib.sha256('').hexdigest()
+
+canonical_headers = \
+ 'host:{0}\nx-amz-content-sha256:{1}\nx-amz-date:{2}\n' \
+ .format(host, payload_hash, amz_date)
+
+signed_headers = 'host;x-amz-content-sha256;x-amz-date'
+
+canonical_request = '{0}\n{1}\n{2}\n{3}\n{4}\n{5}' \
+ .format(method, canonical_uri, canonical_querystring, canonical_headers,
+ signed_headers, payload_hash)
+print canonical_request
+
+credential_scope = '{0}/{1}/{2}/aws4_request' \
+ .format(date_stamp, region, service)
+
+string_to_sign = '{0}\n{1}\n{2}\n{3}' \
+ .format(algorithm, amz_date, credential_scope,
+ hashlib.sha256(canonical_request).hexdigest())
+
+signing_key = getSignatureKey(secret_key, date_stamp, region, service)
+
+signature = hmac.new(signing_key, (string_to_sign).encode('utf-8'),
+ hashlib.sha256).hexdigest()
+
+authorization_header = \
+ '{0} Credential={1}/{2}, SignedHeaders={3}, Signature={4}' \
+ .format(algorithm, access_key, credential_scope, signed_headers, signature)
+
+# The 'host' header is added automatically by the Python 'requests' library.
+headers = {
+ 'X-Amz-Content-Sha256': payload_hash,
+ 'X-Amz-Date': amz_date,
+ 'Authorization': authorization_header
+}
+
+endpoint = 'http://' + host + canonical_uri + '?' + canonical_querystring
+
+r = requests.get(endpoint, headers=headers)
+print (r.text)
diff --git a/lib/Config.js b/lib/Config.js
index a56478c50a..a14c6f583b 100644
--- a/lib/Config.js
+++ b/lib/Config.js
@@ -78,12 +78,26 @@ function sproxydAssert(configSproxyd) {
function restEndpointsAssert(restEndpoints, locationConstraints) {
assert(typeof restEndpoints === 'object',
'bad config: restEndpoints must be an object of endpoints');
- assert(Object.keys(restEndpoints).every(
- r => typeof restEndpoints[r] === 'string'),
- 'bad config: each endpoint must be a string');
- assert(Object.keys(restEndpoints).every(
- r => typeof locationConstraints[restEndpoints[r]] === 'object'),
- 'bad config: rest endpoint target not in locationConstraints');
+ Object.keys(restEndpoints).forEach(r => {
+ const type = typeof restEndpoints[r];
+ assert(type === 'string' || type === 'object',
+ 'bad config: each endpoint must be a string or an object');
+ if (type === 'string') {
+ assert(typeof locationConstraints[restEndpoints[r]] === 'object',
+ 'bad config: rest endpoint target not in locationConstraints');
+ }
+ if (type === 'object') {
+ const { read, write } = restEndpoints[r];
+ assert(read && typeof read === 'string',
+ 'bad config: preferred read location must be a string');
+ assert(typeof locationConstraints[read] === 'object',
+ 'bad config: preferred read target not in locationConstraints');
+ assert(write && typeof write === 'string',
+ 'bad config: preferred write location must be a string');
+ assert(typeof locationConstraints[write] === 'object',
+ 'bad config: preferred write target not in locationConstraints');
+ }
+ });
}
function gcpLocationConstraintAssert(location, locationObj) {
@@ -438,7 +452,8 @@ class Config extends EventEmitter {
if (config.restEndpoints !== undefined) {
this.restEndpoints = {};
restEndpointsAssert(config.restEndpoints, this.locationConstraints);
- this.restEndpoints = config.restEndpoints;
+ this.restEndpoints =
+ this._normalizeRestEndpoints(config.restEndpoints);
}
if (!config.restEndpoints) {
@@ -640,6 +655,43 @@ class Config extends EventEmitter {
this.mongodb = {};
}
+ if (config.redis) {
+ if (config.redis.sentinels) {
+ this.redis = { sentinels: [], name: null };
+
+ assert(typeof config.redis.name === 'string',
+ 'bad config: redis sentinel name must be a string');
+ this.redis.name = config.redis.name;
+
+ assert(Array.isArray(config.redis.sentinels),
+ 'bad config: redis sentinels must be an array');
+ config.redis.sentinels.forEach(item => {
+ const { host, port } = item;
+ assert(typeof host === 'string',
+ 'bad config: redis sentinel host must be a string');
+ assert(typeof port === 'number',
+ 'bad config: redis sentinel port must be a number');
+ this.redis.sentinels.push({ host, port });
+ });
+ } else {
+ // check for standalone configuration
+ this.redis = {};
+ assert(typeof config.redis.host === 'string',
+ 'bad config: redis.host must be a string');
+ assert(typeof config.redis.port === 'number',
+ 'bad config: redis.port must be a number');
+ this.redis.host = config.redis.host;
+ this.redis.port = config.redis.port;
+ }
+ if (config.redis.password !== undefined) {
+ assert(
+ this._verifyRedisPassword(config.redis.password),
+ 'bad config: invalid password for redis. password must ' +
+ 'be a string');
+ this.redis.password = config.redis.password;
+ }
+ }
+
if (config.utapi) {
this.utapi = { component: 's3' };
if (config.utapi.port) {
@@ -659,42 +711,8 @@ class Config extends EventEmitter {
assert(config.localCache, 'missing required property of utapi ' +
'configuration: localCache');
this.utapi.localCache = config.localCache;
- assert(config.utapi.redis, 'missing required property of utapi ' +
+ assert(config.redis, 'missing required property of utapi ' +
'configuration: redis');
- if (config.utapi.redis.sentinels) {
- this.utapi.redis = { sentinels: [], name: null };
-
- assert(typeof config.utapi.redis.name === 'string',
- 'bad config: redis sentinel name must be a string');
- this.utapi.redis.name = config.utapi.redis.name;
-
- assert(Array.isArray(config.utapi.redis.sentinels),
- 'bad config: redis sentinels must be an array');
- config.utapi.redis.sentinels.forEach(item => {
- const { host, port } = item;
- assert(typeof host === 'string',
- 'bad config: redis sentinel host must be a string');
- assert(typeof port === 'number',
- 'bad config: redis sentinel port must be a number');
- this.utapi.redis.sentinels.push({ host, port });
- });
- } else {
- // check for standalone configuration
- this.utapi.redis = {};
- assert(typeof config.utapi.redis.host === 'string',
- 'bad config: redis.host must be a string');
- assert(typeof config.utapi.redis.port === 'number',
- 'bad config: redis.port must be a number');
- this.utapi.redis.host = config.utapi.redis.host;
- this.utapi.redis.port = config.utapi.redis.port;
- }
- if (config.utapi.redis.password !== undefined) {
- assert(
- this._verifyRedisPassword(config.utapi.redis.password),
- 'config: invalid password for utapi redis. password' +
- ' must be a string');
- this.utapi.redis.password = config.utapi.redis.password;
- }
if (config.utapi.metrics) {
this.utapi.metrics = config.utapi.metrics;
}
@@ -932,6 +950,21 @@ class Config extends EventEmitter {
return typeof password === 'string';
}
+ _normalizeRestEndpoints(restEndpoints) {
+ const retObj = {};
+ Object.keys(restEndpoints).forEach(r => {
+ if (typeof restEndpoints[r] === 'string') {
+ retObj[r] = {
+ read: restEndpoints[r],
+ write: restEndpoints[r],
+ };
+ } else {
+ retObj[r] = restEndpoints[r];
+ }
+ });
+ return retObj;
+ }
+
setAuthDataAccounts(accounts) {
this.authData.accounts = accounts;
this.emit('authdata-update');
@@ -960,7 +993,7 @@ class Config extends EventEmitter {
setRestEndpoints(restEndpoints) {
restEndpointsAssert(restEndpoints, this.locationConstraints);
- this.restEndpoints = restEndpoints;
+ this.restEndpoints = this._normalizeRestEndpoints(restEndpoints);
this.emit('rest-endpoints-update');
}
diff --git a/lib/api/apiUtils/bucket/bucketCreation.js b/lib/api/apiUtils/bucket/bucketCreation.js
index 2374ffe59d..b13aee36c3 100644
--- a/lib/api/apiUtils/bucket/bucketCreation.js
+++ b/lib/api/apiUtils/bucket/bucketCreation.js
@@ -166,12 +166,18 @@ function createBucket(authInfo, bucketName, headers,
const ownerDisplayName =
authInfo.getAccountDisplayName();
const creationDate = new Date().toJSON();
+ const writeLocationConstraint = typeof locationConstraint === 'object' ?
+ locationConstraint.write : locationConstraint;
+ const readLocationConstraint = typeof locationConstraint === 'object' ?
+ locationConstraint.read : null;
const bucket = new BucketInfo(bucketName,
canonicalID, ownerDisplayName, creationDate,
- BucketInfo.currentModelVersion());
+ BucketInfo.currentModelVersion(), null, null, null,
+ null, null, null, null, null, null, null, null,
+ readLocationConstraint);
- if (locationConstraint !== undefined) {
- bucket.setLocationConstraint(locationConstraint);
+ if (writeLocationConstraint) {
+ bucket.setLocationConstraint(writeLocationConstraint);
}
const parseAclParams = {
headers,
@@ -242,7 +248,7 @@ function createBucket(authInfo, bucketName, headers,
// error unless old AWS behavior (us-east-1)
// Existing locationConstraint must have legacyAwsBehavior === true
// New locationConstraint should have legacyAwsBehavior === true
- if (isLegacyAWSBehavior(locationConstraint) &&
+ if (isLegacyAWSBehavior(writeLocationConstraint) &&
isLegacyAWSBehavior(existingBucketMD.getLocationConstraint())) {
log.trace('returning 200 instead of 409 to mirror us-east-1');
return cb(null, existingBucketMD);
diff --git a/lib/api/apiUtils/bucket/checkPreferredLocations.js b/lib/api/apiUtils/bucket/checkPreferredLocations.js
new file mode 100644
index 0000000000..0bd093337d
--- /dev/null
+++ b/lib/api/apiUtils/bucket/checkPreferredLocations.js
@@ -0,0 +1,26 @@
+const { errors } = require('arsenal');
+
+function checkPreferredLocations(location, locationConstraints, log) {
+ const retError = loc => {
+ const errMsg = 'value of the location you are attempting to set - ' +
+ `${loc} - is not listed in the locationConstraint config`;
+ log.trace(`locationConstraint is invalid - ${errMsg}`,
+ { locationConstraint: loc });
+ return errors.InvalidLocationConstraint.customizeDescription(errMsg);
+ };
+ if (typeof location === 'string' && !locationConstraints[location]) {
+ return retError(location);
+ }
+ if (typeof location === 'object') {
+ const { read, write } = location;
+ if (!locationConstraints[read]) {
+ return retError(read);
+ }
+ if (!locationConstraints[write]) {
+ return retError(write);
+ }
+ }
+ return null;
+}
+
+module.exports = checkPreferredLocations;
diff --git a/lib/api/apiUtils/bucket/parseLikeExpression.js b/lib/api/apiUtils/bucket/parseLikeExpression.js
new file mode 100644
index 0000000000..0b85cfa6d8
--- /dev/null
+++ b/lib/api/apiUtils/bucket/parseLikeExpression.js
@@ -0,0 +1,19 @@
+/**
+ * parse LIKE expressions
+ * @param {string} regex - regex pattern
+ * @return {object} MongoDB search object
+ */
+function parseLikeExpression(regex) {
+ if (typeof regex !== 'string') {
+ return null;
+ }
+ const split = regex.split('/');
+ if (split.length < 3 || split[0] !== '') {
+ return { $regex: regex };
+ }
+ const pattern = split.slice(1, split.length - 1).join('/');
+ const regexOpt = split[split.length - 1];
+ return { $regex: new RegExp(pattern), $options: regexOpt };
+}
+
+module.exports = parseLikeExpression;
diff --git a/lib/api/apiUtils/bucket/parseWhere.js b/lib/api/apiUtils/bucket/parseWhere.js
index 858976eea3..d61a002946 100644
--- a/lib/api/apiUtils/bucket/parseWhere.js
+++ b/lib/api/apiUtils/bucket/parseWhere.js
@@ -1,3 +1,4 @@
+const parseLikeExpression = require('./parseLikeExpression');
/*
This code is based on code from https://github.com/olehch/sqltomongo
@@ -31,6 +32,7 @@
*/
const exprMapper = {
'=': '$eq',
+ '!=': '$ne',
'<>': '$ne',
'>': '$gt',
'<': '$lt',
@@ -52,7 +54,7 @@ function parseWhere(root) {
const e2 = parseWhere(root[operator][1]);
// eslint-disable-next-line
- return { '$and' : [
+ return { '$and' : [
e1,
e2,
] };
@@ -61,15 +63,21 @@ function parseWhere(root) {
const e2 = parseWhere(root[operator][1]);
// eslint-disable-next-line
- return { '$or' : [
+ return { '$or' : [
e1,
e2,
] };
}
const field = root[operator][0];
+ const value = root[operator][1];
const expr = exprMapper[operator];
const obj = {};
- obj[`value.${field}`] = { [expr]: root[operator][1] };
+
+ if (operator === 'LIKE') {
+ obj[`value.${field}`] = parseLikeExpression(value);
+ } else {
+ obj[`value.${field}`] = { [expr]: value };
+ }
return obj;
}
diff --git a/lib/api/apiUtils/bucket/validateReplicationConfig.js b/lib/api/apiUtils/bucket/validateReplicationConfig.js
new file mode 100644
index 0000000000..e348d8db3f
--- /dev/null
+++ b/lib/api/apiUtils/bucket/validateReplicationConfig.js
@@ -0,0 +1,25 @@
+/**
+ * Validates that the replication configuration will contain the default
+ * read location as a site if the read and write locations are different
+ * @param {object} config - replication configuration
+ * @param {object} bucket - bucket metadata
+ * @return {boolean} validity of replication configuration with rest endpoint
+ * configuration
+ */
+function validateConfiguration(config, bucket) {
+ const writeLocation = bucket.getLocationConstraint();
+ const readLocation = bucket.getReadLocationConstraint();
+ if (!config || !config.rules) {
+ return false;
+ }
+ const isValid = config.rules.every(rule => {
+ if (!rule.storageClass) {
+ return true;
+ }
+ const storageClasses = rule.storageClass.split(',');
+ return storageClasses.some(site => site === readLocation);
+ });
+ return (writeLocation === readLocation) || isValid;
+}
+
+module.exports = validateConfiguration;
diff --git a/lib/api/apiUtils/bucket/validateSearch.js b/lib/api/apiUtils/bucket/validateSearch.js
index fa942a11d0..c7285c5b5d 100644
--- a/lib/api/apiUtils/bucket/validateSearch.js
+++ b/lib/api/apiUtils/bucket/validateSearch.js
@@ -1,26 +1,49 @@
const Parser = require('sql-where-parser');
-const parser = new Parser();
const { errors } = require('arsenal');
const objModel = require('arsenal').models.ObjectMD;
+const BINARY_OP = 2;
+const sqlConfig = {
+ operators: [
+ {
+ '=': BINARY_OP,
+ '<': BINARY_OP,
+ '>': BINARY_OP,
+ '<>': BINARY_OP,
+ '<=': BINARY_OP,
+ '>=': BINARY_OP,
+ '!=': BINARY_OP,
+ },
+ { LIKE: BINARY_OP },
+ { AND: BINARY_OP },
+ { OR: BINARY_OP },
+ ],
+ tokenizer: {
+ shouldTokenize: ['(', ')', '=', '!=', '<', '>', '<=', '>=', '<>'],
+ shouldMatch: ['"', '\'', '`'],
+ shouldDelimitBy: [' ', '\n', '\r', '\t'],
+ },
+};
+const parser = new Parser(sqlConfig);
+
function _validateTree(whereClause, possibleAttributes) {
let invalidAttribute;
function _searchTree(node) {
- const operator = Object.keys(node)[0];
-
- if (operator === 'AND') {
- _searchTree(node[operator][0]);
- _searchTree(node[operator][1]);
- } else if (operator === 'OR') {
- _searchTree(node[operator][0]);
- _searchTree(node[operator][1]);
+ if (typeof node !== 'object') {
+ invalidAttribute = node;
} else {
- const field = node[operator][0];
-
- if (!possibleAttributes[field] &&
- !field.startsWith('x-amz-meta-')) {
- invalidAttribute = field;
+ const operator = Object.keys(node)[0];
+ if (operator === 'AND' || operator === 'OR') {
+ _searchTree(node[operator][0]);
+ _searchTree(node[operator][1]);
+ } else {
+ const field = node[operator][0];
+ if (!field.startsWith('tags.') &&
+ !possibleAttributes[field] &&
+ !field.startsWith('x-amz-meta-')) {
+ invalidAttribute = field;
+ }
}
}
}
@@ -32,8 +55,8 @@ function _validateTree(whereClause, possibleAttributes) {
* validateSearchParams - validate value of ?search= in request
* @param {string} searchParams - value of search params in request
* which should be jsu sql where clause
- * For metadata: userMd.`x-amz-meta-color`=\"blue\"
- * For tags: tags.`x-amz-meta-color`=\"blue\"
+ * For metadata: x-amz-meta-color=\"blue\"
+ * For tags: tags.x-amz-meta-color=\"blue\"
* For any other attribute: `content-length`=5
* @return {undefined | error} undefined if validates or arsenal error if not
*/
@@ -43,9 +66,11 @@ function validateSearchParams(searchParams) {
ast = parser.parse(searchParams);
} catch (e) {
if (e) {
- return errors.InvalidArgument
- .customizeDescription('Invalid sql where clause ' +
- 'sent as search query');
+ return {
+ error: errors.InvalidArgument
+ .customizeDescription('Invalid sql where clause ' +
+ 'sent as search query'),
+ };
}
}
const possibleAttributes = objModel.getAttributes();
diff --git a/lib/api/apiUtils/object/getReplicationBackendDataLocator.js b/lib/api/apiUtils/object/getReplicationBackendDataLocator.js
new file mode 100644
index 0000000000..986fe3012d
--- /dev/null
+++ b/lib/api/apiUtils/object/getReplicationBackendDataLocator.js
@@ -0,0 +1,45 @@
+const { errors } = require('arsenal');
+
+/**
+ * getReplicationBackendDataLocator - compares given location constraint to
+ * replication backends
+ * @param {object} locationObj - object containing location information
+ * @param {string} locationObj.location - name of location constraint
+ * @param {string} locationObj.key - keyname of object in location constraint
+ * @param {string} locationObj.locationType - type of location constraint
+ * @param {object} replicationInfo - information about object replication
+ * @param {array} replicationInfo.backends - array containing information about
+ * each replication location
+ * @param {string} replicationInfo.backends[].site - name of replication
+ * location
+ * @param {string} replicationInfo.backends[].status - status of replication
+ * @param {string} replicationInfo.backends[].dataStoreVersionId - version id
+ * of object at replication location
+ * @return {object} contains an error if no replication backend matches, or
+ * a dataLocator object
+ */
+function getReplicationBackendDataLocator(locationObj, replicationInfo) {
+ const repBackendResult = {};
+ const locMatch = replicationInfo.backends.find(
+ backend => backend.site === locationObj.location);
+ if (!locMatch) {
+ repBackendResult.error = errors.InvalidLocationConstraint.
+ customizeDescription('Object is not replicated to location ' +
+ 'passed in location header');
+ return repBackendResult;
+ }
+ if (['PENDING', 'FAILED'].includes(locMatch.status)) {
+ repBackendResult.error = errors.NoSuchKey.customizeDescription(
+ `Object replication to specified backend is ${locMatch.status}`);
+ repBackendResult.status = locMatch.status;
+ return repBackendResult;
+ }
+ repBackendResult.dataLocator = [{
+ key: locationObj.key,
+ dataStoreName: locationObj.location,
+ dataStoreType: locationObj.locationType,
+ dataStoreVersionId: locMatch.dataStoreVersionId }];
+ return repBackendResult;
+}
+
+module.exports = getReplicationBackendDataLocator;
diff --git a/lib/api/apiUtils/object/locationHeaderCheck.js b/lib/api/apiUtils/object/locationHeaderCheck.js
new file mode 100644
index 0000000000..c5312d2a28
--- /dev/null
+++ b/lib/api/apiUtils/object/locationHeaderCheck.js
@@ -0,0 +1,37 @@
+const { errors } = require('arsenal');
+
+const { config } = require('../../../Config');
+
+/**
+ * locationHeaderCheck - compares 'x-amz-location-constraint' header
+ * to location constraints in config
+ * @param {object} headers - request headers
+ * @param {string} objectKey - key name of object
+ * @param {string} bucketName - name of bucket
+ * @return {undefined|Object} returns error, object, or undefined
+ * @return {string} return.location - name of location constraint
+ * @return {string} return.key - name of object at location constraint
+ * @return {string} return.locationType - type of location constraint
+ */
+function locationHeaderCheck(headers, objectKey, bucketName) {
+ const location = headers['x-amz-location-constraint'];
+ if (location) {
+ const validLocation = config.locationConstraints[location];
+ if (!validLocation) {
+ return errors.InvalidLocationConstraint.customizeDescription(
+ 'Invalid location constraint specified in header');
+ }
+ const bucketMatch = validLocation.details.bucketMatch;
+ const backendKey = bucketMatch ? objectKey :
+ `${bucketName}/${objectKey}`;
+ return {
+ location,
+ key: backendKey,
+ locationType: validLocation.type,
+ };
+ }
+ // no location header was passed
+ return undefined;
+}
+
+module.exports = locationHeaderCheck;
diff --git a/lib/api/bucketGet.js b/lib/api/bucketGet.js
index d76d816cd8..9112691807 100644
--- a/lib/api/bucketGet.js
+++ b/lib/api/bucketGet.js
@@ -281,9 +281,28 @@ function bucketGet(authInfo, request, log, callback) {
listParams.versionIdMarker = params['version-id-marker'] ?
versionIdUtils.decode(params['version-id-marker']) : undefined;
}
+ if (!requestMaxKeys) {
+ const emptyList = {
+ CommonPrefixes: [],
+ Contents: [],
+ Versions: [],
+ IsTruncated: false,
+ };
+ return handleResult(listParams, requestMaxKeys, encoding, authInfo,
+ bucketName, emptyList, corsHeaders, log, callback);
+ }
if (params.search !== undefined) {
- log.info('performaing search listing', { search: params.search });
- listParams.mongifiedSearch = parseWhere(validatedAst);
+ log.info('performing search listing', { search: params.search });
+ try {
+ listParams.mongifiedSearch = parseWhere(validatedAst);
+ } catch (err) {
+ log.debug(err.message, {
+ stack: err.stack,
+ });
+ return callback(errors.InvalidArgument
+ .customizeDescription('Invalid sql where clause ' +
+ 'sent as search query'));
+ }
}
return services.getObjectListing(bucketName, listParams, log,
(err, list) => {
diff --git a/lib/api/bucketPut.js b/lib/api/bucketPut.js
index 94e44a63fb..0b4c416c68 100644
--- a/lib/api/bucketPut.js
+++ b/lib/api/bucketPut.js
@@ -4,6 +4,8 @@ const { auth, errors } = require('arsenal');
const vault = require('../auth/vault');
const { createBucket } = require('./apiUtils/bucket/bucketCreation');
+const checkPreferredLocations =
+ require('./apiUtils/bucket/checkPreferredLocations');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const { config } = require('../Config');
const aclUtils = require('../utilities/aclUtils');
@@ -43,17 +45,9 @@ function checkLocationConstraint(request, locationConstraint, log) {
'setting us-east-1');
locationConstraintChecked = 'us-east-1';
}
-
- if (!locationConstraints[locationConstraintChecked]) {
- const errMsg = 'value of the location you are attempting to set - ' +
- `${locationConstraintChecked} - is not listed in the ` +
- 'locationConstraint config';
- log.trace(`locationConstraint is invalid - ${errMsg}`,
- { locationConstraint: locationConstraintChecked });
- return { error: errors.InvalidLocationConstraint.
- customizeDescription(errMsg) };
- }
- return { error: null, locationConstraint: locationConstraintChecked };
+ const err = checkPreferredLocations(
+ locationConstraintChecked, locationConstraints, log);
+ return { error: err, locationConstraint: locationConstraintChecked };
}
/*
@@ -131,6 +125,7 @@ function bucketPut(authInfo, request, log, callback) {
next => _parseXML(request, log, next),
// Check policies in Vault for a user.
(locationConstraint, next) => {
+ const location = locationConstraint.write || locationConstraint;
if (authInfo.isRequesterAnIAMUser()) {
const authParams = auth.server.extractParams(request, log, 's3',
request.query);
@@ -148,7 +143,7 @@ function bucketPut(authInfo, request, log, callback) {
sslEnabled: request.connection.encrypted,
apiMethod: 'bucketPut',
awsService: 's3',
- locationConstraint,
+ locationConstraint: location,
requesterInfo: authInfo,
signatureVersion: authParams.params.data.authType,
authType: authParams.params.data.signatureVersion,
@@ -162,7 +157,7 @@ function bucketPut(authInfo, request, log, callback) {
}
if (authorizationResults[0].isAllowed !== true) {
log.trace('authorization check failed for user',
- { locationConstraint });
+ { locationConstraint: location });
return next(errors.AccessDenied);
}
return next(null, locationConstraint);
diff --git a/lib/api/bucketPutLifecycle.js b/lib/api/bucketPutLifecycle.js
index 51e2a03c33..8d235cb2b2 100644
--- a/lib/api/bucketPutLifecycle.js
+++ b/lib/api/bucketPutLifecycle.js
@@ -46,9 +46,6 @@ function bucketPutLifecycle(authInfo, request, log, callback) {
}),
(bucket, lcConfig, next) => {
bucket.setLifecycleConfiguration(lcConfig);
- if (!bucket.getUid()) {
- bucket.generateUid();
- }
metadata.updateBucket(bucket.getName(), bucket, log, err =>
next(err, bucket));
},
diff --git a/lib/api/bucketPutReplication.js b/lib/api/bucketPutReplication.js
index 76879cca89..afec60b4ff 100644
--- a/lib/api/bucketPutReplication.js
+++ b/lib/api/bucketPutReplication.js
@@ -6,6 +6,8 @@ const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const { getReplicationConfiguration } =
require('./apiUtils/bucket/getReplicationConfiguration');
+const validateConfiguration =
+ require('./apiUtils/bucket/validateReplicationConfig');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
// The error response when a bucket does not have versioning 'Enabled'.
@@ -47,8 +49,18 @@ function bucketPutReplication(authInfo, request, log, callback) {
}),
// Set the replication configuration and update the bucket metadata.
(config, bucket, next) => {
+ // validate default read location is a site in replication
+ // configuration
+ if (!validateConfiguration(config, bucket)) {
+ log.error('Replication configuration does not contain the ' +
+ 'read location as a site');
+ return next(errors.InvalidLocationConstraint
+ .customizeDescription(
+ 'Replication configuration does not contain the read ' +
+ 'location as a site'));
+ }
bucket.setReplicationConfiguration(config);
- metadata.updateBucket(bucket.getName(), bucket, log, err =>
+ return metadata.updateBucket(bucket.getName(), bucket, log, err =>
next(err, bucket));
},
], (err, bucket) => {
diff --git a/lib/api/objectGet.js b/lib/api/objectGet.js
index 780af3e844..e9c214ea6b 100644
--- a/lib/api/objectGet.js
+++ b/lib/api/objectGet.js
@@ -9,10 +9,28 @@ const collectResponseHeaders = require('../utilities/collectResponseHeaders');
const { pushMetric } = require('../utapi/utilities');
const { getVersionIdResHeader } = require('./apiUtils/object/versioning');
const setPartRanges = require('./apiUtils/object/setPartRanges');
+const locationHeaderCheck =
+ require('./apiUtils/object/locationHeaderCheck');
+const getReplicationBackendDataLocator =
+ require('./apiUtils/object/getReplicationBackendDataLocator');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
+const { config } = require('../Config');
+const { locationConstraints } = config;
const validateHeaders = s3middleware.validateConditionalHeaders;
+function _retrieveDefaultRead(locationName, objectKey, bucketName) {
+ const readLocation = locationConstraints[locationName];
+ const bucketMatch = readLocation.details.bucketMatch;
+ const backendKey = bucketMatch ? objectKey :
+ `${bucketName}/${objectKey}`;
+ return {
+ location: locationName,
+ key: backendKey,
+ locationType: readLocation.type,
+ };
+}
+
/**
* GET Object - Get an object
* @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
@@ -27,6 +45,17 @@ function objectGet(authInfo, request, returnTagCount, log, callback) {
const bucketName = request.bucketName;
const objectKey = request.objectKey;
+ // returns name of location to get from and key if successful
+ const locCheckResult =
+ locationHeaderCheck(request.headers, objectKey, bucketName);
+ if (locCheckResult instanceof Error) {
+ log.trace('invalid location constraint to get from', {
+ location: request.headers['x-amz-location-constraint'],
+ error: locCheckResult,
+ });
+ return callback(locCheckResult);
+ }
+
const decodedVidResult = decodeVersionId(request.query);
if (decodedVidResult instanceof Error) {
log.trace('invalid versionId query', {
@@ -113,6 +142,29 @@ function objectGet(authInfo, request, returnTagCount, log, callback) {
// objMD.location is just a string
dataLocator = Array.isArray(objMD.location) ?
objMD.location : [{ key: objMD.location }];
+
+ const defReadLocation = bucket.getReadLocationConstraint();
+ const defWriteLocation = bucket.getLocationConstraint();
+ const defReadDataLocator = _retrieveDefaultRead(
+ defReadLocation, objectKey, bucketName);
+ let targetLocation = locCheckResult || null;
+ if (objMD.replicationInfo.backends.length > 0 &&
+ defReadLocation !== defWriteLocation) {
+ targetLocation = targetLocation || defReadDataLocator || null;
+ }
+
+ if (targetLocation) {
+ const repBackendResult = getReplicationBackendDataLocator(
+ targetLocation, objMD.replicationInfo);
+ if (repBackendResult.error) {
+ log.error('Error with location constraint header', {
+ error: repBackendResult.error,
+ status: repBackendResult.status,
+ });
+ return callback(repBackendResult.error, null, corsHeaders);
+ }
+ dataLocator = repBackendResult.dataLocator;
+ }
// if the data backend is azure, there will only ever be at
// most one item in the dataLocator array
if (dataLocator[0] && dataLocator[0].dataStoreType === 'azure') {
diff --git a/lib/data/external/AwsClient.js b/lib/data/external/AwsClient.js
index a6aa79f992..e91549ed81 100644
--- a/lib/data/external/AwsClient.js
+++ b/lib/data/external/AwsClient.js
@@ -219,14 +219,13 @@ class AwsClient {
metaHeadersTrimmed[headerKey] = metaHeaders[header];
}
});
- Object.assign(metaHeaders, metaHeadersTrimmed);
const awsBucket = this._awsBucketName;
const awsKey = this._createAwsKey(bucketName, key, this._bucketMatch);
const params = {
Bucket: awsBucket,
Key: awsKey,
WebsiteRedirectLocation: websiteRedirectHeader,
- Metadata: metaHeaders,
+ Metadata: metaHeadersTrimmed,
ContentType: contentType,
CacheControl: cacheControl,
ContentDisposition: contentDisposition,
diff --git a/lib/data/external/AzureClient.js b/lib/data/external/AzureClient.js
index 4de11c1211..830c2af670 100644
--- a/lib/data/external/AzureClient.js
+++ b/lib/data/external/AzureClient.js
@@ -27,12 +27,7 @@ class AzureClient {
if (!parsedUrl.port) {
parsedUrl.port = 80;
}
- const proxyParams = {
- protocol: parsedUrl.protocol,
- host: parsedUrl.hostname,
- proxyAuth: parsedUrl.auth,
- port: parsedUrl.port,
- };
+ const proxyParams = parsedUrl;
if (config.proxy.certs) {
Object.assign(proxyParams, config.proxy.certs);
}
diff --git a/lib/data/external/GCP/GcpApis/mpuHelper.js b/lib/data/external/GCP/GcpApis/mpuHelper.js
index c14e01a66d..fc9e5eea14 100644
--- a/lib/data/external/GCP/GcpApis/mpuHelper.js
+++ b/lib/data/external/GCP/GcpApis/mpuHelper.js
@@ -259,10 +259,11 @@ class MpuHelper {
err);
return next(err);
}
- return next(null, res.Metadata);
+ return next(null, res);
});
},
- (metadata, next) => {
+ (res, next) => {
+ const metadata = res.Metadata;
// copy the final object into the main bucket
const copyMetadata = Object.assign({}, metadata);
copyMetadata['scal-etag'] = aggregateETag;
@@ -272,6 +273,11 @@ class MpuHelper {
Metadata: copyMetadata,
MetadataDirective: 'REPLACE',
CopySource: `${params.MPU}/${copySource}`,
+ ContentType: res.ContentType,
+ CacheControl: res.CacheControl,
+ ContentEncoding: res.ContentEncoding,
+ ContentDisposition: res.ContentDisposition,
+ ContentLanguage: res.ContentLanguage,
};
logger.trace('copyParams', { copyParams });
this.retryCopy(copyParams, (err, res) => {
diff --git a/lib/management/configuration.js b/lib/management/configuration.js
index fd1459e6fc..f6b41ed23d 100644
--- a/lib/management/configuration.js
+++ b/lib/management/configuration.js
@@ -1,5 +1,5 @@
-const forge = require('node-forge');
const { URL } = require('url');
+const arsenal = require('arsenal');
const { buildAuthDataAccount } = require('../auth/in_memory/builder');
const _config = require('../Config').config;
@@ -11,6 +11,7 @@ const { getStoredCredentials } = require('./credentials');
const latestOverlayVersionKey = 'configuration/overlay-version';
const managementDatabaseName = 'PENSIEVE';
const replicatorEndpoint = 'zenko-cloudserver-replicator';
+const { decryptSecret } = arsenal.pensieve.credentialUtils;
function overlayHasVersion(overlay) {
return overlay && overlay.version !== undefined;
@@ -22,29 +23,18 @@ function remoteOverlayIsNewer(cachedOverlay, remoteOverlay) {
remoteOverlay.version > cachedOverlay.version));
}
-function decryptSecret(instanceCredentials, secret) {
- // XXX don't forget to use u.encryptionKeyVersion if present
- const privateKey = forge.pki.privateKeyFromPem(
- instanceCredentials.privateKey);
- const encryptedSecretKey = forge.util.decode64(secret);
- return privateKey.decrypt(encryptedSecretKey, 'RSA-OAEP', {
- md: forge.md.sha256.create(),
- });
-}
-
/**
* Updates the live {Config} object with the new overlay configuration.
*
* No-op if this version was already applied to the live {Config}.
*
- * @param {string} instanceId UUID of this deployment
* @param {object} newConf Overlay configuration to apply
* @param {werelogs~Logger} log Request-scoped logger
* @param {function} cb Function to call with (error, newConf)
*
* @returns {undefined}
*/
-function patchConfiguration(instanceId, newConf, log, cb) {
+function patchConfiguration(newConf, log, cb) {
if (newConf.version === undefined) {
log.debug('no remote configuration created yet');
return process.nextTick(cb, null, newConf);
@@ -56,12 +46,10 @@ function patchConfiguration(instanceId, newConf, log, cb) {
{ configurationVersion: newConf.version });
return process.nextTick(cb, null, newConf);
}
-
- return getStoredCredentials(instanceId, log, (err, creds) => {
+ return getStoredCredentials(log, (err, creds) => {
if (err) {
return cb(err);
}
-
const accounts = [];
if (newConf.users) {
newConf.users.forEach(u => {
@@ -293,4 +281,5 @@ module.exports = {
managementDatabaseName,
patchConfiguration,
saveConfigurationVersion,
+ remoteOverlayIsNewer,
};
diff --git a/lib/management/credentials.js b/lib/management/credentials.js
index 1e3cc718cf..b1305d0da4 100644
--- a/lib/management/credentials.js
+++ b/lib/management/credentials.js
@@ -13,14 +13,13 @@ const tokenRotationDelay = 3600 * 24 * 7 * 1000; // 7 days
*
* The token is used to authenticate stat posting and
*
- * @param {string} instanceId UUID of this deployment
* @param {werelogs~Logger} log Request-scoped logger to be able to trace
* initialization process
* @param {function} callback Function called with (error, result)
*
* @returns {undefined}
*/
-function getStoredCredentials(instanceId, log, callback) {
+function getStoredCredentials(log, callback) {
metadata.getObjectMD(managementDatabaseName, tokenConfigurationKey, {},
log, callback);
}
@@ -94,7 +93,7 @@ function confirmInstanceCredentials(
*/
function initManagementCredentials(
managementEndpoint, instanceId, log, callback) {
- getStoredCredentials(instanceId, log, (error, value) => {
+ getStoredCredentials(log, (error, value) => {
if (error) {
if (error.NoSuchKey) {
return issueCredentials(managementEndpoint, instanceId, log,
diff --git a/lib/management/index.js b/lib/management/index.js
index 20622a5f7c..de26d40648 100644
--- a/lib/management/index.js
+++ b/lib/management/index.js
@@ -84,7 +84,7 @@ function initManagement(log) {
if (err) {
return cb(err);
}
- return patchConfiguration(instanceId, conf, log,
+ return patchConfiguration(conf, log,
err => cb(err, instanceId, token));
}),
], (error, instanceId, token) => {
@@ -104,4 +104,5 @@ function initManagement(log) {
module.exports = {
initManagement,
+ initManagementDatabase,
};
diff --git a/lib/management/poll.js b/lib/management/poll.js
index 1c970c1b19..be8f2f5ce1 100644
--- a/lib/management/poll.js
+++ b/lib/management/poll.js
@@ -43,14 +43,14 @@ function applyConfigurationOverlay(
managementEndpoint, instanceId, remoteToken, log) {
async.waterfall([
wcb => loadCachedOverlay(log, wcb),
- (cachedOverlay, wcb) => patchConfiguration(instanceId, cachedOverlay,
+ (cachedOverlay, wcb) => patchConfiguration(cachedOverlay,
log, wcb),
(cachedOverlay, wcb) =>
loadRemoteOverlay(managementEndpoint, instanceId, remoteToken,
cachedOverlay, log, wcb),
(cachedOverlay, remoteOverlay, wcb) =>
saveConfigurationVersion(cachedOverlay, remoteOverlay, log, wcb),
- (remoteOverlay, wcb) => patchConfiguration(instanceId, remoteOverlay,
+ (remoteOverlay, wcb) => patchConfiguration(remoteOverlay,
log, wcb),
], error => {
if (error) {
diff --git a/lib/management/push.js b/lib/management/push.js
index 27fc8f2740..f9ebd7197d 100644
--- a/lib/management/push.js
+++ b/lib/management/push.js
@@ -113,7 +113,7 @@ function startWSManagementClient(pushEndpoint, instanceId, token) {
}
function applyAndSaveOverlay(overlay, log) {
- patchConfiguration(instanceId, overlay, log, err => {
+ patchConfiguration(overlay, log, err => {
if (err) {
log.error('could not apply pushed overlay', {
error: err,
diff --git a/lib/metadata/ModelVersion.md b/lib/metadata/ModelVersion.md
index 80d09f5118..190d65f695 100644
--- a/lib/metadata/ModelVersion.md
+++ b/lib/metadata/ModelVersion.md
@@ -79,16 +79,29 @@ this._lifecycleConfiguration = lifecycleConfiguration || null;
### Usage
-Used to store the bucket lifecycle configuration info
+Used to store the bucket lifecycle configuration info.
+Backwards compatible: a uid is added to the bucket if one does not exist;
+otherwise, the existing uid is used.
## Model version 7
### Properties Added
```javascript
-this._uid = uid || undefined;
+this._uid = uid || uuid();
```
### Usage
Used to set a unique identifier on a bucket
+
+## Model version 8
+
+### Properties Added
+
+```javascript
+this._readLocationConstraint = readLocationConstraint || null;
+```
+
+### Usage
+
+Used to store the default read location of the bucket
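+
+For illustration, the new field is read back via `getReadLocationConstraint()`
+(a sketch based on `validateReplicationConfig.js` in this change):
+
+```javascript
+// read and write locations may now differ per bucket
+const writeLocation = bucket.getLocationConstraint();
+const readLocation = bucket.getReadLocationConstraint();
+if (writeLocation !== readLocation) {
+    // the replication configuration must list readLocation as a site
+}
+```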
diff --git a/lib/routes/routeBackbeat.js b/lib/routes/routeBackbeat.js
index fc248ab106..0ea52675dc 100644
--- a/lib/routes/routeBackbeat.js
+++ b/lib/routes/routeBackbeat.js
@@ -424,8 +424,8 @@ function initiateMultipartUpload(request, response, log, callback) {
const contentDisposition = request.headers['x-scal-content-disposition'];
const contentEncoding = request.headers['x-scal-content-encoding'];
const metaHeaders = {
- 'scal-replication-status': 'REPLICA',
- 'scal-version-id': sourceVersionId,
+ 'x-amz-meta-scal-replication-status': 'REPLICA',
+ 'x-amz-meta-scal-version-id': sourceVersionId,
};
if (userMetadata !== undefined) {
try {
diff --git a/lib/services.js b/lib/services.js
index 29a5943d48..8292b468f6 100644
--- a/lib/services.js
+++ b/lib/services.js
@@ -101,6 +101,7 @@ const services = {
const md = new ObjectMD();
// This should be object creator's canonical ID.
md.setOwnerId(authInfo.getCanonicalID())
+ .setKey(objectKey)
.setCacheControl(cacheControl)
.setContentDisposition(contentDisposition)
.setContentEncoding(contentEncoding)
diff --git a/lib/utapi/utapi.js b/lib/utapi/utapi.js
index 7615a9165b..3b0a700edd 100644
--- a/lib/utapi/utapi.js
+++ b/lib/utapi/utapi.js
@@ -3,7 +3,8 @@ const _config = require('../Config').config;
// start utapi server
if (_config.utapi) {
- const fullConfig = Object.assign({}, _config.utapi);
+ const fullConfig = Object.assign({}, _config.utapi,
+ { redis: _config.redis });
if (_config.vaultd) {
Object.assign(fullConfig, { vaultd: _config.vaultd });
}
diff --git a/lib/utapi/utapiReplay.js b/lib/utapi/utapiReplay.js
index 3e20055b8c..8d84d4bf67 100644
--- a/lib/utapi/utapiReplay.js
+++ b/lib/utapi/utapiReplay.js
@@ -1,6 +1,7 @@
const UtapiReplay = require('utapi').UtapiReplay;
const _config = require('../Config').config;
-// start utapi server
-const replay = new UtapiReplay(_config.utapi);
+const utapiConfig = _config.utapi &&
+ Object.assign({}, _config.utapi, { redis: _config.redis });
+const replay = new UtapiReplay(utapiConfig); // start utapi server
replay.start();
diff --git a/lib/utapi/utilities.js b/lib/utapi/utilities.js
index e7b56c9d1c..629e840a2f 100644
--- a/lib/utapi/utilities.js
+++ b/lib/utapi/utilities.js
@@ -5,8 +5,11 @@ const { auth } = require('arsenal');
const { UtapiClient } = require('utapi');
const logger = require('../utilities/logger');
const _config = require('../Config').config;
-// setup utapi client
-const utapi = new UtapiClient(_config.utapi);
+
+const utapiConfig = _config.utapi &&
+ Object.assign({}, _config.utapi, { redis: _config.redis });
+
+const utapi = new UtapiClient(utapiConfig); // setup utapi client
function _listMetrics(host,
port,
diff --git a/lib/utilities/reportHandler.js b/lib/utilities/reportHandler.js
index 8a3388b78d..2a9992ec80 100644
--- a/lib/utilities/reportHandler.js
+++ b/lib/utilities/reportHandler.js
@@ -1,9 +1,8 @@
const fs = require('fs');
const os = require('os');
-const { errors, ipCheck } = require('arsenal');
+const { errors, ipCheck, backbeat } = require('arsenal');
const async = require('async');
-const request = require('request');
const config = require('../Config').config;
const data = require('../data/wrapper');
@@ -76,11 +75,30 @@ function getSystemStats() {
function getCRRStats(log, cb) {
log.debug('getting CRR stats', { method: 'getCRRStats' });
- // TODO: Reuse metrics code from Backbeat by moving it to Arsenal instead of
- // making an HTTP request to the Backbeat metrics route.
- const { host, port } = config.backbeat;
- const params = { url: `http://${host}:${port}/_/metrics/crr/all` };
- return request.get(params, (err, res) => {
+ const { replicationEndpoints, localCache: redis } = config;
+ if (!redis) {
+ log.debug('redis connection not configured', { method: 'getCRRStats' });
+ return process.nextTick(() => cb(null, {}));
+ }
+ const sites = replicationEndpoints.map(endpoint => endpoint.site);
+ const backbeatMetrics = new backbeat.Metrics({
+ redisConfig: redis,
+ validSites: sites,
+ internalStart: Date.now() - 900000, // 15 minutes ago.
+ }, log);
+ const redisKeys = {
+ ops: 'bb:crr:ops',
+ bytes: 'bb:crr:bytes',
+ opsDone: 'bb:crr:opsdone',
+ bytesDone: 'bb:crr:bytesdone',
+ failedCRR: 'bb:crr:failed',
+ };
+ const routes = backbeat.routes(redisKeys, sites);
+ const details = routes.find(route =>
+ route.category === 'metrics' && route.type === 'all');
+ // Add `site` as we're not using Backbeat's request parser for the API's URI
+ details.site = 'all';
+ return backbeatMetrics.getAllMetrics(details, (err, res) => {
if (err) {
log.error('failed to get CRR stats', {
method: 'getCRRStats',
@@ -88,7 +106,7 @@ function getCRRStats(log, cb) {
});
return cb(null, {});
}
- const { completions, backlog, throughput } = res.body;
+ const { completions, backlog, throughput } = res;
if (!completions || !backlog || !throughput) {
log.error('could not get metrics from backbeat', {
method: 'getCRRStats',
@@ -98,19 +116,19 @@ function getCRRStats(log, cb) {
const stats = {
completions: {
count: completions.results.count,
- size: parseFloat(completions.results.size) * 1000,
+ size: completions.results.size,
},
backlog: {
count: backlog.results.count,
- size: parseFloat(backlog.results.size) * 1000,
+ size: backlog.results.size,
},
throughput: {
- count: parseFloat(throughput.results.count) * 1000,
- size: parseFloat(throughput.results.size) * 1000,
+ count: parseFloat(throughput.results.count),
+ size: parseFloat(throughput.results.size),
},
};
return cb(null, stats);
- }).json();
+ });
}
/**
diff --git a/package-lock.json b/package-lock.json
index 7b1927b0b4..a79328dd23 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,13 +1,13 @@
{
"name": "s3",
- "version": "7.4.0",
+ "version": "1.0.0-zenko",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"JSONStream": {
- "version": "1.3.2",
- "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.2.tgz",
- "integrity": "sha1-wQI3G27Dp887hHygDCC7D85Mbeo=",
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.3.tgz",
+ "integrity": "sha512-3Sp6WZZ/lXl+nTDoGpGWHEpTnnC6X5fnkolYZR6nwIfzbxxvA8utPWe1gCt7i0m9uVGsSz2IS8K8mJ7HmlduMg==",
"requires": {
"jsonparse": "1.3.1",
"through": "2.3.8"
@@ -135,9 +135,9 @@
"integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw=="
},
"are-we-there-yet": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.4.tgz",
- "integrity": "sha1-u13KOCu5TwXhUZQ3PRb9O6HKEQ0=",
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz",
+ "integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==",
"requires": {
"delegates": "1.0.0",
"readable-stream": "2.3.6"
@@ -223,9 +223,9 @@
"dev": true
},
"arsenal": {
- "version": "github:scality/Arsenal#c8a714864519f9012571e0e4abdcbdd55ca72126",
+ "version": "github:scality/Arsenal#5bf7fef53ca0f409dbba1e19095a25bd3862b0f8",
"requires": {
- "JSONStream": "1.3.2",
+ "JSONStream": "1.3.3",
"ajv": "4.10.0",
"async": "2.1.5",
"bson": "2.0.4",
@@ -236,15 +236,15 @@
"ipaddr.js": "1.2.0",
"joi": "10.6.0",
"level": "1.6.0",
- "level-sublevel": "6.6.1",
- "mongodb": "3.0.7",
+ "level-sublevel": "6.6.2",
+ "mongodb": "3.0.8",
"node-forge": "0.7.5",
"simple-glob": "0.1.0",
"socket.io": "1.7.4",
"socket.io-client": "1.7.4",
"utf8": "2.1.2",
"uuid": "3.2.1",
- "werelogs": "github:scality/werelogs#224e312f8644e40da96aa4459ded84c5ac262376",
+ "werelogs": "github:scality/werelogs#0f97dc5b1d61cd38d618cf215be3c531c0e4e7d5",
"xml2js": "0.4.19"
},
"dependencies": {
@@ -257,11 +257,11 @@
}
},
"mongodb": {
- "version": "3.0.7",
- "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.0.7.tgz",
- "integrity": "sha512-n/14kMJEoARXz1qhpNPhUocqy+z5130jhqgEIX1Tsl8UVpHrndQ8et+VmgC4yPK/I8Tcgc93JEMQCHTekBUnNA==",
+ "version": "3.0.8",
+ "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.0.8.tgz",
+ "integrity": "sha512-mj7yIUyAr9xnO2ev8pcVJ9uX7gSum5LLs1qIFoWLxA5Il50+jcojKtaO1/TbexsScZ9Poz00Pc3b86GiSqJ7WA==",
"requires": {
- "mongodb-core": "3.0.7"
+ "mongodb-core": "3.0.8"
}
}
}
@@ -710,10 +710,10 @@
"integrity": "sha512-e/GPy6CE0xL7MOYYRMIEwPGKF21WNaQdPIpV0YvaQDoR7oc47KUZ8c2P/TlRJVQP8RZ4CEsArGBC1NbkCRvl1w=="
},
"bucketclient": {
- "version": "github:scality/bucketclient#4fa5d0ac79c9fdc193e216e615cd2a79355dfe93",
+ "version": "github:scality/bucketclient#dbaf8cf911f447640512d0ac86c6974d735708fe",
"requires": {
- "arsenal": "github:scality/Arsenal#c8a714864519f9012571e0e4abdcbdd55ca72126",
- "werelogs": "github:scality/werelogs#224e312f8644e40da96aa4459ded84c5ac262376"
+ "arsenal": "github:scality/Arsenal#5bf7fef53ca0f409dbba1e19095a25bd3862b0f8",
+ "werelogs": "github:scality/werelogs#0f97dc5b1d61cd38d618cf215be3c531c0e4e7d5"
}
},
"buffer": {
@@ -733,11 +733,30 @@
}
}
},
+ "buffer-alloc": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/buffer-alloc/-/buffer-alloc-1.1.0.tgz",
+ "integrity": "sha1-BVFNM78WVtNUDGhPZbEgLpDsowM=",
+ "requires": {
+ "buffer-alloc-unsafe": "0.1.1",
+ "buffer-fill": "0.1.1"
+ }
+ },
+ "buffer-alloc-unsafe": {
+ "version": "0.1.1",
+ "resolved": "https://registry.npmjs.org/buffer-alloc-unsafe/-/buffer-alloc-unsafe-0.1.1.tgz",
+ "integrity": "sha1-/+H2dVHdBVc33iUzN7/oU9+rGmo="
+ },
"buffer-equal-constant-time": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
"integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk="
},
+ "buffer-fill": {
+ "version": "0.1.1",
+ "resolved": "https://registry.npmjs.org/buffer-fill/-/buffer-fill-0.1.1.tgz",
+ "integrity": "sha512-YgBMBzdRLEfgxJIGu2wrvI2E03tMCFU1p7d1KhB4BOoMN0VxmTFjSyN5JtKt9z8Z9JajMHruI6SE25W96wNv7Q=="
+ },
"buffer-from": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.0.0.tgz",
@@ -804,12 +823,12 @@
"integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw="
},
"cdmiclient": {
- "version": "github:scality/cdmiclient#a38fe9db658caba82590582b711dab7e0572a028",
+ "version": "github:scality/cdmiclient#e86b46b2dfa52cd4d701bb0d8734300e47524c49",
"optional": true,
"requires": {
- "arsenal": "github:scality/Arsenal#c8a714864519f9012571e0e4abdcbdd55ca72126",
+ "arsenal": "github:scality/Arsenal#5bf7fef53ca0f409dbba1e19095a25bd3862b0f8",
"async": "1.4.2",
- "werelogs": "github:scality/werelogs#224e312f8644e40da96aa4459ded84c5ac262376"
+ "werelogs": "github:scality/werelogs#0f97dc5b1d61cd38d618cf215be3c531c0e4e7d5"
},
"dependencies": {
"async": {
@@ -1104,9 +1123,9 @@
}
},
"deep-extend": {
- "version": "0.4.2",
- "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.4.2.tgz",
- "integrity": "sha1-SLaZwn4zS/ifEIkr5DL25MfTSn8="
+ "version": "0.5.1",
+ "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.5.1.tgz",
+ "integrity": "sha512-N8vBdOa+DF7zkRrDCsaOXoCs/E2fJfx9B9MrKnnSiHNh4ws7eSys6YQE4KvT1cecKmOASYQBhbKjeuDD9lT81w=="
},
"deep-is": {
"version": "0.1.3",
@@ -1184,12 +1203,6 @@
"resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz",
"integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o="
},
- "depd": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz",
- "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=",
- "dev": true
- },
"detect-indent": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-4.0.0.tgz",
@@ -1281,7 +1294,19 @@
"base64id": "1.0.0",
"cookie": "0.3.1",
"debug": "2.3.3",
- "engine.io-parser": "1.3.2"
+ "engine.io-parser": "1.3.2",
+ "ws": "1.1.5"
+ },
+ "dependencies": {
+ "ws": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/ws/-/ws-1.1.5.tgz",
+ "integrity": "sha512-o3KqipXNUdS7wpQzBHSe180lBGO60SoK0yVo3CYJgb2MkobuWuBX6dhkYP5ORCLd55y+SaflMOV5fqAB53ux4w==",
+ "requires": {
+ "options": "0.0.6",
+ "ultron": "1.0.2"
+ }
+ }
}
},
"engine.io-client": {
@@ -1298,6 +1323,7 @@
"parsejson": "0.0.3",
"parseqs": "0.0.5",
"parseuri": "0.0.5",
+ "ws": "1.1.5",
"xmlhttprequest-ssl": "1.5.3",
"yeast": "0.1.2"
},
@@ -1306,6 +1332,15 @@
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz",
"integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY="
+ },
+ "ws": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/ws/-/ws-1.1.5.tgz",
+ "integrity": "sha512-o3KqipXNUdS7wpQzBHSe180lBGO60SoK0yVo3CYJgb2MkobuWuBX6dhkYP5ORCLd55y+SaflMOV5fqAB53ux4w==",
+ "requires": {
+ "options": "0.0.6",
+ "ultron": "1.0.2"
+ }
}
}
},
@@ -1543,7 +1578,7 @@
"dev": true
},
"eslint-config-scality": {
- "version": "github:scality/Guidelines#561ab2fef1da33378f1948d6e7a65d8ba74d0136",
+ "version": "github:scality/Guidelines#60403bed422b5a5dfbf8a2152ae5c24ba5db5ede",
"dev": true,
"requires": {
"commander": "1.3.2",
@@ -1629,9 +1664,9 @@
"dev": true
},
"expand-template": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-1.1.0.tgz",
- "integrity": "sha512-kkjwkMqj0h4w/sb32ERCDxCQkREMCAgS39DscDnSwDsbxnwwM1BTZySdC3Bn1lhY7vL08n9GoO/fVTynjDgRyQ=="
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-1.1.1.tgz",
+ "integrity": "sha512-cebqLtV8KOZfw0UI8TEFWxtczxxC1jvyUvx6H4fyp1K1FN7A4Q+uggVUlOsI1K8AGU0rwOGqP8nCapdrw8CYQg=="
},
"extend": {
"version": "1.2.1",
@@ -1778,17 +1813,16 @@
"mime-types": "2.1.18"
}
},
- "fresh": {
- "version": "0.5.2",
- "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
- "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=",
- "dev": true
- },
"from": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/from/-/from-0.1.7.tgz",
"integrity": "sha1-g8YK/Fi5xWmXAH7Rp2izqzA6RP4="
},
+ "fs-constants": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
+ "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="
+ },
"fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
@@ -1812,7 +1846,7 @@
"signal-exit": "3.0.2",
"string-width": "1.0.2",
"strip-ansi": "3.0.1",
- "wide-align": "1.1.2"
+ "wide-align": "1.1.3"
}
},
"gcp-metadata": {
@@ -2795,15 +2829,16 @@
}
},
"level-sublevel": {
- "version": "6.6.1",
- "resolved": "https://registry.npmjs.org/level-sublevel/-/level-sublevel-6.6.1.tgz",
- "integrity": "sha1-+ad/dSGrcKj46S7VbyGjx4hqRIU=",
+ "version": "6.6.2",
+ "resolved": "https://registry.npmjs.org/level-sublevel/-/level-sublevel-6.6.2.tgz",
+ "integrity": "sha512-+hptqmFYPKFju9QG4F6scvx3ZXkhrSmmhYui+hPzRn/jiC3DJ6VNZRKsIhGMpeajVBWfRV7XiysUThrJ/7PgXQ==",
"requires": {
"bytewise": "1.1.0",
"levelup": "0.19.1",
"ltgt": "2.1.3",
+ "pull-defer": "0.2.2",
"pull-level": "2.0.4",
- "pull-stream": "3.6.7",
+ "pull-stream": "3.6.8",
"typewiselite": "1.0.0",
"xtend": "4.0.1"
},
@@ -2892,7 +2927,7 @@
"bindings": "1.2.1",
"fast-future": "1.0.2",
"nan": "2.5.1",
- "prebuild-install": "2.5.1"
+ "prebuild-install": "2.5.3"
},
"dependencies": {
"bindings": {
@@ -2963,6 +2998,12 @@
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.5.tgz",
"integrity": "sha512-svL3uiZf1RwhH+cWrfZn3A4+U58wbP0tGVTLQPbjplZxZ8ROD9VLuNgsRniTlLe7OlSqR79RUehXgpBW/s0IQw=="
},
+ "lodash.assign": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-4.2.0.tgz",
+ "integrity": "sha1-DZnzzNem0mHRm9rrkkUAXShYCOc=",
+ "dev": true
+ },
"lodash.isstring": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz",
@@ -3068,18 +3109,6 @@
"integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=",
"dev": true
},
- "merge-descriptors": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz",
- "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=",
- "dev": true
- },
- "methods": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
- "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=",
- "dev": true
- },
"mime": {
"version": "2.2.2",
"resolved": "https://registry.npmjs.org/mime/-/mime-2.2.2.tgz",
@@ -3261,9 +3290,9 @@
}
},
"mongodb-core": {
- "version": "3.0.7",
- "resolved": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-3.0.7.tgz",
- "integrity": "sha512-z6YufO7s40wLiv2ssFshqoLS4+Kf+huhHq6KZ7gDArsKNzXYjAwTMnhEIJ9GQ8fIfBGs5tBLNPfbIDoCKGPmOw==",
+ "version": "3.0.8",
+ "resolved": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-3.0.8.tgz",
+ "integrity": "sha512-dFxfhH9N7ohuQnINyIl6dqEF8sYOE0WKuymrFf3L3cipJNrx+S8rAbNOTwa00/fuJCjBMJNFsaA+R2N16//UIw==",
"requires": {
"bson": "1.0.6",
"require_optional": "1.0.1"
@@ -3297,21 +3326,15 @@
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz",
"integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk="
},
- "net": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/net/-/net-1.0.2.tgz",
- "integrity": "sha1-0XV+yaf7I3HYPPR1XOPifhCCk4g=",
- "dev": true
- },
"next-tick": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.0.0.tgz",
"integrity": "sha1-yobR/ogoFpsBICCOPchCS524NCw="
},
"node-abi": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-2.3.0.tgz",
- "integrity": "sha512-zwm6vU3SsVgw3e9fu48JBaRBCJGIvAgysDsqtf5+vEexFE71bEOtaMWb5zr/zODZNzTPtQlqUUpC79k68Hspow==",
+ "version": "2.4.1",
+ "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-2.4.1.tgz",
+ "integrity": "sha512-pUlswqpHQ7zGPI9lGjZ4XDNIEUDbHxsltfIRb7dTnYdhgHWHOcB0MLZKLoCz6UMcGzSPG5wGl1HODZVQAUsH6w==",
"requires": {
"semver": "5.4.1"
}
@@ -3322,20 +3345,13 @@
"integrity": "sha512-MmbQJ2MTESTjt3Gi/3yG1wGpIMhUfcIypUCGtTizFR9IiccFwxSpfp0vtIZlkFclEqERemxfnSdZEMR9VqqEFQ=="
},
"node-mocks-http": {
- "version": "1.5.8",
- "resolved": "https://registry.npmjs.org/node-mocks-http/-/node-mocks-http-1.5.8.tgz",
- "integrity": "sha512-M0hzy8zR8vPy5WQcfRnFHZ0Rzi+Ru3P7QLa4NZQAnxbFHyiwLgzseuvfnItCfa15Q2sWJNNxjNssjpJiQ+wocA==",
+ "version": "1.5.2",
+ "resolved": "https://registry.npmjs.org/node-mocks-http/-/node-mocks-http-1.5.2.tgz",
+ "integrity": "sha1-U3jG7LwOB3IZqPCYbywZR1sq48M=",
"dev": true,
"requires": {
- "accepts": "1.3.3",
- "depd": "1.1.2",
- "fresh": "0.5.2",
- "merge-descriptors": "1.0.1",
- "methods": "1.1.2",
+ "lodash.assign": "4.2.0",
"mime": "1.6.0",
- "net": "1.0.2",
- "parseurl": "1.3.2",
- "range-parser": "1.2.0",
"type-is": "1.6.16"
},
"dependencies": {
@@ -3415,7 +3431,7 @@
"resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz",
"integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==",
"requires": {
- "are-we-there-yet": "1.1.4",
+ "are-we-there-yet": "1.1.5",
"console-control-strings": "1.1.0",
"gauge": "2.7.4",
"set-blocking": "2.0.0"
@@ -3498,6 +3514,11 @@
"wordwrap": "1.0.0"
}
},
+ "options": {
+ "version": "0.0.6",
+ "resolved": "https://registry.npmjs.org/options/-/options-0.0.6.tgz",
+ "integrity": "sha1-7CLTEoBrtT5zF3Pnza788cZDEo8="
+ },
"os-homedir": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz",
@@ -3535,12 +3556,6 @@
"better-assert": "1.0.2"
}
},
- "parseurl": {
- "version": "1.3.2",
- "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz",
- "integrity": "sha1-/CidTtiZMRlGDBViUyYs3I3mW/M=",
- "dev": true
- },
"path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
@@ -3614,23 +3629,23 @@
"dev": true
},
"prebuild-install": {
- "version": "2.5.1",
- "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-2.5.1.tgz",
- "integrity": "sha512-3DX9L6pzwc1m1ksMkW3Ky2WLgPQUBiySOfXVl3WZyAeJSyJb4wtoH9OmeRGcubAWsMlLiL8BTHbwfm/jPQE9Ag==",
+ "version": "2.5.3",
+ "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-2.5.3.tgz",
+ "integrity": "sha512-/rI36cN2g7vDQnKWN8Uzupi++KjyqS9iS+/fpwG4Ea8d0Pip0PQ5bshUNzVwt+/D2MRfhVAplYMMvWLqWrCF/g==",
"requires": {
"detect-libc": "1.0.3",
- "expand-template": "1.1.0",
+ "expand-template": "1.1.1",
"github-from-package": "0.0.0",
"minimist": "1.2.0",
"mkdirp": "0.5.1",
- "node-abi": "2.3.0",
+ "node-abi": "2.4.1",
"noop-logger": "0.1.1",
"npmlog": "4.1.2",
"os-homedir": "1.0.2",
"pump": "2.0.1",
- "rc": "1.2.6",
- "simple-get": "2.7.0",
- "tar-fs": "1.16.0",
+ "rc": "1.2.7",
+ "simple-get": "2.8.1",
+ "tar-fs": "1.16.2",
"tunnel-agent": "0.6.0",
"which-pm-runs": "1.0.0"
}
@@ -3683,6 +3698,11 @@
"resolved": "https://registry.npmjs.org/pull-cat/-/pull-cat-1.1.11.tgz",
"integrity": "sha1-tkLdElXaN2pwa220+pYvX9t0wxs="
},
+ "pull-defer": {
+ "version": "0.2.2",
+ "resolved": "https://registry.npmjs.org/pull-defer/-/pull-defer-0.2.2.tgz",
+ "integrity": "sha1-CIew/7MK8ypW2+z6csFnInHwexM="
+ },
"pull-level": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/pull-level/-/pull-level-2.0.4.tgz",
@@ -3692,7 +3712,7 @@
"pull-cat": "1.1.11",
"pull-live": "1.0.1",
"pull-pushable": "2.2.0",
- "pull-stream": "3.6.7",
+ "pull-stream": "3.6.8",
"pull-window": "2.1.4",
"stream-to-pull-stream": "1.7.2"
}
@@ -3703,7 +3723,7 @@
"integrity": "sha1-pOzuAeMwFV6RJLu89HYfIbOPUfU=",
"requires": {
"pull-cat": "1.1.11",
- "pull-stream": "3.6.7"
+ "pull-stream": "3.6.8"
}
},
"pull-pushable": {
@@ -3712,9 +3732,9 @@
"integrity": "sha1-Xy867UethpGfAbEqLpnW8b13ZYE="
},
"pull-stream": {
- "version": "3.6.7",
- "resolved": "https://registry.npmjs.org/pull-stream/-/pull-stream-3.6.7.tgz",
- "integrity": "sha512-XdE2/o1I2lK7A+sbbA/HjYnd5Xk7wL5CwAKzqHIgcBsluDb0LiKHNTl1K0it3/RKPshQljLf4kl1aJ12YsCCGQ=="
+ "version": "3.6.8",
+ "resolved": "https://registry.npmjs.org/pull-stream/-/pull-stream-3.6.8.tgz",
+ "integrity": "sha512-wQUIptQBcM0rFsUhZoEpOT3vUn73DtTGVq3NQ86c4T7iMOSprDzeKWYq2ksXnbwiuExTKvt+8G9fzNLFQuiO+A=="
},
"pull-window": {
"version": "2.1.4",
@@ -3748,18 +3768,12 @@
"resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz",
"integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA="
},
- "range-parser": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz",
- "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=",
- "dev": true
- },
"rc": {
- "version": "1.2.6",
- "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.6.tgz",
- "integrity": "sha1-6xiYnG1PTxYsOZ953dKfODVWgJI=",
+ "version": "1.2.7",
+ "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.7.tgz",
+ "integrity": "sha512-LdLD8xD4zzLsAT5xyushXDNscEjB7+2ulnl8+r1pnESlYtlJtVSoCMBGr30eDRJ3+2Gq89jK9P9e4tCEH1+ywA==",
"requires": {
- "deep-extend": "0.4.2",
+ "deep-extend": "0.5.1",
"ini": "1.3.5",
"minimist": "1.2.0",
"strip-json-comments": "2.0.1"
@@ -3977,7 +3991,7 @@
"dev": true
},
"s3blaster": {
- "version": "github:scality/s3blaster#6417f8379201c8d25558b25a26cea5c3ef1c5d78",
+ "version": "github:scality/s3blaster#34c585b5eedb08a87888e2573dce763ab015ce34",
"dev": true,
"requires": {
"async": "1.4.2",
@@ -4005,9 +4019,9 @@
"integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg=="
},
"safe-json-stringify": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.1.0.tgz",
- "integrity": "sha512-EzBtUaFH9bHYPc69wqjp0efJI/DPNHdFbGE3uIMn4sVbO0zx8vZ8cG4WKxQfOpUOKsQyGBiT2mTqnCw+6nLswA=="
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz",
+ "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg=="
},
"sax": {
"version": "1.2.4",
@@ -4070,9 +4084,9 @@
"integrity": "sha1-c0TLuLbib7J9ZrL8hvn21Zl1IcY="
},
"simple-get": {
- "version": "2.7.0",
- "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-2.7.0.tgz",
- "integrity": "sha512-RkE9rGPHcxYZ/baYmgJtOSM63vH0Vyq+ma5TijBcLla41SWlh8t6XYIGMR/oeZcmr+/G8k+zrClkkVrtnQ0esg==",
+ "version": "2.8.1",
+ "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-2.8.1.tgz",
+ "integrity": "sha512-lSSHRSw3mQNUGPAYRqo7xy9dhKmxFXIjLjp4KHpf99GEH2VH7C3AM+Qfx6du6jhfUi6Vm7XnbEVEf7Wb6N8jRw==",
"requires": {
"decompress-response": "3.3.0",
"once": "1.4.0",
@@ -4240,9 +4254,9 @@
"dev": true
},
"sproxydclient": {
- "version": "github:scality/sproxydclient#3d62354076ef5c1ee38fa064ee582923fa416118",
+ "version": "github:scality/sproxydclient#741d1c13b5dd3aa09ad61e650158bc22c30fb32c",
"requires": {
- "werelogs": "github:scality/werelogs#224e312f8644e40da96aa4459ded84c5ac262376"
+ "werelogs": "github:scality/werelogs#0f97dc5b1d61cd38d618cf215be3c531c0e4e7d5"
}
},
"sql-where-parser": {
@@ -4303,7 +4317,7 @@
"integrity": "sha1-dXYJrhzr0zx0MtSvvjH/eGULnd4=",
"requires": {
"looper": "3.0.0",
- "pull-stream": "3.6.7"
+ "pull-stream": "3.6.8"
},
"dependencies": {
"looper": {
@@ -4420,14 +4434,14 @@
}
},
"tar-fs": {
- "version": "1.16.0",
- "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-1.16.0.tgz",
- "integrity": "sha512-I9rb6v7mjWLtOfCau9eH5L7sLJyU2BnxtEZRQ5Mt+eRKmf1F0ohXmT/Jc3fr52kDvjJ/HV5MH3soQfPL5bQ0Yg==",
+ "version": "1.16.2",
+ "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-1.16.2.tgz",
+ "integrity": "sha512-LdknWjPEiZC1nOBwhv0JBzfJBGPJar08dZg2rwZe0ZTLQoRGEzgrl7vF3qUEkCHpI/wN9e7RyCuDhMsJUCLPPQ==",
"requires": {
"chownr": "1.0.1",
"mkdirp": "0.5.1",
"pump": "1.0.3",
- "tar-stream": "1.5.5"
+ "tar-stream": "1.6.1"
},
"dependencies": {
"pump": {
@@ -4442,13 +4456,16 @@
}
},
"tar-stream": {
- "version": "1.5.5",
- "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.5.5.tgz",
- "integrity": "sha512-mQdgLPc/Vjfr3VWqWbfxW8yQNiJCbAZ+Gf6GDu1Cy0bdb33ofyiNGBtAY96jHFhDuivCwgW1H9DgTON+INiXgg==",
+ "version": "1.6.1",
+ "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.1.tgz",
+ "integrity": "sha512-IFLM5wp3QrJODQFPm6/to3LJZrONdBY/otxcvDIQzu217zKye6yVR3hhi9lAjrC2Z+m/j5oDxMPb1qcd8cIvpA==",
"requires": {
"bl": "1.2.2",
+ "buffer-alloc": "1.1.0",
"end-of-stream": "1.4.1",
+ "fs-constants": "1.0.0",
"readable-stream": "2.3.6",
+ "to-buffer": "1.1.1",
"xtend": "4.0.1"
},
"dependencies": {
@@ -4505,6 +4522,11 @@
"resolved": "https://registry.npmjs.org/to-array/-/to-array-0.1.4.tgz",
"integrity": "sha1-F+bBH3PdTz10zaek/zI46a2b+JA="
},
+ "to-buffer": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.1.1.tgz",
+ "integrity": "sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg=="
+ },
"to-fast-properties": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-1.0.3.tgz",
@@ -4639,6 +4661,11 @@
"dev": true,
"optional": true
},
+ "ultron": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz",
+ "integrity": "sha1-rOEWq1V80Zc4ak6I9GhTeMiy5Po="
+ },
"underscore": {
"version": "1.8.3",
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz",
@@ -4663,14 +4690,14 @@
}
},
"utapi": {
- "version": "github:scality/utapi#b27c57bcfc0536050d10e9799a76289eca7d0b09",
+ "version": "github:scality/utapi#2970d13b3fea0ab592afe477133308280b80e984",
"requires": {
- "arsenal": "github:scality/Arsenal#c8a714864519f9012571e0e4abdcbdd55ca72126",
+ "arsenal": "github:scality/Arsenal#5bf7fef53ca0f409dbba1e19095a25bd3862b0f8",
"async": "2.5.0",
"ioredis": "2.4.0",
"node-schedule": "1.2.0",
- "vaultclient": "github:scality/vaultclient#891b90720f14ee11494e0e808c682a5fe8160d90",
- "werelogs": "github:scality/werelogs#224e312f8644e40da96aa4459ded84c5ac262376"
+ "vaultclient": "github:scality/vaultclient#b00ae12b30075ba87982fcab81844c339927570b",
+ "werelogs": "github:scality/werelogs#0f97dc5b1d61cd38d618cf215be3c531c0e4e7d5"
}
},
"utf8": {
@@ -4703,11 +4730,11 @@
"integrity": "sha512-YV5KjzvRmSyJ1ee/Dm5UED0G+1L4GZnLN3w6/T+zZm8scVua4sOhYKWTUrKa0H/tMiJyO9QLHMPN+9mB/aMunA=="
},
"vaultclient": {
- "version": "github:scality/vaultclient#891b90720f14ee11494e0e808c682a5fe8160d90",
+ "version": "github:scality/vaultclient#b00ae12b30075ba87982fcab81844c339927570b",
"requires": {
- "arsenal": "github:scality/Arsenal#c8a714864519f9012571e0e4abdcbdd55ca72126",
+ "arsenal": "github:scality/Arsenal#5bf7fef53ca0f409dbba1e19095a25bd3862b0f8",
"commander": "2.9.0",
- "werelogs": "github:scality/werelogs#224e312f8644e40da96aa4459ded84c5ac262376",
+ "werelogs": "github:scality/werelogs#0f97dc5b1d61cd38d618cf215be3c531c0e4e7d5",
"xml2js": "0.4.17"
},
"dependencies": {
@@ -4749,9 +4776,9 @@
}
},
"werelogs": {
- "version": "github:scality/werelogs#224e312f8644e40da96aa4459ded84c5ac262376",
+ "version": "github:scality/werelogs#0f97dc5b1d61cd38d618cf215be3c531c0e4e7d5",
"requires": {
- "safe-json-stringify": "1.1.0"
+ "safe-json-stringify": "1.2.0"
}
},
"which": {
@@ -4768,9 +4795,9 @@
"integrity": "sha1-Zws6+8VS4LVd9rd4DKdGFfI60cs="
},
"wide-align": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.2.tgz",
- "integrity": "sha512-ijDLlyQ7s6x1JgCLur53osjm/UXUYD9+0PbYKrBsYisYXzCxN+HC3mYDNy/dWdmf3AwqwU3CXwDCvsNgGK1S0w==",
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz",
+ "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==",
"requires": {
"string-width": "1.0.2"
}
diff --git a/package.json b/package.json
index b4ef3e7407..5718192ea7 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
{
"name": "s3",
- "version": "7.4.0",
- "description": "S3 connector",
+ "version": "1.0.0-zenko",
+ "description": "Zenko CloudServer, an open-source Node.js implementation of a server handling the Amazon S3 protocol",
"main": "index.js",
"engines": {
"node": ">=6.9.5"
@@ -12,7 +12,7 @@
"cloud",
"server"
],
- "author": "Giorgio Regni",
+ "author": "Scality Inc.",
"license": "Apache-2.0",
"bugs": {
"url": "https://github.com/scality/S3/issues"
@@ -56,7 +56,7 @@
"lolex": "^1.4.0",
"mocha": "^2.3.4",
"mocha-junit-reporter": "1.11.1",
- "node-mocks-http": "^1.5.2",
+ "node-mocks-http": "1.5.2",
"s3blaster": "scality/s3blaster",
"tv4": "^1.2.7"
},
@@ -77,6 +77,7 @@
"ft_s3cmd": "cd tests/functional/s3cmd && mocha -t 40000 *.js",
"ft_s3curl": "cd tests/functional/s3curl && mocha -t 40000 *.js",
"ft_test": "npm-run-all -s ft_awssdk ft_s3cmd ft_s3curl ft_node ft_healthchecks ft_management",
+ "ft_search": "cd tests/functional/aws-node-sdk && mocha -t 90000 test/mdSearch",
"install_ft_deps": "npm install aws-sdk@2.28.0 bluebird@3.3.1 mocha@2.3.4 mocha-junit-reporter@1.11.1 tv4@1.2.7",
"lint": "eslint $(git ls-files '*.js')",
"lint_md": "mdlint $(git ls-files '*.md')",
diff --git a/tests.bash b/tests.bash
index a128ef3b1e..b197cc8844 100644
--- a/tests.bash
+++ b/tests.bash
@@ -134,25 +134,25 @@ then
killandsleep 8000
- # Run with mongdb backend ; run ft_tests
+      # Run with mongodb backend; run ft_tests
- S3BACKEND=mem MPU_TESTING=yes S3METADATA=mongodb npm start > $CIRCLE_ARTIFACTS/server_mongodb_awssdk.txt & bash wait_for_local_port.bash 8000 40 && S3DATA=file npm run ft_awssdk
+ S3BACKEND=mem MPU_TESTING=yes S3METADATA=mongodb npm start > $CIRCLE_ARTIFACTS/server_mongodb_awssdk.txt & bash wait_for_local_port.bash 8000 40 && S3DATA=file S3METADATA=mongodb npm run ft_awssdk
killandsleep 8000
- S3BACKEND=mem MPU_TESTING=yes S3METADATA=mongodb npm start > $CIRCLE_ARTIFACTS/server_mongodb_s3cmd.txt & bash wait_for_local_port.bash 8000 40 && S3DATA=file npm run ft_s3cmd
+ S3BACKEND=mem MPU_TESTING=yes S3METADATA=mongodb npm start > $CIRCLE_ARTIFACTS/server_mongodb_s3cmd.txt & bash wait_for_local_port.bash 8000 40 && S3DATA=file S3METADATA=mongodb npm run ft_s3cmd
killandsleep 8000
-
- S3BACKEND=mem MPU_TESTING=yes S3METADATA=mongodb npm start > $CIRCLE_ARTIFACTS/server_mongodb_s3curl.txt & bash wait_for_local_port.bash 8000 40 && S3DATA=file npm run ft_s3curl
+
+ S3BACKEND=mem MPU_TESTING=yes S3METADATA=mongodb npm start > $CIRCLE_ARTIFACTS/server_mongodb_s3curl.txt & bash wait_for_local_port.bash 8000 40 && S3DATA=file S3METADATA=mongodb npm run ft_s3curl
killandsleep 8000
- S3BACKEND=mem MPU_TESTING=yes S3METADATA=mongodb npm start > $CIRCLE_ARTIFACTS/server_mongodb_healthchecks.txt & bash wait_for_local_port.bash 8000 40 && S3DATA=file npm run ft_healthchecks
+ S3BACKEND=mem MPU_TESTING=yes S3METADATA=mongodb npm start > $CIRCLE_ARTIFACTS/server_mongodb_healthchecks.txt & bash wait_for_local_port.bash 8000 40 && S3DATA=file S3METADATA=mongodb npm run ft_healthchecks
killandsleep 8000
- S3BACKEND=mem MPU_TESTING=yes S3METADATA=mongodb npm start > $CIRCLE_ARTIFACTS/server_mongodb_management.txt & bash wait_for_local_port.bash 8000 40 && npm run ft_management
+ S3BACKEND=mem MPU_TESTING=yes S3METADATA=mongodb npm start > $CIRCLE_ARTIFACTS/server_mongodb_management.txt & bash wait_for_local_port.bash 8000 40 && S3METADATA=mongodb npm run ft_management
killandsleep 8000
diff --git a/tests/functional/aws-node-sdk/test/bucket/getLocation.js b/tests/functional/aws-node-sdk/test/bucket/getLocation.js
index 2057b71f16..36627c4eaf 100644
--- a/tests/functional/aws-node-sdk/test/bucket/getLocation.js
+++ b/tests/functional/aws-node-sdk/test/bucket/getLocation.js
@@ -88,7 +88,7 @@ describeSkipAWS('GET bucket location ', () => {
assert.strictEqual(err, null, 'Error creating bucket: ' +
`${err}`);
const host = request.service.endpoint.hostname;
- let endpoint = config.restEndpoints[host];
+ let endpoint = config.restEndpoints[host].write;
// s3 actually returns '' for us-east-1
if (endpoint === 'us-east-1') {
endpoint = '';
diff --git a/tests/functional/aws-node-sdk/test/mdSearch/basicSearch.js b/tests/functional/aws-node-sdk/test/mdSearch/basicSearch.js
new file mode 100644
index 0000000000..6e894bd7f2
--- /dev/null
+++ b/tests/functional/aws-node-sdk/test/mdSearch/basicSearch.js
@@ -0,0 +1,141 @@
+const s3Client = require('./utils/s3SDK');
+const { runAndCheckSearch, runIfMongo } = require('./utils/helpers');
+
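+// Fixtures: `objectKey` carries the metadata and tags that searches should
+// match; `hiddenKey` must never show up in any search result.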
+const objectKey = 'findMe';
+const hiddenKey = 'leaveMeAlone';
+const objectTagData = 'item-type=main';
+const hiddenTagData = 'item-type=dessert';
+const userMetadata = { food: 'pizza' };
+const updatedUserMetadata = { food: 'cake' };
+
+runIfMongo('Basic search', () => {
+ const bucketName = `basicsearchmebucket${Date.now()}`;
+ before(done => {
+ s3Client.createBucket({ Bucket: bucketName }, err => {
+ if (err) {
+ return done(err);
+ }
+ return s3Client.putObject({ Bucket: bucketName, Key: objectKey,
+ Metadata: userMetadata, Tagging: objectTagData }, err => {
+ if (err) {
+ return done(err);
+ }
+ return s3Client.putObject({ Bucket: bucketName,
+ Key: hiddenKey, Tagging: hiddenTagData }, done);
+ });
+ });
+ });
+
+ after(done => {
+ s3Client.deleteObjects({ Bucket: bucketName, Delete: { Objects: [
+ { Key: objectKey },
+ { Key: hiddenKey }],
+ } },
+ err => {
+ if (err) {
+ return done(err);
+ }
+ return s3Client.deleteBucket({ Bucket: bucketName }, done);
+ });
+ });
+
+ it('should list object with searched for system metadata', done => {
+ const encodedSearch = encodeURIComponent(`key="${objectKey}"`);
+ return runAndCheckSearch(s3Client, bucketName,
+ encodedSearch, objectKey, done);
+ });
+
+ it('should list object with regex searched for system metadata', done => {
+ const encodedSearch = encodeURIComponent('key LIKE "find.*"');
+ return runAndCheckSearch(s3Client, bucketName,
+ encodedSearch, objectKey, done);
+ });
+
+ it('should list object with regex searched for system metadata with flags',
+ done => {
+ const encodedSearch = encodeURIComponent('key LIKE "/FIND.*/i"');
+ return runAndCheckSearch(s3Client, bucketName,
+ encodedSearch, objectKey, done);
+ });
+
+    it('should return empty when no object matches regex', done => {
+ const encodedSearch = encodeURIComponent('key LIKE "/NOTFOUND.*/i"');
+ return runAndCheckSearch(s3Client, bucketName,
+ encodedSearch, null, done);
+ });
+
+ it('should list object with searched for user metadata', done => {
+ const encodedSearch =
+ encodeURIComponent(`x-amz-meta-food="${userMetadata.food}"`);
+ return runAndCheckSearch(s3Client, bucketName, encodedSearch,
+ objectKey, done);
+ });
+
+ it('should list object with searched for tag metadata', done => {
+ const encodedSearch =
+ encodeURIComponent('tags.item-type="main"');
+ return runAndCheckSearch(s3Client, bucketName, encodedSearch,
+ objectKey, done);
+ });
+
+ it('should return empty listing when no object has user md', done => {
+ const encodedSearch =
+ encodeURIComponent('x-amz-meta-food="nosuchfood"');
+ return runAndCheckSearch(s3Client, bucketName,
+ encodedSearch, null, done);
+ });
+
+ describe('search when overwrite object', () => {
+ before(done => {
+ s3Client.putObject({ Bucket: bucketName, Key: objectKey,
+ Metadata: updatedUserMetadata }, done);
+ });
+
+ it('should list object with searched for updated user metadata',
+ done => {
+ const encodedSearch =
+ encodeURIComponent('x-amz-meta-food' +
+ `="${updatedUserMetadata.food}"`);
+ return runAndCheckSearch(s3Client, bucketName, encodedSearch,
+ objectKey, done);
+ });
+ });
+});
+
+runIfMongo('Search when no objects in bucket', () => {
+ const bucketName = `noobjectbucket${Date.now()}`;
+ before(done => {
+ s3Client.createBucket({ Bucket: bucketName }, done);
+ });
+
+ after(done => {
+ s3Client.deleteBucket({ Bucket: bucketName }, done);
+ });
+
+ it('should return empty listing when no objects in bucket', done => {
+ const encodedSearch = encodeURIComponent(`key="${objectKey}"`);
+ return runAndCheckSearch(s3Client, bucketName,
+ encodedSearch, null, done);
+ });
+});
+
+runIfMongo('Invalid regular expression searches', () => {
+ const bucketName = `badregex-${Date.now()}`;
+ before(done => {
+ s3Client.createBucket({ Bucket: bucketName }, done);
+ });
+
+ after(done => {
+ s3Client.deleteBucket({ Bucket: bucketName }, done);
+ });
+
+ it('should return error if pattern is invalid', done => {
+ const encodedSearch = encodeURIComponent('key LIKE "/((helloworld/"');
+ const testError = {
+ code: 'InvalidArgument',
+ message: 'Invalid sql where clause sent as search query',
+ };
+ return runAndCheckSearch(s3Client, bucketName,
+ encodedSearch, testError, done);
+ });
+});
diff --git a/tests/functional/aws-node-sdk/test/mdSearch/utils/helpers.js b/tests/functional/aws-node-sdk/test/mdSearch/utils/helpers.js
new file mode 100644
index 0000000000..a23434f907
--- /dev/null
+++ b/tests/functional/aws-node-sdk/test/mdSearch/utils/helpers.js
@@ -0,0 +1,70 @@
+const assert = require('assert');
+const async = require('async');
+
+function _deleteVersionList(s3Client, versionList, bucket, callback) {
+ if (versionList === undefined || versionList.length === 0) {
+ return callback();
+ }
+ const params = { Bucket: bucket, Delete: { Objects: [] } };
+ versionList.forEach(version => {
+ params.Delete.Objects.push({
+ Key: version.Key, VersionId: version.VersionId });
+ });
+
+ return s3Client.deleteObjects(params, callback);
+}
+
+const testUtils = {};
+
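+// Metadata search is implemented only on the MongoDB metadata backend, so
+// run these suites only when S3METADATA=mongodb and skip them otherwise.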
+testUtils.runIfMongo = process.env.S3METADATA === 'mongodb' ?
+ describe : describe.skip;
+
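+// Send a GET Bucket request with the custom `search` querystring appended
+// through the SDK 'build' hook, then assert a single matching key, an empty
+// listing, or the expected error, depending on `testResult`.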
+testUtils.runAndCheckSearch = (s3Client, bucketName, encodedSearch,
+ testResult, done) => {
+ const searchRequest = s3Client.listObjects({ Bucket: bucketName });
+ searchRequest.on('build', () => {
+ searchRequest.httpRequest.path =
+ `${searchRequest.httpRequest.path}?search=${encodedSearch}`;
+ });
+ searchRequest.on('success', res => {
+ if (testResult) {
+            assert(res.data.Contents[0], 'there should be Contents listed');
+ assert.strictEqual(res.data.Contents[0].Key, testResult);
+ assert.strictEqual(res.data.Contents.length, 1);
+ } else {
+ assert.strictEqual(res.data.Contents.length, 0);
+ }
+ return done();
+ });
+ searchRequest.on('error', err => {
+ if (testResult) {
+ assert.strictEqual(err.code, testResult.code);
+ assert.strictEqual(err.message, testResult.message);
+ }
+ return done();
+ });
+ searchRequest.send();
+};
+
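+// Empty a versioned bucket: delete all delete markers and versions, then
+// recurse while the listing reports itself as truncated.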
+testUtils.removeAllVersions = (s3Client, bucket, callback) => {
+ async.waterfall([
+ cb => s3Client.listObjectVersions({ Bucket: bucket }, cb),
+ (data, cb) => _deleteVersionList(s3Client, data.DeleteMarkers, bucket,
+ err => cb(err, data)),
+ (data, cb) => _deleteVersionList(s3Client, data.Versions, bucket,
+ err => cb(err, data)),
+        (data, cb) => {
+            if (data.IsTruncated) {
+                // The versions just deleted no longer appear in a fresh
+                // listing, so recurse on the same bucket to remove the
+                // remaining pages (`this` here would not be `testUtils`).
+                return testUtils.removeAllVersions(s3Client, bucket, cb);
+            }
+            return cb();
+        },
+ ], callback);
+};
+
+module.exports = testUtils;
diff --git a/tests/functional/aws-node-sdk/test/mdSearch/utils/s3SDK.js b/tests/functional/aws-node-sdk/test/mdSearch/utils/s3SDK.js
new file mode 100644
index 0000000000..ec2e234797
--- /dev/null
+++ b/tests/functional/aws-node-sdk/test/mdSearch/utils/s3SDK.js
@@ -0,0 +1,17 @@
+const S3 = require('aws-sdk').S3;
+
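+// Client pointed at a local CloudServer instance with the default test
+// credentials; signature v4 and path-style addressing are required.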
+const config = {
+ sslEnabled: false,
+ endpoint: 'http://127.0.0.1:8000',
+ apiVersions: { s3: '2006-03-01' },
+ signatureCache: false,
+ signatureVersion: 'v4',
+ region: 'us-east-1',
+ s3ForcePathStyle: true,
+ accessKeyId: 'accessKey1',
+ secretAccessKey: 'verySecretKey1',
+};
+
+const client = new S3(config);
+
+module.exports = client;
diff --git a/tests/functional/aws-node-sdk/test/mdSearch/versionEnabledSearch.js b/tests/functional/aws-node-sdk/test/mdSearch/versionEnabledSearch.js
new file mode 100644
index 0000000000..54eef93dc7
--- /dev/null
+++ b/tests/functional/aws-node-sdk/test/mdSearch/versionEnabledSearch.js
@@ -0,0 +1,61 @@
+const s3Client = require('./utils/s3SDK');
+const { runAndCheckSearch, removeAllVersions, runIfMongo } =
+ require('./utils/helpers');
+
+const userMetadata = { food: 'pizza' };
+const updatedMetadata = { food: 'salad' };
+const masterKey = 'master';
+
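+// In a version-enabled bucket, a search should surface only the master
+// (latest) version, even after an overwrite creates new versions.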
+runIfMongo('Search in version enabled bucket', () => {
+ const bucketName = `versionedbucket${Date.now()}`;
+ const VersioningConfiguration = {
+ MFADelete: 'Disabled',
+ Status: 'Enabled',
+ };
+ before(done => {
+ s3Client.createBucket({ Bucket: bucketName }, err => {
+ if (err) {
+ return done(err);
+ }
+ return s3Client.putBucketVersioning({ Bucket: bucketName,
+ VersioningConfiguration }, err => {
+ if (err) {
+ return done(err);
+ }
+ return s3Client.putObject({ Bucket: bucketName,
+ Key: masterKey, Metadata: userMetadata }, done);
+ });
+ });
+ });
+
+ after(done => {
+ removeAllVersions(s3Client, bucketName,
+ err => {
+ if (err) {
+ return done(err);
+ }
+ return s3Client.deleteBucket({ Bucket: bucketName }, done);
+ });
+ });
+
+ it('should list just master object with searched for metadata', done => {
+ const encodedSearch =
+ encodeURIComponent(`x-amz-meta-food="${userMetadata.food}"`);
+ return runAndCheckSearch(s3Client, bucketName,
+ encodedSearch, masterKey, done);
+ });
+
+ describe('New version overwrite', () => {
+ before(done => {
+ s3Client.putObject({ Bucket: bucketName,
+ Key: masterKey, Metadata: updatedMetadata }, done);
+ });
+
+ it('should list just master object with updated metadata', done => {
+ const encodedSearch =
+ encodeURIComponent(`x-amz-meta-food="${updatedMetadata.food}"`);
+ return runAndCheckSearch(s3Client, bucketName,
+ encodedSearch, masterKey, done);
+ });
+ });
+});
diff --git a/tests/functional/aws-node-sdk/test/multipleBackend/put/put.js b/tests/functional/aws-node-sdk/test/multipleBackend/put/put.js
index c2461b283a..c4190bb595 100644
--- a/tests/functional/aws-node-sdk/test/multipleBackend/put/put.js
+++ b/tests/functional/aws-node-sdk/test/multipleBackend/put/put.js
@@ -476,7 +476,7 @@ describe('MultipleBackend put based on request endpoint', () => {
            assert.strictEqual(err, null, 'Expected success, ' +
`got error ${JSON.stringify(err)}`);
const host = request.service.endpoint.hostname;
- let endpoint = config.restEndpoints[host];
+ let endpoint = config.restEndpoints[host].write;
// s3 returns '' for us-east-1
if (endpoint === 'us-east-1') {
endpoint = '';
diff --git a/tests/functional/aws-node-sdk/test/versioning/versioningGeneral1.js b/tests/functional/aws-node-sdk/test/versioning/versioningGeneral1.js
index a1bc31ce83..45f266d0ed 100644
--- a/tests/functional/aws-node-sdk/test/versioning/versioningGeneral1.js
+++ b/tests/functional/aws-node-sdk/test/versioning/versioningGeneral1.js
@@ -6,6 +6,9 @@ const getConfig = require('../support/config');
const bucket = `versioning-bucket-${Date.now()}`;
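+// These generic versioning listing tests are not yet supported on the
+// MongoDB metadata backend, so skip them when S3METADATA=mongodb.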
+const skipIfMongo = process.env.S3METADATA === 'mongodb' ?
+ describe.skip : describe;
+
function comp(v1, v2) {
if (v1.Key > v2.Key) {
return 1;
@@ -23,7 +26,8 @@ function comp(v1, v2) {
}
-describe('aws-node-sdk test bucket versioning listing', function testSuite() {
+skipIfMongo('aws-node-sdk test bucket versioning listing',
+function testSuite() {
this.timeout(600000);
let s3;
const masterVersions = [];
diff --git a/tests/functional/aws-node-sdk/test/versioning/versioningGeneral2.js b/tests/functional/aws-node-sdk/test/versioning/versioningGeneral2.js
index 8a032657b5..d289d397dc 100644
--- a/tests/functional/aws-node-sdk/test/versioning/versioningGeneral2.js
+++ b/tests/functional/aws-node-sdk/test/versioning/versioningGeneral2.js
@@ -6,8 +6,10 @@ const getConfig = require('../support/config');
const bucket = `versioning-bucket-${Date.now()}`;
+const skipIfMongo = process.env.S3METADATA === 'mongodb' ?
+ describe.skip : describe;
-describe('aws-node-sdk test bucket versioning', function testSuite() {
+skipIfMongo('aws-node-sdk test bucket versioning', function testSuite() {
this.timeout(600000);
let s3;
const versionIds = [];
diff --git a/tests/multipleBackend/multipartUpload.js b/tests/multipleBackend/multipartUpload.js
index 299ba21722..ccd064d70b 100644
--- a/tests/multipleBackend/multipartUpload.js
+++ b/tests/multipleBackend/multipartUpload.js
@@ -41,7 +41,7 @@ const bucketName = 'bucketname';
const awsBucket = config.locationConstraints[awsLocation].details.bucketName;
const smallBody = Buffer.from('I am a body', 'utf8');
const bigBody = Buffer.alloc(10485760);
-const locMetaHeader = 'x-amz-meta-scal-location-constraint';
+const locMetaHeader = 'scal-location-constraint';
const bucketPutRequest = {
bucketName,
namespace,
@@ -222,7 +222,8 @@ function assertObjOnBackend(expectedBackend, objectKey, cb) {
return objectGet(authInfo, getObjectGetRequest(zenkoObjectKey), false, log,
(err, result, metaHeaders) => {
assert.equal(err, null, `Error getting object on S3: ${err}`);
- assert.strictEqual(metaHeaders[locMetaHeader], expectedBackend);
+ assert.strictEqual(metaHeaders[`x-amz-meta-${locMetaHeader}`],
+ expectedBackend);
if (expectedBackend === awsLocation) {
return s3.headObject({ Bucket: awsBucket, Key: objectKey },
(err, result) => {
diff --git a/tests/unit/Config.js b/tests/unit/Config.js
index d3bf00fae0..64f6ca0937 100644
--- a/tests/unit/Config.js
+++ b/tests/unit/Config.js
@@ -1,3 +1,5 @@
+const assert = require('assert');
+
describe('Config', () => {
it('should load default config.json without errors', done => {
require('../../lib/Config');
@@ -18,3 +20,63 @@ describe('Config', () => {
return done(new Error('authdata-update event was not emitted'));
});
});
+
+describe('Config::_normalizeRestEndpoints', () => {
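+    // A rest endpoint may be declared as a plain location string or as a
+    // { read, write } object; normalization must always yield the object
+    // form.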
+ const tests = [
+ {
+ msg: 'should return an object with read/write locations if given ' +
+ 'a string preferred location',
+ input: { stringendpoint: 'us-east-1' },
+ output: {
+ stringendpoint: {
+ read: 'us-east-1',
+ write: 'us-east-1',
+ },
+ },
+ },
+ {
+ msg: 'should return an object with read/write locations if given ' +
+ 'an object with read/write preference',
+ input: {
+ objectendpoint: {
+ read: 'us-east-1',
+ write: 'us-east-1',
+ },
+ },
+ output: {
+ objectendpoint: {
+ read: 'us-east-1',
+ write: 'us-east-1',
+ },
+ },
+ },
+ {
+ msg: 'should return an object with read/write locations if given ' +
+ 'an object with different read/write preferences',
+ input: {
+ objectendpoint: {
+ read: 'us-east-1',
+ write: 'us-east-2',
+ },
+ },
+ output: {
+ objectendpoint: {
+ read: 'us-east-1',
+ write: 'us-east-2',
+ },
+ },
+ },
+ ];
+
+ let config;
+ before(() => {
+ const { ConfigObject } = require('../../lib/Config');
+ config = new ConfigObject();
+ });
+
+ tests.forEach(test => it(test.msg, () => {
+ const restEndpoints = config._normalizeRestEndpoints(
+ test.input, config.locationConstraints);
+ assert.deepStrictEqual(restEndpoints, test.output);
+ }));
+});
diff --git a/tests/unit/api/bucketGet.js b/tests/unit/api/bucketGet.js
index 53807a4457..4ac4b765ae 100644
--- a/tests/unit/api/bucketGet.js
+++ b/tests/unit/api/bucketGet.js
@@ -125,6 +125,31 @@ describe('bucketGet API', () => {
});
});
+ it('should return empty list when max-keys is set to 0', done => {
+ const testGetRequest = {
+ bucketName,
+ namespace,
+ headers: { host: '/' },
+ url: `/${bucketName}`,
+ query: { 'max-keys': '0' },
+ };
+
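+        // Put two objects, then list with max-keys=0: the parsed XML must
+        // contain no Content entries at all.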
+ async.waterfall([
+ next => bucketPut(authInfo, testPutBucketRequest, log, next),
+ (corsHeaders, next) => objectPut(authInfo, testPutObjectRequest1,
+ undefined, log, next),
+ (resHeaders, next) => objectPut(authInfo,
+ testPutObjectRequest2, undefined, log, next),
+ (resHeaders, next) => bucketGet(authInfo, testGetRequest,
+ log, next),
+ (result, corsHeaders, next) => parseString(result, next),
+ ],
+ (err, result) => {
+ assert.strictEqual(result.ListBucketResult.Content, undefined);
+ done();
+ });
+ });
+
it('should return no more keys than max-keys specified', done => {
const testGetRequest = {
bucketName,
diff --git a/tests/unit/api/bucketPutReplication.js b/tests/unit/api/bucketPutReplication.js
index 38d3759a56..aeb981946a 100644
--- a/tests/unit/api/bucketPutReplication.js
+++ b/tests/unit/api/bucketPutReplication.js
@@ -3,6 +3,8 @@ const assert = require('assert');
const { DummyRequestLogger } = require('../helpers');
const { getReplicationConfiguration } =
require('../../../lib/api/apiUtils/bucket/getReplicationConfiguration');
+const validateConfiguration =
+ require('../../../lib/api/apiUtils/bucket/validateReplicationConfig');
const replicationUtils =
require('../../functional/aws-node-sdk/lib/utility/replication');
const log = new DummyRequestLogger();
@@ -104,3 +106,65 @@ describe('\'getReplicationConfiguration\' function', () => {
return checkGeneratedID(xml, done);
});
});
+
+describe('\'validateConfiguration\' function', () => {
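+    // When a bucket's read location differs from its write location, the
+    // replication configuration is valid only if the read location also
+    // appears among the rules' storage classes (the withDefaultRead case).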
+ const withDefaultRead = {
+ role: 'arn:aws:iam::account-id:role/src-resource,' +
+ 'arn:aws:iam::account-id:role/dest-resource',
+ destination: 'arn:aws:s3:::destination-bucket',
+ rules: [{
+ prefix: 'test-prefix',
+ enabled: true,
+ id: 'test-id',
+ storageClass: 'STANDARD,us-east-2',
+ }],
+ };
+
+ const withoutDefaultRead = {
+ role: 'arn:aws:iam::account-id:role/src-resource,' +
+ 'arn:aws:iam::account-id:role/dest-resource',
+ destination: 'arn:aws:s3:::destination-bucket',
+ rules: [{
+ prefix: 'test-prefix',
+ enabled: true,
+ id: 'test-id',
+ storageClass: 'STANDARD',
+ }],
+ };
+
+ [
+ {
+ msg: 'When read/write locations are the same',
+ setDefault: false,
+ bucket: {
+ getLocationConstraint: () => 'us-east-1',
+ getReadLocationConstraint: () => 'us-east-1',
+ },
+ },
+ {
+ msg: 'When read/write locations are different',
+ setDefault: true,
+ bucket: {
+ getLocationConstraint: () => 'us-east-1',
+ getReadLocationConstraint: () => 'us-east-2',
+ },
+ },
+ ].forEach(suite => describe(suite.msg, () => {
+ [
+ {
+ msg: 'and read location is not present in the replication ' +
+ 'config',
+ input: withoutDefaultRead,
+ res: !suite.setDefault,
+ },
+ {
+ msg: 'and read location is present in the replication config',
+ input: withDefaultRead,
+ res: true,
+ },
+ ].forEach(test => it(test.msg, () => {
+ const result = validateConfiguration(test.input, suite.bucket);
+ assert.strictEqual(result, test.res);
+ }));
+ }));
+});
diff --git a/tests/unit/api/parseLikeExpression.js b/tests/unit/api/parseLikeExpression.js
new file mode 100644
index 0000000000..469b6a8df7
--- /dev/null
+++ b/tests/unit/api/parseLikeExpression.js
@@ -0,0 +1,53 @@
+const assert = require('assert');
+const parseLikeExpression =
+ require('../../../lib/api/apiUtils/bucket/parseLikeExpression');
+
+describe('parseLikeExpression', () => {
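+    // Plain strings map to a literal $regex match, while '/pattern/flags'
+    // input becomes a RegExp with the flags carried in $options.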
+ const tests = [
+ {
+ input: '',
+ output: { $regex: '' },
+ },
+ {
+ input: 'ice-cream-cone',
+ output: { $regex: 'ice-cream-cone' },
+ },
+ {
+ input: '/ice-cream-cone/',
+ output: { $regex: /ice-cream-cone/, $options: '' },
+ },
+ {
+ input: '/ice-cream-cone/i',
+ output: { $regex: /ice-cream-cone/, $options: 'i' },
+ },
+ {
+ input: 'an/ice-cream-cone/',
+ output: { $regex: 'an/ice-cream-cone/' },
+ },
+ {
+ input: '///',
+ output: { $regex: /\//, $options: '' },
+ },
+ ];
+ tests.forEach(test => it('should return correct MongoDB query object: ' +
+ `"${test.input}" => ${JSON.stringify(test.output)}`, () => {
+ const res = parseLikeExpression(test.input);
+ assert.deepStrictEqual(res, test.output);
+ }));
+ const badInputTests = [
+ {
+ input: null,
+ output: null,
+ },
+ {
+ input: 1235,
+ output: null,
+ },
+ ];
+ badInputTests.forEach(test => it(
+ 'should return null if input is not a string ' +
+ `"${test.input}" => ${JSON.stringify(test.output)}`, () => {
+ const res = parseLikeExpression(test.input);
+ assert.deepStrictEqual(res, test.output);
+ }));
+});
diff --git a/tests/unit/management/configuration.js b/tests/unit/management/configuration.js
new file mode 100644
index 0000000000..6a40ed737e
--- /dev/null
+++ b/tests/unit/management/configuration.js
@@ -0,0 +1,300 @@
+const assert = require('assert');
+
+const { DummyRequestLogger } = require('../helpers');
+const log = new DummyRequestLogger();
+
+const metadata = require('../../../lib/metadata/wrapper');
+const managementDatabaseName = 'PENSIEVE';
+const tokenConfigurationKey = 'auth/zenko/remote-management-token';
+
+const { privateKey, accessKey, decryptedSecretKey, secretKey, canonicalId,
+ userName } = require('./resources.json');
+const shortid = '123456789012';
+const email = 'customaccount1@setbyenv.com';
+const arn = 'arn:aws:iam::123456789012:root';
+const { config } = require('../../../lib/Config');
+
+const {
+ remoteOverlayIsNewer,
+ patchConfiguration,
+} = require('../../../lib/management/configuration');
+
+const {
+ initManagementDatabase,
+} = require('../../../lib/management/index');
+
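+// Seed the PENSIEVE management database with the token entry that the
+// configuration code uses to decrypt account secret keys.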
+function initManagementCredentialsMock(cb) {
+ return metadata.putObjectMD(managementDatabaseName,
+ tokenConfigurationKey, { privateKey }, {},
+ log, error => cb(error));
+}
+
+function getConfig() {
+ return config;
+}
+
+// Original Config
+const overlayVersionOriginal = Object.assign({}, config.overlayVersion);
+const authDataOriginal = Object.assign({}, config.authData);
+const locationConstraintsOriginal = Object.assign({},
+ config.locationConstraints);
+const restEndpointsOriginal = Object.assign({}, config.restEndpoints);
+const browserAccessEnabledOriginal = config.browserAccessEnabled;
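+// patchConfiguration mutates the shared config singleton in place, so the
+// original values captured above are restored before each test.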
+function resetConfig() {
+ config.overlayVersion = overlayVersionOriginal;
+ config.authData = authDataOriginal;
+ config.locationConstraints = locationConstraintsOriginal;
+ config.restEndpoints = restEndpointsOriginal;
+ config.browserAccessEnabled = browserAccessEnabledOriginal;
+}
+
+function assertConfig(actualConf, expectedConf) {
+ Object.keys(expectedConf).forEach(key => {
+ assert.deepEqual(actualConf[key], expectedConf[key]);
+ });
+}
+
+function checkNoError(err) {
+ assert.strictEqual(err, null, 'Expected success ' +
+ `but got error ${err}`);
+}
+
+describe('patchConfiguration', () => {
+ before(done => initManagementDatabase(log, err => {
+ if (err) {
+ return done(err);
+ }
+ return initManagementCredentialsMock(done);
+ }));
+ beforeEach(() => {
+ resetConfig();
+ });
+ it('should modify config using the new config', done => {
+ const newConf = {
+ version: 1,
+ users: [
+ {
+ secretKey,
+ accessKey,
+ canonicalId,
+ userName,
+ },
+ ],
+ endpoints: [
+ {
+ hostname: '1.1.1.1',
+ locationName: 'us-east-1',
+ },
+ ],
+ locations: {
+ 'legacy': {
+ name: 'legacy',
+ locationType: 'location-mem-v1',
+ },
+ 'us-east-1': {
+ name: 'us-east-1',
+ locationType: 'location-file-v1',
+ legacyAwsBehavior: true,
+ },
+ 'azurebackendtest': {
+ name: 'azurebackendtest',
+ locationType: 'location-azure-v1',
+ details: {
+ bucketMatch: 'azurebucketmatch',
+ endpoint: 'azure.end.point',
+ accessKey: 'azureaccesskey',
+ secretKey,
+ bucketName: 'azurebucketname',
+ },
+ },
+ 'awsbackendtest': {
+ name: 'awsbackendtest',
+ locationType: 'location-aws-s3-v1',
+ details: {
+ bucketMatch: 'awsbucketmatch',
+ endpoint: 'aws.end.point',
+ accessKey: 'awsaccesskey',
+ secretKey,
+ bucketName: 'awsbucketname',
+ },
+ },
+ 'gcpbackendtest': {
+ name: 'gcpbackendtest',
+ locationType: 'location-gcp-v1',
+ details: {
+ bucketMatch: 'gcpbucketmatch',
+ endpoint: 'gcp.end.point',
+ accessKey: 'gcpaccesskey',
+ secretKey,
+ bucketName: 'gcpbucketname',
+ },
+ },
+ },
+ browserAccess: {
+ enabled: true,
+ },
+ };
+ return patchConfiguration(newConf, log, err => {
+ checkNoError(err);
+ const actualConf = getConfig();
+ const expectedConf = {
+ overlayVersion: 1,
+ browserAccessEnabled: true,
+ authData: {
+ accounts: [{
+ name: userName,
+ email,
+ arn,
+ canonicalID: canonicalId,
+ shortid,
+ keys: [{
+ access: accessKey,
+ secret: decryptedSecretKey,
+ }],
+ }],
+ },
+ locationConstraints: {
+ 'legacy': { type: 'mem', legacyAwsBehavior: false },
+ 'us-east-1': { type: 'file', legacyAwsBehavior: true },
+ 'azurebackendtest': {
+ details: {
+ azureContainerName: 'azurebucketname',
+ azureStorageAccessKey: decryptedSecretKey,
+ azureStorageAccountName: 'azureaccesskey',
+ azureStorageEndpoint: 'azure.end.point',
+ bucketMatch: 'azurebucketmatch',
+ },
+ legacyAwsBehavior: false,
+ type: 'azure',
+ },
+ 'awsbackendtest': {
+ details: {
+ awsEndpoint: 'aws.end.point',
+ bucketMatch: 'awsbucketmatch',
+ bucketName: 'awsbucketname',
+ credentials: {
+ accessKey: 'awsaccesskey',
+ secretKey: decryptedSecretKey,
+ },
+ https: true,
+ pathStyle: false,
+ serverSideEncryption: false,
+ supportsVersioning: true,
+ },
+ legacyAwsBehavior: false,
+ type: 'aws_s3',
+ },
+ 'gcpbackendtest': {
+ details: {
+ bucketMatch: 'gcpbucketmatch',
+ bucketName: 'gcpbucketname',
+ credentials: {
+ accessKey: 'gcpaccesskey',
+ secretKey: decryptedSecretKey,
+ },
+ gcpEndpoint: 'gcp.end.point',
+ mpuBucketName: undefined,
+ },
+ legacyAwsBehavior: false,
+ type: 'gcp',
+ },
+ },
+ };
+ assertConfig(actualConf, expectedConf);
+ assert.deepStrictEqual(actualConf.restEndpoints['1.1.1.1'],
+ { read: 'us-east-1', write: 'us-east-1' });
+ return done();
+ });
+ });
+
+    it('should apply second configuration if version (2) is greater than ' +
+ 'overlayVersion (1)', done => {
+ const newConf1 = {
+ version: 1,
+ };
+ const newConf2 = {
+ version: 2,
+ browserAccess: {
+ enabled: true,
+ },
+ };
+ patchConfiguration(newConf1, log, err => {
+ checkNoError(err);
+ return patchConfiguration(newConf2, log, err => {
+ checkNoError(err);
+ const actualConf = getConfig();
+ const expectedConf = {
+ overlayVersion: 2,
+ browserAccessEnabled: true,
+ };
+ assertConfig(actualConf, expectedConf);
+ return done();
+ });
+ });
+ });
+
+ it('should not apply the second configuration if version equals ' +
+ 'overlayVersion', done => {
+ const newConf1 = {
+ version: 1,
+ };
+ const newConf2 = {
+ version: 1,
+ browserAccess: {
+ enabled: true,
+ },
+ };
+ patchConfiguration(newConf1, log, err => {
+ checkNoError(err);
+ return patchConfiguration(newConf2, log, err => {
+ checkNoError(err);
+ const actualConf = getConfig();
+ const expectedConf = {
+ overlayVersion: 1,
+ browserAccessEnabled: undefined,
+ };
+ assertConfig(actualConf, expectedConf);
+ return done();
+ });
+ });
+ });
+});
+
+describe('remoteOverlayIsNewer', () => {
+    it('should return remoteOverlayIsNewer equals false if the remote ' +
+    'overlay version is less than the cached one', () => {
+ const cachedOverlay = {
+ version: 2,
+ };
+ const remoteOverlay = {
+ version: 1,
+ };
+ const isRemoteOverlayNewer = remoteOverlayIsNewer(cachedOverlay,
+ remoteOverlay);
+ assert.equal(isRemoteOverlayNewer, false);
+ });
+    it('should return remoteOverlayIsNewer equals false if the remote ' +
+    'overlay and the cached one are equal', () => {
+ const cachedOverlay = {
+ version: 1,
+ };
+ const remoteOverlay = {
+ version: 1,
+ };
+ const isRemoteOverlayNewer = remoteOverlayIsNewer(cachedOverlay,
+ remoteOverlay);
+ assert.equal(isRemoteOverlayNewer, false);
+ });
+    it('should return remoteOverlayIsNewer equals true if the remote ' +
+    'overlay version is greater than the cached one', () => {
+ const cachedOverlay = {
+ version: 0,
+ };
+ const remoteOverlay = {
+ version: 1,
+ };
+ const isRemoteOverlayNewer = remoteOverlayIsNewer(cachedOverlay,
+ remoteOverlay);
+ assert.equal(isRemoteOverlayNewer, true);
+ });
+});
diff --git a/tests/unit/management/resources.json b/tests/unit/management/resources.json
new file mode 100644
index 0000000000..962ff806cd
--- /dev/null
+++ b/tests/unit/management/resources.json
@@ -0,0 +1,9 @@
+{
+ "privateKey": "-----BEGIN RSA PRIVATE KEY-----\r\nMIIEowIBAAKCAQEAj13sSYE40lAX2qpBvfdGfcSVNtBf8i5FH+E8FAhORwwPu+2S\r\n3yBQbgwHq30WWxunGb1NmZL1wkVZ+vf12DtxqFRnMA08LfO4oO6oC4V8XfKeuHyJ\r\n1qlaKRINz6r9yDkTHtwWoBnlAINurlcNKgGD5p7D+G26Chbr/Oo0ZwHula9DxXy6\r\neH8/bJ5/BynyNyyWRPoAO+UkUdY5utkFCUq2dbBIhovMgjjikf5p2oWqnRKXc+JK\r\nBegr6lSHkkhyqNhTmd8+wA+8Cace4sy1ajY1t5V4wfRZea5vwl/HlyyKodvHdxng\r\nJgg6H61JMYPkplY6Gr9OryBKEAgq02zYoYTDfwIDAQABAoIBAAuDYGlavkRteCzw\r\nRU1LIVcSRWVcgIgDXTu9K8T0Ec0008Kkxomyn6LmxmroJbZ1VwsDH8s4eRH73ckA\r\nxrZxt6Pr+0lplq6eBvKtl8MtGhq1VDe+kJczjHEF6SQHOFAu/TEaPZrn2XMcGvRX\r\nO1BnRL9tepFlxm3u/06VRFYNWqqchM+tFyzLu2AuiuKd5+slSX7KZvVgdkY1ErKH\r\ngB75lPyhPb77C/6ptqUisVMSO4JhLhsD0+ekDVY982Sb7KkI+szdWSbtMx9Ek2Wo\r\ntXwJz7I8T7IbODy9aW9G+ydyhMDFmaEYIaDVFKJj5+fluNza3oQ5PtFNVE50GQJA\r\nsisGqfECgYEAwpkwt0KpSamSEH6qknNYPOwxgEuXWoFVzibko7is2tFPvY+YJowb\r\n68MqHIYhf7gHLq2dc5Jg1TTbGqLECjVxp4xLU4c95KBy1J9CPAcuH4xQLDXmeLzP\r\nJ2YgznRocbzAMCDAwafCr3uY9FM7oGDHAi5bE5W11xWx+9MlFExL3JkCgYEAvJp5\r\nf+JGN1W037bQe2QLYUWGszewZsvplnNOeytGQa57w4YdF42lPhMz6Kc/zdzKZpN9\r\njrshiIDhAD5NCno6dwqafBAW9WZl0sn7EnlLhD4Lwm8E9bRHnC9H82yFuqmNrzww\r\nzxBCQogJISwHiVz4EkU48B283ecBn0wT/fAa19cCgYEApKWsnEHgrhy1IxOpCoRh\r\nUhqdv2k1xDPN/8DUjtnAFtwmVcLa/zJopU/Zn4y1ZzSzjwECSTi+iWZRQ/YXXHPf\r\nl92SFjhFW92Niuy8w8FnevXjF6T7PYiy1SkJ9OR1QlZrXc04iiGBDazLu115A7ce\r\nanACS03OLw+CKgl6Q/RR83ECgYBCUngDVoimkMcIHHt3yJiP3ikeAKlRnMdJlsa0\r\nXWVZV4hCG3lDfRXsnEgWuimftNKf+6GdfYSvQdLdiQsCcjT5A4uLsQTByv5nf4uA\r\n1ZKOsFrmRrARzxGXhLDikvj7yP//7USkq+0BBGFhfuAvl7fMhPceyPZPehqB7/jf\r\nxX1LBQKBgAn5GgSXzzS0e06ZlP/VrKxreOHa5Z8wOmqqYQ0QTeczAbNNmuITdwwB\r\nNkbRqpVXRIfuj0BQBegAiix8om1W4it0cwz54IXBwQULxJR1StWxj3jo4QtpMQ+z\r\npVPdB1Ilb9zPV1YvDwRfdS1xsobzznAx56ecsXduZjs9mF61db8Q\r\n-----END RSA PRIVATE KEY-----\r\n",
+ "publicKey": "-----BEGIN PUBLIC KEY-----\r\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAj13sSYE40lAX2qpBvfdG\r\nfcSVNtBf8i5FH+E8FAhORwwPu+2S3yBQbgwHq30WWxunGb1NmZL1wkVZ+vf12Dtx\r\nqFRnMA08LfO4oO6oC4V8XfKeuHyJ1qlaKRINz6r9yDkTHtwWoBnlAINurlcNKgGD\r\n5p7D+G26Chbr/Oo0ZwHula9DxXy6eH8/bJ5/BynyNyyWRPoAO+UkUdY5utkFCUq2\r\ndbBIhovMgjjikf5p2oWqnRKXc+JKBegr6lSHkkhyqNhTmd8+wA+8Cace4sy1ajY1\r\nt5V4wfRZea5vwl/HlyyKodvHdxngJgg6H61JMYPkplY6Gr9OryBKEAgq02zYoYTD\r\nfwIDAQAB\r\n-----END PUBLIC KEY-----\r\n",
+ "accessKey": "QXP3VDG3SALNBX2QBJ1C",
+ "secretKey": "K5FyqZo5uFKfw9QBtn95o6vuPuD0zH/1seIrqPKqGnz8AxALNSx6EeRq7G1I6JJpS1XN13EhnwGn2ipsml3Uf2fQ00YgEmImG8wzGVZm8fWotpVO4ilN4JGyQCah81rNX4wZ9xHqDD7qYR5MyIERxR/osoXfctOwY7GGUjRKJfLOguNUlpaovejg6mZfTvYAiDF+PTO1sKUYqHt1IfKQtsK3dov1EFMBB5pWM7sVfncq/CthKN5M+VHx9Y87qdoP3+7AW+RCBbSDOfQgxvqtS7PIAf10mDl8k2kEURLz+RqChu4O4S0UzbEmtja7wa7WYhYKv/tM/QeW7kyNJMmnPg==",
+ "decryptedSecretKey": "n7PSZ3U6SgerF9PCNhXYsq3S3fRKVGdZTicGV8Ur",
+ "canonicalId": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be",
+ "userName": "orbituser"
+}
diff --git a/tests/unit/multipleBackend/getReplicationBackendDataLocator.js b/tests/unit/multipleBackend/getReplicationBackendDataLocator.js
new file mode 100644
index 0000000000..0ff16e2e58
--- /dev/null
+++ b/tests/unit/multipleBackend/getReplicationBackendDataLocator.js
@@ -0,0 +1,53 @@
+const assert = require('assert');
+
+const getReplicationBackendDataLocator = require(
+ '../../../lib/api/apiUtils/object/getReplicationBackendDataLocator');
+
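+// A spoofed location-check result plus replication statuses covering the
+// no-match, PENDING, FAILED, and COMPLETE branches.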
+const locCheckResult = {
+ location: 'spoofbackend',
+ key: 'spoofkey',
+ locationType: 'spoof',
+};
+const repNoMatch = { backends: [{ site: 'nomatch' }] };
+const repMatchPending = { backends:
+ [{ site: 'spoofbackend', status: 'PENDING', dataVersionId: '' }] };
+const repMatchFailed = { backends:
+ [{ site: 'spoofbackend', status: 'FAILED', dataVersionId: '' }] };
+const repMatch = { backends: [{
+ site: 'spoofbackend',
+ status: 'COMPLETE',
+ dataStoreVersionId: 'spoofid' }],
+};
+const expDataLocator = [{
+ key: locCheckResult.key,
+ dataStoreName: locCheckResult.location,
+ dataStoreType: locCheckResult.locationType,
+ dataStoreVersionId: repMatch.backends[0].dataStoreVersionId,
+}];
+
+
+describe('Replication Backend Compare', () => {
+ it('should return error if no match in replication backends', () => {
+ const repBackendResult =
+ getReplicationBackendDataLocator(locCheckResult, repNoMatch);
+ assert(repBackendResult.error.InvalidLocationConstraint);
+ });
+ it('should return error if backend status is PENDING', () => {
+ const repBackendResult =
+ getReplicationBackendDataLocator(locCheckResult, repMatchPending);
+ assert(repBackendResult.error.NoSuchKey);
+ assert.strictEqual(repBackendResult.status, 'PENDING');
+ });
+ it('should return error if backend status is FAILED', () => {
+ const repBackendResult =
+ getReplicationBackendDataLocator(locCheckResult, repMatchFailed);
+ assert(repBackendResult.error.NoSuchKey);
+ assert.strictEqual(repBackendResult.status, 'FAILED');
+ });
+ it('should return dataLocator obj if backend matches and rep is complete',
+ () => {
+ const repBackendResult =
+ getReplicationBackendDataLocator(locCheckResult, repMatch);
+ assert.deepStrictEqual(repBackendResult.dataLocator, expDataLocator);
+ });
+});
diff --git a/tests/unit/multipleBackend/locationConstraintCheck.js b/tests/unit/multipleBackend/locationConstraintCheck.js
index d249b38769..34c98f9a30 100644
--- a/tests/unit/multipleBackend/locationConstraintCheck.js
+++ b/tests/unit/multipleBackend/locationConstraintCheck.js
@@ -1,4 +1,5 @@
const assert = require('assert');
+
const { BackendInfo } = require('../../../lib/api/apiUtils/object/BackendInfo');
const BucketInfo = require('arsenal').models.BucketInfo;
const DummyRequest = require('../DummyRequest');
diff --git a/tests/unit/multipleBackend/locationHeaderCheck.js b/tests/unit/multipleBackend/locationHeaderCheck.js
new file mode 100644
index 0000000000..6db3f02908
--- /dev/null
+++ b/tests/unit/multipleBackend/locationHeaderCheck.js
@@ -0,0 +1,45 @@
+const assert = require('assert');
+const { errors } = require('arsenal');
+
+const locationHeaderCheck =
+ require('../../../lib/api/apiUtils/object/locationHeaderCheck');
+
+const objectKey = 'locationHeaderCheckObject';
+const bucketName = 'locationHeaderCheckBucket';
+
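+// Cases: an unknown location must error, an empty header is ignored, and
+// known locations yield a data locator whose key reflects the location's
+// bucketMatch setting.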
+const testCases = [
+ {
+ location: 'doesnotexist',
+ expRes: errors.InvalidLocationConstraint.customizeDescription(
+ 'Invalid location constraint specified in header'),
+ }, {
+ location: '',
+ expRes: undefined,
+ }, {
+ location: 'awsbackend',
+ expRes: {
+ location: 'awsbackend',
+ key: objectKey,
+ locationType: 'aws_s3',
+ },
+ }, {
+ location: 'awsbackendmismatch',
+ expRes: {
+ location: 'awsbackendmismatch',
+ key: `${bucketName}/${objectKey}`,
+ locationType: 'aws_s3',
+ },
+ },
+];
+
+describe('Location Header Check', () => {
+ testCases.forEach(test => {
+ it('should return expected result with location constraint header ' +
+ `set to ${test.location}`, () => {
+ const headers = { 'x-amz-location-constraint': `${test.location}` };
+ const checkRes =
+ locationHeaderCheck(headers, objectKey, bucketName);
+ assert.deepStrictEqual(checkRes, test.expRes);
+ });
+ });
+});
diff --git a/tests/unit/utils/proxyCompareURL.js b/tests/unit/utils/proxyCompareURL.js
index 88ac13824b..9500e9a934 100644
--- a/tests/unit/utils/proxyCompareURL.js
+++ b/tests/unit/utils/proxyCompareURL.js
@@ -42,11 +42,10 @@ const testCases = [
describe('proxyCompareURL util function', () => {
testCases.forEach(test => {
- it(`should return ${test.expRes} if ${test.desc}`, done => {
+ it(`should return ${test.expRes} if ${test.desc}`, () => {
process.env.NO_PROXY = test.noProxy;
const proxyMatch = proxyCompareUrl(test.endpoint);
assert.strictEqual(test.expRes, proxyMatch);
- done();
});
});
diff --git a/tests/unit/utils/validateSearch.js b/tests/unit/utils/validateSearch.js
index 8946221c8f..458a2e6e3a 100644
--- a/tests/unit/utils/validateSearch.js
+++ b/tests/unit/utils/validateSearch.js
@@ -106,6 +106,12 @@ describe('validate search where clause', () => {
result: errors.InvalidArgument.customizeDescription('Search ' +
'param contains unknown attribute: madeUp'),
},
+ {
+ it: 'should disallow unsupported query operators',
+ searchParams: 'x-amz-meta-dog BETWEEN "labrador"',
+ result: errors.InvalidArgument.customizeDescription(
+ 'Invalid sql where clause sent as search query'),
+ },
];
tests.forEach(test => {