diff --git a/config.js b/config.js index f7028364..7a7e60bf 100755 --- a/config.js +++ b/config.js @@ -133,6 +133,34 @@ var conf = convict({ env: 'DB_AUTH_NAME' } }, + search: { + enabled: { + doc: 'If true, API responds to collection /search endpoints', + format: Boolean, + default: false + }, + minQueryLength: { + doc: 'Minimum search string length', + format: Number, + default: 3 + }, + wordCollection: { + doc: 'The name of the datastore collection that will hold tokenized words', + format: String, + default: 'words' + }, + datastore: { + doc: 'The datastore to use for storing and querying indexed documents', + format: String, + default: '@dadi/api-mongodb' + }, + database: { + doc: 'The name of the database to use for storing and querying indexed documents', + format: String, + default: 'search', + env: 'DB_SEARCH_NAME' + } + }, caching: { ttl: { doc: '', diff --git a/config/config.test.json.sample b/config/config.test.json.sample index 97483794..163718f1 100755 --- a/config/config.test.json.sample +++ b/config/config.test.json.sample @@ -47,6 +47,13 @@ "defaultBucket": "mediaStore", "basePath": "test/acceptance/temp-workspace/media" }, + "search": { + "enabled": false, + "minQueryLength": 3, + "wordCollection": "words", + "datastore": "./../../../test/test-connector", + "database": "search" + }, "feedback": false, "cors": false, "cluster": false diff --git a/config/mongodb.test.json b/config/mongodb.test.json index 508abdc6..35896dec 100644 --- a/config/mongodb.test.json +++ b/config/mongodb.test.json @@ -34,8 +34,13 @@ ], "username": "", "password": "", - "replicaSet": "", - "ssl": false - } - } + }, + "search": { + "hosts": [ + { + "host": "127.0.0.1", + "port": 27017 + } + ] + }, } diff --git a/dadi/lib/controller/documents.js b/dadi/lib/controller/documents.js index 2e79d133..253ffbe8 100644 --- a/dadi/lib/controller/documents.js +++ b/dadi/lib/controller/documents.js @@ -214,7 +214,7 @@ Collection.prototype.registerRoutes = function (route, filePath) { }) 
// Creating generic route. - this.server.app.use(`${route}/:id(${this.ID_PATTERN})?/:action(count|stats)?`, (req, res, next) => { + this.server.app.use(`${route}/:id(${this.ID_PATTERN})?/:action(count|search|stats)?`, (req, res, next) => { try { // Map request method to controller method. let method = req.params.action || (req.method && req.method.toLowerCase()) @@ -252,7 +252,7 @@ Collection.prototype.stats = function (req, res, next) { Collection.prototype.unregisterRoutes = function (route) { this.server.app.unuse(`${route}/config`) - this.server.app.unuse(`${route}/:id(${this.ID_PATTERN})?/:action(count|stats)?`) + this.server.app.unuse(`${route}/:id(${this.ID_PATTERN})?/:action(count|search|stats)?`) } module.exports = function (model, server) { diff --git a/dadi/lib/controller/index.js b/dadi/lib/controller/index.js index 1172306d..e2c45546 100755 --- a/dadi/lib/controller/index.js +++ b/dadi/lib/controller/index.js @@ -102,6 +102,11 @@ Controller.prototype._prepareQueryOptions = function (options) { ) } + // `q` represents a search query, e.g. `?q=foo bar baz`. + if (options.q) { + queryOptions.search = options.q + } + // Specified / default number of records to return. 
let limit = parseInt(options.count || settings.count) || 50 @@ -161,6 +166,61 @@ Controller.prototype._prepareQueryOptions = function (options) { Controller.prototype.ID_PATTERN = ID_PATTERN +/** + * Handle collection search endpoints + * Example: /1.0/library/books/search?q=title + */ +Controller.prototype.search = function (req, res, next) { + let path = url.parse(req.url, true) + let options = path.query + + let queryOptions = this._prepareQueryOptions(options) + + if (queryOptions.errors.length !== 0) { + return help.sendBackJSON(400, res, next)(null, queryOptions) + } else { + queryOptions = queryOptions.queryOptions + } + + return this.model.search({ + client: req.dadiApiClient, + options: queryOptions + }).then(query => { + let ids = query._id['$containsAny'].map(id => id.toString()) + + return this.model.find({ + client: req.dadiApiClient, + language: options.lang, + query, + options: queryOptions + }).then(results => { + results.results = results.results.sort((a, b) => { + let aIndex = ids.indexOf(a._id.toString()) + let bIndex = ids.indexOf(b._id.toString()) + + if (aIndex === bIndex) return 0 + + return aIndex > bIndex ? 
1 : -1 + }) + + return this.model.formatForOutput( + results.results, + { + client: req.dadiApiClient, + composeOverride: queryOptions.compose, + language: options.lang, + urlFields: queryOptions.fields + } + ).then(formattedResults => { + results.results = formattedResults + return help.sendBackJSON(200, res, next)(null, results) + }) + }) + }).catch(error => { + return help.sendBackJSON(null, res, next)(error) + }) +} + module.exports = function (model) { return new Controller(model) } diff --git a/dadi/lib/controller/searchIndex.js b/dadi/lib/controller/searchIndex.js new file mode 100644 index 00000000..f1c5db3c --- /dev/null +++ b/dadi/lib/controller/searchIndex.js @@ -0,0 +1,44 @@ +const acl = require('./../model/acl') +const config = require('./../../../config') +const help = require('./../help') + +const SearchIndex = function (server) { + this.server = server + + server.app.routeMethods('/api/index', { + post: this.post.bind(this) + }) +} + +SearchIndex.prototype.post = function (req, res, next) { + if (!req.dadiApiClient.clientId) { + return help.sendBackJSON(null, res, next)( + acl.createError(req.dadiApiClient) + ) + } + + // 404 if Search is not enabled + if (config.get('search.enabled') !== true) { + return next() + } + + res.statusCode = 204 + res.end(JSON.stringify({'message': 'Indexing started'})) + + try { + Object.keys(this.server.components).forEach(key => { + let value = this.server.components[key] + + let hasModel = Object.keys(value).includes('model') && + value.model.constructor.name === 'Model' + + if (hasModel) { + value.model.searchHandler.batchIndex() + } + }) + } catch (err) { + console.log(err) + } +} + +module.exports = server => new SearchIndex(server) diff --git a/dadi/lib/index.js b/dadi/lib/index.js index ee81dec8..48380779 100755 --- a/dadi/lib/index.js +++ b/dadi/lib/index.js @@ -30,13 +30,13 @@ var LanguagesController = require(path.join(__dirname, '/controller/languages')) var MediaController = require(path.join(__dirname, 
'/controller/media')) var ResourcesController = require(path.join(__dirname, '/controller/resources')) var RolesController = require(path.join(__dirname, '/controller/roles')) +var SearchIndexController = require(path.join(__dirname, '/controller/searchIndex')) var StatusEndpointController = require(path.join(__dirname, '/controller/status')) var dadiBoot = require('@dadi/boot') var help = require(path.join(__dirname, '/help')) var Model = require(path.join(__dirname, '/model')) var mediaModel = require(path.join(__dirname, '/model/media')) var monitor = require(path.join(__dirname, '/monitor')) -var search = require(path.join(__dirname, '/search')) var config = require(path.join(__dirname, '/../../config')) @@ -248,9 +248,6 @@ Server.prototype.start = function (done) { // caching layer cache(this).init() - // search layer - search(this) - // start listening var server = this.server = app.listen() @@ -266,6 +263,7 @@ Server.prototype.start = function (done) { LanguagesController(this) ResourcesController(this) RolesController(this) + SearchIndexController(this) this.readyState = 1 diff --git a/dadi/lib/model/collections/create.js b/dadi/lib/model/collections/create.js index 0dd3892f..281eeb2b 100644 --- a/dadi/lib/model/collections/create.js +++ b/dadi/lib/model/collections/create.js @@ -156,6 +156,9 @@ function create ({ results } + // Asynchronous search index. + this.searchHandler.index(returnData.results) + // Run any `afterCreate` hooks. 
if (this.settings.hooks && (typeof this.settings.hooks.afterCreate === 'object')) { returnData.results.forEach(document => { diff --git a/dadi/lib/model/index.js b/dadi/lib/model/index.js index daadf85f..c6822d1e 100755 --- a/dadi/lib/model/index.js +++ b/dadi/lib/model/index.js @@ -7,6 +7,7 @@ const deepMerge = require('deepmerge') const fields = require('./../fields') const History = require('./history') const logger = require('@dadi/logger') +const Search = require('./../search') const Validator = require('./validator') /** @@ -76,6 +77,13 @@ const Model = function (name, schema, connection, settings) { this.compose = this.settings.compose } + // setup search context + this.searchHandler = new Search(this) + + if (this.searchHandler.canUse()) { + this.searchHandler.init() + } + // Add any configured indexes. if (this.settings.index && !Array.isArray(this.settings.index)) { this.settings.index = [ @@ -781,6 +789,7 @@ Model.prototype.getStats = require('./collections/getStats') Model.prototype.revisions = require('./collections/getRevisions') // (!) Deprecated in favour of `getRevisions` Model.prototype.stats = require('./collections/getStats') // (!) Deprecated in favour of `getStats` Model.prototype.update = require('./collections/update') +Model.prototype.search = require('./search') module.exports = function (name, schema, connection, settings) { if (schema) { diff --git a/dadi/lib/model/search.js b/dadi/lib/model/search.js new file mode 100644 index 00000000..abe1e05e --- /dev/null +++ b/dadi/lib/model/search.js @@ -0,0 +1,47 @@ +const config = require('./../../../config') +const debug = require('debug')('api:model:search') + +/** + * Searches for documents in the database and returns a + * metadata object. 
+ * + * @param {Object} query - the search query + * @param {Object} options - an options object + * @returns {Promise} + */ +module.exports = function ({ + client, + options = {} +} = {}) { + let err + + if (!this.searchHandler.canUse()) { + err = new Error('Not Implemented') + err.statusCode = 501 + err.json = { + errors: [{ + message: `Search is disabled or an invalid data connector has been specified.` + }] + } + } else if (!options.search || options.search.length < config.get('search.minQueryLength')) { + err = new Error('Bad Request') + err.statusCode = 400 + err.json = { + errors: [{ + message: `Search query must be at least ${config.get('search.minQueryLength')} characters.` + }] + } + } + + if (err) { + return Promise.reject(err) + } + + return this.validateAccess({ + client, + type: 'read' + }).then(() => { + debug(options.search) + return this.searchHandler.find(options.search) + }) +} diff --git a/dadi/lib/search/analysers/standard.js b/dadi/lib/search/analysers/standard.js new file mode 100644 index 00000000..d64758f0 --- /dev/null +++ b/dadi/lib/search/analysers/standard.js @@ -0,0 +1,121 @@ +'use strict' + +const natural = require('natural') +const TfIdf = natural.TfIdf +const tokenizer = new natural.RegexpTokenizer({ + // pattern: new RegExp(/[^A-Za-zÅåÀÈÌÒÙàèìòùÁÉÍÓÚÝáéíóúýÂÊÎÔÛâêîôûÃÑÕãñõÄËÏÖÜŸäëïöüÿŠŽšžÇç]/i) + pattern: new RegExp(/[^a-zA-Z\u00C0-\u017F]/i) +}) + +class StandardAnalyzer { + constructor (fieldRules) { + this.fieldRules = fieldRules + this.tfidf = new TfIdf() + this.tfidf.setTokenizer(tokenizer) + } + + add (field, value) { + if (Array.isArray(value)) { + let filteredValues = value.filter(this.isValid) + filteredValues.forEach(val => this.tfidf.addDocument(val, field)) + } else if (this.isValid(value)) { + this.tfidf.addDocument(value, field) + } + } + + isValid (value) { + return typeof value === 'string' + } + + getWordsInField (index) { + return this.tfidf.listTerms(index) + .map(item => item.term) + } + + getAllWords () { + let 
words = this.tfidf.documents.map((doc, indx) => { + return this.getWordsInField(indx) + }) + + if (words.length) { + words = words.reduce((a, b) => a.concat(b)) + } + + return this.unique(words) + } + + tokenize (query) { + return tokenizer + .tokenize(query) + .map(word => word.toLowerCase()) + } + + unique (list) { + if (!Array.isArray(list)) { + return [] + } + + return [...new Set(list)] + } + + areValidWords (words) { + if (!Array.isArray(words)) { + return false + } + + return words.every(word => { + return typeof word === 'object' && + word.hasOwnProperty('weight') && + word.hasOwnProperty('word') + }) + } + + mergeWeights (words) { + if (!this.areValidWords(words)) return [] + + return words + .reduce((prev, current) => { + let match = prev.find(wordSearch => wordSearch.word === current.word) + + if (match) { + match.count = match.count ? match.count + 1 : 2 + match.weight += current.weight + return prev + } + return prev.concat(current) + }, []) + .map(match => { + if (match.count) { + match.weight = match.weight / match.count + delete match.count + } + + return match + }) + } + + getWordInstances () { + let words = this.getAllWords() + if (!words || !words.length) return [] + + let docWords = this.tfidf.documents + .map((doc, index) => { + let rules = this.fieldRules[doc.__key.split(':')[0]] + + return words + .filter(word => doc[word]) + .map(word => { + let weight = this.tfidf.tfidf(word, index) * rules.weight + + return { + weight, + word + } + }) + }).reduce((a, b) => a.concat(b)) + + return this.mergeWeights(docWords) + } +} + +module.exports = StandardAnalyzer diff --git a/dadi/lib/search/index.js b/dadi/lib/search/index.js index 663da745..ac869351 100644 --- a/dadi/lib/search/index.js +++ b/dadi/lib/search/index.js @@ -1,77 +1,603 @@ +'use strict' + const path = require('path') -const url = require('url') -const help = require(path.join(__dirname, '/../help')) -const model = require(path.join(__dirname, '/../model')) -/* -Search middleware allowing 
cross-collection querying -Search query URI format: -http://host[:port]/version/search?collections=database/collection[,database2/collection2,...[,databaseN/collectionN]]&query={"title":{"$regex":"brother"}} -Example search query: -http://api.example.com/1.0/search?collections=library/books,library/films&query={"title":{"$regex":"brother"}} -*/ -module.exports = function (server) { - server.app.use('/:version/search', (req, res, next) => { - if (req.method && req.method.toLowerCase() !== 'get') { - return next() +const config = require(path.join(__dirname, '/../../../config')) +const Connection = require(path.join(__dirname, '/../model/connection')) +const debug = require('debug')('api:search') +const DataStore = require(path.join(__dirname, '../datastore')) +const promiseQueue = require('js-promise-queue') +const StandardAnalyser = require('./analysers/standard') +const DefaultAnalyser = StandardAnalyser +const pageLimit = 20 + +/** + * Handles collection searching in API + * @constructor Search + * @classdesc Indexes documents as they are inserted/updated, and performs search tasks. + * N.B. May only be used with the MongoDB Data Connector. + */ +const Search = function (model) { + if (!model || model.constructor.name !== 'Model') { + throw new Error('model should be an instance of Model') + } + + this.model = model + this.indexableFields = this.getIndexableFields() + this.analyser = new DefaultAnalyser(this.indexableFields) +} + +/** + * Determines if searching is enabled for the current collection. Search is available if: + * - the configured DataStore allows it, + * - the main configuration setting of "enabled" is "true", and + * - the current collection schema contains at least one indexable field. 
+ * + * An indexable field has the following configuration: + * + * ```json + * "search": { + * "weight": 2 + * } + * ``` + * @returns {Boolean} - boolean value indicating whether Search is enabled for this collection + */ +Search.prototype.canUse = function () { + let searchConfig = config.get('search') + + this.datastore = DataStore(searchConfig.datastore) + + return (typeof this.datastore.search !== 'undefined') && + searchConfig.enabled && + Object.keys(this.indexableFields).length > 0 +} + +/** + * + */ +Search.prototype.init = function () { + this.wordCollection = config.get('search.wordCollection') + this.searchCollection = this.model.searchCollection || this.model.name + 'Search' + + debug('initialised wordCollection: %s, indexCollection: %s', this.wordCollection, this.searchCollection) + + this.initialiseConnections() + this.applyIndexListeners() +} + +/** + * Initialise connections to the `word` database collection and the current collection's + * `search` database collection - typically the collection name with "Search" appended. + */ +Search.prototype.initialiseConnections = function () { + let searchConfig = config.get('search') + + this.wordConnection = Connection( + { + collection: this.wordCollection, + database: searchConfig.database, + override: true + }, + this.wordCollection, + searchConfig.datastore + ) + + this.searchConnection = Connection( + { + collection: this.searchCollection, + database: searchConfig.database, + override: true + }, + this.searchCollection, + searchConfig.datastore + ) + + this.wordConnection.setMaxListeners(35) + this.searchConnection.setMaxListeners(35) +} + +/** + * Apply Index Listeners + * Fires a call to the data controllers index method with the schemas index rules. 
+ */
+ // TODO: this will change with @eduardo's Connection Recovery branch
+Search.prototype.applyIndexListeners = function () {
+  this.wordConnection.once('connect', database => {
+    database.index(this.wordCollection, this.getWordSchema().settings.index)
+  })
+
+  this.searchConnection.once('connect', database => {
+    database.index(this.searchCollection, this.getSearchSchema().settings.index)
+  })
+}
+
+/**
+ * Find documents in the "words" collection matching the specified searchTerm, using the results of the query
+ * to fetch results from the current collection's search collection, ultimately leading to a set of IDs for documents
+ * that contain the searchTerm
+ *
+ * @param {String} searchTerm - the search query passed to the collection search endpoint
+ * @return {Promise} - resolves with a query containing IDs of documents that contain the searchTerm
+ */
+Search.prototype.find = function (searchTerm) {
+  debug(this.canUse() ? 'search enabled' : 'search disabled')
+
+  debug('find in %s: %s', this.searchCollection, searchTerm)
+
+  let tokenized = this.analyser.tokenize(searchTerm)
+
+  return this.getWords(tokenized).then(words => {
+    let wordIds = words.results.map(word => word._id.toString())
+
+    debug('searching %s for words %o', this.searchCollection, words.results.map(word => word.word))
+
+    return this.searchConnection.datastore.search({
+      words: wordIds,
+      collection: this.searchCollection,
+      schema: this.getSearchSchema().fields,
+      settings: this.getSearchSchema().settings,
+      options: { limit: pageLimit }
+    }).then(wordInstances => {
+      wordInstances = wordInstances.map(instance => instance._id.document)
+
+      return {
+        _id: {
+          '$containsAny': wordInstances
+        }
+      }
+    })
+  })
+}
+
+/**
+ * Removes entries in the collection's search collection that match the specified documents
+ * @param {Array} documents - an array of documents for which to remove word instances
+ * @return {Promise} - Query to delete instances with matching document ids.
+ */ +Search.prototype.delete = function (documents) { + if (!this.canUse() || !Array.isArray(documents)) { + return Promise.resolve() + } + + debug('deleting documents from the %s index', this.searchCollection) + + let deleteQueue = documents.map(document => { + return this.clearDocumentInstances(document._id.toString()) + }) + + return Promise.all(deleteQueue) +} + +/** + * Query the "words" collection for results that match any of the words specified. If there are no + * results, re-query the collection using the same set of words but each converted to a regular expression + * + * @param {Array} words - an array of words extracted from the search term + * @return {Promise} Query against the words collection. + */ +Search.prototype.getWords = function (words) { + let wordQuery = { word: { '$containsAny': words } } + + return this.wordConnection.datastore.find({ + query: wordQuery, + collection: this.wordCollection, + options: {}, + schema: this.getWordSchema().fields, + settings: this.getWordSchema().settings + }).then(response => { + // Try a second pass with regular expressions + if (response.results.length === 0) { + let regexWords = words.map(word => new RegExp(word)) + let regexQuery = { word: { '$containsAny': regexWords } } + + return this.wordConnection.datastore.find({ + query: regexQuery, + collection: this.wordCollection, + options: {}, + schema: this.getWordSchema().fields, + settings: this.getWordSchema().settings + }) } - let parsedUrl = url.parse(req.url, true) - let options = parsedUrl.query + return response + }) +} + +/** + * Searches the collection's "search" collection using the word ID to obtain document IDs for querying the main collection + * The "words" argument should be similar to the following exampe: + * ``` + * [ { _id: 59f2e4be2b58ff41a4f9c14b, word: 'quick' }, { _id: 59f2e4be2b58ff41a4f9c14c, word: 'brown' } ] + * ``` + * @param {Array} words - an array of "word" result objects, each containing an ID that can be used to query the 
search collection
+ * @returns {Promise.<Array>} A Promise that returns an Array containing found instances of the specified words
+ * ```
+ * [
+ *   {
+ *     _id: {
+ *       document: '59f2e8fb01eaec491579ff9d'
+ *     },
+ *     count: 2,
+ *     weight: 1.2274112777602189
+ *   }
+ * ]
+ * ```
+ */
+Search.prototype.getInstancesOfWords = function (words) {
+  let ids = words.map(word => word._id.toString())
+
+  return this.searchConnection.datastore.findInSearchIndex({
+    documentIds: ids,
+    collection: this.searchCollection,
+    options: { limit: pageLimit },
+    schema: this.getSearchSchema().fields,
+    settings: this.getSearchSchema().settings
+  })
+}
+
+/**
+ * Returns all fields from the current collection's schema that have a valid search property
+ * @return {Object} - an object whose keys are the index fields, the value of which represents its search rules
+ * ```json
+ * { title: { indexed: true, store: true, weight: 2 } }
+ * ```
+ */
+Search.prototype.getIndexableFields = function () {
+  let schema = this.model.schema
+
+  let indexableFields = Object.keys(schema).filter(key => {
+    return this.hasSearchField(schema[key])
+  })
+
+  let fields = {}
+
+  indexableFields.forEach(key => {
+    fields[key] = schema[key].search
+  })
+
+  return fields
+}
+
+/**
+ * Determine if the specified collection schema field has a valid search property
+ * @param {Object} field - a collection schema field object
+ * @return {Boolean} `true` if the field has a valid search property
+ */
+Search.prototype.hasSearchField = function (field) {
+  return typeof field === 'object' &&
+    field.search &&
+    !isNaN(field.search.weight)
+}
+
+/**
+ * Removes properties from the specified document that aren't configured to be indexed
+ *
+ * @param {Object} document - a document to be indexed
+ * @return {Object} the specified document with non-indexable properties
removed + */ +Search.prototype.removeNonIndexableFields = function (document) { + if (typeof document !== 'object') return {} + + // set of languages configured for API, so we can keep translation fields + // in the document for indexing + let supportedLanguages = config.get('i18n.languages') + let fieldSeparator = config.get('i18n.fieldCharacter') + + let indexableFields = Object.keys(document).filter(key => { + if (key.indexOf(fieldSeparator) > 0) { + let keyParts = key.split(fieldSeparator) + return this.indexableFields[keyParts[0]] && supportedLanguages.includes(keyParts[1]) + } else { + return this.indexableFields[key] } + }) - // split the collections param - let collections = options.collections.split(',') + let sanitisedDocument = {} - // extract the query from the querystring - let query = help.parseQuery(options.query) + indexableFields.forEach(key => { + sanitisedDocument[key] = document[key] + }) - // determine API version - let apiVersion = parsedUrl.pathname.split('/')[1] + return sanitisedDocument +} + +/** + * Index the specified documents + * @param {Array} documents - an array of documents to be indexed + * @return {Promise} - Queries to index documents. 
+ */ +Search.prototype.index = function (documents) { + if (!this.canUse() || !Array.isArray(documents)) { + return Promise.resolve() + } + + promiseQueue(documents, this.indexDocument.bind(this), { + interval: 300 + }) +} + +/** + * Index the specified document by inserting words from the indexable fields into the + * "words" collection + * + * @param {Object} document - a document to be indexed + * @return {[type]} [description] + */ +Search.prototype.indexDocument = function (document) { + let reducedDocument = this.removeNonIndexableFields(document) + let words = this.analyseDocumentWords(reducedDocument) + let uniqueWords + + return this.getWords(words).then(existingWords => { + if (existingWords.results.length) { + uniqueWords = words.filter(word => { + return existingWords.results.every(result => result.word !== word) + }) + } else { + uniqueWords = words + } - // no collections specfied - if (collections.length === 0) { - return help.sendBackJSON(400, res, next)(null, {'error': 'Bad Request'}) + let data = this.formatInsertQuery(uniqueWords) + + if (!uniqueWords.length) { + return this.clearAndInsertWordInstances(words, document._id.toString()) } - let results = {} - let idx = 0 + // insert unique words into the words collection + return this.wordConnection.datastore.insert({ + data: data, + collection: this.wordCollection, + options: {}, + schema: this.getWordSchema().fields, + settings: this.getWordSchema().settings + }).then(response => { + return this.clearAndInsertWordInstances(words, document._id.toString()) + }).catch(err => { + console.log(err) + }) + }) +} + +/** + * Analyse Document Words + * Pass all words to an instance of analyser and return all words. + * @param {Object} doc A document from the database, with non-indexable fields removed. + * @return {Array} A list of analysed words. 
+ */ +Search.prototype.analyseDocumentWords = function (doc) { + // add the document to the analyser index + Object.keys(doc).map(key => { + this.analyser.add(key, doc[key]) + }) - collections.forEach(collection => { - // get the database and collection name from the - // collection parameter - let parts = collection.split('/') - let database, name, mod + // add the document to a fresh analyser instance so we can get only the + // indexable words from THIS DOCUMENT + let analyser = new DefaultAnalyser(this.indexableFields) - query._apiVersion = apiVersion + Object.keys(doc).map(key => { + analyser.add(key, doc[key]) + }) - if (Array.isArray(parts) && parts.length > 1) { - database = parts[0] - name = parts[1] - mod = model(name, null, null, database) + // return indexable words from THIS DOCUMENT only + return analyser.getAllWords() +} + +/** + * Formats the specified words for inserting into the database + * + * @param {Array} words - an array of words + * @return {Array} - an array of objects in the format `{ word: }` + */ +Search.prototype.formatInsertQuery = function (words) { + return words.map(word => { + return { word } + }) +} + +/** + * Find all words that exist in the current version of a document, removes all indexed words relating to a specific document, and finally insert new word instances + * @param {Array} words - an array of words matching document word list. + * @param {Class} analyser - an analyser + * @param {String} docId - the current document ID + * @return {Promise} Chained word query, document instance delete and document instance insert. + */ +Search.prototype.clearAndInsertWordInstances = function (words, docId) { + // The word index is unique, so results aren't always returned. + // Fetch word entries again to get ids. 
+ let query = { + word: { + '$containsAny': words + } + } + + return this.wordConnection.datastore.find({ + query, + collection: this.wordCollection, + options: {}, + schema: this.getWordSchema().fields, + settings: this.getWordSchema().settings + }).then(results => { + // Get all word instances from Analyser + return this.clearDocumentInstances(docId).then(response => { + if (response.deletedCount) { + debug('Removed %s documents from the %s index', response.deletedCount, this.searchCollection) } - if (mod) { - mod.find(query, (err, docs) => { - if (err) { - return help.sendBackJSON(500, res, next)(err) - } + return this.insertWordInstances(results.results, docId) + }) + }) + .catch(err => { + console.log(err) + }) +} + +/** + * Insert Document word instances. + * @param {Class} analyser Instance of document populated analyser class. + * @param {[type]} words Results from database query for word list. + * @param {String} docId Current document ID. + * @return {Promise} Insert query for document word instances. + */ +Search.prototype.insertWordInstances = function (words, docId) { + let instances = this.analyser.getWordInstances() - // add data to final results array, keyed - // on collection name - results[name] = docs + if (!instances) return + + instances = instances.filter(instance => { + return words.find(wordResult => { + return wordResult.word === instance.word + }) + }) - idx++ + let data = instances.map(instance => { + let word = words.find(wordResult => wordResult.word === instance.word)._id.toString() - // send back data - if (idx === collections.length) { - return help.sendBackJSON(200, res, next)(err, results) + return Object.assign(instance, {word, document: docId}) + }) + + // Insert word instances into search collection. 
+ this.searchConnection.datastore.insert({ + data: data, + collection: this.searchCollection, + options: {}, + schema: this.getSearchSchema().fields, + settings: this.getSearchSchema().settings + }) +} + +/** + * Remove entries in the collection's search collection that match the specified document ID + * + * @param {String} docId - the document ID to remove word instances for + * @return {Promise} - Database delete query. + */ +Search.prototype.clearDocumentInstances = function (docId) { + return this.searchConnection.datastore.delete({ + query: { document: docId }, + collection: this.searchCollection, + schema: this.getSearchSchema().fields + }) +} + +/** + * Index an entire collection, in batches of documents + * + * @param {Number} page - the current page of documents to process + * @param {Number} limit - the number of documents to process + */ +Search.prototype.batchIndex = function (page = 1, limit = 1000) { + if (!Object.keys(this.indexableFields).length) return + + let skip = (page - 1) * limit + + let fields = Object.assign({}, ...Object.keys(this.indexableFields).map(key => { + return {[key]: 1} + })) + + let options = { + skip, + page, + limit, + fields + } + + debug(`Indexing page ${page} (${limit} per page)`) + console.log(`Indexing page ${page} (${limit} per page)`) + + if (this.model.connection.db) { + this.runBatchIndex(options) + } + + this.model.connection.once('connect', database => { + this.runBatchIndex(options) + }) +} + +/** + * Run Batch Index + * Performs indexing across an entire collection. + * @param {Object} options find query options. + */ +Search.prototype.runBatchIndex = function (options) { + this.model.connection.datastore.find({ + query: {}, + collection: this.model.name, + options: options, + schema: this.model.schema, + settings: this.model.settings + }).then(({metadata, results}) => { + if (results && results.length) { + debug(`Indexed ${results.length} ${results.length === 1 ? 
'record' : 'records'} for ${this.model.name}`) + + if (results.length > 0) { + this.index(results).then(response => { + debug(`Indexed page ${options.page}/${metadata.totalPages}`) + + if (options.page * options.limit < metadata.totalCount) { + return this.batchIndex(options.page + 1, options.limit) } }) } - }) + } }) } + +/** + * Return the template for the "words" collection schema, used to create the database collection + * @return {Object} - the collection schema for the "words" collection + */ +Search.prototype.getWordSchema = function () { + return { + fields: { + word: { + type: 'String', + required: true + } + }, + settings: { + cache: true, + index: [{ + keys: { word: 1 }, + options: { unique: true } + }] + } + } +} + +/** + * Return the template for the current collection's "search" collection schema, used to create the database collection + * @return {Object} - the collection schema for the "search" collection + */ +Search.prototype.getSearchSchema = function () { + return { + fields: { + word: { + type: 'Reference', + required: true + }, + document: { + type: 'Reference', + required: true + }, + weight: { + type: 'Number', + required: true + } + }, + settings: { + cache: true, + index: [ + { + keys: { word: 1 } + }, + { + keys: { document: 1 } + }, + { + keys: { weight: 1 } + } + ] + } + } +} + +module.exports = Search diff --git a/package-lock.json b/package-lock.json index e74c2df2..4d4f8647 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,13 +1,13 @@ { "name": "@dadi/api", - "version": "4.0.0-rc3", + "version": "4.0.4", "lockfileVersion": 1, "requires": true, "dependencies": { "@commitlint/cli": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/@commitlint/cli/-/cli-4.1.1.tgz", - "integrity": "sha512-kt4Ib/h6yRGr+vqc+uV8uHzq4s9tbOQffookE+SphDS9FvtZt1UUgBdNlOe4V4bkY6qKocR+RlNRw1+gOCw3hg==", + "integrity": "sha1-NJAonpCBegrCtrkRurFLrdjegPU=", "dev": true, "requires": { "@commitlint/core": "4.3.0", @@ -41,7 +41,7 @@ 
"@commitlint/core": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/@commitlint/core/-/core-4.3.0.tgz", - "integrity": "sha512-oWAlGWIOoquQVErLeAXFUOlAQDVJxa0196z7kt/BNcEGjfqRrEwxncZ9GFjycGYb0CyS/YQ1VDe4l8YfrSmbQg==", + "integrity": "sha1-6IGgoWWUrzreCb5NErdwuZE7wmE=", "dev": true, "requires": { "@marionebl/conventional-commits-parser": "3.0.0", @@ -161,6 +161,12 @@ "moment": "2.19.3" } }, + "@dadi/metadata": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@dadi/metadata/-/metadata-2.0.0.tgz", + "integrity": "sha512-GI0v4QEROhkDeIKmfMrHD8+9rOdgc8KbLLIZcY/FU5pYfudil7Njj+8DW2vId1tUrUvbZn3875h8TuEN9Zkrig==", + "dev": true + }, "@dadi/status": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/@dadi/status/-/status-1.0.3.tgz", @@ -206,7 +212,7 @@ "@marionebl/git-raw-commits": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/@marionebl/git-raw-commits/-/git-raw-commits-1.2.0.tgz", - "integrity": "sha512-kI7s1W+GFMLJkuesgBdMgr1NCkChqfhP+wT6REoPsgtJGGwN0L/84gSw9pyH3u1bAK3uHjAkGZQ2bileBVVWtg==", + "integrity": "sha1-fNim38Calt+Y2PvpF1xZccwHyCs=", "dev": true, "requires": { "dargs": "4.1.0", @@ -230,7 +236,7 @@ "glob": { "version": "7.1.2", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", - "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==", + "integrity": "sha1-wZyd+aAocC1nhhI4SmVSQExjbRU=", "dev": true, "requires": { "fs.realpath": "1.0.0", @@ -244,7 +250,7 @@ "minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=", "dev": true, "requires": { "brace-expansion": "1.1.11" @@ -253,7 +259,7 @@ "rimraf": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.2.tgz", - "integrity": 
"sha512-lreewLK/BlghmxtfH36YYVg1i8IAce4TI7oao75I1g245+6BctqTVQiBP3YUJ9C6DQOXJmkYR9X9fCLtCOJc5w==", + "integrity": "sha1-LtgVDSShbqhlHm1u8PR8QVjOejY=", "dev": true, "requires": { "glob": "7.1.2" @@ -300,6 +306,11 @@ } } }, + "afinn-165": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/afinn-165/-/afinn-165-1.0.2.tgz", + "integrity": "sha512-oVbXkteWA6XgYndv3dXYVvulStflVYQtR2K+zp2PyaVhPkkOhZ8tAvk9V7cwaI43GwZaNqRoC2VTpoaWmFyBTA==" + }, "ajv": { "version": "5.5.2", "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", @@ -373,6 +384,14 @@ "normalize-path": "2.1.1" } }, + "apparatus": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/apparatus/-/apparatus-0.0.10.tgz", + "integrity": "sha512-KLy/ugo33KZA7nugtQ7O0E1c8kQ52N3IvD/XgIh4w/Nr28ypfkwDfA67F1ev4N1m5D+BOk1+b2dEJDfpj/VvZg==", + "requires": { + "sylvester": "0.0.12" + } + }, "append-transform": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-0.4.0.tgz", @@ -958,6 +977,12 @@ } } }, + "browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, "buffer": { "version": "4.9.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", @@ -1263,7 +1288,7 @@ "color-convert": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.1.tgz", - "integrity": "sha512-mjGanIiwQJskCC18rPR6OmrZ6fm2Lc7PeGFYwCmy5J34wC6F1PzdGL6xeMfmgicfYcNLGuVFA3WzXtIDCQSZxQ==", + "integrity": "sha1-wSYRB66y8pTr/+ye2eytUppgl+0=", "requires": { "color-name": "1.1.3" } @@ -1302,6 +1327,12 @@ "dot-prop": "3.0.0" } }, + "component-emitter": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz", + "integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=", + "dev": true 
+ }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -1360,7 +1391,7 @@ "conventional-changelog-angular": { "version": "1.6.2", "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-1.6.2.tgz", - "integrity": "sha512-LiGZkMJOCJFLNzDlZo3f+DpblcDSzsaYHUWhC+kzsqq+no4qwDP3uW0HVIHueXT4jJDhYNaE9t/XCD7vu7xR1g==", + "integrity": "sha1-CoETE95GMm5eThHawoHWHP4fAMQ=", "dev": true, "requires": { "compare-func": "1.3.2", @@ -1393,6 +1424,12 @@ } } }, + "cookiejar": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.2.tgz", + "integrity": "sha512-Mw+adcfzPxcPeI+0WlvRrr/3lGVO0bD75SxX6811cxSh1Wbxx7xZBGK1eVtDf6si8rg2lhnUjsVLMFMfbRIuwA==", + "dev": true + }, "copy-descriptor": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", @@ -1412,7 +1449,7 @@ "cosmiconfig": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-3.1.0.tgz", - "integrity": "sha512-zedsBhLSbPBms+kE7AH4vHg6JsKDz6epSv2/+5XHs8ILHlgDciSJfSWf8sX9aQ52Jb7KI7VswUTsLpR/G0cr2Q==", + "integrity": "sha1-ZAqUv5hH8yGABAPNJzr2BmXHM5c=", "dev": true, "requires": { "is-directory": "0.3.1", @@ -1432,6 +1469,88 @@ } } }, + "coveralls": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/coveralls/-/coveralls-3.0.2.tgz", + "integrity": "sha512-Tv0LKe/MkBOilH2v7WBiTBdudg2ChfGbdXafc/s330djpF3zKOmuehTeRwjXWc7pzfj9FrDUTA7tEx6Div8NFw==", + "dev": true, + "requires": { + "growl": "1.10.5", + "js-yaml": "3.12.0", + "lcov-parse": "0.0.10", + "log-driver": "1.2.7", + "minimist": "1.2.0", + "request": "2.87.0" + }, + "dependencies": { + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + 
"form-data": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz", + "integrity": "sha1-SXBJi+YEwgwAXU9cI67NIda0kJk=", + "dev": true, + "requires": { + "asynckit": "0.4.0", + "combined-stream": "1.0.6", + "mime-types": "2.1.17" + }, + "dependencies": { + "combined-stream": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", + "integrity": "sha1-cj599ugBrFYTETp+RFqbactjKBg=", + "dev": true, + "requires": { + "delayed-stream": "1.0.0" + } + } + } + }, + "js-yaml": { + "version": "3.12.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.0.tgz", + "integrity": "sha512-PIt2cnwmPfL4hKNwqeiuz4bKfnzHTBv6HyVgjahA6mPLwPDzjDWrplJBMjHUFxku/N3FlmrbyPclad+I+4mJ3A==", + "dev": true, + "requires": { + "argparse": "1.0.9", + "esprima": "4.0.1" + } + }, + "request": { + "version": "2.87.0", + "resolved": "https://registry.npmjs.org/request/-/request-2.87.0.tgz", + "integrity": "sha512-fcogkm7Az5bsS6Sl0sibkbhcKsnyon/jV1kF3ajGmF0c8HrttdKTPRT9hieOaQHA5HEq6r8OyWOo/o781C1tNw==", + "dev": true, + "requires": { + "aws-sign2": "0.7.0", + "aws4": "1.6.0", + "caseless": "0.12.0", + "combined-stream": "1.0.5", + "extend": "3.0.1", + "forever-agent": "0.6.1", + "form-data": "2.3.2", + "har-validator": "5.0.3", + "http-signature": "1.2.0", + "is-typedarray": "1.0.0", + "isstream": "0.1.2", + "json-stringify-safe": "5.0.1", + "mime-types": "2.1.17", + "oauth-sign": "0.8.2", + "performance-now": "2.1.0", + "qs": "6.5.1", + "safe-buffer": "5.1.1", + "tough-cookie": "2.3.3", + "tunnel-agent": "0.6.0", + "uuid": "3.3.2" + } + } + } + }, "create-error-class": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/create-error-class/-/create-error-class-3.0.2.tgz", @@ -1443,7 +1562,7 @@ "create-react-class": { "version": "15.6.3", "resolved": "https://registry.npmjs.org/create-react-class/-/create-react-class-15.6.3.tgz", - "integrity": 
"sha512-M+/3Q6E6DLO6Yx3OwrWjwHBnvfXXYA7W+dFjt/ZDBemHO1DDZhsalX/NUtnTYclN6GfnBDRh4qRHjcDHmlJBJg==", + "integrity": "sha1-LXMjf7P5cK5uvgEanmb0bbyoADY=", "dev": true, "requires": { "fbjs": "0.8.16", @@ -1535,7 +1654,7 @@ "debug": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "integrity": "sha1-W7WgZyYotkFJVmuhaBnmFRjGcmE=", "requires": { "ms": "2.0.0" } @@ -1738,6 +1857,12 @@ } } }, + "diff": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "dev": true + }, "doctrine": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", @@ -1985,7 +2110,7 @@ "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "integrity": "sha1-XRKFFd8TT/Mn6QpMk/Tgd6U2NB8=", "dev": true, "requires": { "ms": "2.0.0" @@ -2287,6 +2412,12 @@ "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" }, + "faker": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/faker/-/faker-4.1.0.tgz", + "integrity": "sha1-HkW7vsxndLPBlfrSg1EJxtdIzD8=", + "dev": true + }, "fakeredis": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/fakeredis/-/fakeredis-1.0.3.tgz", @@ -2509,6 +2640,12 @@ "samsam": "1.1.2" } }, + "formidable": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.1.tgz", + "integrity": "sha512-Fs9VRguL0gqGHkXS5GQiMCr1VhZBxz0JnJs4JmMp/2jL18Fmbzvv7vOFRU+U8TBkHEE/CX1qDXzJplVULgsLeg==", + "dev": true + }, "fragment-cache": { "version": "0.2.1", "resolved": 
"https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", @@ -3009,7 +3146,7 @@ "fsu": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/fsu/-/fsu-1.0.4.tgz", - "integrity": "sha512-T8DGjqL3DNJsA/uHWUTIZhJ/VuEqi3QdNsQBAWpKtoIPS/8rK4HWG79ae2+HEw+Cz9e5lIsWghpoXCcNsrDPFA==", + "integrity": "sha1-WGqPvY0ZrN8zDOy88X1kHpw3C6A=", "dev": true }, "generate-function": { @@ -3167,6 +3304,12 @@ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", "integrity": "sha1-Dovf5NHduIVNZOBOp8AOKgJuVlg=" }, + "growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true + }, "handlebars": { "version": "4.0.11", "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.0.11.tgz", @@ -3263,6 +3406,12 @@ "sntp": "2.1.0" } }, + "he": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", + "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", + "dev": true + }, "hoek": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/hoek/-/hoek-4.2.0.tgz", @@ -4009,6 +4158,11 @@ "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", "integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=" }, + "js-promise-queue": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/js-promise-queue/-/js-promise-queue-1.1.0.tgz", + "integrity": "sha512-cPGuny7ogxJ8StsPKMxcWC4d6g5xngiYgbtVXitksYIsKM05KBKLzZLxMr7rWTulCp0wEcJ8Nh973z4Do1I2lg==" + }, "js-tokens": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz", @@ -4059,7 +4213,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz", "integrity": "sha1-mnWdOcXy/1A/1TAGRu1EX4jE+a8=", - "dev": true, "requires": { "jsonify": "0.0.0" } @@ -4085,8 +4238,7 @@ "jsonify": { "version": 
"0.0.0", "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.0.tgz", - "integrity": "sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=", - "dev": true + "integrity": "sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=" }, "jsonparse": { "version": "1.3.1", @@ -4203,6 +4355,11 @@ } } }, + "langs": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/langs/-/langs-2.0.0.tgz", + "integrity": "sha1-AMMs5IFSpJphRFC5uiYyq1igo2Q=" + }, "latest-version": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-2.0.0.tgz", @@ -4227,6 +4384,12 @@ "invert-kv": "1.0.0" } }, + "lcov-parse": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/lcov-parse/-/lcov-parse-0.0.10.tgz", + "integrity": "sha1-GwuP+ayceIklBYK3C3ExXZ2m2aM=", + "dev": true + }, "length-stream": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/length-stream/-/length-stream-0.1.1.tgz", @@ -4309,7 +4472,7 @@ "lodash.isfunction": { "version": "3.0.9", "resolved": "https://registry.npmjs.org/lodash.isfunction/-/lodash.isfunction-3.0.9.tgz", - "integrity": "sha512-AirXNj15uRIMMPihnkInB4i3NHeb4iBtNg9WRWuK2o31S+ePwwNmDPaTL3o7dTJ+VXNZim7rFs4rxN4YU1oUJw==", + "integrity": "sha1-Bt4l302zJ6yTGYHRvbBn5a9o0FE=", "dev": true }, "lodash.isinteger": { @@ -4356,6 +4519,12 @@ "lodash._reinterpolate": "3.0.0" } }, + "log-driver": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz", + "integrity": "sha512-U7KCmLdqsGHBLeWqYlFA0V0Sl6P08EE1ZrmA9cxjUE0WVqT9qnyVDPz1kzpFEP0jdJuFnasWIfSd7fsaNXkpbg==", + "dev": true + }, "log-symbols": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-2.2.0.tgz", @@ -4364,6 +4533,12 @@ "chalk": "2.3.1" } }, + "lokijs": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/lokijs/-/lokijs-1.5.5.tgz", + "integrity": "sha1-HCH4KvdXkDf63nueSBNIXCNwi7Y=", + "dev": true + }, "lolex": { "version": "1.3.2", "resolved": 
"https://registry.npmjs.org/lolex/-/lolex-1.3.2.tgz", @@ -4460,6 +4635,12 @@ "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=", "dev": true }, + "methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=", + "dev": true + }, "micromatch": { "version": "3.1.10", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", @@ -4555,10 +4736,69 @@ } } }, + "mocha": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz", + "integrity": "sha512-2IUgKDhc3J7Uug+FxMXuqIyYzH7gJjXECKe/w43IGgQHTSj3InJi+yAA7T24L9bQMRKiUEHxEX37G5JpVUGLcQ==", + "dev": true, + "requires": { + "browser-stdout": "1.3.1", + "commander": "2.15.1", + "debug": "3.1.0", + "diff": "3.5.0", + "escape-string-regexp": "1.0.5", + "glob": "7.1.2", + "growl": "1.10.5", + "he": "1.1.1", + "minimatch": "3.0.4", + "mkdirp": "0.5.1", + "supports-color": "5.4.0" + }, + "dependencies": { + "commander": { + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", + "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==", + "dev": true + }, + "glob": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", + "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==", + "dev": true, + "requires": { + "fs.realpath": "1.0.0", + "inflight": "1.0.6", + "inherits": "2.0.3", + "minimatch": "3.0.4", + "once": "1.4.0", + "path-is-absolute": "1.0.1" + } + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "1.1.11" + } + }, + "supports-color": { + "version": "5.4.0", + 
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", + "integrity": "sha512-zjaXglF5nnWpsq470jSv6P9DwPvgLkuapYmfDm3JWOm0vkNTVF2tI4UrN2r6jH1qM/uc/WtxYY1hYoA2dOKj5w==", + "dev": true, + "requires": { + "has-flag": "3.0.0" + } + } + } + }, "mochawesome": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/mochawesome/-/mochawesome-2.3.1.tgz", - "integrity": "sha512-amkBeQZz/IUTm2o1VLHiih30RHdt4uMAAhyvd5oJ5FMq5gCmqzFS29pobVyGajqzPvC+na8U+nzO8DtftQoLLw==", + "integrity": "sha1-Q7JEXkuiX1hbzaaeVZLAgV+cNwk=", "dev": true, "requires": { "babel-runtime": "6.26.0", @@ -4604,7 +4844,7 @@ "diff": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/diff/-/diff-3.4.0.tgz", - "integrity": "sha512-QpVuMTEoJMF7cKzi6bvWhRulU1fZqZnvyVQgNhPaxxuTYwyjn/j1v9falseQ/uXWwPnO56RBfwtg4h/EQXmucA==", + "integrity": "sha1-sdhVB9rzlkgo3lSzfQ1zumfdpWw=", "dev": true }, "lodash": { @@ -4641,7 +4881,7 @@ "mochawesome-report-generator": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/mochawesome-report-generator/-/mochawesome-report-generator-2.3.2.tgz", - "integrity": "sha512-T2bY3ezsZuKmM9DyZff/F7WlhHVfEq2Y2dZb9PdfT+ZclFz/b7iIqXBThi6j5Y+xUSOV1LY4rg072Fc0xdmiRQ==", + "integrity": "sha1-OmiFmW0yg2Ej/kAttDdOkJ5UoSc=", "dev": true, "requires": { "chalk": "1.1.3", @@ -4745,6 +4985,16 @@ } } }, + "mock-require": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mock-require/-/mock-require-3.0.2.tgz", + "integrity": "sha512-aD/Y1ZFHqw5pHg3HVQ50dLbfaAAcytS6sqLuhP51Dk3TSPdFb2VkSAa3mjrHifLIlGAtwQHJHINafAyqAne7vA==", + "dev": true, + "requires": { + "get-caller-file": "1.0.2", + "normalize-path": "2.1.1" + } + }, "module-not-found-error": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/module-not-found-error/-/module-not-found-error-1.0.1.tgz", @@ -4808,6 +5058,18 @@ "integrity": "sha1-IKMYwwy0X3H+et+/eyHJnBRy7xE=", "dev": true }, + "natural": { + "version": "0.6.1", + "resolved": 
"https://registry.npmjs.org/natural/-/natural-0.6.1.tgz", + "integrity": "sha1-Iwe/BPFyAShq6hRjCgj31/vC0GA=", + "requires": { + "afinn-165": "1.0.2", + "apparatus": "0.0.10", + "json-stable-stringify": "1.0.1", + "sylvester": "0.0.12", + "underscore": "1.8.3" + } + }, "natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -4823,7 +5085,7 @@ "node-fetch": { "version": "1.7.3", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz", - "integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==", + "integrity": "sha1-mA9vcthSEaU0fGsrwYxbhMPrR+8=", "dev": true, "requires": { "encoding": "0.1.12", @@ -5050,7 +5312,7 @@ "p-limit": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.2.0.tgz", - "integrity": "sha512-Y/OtIaXtUPr4/YpMv1pCL5L5ed0rumAaAeBSj12F+bSlMdys7i8oQF/GUJmfpTS/QoaRrS/k6pma29haJpsMng==", + "integrity": "sha1-DpK2vty1nwIsE9DxlJ3ILRWQnxw=", "dev": true, "requires": { "p-try": "1.0.0" @@ -5257,7 +5519,7 @@ "promise": { "version": "7.3.1", "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", - "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", + "integrity": "sha1-BktyYCsY+Q8pGSuLG8QY/9Hr078=", "dev": true, "requires": { "asap": "2.0.6" @@ -5838,7 +6100,7 @@ "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "integrity": "sha1-XRKFFd8TT/Mn6QpMk/Tgd6U2NB8=", "requires": { "ms": "2.0.0" } @@ -5979,7 +6241,7 @@ "diff": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/diff/-/diff-3.4.0.tgz", - "integrity": "sha512-QpVuMTEoJMF7cKzi6bvWhRulU1fZqZnvyVQgNhPaxxuTYwyjn/j1v9falseQ/uXWwPnO56RBfwtg4h/EQXmucA==", + "integrity": 
"sha1-sdhVB9rzlkgo3lSzfQ1zumfdpWw=", "dev": true }, "formatio": { @@ -6000,7 +6262,7 @@ "samsam": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/samsam/-/samsam-1.3.0.tgz", - "integrity": "sha512-1HwIYD/8UlOtFS3QO3w7ey+SdSDFE4HRNLZoZRYVQefrOY3l17epswImeB1ijgJFQJodIaHcwkp3r/myBjFVbg==", + "integrity": "sha1-jR2TUOJWItow3j5EumkrUiGrfFA=", "dev": true } } @@ -6216,7 +6478,7 @@ "split2": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/split2/-/split2-2.2.0.tgz", - "integrity": "sha512-RAb22TG39LhI31MbreBgIuKiIKhVsawfTgEGqKHTK87aG+ul/PB8Sqoi3I7kVdRWiCfrKxK3uo4/YUkpNvhPbw==", + "integrity": "sha1-GGsldbz4PoW30YRldWI47k7kJJM=", "dev": true, "requires": { "through2": "2.0.3" @@ -6387,6 +6649,62 @@ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=" }, + "superagent": { + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-3.8.2.tgz", + "integrity": "sha512-gVH4QfYHcY3P0f/BZzavLreHW3T1v7hG9B+hpMQotGQqurOvhv87GcMCd6LWySmBuf+BDR44TQd0aISjVHLeNQ==", + "dev": true, + "requires": { + "component-emitter": "1.2.1", + "cookiejar": "2.1.2", + "debug": "3.1.0", + "extend": "3.0.1", + "form-data": "2.3.2", + "formidable": "1.2.1", + "methods": "1.1.2", + "mime": "1.6.0", + "qs": "6.5.1", + "readable-stream": "2.3.4" + }, + "dependencies": { + "combined-stream": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", + "integrity": "sha1-cj599ugBrFYTETp+RFqbactjKBg=", + "dev": true, + "requires": { + "delayed-stream": "1.0.0" + } + }, + "form-data": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz", + "integrity": "sha1-SXBJi+YEwgwAXU9cI67NIda0kJk=", + "dev": true, + "requires": { + "asynckit": "0.4.0", + "combined-stream": "1.0.6", + "mime-types": "2.1.17" + } + }, + "mime": { + "version": "1.6.0", + "resolved": 
"https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "dev": true + } + } + }, + "supertest": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/supertest/-/supertest-3.1.0.tgz", + "integrity": "sha512-O44AMnmJqx294uJQjfUmEyYOg7d9mylNFsMw/Wkz4evKd1njyPrtCN+U6ZIC7sKtfEVQhfTqFFijlXx8KP/Czw==", + "dev": true, + "requires": { + "methods": "1.1.2", + "superagent": "3.8.2" + } + }, "supports-color": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.2.0.tgz", @@ -6395,6 +6713,11 @@ "has-flag": "3.0.0" } }, + "sylvester": { + "version": "0.0.12", + "resolved": "https://registry.npmjs.org/sylvester/-/sylvester-0.0.12.tgz", + "integrity": "sha1-WohEFc0tACxX56OqyZRip1zp/bQ=" + }, "table": { "version": "3.8.3", "resolved": "https://registry.npmjs.org/table/-/table-3.8.3.tgz", @@ -6488,13 +6811,13 @@ "tcomb": { "version": "3.2.24", "resolved": "https://registry.npmjs.org/tcomb/-/tcomb-3.2.24.tgz", - "integrity": "sha512-N9IrL2iIyS/f4+WHYZaMh04ZqDL8yEit9cVdnn+fOuL6jbKo1fusNswHOjSo/kbYwLUKRS1OlQmAkyeNxyEUhA==", + "integrity": "sha1-f0JwU8w5O1mXxMPYWcogQRGAiHs=", "dev": true }, "tcomb-validation": { "version": "3.4.1", "resolved": "https://registry.npmjs.org/tcomb-validation/-/tcomb-validation-3.4.1.tgz", - "integrity": "sha512-urVVMQOma4RXwiVCa2nM2eqrAomHROHvWPuj6UkDGz/eb5kcy0x6P0dVt6kzpUZtYMNoAqJLWmz1BPtxrtjtrA==", + "integrity": "sha1-p2luwXbOVqCB2eAZ+LcypaiJS2U=", "dev": true, "requires": { "tcomb": "3.2.24" @@ -6509,7 +6832,7 @@ "text-extensions": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/text-extensions/-/text-extensions-1.7.0.tgz", - "integrity": "sha512-AKXZeDq230UaSzaO5s3qQUZOaC7iKbzq0jOFL614R7d9R593HLqAOL0cYoqLdkNrjBSOdmoQI06yigq1TSBXAg==", + "integrity": "sha1-+qq6JiXtdG1WiiPk0KrNm/CKizk=", "dev": true }, "text-table": { @@ -6647,7 +6970,7 @@ "type-detect": { "version": "4.0.8", 
"resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", - "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "integrity": "sha1-dkb7XxiHHPu3dJ5pvTmmOI63RQw=", "dev": true }, "type-is": { @@ -6682,7 +7005,7 @@ "ua-parser-js": { "version": "0.7.17", "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.17.tgz", - "integrity": "sha512-uRdSdu1oA1rncCQL7sCj8vSyZkgtL7faaw9Tc9rZ3mGgraQ7+Pdx7w5mnOSF3gw9ZNG6oc+KXfkon3bKuROm0g==", + "integrity": "sha1-6exflJi57JEOeuOsYmqAXE0J7Kw=", "dev": true }, "uglify-js": { diff --git a/package.json b/package.json index 86cac148..098518b9 100644 --- a/package.json +++ b/package.json @@ -23,6 +23,7 @@ "@dadi/et": "^2.0.0", "@dadi/format-error": "^1.7.0", "@dadi/logger": "^1.4.1", + "@dadi/metadata": "^2.0.0", "@dadi/status": "latest", "async": "^2.6.1", "aws-sdk": "2.249.1", @@ -39,12 +40,14 @@ "deepmerge": "^2.1.0", "fs-extra": "^3.0.1", "imagesize": "^1.0.0", + "js-promise-queue": "^1.1.0", "jsonwebtoken": "^8.0.0", "langs": "^2.0.0", "length-stream": "^0.1.1", "mime": "^2.3.1", "mkdirp": "^0.5.1", "moment": "2.19.3", + "natural": "^0.6.1", "object-path": "^0.11.4", "parse-comments": "0.4.3", "path-to-regexp": "~1.7.0", @@ -63,17 +66,17 @@ "devDependencies": { "@commitlint/cli": "~4.1.1", "@commitlint/config-angular": "~3.1.1", - "@dadi/metadata": "^2.0.0", "aws-sdk-mock": "1.6.1", "coveralls": "^3.0.1", "env-test": "1.0.0", + "faker": "^4.1.0", "fakeredis": "1.0.3", "form-data": "2.1.4", "husky": "^0.13.3", "istanbul": "^1.1.0-alpha.1", "istanbul-cobertura-badger": "^1.3.1", "lokijs": "^1.5.3", - "mocha": "~4.0.1", + "mocha": "^5.2.0", "mochawesome": "^2.1.0", "mock-require": "^3.0.2", "proxyquire": "^1.7.4", @@ -85,7 +88,7 @@ "uuid": "^3.3.2" }, "dataConnectorDependencies": { - "@dadi/api-mongodb": "4.1.0" + "@dadi/api-mongodb": "4.2.0" }, "greenkeeper": { "ignore": [ diff --git a/test/acceptance/acl/clients-api/roles-delete.js 
b/test/acceptance/acl/clients-api/roles-delete.js index 9a474dbd..41815cb2 100644 --- a/test/acceptance/acl/clients-api/roles-delete.js +++ b/test/acceptance/acl/clients-api/roles-delete.js @@ -31,6 +31,10 @@ module.exports = () => { }) }) + afterEach(done => { + help.removeACLData(done) + }) + describe('error states', () => { it('should return 401 if the request does not include a valid bearer token', done => { client diff --git a/test/acceptance/acl/collections-api.js b/test/acceptance/acl/collections-api.js index 2fcde1a0..ac68a381 100644 --- a/test/acceptance/acl/collections-api.js +++ b/test/acceptance/acl/collections-api.js @@ -20,6 +20,14 @@ let docs describe('Collections API', () => { before(done => { + config.set('search', { + 'enabled': true, + 'minQueryLength': 3, + 'wordCollection': 'words', + 'datastore': './../../../test/test-connector', + 'database': 'testdb' + }) + app.start(err => { if (err) return done(err) @@ -62,10 +70,113 @@ describe('Collections API', () => { after(done => { help.removeACLData(() => { + config.set('search', { + 'enabled': false, + 'minQueryLength': 3, + 'wordCollection': 'words', + 'datastore': './../../../test/test-connector', + 'database': 'testdb' + }) + app.stop(done) }) }) + describe('Search', function () { + it('should return 403 with no permissions', function (done) { + let testClient = { + clientId: 'apiClient', + secret: 'someSecret', + resources: { 'collection:testdb_test-schema': {} } + } + + help.createACLClient(testClient).then(() => { + client + .post(config.get('auth.tokenUrl')) + .set('content-type', 'application/json') + .send(testClient) + .expect(200) + .end((err, res) => { + if (err) return done(err) + + let bearerToken = res.body.accessToken + + client + .get(`/vtest/testdb/test-schema/search?q=xyz`) + .set('content-type', 'application/json') + .set('Authorization', `Bearer ${bearerToken}`) + .end((err, res) => { + if (err) return done(err) + res.statusCode.should.eql(403) + done() + }) + }) + }) + }) + + 
it('should return 403 with no read permission', function (done) { + let testClient = { + clientId: 'apiClient', + secret: 'someSecret', + resources: { 'collection:testdb_test-schema': PERMISSIONS.NO_READ } + } + + help.createACLClient(testClient).then(() => { + client + .post(config.get('auth.tokenUrl')) + .set('content-type', 'application/json') + .send(testClient) + .expect(200) + .end((err, res) => { + if (err) return done(err) + + let bearerToken = res.body.accessToken + + client + .get(`/vtest/testdb/test-schema/search?q=xyz`) + .set('content-type', 'application/json') + .set('Authorization', `Bearer ${bearerToken}`) + .end((err, res) => { + if (err) return done(err) + res.statusCode.should.eql(403) + done() + }) + }) + }) + }) + + it('should return 200 with read permission', function (done) { + let testClient = { + clientId: 'apiClient', + secret: 'someSecret', + resources: { 'collection:testdb_test-schema': PERMISSIONS.READ } + } + + help.createACLClient(testClient).then(() => { + client + .post(config.get('auth.tokenUrl')) + .set('content-type', 'application/json') + .send(testClient) + .expect(200) + .end((err, res) => { + if (err) return done(err) + + let bearerToken = res.body.accessToken + + client + .get(`/vtest/testdb/test-schema/search?q=fghj`) + .set('content-type', 'application/json') + .set('Authorization', `Bearer ${bearerToken}`) + .end((err, res) => { + if (err) return done(err) + res.statusCode.should.eql(200) + done() + }) + }) + }) + }) + }) + describe('GET', function () { it('should return 403 with no permissions', function (done) { let testClient = { @@ -198,10 +309,6 @@ describe('Collections API', () => { resources: { 'collection:testdb_test-schema': PERMISSIONS.READ_EXCLUDE_FIELDS } } - let params = { - fields: JSON.stringify({ field1: 1, title: 1 }) - } - help.createACLClient(testClient).then(() => { client .post(config.get('auth.tokenUrl')) @@ -212,10 +319,9 @@ describe('Collections API', () => { if (err) return done(err) let 
bearerToken = res.body.accessToken - let query = require('querystring').stringify(params) client - .get(`/vtest/testdb/test-schema/?${query}`) + .get('/vtest/testdb/test-schema/?fields={"field1":1,"title":1}') .set('content-type', 'application/json') .set('Authorization', `Bearer ${bearerToken}`) .end((err, res) => { @@ -294,7 +400,7 @@ describe('Collections API', () => { done() }) }) - }) + }) }) }) @@ -362,7 +468,7 @@ describe('Collections API', () => { done() }) }) - }) + }) }) }) @@ -515,9 +621,9 @@ describe('Collections API', () => { let modelSettings = Object.assign({}, app.components['/vtest/testdb/test-schema'].model.settings) app.components['/vtest/testdb/test-schema'].model.settings.authenticate = [ - "POST", - "PUT", - "DELETE" + 'POST', + 'PUT', + 'DELETE' ] client @@ -537,10 +643,10 @@ describe('Collections API', () => { let modelSettings = Object.assign({}, app.components['/vtest/testdb/test-schema'].model.settings) app.components['/vtest/testdb/test-schema'].model.settings.authenticate = [ - "GET", - "POST", - "PUT", - "DELETE" + 'GET', + 'POST', + 'PUT', + 'DELETE' ] client @@ -704,7 +810,7 @@ describe('Collections API', () => { { field1: 'Value one' }, { field1: 'Value two' }, { field1: 'Value three' } - ] + ] help.getBearerTokenWithPermissions({ accessType: 'admin' @@ -769,7 +875,7 @@ describe('Collections API', () => { { field1: 'Value two' }, { field1: 'Value three' }, { field1: 'Value four' } - ] + ] help.getBearerTokenWithPermissions({ accessType: 'admin' @@ -809,9 +915,9 @@ describe('Collections API', () => { }) }) }) - }) + }) }) - }) + }) }) describe('POST', function () { @@ -943,9 +1049,9 @@ describe('Collections API', () => { let modelSettings = Object.assign({}, app.components['/vtest/testdb/test-schema'].model.settings) app.components['/vtest/testdb/test-schema'].model.settings.authenticate = [ - "GET", - "PUT", - "DELETE" + 'GET', + 'PUT', + 'DELETE' ] client @@ -968,10 +1074,10 @@ describe('Collections API', () => { let modelSettings 
= Object.assign({}, app.components['/vtest/testdb/test-schema'].model.settings) app.components['/vtest/testdb/test-schema'].model.settings.authenticate = [ - "GET", - "POST", - "PUT", - "DELETE" + 'GET', + 'POST', + 'PUT', + 'DELETE' ] client @@ -990,7 +1096,7 @@ describe('Collections API', () => { done() }) - }) + }) }) describe('PUT', function () { @@ -1282,7 +1388,7 @@ describe('Collections API', () => { }) }) }) - }) + }) it('should return 200 and not update any documents when the query differs from the filter permission', function (done) { let testClient = { @@ -1378,9 +1484,9 @@ describe('Collections API', () => { let modelSettings = Object.assign({}, app.components['/vtest/testdb/test-schema'].model.settings) app.components['/vtest/testdb/test-schema'].model.settings.authenticate = [ - "GET", - "POST", - "DELETE" + 'GET', + 'POST', + 'DELETE' ] client @@ -1407,10 +1513,10 @@ describe('Collections API', () => { let modelSettings = Object.assign({}, app.components['/vtest/testdb/test-schema'].model.settings) app.components['/vtest/testdb/test-schema'].model.settings.authenticate = [ - "GET", - "POST", - "PUT", - "DELETE" + 'GET', + 'POST', + 'PUT', + 'DELETE' ] client @@ -1433,7 +1539,7 @@ describe('Collections API', () => { done() }) - }) + }) }) describe('DELETE', function () { @@ -1681,9 +1787,9 @@ describe('Collections API', () => { let modelSettings = Object.assign({}, app.components['/vtest/testdb/test-schema'].model.settings) app.components['/vtest/testdb/test-schema'].model.settings.authenticate = [ - "GET", - "POST", - "PUT" + 'GET', + 'POST', + 'PUT' ] client @@ -1710,10 +1816,10 @@ describe('Collections API', () => { let modelSettings = Object.assign({}, app.components['/vtest/testdb/test-schema'].model.settings) app.components['/vtest/testdb/test-schema'].model.settings.authenticate = [ - "GET", - "POST", - "PUT", - "DELETE" + 'GET', + 'POST', + 'PUT', + 'DELETE' ] client @@ -1736,6 +1842,6 @@ describe('Collections API', () => { done() }) - }) + }) 
}) }) diff --git a/test/acceptance/cache.js b/test/acceptance/cache.js index 1e439b0e..61f3e1e6 100755 --- a/test/acceptance/cache.js +++ b/test/acceptance/cache.js @@ -571,78 +571,6 @@ describe('Cache', function (done) { done() }) - it.skip('should throw error if can\'t connect to Redis client', function (done) { - delete require.cache[__dirname + '/../../config.js'] - delete require.cache[__dirname + '/../../dadi/lib/'] - - config.loadFile(config.configPath()) - - should.throws(function () { app.start(function () {}) }, Error) - - app.stop(done) - }) - - it.skip('should initialise Redis client', function (done) { - delete require.cache[__dirname + '/../../config.js'] - config.loadFile(config.configPath()) - - // sinon.stub(redis, 'createClient', fakeredis.createClient); - - delete require.cache[__dirname + '/../../dadi/lib/'] - cache.reset() - - try { - app.stop(function () {}) - // app.start(function(){}); - } catch (err) { - } - - var c = cache(app) - // redis.createClient.restore(); - // c.redisClient.should.not.be.null; - // app.stop(function(){}); - done() - }) - - it.skip('should fallback to directory cache if Redis client fails', function (done) { - delete require.cache[__dirname + '/../../config.js'] - config.loadFile(config.configPath()) - - var EventEmitter = require('events') - var util = require('util') - - /* Fake redis client */ - function Client () { - this.end = function (reallyEnd) { } - EventEmitter.call(this) - } - - util.inherits(Client, EventEmitter) - var redisClient = new Client() - /* End Fake redis client */ - - sinon.stub(redis, 'createClient').returns(redisClient) - - delete require.cache[__dirname + '/../../dadi/lib/'] - cache.reset() - - var c = cache(app) - // redis.createClient.restore(); - - setTimeout(function () { - // emit an error event - redisClient.emit('error', { code: 'CONNECTION_BROKEN'}) - - config.get('caching.directory.enabled').should.eql(true) - - try { - app.stop(done) - } catch (err) { - done() - } - }, 1000) - }) - 
it('should check key exists in Redis', function (done) { delete require.cache[__dirname + '/../../dadi/lib/'] diff --git a/test/acceptance/db-connection.js b/test/acceptance/db-connection.js index 15d5c687..bced59bf 100644 --- a/test/acceptance/db-connection.js +++ b/test/acceptance/db-connection.js @@ -86,6 +86,8 @@ describe('Database connection', () => { res.body.results[0].title.should.eql(mockDocument.title) datastore._spies.index.calledOnce.should.eql(true) + datastore._spies.index.callCount.should.be.above(0) + setTimeout(() => { client .get('/vtest/noauthdb/articles?cache=false') @@ -115,7 +117,7 @@ describe('Database connection', () => { res.statusCode.should.eql(200) res.body.results.length.should.eql(1) res.body.results[0].title.should.eql(mockDocument.title) - datastore._spies.index.calledOnce.should.eql(true) + datastore._spies.index.callCount.should.be.above(0) testConnector._mock.disconnect() @@ -223,7 +225,7 @@ describe('Database connection', () => { res.statusCode.should.eql(200) res.body.results[0].title.should.eql('Dadi') res.body.results[0].published.state.should.eql(1) - datastore._spies.index.calledOnce.should.eql(true) + datastore._spies.index.callCount.should.be.above(0) setTimeout(() => { client @@ -266,7 +268,7 @@ describe('Database connection', () => { res.statusCode.should.eql(200) res.body.results[0].title.should.eql('Dadi') res.body.results[0].published.state.should.eql(1) - datastore._spies.index.calledOnce.should.eql(true) + datastore._spies.index.callCount.should.be.above(0) testConnector._mock.disconnect() @@ -346,7 +348,7 @@ describe('Database connection', () => { res.statusCode.should.eql(204) res.body.should.eql('') - datastore._spies.index.calledOnce.should.eql(true) + datastore._spies.index.callCount.should.be.above(0) testConnector._mock.disconnect() @@ -611,7 +613,7 @@ describe('Database connection', () => { res.statusCode.should.eql(200) res.body.results[0].title.should.eql('A brand new title') 
res.body.results[0].published.state.should.eql(1) - datastore._spies.index.calledOnce.should.eql(true) + datastore._spies.index.callCount.should.be.above(0) done() }) diff --git a/test/acceptance/help.js b/test/acceptance/help.js index ac529b9b..7885aab9 100755 --- a/test/acceptance/help.js +++ b/test/acceptance/help.js @@ -15,7 +15,7 @@ module.exports.createDoc = function (token, done) { .post('/vtest/testdb/test-schema') .set('Authorization', 'Bearer ' + token) .send({field1: ((Math.random() * 10) | 1).toString()}) - //.expect(200) + // .expect(200) .end(function (err, res) { if (err) return done(err) res.body.results.length.should.equal(1) @@ -431,7 +431,7 @@ module.exports.getCollectionMap = function () { databases.forEach(database => { let databasePath = path.join(versionPath, database) let stats = fs.statSync(databasePath) - + if (stats.isDirectory()) { let collections = fs.readdirSync(databasePath) @@ -447,7 +447,7 @@ module.exports.getCollectionMap = function () { map[`/${version}/${database}/${collectionName}`] = require(collectionPath) }) - } + } }) }) @@ -477,7 +477,7 @@ module.exports.writeTempFile = function (filePath, data, callback) { fs.ensureDir( path.dirname(fullPath), err => { - fs.writeFileSync(fullPath, parsedData) + fs.writeFileSync(fullPath, parsedData) } ) diff --git a/test/acceptance/i18n.js b/test/acceptance/i18n.js index 18d92105..afabe33d 100644 --- a/test/acceptance/i18n.js +++ b/test/acceptance/i18n.js @@ -405,7 +405,7 @@ describe('Multi-language', function () { done() }) }) - }) + }) it('should populate a `_i18n` field with a mapping of the language used for each translatable field', done => { config.set('i18n.languages', ['pt', 'fr']) @@ -699,7 +699,7 @@ describe('Multi-language', function () { if (++i === Object.keys(translations).length) { config.set('i18n.languages', configBackup.i18n.languages) - done() + done() } }) }) diff --git a/test/acceptance/search.js b/test/acceptance/search.js new file mode 100644 index 
00000000..4da90ccf --- /dev/null +++ b/test/acceptance/search.js @@ -0,0 +1,415 @@ +const app = require('../../dadi/lib/') +const config = require('../../config') +const help = require('./help') +const model = require('../../dadi/lib/model/') +const should = require('should') +const request = require('supertest') + +// variables scoped for use throughout tests +let bearerToken +let connectionString = 'http://' + config.get('server.host') + ':' + config.get('server.port') +let configBackup = config.get() + +describe('Search', function () { + this.timeout(4000) + + let cleanupFn + + before(function (done) { + help.dropDatabase('testdb', function (err) { + if (err) return done(err) + + config.set('search', { + 'enabled': true, + 'minQueryLength': 3, + 'wordCollection': 'words', + 'datastore': './../../../test/test-connector', + 'database': 'testdb' + }) + + config.set('i18n.languages', ['fr', 'pt']) + + app.start(function () { + help.getBearerTokenWithAccessType('admin', function (err, token) { + if (err) return done(err) + + bearerToken = token + + let schema = { + 'fields': { + 'field1': { + 'type': 'String', + 'required': false + }, + 'title': { + 'type': 'String', + 'required': false, + 'search': { + 'weight': 2 + } + }, + 'field2': { + 'type': 'Number', + 'required': false + }, + 'field3': { + 'type': 'ObjectID', + 'required': false + }, + '_fieldWithUnderscore': { + 'type': 'Object', + 'required': false + } + }, + 'settings': { + 'count': 40 + } + } + + help.writeTempFile( + 'temp-workspace/collections/vtest/testdb/collection.test-schema.json', + schema, + callback1 => { + help.writeTempFile( + 'temp-workspace/collections/v1/testdb/collection.test-schema.json', + schema, + callback2 => { + cleanupFn = () => { + callback1() + callback2() + } + + done() + } + ) + } + ) + }) + }) + }) + }) + + after(function (done) { + config.set('search', { + 'enabled': false + }) + + config.set('i18n.languages', configBackup.i18n.languages) + + app.stop(() => { + cleanupFn() + 
done() + }) + }) + + describe('Disabled', function () { + it('should return 501 when calling a /search endpoint', function (done) { + config.set('search.enabled', false) + + var client = request(connectionString) + client + .get('/vtest/testdb/test-schema/search') + .set('Authorization', 'Bearer ' + bearerToken) + .expect(501) + .end((err, res) => { + config.set('search.enabled', true) + done() + }) + }) + }) + + describe('Enabled', function () { + it('should return 400 when calling a /search endpoint with no query', function (done) { + let searchModel = model('test-schema') + searchModel.searchHandler.init() + + var client = request(connectionString) + client + .get('/vtest/testdb/test-schema/search') + .set('Authorization', 'Bearer ' + bearerToken) + .expect(400) + .end(done) + }) + + it('should return 400 when calling a /search endpoint with a short query', function (done) { + let searchModel = model('test-schema') + searchModel.searchHandler.init() + + var client = request(connectionString) + client + .get('/vtest/testdb/test-schema/search?q=xx') + .set('Authorization', 'Bearer ' + bearerToken) + .expect(400) + .end((err, res) => { + if (err) return done(err) + done() + }) + }) + + it('should return empty results when no documents match a query', function (done) { + let searchModel = model('test-schema') + searchModel.searchHandler.init() + + var client = request(connectionString) + client + .get('/vtest/testdb/test-schema/search?q=xxx') + .set('Authorization', 'Bearer ' + bearerToken) + .expect(200) + .end((err, res) => { + if (err) return done(err) + should.exist(res.body.results) + res.body.results.should.be.Array + res.body.results.length.should.eql(0) + done() + }) + }) + + it('should return results when documents match a query', function (done) { + let searchModel = model('test-schema') + searchModel.searchHandler.init() + + var client = request(connectionString) + + var doc = { + field1: 'The quick brown fox jumps', + title: 'The quick brown fox jumps 
over the lazy dog' + } + + client + .post('/vtest/testdb/test-schema') + .set('Authorization', 'Bearer ' + bearerToken) + .set('content-type', 'application/json') + .send(doc) + .expect(200) + .end((err, res) => { + client + .get('/vtest/testdb/test-schema/search?q=quick%20brown') + .set('Authorization', 'Bearer ' + bearerToken) + .expect(200) + .end((err, res) => { + if (err) return done(err) + should.exist(res.body.results) + + res.body.results.should.be.Array + res.body.results.length.should.eql(1) + + done() + }) + }) + }) + + it('should return metadata containing the search term', function (done) { + let searchModel = model('test-schema') + searchModel.searchHandler.init() + + var client = request(connectionString) + + var doc = { + field1: 'The quick brown fox jumps', + title: 'The quick brown fox jumps over the lazy dog' + } + + client + .post('/vtest/testdb/test-schema') + .set('Authorization', 'Bearer ' + bearerToken) + .set('content-type', 'application/json') + .send(doc) + .expect(200) + .end((err, res) => { + client + .get('/vtest/testdb/test-schema/search?q=quick%20brown') + .set('Authorization', 'Bearer ' + bearerToken) + .expect(200) + .end((err, res) => { + if (err) return done(err) + should.exist(res.body.metadata) + should.exist(res.body.metadata.search) + res.body.metadata.search.should.eql('quick brown') + + done() + }) + }) + }) + }) + + describe('Multi-language', function () { + it('should retrieve all language variations if no `lang` parameter is supplied', done => { + let document = { + title: 'The Little Prince', + 'title:pt': 'O Principezinho', + 'title:fr': 'Le Petit Prince' + } + + var client = request(connectionString) + + client + .post('/vtest/testdb/test-schema') + .set('Authorization', `Bearer ${bearerToken}`) + .send(document) + .expect(200) + .end((err, res) => { + if (err) return done(err) + + client + .get(`/vtest/testdb/test-schema/search?q=Prince`) + .set('Authorization', `Bearer ${bearerToken}`) + .expect(200) + .end((err, 
res) => { + res.body.results.length.should.eql(1) + + let result = res.body.results[0] + + result.title.should.eql(document.title) + result['title:pt'].should.eql(document['title:pt']) + result['title:fr'].should.eql(document['title:fr']) + + should.not.exist(result._i18n) + + done() + }) + }) + }) + + it('should return the translation version of a field when there is one set for the language in the `lang` parameter, falling back to the default language', done => { + config.set('i18n.languages', ['pt', 'fr']) + + let documents = [ + { + title: 'The Little Prince', + 'title:pt': 'O Principezinho', + 'title:fr': 'Le Petit Prince' + }, + { + title: 'The Untranslatable' + } + ] + + var client = request(connectionString) + + client + .post(`/vtest/testdb/test-schema`) + .set('Authorization', `Bearer ${bearerToken}`) + .send(documents) + .expect(200) + .end((err, res) => { + if (err) return done(err) + + client + .get('/vtest/testdb/test-schema/search?q=Principezinho&lang=pt') + .set('Authorization', `Bearer ${bearerToken}`) + .expect(200) + .end((err, res) => { + res.body.results.length.should.eql(2) + + let results = res.body.results + + results[0].title.should.eql(documents[0]['title:pt']) + results[0]._i18n.title.should.eql('pt') + should.not.exist(results[0]['title:pt']) + should.not.exist(results[0]['title:fr']) + + // results[1].title.should.eql(documents[1].title) + // results[1]._i18n.title.should.eql( + // config.get('i18n.defaultLanguage') + // ) + // should.not.exist(results[1]['title:pt']) + // should.not.exist(results[1]['title:fr']) + + config.set('i18n.languages', configBackup.i18n.languages) + + done() + }) + }) + }) + + it('should return the translation version of a field when the fields projection is set to include the field in question', done => { + config.set('i18n.languages', ['pt', 'fr']) + + let documents = [ + { + title: 'The Little Prince', + 'title:pt': 'O Principezinho', + 'title:fr': 'Le Petit Prince' + }, + { + title: 'The Untranslatable' + 
} + ] + + var client = request(connectionString) + + client + .post('/vtest/testdb/test-schema') + .set('Authorization', `Bearer ${bearerToken}`) + .send(documents) + .expect(200) + .end((err, res) => { + if (err) return done(err) + + client + .get(`/vtest/testdb/test-schema/search?q=Principezinho&fields={"title":1}&lang=pt`) + .set('Authorization', `Bearer ${bearerToken}`) + .expect(200) + .end((err, res) => { + let results = res.body.results + + results[0].title.should.eql(documents[0]['title:pt']) + results[0]._i18n.title.should.eql('pt') + should.not.exist(results[0]['title:pt']) + should.not.exist(results[0]['title:fr']) + + // results[1].title.should.eql(documents[1].title) + // results[1]._i18n.title.should.eql( + // config.get('i18n.defaultLanguage') + // ) + // should.not.exist(results[1]['title:pt']) + // should.not.exist(results[1]['title:fr']) + + config.set('i18n.languages', configBackup.i18n.languages) + + done() + }) + }) + }) + + it('should return the original version of a field when the requested language is not part of `i18n.languages`', done => { + config.set('i18n.languages', ['fr']) + + let document = { + title: 'The Little Prince', + 'title:pt': 'O Principezinho', + 'title:fr': 'Le Petit Prince' + } + + var client = request(connectionString) + + client + .post('/vtest/testdb/test-schema') + .set('Authorization', `Bearer ${bearerToken}`) + .send(document) + .expect(200) + .end((err, res) => { + if (err) return done(err) + + client + .get(`/vtest/testdb/test-schema/search?q=Prince&fields={"title":1}&lang=pt`) + .set('Authorization', `Bearer ${bearerToken}`) + .expect(200) + .end((err, res) => { + // res.body.results.length.should.eql(1) + + let results = res.body.results + + results[0].title.should.eql(document.title) + results[0]._i18n.title.should.eql('en') + should.not.exist(results[0]['title:pt']) + should.not.exist(results[0]['title:fr']) + + config.set('i18n.languages', configBackup.i18n.languages) + + done() + }) + }) + }) + }) +}) diff 
--git a/test/acceptance/search_collections.js b/test/acceptance/search_collections.js deleted file mode 100644 index b9344261..00000000 --- a/test/acceptance/search_collections.js +++ /dev/null @@ -1,150 +0,0 @@ -var app = require('./../../dadi/lib/') -var config = require('./../../config') -var fs = require('fs') -var help = require('./help') -var request = require('supertest') -var should = require('should') - -// variables scoped for use throughout tests -var bearerToken -var connectionString = 'http://' + config.get('server.host') + ':' + config.get('server.port') - -let cleanupFn - -describe('Search', function () { - this.timeout(5000) - - describe('Collections', function () { - before(function (done) { - help.dropDatabase('testdb', function (err) { - if (err) return done(err) - - let schema = { - "fields": { - "field1": { - "type": "String", - "required": false - }, - "field2": { - "type": "Number", - "required": false - }, - "field3": { - "type": "ObjectID", - "required": false - } - }, - "settings": { - "count": 40, - "displayName": "Test Collection", - "description": "Test Collection" - } - } - - help.writeTempFile( - 'temp-workspace/collections/vtest/testdb/collection.test-schema.json', - schema, - callback => { - cleanupFn = callback - - app.start(function () { - help.getBearerTokenWithAccessType('admin', function (err, token) { - if (err) return done(err) - - bearerToken = token - - done() - }) - }) - } - ) - }) - }) - - after(function (done) { - app.stop(() => { - cleanupFn() - done() - }) - }) - - it('should return docs from specified collections', function (done) { - // sample URL "/:version/search?collections=collection/model&query={"field1":{"$regex":"est"}}" - - var doc = { field1: 'Test', field2: 1234 } - - help.createDocWithSpecificVersion(bearerToken, 'vtest', doc, function (err, doc) { - if (err) return done(err) - - var client = request(connectionString) - - client - 
.get('/vtest/search?collections=testdb/test-schema,testdb/articles&query={"field1":{"$regex":"est"}}') - .set('Authorization', 'Bearer ' + bearerToken) - // .expect(200) - .expect('content-type', 'application/json') - .end(function (err, res) { - if (err) { - console.log(err) - return done(err) - } - should.exist(res.body['test-schema'].results) - res.body['test-schema'].results.should.be.Array - res.body['test-schema'].results.length.should.equal(1) - res.body['test-schema'].results[0].field1.should.equal('Test') - done() - }) - }) - }) - - it('should return 404 if method used is not GET', function (done) { - var doc = { field1: 'Test', field2: 1234 } - - help.createDocWithSpecificVersion(bearerToken, 'vtest', doc, function (err, doc) { - if (err) return done(err) - - var client = request(connectionString) - - client - .put('/vtest/search') - .set('Authorization', 'Bearer ' + bearerToken) - .expect(404) - .end(done) - }) - }) - - it('should return 400 if no collections or query specified', function (done) { - var doc = { field1: 'Test', field2: 1234 } - - help.createDocWithSpecificVersion(bearerToken, 'vtest', doc, function (err, doc) { - if (err) return done(err) - - var client = request(connectionString) - - client - .get('/vtest/search') - .set('Authorization', 'Bearer ' + bearerToken) - .expect(400) - .expect('content-type', 'application/json') - .end(done) - }) - }) - - it('should return 400 if no collections specified', function (done) { - var doc = { field1: 'Test', field2: 1234 } - - help.createDocWithSpecificVersion(bearerToken, 'vtest', doc, function (err, doc) { - if (err) return done(err) - - var client = request(connectionString) - - client - .get('/vtest/search?query={"field1":{"$regex":"est"}}') - .set('Authorization', 'Bearer ' + bearerToken) - .expect(400) - .expect('content-type', 'application/json') - .end(done) - }) - }) - }) -}) diff --git a/test/acceptance/workspace/collections/vtest/testdb/collection.articles.json 
b/test/acceptance/workspace/collections/vtest/testdb/collection.articles.json index bb9f5c64..9532ac23 100644 --- a/test/acceptance/workspace/collections/vtest/testdb/collection.articles.json +++ b/test/acceptance/workspace/collections/vtest/testdb/collection.articles.json @@ -178,12 +178,12 @@ "allowDelete": true, "count": 20, "sortOrder": 1, - "sort": "publicationDate", "index": { "enabled": true, "keys": { "_id": 1, - "urls": 1 + "urls": 1, + "publicationDate": 1 } }, "hooks": { @@ -211,6 +211,6 @@ ], "afterUpdate": [] }, - "lastModifiedAt": 1472947876485 + "lastModifiedAt": 1509474495923 } } \ No newline at end of file diff --git a/test/acceptance/workspace/collections/vtest/testdb/collection.publications.json b/test/acceptance/workspace/collections/vtest/testdb/collection.publications.json index 6f0e959b..b2a1e35a 100644 --- a/test/acceptance/workspace/collections/vtest/testdb/collection.publications.json +++ b/test/acceptance/workspace/collections/vtest/testdb/collection.publications.json @@ -93,6 +93,6 @@ } ] }, - "lastModifiedAt": 1484287084405 + "lastModifiedAt": 1509474495854 } } \ No newline at end of file diff --git a/test/acceptance/workspace/collections/vtest/testdb/collection.test-schema.json b/test/acceptance/workspace/collections/vtest/testdb/collection.test-schema.json index a4c2748a..e3dcf1e5 100755 --- a/test/acceptance/workspace/collections/vtest/testdb/collection.test-schema.json +++ b/test/acceptance/workspace/collections/vtest/testdb/collection.test-schema.json @@ -4,17 +4,14 @@ "type": "String", "label": "Title", "comments": "The title of the entry", - "placement": "Main content", "validation": {}, - "required": false, - "message": "", - "display": { - "index": true, - "edit": true - } + "required": false }, "title": { "type": "String", + "label": "Title", + "comments": "The title of the entry", + "validation": {}, "required": false, "search": { "weight": 2 @@ -29,6 +26,6 @@ "sortOrder": 1, "storeRevisions": true, "revisionCollection": 
"testSchemaHistory", - "lastModifiedAt": 1509360344054 + "lastModifiedAt": 1509474499125 } } \ No newline at end of file diff --git a/test/test-connector/index.js b/test/test-connector/index.js index f51b5e64..abde8d22 100644 --- a/test/test-connector/index.js +++ b/test/test-connector/index.js @@ -146,8 +146,8 @@ DataStore.prototype.connect = function ({database, collection}) { name: database, uuid: Math.random() } - this._debug('connect: new db', { + database, collection }) @@ -287,7 +287,7 @@ DataStore.prototype.find = function ({ query, collection, options = {}, schema, collection: collName, query, results - }) + }) let returnData = {} returnData.results = results.map(this.formatDocumentForOutput.bind(this)) @@ -502,6 +502,41 @@ DataStore.prototype.insert = function ({data, collection, options = {}, schema, }) } +/** Search for documents in the database + * + * @param {Object|Array} words - + * @param {string} collection - the name of the collection to search + * @param {object} options - options to modify the query + * @param {Object} schema - the JSON schema for the collection + * @param {Object} settings - the JSON settings configuration for the collection + * @returns {Promise.} + */ +DataStore.prototype.search = function ({ words, collection, options = {}, schema, settings }) { + if (this._mockIsDisconnected(collection)) { + this.readyState = STATE_DISCONNECTED + + return Promise.reject(new Error('DB_DISCONNECTED')) + } + + debug('search in %s for %o', collection, words) + + return new Promise((resolve, reject) => { + this.getCollection(collection).then(collection => { + let results + + let query = { + word: { + '$containsAny': words + } + } + + let baseResultset = collection.chain().find(query) + + return resolve(baseResultset.mapReduce(searchMapFn, searchReduceFn)) + }) + }) +} + /** * */ @@ -621,6 +656,48 @@ DataStore.prototype.update = function ({query, collection, update, options = {}, }) } +function searchMapFn (document) { + return { + document: 
document.document, + word: document.word, + weight: document.weight + } +} + +function searchReduceFn (documents) { + let matches = documents.reduce((groups, document) => { + let key = document.document + + groups[key] = groups[key] || { + count: 0, + weight: 0 + } + + groups[key].count++ + groups[key].weight = groups[key].weight + document.weight + return groups + }, {}) + + let output = [] + + Object.keys(matches).forEach(function (match) { + output.push({ + _id: { + document: match + }, + count: matches[match].count, + weight: matches[match].weight + }) + }) + + output.sort(function (a, b) { + if (a.weight === b.weight) return 0 + return a.weight < b.weight ? 1 : -1 + }) + + return output +} + module.exports = DataStore module.exports.settings = { connectWithCollection: false diff --git a/test/unit/help.js b/test/unit/help.js index 716960bf..21e880f9 100755 --- a/test/unit/help.js +++ b/test/unit/help.js @@ -10,18 +10,51 @@ module.exports.getModelSchema = function () { 'type': 'String', 'label': 'Title', 'comments': 'The title of the entry', - 'placement': 'Main content', 'validation': {}, 'required': false, - 'message': '', - 'display': { - 'index': true, - 'edit': true + 'message': '' + } + } +} + +module.exports.getSearchModelSchema = function () { + return { + 'fieldName': { + 'type': 'String', + 'label': 'Title', + 'comments': 'The title of the entry', + 'validation': {}, + 'required': false + }, + 'invalidSearchableFieldName': { + 'type': 'String', + 'label': 'Title', + 'comments': 'The title of the entry', + 'validation': {}, + 'required': false, + 'search': true + }, + 'searchableFieldName': { + 'type': 'String', + 'label': 'Title', + 'comments': 'The title of the entry', + 'validation': {}, + 'required': false, + 'search': { + 'weight': 2 } } } } +module.exports.getSampleSearchDocument = () => { + return { + fieldName: 'foo', + invalidSearchableFieldName: 'bar', + searchableFieldName: 'baz' + } +} + module.exports.getModelSettings = function () { return 
{ cache: true, diff --git a/test/unit/model/index.js b/test/unit/model/index.js index 7e961601..99b94289 100755 --- a/test/unit/model/index.js +++ b/test/unit/model/index.js @@ -70,36 +70,6 @@ describe('Model', function () { done() }) - it.skip('should accept database connection as third argument', function (done) { - config.set('database.enableCollectionDatabases', true) - connection.resetConnections() - - const conn = connection({ - 'username': '', - 'password': '', - 'database': 'test', - 'replicaSet': '', - 'hosts': [ - { - 'host': 'localhost', - 'port': 27020 - } - ] - }) - - // TODO: stub the connect method so this doesn't cause a connection attempt - - const mod = model('testModelName', help.getModelSchema(), conn) - should.exist(mod.connection) - mod.connection.connectionOptions.hosts[0].host.should.equal('localhost') - mod.connection.connectionOptions.hosts[0].port.should.equal(27020) - mod.connection.connectionOptions.database.should.equal('test') - - config.set('database.enableCollectionDatabases', false) - - done() - }) - it('should accept model settings as fourth argument', function (done) { const mod = model( 'testModelName', diff --git a/test/unit/search/analysers/standard.js b/test/unit/search/analysers/standard.js new file mode 100644 index 00000000..4dc06913 --- /dev/null +++ b/test/unit/search/analysers/standard.js @@ -0,0 +1,127 @@ +const should = require('should') +const sinon = require('sinon') +const standardAnalyser = require(__dirname + '/../../../../dadi/lib/search/analysers/standard') +const model = require(__dirname + '/../../../../dadi/lib/model') + +const indexableFields = { + foo: { + search: { + weight: 2 + } + } +} + +let analyser + +describe('Standard Search Analyser', () => { + beforeEach(done => { + analyser = new standardAnalyser(indexableFields) + done() + }) + + it('should export constructor', done => { + standardAnalyser.should.be.Function + done() + }) + + describe('`isValid` method', () => { + it('should return false if 
value is not a valid string', done => { + analyser.isValid(undefined).should.be.false + done() + }) + + it('should return true if value is a valid string', done => { + analyser.isValid('foo').should.be.true + done() + }) + }) + + describe('`tokenize` method', () => { + it('should return a tokenized array of words from a string', done => { + const tokens = analyser.tokenize('Foo Bar Baz') + + tokens.should.be.an.instanceOf(Array) + .and.have.lengthOf(3) + tokens.should.eql(['foo', 'bar', 'baz']) + done() + }) + }) + + describe('`unique` method', () => { + it('should reduce an array to unique values', done => { + analyser.unique(['foo', 'foo', 'bar']) + .should.be.an.instanceOf(Array) + .and.have.lengthOf(2) + done() + }) + + it('should return empty array if the input is not a valid array', done => { + analyser.unique(undefined) + .should.be.an.instanceOf(Array) + .and.have.lengthOf(0) + done() + }) + }) + + describe('`areValidWords` method', () => { + it('should return false if array of words is invalid', done => { + analyser.areValidWords(undefined) + .should.be.false + + analyser.areValidWords([ + { + word: 'foo' + } + ]) + .should.be.false + + analyser.areValidWords([ + { + word: 'foo', + weight: 2 + } + ]) + .should.be.true + + done() + }) + }) + + describe('`mergeWeights` method', () => { + it('should return empty array if words are invalid', done => { + analyser.mergeWeights(undefined) + .should.be.an.instanceOf(Array) + .and.have.lengthOf(0) + + analyser.mergeWeights([{ + word: 'foo' + }]) + .should.be.an.instanceOf(Array) + .and.have.lengthOf(0) + done() + }) + + it('should reduce multiple word instances to a unique instance of the highest weight value', done => { + analyser.mergeWeights([ + { weight: 2.85116730682758, word: 'foo' }, + { weight: 2.280933845462064, word: 'foo' } + ]) + .should.be.an.instanceOf(Array) + .and.have.lengthOf(1) + + analyser.mergeWeights([ + { weight: 2.85116730682758, word: 'foo' }, + { weight: 2.280933845462064, word: 'foo' } + 
])[0] + .should.be.an.instanceOf(Object) + .and.have.property('weight', 2.5660505761448222) + + done() + }) + }) +}) + +// add +// getWordsInField +// getAllWords +// getWordInstances diff --git a/test/unit/search/index.js b/test/unit/search/index.js new file mode 100644 index 00000000..8320a8f2 --- /dev/null +++ b/test/unit/search/index.js @@ -0,0 +1,300 @@ +const acceptanceHelper = require('./../../acceptance/help') +const config = require('./../../../config') +const faker = require('faker') +const help = require('./../help') +const Model = require('./../../../dadi/lib/model') +const Search = require('./../../../dadi/lib/search') +const should = require('should') +const sinon = require('sinon') +const store = require(config.get('search.datastore')) + +let mod +let searchInstance + +describe('Search', () => { + before(() => { + config.set('search.enabled', true) + }) + + beforeEach(done => { + mod = Model('testSearchModel', help.getSearchModelSchema(), null, { database: 'testdb' }) + searchInstance = new Search(mod) + searchInstance.init() + done() + }) + + after(() => { + config.set('search.enabled', false) + }) + + it('should export constructor', done => { + Search.should.be.Function + done() + }) + + it('should export a function that returns an instance', done => { + searchInstance.should.be.an.instanceOf(Search) + done() + }) + + it('should throw an error if model is incorrect type', done => { + should.throws(function () { var x = new Search() }) + done() + }) + + describe('`initialiseConnections` method', () => { + it('should initialise required connections', done => { + searchInstance.initialiseConnections() + + setTimeout(() => { + should.exist(searchInstance.wordConnection.db) + should.exist(searchInstance.searchConnection.db) + done() + }, 500) + }) + }) + + describe.skip('`applyIndexListeners` method', () => { + it('should call database index method once connection is established', done => { + mod = Model('testModelNew', help.getSearchModelSchema(), 
null, { database: 'testdb' }) + const dbIndexStub = sinon.spy(store.prototype, 'index') + + searchInstance = new Search(mod) + + setTimeout(() => { + dbIndexStub.called.should.be.true + dbIndexStub.lastCall.args[0].should.eql('testModelNewSearch') + dbIndexStub.lastCall.args[1].should.be.Object + dbIndexStub.restore() + + done() + }, 1000) + }) + }) + + describe('`getWordSchema` method', () => { + it('should return an object', done => { + const schema = searchInstance.getWordSchema() + schema.should.be.Object + done() + }) + }) + + describe('`getSearchSchema` method', () => { + it('should return an object', done => { + const schema = searchInstance.getSearchSchema() + schema.should.be.Object + done() + }) + }) + + describe('`getIndexableFields` method', () => { + it('should return an object', done => { + searchInstance.getIndexableFields().should.be.Object + done() + }) + + it('should return an object containing only indexable fields', done => { + searchInstance.getIndexableFields().should.be.an.instanceOf(Object).and.have.property('searchableFieldName', {weight: 2}) + searchInstance.getIndexableFields().should.not.have.property('fieldName') + searchInstance.getIndexableFields().should.not.have.property('invalidSearchableFieldName') + done() + }) + }) + + describe('`removeNonIndexableFields` method', () => { + it('should return an object if doc is invalid', done => { + searchInstance.removeNonIndexableFields().should.be.Object + done() + }) + + it('should remove non-indexable fields from document', done => { + searchInstance.removeNonIndexableFields(help.getSampleSearchDocument()) + .should.not.have.property('fieldName') + searchInstance.removeNonIndexableFields(help.getSampleSearchDocument()) + .should.not.have.property('invalidSearchableFieldName') + searchInstance.removeNonIndexableFields(help.getSampleSearchDocument()) + .should.have.property('searchableFieldName', 'baz') + done() + }) + }) + + describe('`formatInsertQuery` method', () => { + it('should convert 
list of words to valid insert query object', done => { + searchInstance.formatInsertQuery(['foo']).should.be.an.instanceOf(Array) + searchInstance.formatInsertQuery(['foo'])[0].should.have.property('word', 'foo') + done() + }) + }) + + describe('`hasSearchField` method', () => { + it('should return false if a field is invalid', done => { + searchInstance.hasSearchField().should.be.false + done() + }) + + it('should return false if a field does not contain a valid search parameter', done => { + searchInstance.hasSearchField({search: 'foo'}).should.be.false + done() + }) + + it('should return true if a field has a valid search and search weight parameter', done => { + searchInstance.hasSearchField({search: {weight: 2}}).should.be.true + done() + }) + }) + + describe('`clearDocumentInstances` method', () => { + it('should delete all search instance documents with filtered query', done => { + const dbDeleteStub = sinon.spy(store.prototype, 'delete') + + searchInstance.clearDocumentInstances('mockDocId') + dbDeleteStub.called.should.be.true + dbDeleteStub.lastCall.args[0].should.have.property('query', {document: 'mockDocId'}) + dbDeleteStub.restore() + + done() + }) + }) + + describe('`delete` method', () => { + it('should return without firing clearDocumentInstances if an array of documents is not provided', done => { + const dbDeleteStub = sinon.spy(searchInstance, 'clearDocumentInstances') + + searchInstance.delete({_id: 'mockDocId'}) + dbDeleteStub.called.should.be.false + dbDeleteStub.restore() + + done() + }) + + it('should execute clearDocumentInstances if an array of documents is provided', done => { + const dbDeleteStub = sinon.spy(searchInstance, 'clearDocumentInstances') + + searchInstance.delete([{_id: 'mockDocId'}]) + dbDeleteStub.called.should.be.true + dbDeleteStub.lastCall.args[0].should.eql('mockDocId') + dbDeleteStub.restore() + + done() + }) + }) + + describe('`batchIndex` method', () => { + it('should not execute the runBatchIndex method if no fields 
can be indexed', done => { + let schema = help.getSearchModelSchema() + delete schema.searchableFieldName + + let mod = Model('testSearchModel', schema, null, { database: 'testdb' }) + const unIndexable = new Search(mod) + unIndexable.init() + + const stub = sinon.spy(unIndexable, 'runBatchIndex') + + unIndexable.batchIndex(1, 100) + stub.called.should.be.false + stub.restore() + done() + }) + + it('should call the runBatchIndex method with correct arguments when using defaults', done => { + let schema = help.getSearchModelSchema() + let mod = Model('testSearchModel', schema, null, { database: 'testdb' }) + const indexable = new Search(mod) + indexable.init() + + const stub = sinon.spy(indexable, 'runBatchIndex') + + indexable.batchIndex() + stub.called.should.be.true + let args = stub.lastCall.args[0] + args.page.should.eql(1) + args.limit.should.eql(1000) + args.skip.should.eql(0) + args.fields.should.eql({searchableFieldName: 1}) + stub.restore() + done() + }) + + it('should call the runBatchIndex method with correct arguments when using specific params', done => { + let schema = help.getSearchModelSchema() + let mod = Model('testSearchModel', schema, null, { database: 'testdb' }) + const indexable = new Search(mod) + indexable.init() + + const stub = sinon.spy(indexable, 'runBatchIndex') + + indexable.batchIndex(2, 500) + stub.called.should.be.true + let args = stub.lastCall.args[0] + args.page.should.eql(2) + args.limit.should.eql(500) + args.skip.should.eql(500) + args.fields.should.eql({searchableFieldName: 1}) + stub.restore() + done() + }) + }) + + describe('batchIndex', function () { + beforeEach((done) => { + acceptanceHelper.dropDatabase('testdb', err => { + done() + }) + }) + + it('should call runBatchIndex repeatedly when there are more results', done => { + let schema = help.getSearchModelSchema() + let mod = Model('testSearchModel', schema, null, { database: 'testdb' }) + let indexable = new Search(mod) + indexable.init() + + let spy = 
sinon.spy(indexable, 'runBatchIndex') + + let docs = [ + { searchableFieldName: faker.name.findName() }, + { searchableFieldName: faker.name.findName() }, + { searchableFieldName: faker.name.findName() }, + { searchableFieldName: faker.name.findName() }, + { searchableFieldName: faker.name.findName() } + ] + + // insert documents directly + mod.connection.db.insert({ + data: docs, + collection: 'testSearchModel', + schema + }) + + let indexStub = sinon.stub(indexable, 'index').callsFake(() => { + return Promise.resolve({ + results: docs, + metadata: { + totalPages: 5, + totalCount: 5 + } + }) + }) + + indexable.batchIndex(1, 1) + + setTimeout(() => { + spy.restore() + indexStub.restore() + spy.callCount.should.be.above(1) + let args = spy.args + args[0][0].skip.should.eql(0) + args[0][0].page.should.eql(1) + args[1][0].skip.should.eql(1) + args[1][0].page.should.eql(2) + args[2][0].skip.should.eql(2) + args[2][0].page.should.eql(3) + args[3][0].skip.should.eql(3) + args[3][0].page.should.eql(4) + args[4][0].skip.should.eql(4) + args[4][0].page.should.eql(5) + done() + }, 3000) + }) + }) +}) diff --git a/test/unit/storage.s3.js b/test/unit/storage.s3.js index d31da43a..72356f8e 100644 --- a/test/unit/storage.s3.js +++ b/test/unit/storage.s3.js @@ -85,7 +85,7 @@ describe('Storage', function (done) { }) }) - it.skip('should call S3 API with the correct parameters when deleting media', function (done) { + it('should call S3 API with the correct parameters when deleting media', function (done) { config.set('media.enabled', true) config.set('media.storage', 's3') config.set('media.s3.bucketName', 'testbucket') @@ -118,7 +118,7 @@ describe('Storage', function (done) { }) }) - it.skip('should call S3 API with the correct parameters when requesting media', function (done) { + it('should call S3 API with the correct parameters when requesting media', function (done) { config.set('media.enabled', true) config.set('media.storage', 's3') config.set('media.s3.bucketName', 
'testbucket') diff --git a/workspace/collections/vjoin/testdb/collection.books.json b/workspace/collections/vjoin/testdb/collection.books.json index 8c01ff9f..5d9299c5 100755 --- a/workspace/collections/vjoin/testdb/collection.books.json +++ b/workspace/collections/vjoin/testdb/collection.books.json @@ -5,38 +5,27 @@ "label": "name", "example": "War and Peace", "comments": "This is the book's name", - "placement": "Main content", "required": true, - "message": "", - "display": { - "index": true, - "edit": false + "search": { + "weight": 2 } }, - "author": { - "type": "Reference", + "authorId": { + "type": "String", "label": "author", "example": "b8b285ae-53d1-47a5-9e69-ec04", "comments": "This is the _id of the book's author", - "placement": "Main content", - "required": true, - "message": "", - "display": { - "index": true, - "edit": true + "validation": { + "regex": { + "pattern": "^[0-9a-fA-F]{24}$" + } }, - "settings": { - "collection": "users", - "strictCompose": true - } + "required": true } }, "settings": { "cache": true, "authenticate": true, - "callback": null, - "defaultFilters": null, - "fieldLimiters": null, "count": 40, "sort": "name", "sortOrder": 1,