diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 0000000..14abec0 --- /dev/null +++ b/.eslintignore @@ -0,0 +1,3 @@ +test/ +coverage/ +scripts/ \ No newline at end of file diff --git a/.eslintrc.yml b/.eslintrc.yml index 9e2c225..4a98259 100644 --- a/.eslintrc.yml +++ b/.eslintrc.yml @@ -1 +1,28 @@ -extends: 'eslint-config-node-services' \ No newline at end of file +extends: + - 'wikimedia/server' + +plugins: + - json + - jsdoc + +rules: + array-bracket-spacing: + - off + camelcase: + - error + - properties: never + computed-property-spacing: + - error + - never + indent: + - error + - 4 + - SwitchCase: 1 + MemberExpression: 'off' + no-multi-spaces: + - off + no-underscore-dangle: + - off + space-in-parens: + - error + - never diff --git a/.jshintignore b/.jshintignore deleted file mode 100644 index d27cd56..0000000 --- a/.jshintignore +++ /dev/null @@ -1,4 +0,0 @@ -node_modules -test -scripts -coverage diff --git a/index.js b/index.js index a7a11ef..39b7bdb 100644 --- a/index.js +++ b/index.js @@ -1,4 +1,4 @@ -"use strict"; +'use strict'; /* * Cassandra-backed table storage service @@ -80,11 +80,11 @@ class RBCassandra { } const domain = req.params.domain; return this.store.get(domain, req.body) - .then(res => ({ + .then((res) => ({ status: res.items.length ? 
200 : 404, body: res })) - .catch(e => ({ + .catch((e) => ({ status: 500, body: { @@ -95,7 +95,7 @@ class RBCassandra { req: { uri: req.uri, headers: req.headers, - body: req.body && JSON.stringify(req.body).slice(0,200) + body: req.body && JSON.stringify(req.body).slice(0, 200) } } })); @@ -110,7 +110,7 @@ class RBCassandra { // created status: 201 }) - .catch(e => ({ + .catch((e) => ({ status: 500, body: { @@ -121,7 +121,7 @@ class RBCassandra { req: { uri: req.uri, headers: req.headers, - body: req.body && JSON.stringify(req.body).slice(0,200) + body: req.body && JSON.stringify(req.body).slice(0, 200) } } })); @@ -135,7 +135,7 @@ class RBCassandra { // deleted status: 204 }) - .catch(e => ({ + .catch((e) => ({ status: 500, body: { @@ -146,7 +146,7 @@ class RBCassandra { req: { uri: req.uri, headers: req.headers, - body: req.body && JSON.stringify(req.body).slice(0,200) + body: req.body && JSON.stringify(req.body).slice(0, 200) } } })); @@ -159,7 +159,7 @@ class RBCassandra { // done status: 204 }) - .catch(e => ({ + .catch((e) => ({ status: 500, body: { @@ -170,7 +170,7 @@ class RBCassandra { req: { uri: req.uri, headers: req.headers, - body: req.body && JSON.stringify(req.body).slice(0,200) + body: req.body && JSON.stringify(req.body).slice(0, 200) } } })); @@ -179,11 +179,11 @@ class RBCassandra { getTableSchema(rb, req) { const domain = req.params.domain; return this.store.getTableSchema(domain, req.params.table) - .then(res => ({ + .then((res) => ({ status: 200, body: res.schema })) - .catch(e => ({ + .catch((e) => ({ status: 500, body: { type: 'schema_query_error', @@ -193,7 +193,7 @@ class RBCassandra { req: { uri: req.uri, headers: req.headers, - body: req.body && JSON.stringify(req.body).slice(0,200) + body: req.body && JSON.stringify(req.body).slice(0, 200) } } })); @@ -215,7 +215,6 @@ class RBCassandra { } } - /** * Factory * @param {Object} options @@ -228,4 +227,3 @@ function makeRBCassandra(options) { } module.exports = makeRBCassandra; - diff --git 
a/lib/db.js b/lib/db.js index eb495c6..e266d79 100644 --- a/lib/db.js +++ b/lib/db.js @@ -1,4 +1,4 @@ -"use strict"; +'use strict'; const P = require('bluebird'); const cass = require('cassandra-driver'); @@ -28,6 +28,8 @@ class InternalRequest { /** * Construct a new InternalRequest based on an existing one, optionally * overriding existing properties. + * @param {Object} opts overriding settings + * @return {Object} new request */ extend(opts) { const req = new InternalRequest(this); @@ -43,8 +45,8 @@ class DB { this.conf = options.conf; this.log = options.log; - this.defaultConsistency = cass.types.consistencies[this.conf.defaultConsistency] - || cass.types.consistencies.localOne; + this.defaultConsistency = cass.types.consistencies[this.conf.defaultConsistency] || + cass.types.consistencies.localOne; // cassandra client this.client = client; @@ -66,7 +68,7 @@ class DB { table: 'meta', attributes: { key: 'string', - value: 'json', + value: 'json' }, index: [ { attribute: 'key', type: 'hash' } @@ -85,6 +87,11 @@ class DB { /** * Set up internal request-related information and wrap it into an * InternalRequest instance. + * @param {string} domain in dot notation + * @param {string} table logical table name + * @param {Object} query query object + * @param {Object} consistency consistency level + * @return {Object} */ _makeInternalRequest(domain, table, query, consistency) { consistency = consistency || this.defaultConsistency; @@ -145,8 +152,8 @@ class DB { return req; } else { // Check if the meta column family exists - return this.client.execute('SELECT table_name FROM system_schema.tables ' - + 'WHERE keyspace_name=? and table_name=?', [req.keyspace, 'meta']) + return this.client.execute('SELECT table_name FROM system_schema.tables ' + + 'WHERE keyspace_name=? 
and table_name=?', [req.keyspace, 'meta']) .then((res) => { if (res && res.rows.length === 0) { // meta column family doesn't exist yet @@ -165,12 +172,12 @@ class DB { * chars from the requested name as far as possible, but fall back to a sha1 * if not possible. Also respect Cassandra's limit of 48 or fewer alphanum * chars & first char being an alpha char. - * @param {string} domain in dot notation - * @param {string} table the logical table name - * @return {string} Valid Cassandra keyspace key + * @param {string} domain in dot notation + * @param {string} table the logical table name + * @return {string} Valid Cassandra keyspace key */ keyspaceName(domain, table) { - const cacheKey = JSON.stringify([domain,table]); + const cacheKey = JSON.stringify([domain, table]); const cachedName = this.keyspaceNameCache[cacheKey]; if (cachedName) { return cachedName; @@ -187,8 +194,8 @@ class DB { /** * Finds the storage group for a given domain. - * @param {string} domain the domain's name - * @return {Object} the group object matching the domain + * @param {string} domain the domain's name + * @return {Object} the group object matching the domain */ _resolveStorageGroup(domain) { let group = this.storageGroupsCache[domain]; @@ -242,8 +249,8 @@ class DB { options = options || {}; if (!req.schema) { - throw new Error(`restbase-mod-table-cassandra: ` - + `No schema for ${req.keyspace}, table: ${req.columnfamily}`); + throw new Error('restbase-mod-table-cassandra: ' + + `No schema for ${req.keyspace}, table: ${req.columnfamily}`); } if (!req.schema.iKeyMap) { @@ -397,11 +404,11 @@ class DB { status: 400, body: { type: 'bad_request', - title: `The table already exists, and it cannot ` - + `be upgraded to the requested schema (${error}).`, + title: 'The table already exists, and it cannot ' + + `be upgraded to the requested schema (${error}).`, keyspace: req.keyspace, schema: newSchema, - stack: error.stack, + stack: error.stack } }); this.log('error/cassandra/table_update', 
newErr); @@ -537,7 +544,6 @@ class DB { cql += 'primary key ('; cql += `${[`(${hashBits.join(',')})`].concat(rangeBits).join(',')}))`; - let clusteringCQL; if (orderBits.length) { clusteringCQL = `clustering order by (${orderBits.join(',')})`; @@ -566,21 +572,21 @@ class DB { // Drop the native secondary indexes we used to create on the "_domain" column. _dropDomainIndex(req) { - const cql = "select index_name from system.schema_columns where keyspace_name = ? " - + " and columnfamily_name = ? and column_name = '_domain';"; + const cql = 'select index_name from system.schema_columns where keyspace_name = ? ' + + " and columnfamily_name = ? and column_name = '_domain';"; return this.client.execute(cql, [req.keyspace, req.columnfamily], { prepare: true }) .then((res) => { if (res.rows.length && res.rows[0].index_name) { // drop the index - return this.client.execute(`drop index if exists ` - + `${cassID(req.keyspace)}.${cassID(res.rows[0].index_name)}`); + return this.client.execute('drop index if exists ' + + `${cassID(req.keyspace)}.${cassID(res.rows[0].index_name)}`); } }); } _createKeyspace(req) { - const cql = `create keyspace if not exists ${cassID(req.keyspace)} ` - + `WITH REPLICATION = ${this._createReplicationOptionsCQL(req.query.options)}`; + const cql = `create keyspace if not exists ${cassID(req.keyspace)} ` + + `WITH REPLICATION = ${this._createReplicationOptionsCQL(req.query.options)}`; return this.client.execute(cql, [], { consistency: req.consistency || this.defaultConsistency }); } @@ -669,8 +675,8 @@ class DB { */ _setReplication(domain, table, options) { const keyspace = this.keyspaceName(domain, table); - const cql = `ALTER KEYSPACE ${dbu.cassID(keyspace)} WITH ` - + `replication = ${this._createReplicationOptionsCQL(options)}`; + const cql = `ALTER KEYSPACE ${dbu.cassID(keyspace)} WITH ` + + `replication = ${this._createReplicationOptionsCQL(options)}`; this.log('warn/cassandra/replication', { message: `Updating replication for ${keyspace}`, 
replicas: this._replicationPolicy(options), @@ -701,7 +707,7 @@ class DB { if (Object.keys(current).length !== Object.keys(expected).length) { return false; } - return Object.keys(current).every(a => current[a] === expected[a]); + return Object.keys(current).every((a) => current[a] === expected[a]); }; return this._getReplication(domain, table) diff --git a/lib/dbutils.js b/lib/dbutils.js index 5dcfb7b..5e24c73 100644 --- a/lib/dbutils.js +++ b/lib/dbutils.js @@ -1,4 +1,4 @@ -"use strict"; +'use strict'; require('core-js/shim'); @@ -25,7 +25,6 @@ const dbu = {}; * # Section 1: low-level helpers */ - /* * Error instance wrapping HTTP error responses * @@ -91,7 +90,6 @@ dbu.makeValidKey = function makeValidKey(key, length) { } }; - /** * Given a row object, adds a _ttl attribute for the maximum of all * contained column TTLs, or undefined if no TTLs are present. @@ -115,7 +113,7 @@ function _nextPage(client, query, params, pageState, options) { return P.try(() => client.execute(query, params, { prepare: true, fetchSize: options.fetchSize || 5, - pageState, + pageState })) .catch((err) => { if (!options.retries) { @@ -137,12 +135,13 @@ function _nextPage(client, query, params, pageState, options) { * @param {Array} params CQL query params * @param {Object} options options map * @param {Function} handler to invoke for each row result + * @return {Object} */ dbu.eachRow = function eachRow(client, query, params, options, handler) { options.log = options.log || (() => {}); function processPage(pageState) { return _nextPage(client, query, params, pageState, options) - .then(res => P.try(() => P.each(res.rows, (row) => { + .then((res) => P.try(() => P.each(res.rows, (row) => { // Decorate the row result with the _ttl attribute. if (options.withTTL) { dbu.assignMaxTTL(row); @@ -179,6 +178,9 @@ dbu.DEFAULT_CONFIG_VERSION = 0; // Implicit module config version. * Wrapper for validator#validateAndNormalizeSchema (shipped in * restbase-m-t-spec). 
Ensures the presence of the private, * implementation-specific version attributes. + * @param {Obect} schema + * @param {Object} configVer + * @return {Function} */ dbu.validateAndNormalizeSchema = function validateAndNormalizeSchema(schema, configVer) { if (!schema._backend_version) { @@ -201,7 +203,6 @@ dbu.indexKeys = function indexKeys(index) { return res; }; - function encodeBlob(blob) { if (blob instanceof Buffer) { return blob; @@ -210,7 +211,6 @@ function encodeBlob(blob) { } } - const schemaTypeToCQLTypeMap = { blob: 'blob', 'set': 'set', @@ -249,7 +249,6 @@ dbu.schemaTypeToCQLType = (schemaType) => { return cqlType; }; - /** * Generates read/write conversion functions for set-typed attributes * @param {Object} convObj the conversion object to use for individual values (from dbu.conversions) @@ -285,7 +284,7 @@ function generateSetConvertor(convObj) { }; } if (convObj.read) { - res.read = valArray => valArray.map(convObj.read); + res.read = (valArray) => valArray.map(convObj.read); } return res; } @@ -293,10 +292,10 @@ function generateSetConvertor(convObj) { // Conversion factories. We create a function for each type so that it can be // compiled monomorphically. 
function toString() { - return val => val.toString(); + return (val) => val.toString(); } function toNumber() { - return val => val.toNumber(); + return (val) => val.toNumber(); } dbu.conversions = { @@ -377,7 +376,6 @@ dbu.makeSchemaInfo = function makeSchemaInfo(schema, isMetaCF) { return psi; }; - /** * Converts an array of result rows from Cassandra to JS values * @param {Array} rows the result rows to convert; not modified @@ -393,8 +391,8 @@ dbu.convertRows = function convertRows(rows, schema) { Object.keys(row).forEach((att) => { // Skip over internal attributes if (!/^_/.test(att)) { - if (row[att] !== null && conversions[att] - && conversions[att].read) { + if (row[att] !== null && conversions[att] && + conversions[att].read) { newRow[att] = schema.conversions[att].read(row[att]); } else { newRow[att] = row[att]; @@ -511,11 +509,10 @@ dbu.buildCondition = function buildCondition(predicates, schema, noConvert) { }); return { cql: conjunctions.join(' AND '), - params, + params }; }; - /** * CQL building for PUT queries * @param {InternalRequest} req @@ -540,8 +537,8 @@ dbu.buildPutQuery = (req, noConvert) => { const indexKVMap = {}; schema.iKeys.forEach((key) => { if (attributes[key] === undefined) { - throw new Error(`Index attribute ${JSON.stringify(key)} missing ` - + `in ${JSON.stringify(query)}; schema: ${JSON.stringify(schema, null, 2)}`); + throw new Error(`Index attribute ${JSON.stringify(key)} missing ` + + `in ${JSON.stringify(query)}; schema: ${JSON.stringify(schema, null, 2)}`); } else { indexKVMap[key] = attributes[key]; } @@ -573,7 +570,6 @@ dbu.buildPutQuery = (req, noConvert) => { } }); - let using = ''; const usingBits = []; const usingParams = []; @@ -615,8 +611,8 @@ dbu.buildPutQuery = (req, noConvert) => { cond = ' if not exists '; } const proj = schema.iKeys.concat(nonIndexKeys).map(dbu.cassID).join(','); - cql = `insert into ${dbu.cassID(req.keyspace)}.${dbu.cassID(req.columnfamily)}` - + ` (${proj}) values (`; + cql = `insert into 
${dbu.cassID(req.keyspace)}.${dbu.cassID(req.columnfamily)}` + + ` (${proj}) values (`; cql += `${placeholders.join(',')})${cond}${using}`; params = condRes.params.concat(params, usingParams); } else if (nonIndexKeys.length) { @@ -629,8 +625,8 @@ dbu.buildPutQuery = (req, noConvert) => { } const updateProj = `${nonIndexKeys.map(dbu.cassID).join(' = ?,')} = ? `; - cql += `update ${dbu.cassID(req.keyspace)}.${dbu.cassID(req.columnfamily)}` - + `${using} set ${updateProj} where `; + cql += `update ${dbu.cassID(req.keyspace)}.${dbu.cassID(req.columnfamily)}` + + `${using} set ${updateProj} where `; cql += condRes.cql + cond; params = usingParams.concat(params, condRes.params, condParams); @@ -640,11 +636,10 @@ dbu.buildPutQuery = (req, noConvert) => { return { cql, - params, + params }; }; - /** * CQL building for GET queries * @param {InternalRequest} req @@ -680,10 +675,10 @@ dbu.buildGetQuery = (req, options) => { if (options.withTTL) { // Candidates for TTL are non-index, non-collection, attributes const ttlCandidates = projAttrs.filter( - v => !schema.iKeyMap[v] && !/^(set|map|list)<.*>$/.test(schema.attributes[v]) + (v) => !schema.iKeyMap[v] && !/^(set|map|list)<.*>$/.test(schema.attributes[v]) ); const projTTLs = ttlCandidates.map( - v => `TTL(${dbu.cassID(v)}) as ${dbu.cassID(dbu.cassTTL(v))}` + (v) => `TTL(${dbu.cassID(v)}) as ${dbu.cassID(dbu.cassTTL(v))}` ); projCQL += `,${projTTLs.join(',')}`; } @@ -703,8 +698,8 @@ dbu.buildGetQuery = (req, options) => { Object.keys(attributes).forEach((key) => { // query should not have non key attributes if (!schema.iKeyMap[key]) { - throw new Error(`All request attributes need to be key attributes. ` - + `Bad attribute: ${key}`); + throw new Error('All request attributes need to be key attributes. 
' + + `Bad attribute: ${key}`); } }); cql += ' where '; @@ -724,15 +719,15 @@ dbu.buildGetQuery = (req, options) => { } const idxElem = schema.iKeyMap[att]; if (!idxElem || idxElem.type !== 'range') { - throw new Error(`Cannot order on attribute ${att}; ` - + `needs to be a range index, but is ${idxElem}`); + throw new Error(`Cannot order on attribute ${att}; ` + + `needs to be a range index, but is ${idxElem}`); } const shouldBeReversed = dir !== idxElem.order; if (reversed === undefined) { reversed = shouldBeReversed; } else if (reversed !== shouldBeReversed) { - throw new Error("Inconsistent sort order; Cassandra only supports " - + "reversing the default sort order."); + throw new Error('Inconsistent sort order; Cassandra only supports ' + + 'reversing the default sort order.'); } }); diff --git a/lib/index.js b/lib/index.js index afd4bf2..65c931c 100644 --- a/lib/index.js +++ b/lib/index.js @@ -1,4 +1,4 @@ -"use strict"; +'use strict'; const P = require('bluebird'); const cass = require('cassandra-driver'); @@ -9,8 +9,8 @@ const DB = require('./db'); function validateAndNormalizeDcConf(conf) { // Default to 'datacenter1' - if (!conf.localDc) { conf.localDc = 'datacenter1'; } - if (!conf.datacenters) { conf.datacenters = ['datacenter1']; } + conf.localDc = conf.localDc || 'datacenter1'; + conf.datacenters = conf.datacenters || ['datacenter1']; if (!(conf.datacenters instanceof Array)) { throw new Error('invalid datacenters configuration (not an array)'); } @@ -23,8 +23,12 @@ function validateAndNormalizeDcConf(conf) { function sslOptions(sslConf) { const sslOpts = {}; - if (sslConf.cert) { sslOpts.cert = fs.readFileSync(sslConf.cert); } - if (sslConf.key) { sslOpts.key = fs.readFileSync(sslConf.key); } + if (sslConf.cert) { + sslOpts.cert = fs.readFileSync(sslConf.cert); + } + if (sslConf.key) { + sslOpts.key = fs.readFileSync(sslConf.key); + } if (sslConf.ca) { sslOpts.ca = []; diff --git a/lib/schemaMigration.js b/lib/schemaMigration.js index 5cae545..145e893 
100644 --- a/lib/schemaMigration.js +++ b/lib/schemaMigration.js @@ -1,4 +1,4 @@ -"use strict"; +'use strict'; const dbu = require('./dbutils'); const P = require('bluebird'); @@ -27,7 +27,6 @@ function confChanged(current, proposed) { } } - /** * Base migration handler for unsupported schema */ @@ -64,8 +63,8 @@ class Options { } validate(req, current, proposed) { - if (current._backend_version === proposed._backend_version - && confChanged({ conf: current.options, version: current.version }, + if (current._backend_version === proposed._backend_version && + confChanged({ conf: current.options, version: current.version }, { conf: proposed.options, version: proposed.version })) { return true; } @@ -95,8 +94,8 @@ class Attributes { const table = `${dbu.cassID(req.keyspace)}.${dbu.cassID(req.columnfamily)}`; const currSet = new Set(Object.keys(current.attributes)); const propSet = new Set(Object.keys(proposed.attributes)); - const addColumns = Array.from(propSet).filter(x => !currSet.has(x)); - const delColumns = Array.from(currSet).filter(x => !propSet.has(x)); + const addColumns = Array.from(propSet).filter((x) => !currSet.has(x)); + const delColumns = Array.from(currSet).filter((x) => !propSet.has(x)); return P.each(addColumns, (col) => { this.options.log('warn/schemaMigration/attributes', { message: `adding column${col}`, @@ -109,8 +108,8 @@ class Attributes { } return this.options.client.execute(cql, [], { consistency: req.consistency }) .catch((e) => { - if (!new RegExp(`Invalid column name ${col} because it ` - + `conflicts with an existing column`).test(e.message)) { + if (!new RegExp(`Invalid column name ${col} because it ` + + 'conflicts with an existing column').test(e.message)) { throw (e); } // Else: Ignore the error if the column already exists. 
@@ -143,8 +142,8 @@ Attributes.prototype.validate = (req, current, proposed) => { conf: proposed.attributes, version: proposed.version }); Attributes.prototype._alterTableAdd = (proposed, table, col) => { - let cql = `ALTER TABLE ${table} ADD ${dbu.cassID(col)} ` - + `${dbu.schemaTypeToCQLType(proposed.attributes[col])}`; + let cql = `ALTER TABLE ${table} ADD ${dbu.cassID(col)} ` + + `${dbu.schemaTypeToCQLType(proposed.attributes[col])}`; if (proposed.index && proposed.staticKeyMap[col]) { cql += ' static'; } @@ -162,8 +161,8 @@ class Index { validate(req, current, proposed) { if (confChanged({ conf: current.index, version: current.version }, { conf: proposed.index, version: proposed.version })) { - const addIndexes = proposed.index.filter(x => !this._hasSameIndex(current.index, x)); - const delIndexes = current.index.filter(x => !this._hasSameIndex(proposed.index, x)); + const addIndexes = proposed.index.filter((x) => !this._hasSameIndex(current.index, x)); + const delIndexes = current.index.filter((x) => !this._hasSameIndex(proposed.index, x)); const alteredColumns = []; @@ -183,8 +182,8 @@ class Index { alteredColumns.push(index.attribute); } }); - if (addIndexes.some(index => index.type !== 'static') - || delIndexes.some(index => index.type !== 'static')) { + if (addIndexes.some((index) => index.type !== 'static') || + delIndexes.some((index) => index.type !== 'static')) { throw new Error('Only static index additions and removals supported'); } if (alteredColumns.length > 0) { @@ -198,14 +197,13 @@ class Index { } Index.prototype._hasSameIndex = (indexes, proposedIndex) => - indexes.some(idx => idx.attribute === proposedIndex.attribute - && idx.type === proposedIndex.type - && idx.order === proposedIndex.order); + indexes.some((idx) => idx.attribute === proposedIndex.attribute && + idx.type === proposedIndex.type && + idx.order === proposedIndex.order); Index.prototype.migrate = () => { }; - /** * Migrator for the db module config. 
* @@ -229,8 +227,8 @@ ConfigMigrator.prototype.validate = (req, current, proposed) => { status: 400, body: { type: 'bad_request', - title: `Unable to downgrade storage module ` - + `configuration to version ${proposed._config_version}`, + title: 'Unable to downgrade storage module ' + + `configuration to version ${proposed._config_version}`, keyspace: req.keyspace, schema: proposed } @@ -263,8 +261,8 @@ BackendMigrator.prototype.validate = (req, current, proposed) => { status: 400, body: { type: 'bad_request', - title: `Unable to downgrade storage backend ` - + `to version ${proposed._backend_version}`, + title: 'Unable to downgrade storage backend ' + + `to version ${proposed._backend_version}`, keyspace: req.keyspace, schema: proposed } @@ -275,7 +273,6 @@ BackendMigrator.prototype.validate = (req, current, proposed) => { } }; - const migrationHandlers = [ // First, migrate the backend version. BackendMigrator, @@ -296,7 +293,7 @@ class SchemaMigrator { constructor(options) { this.options = options; - this.migrators = migrationHandlers.map(Klass => new Klass(options)); + this.migrators = migrationHandlers.map((Klass) => new Klass(options)); } /** @@ -309,7 +306,7 @@ class SchemaMigrator { */ migrate(req, current, proposed) { // First phase: validate everything. - const toMigrate = this.migrators.filter(migrator => + const toMigrate = this.migrators.filter((migrator) => // Migrators signal that something needs to be done by returning true, // and we are interested in the migrations that need to be applied. // If validation fails, the validator will throw & abort all @@ -322,7 +319,7 @@ class SchemaMigrator { return Promise.resolve(false); } else { // Perform the migrations. - return P.each(toMigrate, migrator => migrator.migrate(req, current, proposed)) + return P.each(toMigrate, (migrator) => migrator.migrate(req, current, proposed)) .then(() => // Indicate that we did indeed perform some migrations. 
true); } diff --git a/maintenance/.eslintrc.yml b/maintenance/.eslintrc.yml new file mode 100644 index 0000000..f3f8607 --- /dev/null +++ b/maintenance/.eslintrc.yml @@ -0,0 +1,3 @@ +rules: + no-console: + - off \ No newline at end of file diff --git a/maintenance/keyspace_name.js b/maintenance/keyspace_name.js index bb7da18..09b44b6 100644 --- a/maintenance/keyspace_name.js +++ b/maintenance/keyspace_name.js @@ -1,5 +1,4 @@ -"use strict"; - +'use strict'; var getConfig = require('./lib/index').getConfig; var DB = require('../lib/db'); @@ -8,21 +7,21 @@ var yargs = require('yargs') .usage('Usage: $0 [-c YAML] -d DOMAIN -t TABLE\n\n' + 'Output Cassandra keyspace name for a given domain and table') .demand(['domain', 'table']) - .options('h', {alias: 'help'}) + .options('h', { alias: 'help' }) .options('d', { alias: 'domain', describe: 'Domain to match with storage group', - type: 'string', + type: 'string' }) .options('t', { alias: 'table', describe: 'Logical table name (e.g. parsoid.html)', - type: 'string', + type: 'string' }) .options('c', { alias: 'config', describe: 'RESTBase configuration file', - type: 'string', + type: 'string' }); var argv = yargs.argv; @@ -33,8 +32,7 @@ if (argv.h) { } var conf = getConfig(argv.config); -var db = new DB({}, {conf: conf, log: function(){} }); - +var db = new DB({}, { conf: conf, log: () => {} }); console.log(db.keyspaceName(argv.domain, argv.table)); process.exit(0); diff --git a/maintenance/lib/index.js b/maintenance/lib/index.js index 0c7fd66..38bab17 100644 --- a/maintenance/lib/index.js +++ b/maintenance/lib/index.js @@ -1,15 +1,13 @@ -"use strict"; - +'use strict'; var fs = require('fs'); var yaml = require('js-yaml'); - /** * Return the table section of a RESTBase config. * * @param {string} config - Path to a RESTBase YAML configuration file. - * @return {object} table section of configuration. + * @return {Object} table section of configuration. 
*/ function getConfig(config) { // Read a RESTBase configuration from a (optional) path argument, an (optional) CONFIG @@ -29,5 +27,5 @@ function getConfig(config) { } module.exports = { - getConfig: getConfig, + getConfig }; diff --git a/maintenance/list_tables.js b/maintenance/list_tables.js index 99d00a7..966bef3 100644 --- a/maintenance/list_tables.js +++ b/maintenance/list_tables.js @@ -1,5 +1,4 @@ -"use strict"; - +'use strict'; const align = require('string-align'); const P = require('bluebird'); @@ -33,7 +32,6 @@ const yargs = require('yargs') const argv = yargs.argv; - if (argv.h) { yargs.showHelp(); process.exit(0); diff --git a/package.json b/package.json index 8508c52..888be1a 100644 --- a/package.json +++ b/package.json @@ -19,7 +19,8 @@ "url": "git://github.com/wikimedia/restbase-mod-table-cassandra.git" }, "scripts": { - "test": "mocha", + "test": "npm run lint && mocha", + "lint": "eslint --max-warnings 0 --ext .js --ext .json .", "coverage": "istanbul cover _mocha -- -R spec", "coveralls": "cat ./coverage/lcov.info | coveralls" }, @@ -29,9 +30,7 @@ "mocha": "^5.2.0", "mocha-lcov-reporter": "^1.3.0", "preq": "^0.5.6", - "mocha-eslint": "^4.1.0", - "eslint-config-node-services": "^2.2.5", - "eslint-config-wikimedia": "^0.8.1", + "eslint-config-wikimedia": "^0.10.0", "eslint-plugin-jsdoc": "^3.9.1", "eslint-plugin-json": "^1.2.1" } diff --git a/scripts/RashomonDumpImporter.js b/scripts/RashomonDumpImporter.js index 941b244..f7ead6f 100644 --- a/scripts/RashomonDumpImporter.js +++ b/scripts/RashomonDumpImporter.js @@ -1,6 +1,5 @@ #!/usr/bin/env node -/*jslint node: true */ -"use strict"; +'use strict'; /** * Rashomon write test. 
*/ @@ -10,7 +9,7 @@ var dumpReader = require('./dumpReader.js'), FormData = require('form-data'), http = require('http'); -function testWrites () { +function testWrites() { var reader = new dumpReader.DumpReader(), totalSize = 0, revisions = 0, @@ -21,7 +20,7 @@ function testWrites () { maxConcurrency = 50; http.globalAgent = false; - reader.on( 'revision', function ( revision ) { + reader.on('revision', (revision) => { // Up to 50 retries var retries = 50, @@ -34,7 +33,7 @@ function testWrites () { var timestamp = new Date(revision.timestamp).toISOString(), name = encodeURIComponent(revision.page.title.replace(/ /g, '_')); - function handlePostResponse (err, response, body) { + function handlePostResponse(err, response, body) { if (err || response.statusCode !== 200) { if (!err) { err = response.statusCode + ' ' + body; @@ -53,7 +52,7 @@ function testWrites () { totalSize += revision.text.length; revisions++; var interval = 1000; - if(revisions % interval === 0) { + if (revisions % interval === 0) { var newIntervalDate = new Date(), rate = interval / (newIntervalDate - intervalDate) * 1000, @@ -70,11 +69,11 @@ function testWrites () { } } - function doPost () { + function doPost() { var form = new FormData(), reqOptions = { method: 'POST', - uri: 'http://localhost:8000/enwiki/page/' + name + '?rev/', + uri: 'http://localhost:8000/enwiki/page/' + name + '?rev/' }; form.append('_timestamp', timestamp); form.append('_rev', revision.id); @@ -86,7 +85,7 @@ function testWrites () { // send it off doPost(); }); - reader.on('end', function() { + reader.on('end', () => { console.log('####################'); var delta = Math.round((new Date() - startDate) / 1000); console.log(revisions + ' revisions in ' + delta + @@ -96,7 +95,7 @@ function testWrites () { process.exit(); }); - process.stdin.on('data', reader.push.bind(reader) ); + process.stdin.on('data', reader.push.bind(reader)); process.stdin.setEncoding('utf8'); process.stdin.resume(); } diff --git 
a/scripts/direct-request.js b/scripts/direct-request.js index 34fd35c..bd1602b 100755 --- a/scripts/direct-request.js +++ b/scripts/direct-request.js @@ -1,6 +1,5 @@ #!/usr/bin/env node -"use strict"; - +'use strict'; global.Promise = require('bluebird'); @@ -12,40 +11,36 @@ var dbu = require('../lib/dbutils.js'); var fs = require('fs'); var util = require('util'); - -function usage(exit_code) { +function usage(exit_code) { var node_bin = process.argv[0]; var script_bin = process.argv[1]; - console.log("Usage: %s %s [options] ", node_bin, script_bin); - console.log(" options:"); - console.log(" -m the method to use, default: get"); - console.log(" -d the data to use as the request body"); - console.log(" -f load data from file "); - console.log(" -j interpret data as JSON"); - console.log(" -h print this help and exit"); - console.log(" the path to route the request to"); - if (typeof exit_code === 'undefined') - exit_code = 1; + console.log('Usage: %s %s [options] ', node_bin, script_bin); + console.log(' options:'); + console.log(' -m the method to use, default: get'); + console.log(' -d the data to use as the request body'); + console.log(' -f load data from file '); + console.log(' -j interpret data as JSON'); + console.log(' -h print this help and exit'); + console.log(' the path to route the request to'); + if (typeof exit_code === 'undefined') { exit_code = 1; } process.exit(exit_code); } - function parse_data(data_str) { var ret; try { ret = JSON.parse(data_str); - } catch(err) { + } catch (err) { try { ret = eval('(' + data_str + ')'); - } catch(eval_err) { - console.log("Error while parsing input data: %s", eval_err.message); + } catch (eval_err) { + console.log('Error while parsing input data: %s', eval_err.message); process.exit(2); } } return ret; } - var args = process.argv.slice(2); if (args == null || args.length == 0) { usage(); @@ -60,8 +55,8 @@ var opts = { var exp_method = false; var exp_data = false; var exp_fname = false; 
-args.forEach(function(arg, index, array) { - switch(arg) { +args.forEach((arg, index, array) => { + switch (arg) { case '-h': usage(); case '-m': @@ -84,11 +79,11 @@ args.forEach(function(arg, index, array) { opts.method = arg.toLowerCase(); exp_method = false; } else if (exp_data) { - opts.data = opts.is_json ? parse_data( arg ) : arg; + opts.data = opts.is_json ? parse_data(arg) : arg; exp_data = false; } else if (exp_fname) { - var data = fs.readFileSync(arg, {encoding: 'utf8'}); - opts.data = opts.is_json ? parse_data( data ) : data; + var data = fs.readFileSync(arg, { encoding: 'utf8' }); + opts.data = opts.is_json ? parse_data(data) : data; exp_fname = false; } else { if (arg[0] == '/') { @@ -101,21 +96,20 @@ args.forEach(function(arg, index, array) { }); if (!opts.path || !opts.path.length) { - console.log("The path is obligatory!"); + console.log('The path is obligatory!'); usage(); } - makeClient({ log: console.log, conf: { hosts: ['localhost'] } }) -.then(function(db) { +.then((db) => { DB = db; return router.makeRouter(); -}).then(function(r_obj) { +}).then( (r_obj) => { var req = { url: opts.path, method: opts.method @@ -123,12 +117,11 @@ makeClient({ if (opts.data !== null) { req.body = opts.data; } - console.log("#~> REQ : %s", util.inspect(req)); + console.log('#~> REQ : %s', util.inspect(req)); return r_obj.request(req); -}).then(function(response) { - console.log("#~> RESP: %s", util.inspect(response)); +}).then((response) => { + console.log('#~> RESP: %s', util.inspect(response)); process.exit(); -}).catch(function(err) { - console.log("#~> ERR : %s", util.inspect(err)); +}).catch((err) => { + console.log('#~> ERR : %s', util.inspect(err)); }); - diff --git a/scripts/dumpReader.js b/scripts/dumpReader.js index 1873f7c..b3c4202 100644 --- a/scripts/dumpReader.js +++ b/scripts/dumpReader.js @@ -12,7 +12,7 @@ util.inherits(DumpReader, events.EventEmitter); /** * @param {Stream} stream input stream to read XML from */ -DumpReader.prototype.makeParser 
= function() { +DumpReader.prototype.makeParser = function() { var self = this, stack = [{}], @@ -21,23 +21,22 @@ DumpReader.prototype.makeParser = function() { function flip(arr) { var obj = {}; - arr.forEach(function(val) { + arr.forEach((val) => { obj[val] = true; }); return obj; } var textNodes = flip(['id', 'text', 'title', 'minor', 'comment', 'username', 'timestamp']), boolNodes = flip(['minor', 'redirect']), - ignoreNodes = flip(['mediawiki', 'siteinfo', 'upload', 'thread'] ), + ignoreNodes = flip(['mediawiki', 'siteinfo', 'upload', 'thread']), parser = new libxml.SaxPushParser(); this.parser = parser; - parser.on('startElementNS', function(elem, attrs, prefix, uri, namespaces) { - //console.warn( 'elem: ' + elem ); + parser.on('startElementNS', (elem, attrs, prefix, uri, namespaces) => { + // console.warn( 'elem: ' + elem ); if (elem in ignoreNodes) { - /* jshint noempty: false */ // we know this is empty! // ... } else if (elem === 'page') { - //console.warn( 'starting page' ); + // console.warn( 'starting page' ); stack = []; workspace = {}; } else if (elem === 'revision') { @@ -53,11 +52,11 @@ DumpReader.prototype.makeParser = function() { } }); - parser.on( 'endElementNS', function(elem, prefix, uri) { + parser.on('endElementNS', (elem, prefix, uri) => { // ping something! if (elem === 'mediawiki') { self.complete = true; - //stream.pause(); + // stream.pause(); self.emit('end', {}); } else if (elem === 'page') { self.emit('page', workspace); @@ -76,53 +75,52 @@ DumpReader.prototype.makeParser = function() { } }); - parser.on( 'characters', function(chars) { + parser.on('characters', (chars) => { buffer += chars; }); - parser.on( 'cdata', function(cdata) { + parser.on('cdata', (cdata) => { buffer += cdata; }); - parser.on( 'endDocument', function() { + parser.on('endDocument', () => { // This doesn't seem to run...?
        self.complete = true; - //stream.pause(); + // stream.pause(); self.emit('end', {}); }); - parser.on( 'error', function(err) { + parser.on('error', (err) => { self.emit('error', err); // Should we.... stop reading now or what? }); }; -DumpReader.prototype.push = function( chunk ) { - //console.log( 'dr read' + chunk ); - this.parser.push( chunk ); +DumpReader.prototype.push = function(chunk) { + // console.log( 'dr read' + chunk ); + this.parser.push(chunk); }; - module.exports.DumpReader = DumpReader; if (module === require.main) { var reader = new DumpReader(); - reader.on('end', function() { + reader.on('end', () => { console.log('done!'); process.exit(); }); - reader.on('error', function(err) { + reader.on('error', (err) => { console.log('error!', err); process.exit(1); }); - reader.on('page', function(page) { + reader.on('page', (page) => { console.log('page', page); }); - reader.on('revision', function(revision) { + reader.on('revision', (revision) => { revision.text = revision.text.substr(0, 40); console.log('revision', revision); }); console.log('Reading!'); process.stdin.setEncoding('utf8'); - process.stdin.on('data', reader.push.bind(reader) ); + process.stdin.on('data', reader.push.bind(reader)); process.stdin.resume(); } diff --git a/test/functional/backend_migrations.js b/test/functional/backend_migrations.js index 9d0ceed..17722f0 100644 --- a/test/functional/backend_migrations.js +++ b/test/functional/backend_migrations.js @@ -1,7 +1,5 @@ "use strict"; -// mocha defines to avoid JSHint breakage -/* global describe, it, before, beforeEach, after, afterEach */ var assert = require('assert'); var dbu = require('../../lib/dbutils'); @@ -26,35 +24,35 @@ var testTable0 = { ] }; -describe('Backend migration', function() { +describe('Backend migration', () => { var db; - before(function() { + before(() => { return makeClient({ - log: function(level, info) { + log: (level, info) => { if (!/^info|warn|verbose|debug|trace/.test(level)) { console.log(level, info); }
}, conf: yaml.safeLoad(fs.readFileSync(__dirname + '/../utils/test_client.conf.yaml')) }) - .then(function(newDb) { + .then((newDb) => { db = newDb; }) - .then(function() { + .then(() => { return db.createTable('restbase.cassandra.test.local', testTable0); }) - .then(function(response) { + .then((response) => { assert.ok(response, 'undefined response'); assert.deepEqual(response.status, 201); }); }); - after(function() { + after(() => { db.dropTable('restbase.cassandra.test.local', testTable0.table); }); - it('persists a backend version', function() { + it('persists a backend version', () => { return db.getTableSchema('restbase.cassandra.test.local', testTable0.table) - .then(function(response) { + .then((response) => { assert.ok(response, 'undefined response'); assert.deepEqual(response.schema.table, testTable0.table); assert.deepEqual(response.schema._backend_version, dbu.CURRENT_BACKEND_VERSION); diff --git a/test/functional/config_migrations.js b/test/functional/config_migrations.js index 149bdeb..d412c65 100644 --- a/test/functional/config_migrations.js +++ b/test/functional/config_migrations.js @@ -1,8 +1,5 @@ "use strict"; -// mocha defines to avoid JSHint breakage -/* global describe, it, before, beforeEach, after, afterEach */ - var assert = require('assert'); var dbu = require('../../lib/dbutils'); var fs = require('fs'); @@ -26,35 +23,35 @@ var testTable0 = { ] }; -describe('Configuration migration', function() { +describe('Configuration migration', () => { var db; - before(function() { + before(() => { return makeClient({ - log: function(level, info) { + log: (level, info) => { if (!/^info|warn|verbose|debug|trace/.test(level)) { console.log(level, info); } }, conf: yaml.safeLoad(fs.readFileSync(__dirname + '/../utils/test_client.conf.yaml')) }) - .then(function(newDb) { + .then((newDb) => { db = newDb; }) - .then(function() { + .then(() => { return db.createTable('restbase.cassandra.test.local', testTable0); }) - .then(function(response) { + 
.then((response) => { assert.ok(response, 'undefined response'); assert.deepEqual(response.status, 201); }); }); - after(function() { + after(() => { return db.dropTable('restbase.cassandra.test.local', testTable0.table); }); - it('migrates version', function() { + it('migrates version', () => { return db.getTableSchema('restbase.cassandra.test.local', testTable0.table) - .then(function(response) { + .then((response) => { assert.ok(response, 'undefined response'); assert.deepEqual(response.schema.table, testTable0.table); assert.deepEqual(response.schema._config_version, 1); @@ -62,35 +59,35 @@ describe('Configuration migration', function() { db.conf.version = 2; return db.createTable('restbase.cassandra.test.local', testTable0); }) - .then(function(response) { + .then((response) => { assert.ok(response, 'undefined response'); assert.deepEqual(response.status, 201); return db.getTableSchema('restbase.cassandra.test.local', testTable0.table); }) - .then(function(response) { + .then((response) => { assert.ok(response, 'undefined response'); assert.deepEqual(response.schema.table, testTable0.table); assert.deepEqual(response.schema._config_version, 2); }); }); - it('disallows decreasing versions', function() { + it('disallows decreasing versions', () => { // Migrate version (from 1) to 2. db.conf.version = 2; return db.createTable('restbase.cassandra.test.local', testTable0) - .then(function(response) { + .then((response) => { assert.ok(response, 'undefined response'); assert.deepEqual(response.status, 201); // Attempt to downgrade version (from 2) to 1 db.conf.version = 1; return db.createTable('restbase.cassandra.test.local', testTable0) - .then(function(response) { + .then((response) => { // A successful response means a downgrade happened (this is wrong). assert.fail(response, undefined, 'expected HTTPError exception'); }) - .catch(function(error) { + .catch((error) => { // This is what we want, an HTTPError and status 400. 
assert.deepEqual(error.status, 400); }); diff --git a/test/functional/replication.js b/test/functional/replication.js index 0380c15..ae1d030 100644 --- a/test/functional/replication.js +++ b/test/functional/replication.js @@ -1,8 +1,5 @@ "use strict"; -// mocha defines to avoid JSHint breakage -/* global describe, it, before, beforeEach, after, afterEach */ - var assert = require('assert'); var dbu = require('../../lib/dbutils'); var fs = require('fs'); @@ -25,33 +22,33 @@ var testTable0 = { ], }; -describe('Table creation', function() { +describe('Table creation', () => { var db; - before(function() { + before(() => { return makeClient({ - log: function(level, info) { + log: (level, info) => { if (/^error|fatal/.test(level)) { console.log(level, info); } }, conf: yaml.safeLoad(fs.readFileSync(__dirname + '/../utils/test_client.conf.yaml')) }) - .then(function(newDb) { + .then((newDb) => { db = newDb; return db.createTable('restbase.cassandra.test.local', testTable0); }) - .then(function(response) { + .then((response) => { assert.ok(response, 'undefined response'); assert.deepEqual(response.status, 201); }); }); - after(function() { + after(() => { db.dropTable('restbase.cassandra.test.local', testTable0.table); }); - it('updates Cassandra replication', function() { + it('updates Cassandra replication', () => { return db._getReplication('restbase.cassandra.test.local', testTable0.table) - .then(function(response) { + .then((response) => { assert.ok(response, 'undefined response'); assert.strictEqual(Object.keys(response).length, 1, 'incorrect number of results'); @@ -59,10 +56,10 @@ describe('Table creation', function() { db.conf.datacenters.push('new_dc'); return db.updateReplicationIfNecessary('restbase.cassandra.test.local', testTable0.table); }) - .then(function() { + .then(() => { return db._getReplication('restbase.cassandra.test.local', testTable0.table); }) - .then(function(response) { + .then((response) => { assert.ok(response, 'undefined response'); 
assert.strictEqual(Object.keys(response).length, 2, 'incorrect number of results'); assert.ok(response.new_dc, 'additional datacenter not present'); diff --git a/test/index.js b/test/index.js index 979d444..5068e70 100644 --- a/test/index.js +++ b/test/index.js @@ -3,18 +3,12 @@ const yaml = require('js-yaml'); const fs = require("fs"); -require('mocha-eslint')([ - 'lib', - 'index.js' -]); - - describe('Functional', () => { const conf = yaml.safeLoad(fs.readFileSync(`${__dirname}/utils/test_client.conf.yaml`)); const dbConstructor = require('../index.js'); require('restbase-mod-table-spec').test(() => dbConstructor({ conf: conf, - log: function () { + log: () => { } })); }); diff --git a/test/unit/dbutils.js b/test/unit/dbutils.js index 9eb32e7..4a38c20 100644 --- a/test/unit/dbutils.js +++ b/test/unit/dbutils.js @@ -1,8 +1,5 @@ "use strict"; -// mocha defines to avoid JSHint breakage -/* global describe, it, before, beforeEach, after, afterEach */ - var assert = require('assert'); var dbu = require('../../lib/dbutils'); @@ -45,9 +42,9 @@ var testTable0b = { ] }; -describe('DB utilities', function() { +describe('DB utilities', () => { - it('builds SELECTs with included TTLs', function() { + it('builds SELECTs with included TTLs', () => { var req = { keyspace: 'keyspace', columnfamily: 'columnfamily', @@ -60,16 +57,16 @@ describe('DB utilities', function() { assert(match.length === 2, 'result has no matching projection'); - var projs = match[1].split(',').map(function(i) { return i.trim(); }); + var projs = match[1].split(',').map((i) => { return i.trim(); }); var exp = /TTL\((.+)\) as "_ttl_(.+)"/; // There should be 7 non-ttl attributes total. 
- assert(projs.filter(function(v) { return !exp.test(v); }).length === 7); + assert(projs.filter((v) => { return !exp.test(v); }).length === 7); var matching = []; - projs.filter(function(v) { return exp.test(v); }).forEach( - function(v) { + projs.filter((v) => { return exp.test(v); }).forEach( + (v) => { var v1 = v.match(exp)[1]; var v2 = v.match(exp)[2]; assert.deepEqual(v1, dbu.cassID(v2)); @@ -83,7 +80,7 @@ describe('DB utilities', function() { assert.deepEqual(matching.sort(), ["author", "comment"]); }); - it('builds SELECTS with an included LIMIT', function() { + it('builds SELECTS with an included LIMIT', () => { var req = { keyspace: 'keyspace', columnfamily: 'columnfamily',