[functional_test_runner] replace functional testing tools with custom/pluggable solution

spalger committed Mar 29, 2017
1 parent 556bfab commit d3be827
Showing 176 changed files with 8,699 additions and 7,199 deletions.
CONTRIBUTING.md: 7 changes (1 addition, 6 deletions)
@@ -308,12 +308,7 @@ npm run test:ui:runner

##### Browser Automation Notes

-- Using Page Objects pattern (https://theintern.github.io/intern/#writing-functional-test)
-- At least the initial tests for the Settings, Discover, and Visualize tabs all depend on a very specific set of logstash-type data (generated with makelogs). Since that is a static set of data, all the Discover and Visualize tests use a specific Absolute time range. This guarantees the same results each run.
-- These tests have been developed and tested with Chrome and Firefox browser. In theory, they should work on all browsers (that's the benefit of Intern using Leadfoot).
-- These tests should also work with an external testing service like https://saucelabs.com/ or https://www.browserstack.com/ but that has not been tested.
-- https://theintern.github.io/
-- https://theintern.github.io/leadfoot/module-leadfoot_Element.html
+- TODO

### Building OS packages

Gruntfile.js: 2 changes (1 addition, 1 deletion)
@@ -57,7 +57,7 @@ module.exports = function (grunt) {
init: true,
config: config,
loadGruntTasks: {
-pattern: ['grunt-*', '@*/grunt-*', 'gruntify-*', '@*/gruntify-*', 'intern']
+pattern: ['grunt-*', '@*/grunt-*', 'gruntify-*', '@*/gruntify-*']
}
});

package.json: 7 changes (4 additions, 3 deletions)
@@ -126,8 +126,8 @@
"expose-loader": "0.7.0",
"extract-text-webpack-plugin": "0.8.2",
"file-loader": "0.8.4",
"font-awesome": "4.4.0",
"flot-charts": "0.8.3",
"font-awesome": "4.4.0",
"glob": "5.0.13",
"glob-all": "3.0.1",
"good-squeeze": "2.1.0",
@@ -206,8 +206,9 @@
"chance": "1.0.6",
"cheerio": "0.22.0",
"chokidar": "1.6.0",
"chromedriver": "2.24.1",
"chromedriver": "2.28.0",
"classnames": "2.2.5",
"digdug": "1.6.3",
"enzyme": "2.7.0",
"eslint": "3.11.1",
"eslint-plugin-babel": "4.0.0",
@@ -233,7 +234,6 @@
"html-loader": "0.4.3",
"husky": "0.8.1",
"image-diff": "1.6.0",
"intern": "3.2.3",
"istanbul-instrumenter-loader": "0.1.3",
"jsdom": "9.9.1",
"karma": "1.2.0",
@@ -244,6 +244,7 @@
"karma-mocha": "0.2.0",
"karma-safari-launcher": "0.1.1",
"keymirror": "0.1.1",
"leadfoot": "1.7.1",
"license-checker": "5.1.2",
"load-grunt-config": "0.19.2",
"makelogs": "3.2.3",
scripts/functional_test_runner.js: 2 changes (2 additions, 0 deletions)
@@ -0,0 +1,2 @@
+require('../src/optimize/babel/register');
+require('../src/functional_test_runner/cli');
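For orientation: the es_archiver CLI changes further down read default values out of such a config file via `readConfigFile`. Below is a minimal sketch of a config that would satisfy the two keys the CLI reads; the export shape and all concrete values here are assumptions for illustration, not taken from this commit:

```js
// Hypothetical functional test config. Only the two keys read by
// src/es_archiver/cli.js (servers.elasticsearch, esArchiver.directory)
// are grounded in this commit; everything else is illustrative.
export default function () {
  return {
    servers: {
      // formatUrl(config.get('servers.elasticsearch')) in cli.js suggests
      // a url.format()-compatible object
      elasticsearch: {
        protocol: 'http',
        hostname: 'localhost',
        port: 9200,
      },
    },
    esArchiver: {
      directory: 'test/es_archives', // assumed path
    },
  };
}
```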
src/es_archiver/actions/load.js: 4 changes (2 additions, 2 deletions)
@@ -9,7 +9,7 @@ import {
isGzip,
createStats,
prioritizeMappings,
-getArchiveFiles,
+readDirectory,
createParseArchiveStreams,
createCreateIndexStream,
createIndexDocRecordsStream,
@@ -19,7 +19,7 @@ export async function loadAction({ name, skipExisting, client, dataDir, log }) {
const inputDir = resolve(dataDir, name);
const stats = createStats(name, log);

-const files = prioritizeMappings(await getArchiveFiles(inputDir));
+const files = prioritizeMappings(await readDirectory(inputDir));
for (const filename of files) {
log.info('[%s] Loading %j', name, filename);

src/es_archiver/actions/rebuild_all.js: 7 changes (3 additions, 4 deletions)
@@ -1,7 +1,6 @@
import { resolve } from 'path';
import {
rename,
-readdir,
createReadStream,
createWriteStream
} from 'fs';
@@ -14,18 +13,18 @@

import {
prioritizeMappings,
-getArchiveFiles,
+readDirectory,
isGzip,
createParseArchiveStreams,
createFormatArchiveStreams,
} from '../lib';

export async function rebuildAllAction({ dataDir, log }) {
-const archiveNames = await fromNode(cb => readdir(dataDir, cb));
+const archiveNames = await readDirectory(dataDir);

for (const name of archiveNames) {
const inputDir = resolve(dataDir, name);
-const files = prioritizeMappings(await getArchiveFiles(inputDir));
+const files = prioritizeMappings(await readDirectory(inputDir));
for (const filename of files) {
log.info('[%s] Rebuilding %j', name, filename);

src/es_archiver/actions/unload.js: 4 changes (2 additions, 2 deletions)
@@ -9,7 +9,7 @@ import {
isGzip,
createStats,
prioritizeMappings,
-getArchiveFiles,
+readDirectory,
createParseArchiveStreams,
createFilterRecordsStream,
createDeleteIndexStream
@@ -19,7 +19,7 @@ export async function unloadAction({ name, client, dataDir, log }) {
const inputDir = resolve(dataDir, name);
const stats = createStats(name, log);

-const files = prioritizeMappings(await getArchiveFiles(inputDir));
+const files = prioritizeMappings(await readDirectory(inputDir));
for (const filename of files) {
log.info('[%s] Unloading indices from %j', name, filename);

src/es_archiver/cli.js: 20 changes (15 additions, 5 deletions)
@@ -6,12 +6,14 @@

import { resolve } from 'path';
import { readFileSync } from 'fs';
+import { format as formatUrl } from 'url';

import { Command } from 'commander';
import elasticsearch from 'elasticsearch';

import { EsArchiver } from './es_archiver';
-import { createLog } from './lib';
+import { createToolingLog } from '../utils';
+import { readConfigFile } from '../functional_test_runner';

const cmd = new Command('node scripts/es_archiver');

@@ -20,6 +22,7 @@ cmd
.option('--es-url [url]', 'url for elasticsearch')
.option(`--dir [path]`, 'where archives are stored')
.option('--verbose', 'turn on verbose logging')
+.option('--config [path]', 'path to a functional test config file to use for default values')
.on('--help', () => {
console.log(readFileSync(resolve(__dirname, './cli_help.txt'), 'utf8'));
});
@@ -49,9 +52,16 @@ if (missingCommand) {

async function execute(operation, ...args) {
try {
-const log = createLog(cmd.verbose ? 'debug' : 'info');
+const log = createToolingLog(cmd.verbose ? 'debug' : 'info');
log.pipe(process.stdout);

+if (cmd.config) {
+  // load default values from the specified config file
+  const config = await readConfigFile(log, resolve(cmd.config));
+  if (!cmd.esUrl) cmd.esUrl = formatUrl(config.get('servers.elasticsearch'));
+  if (!cmd.dir) cmd.dir = config.get('esArchiver.directory');
+}

// log and count all validation errors
let errorCount = 0;
const error = (msg) => {
@@ -61,10 +71,10 @@ async function execute(operation, ...args) {

if (!operation) error('Missing or invalid command');
if (!cmd.esUrl) {
-error('You must specify either --es-url flag');
+error('You must specify either --es-url or --config flags');
}
if (!cmd.dir) {
-error('You must specify either --dir flag');
+error('You must specify either --dir or --config flags');
}

// if there was a validation error display the help
@@ -84,7 +94,7 @@
const esArchiver = new EsArchiver({
log,
client,
-dataDir: resolve(cmd.dir)
+dataDir: resolve(cmd.dir),
});
await esArchiver[operation](...args);
} finally {
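With the `--config` fallback above, a config file can stand in for the individual flags: for example (archive name and paths here are hypothetical), `node scripts/es_archiver --es-url http://localhost:9200 --dir test/es_archives load some_archive` and `node scripts/es_archiver --config test/functional/config.js load some_archive` should resolve to the same values, with the config file supplying `servers.elasticsearch` and `esArchiver.directory`.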
src/es_archiver/lib/__tests__/stats.js: 37 changes (19 additions, 18 deletions)
@@ -2,9 +2,10 @@ import expect from 'expect.js';
import { uniq } from 'lodash';
import sinon from 'sinon';

-import { createStats, createLog } from '../';
+import { createStats } from '../';

import {
+createToolingLog,
createConcatStream,
createPromiseFromStreams
} from '../../../utils';
@@ -47,14 +48,14 @@ function assertDeepClones(a, b) {
describe('esArchiver: Stats', () => {
describe('#skippedIndex(index)', () => {
it('marks the index as skipped', () => {
-const stats = createStats('name', createLog());
+const stats = createStats('name', createToolingLog());
stats.skippedIndex('index-name');
const indexStats = stats.toJSON()['index-name'];
expect(indexStats).to.have.property('skipped', true);
});

it('logs that the index was skipped', async () => {
-const log = createLog('debug');
+const log = createToolingLog('debug');
const stats = createStats('name', log);
stats.skippedIndex('index-name');
expect(await drain(log)).to.contain('Skipped');
@@ -63,13 +64,13 @@ describe('esArchiver: Stats', () => {

describe('#deletedIndex(index)', () => {
it('marks the index as deleted', () => {
-const stats = createStats('name', createLog());
+const stats = createStats('name', createToolingLog());
stats.deletedIndex('index-name');
const indexStats = stats.toJSON()['index-name'];
expect(indexStats).to.have.property('deleted', true);
});
it('logs that the index was deleted', async () => {
-const log = createLog('debug');
+const log = createToolingLog('debug');
const stats = createStats('name', log);
stats.deletedIndex('index-name');
expect(await drain(log)).to.contain('Deleted');
@@ -78,20 +79,20 @@ describe('esArchiver: Stats', () => {

describe('#createdIndex(index, [metadata])', () => {
it('marks the index as created', () => {
-const stats = createStats('name', createLog());
+const stats = createStats('name', createToolingLog());
stats.createdIndex('index-name');
const indexStats = stats.toJSON()['index-name'];
expect(indexStats).to.have.property('created', true);
});
it('logs that the index was created', async () => {
-const log = createLog('debug');
+const log = createToolingLog('debug');
const stats = createStats('name', log);
stats.createdIndex('index-name');
expect(await drain(log)).to.contain('Created');
});
context('with metadata', () => {
it('debug-logs each key from the metadata', async () => {
-const log = createLog('debug');
+const log = createToolingLog('debug');
const stats = createStats('name', log);
stats.createdIndex('index-name', {
foo: 'bar'
Expand All @@ -103,7 +104,7 @@ describe('esArchiver: Stats', () => {
});
context('without metadata', () => {
it('no debug logging', async () => {
-const log = createLog('debug');
+const log = createToolingLog('debug');
const stats = createStats('name', log);
stats.createdIndex('index-name');
const output = await drain(log);
Expand All @@ -114,20 +115,20 @@ describe('esArchiver: Stats', () => {

describe('#archivedIndex(index, [metadata])', () => {
it('marks the index as archived', () => {
-const stats = createStats('name', createLog());
+const stats = createStats('name', createToolingLog());
stats.archivedIndex('index-name');
const indexStats = stats.toJSON()['index-name'];
expect(indexStats).to.have.property('archived', true);
});
it('logs that the index was archived', async () => {
-const log = createLog('debug');
+const log = createToolingLog('debug');
const stats = createStats('name', log);
stats.archivedIndex('index-name');
expect(await drain(log)).to.contain('Archived');
});
context('with metadata', () => {
it('debug-logs each key from the metadata', async () => {
-const log = createLog('debug');
+const log = createToolingLog('debug');
const stats = createStats('name', log);
stats.archivedIndex('index-name', {
foo: 'bar'
Expand All @@ -139,7 +140,7 @@ describe('esArchiver: Stats', () => {
});
context('without metadata', () => {
it('no debug logging', async () => {
-const log = createLog('debug');
+const log = createToolingLog('debug');
const stats = createStats('name', log);
stats.archivedIndex('index-name');
const output = await drain(log);
@@ -150,7 +151,7 @@ describe('esArchiver: Stats', () => {

describe('#indexedDoc(index)', () => {
it('increases the docs.indexed count for the index', () => {
-const stats = createStats('name', createLog());
+const stats = createStats('name', createToolingLog());
stats.indexedDoc('index-name');
expect(stats.toJSON()['index-name'].docs.indexed).to.be(1);
stats.indexedDoc('index-name');
@@ -161,7 +162,7 @@ describe('esArchiver: Stats', () => {

describe('#archivedDoc(index)', () => {
it('increases the docs.archived count for the index', () => {
-const stats = createStats('name', createLog());
+const stats = createStats('name', createToolingLog());
stats.archivedDoc('index-name');
expect(stats.toJSON()['index-name'].docs.archived).to.be(1);
stats.archivedDoc('index-name');
@@ -172,13 +173,13 @@ describe('esArchiver: Stats', () => {

describe('#toJSON()', () => {
it('returns the stats for all indexes', () => {
-const stats = createStats('name', createLog());
+const stats = createStats('name', createToolingLog());
stats.archivedIndex('index1');
stats.archivedIndex('index2');
expect(Object.keys(stats.toJSON())).to.eql(['index1', 'index2']);
});
it('returns a deep clone of the stats', () => {
-const stats = createStats('name', createLog());
+const stats = createStats('name', createToolingLog());
stats.archivedIndex('index1');
stats.archivedIndex('index2');
stats.deletedIndex('index3');
@@ -189,7 +190,7 @@ describe('esArchiver: Stats', () => {

describe('#forEachIndex(fn)', () => {
it('iterates a clone of the index stats', () => {
-const stats = createStats('name', createLog());
+const stats = createStats('name', createToolingLog());
stats.archivedIndex('index1');
stats.archivedIndex('index2');
stats.deletedIndex('index3');
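The tests above repeatedly `await drain(log)` to capture log output as a string. The helper itself is hidden in a collapsed hunk; a plausible sketch, assuming it is built from the imported `createPromiseFromStreams` and `createConcatStream` utilities:

```js
// Plausible sketch of the drain() helper used in these tests; the real
// implementation is collapsed out of this diff.
async function drain(log) {
  log.end(); // close the tooling log stream so it flushes and ends
  return await createPromiseFromStreams([
    log,
    createConcatStream(''), // join all emitted chunks into one string
  ]);
}
```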
src/es_archiver/lib/archives/filenames.js: 11 changes (0 additions, 11 deletions)
@@ -1,20 +1,9 @@
-import { fromNode } from 'bluebird';
-import { readdir } from 'fs';
import { basename, extname } from 'path';

export function isGzip(path) {
return extname(path) === '.gz';
}

-/**
- * Gead the list of files in an archive.
- *
- * @return {Promise} [description]
- */
-export async function getArchiveFiles(archiveDir) {
-  return await fromNode(cb => readdir(archiveDir, cb));
-}

/**
* Check if a path is for a, potentially gzipped, mapping file
* @param {String} path
src/es_archiver/lib/archives/index.js: 1 change (0 additions, 1 deletion)
@@ -1,6 +1,5 @@
export {
isGzip,
-getArchiveFiles,
prioritizeMappings,
} from './filenames';

src/es_archiver/lib/archives/parse.js: 2 changes (1 addition, 1 deletion)
@@ -12,6 +12,6 @@ export function createParseArchiveStreams({ gzip = false } = {}) {
return [
gzip ? createGunzip() : new PassThrough(),
createSplitStream(RECORD_SEPARATOR),
-createJsonParseStream()
+createJsonParseStream(),
];
}
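`createParseArchiveStreams` returns an array of transform streams rather than a single stream. A sketch of how a caller might compose them; the wrapper function, the filename handling, and the assumption that `createPromiseFromStreams` accepts a flat array of streams (as the stats tests suggest) are all illustrative:

```js
import { createReadStream } from 'fs';

import { createPromiseFromStreams, createConcatStream } from '../../../utils';
import { isGzip } from './filenames';
import { createParseArchiveStreams } from './parse';

// Read one archive file and collect its parsed records into an array.
async function readArchiveFile(path) {
  return await createPromiseFromStreams([
    createReadStream(path),
    ...createParseArchiveStreams({ gzip: isGzip(path) }),
    createConcatStream([]), // accumulate the parsed records
  ]);
}
```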
src/es_archiver/lib/directory.js: 8 changes (8 additions, 0 deletions)
@@ -0,0 +1,8 @@
+import { readdir } from 'fs';
+
+import { fromNode } from 'bluebird';
+
+export async function readDirectory(path) {
+  const allNames = await fromNode(cb => readdir(path, cb));
+  return allNames.filter(name => !name.startsWith('.'));
+}
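A usage sketch mirroring the call sites in the actions above; the wrapper function is hypothetical, but the `prioritizeMappings(await readDirectory(...))` pattern is taken verbatim from load.js and unload.js:

```js
import { resolve } from 'path';

import { readDirectory } from './directory';
import { prioritizeMappings } from './archives';

// List an archive's files with mappings first, ignoring dot-files such as
// .DS_Store, the same pattern used by the load/unload/rebuild actions.
async function listArchiveFiles(dataDir, name) {
  const inputDir = resolve(dataDir, name);
  return prioritizeMappings(await readDirectory(inputDir));
}
```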
