Merge pull request #33 from marco-genova-ntt/environment-pages-configuration

Added environment pages.json configuration #32
marco-genova-ntt committed Feb 25, 2019
2 parents 827edeb + 6bd18c5 commit b235635
Showing 12 changed files with 245 additions and 26 deletions.
26 changes: 22 additions & 4 deletions README.md
@@ -81,15 +81,22 @@ SERIES_TEMAPLTE_TREND_FILE=./templates/series/anychart-template.txt
#Save reports in AWS S3 using bucket information
SERIES_ENABLE_TREND_REPORT_ON_AWS=true
#Sets the env to select a set of pages
LIGHTHOUSE_CI_ENV=qa
```

## Define pages.json

**(V0.3.0+)** The pages are divided by environment; the default environment is _"default"_.

```
[
'https://www.sample.com',
'https://www.sample.com/page2?param=1'
]
{
"env1" : ["https://www.sample.com",
          "https://www.sample.com/page2?param=1"],
"env2" : ["https://www.sample2.com",
          "https://www.sample2.com/page2?param=1"]
}
```
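
A minimal usage sketch of how this environment-keyed file is consumed by the `PagesProvider` class added in this commit (the import path and the environment name below are illustrative only):

```
import PagesProvider from './app/PagesProvider';

// Reads pages.json from the current working directory (the default baseFile)
const provider = new PagesProvider();
provider.loadPages();

// Runs the worker only on the pages configured for the "qa" environment;
// nothing happens if that environment key is missing or empty
provider.worksOnPages('qa', (pages) => {
  pages.forEach((url) => console.log('would analyze', url));
});
```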

@@ -155,3 +162,14 @@ Currently the results series reports are generated with a simple approach using a template
In this case we've used a simple text replacement in an HTML page generated by https://playground.anychart.com/.

We've added placeholders to the ready-made HTML; during report generation the placeholders are replaced with the real values.
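
As an illustration only, the substitution can be done with the `replaceAll` helper from `app/utility.js`; the placeholder name below is hypothetical, not necessarily the one used in the real templates:

```
import * as utility from './app/utility';

// Hypothetical template snippet containing a placeholder to fill in
const template = '<div id="chart">__SERIES_DATA__</div>';
const seriesValues = JSON.stringify([10, 20, 30]);

// replaceAll(str, find, replace) replaces every occurrence of the placeholder
const report = utility.replaceAll(template, '__SERIES_DATA__', seriesValues);
console.log(report); // <div id="chart">[10,20,30]</div>
```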

### Docker launch and pages
My advice is to select the set of pages directly from the docker run command by forcing the environment variable:

```
LIGHTHOUSE_CI_ENV=qa
```

That is, remove it from the .env file and force the value from the docker command like this:

>docker run -e "LIGHTHOUSE_CI_ENV=qa" lighthouse-slack-ci
23 changes: 23 additions & 0 deletions __tests__/pagesprovider.test.js
@@ -0,0 +1,23 @@
import R from 'ramda';
import PagesProvider from './../app/PagesProvider';

test('check default constructor', () => {
let pp = new PagesProvider();
expect(pp).not.toBeUndefined();
expect(pp.pages).toEqual({});
expect(pp._baseFile).toEqual('pages.json');
});

test('check a complete load process', () => {
let pp = new PagesProvider({baseFile : "./tmp/test_pages.json"});
expect(pp).not.toBeUndefined();
expect(pp._baseFile).toEqual('./tmp/test_pages.json');
expect(pp.pages).toEqual({});

pp.loadPages();
expect(pp.pages).not.toBeUndefined();

pp.worksOnPages('test2', (pages) => {
expect(R.length(pages)).toBe(2);
});
});
9 changes: 9 additions & 0 deletions __tests__/utility.test.js
@@ -80,3 +80,12 @@ test('replace All occurrencies', () => {
expect(result).not.toBeNull();
expect(result).toEqual('uno tre tre quattro tre');
});

test('clone prop', () => {
let refObjet = {};
expect(utility.getClonedProp("not-exist",refObjet)).toBeUndefined();

refObjet = {"prop1" : "value1", "prop2" : "value2"};
expect(utility.getClonedProp("prop2",refObjet)).not.toBeUndefined();
expect(utility.getClonedProp("prop2",refObjet)).toEqual("value2");
});
46 changes: 46 additions & 0 deletions app/PagesProvider.js
@@ -0,0 +1,46 @@
import R from 'ramda';
import * as utility from './utility';
import path from 'path';

/**
* This class is used to provide a set of pages extracted from the filesystem.
* The set of pages is split by environment.
*/
class PagesProvider {

constructor(configuration = {}) {
this._baseFile = configuration.baseFile ? configuration.baseFile : 'pages.json';
this._absoluteFile = path.join(process.cwd(), this._baseFile);
}

/**
* Loads the pages from the filesystem into the object state
*/
loadPages () {
this._pages = utility.getJSONFromFile(this._absoluteFile);
}

/**
* Works on a set of pages for the given context
*
* @param {String} context identifier of the environment to use
* @param {Function} worker function that works on the pages
*/
worksOnPages(context = 'default', worker) {
if (!R.isEmpty(this._pages) && worker) {
const pagesSet = utility.getClonedProp(context, this._pages);
if (R.length(pagesSet) > 0) {
R.call(worker, R.clone(pagesSet));
}
}
}

/**
* Gets the whole set of pages
*/
get pages() {
return {...this._pages};
}
}

export default PagesProvider;
20 changes: 12 additions & 8 deletions app/index.js
@@ -1,13 +1,11 @@
import { analyze } from './lighthouse-job';
import * as utility from './utility';
import fs from 'fs';
import path from 'path';
import {dispatchMessageManager} from './slack-emitter';
import {dispatchSeriesManager} from './allseries/series-manager';
import {downloadFile, checkExistence} from './aws-s3-manager';
import PagesProvider from './PagesProvider';

fs.readFile(path.join(process.cwd(), 'pages.json'), (err, data) => {
if (err) throw err;
function startAnalisys(pages) {
const customManagers = [dispatchMessageManager, dispatchSeriesManager];

//XXX Improve the design of acquiring the database, adding a clear lifecycle
@@ -20,14 +20,18 @@ fs.readFile(path.join(process.cwd(), 'pages.json'), (err, data) => {

if (existence) {
downloadFile(bucketName, dbName, dbPath, () => {
analyze(JSON.parse(data), customManagers);
analyze(pages, customManagers);
});
} else {
analyze(JSON.parse(data), customManagers);
analyze(pages, customManagers);
}
})();
} else {
analyze(JSON.parse(data), customManagers);
analyze(pages, customManagers);
}
});
}
//docker run -e "LIGHTHOUSE_CI_ENV=qa" lighthouse-slack-ci
const pagesProvider = new PagesProvider();

const context = utility.string('LIGHTHOUSE_CI_ENV', 'prod');
console.info('LightHouse CI - Environment [',context,']');
pagesProvider.loadPages();
pagesProvider.worksOnPages(context, startAnalisys);
7 changes: 6 additions & 1 deletion app/utility.js
@@ -263,4 +263,9 @@ export function escapeRegExp(str) {
*/
export function replaceAll(str, find, replace) {
return str.replace(new RegExp(escapeRegExp(find), 'g'), replace);
}
}

/**
* Gets a cloned prop of an object
*/
export const getClonedProp = R.pipe(R.prop, R.clone);
27 changes: 27 additions & 0 deletions dist/__tests__/pagesprovider.test.js
@@ -0,0 +1,27 @@
"use strict";

var _ramda = _interopRequireDefault(require("ramda"));

var _PagesProvider = _interopRequireDefault(require("./../app/PagesProvider"));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

test('check default constructor', () => {
let pp = new _PagesProvider.default();
expect(pp).not.toBeUndefined();
expect(pp.pages).toEqual({});
expect(pp._baseFile).toEqual('pages.json');
});
test('check a complete load process', () => {
let pp = new _PagesProvider.default({
baseFile: "./tmp/test_pages.json"
});
expect(pp).not.toBeUndefined();
expect(pp._baseFile).toEqual('./tmp/test_pages.json');
expect(pp.pages).toEqual({});
pp.loadPages();
expect(pp.pages).not.toBeUndefined();
pp.worksOnPages('test2', pages => {
expect(_ramda.default.length(pages)).toBe(2);
});
});
10 changes: 10 additions & 0 deletions dist/__tests__/utility.test.js
@@ -75,4 +75,14 @@ test('replace All occurrencies', () => {
expect(result).not.toBeUndefined();
expect(result).not.toBeNull();
expect(result).toEqual('uno tre tre quattro tre');
});
test('clone prop', () => {
let refObjet = {};
expect(utility.getClonedProp("not-exist", refObjet)).toBeUndefined();
refObjet = {
"prop1": "value1",
"prop2": "value2"
};
expect(utility.getClonedProp("prop2", refObjet)).not.toBeUndefined();
expect(utility.getClonedProp("prop2", refObjet)).toEqual("value2");
});
65 changes: 65 additions & 0 deletions dist/app/PagesProvider.js
@@ -0,0 +1,65 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;

var _ramda = _interopRequireDefault(require("ramda"));

var utility = _interopRequireWildcard(require("./utility"));

var _path = _interopRequireDefault(require("path"));

function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; return newObj; } }

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

/**
* This class is used to provide a set of pages extracted from the filesystem.
* The set of pages is split by environment.
*/
class PagesProvider {
constructor(configuration = {}) {
this._baseFile = configuration.baseFile ? configuration.baseFile : 'pages.json';
this._absoluteFile = _path.default.join(process.cwd(), this._baseFile);
}
/**
* Loads the pages from the filesystem into the object state
*/


loadPages() {
this._pages = utility.getJSONFromFile(this._absoluteFile);
}
/**
* Works on a set of pages for the given context
*
* @param {String} context identifier of the environment to use
* @param {Function} worker function that works on the pages
*/


worksOnPages(context = 'default', worker) {
if (!_ramda.default.isEmpty(this._pages) && worker) {
const pagesSet = utility.getClonedProp(context, this._pages);

if (_ramda.default.length(pagesSet) > 0) {
_ramda.default.call(worker, _ramda.default.clone(pagesSet));
}
}
}
/**
* Gets the whole set of pages
*/


get pages() {
return { ...this._pages
};
}

}

var _default = PagesProvider;
exports.default = _default;
24 changes: 14 additions & 10 deletions dist/app/index.js
@@ -4,22 +4,19 @@ var _lighthouseJob = require("./lighthouse-job");

var utility = _interopRequireWildcard(require("./utility"));

var _fs = _interopRequireDefault(require("fs"));

var _path = _interopRequireDefault(require("path"));

var _slackEmitter = require("./slack-emitter");

var _seriesManager = require("./allseries/series-manager");

var _awsS3Manager = require("./aws-s3-manager");

var _PagesProvider = _interopRequireDefault(require("./PagesProvider"));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; return newObj; } }

_fs.default.readFile(_path.default.join(process.cwd(), 'pages.json'), (err, data) => {
if (err) throw err;
function startAnalisys(pages) {
const customManagers = [_slackEmitter.dispatchMessageManager, _seriesManager.dispatchSeriesManager]; //XXX Improve the design of acquiring the database, adding a clear lifecycle

if (utility.bool('SERIES_SERVICE_DATABASE_FILE_ON_AWS')) {
@@ -31,13 +28,20 @@ _fs.default.readFile(_path.default.join(process.cwd(), 'pages.json'), (err, data

if (existence) {
(0, _awsS3Manager.downloadFile)(bucketName, dbName, dbPath, () => {
(0, _lighthouseJob.analyze)(JSON.parse(data), customManagers);
(0, _lighthouseJob.analyze)(pages, customManagers);
});
} else {
(0, _lighthouseJob.analyze)(JSON.parse(data), customManagers);
(0, _lighthouseJob.analyze)(pages, customManagers);
}
})();
} else {
(0, _lighthouseJob.analyze)(JSON.parse(data), customManagers);
(0, _lighthouseJob.analyze)(pages, customManagers);
}
});
} //docker run -e "LIGHTHOUSE_CI_ENV=qa" lighthouse-slack-ci


const pagesProvider = new _PagesProvider.default();
const context = utility.string('LIGHTHOUSE_CI_ENV', 'prod');
console.info('LightHouse CI - Environment [', context, ']');
pagesProvider.loadPages();
pagesProvider.worksOnPages(context, startAnalisys);
12 changes: 10 additions & 2 deletions dist/app/utility.js
@@ -20,7 +20,7 @@ exports.createHash = createHash;
exports.nowUTC = nowUTC;
exports.escapeRegExp = escapeRegExp;
exports.replaceAll = replaceAll;
exports.lookup = exports.concatAll = void 0;
exports.getClonedProp = exports.lookup = exports.concatAll = void 0;

var _ramda = _interopRequireDefault(require("ramda"));

@@ -323,4 +323,12 @@ function escapeRegExp(str) {

function replaceAll(str, find, replace) {
return str.replace(new RegExp(escapeRegExp(find), 'g'), replace);
}
}
/**
* Gets a cloned prop of an object
*/


const getClonedProp = _ramda.default.pipe(_ramda.default.prop, _ramda.default.clone);

exports.getClonedProp = getClonedProp;
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "lighthouse-score-for-ci",
"version": "0.2.1",
"version": "0.3.0",
"description": "LightHouse CI - Module used to create a very simple step for continous integratraion",
"main": "dist/app/lighthouse-job.js",
"scripts": {