This repository has been archived by the owner on Apr 17, 2024. It is now read-only.

Commit

Add support for environment variables configuration and add Heroku Procfile.

hernanhht committed Aug 1, 2017
1 parent 2c6c156 commit 2a16a8d
Showing 5 changed files with 220 additions and 0 deletions.
1 change: 1 addition & 0 deletions Procfile
@@ -0,0 +1 @@
worker: PORT=$LIMITD_PORT node bin/limitd
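
On Heroku, the `LIMITD_PORT` config var referenced by this Procfile has to be set before the worker dyno can start. A minimal sketch of what that might look like with the Heroku CLI follows; the variable names come from the Procfile and the environment parser added in this commit, but the values are purely illustrative:

```bash
# Illustrative values only; LIMITD_PORT is consumed by the Procfile,
# the remaining variables by the environment configuration parser.
heroku config:set LIMITD_PORT=9001 DB=/app/database \
  BUCKET_1_NAME=user BUCKET_1_SIZE=10 BUCKET_1_PER_SECOND=5
heroku ps:scale worker=1
```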
29 changes: 29 additions & 0 deletions README.md
@@ -56,6 +56,7 @@ On other systems use node.js and npm:
npm i -g limitd
```

#### Configuration with a config file
Create a file named `limitd.config` for the server settings:
```yaml
#port to listen on
@@ -80,6 +81,34 @@ You can find all configuration options [below](#server_options).

> **Note**: For production you would create a daemon (upstart, systemd, initd, etc.) that runs the aforementioned command.
#### Configuration with environment variables
```bash
#port to listen on
export PORT=9001

#db path
export DB=/var/limitd/database

#define the bucket types
export BUCKET_1_NAME=user
export BUCKET_1_SIZE=10
export BUCKET_1_PER_SECOND=5

export BUCKET_2_NAME=some_other_bucket
export BUCKET_2_SIZE=25
export BUCKET_2_PER_MINUTE=15
```
> **Note**: Using environment variables for buckets has a limitation: the `override` config parameter is not supported yet.
Start the server:
```bash
limitd
```
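
If you prefer not to export the variables into your shell session, the same settings can be passed inline for a one-off run. A sketch reusing the values from the example above:

```bash
# One-shot invocation; the values mirror the exports shown above.
PORT=9001 DB=/var/limitd/database \
BUCKET_1_NAME=user BUCKET_1_SIZE=10 BUCKET_1_PER_SECOND=5 \
BUCKET_2_NAME=some_other_bucket BUCKET_2_SIZE=25 BUCKET_2_PER_MINUTE=15 \
limitd
```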

You can find all configuration options [below](#server_options).

> **Note**: For production you would create a daemon (upstart, systemd, initd, etc.) that runs the aforementioned command.
## Motivation

While there are many solutions that rely on a central database like Redis, these solutions typically put all the configuration, limits, and logic on the application side.
11 changes: 11 additions & 0 deletions bin/limitd
@@ -9,6 +9,7 @@ const yaml = require('js-yaml');
const _ = require('lodash');
const profiler = require('v8-profiler');
const getLogger = require('../lib/logger');
const configEnvParser = require('../conf/config.env.parser');

const LimitdServer = require('../server');

@@ -37,6 +38,16 @@ if (config.configFile) {
}
}

// Environment-derived settings are merged on top of any values loaded from
// the config file, so environment variables win for overlapping keys.
try {
  const parsed = configEnvParser.parse(process.env);
  _.extend(config, parsed);
} catch (e) {
  console.error('Error parsing environment configuration\n', e.stack);
  setTimeout(function () {
    process.exit(2);
  }, 500);
}

const logger = getLogger(config.log_level);

if (typeof config.db === 'undefined') {
67 changes: 67 additions & 0 deletions conf/config.env.parser.js
@@ -0,0 +1,67 @@
'use strict';

const _ = require('lodash');
const schema = require('./config.schema');
const propertyNames = Object.keys(schema.properties);

function parseString(name, value) {
  const subschema = schema.properties[name];

  return parseStringToSpecificType(value, subschema.type);
}

function parseBucketString(name, value) {
  const objectName = Object.keys(schema.properties.buckets.patternProperties)[0];
  const subschema = schema.properties.buckets.patternProperties[objectName].properties[name];

  return parseStringToSpecificType(value, subschema.type);
}

function parseStringToSpecificType(value, type) {
  let parsed;
  // Schema types may be declared as a single type or as an array of types.
  if (type === 'string' || (Array.isArray(type) && type.indexOf('string') > -1)) {
    return value;
  }
  switch (type) {
    case 'integer':
      parsed = Number(value);
      break;
    case 'boolean':
      // Boolean() treats any non-empty string (including 'false') as true.
      parsed = Boolean(value);
      break;
  }

  return parsed;
}

module.exports.parse = function (env) {
  const keys = Object.keys(env);
  const config = {};
  const buckets = {};

  keys.forEach(key => {
    if (key.indexOf('BUCKET') === 0) {
      // BUCKET_<id>_NAME defines the bucket; every other BUCKET_<id>_* variable
      // becomes a property of that bucket.
      const id = key.split('_')[1];
      if (key !== `BUCKET_${id}_NAME`) {
        const bucketName = env[`BUCKET_${id}_NAME`];
        let bucket = buckets[bucketName];
        if (!bucket) {
          bucket = {};
          buckets[bucketName] = bucket;
        }
        const propertyName = key.replace(`BUCKET_${id}_`, '').toLowerCase();
        bucket[propertyName] = parseBucketString(propertyName, env[key]);
      }
    } else if (propertyNames.indexOf(key.toLowerCase()) > -1) {
      // Top-level variables are only picked up when they match a known schema property.
      const propertyName = key.toLowerCase();
      config[propertyName] = parseString(propertyName, env[key]);
    }
  });

  if (!_.isEmpty(buckets)) {
    config.buckets = buckets;
  }

  return config;
};
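
For a quick sanity check of what the parser produces, a one-liner along these lines (run from the repository root; the values mirror the tests below) would print the assembled configuration object:

```bash
# Hypothetical spot check from the repository root; values mirror the tests below.
node -e "console.log(require('./conf/config.env.parser').parse({ PORT: '9001', DB: './database', BUCKET_1_NAME: 'user', BUCKET_1_SIZE: '10', BUCKET_1_PER_SECOND: '5' }))"
# => { port: '9001', db: './database', buckets: { user: { size: 10, per_second: 5 } } }
```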
112 changes: 112 additions & 0 deletions test/config.env.parser.tests.js
@@ -0,0 +1,112 @@
const expect = require('chai').expect;
const parser = require('../conf/config.env.parser');

describe('config env parser', function() {
  describe('with a complete well formed environment', function () {
    const env = {
      PORT: '9001',
      BUCKET_1_NAME: 'user',
      BUCKET_1_SIZE: '1',
      BUCKET_1_PER_MINUTE: '5',
      BUCKET_1_PURPOSE: 'user creation bucket',
      BUCKET_2_NAME: 'foo',
      BUCKET_2_SIZE: '10',
      BUCKET_2_PER_MINUTE: '15',
      BUCKET_2_MATCH: '^[a-zA-Z0-9]+$',
      BUCKET_3_NAME: 'bar',
      BUCKET_3_UNLIMITED: 'true',
      DB: './database'
    };

    it('should parse it ok', function() {
      const config = parser.parse(env);
      expect(Object.keys(config).length).to.equal(3);
      expect(config.port).to.equal('9001');
      expect(config.db).to.equal('./database');
      expect(config.buckets).to.deep.equal({
        user: { size: 1, per_minute: 5, purpose: 'user creation bucket' },
        foo: { size: 10, per_minute: 15, match: '^[a-zA-Z0-9]+$' },
        bar: { unlimited: true }
      });
    });
  });

  describe('with just the buckets configuration in the environment', function () {
    const env = {
      BUCKET_1_NAME: 'user',
      BUCKET_1_SIZE: 1,
      BUCKET_1_PER_MINUTE: 5,
      BUCKET_2_NAME: 'foo',
      BUCKET_2_SIZE: 10,
      BUCKET_2_PER_MINUTE: 15,
      BUCKET_3_NAME: 'bar',
      BUCKET_3_SIZE: 13,
      BUCKET_3_PER_SECOND: 25
    };

    it('should parse it ok', function() {
      const config = parser.parse(env);
      expect(Object.keys(config).length).to.equal(1);
      expect(config.buckets).to.deep.equal({
        user: { size: 1, per_minute: 5 },
        foo: { size: 10, per_minute: 15 },
        bar: { size: 13, per_second: 25 }
      });
    });
  });

  describe('with just the buckets configuration in the environment in a random order', function () {
    const env = {
      BUCKET_3_PER_SECOND: 25,
      BUCKET_1_SIZE: 1,
      BUCKET_2_NAME: 'foo',
      BUCKET_1_PER_MINUTE: 5,
      BUCKET_2_SIZE: 10,
      BUCKET_3_NAME: 'bar',
      BUCKET_2_PER_MINUTE: 15,
      BUCKET_1_NAME: 'user',
      BUCKET_3_SIZE: 13
    };

    it('should parse it ok', function() {
      const config = parser.parse(env);
      expect(Object.keys(config).length).to.equal(1);
      expect(config.buckets).to.deep.equal({
        user: { size: 1, per_minute: 5 },
        foo: { size: 10, per_minute: 15 },
        bar: { size: 13, per_second: 25 }
      });
    });
  });

  describe('with not supported additional properties', function () {
    const env = {
      PORT: '9001',
      BUCKET_1_NAME: 'user',
      BUCKET_1_SIZE: '1',
      BUCKET_1_PER_MINUTE: '5',
      BUCKET_2_NAME: 'foo',
      BUCKET_2_SIZE: '10',
      BUCKET_2_PER_MINUTE: '15',
      BUCKET_3_NAME: 'bar',
      BUCKET_3_SIZE: '13',
      BUCKET_3_PER_SECOND: '25',
      DB: './database',

      ADDITIONAL_PROPERTY_1: 'baz',
      additional_property_2: 'taz'
    };

    it('should not include additional properties', function() {
      const config = parser.parse(env);
      expect(Object.keys(config).length).to.equal(3);
      expect(config.port).to.equal('9001');
      expect(config.db).to.equal('./database');
      expect(config.buckets).to.deep.equal({
        user: { size: 1, per_minute: 5 },
        foo: { size: 10, per_minute: 15 },
        bar: { size: 13, per_second: 25 }
      });
    });
  });
});
