Skip to content
This repository has been archived by the owner on Aug 17, 2018. It is now read-only.

Commit

Permalink
Add more tests to increase coverage (#56)
Browse files Browse the repository at this point in the history
  • Loading branch information
bassosimone committed Apr 18, 2016
1 parent 3cc1279 commit c513061
Show file tree
Hide file tree
Showing 7 changed files with 172 additions and 26 deletions.
2 changes: 1 addition & 1 deletion lib/common/rules.js
Expand Up @@ -161,7 +161,7 @@ var nexaRules = {
},
};

var evaluateRule = function (rule, record, key, value) {
var evaluateRule = exports.evaluateRule = function (rule, record, key, value) {
var result = {
field_id: rule.field_id,
field: key,
Expand Down
17 changes: 6 additions & 11 deletions lib/server/database.js
Expand Up @@ -20,10 +20,13 @@ db.persistence.setAutocompactionInterval(autocompact_interval);

let latest = stamp();

// Semi internal function to test the updating logic
const update_ = exports.update_ = (callback, provider) => {
const update = exports.update = (callback, scrapeIsh) => {
if (scrapeIsh === undefined) {
scrapeIsh = scrape.scrape;
}

console.log("Started scraper");
provider((error, d) => {
scrapeIsh((error, d) => {
if (error) {
console.log("Scraper failed", error);
callback(error);
Expand Down Expand Up @@ -57,14 +60,6 @@ const update_ = exports.update_ = (callback, provider) => {
});
};

const update = exports.update = (callback) => {
console.log("");
update_((error, data) => {
console.log("");
callback(error, data);
}, scrape.scrape);
};

const query = exports.query = (callback) => {
console.log("Querying database");
db.find({
Expand Down
34 changes: 24 additions & 10 deletions lib/server/scrape.js
Expand Up @@ -11,8 +11,11 @@ const request = require("request");
const timeout = 5000;

// Basic building block to fetch URLs with timeout and SSL checks
const fetch = exports.fetch = (url, callback) => {
request({
const fetch = exports.fetch = (url, callback, requestIsh) => {
if (requestIsh === undefined) {
requestIsh = request;
}
requestIsh({
uri: url,
timeout: timeout,
}, (error, response, body) => {
Expand All @@ -29,26 +32,37 @@ const fetch = exports.fetch = (url, callback) => {
};

// Wrapper for jsdom.env() that uses our fetch() to retrieve data.
//
// `fetchIsh` and `jsdomEnvIsh` are optional test-injection points; when
// omitted they default to the real fetch() and jsdom.env, so production
// callers can keep invoking jsdomWrap(url, cb) unchanged.
const jsdomWrap = exports.jsdomWrap = (url, cb, fetchIsh, jsdomEnvIsh) => {
    if (fetchIsh === undefined) {
        fetchIsh = fetch;
    }
    if (jsdomEnvIsh === undefined) {
        jsdomEnvIsh = jsdom.env;
    }
    fetchIsh(url, (error, body) => {
        if (error) {
            cb(error);
            return;
        }
        // XXX: I initially tried to specify timeout using the `pool`
        // parameter of jsdom.env()'s config but failed.
        // Note: The following does not use the network as long as
        // the `jquery` parameter references a file.
        jsdomEnvIsh(body, [jquery], cb);
    });
};

// Scrape all institutions in JSON format from roarmap website
const scrape = exports.scrape = (callback) => {
const scrape = exports.scrape = (callback, jsdomWrapIsh) => {

// TODO: effective mocking of jsdomWrap seems not super trivial.
if (jsdomWrapIsh === undefined) {
jsdomWrapIsh = jsdomWrap;
}

// 1. Load advanced search form
console.log("load:", prefix);
jsdomWrap(prefix, (err, window) => {
jsdomWrapIsh(prefix, (err, window) => {
if (err) {
callback(err);
return;
Expand All @@ -70,7 +84,7 @@ const scrape = exports.scrape = (callback) => {
const url = prefix + "?" + form.serialize();
form.on("submit", () => {
console.log("on submit");
jsdomWrap(url, (err, window) => {
jsdomWrapIsh(url, (err, window) => {
if (err) {
callback(err);
return;
Expand All @@ -85,7 +99,7 @@ const scrape = exports.scrape = (callback) => {
const url = prefix + "?" + form.serialize();
form.on("submit", () => {
console.log("on submit");
jsdomWrap(url, (err, window) => {
jsdomWrapIsh(url, (err, window) => {
if (err) {
callback(err);
return;
Expand Down
5 changes: 5 additions & 0 deletions test/common/.jshintrc
@@ -0,0 +1,5 @@
{
"esversion": 6,
    "node": true,
    "mocha": true
}
75 changes: 75 additions & 0 deletions test/common/rules.js
@@ -0,0 +1,75 @@
// This software is free software. See AUTHORS and LICENSE for more
// information on the copying conditions.

"use strict";

const assert = require("chai").assert;
const rules = require("../../lib/common/rules");

// Fixture: one institution record in the shape scraped from the roarmap
// eprints registry (see the `uri` field below).  Used as the `record`
// argument when exercising rules.evaluateRule().
const entry = {
"iliege_hefce_model": "not_specified",
"gold_oa_options": "not_specified",
"datestamp": "2014-12-15 22:09:36",
"rights_holding": "not_mentioned",
"status_changed": "2014-12-15 22:09:36",
"repository_url": "http://preprints.acmac.uoc.gr/",
"date_made_open": "not_mentioned",
"waive_open_access": "not_specified",
"maximal_embargo_waivable": "not_applicable",
"policymaker_type": "research_org_subunit",
"policymaker_url": "http://www.acmac.uoc.gr/index.php",
"rights_retention_waivable": "not_applicable",
"title": "ACMAC - Archimedes Center for Modeling, Analysis & Computation",
"lastmod": "2015-07-24 08:29:49",
"policy_colour": "black",
"embargo_hum_soc": "not_specified",
"type": "article",
"policymaker_name": "ACMAC - Archimedes Center for Modeling, Analysis & Computation",
"locus_of_deposit": "not_specified",
"eprintid": 173,
"country_inclusive": [
"un_geoscheme",
150,
39,
300
],
"can_deposit_be_waived": "not_specified",
"rev_number": 8,
"date_of_deposit": "not_specified",
"making_deposit_open": "not_mentioned",
"country_names": [
"Europe",
"Southern Europe",
"Greece"
],
"deposit_of_item": "not_specified",
"mandate_content_types": [
"not_specified"
],
"metadata_visibility": "show",
"open_licensing_conditions": "not_specified",
"userid": 251,
"uri": "http://roarmap.eprints.org/id/eprint/173",
"embargo_sci_tech_med": "not_specified",
"eprint_status": "archive",
"journal_article_version": "author_final",
"open_access_waivable": "not_specified",
"country": 300,
"dir": "disk0/00/00/01/73",
"apc_funding": "not_mentioned"
};

// Fixture: a rule whose `compliant_values` map is deliberately empty, so
// that the test below can assert evaluateRule() throws on invalid input.
const rule = {
field_id: 1024,
compliant_values: {},
guidelines: ["3.2", "3.20"],
};

// Test suite for lib/common/rules: evaluating a malformed rule must fail
// loudly rather than produce a bogus result.
describe("rules", () => {
    describe("evaluateRule", () => {
        it("throws Error if the input rule is invalid", () => {
            const invokeWithInvalidRule = () => {
                rules.evaluateRule(rule, entry,
                    "open_licensing_conditions", "req_cc_by");
            };
            assert.throws(invokeWithInvalidRule, Error);
        });
    });
});
17 changes: 13 additions & 4 deletions test/server/database.js
Expand Up @@ -7,21 +7,21 @@ const assert = require("chai").assert;
const database = require("../../lib/server/database");

describe("database", () => {
describe("update_", () => {
it("correctly updates the underlying data base", (done) => {
describe("update", () => {

it("correctly updates the underlying data base", (done) => {
let basicDocument = {
foo: 3.14,
foobar: [1, 2, 3],
bar: "baz",
jarjar: null
};

database.update_((err, data) => {
database.update((err, data) => {
assert.isNull(err);
assert.deepEqual(JSON.parse(data.val), basicDocument);

database.update_((err, data) => {
database.update((err, data) => {
assert.isNull(err);
const val = JSON.parse(data.val);
assert.strictEqual(val.foo, basicDocument.foo);
Expand All @@ -38,7 +38,16 @@ describe("database", () => {
}, (callback) => {
callback(null, JSON.stringify(basicDocument));
});
});

it("correctly deals with scraper errors", (done) => {
database.update((err, data) => {
assert.instanceOf(err, Error);
done();
}, (callback) => {
callback(new Error("xo"));
});
});

});
});
48 changes: 48 additions & 0 deletions test/server/scrape.js
@@ -0,0 +1,48 @@
// This software is free software. See AUTHORS and LICENSE for more
// information on the copying conditions.

"use strict";

const assert = require("chai").assert;
const scrape = require("../../lib/server/scrape");

// Test suite for lib/server/scrape: error paths of fetch() and
// jsdomWrap() are exercised by injecting stub transports, so no test
// here ever touches the real network.
describe("scrape", () => {

    describe("fetch", () => {

        it("deals with request.request() errors", (done) => {
            // Transport stub that always fails with a synthetic error.
            const failingTransport = (options, callback) => {
                callback(new Error("xo"));
            };
            scrape.fetch("http://www.google.com/robots.txt", (error) => {
                assert.instanceOf(error, Error);
                done();
            }, failingTransport);
        });

        it("deals with wrong response statuses", (done) => {
            // Transport stub that "succeeds" with a non-200 status code.
            const badStatusTransport = (options, callback) => {
                callback(null, {statusCode: 400});
            };
            scrape.fetch("http://www.google.com/robots.txt", (error) => {
                assert.instanceOf(error, Error);
                done();
            }, badStatusTransport);
        });

    });

    describe("jsdomWrap", () => {

        it("deals with fetch() errors", (done) => {
            // Fetch stub that fails; jsdom stub must then never run.
            const failingFetch = (url, callback) => {
                callback(new Error("mm"));
            };
            const neverCalledJsdomEnv = () => {
                throw new Error("xo");
            };
            scrape.jsdomWrap("http://www.google.com/robots.txt", (error) => {
                assert.instanceOf(error, Error);
                done();
            }, failingFetch, neverCalledJsdomEnv);
        });

    });

});

0 comments on commit c513061

Please sign in to comment.