Merge branch 'develop' into dependabot/npm_and_yarn/backend/msgpack5-3.6.1
Don-Isdale committed May 18, 2021
2 parents 2e8b7f9 + 4012006 commit 0766eba
Showing 99 changed files with 5,932 additions and 444 deletions.
13 changes: 13 additions & 0 deletions Dockerfile
@@ -5,18 +5,31 @@ FROM node:10-alpine
# from : https://github.com/nodejs/docker-node/issues/610 :
# node-sass is built using node-gyp, which requires python.
# git is required for installing from an NPM repo
#
# These packages are for importing spreadsheets (xlsx etc) :
# bash is now used by /backend/scripts/uploadSpreadsheet.bash
# and perl by /resources/tools/dev/snps2Dataset.pl
# gnumeric provides ssconvert, used by uploadSpreadsheet.bash
# terminus-font is required by ssconvert.
RUN apk add --no-cache git \
--virtual .gyp \
python \
make \
g++ \
bash \
perl \
gnumeric \
terminus-font \
&& npm install bower -g

# add backend to image
COPY ./backend /app

# add frontend to image
COPY ./frontend /frontend
COPY ./backend/scripts/uploadSpreadsheet.bash /app/scripts/.
COPY ./resources/tools/dev/snps2Dataset.pl /app/scripts/.


RUN node --version
RUN cd /frontend && (npm ci || npm install) && bower install --allow-root
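
As context for the packages above, a minimal hypothetical sketch (not part of this commit) of how the Node backend might invoke the upload script, which in turn uses gnumeric's ssconvert to split a workbook into per-sheet CSVs; the script path matches the COPY lines above, the arguments are assumptions.

```js
// Hypothetical sketch: run the spreadsheet upload script from Node.
// /app/scripts/uploadSpreadsheet.bash is expected to call gnumeric's
// ssconvert, e.g. `ssconvert -S upload.xlsx upload.%n.csv` (one CSV
// per sheet); the exact arguments here are assumptions.
const { execFile } = require('child_process');

function importSpreadsheet(xlsxPath, cb) {
  execFile('/app/scripts/uploadSpreadsheet.bash', [xlsxPath],
           (error, stdout, stderr) => cb(error, stdout, stderr));
}
```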
13 changes: 6 additions & 7 deletions README.md
@@ -2,20 +2,19 @@


[![Docker pulls](https://img.shields.io/docker/pulls/plantinformaticscollaboration/pretzel.svg?logo=docker&style=for-the-badge)](https://hub.docker.com/r/plantinformaticscollaboration/pretzel)
[![Docker pulls](https://img.shields.io/docker/automated/plantinformaticscollaboration/pretzel.svg?logo=docker&style=for-the-badge)](https://hub.docker.com/r/plantinformaticscollaboration/pretzel)
[![Docker pulls](https://img.shields.io/docker/build/plantinformaticscollaboration/pretzel.svg?logo=docker&style=for-the-badge)](https://hub.docker.com/r/plantinformaticscollaboration/pretzel)
[![Docker automated](https://img.shields.io/docker/automated/plantinformaticscollaboration/pretzel.svg?logo=docker&style=for-the-badge)](https://hub.docker.com/r/plantinformaticscollaboration/pretzel)
[![Docker build](https://img.shields.io/docker/cloud/build/plantinformaticscollaboration/pretzel.svg?logo=docker&style=for-the-badge)](https://hub.docker.com/r/plantinformaticscollaboration/pretzel)

[![Website](https://img.shields.io/website-up-down-green-red/http/plantinformatics.io.svg?label=plantinformatics.io&style=for-the-badge)](http://plantinformatics.io)

# About Pretzel <!-- omit in toc -->
A Loopback/Ember/D3 framework to display and interactively navigate complex datasets.

Developed by
- AgriBio, Department of Economic Development, Jobs, Transport and Resources (DEDJTR), Victoria,
Australia;
- CSIRO, Canberra, Australia.
<img src="https://user-images.githubusercontent.com/20571319/116690793-4129a380-a9fd-11eb-85ed-6b9d91f51458.png" align="center">

Funded by the Grains Research Development Corporation (GRDC).
Currently (2020-) funded and developed by Agriculture Victoria, Department of Jobs, Precincts and Regions (DJPR), Victoria, Australia.

Previously (2016-2020) funded by the Grains Research and Development Corporation (GRDC) and co-developed by Agriculture Victoria and CSIRO, Canberra, Australia.

# Table of Contents <!-- omit in toc -->
- [Features](#features)
159 changes: 155 additions & 4 deletions backend/common/models/block.js
@@ -6,6 +6,8 @@ var identity = require('../utilities/identity')
var task = require('../utilities/task')
const qs = require('qs');

var upload = require('../utilities/upload');
const { insert_features_recursive } = require('../utilities/upload');
var blockFeatures = require('../utilities/block-features');
var pathsAggr = require('../utilities/paths-aggr');
var pathsFilter = require('../utilities/paths-filter');
@@ -26,6 +28,7 @@ const { Writable, pipeline, Readable } = require('stream');
* and also : var streamify = require('stream-array');
*/

/* global process */


/** This value is used in SSE packet event id to signify the end of the cursor in pathsViaStream. */
@@ -66,11 +69,75 @@ class SseWritable extends Writable {
}
}

/*----------------------------------------------------------------------------*/

/** Given a start time, return elapsed milliseconds as a string.
* @param startTime result of process.hrtime();
* @param decimalPlaces number of decimal places to show in the result string.
*/
function elapsedMs(startTime, decimalPlaces) {
let elapsedTime = process.hrtime(startTime);
var ms = elapsedTime[0] * 1e3 + elapsedTime[1] * 1e-6;
return ms.toFixed(decimalPlaces);
}
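
A usage sketch, matching how reqStream() below applies this helper:

```js
// Time a request: capture the start, then report elapsed ms on completion.
let startTime = process.hrtime();
// ... handle the request ...
console.log('The request processing time is', elapsedMs(startTime, 3), 'ms.');
```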

/*----------------------------------------------------------------------------*/

/* global module require */

module.exports = function(Block) {


/*--------------------------------------------------------------------------*/

// copied from localise-blocks.js - could be factored into a shared module if no changes are needed

/** Add features.
* @param features array of features to add.
* each feature defines .blockId
* @return promise (no value)
*/
function blockAddFeatures(db, datasetId, blockId, features, cb) {
/** convert the ._id and .blockId fields from hex string to ObjectId,
* and shallow-copy the other fields. */
let featuresId = features.map((f) => {
let {/*_id, */...rest} = f;
// rest._id = ObjectId(_id);
rest.blockId = ObjectId(blockId);
return rest;
});

return insert_features_recursive(db, datasetId, featuresId, false, cb);
}
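
A hypothetical call sketch; note that blockId is rewritten onto each feature as an ObjectId, and the name/value feature shape here is an assumption for illustration:

```js
// Hypothetical usage (feature shape assumed): append one feature to a block.
blockAddFeatures(db, datasetId, blockId,
  [{name : 'marker1', value : [12345, 12360]}],
  function (err) { if (err) console.error('blockAddFeatures', err); });
```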


/** Send a database request to append the features in data to the given block.
*
* @param data blockId and features
*/
Block.blockFeaturesAdd = function(data, options, cb) {
let db = this.dataSource.connector;

if (data.filename) {
upload.handleJson(data, processJson, cb);
} else {
processJson(data);
}

function processJson(json) {
let
blockId = json.blockId,
b = {blockId},
features = json.features;
return blockAddFeatures(db, /*datasetId*/b, blockId, features, cb)
.then(() => { console.log('after blockAddFeatures', b); return b.blockId; });
}

};




/** This is the original paths api, prior to progressive-loading, i.e. it
* returns all paths in a single response.
*
@@ -447,6 +514,8 @@ module.exports = function(Block) {
function reqStream(cursorFunction, filterFunction, cacheId, intervals, req, res, apiOptions) {
/* The params of reqStream() are largely passed to pipeStream() - starting to look like a class. */

let startTime = process.hrtime();

/** trial also performance of : isSerialized: true */
let sse = new SSE(undefined, {isCompressed : false});
if (! res.setHeader) {
@@ -484,7 +553,12 @@ }
}

req.on('close', () => {
console.log('req.on(close)');
/* absolute time : new Date().toISOString() */
console.log(
'req.on(close)', 'reqStream',
'The request processing time is', elapsedMs(startTime, 3), 'ms.', 'for', req.path, cacheId);

// console.log('req.on(close)');
if (cursor) {
// ! cursor.isExhausted() && cursor.hasNext()
if (cursor.isClosed && ! cursor.isClosed())
@@ -519,7 +593,11 @@
else
closeCursor(cursor);
function closeCursor(cursor) {
cursor.close(function () { console.log('cursor closed'); });
cursor.close(function () {
console.log(
'cursor closed',
'reqStream',
'The request processing time is', elapsedMs(startTime, 3), 'ms.', 'for', req.path, cacheId); });
}
}
}
@@ -588,15 +666,30 @@
* @param blockIds ids of the blocks whose features are to be counted
*/
Block.blockFeaturesCount = function(blockIds, options, res, cb) {
let
fnName = 'blockFeaturesCount',
cacheId = fnName + '_' + blockIds.join('_'),
result = cache.get(cacheId);
if (result) {
if (trace_block > 1) {
console.log(fnName, cacheId, 'get', result[0] || result);
}
cb(null, result);
} else {
let db = this.dataSource.connector;
let cursor =
blockFeatures.blockFeaturesCount(db, blockIds);
cursor.toArray()
.then(function(featureCounts) {
if (trace_block > 1) {
console.log(fnName, cacheId, 'get', featureCounts[0] || featureCounts);
}
cache.put(cacheId, featureCounts);
cb(null, featureCounts);
}).catch(function(err) {
cb(err);
});
}
};
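
The cache-check / query / cache-fill shape above recurs in blockFeaturesCounts() and blockFeatureLimits() below; a hypothetical factoring (not in this commit), using the same cache.get()/cache.put() store:

```js
// Hypothetical helper: return cached rows for cacheId if present,
// else run the cursor, cache its rows, and return them via cb.
function cachedToArray(cacheId, makeCursor, cb) {
  let result = cache.get(cacheId);
  if (result) {
    cb(null, result);
  } else {
    makeCursor().toArray()
      .then(function (rows) { cache.put(cacheId, rows); cb(null, rows); })
      .catch(function (err) { cb(err); });
  }
}
```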

/*--------------------------------------------------------------------------*/
@@ -605,17 +698,48 @@ module.exports = function(Block) {
*
* @param blockId block
* @param nBins number of bins to partition the block's features into
* @param interval undefined or range of locations of features to count
* @param isZoomed true means interval should be used to constrain the location of counted features.
* @param useBucketAuto default false, which means $bucket with
* boundaries calculated from interval and nBins; otherwise use
* $bucketAuto.
*/
Block.blockFeaturesCounts = function(blockId, interval, nBins, options, res, cb) {
Block.blockFeaturesCounts = function(blockId, interval, nBins, isZoomed, useBucketAuto, options, res, cb) {

let
fnName = 'blockFeaturesCounts',
/** when a block is viewed, it is not zoomed (the interval is the
* whole domain); this request recurs often and is worth caching,
* but when zoomed in there is no repeatability so result is not
* cached. Zoomed results could be collated in an interval tree,
* and used when they satisfied one end of a requested interval,
* i.e. just the new part would be queried.
*/
useCache = ! isZoomed || ! interval,
cacheId = fnName + '_' + blockId + '_' + nBins + '_' + useBucketAuto,
result = useCache && cache.get(cacheId);
if (result) {
if (trace_block > 1) {
console.log(fnName, cacheId, 'get', result[0]);
}
cb(null, result);
} else {
let db = this.dataSource.connector;
let cursor =
blockFeatures.blockFeaturesCounts(db, blockId, interval, nBins);
blockFeatures.blockFeaturesCounts(db, blockId, interval, nBins, isZoomed, useBucketAuto);
cursor.toArray()
.then(function(featureCounts) {
if (useCache) {
if (trace_block > 1) {
console.log(fnName, cacheId, 'put', featureCounts[0]);
}
cache.put(cacheId, featureCounts);
}
cb(null, featureCounts);
}).catch(function(err) {
cb(err);
});
}
};
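
For reference, a sketch of the two aggregation stages useBucketAuto chooses between, as implemented in blockFeatures.blockFeaturesCounts(); the groupBy field name is an assumption here:

```js
// Sketch of the choice useBucketAuto controls; '$value_0' as the
// feature-location field is an assumption for illustration.
function countsStage(interval, nBins, useBucketAuto) {
  const binWidth = (interval[1] - interval[0]) / nBins,
    boundaries = Array.from(
      {length : nBins + 1}, (_, i) => interval[0] + i * binWidth);
  return useBucketAuto ?
    // $bucketAuto : MongoDB picks ~nBins buckets with even counts.
    {$bucketAuto : {groupBy : '$value_0', buckets : nBins}} :
    // $bucket : fixed boundaries computed from interval and nBins.
    {$bucket : {groupBy : '$value_0', boundaries, default : 'outside'}};
}
```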

/*--------------------------------------------------------------------------*/
@@ -624,15 +748,31 @@ module.exports = function(Block) {
* @param blockId undefined (meaning all blocks) or id of 1 block to find min/max for
*/
Block.blockFeatureLimits = function(blockId, options, res, cb) {
let
fnName = 'blockFeatureLimits',
cacheId = fnName + '_' + blockId,
result = cache.get(cacheId);
if (result) {
if (trace_block > 1) {
console.log(fnName, cacheId, 'get', result[0] || result);
}
cb(null, result);
} else {

let db = this.dataSource.connector;
let cursor =
blockFeatures.blockFeatureLimits(db, blockId);
cursor.toArray()
.then(function(limits) {
if (trace_block > 1) {
console.log(fnName, cacheId, 'put', limits[0] || limits);
}
cache.put(cacheId, limits);
cb(null, limits);
}).catch(function(err) {
cb(err);
});
}
};

/*--------------------------------------------------------------------------*/
@@ -752,6 +892,15 @@ module.exports = function(Block) {
// When adding an API .remoteMethod() here, also add the route name to backend/server/boot/access.js : genericResolver()
//----------------------------------------------------------------------------

Block.remoteMethod('blockFeaturesAdd', {
accepts: [
{arg: 'data', type: 'object', required: true, http: {source: 'body'}},
{arg: "options", type: "object", http: "optionsFromRequest"},
],
returns: {arg: 'status', type: 'string'},
description: "Append the features in data to the given block"
});
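
Assuming LoopBack 3's default remote-method routing (no explicit http mapping is declared above), a hypothetical client call might look like:

```js
// Hypothetical client call; the route follows LoopBack's default
// POST /api/<plural model>/<method> convention (an assumption here).
const body = {blockId : '<24-hex block id>', features : []};
fetch('/api/Blocks/blockFeaturesAdd', {
  method : 'POST',
  headers : {'Content-Type' : 'application/json'},
  body : JSON.stringify(body),
}).then((res) => res.json())
  .then((status) => console.log('blockFeaturesAdd', status));
```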

Block.remoteMethod('blockFeaturesCount', {
accepts: [
{arg: 'blocks', type: 'array', required: true},
@@ -768,6 +917,8 @@ module.exports = function(Block) {
{arg: 'block', type: 'string', required: true},
{arg: 'interval', type: 'array', required: false},
{arg: 'nBins', type: 'number', required: false},
{arg: 'isZoomed', type: 'boolean', required: false, default : 'false'},
{arg: 'useBucketAuto', type: 'boolean', required: false, default : 'false'},
{arg: "options", type: "object", http: "optionsFromRequest"},
{arg: 'res', type: 'object', 'http': {source: 'res'}},
],
20 changes: 18 additions & 2 deletions backend/common/models/client.js
@@ -112,8 +112,24 @@ module.exports = function(Client) {
if (process.env.EMAIL_ACTIVE == 'true') {
Client.findById(context.args.uid).then(function(userInstance) {
var template = loopback.template(path.resolve(__dirname, '../../server/views/access_granted.ejs'));
let login_url =
context.req.protocol + '://' + context.req.host +
let
/** if node app server is behind a proxy (e.g. nginx, for
* https) then the req.host will be simply localhost;
* in that case use API_HOST.
*/
apiHost =
process.env.API_PORT_PROXY ? process.env.API_HOST : context.req.host,
/** If behind a proxy then the port will be default (80)
* expressed as ''. Otherwise API_PORT_EXT is used.
*
* (If running node app server within docker then the API
* port external to docker is API_PORT_EXT, and hence the
* name suffix _EXT; the internal port is generally the same
* and the same env var is used.)
* Related : reset_href, verifyHref.
*/
login_url =
context.req.protocol + '://' + apiHost +
(process.env.API_PORT_PROXY ? '' : ':' + process.env.API_PORT_EXT) +
'/login';
var html = template({
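
A worked sketch of the proxy branch above, with hypothetical env values:

```js
// Mirror of the login_url logic above (hypothetical standalone form).
function loginUrl(req, env) {
  const apiHost = env.API_PORT_PROXY ? env.API_HOST : req.host;
  const port = env.API_PORT_PROXY ? '' : ':' + env.API_PORT_EXT;
  return req.protocol + '://' + apiHost + port + '/login';
}
// No proxy :     loginUrl({protocol : 'http', host : 'localhost'},
//                         {API_PORT_EXT : '3000'})
//                -> 'http://localhost:3000/login'
// Behind proxy : loginUrl({protocol : 'https', host : 'localhost'},
//                         {API_PORT_PROXY : '1', API_HOST : 'plantinformatics.io'})
//                -> 'https://plantinformatics.io/login'
```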
