Skip to content

Commit

Permalink
Merge pull request #99 from FZJ-INM1-BDA/staging
Browse files Browse the repository at this point in the history
bugfix (iav plugin) & inc volumes
  • Loading branch information
xgui3783 committed Jan 8, 2021
2 parents 4bc3c8c + 35f76a9 commit 6fdaa9c
Show file tree
Hide file tree
Showing 7 changed files with 159 additions and 26 deletions.
4 changes: 3 additions & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ ARG VUE_APP_DEBUG
ARG MATOMO_URL
ARG MATOMO_ID
ARG VUE_APP_ENABLE_EXPERIMENTAL_FEATURES
ARG VUE_APP_INC_VOL_IDS

ENV MATOMO_URL=$MATOMO_URL
ENV MATOMO_ID=$MATOMO_ID
Expand All @@ -22,7 +23,8 @@ ENV VUE_APP_NONLINEAR_BACKEND=$VUE_APP_NONLINEAR_BACKEND
ENV VUE_APP_ENABLE_EXPERIMENTAL_FEATURES=$VUE_APP_ENABLE_EXPERIMENTAL_FEATURES
ENV VUE_APP_UPLOAD_URL=$VUE_APP_UPLOAD_URL
ENV PORT=$PORT
# Propagate the build-arg into the runtime env; the `$` is required —
# without it the literal text "VUE_APP_DEBUG" would be assigned.
ENV VUE_APP_DEBUG=$VUE_APP_DEBUG
ENV VUE_APP_INC_VOL_IDS=$VUE_APP_INC_VOL_IDS

COPY . /frontend
WORKDIR /frontend/app
Expand Down
20 changes: 17 additions & 3 deletions app/src/store/dataSelectionStore.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,23 @@
import { UPLOAD_URL, DEFAULT_BUNDLED_INCOMING_VOLUMES_0, DEFAULT_BUNDLED_INCOMING_VOLUMES_1, processImageMetaData } from "@/constants";
import axios from 'axios'

const DEFAULT_BUNDLED_INCOMING_VOLUMES = process.env.NODE_ENV === 'production'
? DEFAULT_BUNDLED_INCOMING_VOLUMES_0
: DEFAULT_BUNDLED_INCOMING_VOLUMES_0.concat(DEFAULT_BUNDLED_INCOMING_VOLUMES_1)
// Incoming volumes enabled for this deployment: `colin-1` always, plus any
// ids listed in the VUE_APP_INC_VOL_IDS env var (a JSON array of id strings).
const defaultVIds = [`colin-1`]
let DEFAULT_BUNDLED_INCOMING_VOLUMES = []

try {
  const incVolIdsRaw = process.env.VUE_APP_INC_VOL_IDS || `[]`
  const vIds = JSON.parse(incVolIdsRaw)
  // Bug fix: the original interpolated the undefined identifier INC_VOL_IDS,
  // which threw a ReferenceError instead of the intended descriptive error.
  if (!Array.isArray(vIds)) throw new Error(`VUE_APP_INC_VOL_IDS does not evaluate to array: ${incVolIdsRaw}`)
  for (const vId of vIds) {
    if (!defaultVIds.includes(vId)) {
      defaultVIds.push(vId)
    }
  }

  // Keep only the bundled volume definitions whose id is enabled above.
  const vols = [...DEFAULT_BUNDLED_INCOMING_VOLUMES_0, ...DEFAULT_BUNDLED_INCOMING_VOLUMES_1]
  DEFAULT_BUNDLED_INCOMING_VOLUMES = vols.filter(v => defaultVIds.includes(v.id))
} catch (e) {
  // Log the actual error instead of silently swallowing it.
  console.error(`parsing inc_vol_ids error`, e)
}

const dataSelectionStore = {
namespaced: true,
Expand Down
1 change: 1 addition & 0 deletions deploy/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
"nomiseco": "0.0.1",
"openid-client": "^2.4.5",
"passport": "^0.4.0",
"redis": "^3.0.2",
"uuid": "^3.3.3"
},
"devDependencies": {
Expand Down
147 changes: 131 additions & 16 deletions deploy/server/store/index.js
Original file line number Diff line number Diff line change
@@ -1,27 +1,142 @@
const LRU = require('lru-cache')
const store = new LRU({
max: 1024 * 1024 * 64, // 64 mb
maxAge: 1e3 * 60 * 60 * 24, // 1 day
length: function (n, key) {
return key.length + n.length
/**
* Cache to allow for in memory response while data fetching/processing occur
*/

const {
  REDIS_PROTO,
  REDIS_ADDR,
  REDIS_PORT,

  REDIS_RATE_LIMITING_DB_EPHEMERAL_PORT_6379_TCP_PROTO,
  REDIS_RATE_LIMITING_DB_EPHEMERAL_PORT_6379_TCP_ADDR,
  REDIS_RATE_LIMITING_DB_EPHEMERAL_PORT_6379_TCP_PORT,

  REDIS_USERNAME,
  REDIS_PASSWORD,

} = process.env

// Prefer the explicit REDIS_* vars; fall back to the docker/k8s service-link
// vars, then to sensible defaults. Address has no default: no addr, no redis.
const redisProto = REDIS_PROTO || REDIS_RATE_LIMITING_DB_EPHEMERAL_PORT_6379_TCP_PROTO || 'redis'
const redisAddr = REDIS_ADDR || REDIS_RATE_LIMITING_DB_EPHEMERAL_PORT_6379_TCP_ADDR || null
const redisPort = REDIS_PORT || REDIS_RATE_LIMITING_DB_EPHEMERAL_PORT_6379_TCP_PORT || 6379

// `user:pass@` credential prefix, or '' when no credentials are configured.
// Bug fix: the original contained a stray `})` that closed the IIFE after the
// username check, leaving the rest of the body outside any function — a
// syntax error.
const userPass = (() => {
  let returnString = ''
  if (REDIS_USERNAME) {
    returnString += REDIS_USERNAME
  }
  if (REDIS_PASSWORD) {
    returnString += `:${REDIS_PASSWORD}`
  }
  return returnString === ''
    ? ''
    : `${returnString}@`
})()

// Full connection URL, or null/falsy when no address was configured.
const redisURL = redisAddr && `${redisProto || ''}://${userPass}${redisAddr}:${redisPort}`

const crypto = require('crypto')

// Bearer token required by store.clear(); regenerated on every failed attempt
// so a rejected caller cannot retry with the same guess.
let authKey

const getAuthKey = () => {
  crypto.randomBytes(128, (error, bytes) => {
    if (!error) {
      authKey = bytes.toString('base64')
      console.log(`clear store key: ${authKey}`)
    } else {
      console.error(`generating random bytes error`, error)
    }
  })
}

getAuthKey()

/**
 * Guard used by every store operation: both keys and values must be plain
 * strings (redis and the LRU fallback only store strings here).
 * @param {*} val - candidate key or value
 * @throws {Error} when val is not a string
 */
const ensureString = val => {
  if (typeof val !== 'string') throw new Error(`both key and val must be string`)
}

if (redisURL) {
const redis = require('redis')
const { promisify } = require('util')
const client = redis.createClient({
url: redisURL
})

const asyncGet = promisify(client.get).bind(client)
const asyncSet = promisify(client.set).bind(client)
const asyncDel = promisify(client.del).bind(client)

const keys = []

/**
* maxage in milli seconds
*/
exports.store = {
set: async (key, val, { maxAge } = {}) => {
ensureString(key)
ensureString(val)
asyncSet(key, val, ...( maxAge ? [ 'PX', maxAge ] : [] ))
keys.push(key)
},
get: async (key) => {
ensureString(key)
return asyncGet(key)
},
delete: async (key) => {
ensureString(key)
await asyncDel(key)
const keyIdx = keys.find(k => k === key)
keys.splice(keyIdx, 1)
},
clear: async auth => {
if (auth !== authKey) {
getAuthKey()
throw new Error(`unauthorized`)
}
await asyncDel(keys.splice(0))
keys = []
}
}

exports.StoreType = `redis`
exports.redisURL = redisURL
console.log(`redis`)

} else {
const LRU = require('lru-cache')
const store = new LRU({
max: 1024 * 1024 * 256, // 256mb
length: (n, key) => n.length,
maxAge: Infinity, // never expires
})

exports.store = {
/**
* maxage in milli seconds
*/
set: async (key, val, { maxAge } = {}) => {
ensureString(key)
ensureString(val)
store.set(key, val, ...( maxAge ? [ maxAge ] : [] ))
},
get: async function (key) {
get: async (key) => {
ensureString(key)
return store.get(key)
},
delete: async function (key) {
delete: async (key) => {
ensureString(key)
store.del(key)
return
},
clear: async function(){
clear: async auth => {
if (auth !== authKey) {
getAuthKey()
throw new Error(`unauthorized`)
}
store.reset()
}
}

exports.StoreType = `lru-cache`
console.log(`lru-cache`)
}
5 changes: 3 additions & 2 deletions deploy/server/transformResultroute/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ const URL = require('url').URL
const cors = require('cors')

// CORS is enabled only when ALLOW_CORS parses to a truthy JSON value.
const allowCors = (process.env.ALLOW_CORS && !!JSON.parse(process.env.ALLOW_CORS)) || false
// Viewer host that generated links point at. Bug fix: the stale duplicate
// `const IV_HOST` declaration (old default) was a SyntaxError; keep only the
// current production default.
const IV_HOST = process.env.IV_HOST || 'https://atlases.ebrains.eu/viewer'
const HOSTNAME = process.env.HOSTNAME || 'http://localhost:3000'

const map = new Map()
Expand Down Expand Up @@ -52,10 +52,11 @@ const setMap = ({ body, pluginStatesParam, map: setMapFnMap }) => new Promise((r
if (err) return rj(err.toString())
const id = buf.toString('hex')

// TODO implement timing out to avoid mem leak
setMapFnMap.set(id, {
date: Date.now(),
data: body
}, {
maxAge: 1000 * 60
})
const url = new URL(IV_HOST)
url.searchParams.set('templateSelected', 'Big Brain (Histology)')
Expand Down
4 changes: 2 additions & 2 deletions deploy/server/transformResultroute/ivPlugin.js
Original file line number Diff line number Diff line change
Expand Up @@ -44,10 +44,10 @@ const getScript = ({ name, incVolName, imageSource, shader, opacity, ngMatrix })
* TODO, trasnform scriptURL and retire inline script
* violates CSP
*/
router.get('/:resultId', (req, res) => {
router.get('/:resultId', async (req, res) => {
const { resultMap: map } = req
const { resultId } = req.params
const obj = map.get(resultId)
const obj = await map.get(resultId)

/**
* single use tokenid
Expand Down
4 changes: 2 additions & 2 deletions deploy/server/transformResultroute/ivPluginV2.js
Original file line number Diff line number Diff line change
Expand Up @@ -110,11 +110,11 @@ router.get('/template/:templateId', (req, res) => {
}
})

router.get('/:resultId', (req, res) => {
router.get('/:resultId', async (req, res) => {

const { resultMap: map } = req
const { resultId } = req.params
const obj = map.get(resultId)
const obj = await map.get(resultId)

/**
* single use tokenid
Expand Down

0 comments on commit 6fdaa9c

Please sign in to comment.