Merge pull request #179 from FZJ-INM1-BDA/staging
feat: ebrains transform result publication
xgui3783 committed May 25, 2023
2 parents f006924 + d928a3d commit 5939af4
Showing 7 changed files with 196 additions and 16 deletions.
14 changes: 13 additions & 1 deletion app/src/components/exports/ExportsEbrains.vue
@@ -1,5 +1,12 @@
<template>
<div>
<b-form-textarea v-model="description"
:aria-placeholder="descriptionPlaceholder"
:placeholder="descriptionPlaceholder"
class="mb-2">

</b-form-textarea>
<div></div>
<b-button @click="start"
:disabled="isBusy"
class="start-button"
@@ -32,6 +39,8 @@ export default {
pollResults: [],
jobId: null,
periodicId: null,
description: '',
descriptionPlaceholder: '(Optional) Enter a description for this transformation.',
}
},
filters: {
@@ -59,7 +68,10 @@ export default {
try {
const resp = await fetch(`${hostname}ebrains`, {
method: 'POST',
body: JSON.stringify(json),
body: JSON.stringify({
...json,
description: this.description
}),
headers: {
'content-type': 'application/json'
}
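For context, the component now merges the optional description into the transform JSON before POSTing it to the backend's ebrains endpoint. Below is a rough Python equivalent of that request; the host, cookie name, and the @type value are placeholders, and the payload is reduced to the keys visible in this diff (the real one also carries the incoming and reference volume identifiers).

# Rough Python equivalent of the fetch() call above. Host, cookie name and the
# @type value are placeholders; only keys visible in this diff are included.
import requests

payload = {
    "@type": "...",  # concrete @type value is not shown in this commit
    "version": 1,
    "transformMatrixInNm": [
        [1e6, 0.0, 0.0, 0.0],
        [0.0, 1e6, 0.0, 0.0],
        [0.0, 0.0, 1e6, 0.0],
        [0.0, 0.0, 0.0, 1.0],
    ],
    "description": "Anchoring of an example volume to BigBrain",  # new optional field
}

resp = requests.post(
    "https://voluba.example.org/ebrains",      # `${hostname}ebrains` in the component
    json=payload,
    cookies={"session": "<session cookie>"},   # the endpoint now requires an authenticated session
)
print(resp.json())                             # {"job_id": "..."} per StartWorkflowResp
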
1 change: 1 addition & 0 deletions backend/voluba_backend/.gitignore
@@ -0,0 +1 @@
.env
77 changes: 69 additions & 8 deletions backend/voluba_backend/ebrains_router/models.py
@@ -1,4 +1,4 @@
from typing import Union, List, Optional, Callable
from typing import Union, List, Optional, Callable, Literal
from enum import Enum
from pydantic import BaseModel, Field
from tempfile import NamedTemporaryFile
@@ -8,10 +8,13 @@
from fairgraph.openminds.core import ContentType, File, WebServiceVersion, Hash
from fairgraph.openminds.sands import CommonCoordinateSpaceVersion
from fairgraph.openminds.computation import LocalFile, DataAnalysis
from fairgraph.openminds.core.actors.person import Person
import json
from hashlib import md5
from datetime import datetime
import requests
from pathlib import Path
from uuid import uuid4

from voluba_auth import S2SToken
from .const import SPC_NAME_TO_ID_VOCAB, KG_INSTANCES, KG_IDS, SPC_NAME_TO_KG_ID, STRING_CONST
@@ -30,6 +33,7 @@ class WorkflowV1_1(BaseModel):
version: Union[int, float, str]
_type:str = Field(..., alias="@type")
transform_matrix_in_nm: List[List[float]] = Field(..., alias="transformMatrixInNm")
description: str = ''

class StartWorkflowResp(BaseModel):
job_id: str
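The new description field defaults to an empty string, so clients that do not send it keep working. A standalone pydantic sketch of how the camelCase aliases map onto the snake_case attributes (this stand-in model omits the other WorkflowV1_1 fields):

# Stand-in model (not the committed WorkflowV1_1) illustrating the alias mapping
# and the defaulted description field.
from typing import List, Union
from pydantic import BaseModel, Field

class TransformPayload(BaseModel):
    version: Union[int, float, str]
    transform_matrix_in_nm: List[List[float]] = Field(..., alias="transformMatrixInNm")
    description: str = ''   # field added in this commit; optional for older clients

payload = {
    "version": 1,
    "transformMatrixInNm": [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0]],
}
print(TransformPayload(**payload).description)   # '' when the client omits it
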
@@ -48,9 +52,14 @@ class JobProgressModel(BaseModel):
def run(self, context:'WorkProgress'):
logger.debug(f"Running {self.name}")
self.status = JobProgressEnum.RUNNING
self._run(context)
logger.debug(f"Running {self.name} completed!")
self.status = JobProgressEnum.COMPLETED
try:
self._run(context)
except Exception as e:
self.status = JobProgressEnum.ERROR
self.detail = str(e)
else:
logger.debug(f"Running {self.name} completed!")
self.status = JobProgressEnum.COMPLETED

def _run(self, context: 'WorkProgress'):
raise NotImplementedError
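Previously, an exception raised inside a step's _run() would propagate out of the background task and leave the step's status stuck at RUNNING; with this change the failure is recorded on the step itself. A self-contained sketch of the pattern (the enum values and the detail field are inferred from the surrounding code):

# Self-contained sketch of the new error handling; enum members and field names
# mirror what is visible in this diff, the concrete values are assumptions.
from enum import Enum
from typing import Optional
from pydantic import BaseModel

class JobProgressEnum(str, Enum):
    RUNNING = "RUNNING"
    COMPLETED = "COMPLETED"
    ERROR = "ERROR"

class JobProgressModel(BaseModel):
    name: str
    status: Optional[JobProgressEnum] = None
    detail: Optional[str] = None

    def run(self, context):
        self.status = JobProgressEnum.RUNNING
        try:
            self._run(context)
        except Exception as e:
            # the failure no longer crashes the background task; it is surfaced
            # to the polling client through status and detail
            self.status = JobProgressEnum.ERROR
            self.detail = str(e)
        else:
            self.status = JobProgressEnum.COMPLETED

    def _run(self, context):
        raise NotImplementedError

class FailingStep(JobProgressModel):
    def _run(self, context):
        raise RuntimeError("bucket upload failed")

step = FailingStep(name="upload")
step.run(context=None)
print(step.status, step.detail)   # JobProgressEnum.ERROR bucket upload failed
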
@@ -62,11 +71,13 @@ def _run(self, context: 'WorkProgress'):
auth_token = S2SToken.get_token()
client = BucketApiClient(token=auth_token)
bucket = client.buckets.get_bucket(DP_BUCKET_NAME)

_id = str(uuid4())

assert context.param.reference_volume in SPC_NAME_TO_ID_VOCAB, f"Expected reference volume name {context.param.reference_volume} to be in {', '.join(SPC_NAME_TO_ID_VOCAB.keys())}, but it is not."
sanitized_spc_id = SPC_NAME_TO_ID_VOCAB[context.param.reference_volume].replace("/", ".")
incoming_vol_id = context.param.content_hash or f'name.{context.param.incoming_volume}'
bucket_filename = f"{sanitized_spc_id}_{incoming_vol_id}_transform_{context.param.version}.json"
bucket_filename = f"{sanitized_spc_id}_{incoming_vol_id}_transform_{context.param.version}_{_id}.json"

# Unfortunately, the bucket API does not yet accept a StringIO, so the payload must be written to disk first.
with NamedTemporaryFile("w", suffix=".json", encoding="utf-8", delete=False) as fp:
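The rest of this hunk is collapsed in this view. For reference, a stripped-down sketch of the pattern it implements: the transform JSON is written to a temporary file (because of the StringIO limitation noted above) under a name that now carries a uuid4 suffix, so repeated publications of the same space/volume/version no longer overwrite each other. The values below are placeholders, and the actual data-proxy upload call is omitted.

# Placeholder values; the ebrains_drive upload call that follows in the real
# code is not shown in this hunk and is therefore left out.
import json
from tempfile import NamedTemporaryFile
from uuid import uuid4

sanitized_spc_id = "example.reference.space"                 # placeholder
incoming_vol_id = "d41d8cd98f00b204e9800998ecf8427e"         # content hash or name.<volume>
version = "v1"
bucket_filename = f"{sanitized_spc_id}_{incoming_vol_id}_transform_{version}_{uuid4()}.json"

with NamedTemporaryFile("w", suffix=".json", encoding="utf-8", delete=False) as fp:
    json.dump({"transformMatrixInNm": [[1e6, 0.0, 0.0, 0.0]]}, fp)
    local_path = fp.name

print(local_path, "->", bucket_filename)   # upload of local_path under bucket_filename follows
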
@@ -127,7 +138,12 @@ def _run(self, context: 'WorkProgress'):
auth_token = S2SToken.get_token()
kg_client = KGClient(auth_token, host=KG_ROOT)
if KG_INSTANCES.voluba_webservice_version is None:
KG_INSTANCES.voluba_webservice_version = WebServiceVersion.from_id(KG_IDS.VOLUBA_WEBSERVICE_VERSION_ID, kg_client, scope="any")
# TODO the s2s token does not seem to be able to query instances in the review space.
# Personal tokens do not have this limitation, which is why everything appears fine in the web UI.
# For now, load the JSON-LD (fetched and saved earlier with a personal token).
path_to_jsonld = Path(__file__).parent / "voluba_webservice_jsonld.json"
with open(path_to_jsonld, "r") as fp:
KG_INSTANCES.voluba_webservice_version = WebServiceVersion.from_jsonld(json.load(fp=fp), kg_client)

assert context.param.reference_volume in SPC_NAME_TO_KG_ID, f"Expected reference volume {context.param.reference_volume!r} to be in {', '.join(SPC_NAME_TO_KG_ID.keys())}, but it is not."

@@ -141,7 +157,9 @@ def _run(self, context: 'WorkProgress'):

ref_spc_kg_id = SPC_NAME_TO_KG_ID[context.param.reference_volume]
spc = CommonCoordinateSpaceVersion.from_id(ref_spc_kg_id, client=kg_client)


user = context.user.fetch_kg_instance()

lookup_label=f"{context.param.reference_volume}-{context.param.incoming_volume}"
input_file = LocalFile.from_id(income_vol_local_file.detail, client=kg_client, scope="any")
output_file = File.from_id(transform_file.detail, client=kg_client, scope="any")
@@ -156,7 +174,10 @@ def _run(self, context: 'WorkProgress'):
outputs=[
output_file
],
description="Programmatically generated by voluba by way of fairgraph.",
performed_by=[user],
description=f"""{context.param.description}
Programmatically generated by voluba by way of fairgraph.""",
start_time=datetime.now()
)
da.save(kg_client, KG_SPACE_NAME)
@@ -182,11 +203,51 @@ def _run(self, context: 'WorkProgress'):
print(errors)
raise Exception(f"Some instances cannot be deleted: {','.join([err.get('id') for err in errors])}")


class VolubaUser(BaseModel):
id: str
name: str
given_name: str
family_name: str
type: Union[Literal['hbp-oidc-v2'], Literal['orcid-oidc']]

def fetch_kg_instance(self) -> Person:
auth_token = S2SToken.get_token()
kg_client = KGClient(auth_token, host=KG_ROOT)

p = Person(family_name=self.family_name, given_name=self.given_name)
query = Person.generate_query(client=kg_client, space=None, filters=p._build_existence_query())
q = kg_client.query(
query=query,
size=100,
scope="any"
)
persons = [Person.from_jsonld(d, kg_client) for d in q.data]
if self.type == "hbp-oidc-v2":
persons = [p for p in persons if p.space == "common"]
if len(persons) == 1:
return persons[0]
raise Exception(f"User not found, or multiple users found: {len(persons)}")
if self.type == "orcid-oidc":
persons = [p for p in persons
if p.space == "collab-spatial-anchoring"
and self.id in p.alternate_names]
if len(persons) == 0:
p = Person(family_name=self.family_name, given_name=self.given_name, alternate_names=["voluba-created", self.type, self.id])
p.save(kg_client, KG_SPACE_NAME)
return p
if len(persons) == 1:
return persons[0]
raise Exception(f"Multiple user found: {len(persons)}")

raise Exception(f"type must be orcid-oidc or hbp-oidc-v2")

class WorkProgress(BaseModel):
id: str
param: WorkflowV1_1
output_hash: str=None
progresses: List[JobProgressModel]
user: Optional[VolubaUser]=None

def get_ooo(self, fn: Callable[[JobProgressModel], bool]) -> JobProgressModel:
"""
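Taken together, the new VolubaUser model and the WorkProgress.user field carry the OIDC identity from the session into the KG publication step: HBP users must resolve to exactly one Person in the common space, while ORCID users are matched by their id inside alternate_names in the collab-spatial-anchoring space and are created there on first use. A small usage sketch (example identity only; the import path assumes backend/voluba_backend is on the Python path, and the KG call is commented out because it needs a valid service-to-service token):

# Example wiring of a session user into the model above; the identity is a
# documentation example, not a real record.
from ebrains_router.models import VolubaUser   # assumes backend/voluba_backend on sys.path

user_session = {
    "id": "orcid:0000-0002-1825-0097",
    "name": "Josiah Carberry",
    "given_name": "Josiah",
    "family_name": "Carberry",
    "type": "orcid-oidc",
    "accessToken": "...",   # extra keys from voluba_auth are ignored under pydantic's defaults
}

voluba_user = VolubaUser(**user_session)
# person = voluba_user.fetch_kg_instance()   # queries the KG; creates the Person on first ORCID use
# WorkProgress(..., user=voluba_user)        # the router attaches the user like this
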
10 changes: 6 additions & 4 deletions backend/voluba_backend/ebrains_router/router.py
@@ -1,6 +1,6 @@
from voluba_store import VolubaStore

from .models import StartWorkflowResp, WorkflowV1_1, WorkProgress, UploadToDP, CreateKgTransformFileInstance, CreateKgIncVolFileInstance, CreateKgDataAnalysisInstance
from .models import StartWorkflowResp, WorkflowV1_1, WorkProgress, UploadToDP, CreateKgTransformFileInstance, CreateKgIncVolFileInstance, CreateKgDataAnalysisInstance, VolubaUser

from uuid import uuid4
from fastapi import APIRouter, Request, BackgroundTasks, Cookie, Depends
@@ -13,10 +13,11 @@

router = APIRouter()

# TODO add check that user is authenticated

@router.post("", response_model=StartWorkflowResp)
def start_workflow(body: WorkflowV1_1, request: Request, bg: BackgroundTasks):
user = get_user(request)
if not user:
raise HTTPException(401, detail="You need to be authenticated to use this endpoint.")
job_id = str(uuid4())
result=StartWorkflowResp(job_id=job_id)
work=WorkProgress(
@@ -27,7 +28,8 @@ def start_workflow(body: WorkflowV1_1, request: Request, bg: BackgroundTasks):
CreateKgTransformFileInstance(),
CreateKgIncVolFileInstance(),
CreateKgDataAnalysisInstance(),
]
],
user=VolubaUser(**user)
)
worflow_store.set_value(job_id, work)
bg.add_task(work.start)
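The endpoint now rejects unauthenticated callers and attaches the session user to the job before scheduling it in the background. A self-contained miniature of that guard-plus-background-task pattern (the real code reads the user from the server-side session via get_user and stores jobs in worflow_store; everything below is illustrative):

# Illustrative miniature only; not the module's actual imports or store.
from uuid import uuid4
from fastapi import BackgroundTasks, FastAPI, HTTPException, Request

app = FastAPI()
jobs: dict[str, dict] = {}   # stand-in for the real workflow store

def get_demo_user(request: Request):
    # the real get_user reads the server-side session; a header stands in here
    return request.headers.get("x-demo-user")

@app.post("/ebrains")
def start_workflow(request: Request, bg: BackgroundTasks):
    user = get_demo_user(request)
    if not user:
        raise HTTPException(401, detail="You need to be authenticated to use this endpoint.")
    job_id = str(uuid4())
    jobs[job_id] = {"user": user, "status": "PENDING"}
    bg.add_task(lambda: jobs[job_id].update(status="COMPLETED"))
    return {"job_id": job_id}
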
90 changes: 90 additions & 0 deletions backend/voluba_backend/ebrains_router/voluba_webservice_jsonld.json
@@ -0,0 +1,90 @@
{
"@type": [
"https://openminds.ebrains.eu/core/WebServiceVersion"
],
"@id": "https://kg.ebrains.eu/api/instances/eb1eb844-3544-45cf-829e-1094225d0007",
"https://openminds.ebrains.eu/vocab/fullName": "VoluBA - Volumetric Brain Anchoring",
"https://openminds.ebrains.eu/vocab/shortName": "VoluBA",
"https://openminds.ebrains.eu/vocab/accessibility": {
"@id": "https://kg.ebrains.eu/api/instances/f1389b7e-0965-4d23-9ddb-2575d4ac2fad"
},
"https://openminds.ebrains.eu/vocab/copyright": {
"@type": [
"https://openminds.ebrains.eu/core/Copyright"
],
"https://openminds.ebrains.eu/vocab/holder": {
"@id": "https://kg.ebrains.eu/api/instances/561a0c75-a414-45b0-a24e-e2f8e680ec2e"
},
"https://openminds.ebrains.eu/vocab/year": "2017"
},
"https://openminds.ebrains.eu/vocab/fullDocumentation": {
"@id": "https://kg.ebrains.eu/api/instances/12aa623f-0313-4ab0-919a-d8e5c6afdad0"
},
"https://openminds.ebrains.eu/vocab/funding": [
{
"@id": "https://kg.ebrains.eu/api/instances/586250ba-571d-4a14-813a-d997c500e9f7"
},
{
"@id": "https://kg.ebrains.eu/api/instances/754aeb36-9420-4484-9b50-6ca0eb806a0d"
},
{
"@id": "https://kg.ebrains.eu/api/instances/082a5bf9-1612-4a44-9b19-d6757a0a83ec"
},
{
"@id": "https://kg.ebrains.eu/api/instances/7c0695e3-2031-49d9-9253-d07c9db2d230"
}
],
"https://openminds.ebrains.eu/vocab/hasPart": {
"@id": "https://kg.ebrains.eu/api/instances/7a77ccdb-c62d-42a4-a373-a47619d34e74"
},
"https://openminds.ebrains.eu/vocab/homepage": "https://voluba.apps.hbp.eu/#/",
"https://openminds.ebrains.eu/vocab/inputFormat": [
{
"@id": "https://kg.ebrains.eu/api/instances/f7c21b4d-b27d-4a5c-b184-d2aa66100466"
},
{
"@id": "https://kg.ebrains.eu/api/instances/f9099808-731f-453e-8e81-404ec482a721"
},
{
"@id": "https://kg.ebrains.eu/api/instances/ee347aa2-4746-44e7-968a-467d04e4969d"
},
{
"@id": "https://kg.ebrains.eu/api/instances/caeef910-8691-4e88-83a0-726ea844c2d9"
}
],
"https://openminds.ebrains.eu/vocab/keyword": [
{
"@id": "https://kg.ebrains.eu/api/instances/21d46eb1-7933-4418-98af-d3d92d9babe3"
},
{
"@id": "https://kg.ebrains.eu/api/instances/4d294fe1-5cbe-4ae4-bcdc-88866798550b"
},
{
"@id": "https://kg.ebrains.eu/api/instances/f04a66d9-9c24-4076-84cd-155607587fa0"
},
{
"@id": "https://kg.ebrains.eu/api/instances/dadd102a-2472-40b0-a500-be168bd6b64f"
},
{
"@id": "https://kg.ebrains.eu/api/instances/5a2fb93a-e5ab-4638-b1fc-065dd49506dc"
},
{
"@id": "https://kg.ebrains.eu/api/instances/4ccfa2b8-fe75-4a17-98b7-e01b922c8f03"
}
],
"https://openminds.ebrains.eu/vocab/outputFormat": [
{
"@id": "https://kg.ebrains.eu/api/instances/ee347aa2-4746-44e7-968a-467d04e4969d"
},
{
"@id": "https://kg.ebrains.eu/api/instances/caeef910-8691-4e88-83a0-726ea844c2d9"
}
],
"https://openminds.ebrains.eu/vocab/releaseDate": "2020-01-04T00:00:00",
"https://openminds.ebrains.eu/vocab/repository": {
"@id": "https://kg.ebrains.eu/api/instances/f9a4d451-0c39-424f-9b0e-e903ee6fdf8a"
},
"https://openminds.ebrains.eu/vocab/supportChannel": "support@ebrains.eu",
"https://openminds.ebrains.eu/vocab/versionIdentifier": "v0",
"https://openminds.ebrains.eu/vocab/versionInnovation": "This version has no official release on GitHub. VoluBA was previously known as landmark-reg."
}
8 changes: 7 additions & 1 deletion backend/voluba_backend/siibra_explorer_overlay/__init__.py
@@ -1,5 +1,6 @@
from fastapi import APIRouter, Response
from pathlib import Path
from typing import List
import json

with open(Path(__file__).parent / 'template.html', 'r') as fp:
@@ -10,7 +11,12 @@
def islen(num: int):
return lambda variable: isinstance(variable, list) and len(variable) == num

def verify_transform(transform: str):
def verify_transform(transform: str) -> List[List[float]]:
"""
The transform is provided as a string of 12 comma-separated floats,
reshaped row-major into a 4x3 affine.
This function validates the input and returns the resulting affine.
"""
return_result = [float(v) for v in transform.split(",")]
assert islen(12)(return_result)
return [
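The body of verify_transform is collapsed in this view. Following the docstring, a standalone sketch of the described behaviour: twelve comma-separated floats are validated and chunked row-major into a 4x3 affine.

# Standalone sketch of the behaviour described by the docstring above; the
# committed implementation may differ in detail.
from typing import List

def verify_transform_sketch(transform: str) -> List[List[float]]:
    values = [float(v) for v in transform.split(",")]
    assert len(values) == 12, "expected 12 comma-separated floats"
    return [values[i:i + 3] for i in range(0, 12, 3)]

print(verify_transform_sketch("1,0,0, 0,1,0, 0,0,1, 0,0,0"))
# [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0], [0.0, 0.0, 0.0]]
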
12 changes: 10 additions & 2 deletions backend/voluba_backend/voluba_auth.py
@@ -46,9 +46,13 @@

def process_hbp_user(resp):
userinfo = resp.get("userinfo")
given_name = userinfo.get("given_name")
family_name = userinfo.get("family_name")
return {
'id': f'hbp-oidc-v2:{userinfo.get("sub")}',
'name': f'{userinfo.get("given_name")} {userinfo.get("family_name")}',
'name': f'{given_name} {family_name}',
'given_name': given_name,
'family_name': family_name,
'type': 'hbp-oidc-v2',
'idToken': resp.get("id_token"),
'accessToken': resp.get("access_token"),
Expand All @@ -74,9 +78,13 @@ async def ebrains_callback(request: Request):

def process_orcid_user(resp):
userinfo = resp.get("userinfo")
given_name = userinfo.get("given_name")
family_name = userinfo.get("family_name")
return {
'id': f'orcid:{userinfo.get("sub")}',
'name': f'{userinfo.get("given_name")} {userinfo.get("family_name")}',
'name': f'{given_name} {family_name}',
'given_name': given_name,
'family_name': family_name,
'type': 'orcid-oidc',
'idToken': resp.get("id_token"),
'accessToken': resp.get("access_token"),
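Both callbacks now carry given_name and family_name through to the session dict, which is exactly the shape the router later splats into VolubaUser(**user). A condensed sketch of the HBP variant (the real functions also include idToken/accessToken and use the orcid: prefix for ORCID logins):

# Condensed sketch of the updated callback output for the HBP provider.
def process_user_sketch(resp: dict) -> dict:
    userinfo = resp.get("userinfo")
    given_name = userinfo.get("given_name")
    family_name = userinfo.get("family_name")
    return {
        "id": f'hbp-oidc-v2:{userinfo.get("sub")}',
        "name": f"{given_name} {family_name}",
        "given_name": given_name,
        "family_name": family_name,
        "type": "hbp-oidc-v2",
    }

print(process_user_sketch({"userinfo": {
    "sub": "1234", "given_name": "Ada", "family_name": "Lovelace"}}))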
