I47-node-modules #56

Closed · wants to merge 8 commits
39 changes: 0 additions & 39 deletions .env
@@ -1,48 +1,9 @@
# change FLASK_ENV to "production" once galvanalyser is installed and running correctly
FLASK_ENV=development
FLASK_DEBUG=True

# this password is used for both the galvanalyser and redash databases
POSTGRES_PASSWORD=galvanalyser

# this is the directory in which the galvanalyser postgres database will be located
GALVANALYSER_DATA_PATH=./.run/data/galvanalyser

# this is the directory that will be scanned for battery tester output files when
# running the harvester test suite
GALVANALYSER_HARVESTER_TEST_PATH=./.run/test_datafiles

# this is the base directory for the harvesters run by the server (note harvesters can
# also be set up independently of the server if required; see documentation for
# details). New directories added for scanning will be relative to this base directory
GALVANALYSER_HARVESTER_BASE_PATH=./.run/datafiles

# username and password for default harvester account
HARVESTER_USERNAME=harv
HARVESTER_PASSWORD=harv

# secret used to encrypt JWT for galvanalyser web app and api
GALVANALYSER_SECRET_KEY=secret_key

# a non-empty value ensures that Python output is sent straight to the terminal
# (e.g. your container log) without being buffered first, so you can see your
# application's output (e.g. django logs) in real time
PYTHONUNBUFFERED=1

# setup for the rabbitmq server, which will be created with username
# RABBITMQ_DEFAULT_USER and password RABBITMQ_DEFAULT_PASS.
# Make sure that the default user and password match the
# RABBITMQ_URL parameter, which is in the format:
# pyamqp://RABBITMQ_DEFAULT_USER:RABBITMQ_DEFAULT_PASS@rabbitmq:5672
RABBITMQ_DEFAULT_USER=test-user
RABBITMQ_DEFAULT_PASS=test-user
RABBITMQ_URL=pyamqp://test-user:test-user@rabbitmq:5672

# where to put logs for the celery task manager
CELERY_LOG_DIR=./.run/data/celery

# location for the redis database files
REDIS_DATA_PATH=./.run/data/redis
REDIS_PASSWORD=redis
REDIS_URL=redis://:redis@redis:6379

# Required to get react running:
NODE_OPTIONS=--openssl-legacy-provider
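
A minimal sketch (not part of this diff) of how an application might sanity-check that the RabbitMQ settings above stay consistent; the variable names and URL format are taken from the `.env` file, the check itself is an assumption:

```python
# Sketch: verify RABBITMQ_URL matches RABBITMQ_DEFAULT_USER/RABBITMQ_DEFAULT_PASS,
# since the .env comment requires them to agree.
import os
from urllib.parse import urlparse

def check_rabbitmq_settings() -> None:
    user = os.environ["RABBITMQ_DEFAULT_USER"]
    password = os.environ["RABBITMQ_DEFAULT_PASS"]
    url = urlparse(os.environ["RABBITMQ_URL"])  # e.g. pyamqp://user:pass@rabbitmq:5672
    if (url.username, url.password) != (user, password):
        raise ValueError("RABBITMQ_URL credentials do not match RABBITMQ_DEFAULT_USER/PASS")
```
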
44 changes: 41 additions & 3 deletions .github/workflows/unit-test.yml
@@ -6,9 +6,16 @@ on:
pull_request:
branches:
- '**'
workflow_dispatch:
inputs:
debug_enabled:
type: boolean
description: 'Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)'
required: false
default: false

jobs:
test:
test-harvester:
runs-on: ubuntu-latest
env:
GALVANALYSER_HARVESTER_TEST_PATH: /home/mrobins/git/tmp/test_datafiles
@@ -31,7 +38,38 @@ jobs:
sudo smbget -R $LIIONSDEN_SMB_PATH/test-suite-small -U "$LIIONSDEN_SMB_USERNAME%$LIIONSDEN_SMB_PASSWORD"

- name: Build the stack
run: docker-compose up --detach --build
run: touch .env.secret && docker-compose -f docker-compose.test.yml build harvester

# Enable tmate debugging of manually-triggered workflows if the input option was provided
- name: Setup tmate session
uses: mxschmitt/action-tmate@v3
if: ${{ github.event_name == 'workflow_dispatch' && inputs.debug_enabled }}

- name: Run tests
run: docker-compose -f docker-compose.test.yml up harvester

test-backend:
runs-on: ubuntu-latest
env:
POSTGRES_PASSWORD: "galvanalyser"
DJANGO_SECRET_KEY: "long-and-insecure-key-12345"
DJANGO_TEST: "TRUE"
steps:
- uses: actions/checkout@v2

- name: Set up secrets
run: |
echo "POSTGRES_PASSWORD=$POSTGRES_PASSWORD" > .env.secret
echo "DJANGO_SECRET_KEY=$DJANGO_SECRET_KEY" >> .env.secret
echo "DJANGO_TEST=$DJANGO_TEST" >> .env.secret

- name: Build the stack
run: docker-compose -f docker-compose.test.yml up -d --build app_test

# Enable tmate debugging of manually-triggered workflows if the input option was provided
- name: Setup tmate session
uses: mxschmitt/action-tmate@v3
if: ${{ github.event_name == 'workflow_dispatch' && inputs.debug_enabled }}

- name: Run tests
run: docker-compose run --rm app python manage.py test
run: docker-compose -f docker-compose.test.yml run --rm app_test python manage.py test
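
The "Set up secrets" step above writes selected environment variables into `.env.secret` with shell redirection; an equivalent sketch in Python (file name and variable list come from the workflow, everything else is an assumption):

```python
# Sketch: reproduce the "Set up secrets" step, writing KEY=value lines
# to .env.secret for docker-compose to pick up.
import os

SECRET_KEYS = ["POSTGRES_PASSWORD", "DJANGO_SECRET_KEY", "DJANGO_TEST"]

with open(".env.secret", "w") as fh:
    for key in SECRET_KEYS:
        fh.write(f"{key}={os.environ[key]}\n")
```
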
11 changes: 10 additions & 1 deletion .gitignore
@@ -1,7 +1,6 @@
*.pyc
.vscode
.venv
config
.removed
galvanalyser/protobuf
galvanalyser/webapp/assets/protobuf
@@ -19,3 +18,13 @@ env
.test-data/

frontend/src/demo_matlab_code.m

**/.env.*
dev.sh
backend/backend_django/django_celery_beat.schedulersDatabaseScheduler

.harvester/

*.pptx

docker-compose.override.yml
114 changes: 52 additions & 62 deletions README.md
@@ -1,76 +1,66 @@
<img src="https://github.com/Battery-Intelligence-Lab/galvanalyser/raw/main/docs/Galvanalyser-logo-sm.png" width="500" />
<img src="docs/Galvanalyser-logo-sm.png" width="500" />

[![Unit Tests (Docker)](https://github.com/Battery-Intelligence-Lab/galvanalyser/actions/workflows/unit-test.yml/badge.svg)](https://github.com/Battery-Intelligence-Lab/galvanalyser/actions/workflows/unit-test.yml)

Galvanalyser is a system for automatically storing data generated by battery cycling
machines in a database, using a set of "harvesters", whose job it is to monitor the
datafiles produced by the battery testers and upload it in a standard format to the
server database. The server database is a relational database that stores each dataset
along with information about column types, units, and other relevant metadata (e.g. cell
information, owner, purpose of the experiment)
machines in a database in a standard format.
The project contains several distinct components:

- A REST API
- A Python client for the REST API
- A web frontend for the REST API
- [Harvesters](docs/UserGuide.md#harvesters) that monitor and process experimental data

A laboratory running a [Galvanalyser server](docs/UserGuide.md#galvanalyser-server) instance and battery
cycling machines can use Galvanalyser to make it easy to access, analyse, and
share their experimental data.
To do this, they:
1. Set the cycling machines up to output their raw test result files to a shared drive.
2. Set up a [harvester](docs/UserGuide.md#harvesters) on a computer with access to that shared drive.
- (This only needs to be done once)
3. Log into their lab [Web frontend](docs/UserGuide.md#web-frontend) and configure the
[harvester](docs/UserGuide.md#harvesters) to crawl the appropriate directories on the shared drive.
4. Log into the [Web frontend](docs/UserGuide.md#web-frontend) to edit metadata and view data,
or use the [Python client](docs/UserGuide.md#python-client) to download formatted data for analysis.

The harvesters are able to parse the following file types (see the sketch after this list):

- MACCOR files in .txt, .xls/.xlsx, or raw format
- Ivium files in .idf format
- Biologic files in .mpr format
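
A hypothetical dispatch sketch for these formats (the parser names are illustrative, not galvanalyser's actual module layout):

```python
# Sketch: choose a parser from the file extension of a tester output file.
from pathlib import Path

PARSERS = {
    ".txt": "maccor", ".xls": "maccor", ".xlsx": "maccor",
    ".idf": "ivium", ".mpr": "biologic",
}

def parser_for(path: str) -> str:
    ext = Path(path).suffix.lower()
    try:
        return PARSERS[ext]
    except KeyError:
        raise ValueError(f"unsupported tester file type: {ext!r}")
```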

The server database is a relational database that stores each dataset along with information
about column types, units, and other relevant metadata (e.g. cell information, owner, purpose
of the experiment).
The [REST API](docs/UserGuide.md#rest-api) provides its own definition via a downloadable OpenAPI schema file
(`schema/`), and provides interactive documentation via SwaggerUI (`schema/swagger-ui/`) and
Redoc (`schema/redoc/`).
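
For example, the schema file could be downloaded like this (a sketch assuming a local server address; the `schema/` path is from the paragraph above):

```python
# Sketch: fetch the OpenAPI schema the REST API serves at schema/.
import requests

BASE_URL = "http://localhost:8000"  # assumption: local development server

resp = requests.get(f"{BASE_URL}/schema/")
resp.raise_for_status()
with open("galvanalyser_openapi.yaml", "wb") as fh:
    fh.write(resp.content)  # save the schema exactly as served
```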

There are two user interfaces to the system:
- a web app front-end that can be used to view the stored datasets, manage the
harvesters, and input metadata for each dataset
- a REST API which can be used to download dataset metadata and the data itself. This
API conforms to the [battery-api](https://github.com/martinjrobins/battery-api)
OpenAPI specification, so tools based on this specification (e.g. the Python client)
can use the API.

A diagram of the logical structure of the system is shown below. The arrows indicate the
direction of data flow.

<img src="https://github.com/Battery-Intelligence-Lab/galvanalyser/raw/main/docs/GalvanalyserStructure.PNG" width="600" />
<img src="docs/GalvanalyserStructure.PNG" alt="Data flows from battery cycling machines to Galvanalyser Harvesters, then to the Galvanalyser server and REST API. Metadata can be updated and data read using the web client, and data can be downloaded by the Python client." width="600" />


## Project documentation

The `documentation` directory contains more detailed documentation on a number of topics. It contains the following items:
* [FirstTimeQuickSetup.md](./docs/FirstTimeQuickSetup.md) - A quick start guide to
The `docs` directory contains more detailed documentation on a number of topics.
It contains the following items:
- [FirstTimeQuickSetup.md](docs/FirstTimeQuickSetup.md) - A quick start guide to
setting up your first complete Galvanalyser system
* [AdministrationGuide.md](./docs/AdministrationGuide.md) - A guide to performing
administration tasks such as creating users and setting up harvesters
* [DevelopmentGuide.md](./docs/DevelopmentGuide.md) - A guide for developers on
- Installing Galvanalyser server
- Setting up a Harvester
- Checking your setup
- [UserGuide.md](docs/UserGuide.md) - A detailed guide for Galvanalyser users
- [Galvanalyser server](docs/UserGuide.md#galvanalyser-server)
- [REST API](docs/UserGuide.md#rest-api)
- [User accounts](docs/UserGuide.md#user-accounts)
- [Web interface](docs/UserGuide.md#web-interface)
- [Python client](docs/UserGuide.md#python-client)
- [Harvesters](docs/UserGuide.md#harvesters)
- [DevelopmentGuide.md](docs/DevelopmentGuide.md) - A guide for developers on
Galvanalyser
* [ProjectStructure.md](./docs/ProjectStructure.md) - An overview of the project folder
structure to guide developers to the locations of the various parts of the project

## Technology used

This section provides a brief overview of the technology used to implement the different parts of the project.

### Docker

Dockerfiles are provided to run all components of this project in containers. A docker-compose file exists to simplify starting the complete server side system including the database, the web app and the Nginx server. All components of the project can be run natively; however, using Docker simplifies this greatly.

A Docker container is also used for building the web app and its dependencies to simplify cross platform deployment and ensure a consistent and reliable build process.

### Backend server

The server is a [Flask](https://flask.palletsprojects.com) web application, which uses
[SQLAlchemy](https://www.sqlalchemy.org/) and [psycopg2](https://www.psycopg.org/) to
interface with the Postgres database.

### Harvesters

The harvesters are python modules in the backend server which monitor directories for
tester datafiles, parse them according to their format, and write the data and any
metadata into the Postgres database. The harvesters are run, either periodically or
manually by a user, using a [Celery](https://docs.celeryproject.org/)
distributed task queue.
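
A hypothetical sketch of that arrangement (task and schedule names are illustrative; only the broker URL is taken from the `.env` file in this diff):

```python
# Sketch: a harvester run as a Celery task, scheduled periodically by beat.
from celery import Celery

app = Celery("harvesters", broker="pyamqp://test-user:test-user@rabbitmq:5672")

@app.task(name="harvesters.run_harvester")
def run_harvester(directory: str) -> None:
    ...  # scan `directory`, parse new tester files, write rows to Postgres

app.conf.beat_schedule = {
    "harvest-every-5-minutes": {
        "task": "harvesters.run_harvester",
        "schedule": 300.0,  # seconds
        "args": ("/data/datafiles",),
    },
}
```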

### Frontend web application

The frontend is written in Javascript, using the [React](https://reactjs.org/) framework
and [Material-UI](https://material-ui.com/) components.

<img src="https://github.com/Battery-Intelligence-Lab/galvanalyser/raw/main/docs/galvanalyser_frontend.jpg" width="600" />


### Database

The project uses PostgreSQL for its database. Other databases are currently not
supported. An entity relationship diagram is shown below.

<img src="https://github.com/Battery-Intelligence-Lab/galvanalyser/raw/main/docs/Galvanalyser_DB_ERD.png" width="600" />

- [Creating a testing instance](docs/DevelopmentGuide.md#running)
- [Running tests](docs/DevelopmentGuide.md#testing)
- [Technology used](docs/DevelopmentGuide.md#technology-used)
- [Contributor guide](docs/DevelopmentGuide.md#contributor-guide)
25 changes: 15 additions & 10 deletions backend/Dockerfile
@@ -1,13 +1,18 @@
FROM python
FROM python:3.10.4-slim@sha256:a2e8240faa44748fe18c5b37f83e14101a38dd3f4a1425d18e9e0e913f89b562

RUN mkdir -p /usr/app
WORKDIR /usr/app

RUN apt-get update && apt-get install -y postgresql-client
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

COPY . .
RUN pip install -e .
# Install postgresql-client for healthchecking
RUN apt-get update && \
apt-get install -y \
postgresql-client \
build-essential libssl-dev libffi-dev python3-dev python-dev && \
apt-get autoremove && \
apt-get autoclean

ENV FLASK_APP=app.py

EXPOSE 8050
RUN mkdir -p /usr/app
WORKDIR /usr/app
COPY requirements.txt /requirements.txt
RUN pip install -r /requirements.txt
COPY . /usr/app
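
The `postgresql-client` package installed above provides `pg_isready`; a sketch of the kind of healthcheck it enables (the host and user values are assumptions):

```python
# Sketch: report whether Postgres is accepting connections, e.g. from a
# docker-compose healthcheck or a wait-for-db startup script.
import subprocess

def postgres_ready(host: str = "postgres", user: str = "postgres") -> bool:
    # pg_isready exits with status 0 once the server accepts connections
    result = subprocess.run(["pg_isready", "-h", host, "-U", user])
    return result.returncode == 0
```
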
20 changes: 20 additions & 0 deletions backend/Dockerfile-test
@@ -0,0 +1,20 @@
FROM python:3.10.4-slim@sha256:a2e8240faa44748fe18c5b37f83e14101a38dd3f4a1425d18e9e0e913f89b562

ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

# Install postgresql-client for healthchecking
RUN apt-get update && \
apt-get install -y \
postgresql-client \
build-essential libssl-dev libffi-dev python3-dev python-dev && \
apt-get autoremove && \
apt-get autoclean

RUN mkdir -p /usr/app
WORKDIR /usr/app
COPY requirements.txt /requirements.txt
RUN pip install -r /requirements.txt
COPY requirements-test.txt /requirements-test.txt
RUN pip install -r /requirements-test.txt
COPY . /usr/app
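
The test image is driven by the `DJANGO_TEST` variable set in the workflow above; a hypothetical example of how settings code might consume it (the toggle itself is an assumption, not taken from this PR):

```python
# Sketch: opt into test-only settings when DJANGO_TEST=TRUE is set.
import os

DJANGO_TEST = os.environ.get("DJANGO_TEST", "").upper() == "TRUE"

if DJANGO_TEST:
    # e.g. a fast, insecure password hasher to speed up the test suite
    PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]
```
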
1 change: 0 additions & 1 deletion backend/README.md

This file was deleted.

5 changes: 0 additions & 5 deletions backend/app.py

This file was deleted.

1 change: 1 addition & 0 deletions backend/backend_django/config/__init__.py
@@ -0,0 +1 @@
import galvanalyser.schema
16 changes: 16 additions & 0 deletions backend/backend_django/config/asgi.py
@@ -0,0 +1,16 @@
"""
ASGI config for backend_django project.

It exposes the ASGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/4.1/howto/deployment/asgi/
"""

import os

from django.core.asgi import get_asgi_application

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')

application = get_asgi_application()
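
For reference, this entry point would be served by an ASGI server; a sketch assuming uvicorn (which this PR does not pin in its requirements):

```python
# Sketch: serve config.asgi:application with uvicorn.
import uvicorn

if __name__ == "__main__":
    uvicorn.run("config.asgi:application", host="0.0.0.0", port=8000)
```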