54 changes: 11 additions & 43 deletions .ci/azure-pipelines.yml
@@ -20,7 +20,6 @@ pr:
exclude:
- '*'


jobs:
- job: Windows
variables:
@@ -130,7 +129,7 @@ jobs:
docker pull $(DPF_IMAGE)
docker run --restart always --name dpf -v `pwd`:/dpf -v /tmp:/dpf/_cache -p $(DPF_PORT):50054 $(DPF_IMAGE) > log.txt &
grep -q 'server started on ip' <(timeout 60 tail -f log.txt)
python -c "from ansys.dpf import core; core.connect_to_server(port=$(DPF_PORT)); print('Python Connected')"
python -c "from ansys.dpf import core as dpf; dpf.connect_to_server(port=$(DPF_PORT)); print('Python Connected')"
displayName: Pull, launch, and validate DPF service

- script: |
@@ -140,44 +139,13 @@
pytest -v --junitxml=junit/test-results.xml --cov ansys.dpf.core --cov-report=xml --cov-report=html
displayName: Test Core API


# - script: |
# .ci/setup_headless_display.sh
# pip install -r .ci/requirements_test_xvfb.txt
# python .ci/display_test.py
# displayName: Install and start a virtual framebuffer


# - script: |
# set -ex
# echo $(PAT) | docker login -u $(GH_USERNAME) --password-stdin docker.pkg.github.com
# docker pull $(MAPDL_IMAGE)
# docker run -e ANSYSLMD_LICENSE_FILE=1055@$(LICENSE_SERVER) --restart always --name mapdl -p $(PYMAPDL_PORT):50052 $(MAPDL_IMAGE) -smp &
# python -c "from ansys.mapdl import launch_mapdl; print(launch_mapdl())"
# displayName: Pull, launch, and validate MAPDL service

# - script: |
# pip install -r requirements_test.txt
# pip install pytest-azurepipelines
# pytest -v --junitxml=junit/test-results.xml --cov --cov-report=xml --cov-report=html
# displayName: 'Test Core API'

# - template: build_documentation.yml # path is relative

# - script: |
# bash <(curl -s https://codecov.io/bash)
# displayName: 'Upload coverage to codecov.io'
# condition: eq(variables['python.version'], '3.7')

# - script: |
# pip install twine
# python setup.py sdist
# twine upload --skip-existing dist/pyvista*
# displayName: 'Upload to PyPi'
# condition: and(eq(variables['python.version'], '3.7'), contains(variables['Build.SourceBranch'], 'refs/tags/'))
# env:
# TWINE_USERNAME: $(twine.username)
# TWINE_PASSWORD: $(twine.password)
# TWINE_REPOSITORY_URL: "https://upload.pypi.org/legacy/"


- script: |
pip install twine
python setup.py sdist
twine upload --skip-existing dist/*
displayName: 'Upload to PyPi'
condition: contains(variables['Build.SourceBranch'], 'refs/tags/')
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: $(PYPI_TOKEN)
TWINE_REPOSITORY_URL: "https://upload.pypi.org/legacy/"
15 changes: 12 additions & 3 deletions .github/workflows/ci-build.yml
@@ -3,8 +3,8 @@ name: Documentation Build

on: [push, pull_request, workflow_dispatch]


jobs:
# This workflow contains a single job called "build"
build:
runs-on: ubuntu-20.04

@@ -50,17 +50,26 @@ jobs:
env:
GH_USERNAME: ${{ secrets.GH_USERNAME }}
PAT: ${{ secrets.REPO_DOWNLOAD_PAT }}



- name: Build Documentation
run: |
sudo apt install pandoc -qy
pip install -r requirements_docs.txt
make -C docs html
touch docs/build/html/.nojekyll

- name: Upload Documentation
uses: actions/upload-artifact@v2.2.1
with:
name: Documentation
path: docs/build/html
retention-days: 7

- name: Deploy
uses: JamesIves/github-pages-deploy-action@3.7.1
if: startsWith(github.ref, 'refs/tags/')
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BRANCH: gh-pages
FOLDER: docs/build/html
CLEAN: true
104 changes: 63 additions & 41 deletions README.md
@@ -1,44 +1,89 @@
# DPF - ANSYS Data Processing Framework
# DPF - Ansys Data Processing Framework

The Data Processing Framework (DPF) is designed to provide numerical
simulation users/engineers with a toolbox for accessing and
transforming simulation data. DPF can access data from solver result
files as well as several neutral formats (csv, hdf5, vtk,
etc.). Various operators are available allowing the manipulation and
transformation of this data.

DPF is a workflow-based framework which allows simple and/or complex
evaluations by chaining operators. The data in DPF is defined based on
physics-agnostic mathematical quantities described in a
self-sufficient entity called a field. This allows DPF to be a modular
and easy-to-use tool with a large range of capabilities. It's a
product designed to handle large amounts of data.

The Python ``ansys.dpf.core`` module provides a Python interface to
the powerful DPF framework enabling rapid post-processing of a variety
of Ansys file formats and physics solutions without ever leaving a
Python environment.


## Installation

Clone and install this repository with:
Install the package with:

```
git clone https://github.com/pyansys/DPF-Core
cd DPF-Core
pip install . --user
pip install ansys-dpf-core
```

Install any missing libraries from Artifactory with:
You can also clone and install this repository with:

```
pip install --extra-index-url=http://canartifactory.ansys.com:8080/artifactory/api/pypi/pypi/simple --trusted-host canartifactory.ansys.com ansys-grpc-dpf
git clone https://github.com/pyansys/DPF-Core
cd DPF-Core
pip install . --user
```

This step will be eliminated once DPF is live on PyPi.


## Running DPF

### Brief Demo
Provided you have ANSYS 2021R1 installed, a DPF server will start
automatically once you start using DPF:
automatically once you start using DPF.

Opening a result file generated from Ansys Workbench or MAPDL is as easy as:

```py
from ansys.dpf import core
```
>>> from ansys.dpf.core import Model
>>> model = Model('file.rst')
>>> print(model)
DPF Model
------------------------------
Static analysis
Unit system: Metric (m, kg, N, s, V, A)
Physics Type: Mecanic
Available results:
- displacement
- element_nodal_forces
- volume
- energy_stiffness_matrix
- hourglass_energy
- thermal_dissipation_energy
- kinetic_energy
- co_energy
- incremental_energy
- temperature
```

Open up a result with:

norm = core.Operator('norm_fc')
```py
>>> model.displacement
```

# or open up a model
model = core.Model('file.rst')
Then start linking operators with:

```py
>>> norm = core.Operator('norm_fc')
```
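
The pieces above can be chained together. The following is a minimal
sketch rather than the documented workflow: the ``model.displacement``
result is taken from the demo above, and the pin numbers passed to
``connect`` and ``get_output`` are assumptions.

```py
from ansys.dpf import core
from ansys.dpf.core.common import types

# open the result file and grab the displacement result shown above
model = core.Model('file.rst')
disp = model.displacement

# chain a norm operator onto the displacement data (pin 0 assumed)
norm = core.Operator('norm_fc')
norm.connect(0, disp)
fields = norm.get_output(0, types.fields_container)
```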

The `ansys.dpf.core` module takes care of starting your local server
for you so you don't have to. If you need to connect to a remote DPF
instance, use the ``connect_to_server`` function:
### Starting the Service

The `ansys.dpf.core` module automatically starts the DPF service in the
background and connects to it. If you need to connect to an existing
remote DPF instance, use the ``connect_to_server`` function:

```py
from ansys.dpf import core
core.connect_to_server('10.0.0.22', 50054)
```

@@ -48,26 +93,3 @@
Once connected, this connection will remain for the duration of the
module until you exit Python or connect to a different server.


## Unit Testing

Unit tests can be run by first installing the testing requirements with `pip install -r requirements_test.txt` and then running pytest with:

```
pytest
```

If you have ANSYS v2021R1 installed locally, the unit tests will
automatically start up the DPF server and run the tests. If you need
to disable this and have the unit tests run against a remote server,
set up the following environment variables:

```
set DPF_START_SERVER=False
set DPF_IP=<IP of Remote Computer>
set DPF_PORT=<Port of Remote DPF Server>
```


## Examples
See the example scripts in the examples folder for some basic examples.
4 changes: 2 additions & 2 deletions ansys/dpf/core/data_sources.py
@@ -9,8 +9,8 @@ class DataSources:
"""Represent the file sources of a model.

Initialize the data_sources with either optional data_sources
message, or by connecting to a stub A Result path can be
directly set
message, or by connecting to a stub. Result path can also be
directly set.

Parameters
----------
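As a point of reference for the docstring above, a minimal usage sketch
(the ``file.rst`` path is a placeholder, and passing the sources
directly to ``Model`` is an assumption):

```py
from ansys.dpf import core

# point the data sources at a result file (placeholder path)
data_sources = core.DataSources('file.rst')

# a model can then be built on top of these sources
model = core.Model(data_sources)
```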
6 changes: 5 additions & 1 deletion ansys/dpf/core/operators_helper.py
@@ -1,4 +1,8 @@
"""Wrappers for operators"""
"""Wrappers for DPF operators.

These operators are available as functions from ``dpf.operators`` and
simplify the creation of new chained operators.
"""
from ansys import dpf
from ansys.dpf.core.common import types as dpf_types

7 changes: 5 additions & 2 deletions ansys/dpf/core/plotter.py
@@ -1,6 +1,7 @@
"""Dpf plotter class is contained in this module.
Allows to plot a mesh and a fields container
using pyvista."""

Allows to plot a mesh and a fields container using pyvista.
"""
import tempfile

import pyvista as pv
@@ -16,6 +17,8 @@


class Plotter:
"""Internal class used by DPF-Core to plot fields and meshed regions"""

def __init__(self, mesh):
self._mesh = mesh

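Since ``Plotter`` is internal, plotting is normally reached through
higher-level objects. A sketch under that assumption (the
``meshed_region.plot`` call is assumed, not confirmed by this diff):

```py
from ansys.dpf import core

model = core.Model('file.rst')

# the internal Plotter class is driven by plot helpers such as the
# meshed region's plot method (method name assumed)
model.metadata.meshed_region.plot()
```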
5 changes: 5 additions & 0 deletions docker/env.sh
@@ -0,0 +1,5 @@
# Setup the testing environment using docker
# run with:
# source env.sh
export DPF_START_SERVER=FALSE
export DPF_DOCKER=True
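
These variables mirror the remote-server settings described in the
README. A sketch of how a test session might honour them (the ``ip``
keyword name is assumed, and this environment handling is illustrative
rather than the package's actual startup logic):

```py
import os

from ansys.dpf import core

# when env.sh has been sourced, skip the local server and connect to
# the already-running docker container instead
if os.environ.get('DPF_START_SERVER', '').upper() == 'FALSE':
    core.connect_to_server(
        ip=os.environ.get('DPF_IP', '127.0.0.1'),
        port=int(os.environ.get('DPF_PORT', 50054)),
    )
```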
6 changes: 6 additions & 0 deletions docs/source/api/data_sources.rst
@@ -0,0 +1,6 @@
******************
Data Sources Class
******************
.. autoclass:: ansys.dpf.core.data_sources.DataSources
:members:
:private-members:
5 changes: 5 additions & 0 deletions docs/source/api/element.rst
@@ -0,0 +1,5 @@
*************
Element Class
*************
.. autoclass:: ansys.dpf.core.meshed_region.Element
:members:
5 changes: 5 additions & 0 deletions docs/source/api/elements.rst
@@ -0,0 +1,5 @@
**************
Elements Class
**************
.. autoclass:: ansys.dpf.core.meshed_region.Elements
:members:
5 changes: 5 additions & 0 deletions docs/source/api/field.rst
@@ -0,0 +1,5 @@
***********
Field Class
***********
.. autoclass:: ansys.dpf.core.field.Field
:members:
5 changes: 5 additions & 0 deletions docs/source/api/fieldscontainer.rst
@@ -0,0 +1,5 @@
*********************
FieldsContainer Class
*********************
.. autoclass:: ansys.dpf.core.fields_container.FieldsContainer
:members: