diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..c395c43 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,32 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS: [e.g. iOS] + - Version [e.g. 22] + - Other details about your setup that could be relevant + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..bbcbbe7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..9de24d2 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,49 @@ +# Description + +Addresses #(issue) + +Please include a summary of the change and which issue is fixed. Please also +include relevant motivation and context. List any dependencies that are required +for this change. Please see ``CONTRIBUTING.md`` for more guidelines. + +## Type of change + +Please delete options that are not relevant. + +- Bug fix (non-breaking change which fixes an issue) +- New feature (non-breaking change which adds functionality) +- Breaking change (fix or feature that would cause existing functionality + to not work as expected) +- This change requires a documentation update + +# How Has This Been Tested? + +Please describe the tests that you ran to verify your changes. Provide +instructions so we can reproduce. 
Please also list any relevant details for +your test configuration + +- Test A +- Test B + +**Test Configuration**: +* Operating system: Hal +* Version number: Python 3.X +* Any details about your local setup that are relevant: pysat version X + +# Checklist: + +- [ ] Make sure you are merging into the ``develop`` (not ``main``) branch +- [ ] My code follows the style guidelines of this project +- [ ] I have performed a self-review of my own code +- [ ] I have commented my code, particularly in hard-to-understand areas +- [ ] I have made corresponding changes to the documentation +- [ ] My changes generate no new warnings +- [ ] I have added tests that prove my fix is effective or that my feature works +- [ ] New and existing unit tests pass locally with my changes +- [ ] Any dependent changes have been merged and published in downstream modules +- [ ] Add a note to ``CHANGELOG.md``, summarizing the changes +- [ ] Update zenodo.json file for new code contributors + + If this is a release PR, replace the first item of the above checklist with the + release checklist on the pysat wiki: + https://github.com/pysat/pysat/wiki/Checklist-for-Release \ No newline at end of file diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000..8c5791a --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,32 @@ +# This workflow will install Python dependencies, run tests and lint with a variety of Python versions +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: Documentation Check + +on: [push, pull_request] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: [3.7] + + name: Documentation tests + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r test_requirements.txt + pip install -r requirements.txt + + - name: Load .zenodo.json to check for errors + run: python -c "import json; json.loads(open('.zenodo.json').read())" diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000..acdc836 --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,56 @@ +# This workflow will install Python dependencies, run tests and lint with a variety of Python versions +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: Pytest with Flake8 + +on: [push, pull_request] + +jobs: + build: + strategy: + fail-fast: false + matrix: + python-version: ["3.9", "3.10"] + os: [ubuntu-latest] + numpy_ver: [latest] + include: + - python-version: "3.8" + numpy_ver: "1.19" + os: "ubuntu-latest" + + name: Python ${{ matrix.python-version }} on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r test_requirements.txt + pip install -r requirements.txt + + - name: Install NEP29 dependencies + if: ${{ matrix.numpy_ver != 'latest'}} + run: | + pip install --no-binary :numpy: numpy==${{ matrix.numpy_ver }} + + - name: Test PEP8 compliance + run: flake8 . 
--count --select=E,F,W --show-source --statistics + + - name: Evaluate complexity + run: flake8 . --count --exit-zero --max-complexity=10 --statistics + + - name: Install + run: python setup.py develop + + - name: Test with pytest + run: pytest --cov=pysatCDF/ + + - name: Publish results to coveralls + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: coveralls --rcfile=setup.cfg --service=github diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index a0cf6e1..0000000 --- a/.travis.yml +++ /dev/null @@ -1,50 +0,0 @@ -language: python -jobs: - include: - - python: 3.6 - - python: 3.7 - - python: 3.8 -sudo: false - - -addons: - apt: - packages: - - gfortran - - libncurses5-dev - -# Setup anaconda -before_install: - #- apt-get update - - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then - wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh; - else - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; - fi - - bash miniconda.sh -b -p $HOME/miniconda - - export PATH="$HOME/miniconda/bin:$PATH" - - hash -r - - conda config --set always_yes yes --set changeps1 no - - conda update -q --yes conda - # Useful for debugging any issues with conda - - conda info -a - - # Replace dep1 dep2 ... with your dependencies - - conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION numpy scipy matplotlib nose pandas statsmodels coverage - -# command to install dependencies -install: - # Coverage packages are on my binstar channel - # - conda install --yes -c dan_blanchard python-coveralls nose-cov - - source activate test-environment - - pip install coveralls - - pip install pysat - - "python setup.py develop" - - pip install netCDF4 -# command to run tests -script: - #- nosetests - - nosetests --with-coverage --cover-package=pysatCDF - #coverage run --source=pysat setup.py test -after_success: - coveralls diff --git a/.zenodo.json b/.zenodo.json new file mode 100644 index 0000000..07180e3 --- /dev/null +++ b/.zenodo.json @@ -0,0 +1,25 @@ +{ + "creators": [ + { + "affiliation": "Stoneris", + "name": "Stoneback, Russell", + "orcid": "0000-0001-7216-4336" + }, + { + "affiliation": "The University of Texas at Dallas", + "name": "Depew, Matthew" + }, + { + "affiliation": "Goddard Space Flight Center", + "name": "Klenzing, Jeffrey", + "orcid": "0000-0001-8321-6074" + }, + { + "name": "Iyer, Gayatri" + }, + { + "affiliation": "Predictive Science", + "name": "Pembroke, Asher" + } + ] +} diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..14b4c58 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,16 @@ +Change Log +========== +All notable changes to this project will be documented in this file. +This project adheres to [Semantic Versioning](https://semver.org/). + +[0.4.0] - 2022-XX-XX +-------------------- +* New Features + * Compatible with pysat v3.0+ +* Deprecations +* Documentation +* Bug Fix + * Improved builds for newer compilers. +* Maintenance + * Adopted latest pysat development standards. 
+ diff --git a/README.md b/README.md index 219259d..7f3011e 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ # pysatCDF -[![Build Status](https://travis-ci.org/pysat/pysatCDF.svg?branch=master)](https://travis-ci.org/pysat/pysatCDF) -[![Coverage Status](https://coveralls.io/repos/github/pysat/pysatCDF/badge.svg?branch=master)](https://coveralls.io/github/pysat/pysatCDF?branch=master) +[![PyPI Package latest release](https://img.shields.io/pypi/v/pysatcdf.svg)](https://pypi.python.org/pypi/pysatcdf) +[![Build Status](https://github.com/pysat/pysatCDF/actions/workflows/main.yml/badge.svg)](https://github.com/pysat/pysatCDF/actions/workflows/main.yml/badge.svg) +[![Coverage Status](https://coveralls.io/repos/github/pysat/pysatCDF/badge.svg?branch=main)](https://coveralls.io/github/pysat/pysatCDF?branch=main) [![DOI](https://zenodo.org/badge/51764432.svg)](https://zenodo.org/badge/latestdoi/51764432) Self-contained Python reader for NASA CDF file format @@ -31,10 +32,12 @@ with pysatCDF.CDF(filename) as cdf: ``` # Testing -pysatCDF has been tested on Mac OS X and Ubuntu 15.04. Support is included for building on windows if the mingw environment is present. +pysatCDF has been tested on Mac OS X and Ubuntu 15.04. Support is included +for building on windows via Windows Subsystem for Linux. # Motivation -Provide simple, robust access to CDF data in Python and simplify adding instruments to [pysat](https://github.com/rstoneback/pysat). +Provide simple, robust access to CDF data in Python and simplify +adding instruments to [pysat](https://github.com/pysat/pysat). # Installation in POSIX compatible environments Actual CDF loading is performed by the [NASA CDF libraries] (http://cdf.gsfc.nasa.gov/html/sw_and_docs.html) @@ -47,45 +50,16 @@ cd pysatCDF python setup.py install ``` +# Installing pysatCDF in MacOS + +MacOS does not ship with a Fortran compiler. One method for getting a suitable +build environment is to use brew. +``` +brew install gcc +``` + +and then install pysatCDF as normal. + # Installing pysatCDF in Windows -Python environment: Python 2.7.x -To compile pysatCDF in Windows, you need a POSIX compatible C/ Fortran compiling environment. Follow the below instructions to achieve this. - -1. Install MSYS2 from http://repo.msys2.org. The distrib folder contains msys2-x86_64-latest.exe (64-bit version) to install MSYS2. -2. Assuming you installed it in its default location C:\msys64, launch MSYS2 environment from C:\msys64\msys2.exe. This launches a shell session. -3. Now you need to make sure everything is up to date. This terminal command will run updates - pacman -Syuu -4. After running this command, you will be asked to close the terminal window using close button and not exit() command. Go ahead and do that. -5. Relaunch and run 'pacman -Syuu' again. -6. After the second run, you should be up to date. If you run the update command again, you will be informed that there was nothing more to update. Now you need to install build tools and your compiler toolchains. - pacman -S base-devel git mingw-w64-x86_64-toolchain -If it prompts you to make a selection and says (default:all), just press enter. This install may take a bit. -7. Now you need to set up your MSYS2 environment to use whatever python interpreter you want to build pysatCDF for. In my case the path was C:\Python27_64, but yours will be wherever python.exe exists. -8. Update MSYS2 path to include the folders with python binary and Scripts. To do that, navigate to your home directory in MSYS2. Mine is C:\msys64\home\gayui. 
-8. Edit the .bash_profile file to add the below lines somewhere in the file. - # Add System python - export PATH=$PATH:/c/Python27_64:/c/Python27_64/Scripts -Note the unix-style paths. So C: becomes /c/. If your python was in C:\foo\bar\python you would put /c/foo/bar/python and /c/foo/bar/python/Scripts -9. Next step is to add the mingw64 bin folder to your windows system path. Right-click on computer, hit properties. Then click advanced system settings, then environment variables. Find the system variable (as opposed to user variables) named PATH. This is a semicolon delimited list of the OS search paths for binaries. Add another semicolon and the path C:\msys64\mingw64\bin -10. Now you should have access to Python from within your MSYS2 environment. And your windows path should have access to the mingw binaries. To verify this, launch the mingw64 MSYS2 environment. - C:\msys64\mingw64.exe -Run the command - which python -and confirm that it points to the correct python version you want to be using. -11. Microsoft Visual C++ 9.0 is required to compile C sources. Download and install the right version of Microsoft Visual C++ for Python 2.7 from - - http://aka.ms/vcpython27 -12. We are now getting close to installing pysatCDF. Do the following in the shell environment that is already opened. - mkdir src - cd src - git clone https://github.com/rstoneback/pysatCDF.git - cd pysatCDF -13. Using a text editor of your choice, create a file called setup.cfg in - C:\msys64\home\gayui\src\pysatCDF (note: gayui will be replaced with your username) -Put the following in the file before saving and closing it. - [build] - compiler=mingw32 -14. In your MSYS2 MINGW64 environment, run - python setup.py install -This should compile and install the package to your site-packages for the python you are using. -15. You should now be able to import pysatCDF in your Python environment. If you get an ImportError, restart Python and import again. +Install the Windows Subsytem for Linux and proceed as per POSIX installation. diff --git a/pysatCDF/__init__.py b/pysatCDF/__init__.py index a641bf1..ca138e0 100644 --- a/pysatCDF/__init__.py +++ b/pysatCDF/__init__.py @@ -1 +1,18 @@ +"""pysatCDF is a simple reader for NASA's Common Data Format (CDF) files. + +pysatCDF uses NASA's C library to do the actual loading and couples +Python to this library via an intermediate Fortran layer. 
+ +""" + +import os + +# Set version +here = os.path.abspath(os.path.dirname(__file__)) +with open(os.path.join(here, 'version.txt')) as version_file: + __version__ = version_file.read().strip() + +# Import CDF interface from ._cdf import CDF as CDF + +del here diff --git a/pysatCDF/_cdf.py b/pysatCDF/_cdf.py index 9de4446..6ec6932 100644 --- a/pysatCDF/_cdf.py +++ b/pysatCDF/_cdf.py @@ -36,11 +36,10 @@ class CDF(object): """ def __init__(self, fname): - # in CDF docs it says don't include .cdf in name + # In CDF docs it says don't include .cdf in name + name = fname if fname[-4:].lower() == '.cdf': name = fname[:-4] - else: - name = fname self.fname = name status = fortran_cdf.open(name) @@ -90,28 +89,9 @@ def __exit__(self, type, value, tb): pass def __getitem__(self, key): - """return CDF variable by name""" - if not self.data_loaded: - # data hasn't been loaded, load up requested data - # and pass it back to the user - dim_size = self.z_variable_info[key]['dim_sizes'] - # only tracking up to two dimensional things - dim_size = dim_size[0] - if dim_size == 0: - dim_size += 1 - rec_num = self.z_variable_info[key]['rec_num'] - status, data = fortran_cdf.get_z_var(self.fname, key, dim_size, rec_num) - if status == 0: - if dim_size == 1: - data = data[0, :] - return data - else: - # raise ValueError('CDF Error status :', status) - raise IOError(fortran_cdf.statusreporter(status)) - else: - return chameleon(self.fname, key, self.data[key], - self.meta[key], - self.z_variable_info[key]) + """Return CDF variable by name.""" + return chameleon(self.fname, key, self.data[key], self.meta[key], + self.z_variable_info[key]) def inquire(self): """Maps to fortran CDF_Inquire. @@ -281,13 +261,17 @@ def _call_multi_fortran_z(self, names, data_types, rec_nums, input_type_code : int Specific type code to load func : function - Fortran function via python interface that will be used for actual loading. + Fortran function via python interface that will be used for + actual loading. epoch : bool - Flag indicating type is epoch. Translates things to datetime standard. + Flag indicating type is epoch. Translates things to datetime + standard. data_offset : - Offset value to be applied to data. Required for unsigned integers in CDF. + Offset value to be applied to data. Required for unsigned + integers in CDF. epoch16 : bool - Flag indicating type is epoch16. Translates things to datetime standard. + Flag indicating type is epoch16. Translates things to datetime + standard. 
""" @@ -301,7 +285,8 @@ def _call_multi_fortran_z(self, names, data_types, rec_nums, sub_names = np.array(names)[idx] sub_sizes = dim_sizes[idx] status, data = func(self.fname, sub_names.tolist(), - sub_sizes, sub_sizes.sum(), max_rec, len(sub_names)) + sub_sizes, sub_sizes.sum(), max_rec, + len(sub_names)) if status == 0: # account for quirks of CDF data storage for certain types if data_offset is not None: @@ -321,8 +306,8 @@ def _call_multi_fortran_z(self, names, data_types, rec_nums, data = data.astype('datetime64[ns]') sub_sizes /= 2 # all data of a type has been loaded and tweaked as necessary - # parse through returned array to break out the individual variables - # as appropriate + # parse through returned array to break out the individual + # variables as appropriate self._process_return_multi_z(data, sub_names, sub_sizes) else: raise IOError(fortran_cdf.statusreporter(status)) @@ -357,9 +342,10 @@ def _read_all_attribute_info(self): global_attrs_info = {} var_attrs_info = {} if status == 0: - for name, scope, gentry, rentry, zentry, num in zip(names, scopes, max_gentries, - max_rentries, max_zentries, - attr_nums): + for (name, scope, gentry, + rentry, zentry, num) in zip(names, scopes, max_gentries, + max_rentries, max_zentries, + attr_nums): name = ''.join(name) name = name.rstrip() nug = {} @@ -412,7 +398,8 @@ def _read_all_z_attribute_data(self): self.var_attrs_info[name]['data_type'] = data_types[i] self.var_attrs_info[name]['num_elems'] = num_elems[i] self.var_attrs_info[name]['entry_num'] = entry_nums[i] - exp_attr_nums.extend([self.var_attrs_info[name]['attr_num']] * len(entry_nums[i])) + exp_attr_nums.extend([self.var_attrs_info[name]['attr_num']] + * len(entry_nums[i])) attr_names.extend([name] * len(entry_nums[i])) else: raise IOError(fortran_cdf.statusreporter(status)) @@ -436,7 +423,8 @@ def _read_all_z_attribute_data(self): attr_nums = attr_nums[idx] attr_names = np.array(attr_names)[idx] # grad corresponding variable name for each attribute - var_names = [self.z_variable_names_by_num[i].rstrip() for i in entry_nums] + var_names = [self.z_variable_names_by_num[i].rstrip() + for i in entry_nums] # the names that go along with this are already set up @@ -445,46 +433,59 @@ def _read_all_z_attribute_data(self): # get data back, shorten to num_elems, add to structure self._call_multi_fortran_z_attr(attr_names, data_types, num_elems, - entry_nums, attr_nums, var_names, self.cdf_data_types['real4'], + entry_nums, attr_nums, var_names, + self.cdf_data_types['real4'], fortran_cdf.get_multi_z_attr_real4) self._call_multi_fortran_z_attr(attr_names, data_types, num_elems, - entry_nums, attr_nums, var_names, self.cdf_data_types['float'], + entry_nums, attr_nums, var_names, + self.cdf_data_types['float'], fortran_cdf.get_multi_z_attr_real4) self._call_multi_fortran_z_attr(attr_names, data_types, num_elems, - entry_nums, attr_nums, var_names, self.cdf_data_types['real8'], + entry_nums, attr_nums, var_names, + self.cdf_data_types['real8'], fortran_cdf.get_multi_z_attr_real8) self._call_multi_fortran_z_attr(attr_names, data_types, num_elems, - entry_nums, attr_nums, var_names, self.cdf_data_types['double'], + entry_nums, attr_nums, var_names, + self.cdf_data_types['double'], fortran_cdf.get_multi_z_attr_real8) self._call_multi_fortran_z_attr(attr_names, data_types, num_elems, - entry_nums, attr_nums, var_names, self.cdf_data_types['byte'], + entry_nums, attr_nums, var_names, + self.cdf_data_types['byte'], fortran_cdf.get_multi_z_attr_int1) self._call_multi_fortran_z_attr(attr_names, 
data_types, num_elems, - entry_nums, attr_nums, var_names, self.cdf_data_types['int1'], + entry_nums, attr_nums, var_names, + self.cdf_data_types['int1'], fortran_cdf.get_multi_z_attr_int1) self._call_multi_fortran_z_attr(attr_names, data_types, num_elems, - entry_nums, attr_nums, var_names, self.cdf_data_types['uint1'], + entry_nums, attr_nums, var_names, + self.cdf_data_types['uint1'], fortran_cdf.get_multi_z_attr_int1, data_offset=256) self._call_multi_fortran_z_attr(attr_names, data_types, num_elems, - entry_nums, attr_nums, var_names, self.cdf_data_types['int2'], + entry_nums, attr_nums, var_names, + self.cdf_data_types['int2'], fortran_cdf.get_multi_z_attr_int2) self._call_multi_fortran_z_attr(attr_names, data_types, num_elems, - entry_nums, attr_nums, var_names, self.cdf_data_types['uint2'], + entry_nums, attr_nums, var_names, + self.cdf_data_types['uint2'], fortran_cdf.get_multi_z_attr_int2, data_offset=65536) self._call_multi_fortran_z_attr(attr_names, data_types, num_elems, - entry_nums, attr_nums, var_names, self.cdf_data_types['int4'], + entry_nums, attr_nums, var_names, + self.cdf_data_types['int4'], fortran_cdf.get_multi_z_attr_int4) self._call_multi_fortran_z_attr(attr_names, data_types, num_elems, - entry_nums, attr_nums, var_names, self.cdf_data_types['uint4'], + entry_nums, attr_nums, var_names, + self.cdf_data_types['uint4'], fortran_cdf.get_multi_z_attr_int4, data_offset=2 ** 32) self._call_multi_fortran_z_attr(attr_names, data_types, num_elems, - entry_nums, attr_nums, var_names, self.cdf_data_types['char'], + entry_nums, attr_nums, var_names, + self.cdf_data_types['char'], fortran_cdf.get_multi_z_attr_char) self._call_multi_fortran_z_attr(attr_names, data_types, num_elems, - entry_nums, attr_nums, var_names, self.cdf_data_types['uchar'], + entry_nums, attr_nums, var_names, + self.cdf_data_types['uchar'], fortran_cdf.get_multi_z_attr_char) def _call_multi_fortran_z_attr(self, names, data_types, num_elems, @@ -523,11 +524,14 @@ def _call_multi_fortran_z_attr(self, names, data_types, num_elems, # raise first error raise IOError(fortran_cdf.statusreporter(status[idx][0])) - def _process_return_multi_z_attr(self, data, attr_names, var_names, sub_num_elems): + def _process_return_multi_z_attr(self, data, attr_names, var_names, + sub_num_elems): '''process and attach data from fortran_cdf.get_multi_*''' # process data - for i, (attr_name, var_name, num_e) in enumerate(zip(attr_names, var_names, sub_num_elems)): + for i, (attr_name, var_name, num_e) in enumerate(zip(attr_names, + var_names, + sub_num_elems)): if var_name not in self.meta.keys(): self.meta[var_name] = {} if num_e == 1: @@ -539,17 +543,18 @@ def _process_return_multi_z_attr(self, data, attr_names, var_names, sub_num_elem try: chars.append(d.astype('U')) except UnicodeDecodeError: - # Uninterpretable character was encountered. Fill inserted. + # Uninterpretable character was encountered. + # Fill inserted. 
chars.append('*') self.meta[var_name][attr_name] = ''.join(chars).rstrip() else: self.meta[var_name][attr_name] = data[i, 0:num_e] def to_pysat(self, flatten_twod=True, units_label='UNITS', - name_label='long_name', fill_label='FILLVAL', - plot_label='FieldNam', min_label='ValidMin', - max_label='ValidMax', notes_label='Var_Notes', - desc_label='CatDesc', axis_label = 'LablAxis'): + name_label='LONG_NAME', fill_label='FILLVAL', + plot_label='FIELDNAM', min_label='VALIDMIN', + max_label='VALIDMAX', notes_label='VAR_NOTES', + desc_label='CATDESC', axis_label='LABLAXIS'): """Exports loaded CDF data into data, meta for pysat module Parameters @@ -568,28 +573,28 @@ def to_pysat(self, flatten_twod=True, units_label='UNITS', name_label : str Identifier within metadata for variable name, not normally present within CDAWeb files. If not, will use values from the variable name - in the file. (default='long_name') + in the file. (default='LONG_NAME') fill_label : str Identifier within metadata for Fill Values. Defults to CDAWab standard. (default='FILLVAL') plot_label : str Identifier within metadata for variable name used when plotting. - Defults to CDAWab standard. (default='FieldNam') + Defults to CDAWab standard. (default='FIELDNAM') min_label : str Identifier within metadata for minimim variable value. - Defults to CDAWab standard. (default='ValidMin') + Defults to CDAWab standard. (default='VALIDMIN') max_label : str Identifier within metadata for maximum variable value. - Defults to CDAWab standard. (default='ValidMax') + Defults to CDAWab standard. (default='VALIDMAX') notes_label : str Identifier within metadata for notes. Defults to CDAWab standard. - (default='Var_Notes') + (default='VAR_NOTES') desc_label : str Identifier within metadata for a variable description. - Defults to CDAWab standard. (default='CatDesc') + Defults to CDAWab standard. (default='CATDESC') axis_label : str Identifier within metadata for axis name used when plotting. - Defults to CDAWab standard. (default='LablAxis') + Defults to CDAWab standard. 
(default='LABLAXIS') Returns @@ -615,7 +620,8 @@ def to_pysat(self, flatten_twod=True, units_label='UNITS', # Copy data cdata = self.data.copy() - # Create a dictionary of the labels for use in intializing the Metadata + # Create a dictionary of the labels for use in initializing + # the Metadata labels = {'units': (units_label, str), 'name': (name_label, str), 'notes': (notes_label, str), 'desc': (desc_label, str), 'plot': (plot_label, str), 'axis': (axis_label, str), @@ -625,7 +631,8 @@ def to_pysat(self, flatten_twod=True, units_label='UNITS', # Create pysat.Meta object using data above # and utilizing the attribute labels provided by the user - meta = pysat.Meta(pandas.DataFrame.from_dict(self.meta, orient='index'), + meta = pysat.Meta(pandas.DataFrame.from_dict(self.meta, + orient='index'), labels=labels) # account for different possible cases for Epoch, epoch, EPOCH, epOch @@ -655,7 +662,7 @@ def to_pysat(self, flatten_twod=True, units_label='UNITS', new_list = [] new_index = np.arange(step) for i in np.arange(len(epoch)): - new_list.append(frame.iloc[i*step:(i+1)*step, :]) + new_list.append(frame.iloc[i * step:(i + 1) * step, :]) new_list[-1].index = new_index new_frame = pandas.Series(new_list, index=epoch, name=name) @@ -670,8 +677,8 @@ def to_pysat(self, flatten_twod=True, units_label='UNITS', # remove data from dict when adding to the DataFrame drop_list.append(name) frame = pandas.DataFrame(cdata[name].T, - index=epoch, - columns=new_names) + index=epoch, + columns=new_names) two_d_data.append(frame) for name in drop_list: _ = cdata.pop(name) diff --git a/pysatCDF/tests/test_cdf.py b/pysatCDF/tests/test_cdf.py index 7f2d230..ea423ec 100644 --- a/pysatCDF/tests/test_cdf.py +++ b/pysatCDF/tests/test_cdf.py @@ -1,6 +1,3 @@ -import numpy as np -from nose.tools import assert_raises, raises -import nose.tools import os import pysatCDF @@ -17,7 +14,7 @@ def test_vefi_load(self): """Load VEFI file and perform basic data checks.""" fname = os.path.join(pysatCDF.__path__[0], 'tests', 'test_data', 'cnofs_vefi_bfield_1sec_20080601_v05.cdf') - + with pysatCDF.CDF(fname) as cdf: data = cdf.data @@ -32,15 +29,35 @@ def test_vefi_load_and_chameleon_data_access(self): """Load VEFI file and utilize spacepy like access.""" fname = os.path.join(pysatCDF.__path__[0], 'tests', 'test_data', 'cnofs_vefi_bfield_1sec_20080601_v05.cdf') - + with pysatCDF.CDF(fname) as cdf: - data = cdf.data # Check on spacepy CDF attribute access mechanism assert (cdf['year'].attrs['FILLVAL'] == 65535) - + # Basic checks on spacepy CDF data access assert (cdf['B_flag'][...][0] == 0) assert (int(cdf['altitude'][...][0]) == 694) assert (cdf['year'][...][0] == 2008) + assert (cdf['year'][0] == 2008) + + # Test repr + test_str = repr(cdf['year']) + assert test_str.find('CDF filename :') >= 0 + assert test_str.find('CDF variable name: year') >= 0 + + return + + def test_vefi_load_to_pysat(self): + """Load VEFI file and perform to_pysat().""" + fname = os.path.join(pysatCDF.__path__[0], 'tests', 'test_data', + 'cnofs_vefi_bfield_1sec_20080601_v05.cdf') + + with pysatCDF.CDF(fname) as cdf: + data, meta = cdf.to_pysat() + + # Basic checks on data that was loaded + assert data['B_flag'][0] == 0 + assert int(data['altitude'][0]) == 694 + assert data['year'][0] == 2008 return diff --git a/pysatCDF/version.txt b/pysatCDF/version.txt new file mode 100644 index 0000000..a2268e2 --- /dev/null +++ b/pysatCDF/version.txt @@ -0,0 +1 @@ +0.3.1 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 
index 0000000..0e5d9d9 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +matplotlib +numpy +pandas +pysat>=3.0 +xarray diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..7f2e5b9 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,62 @@ +[metadata] +name = pysatCDF +version = file: pysatCDF/version.txt +url = https://github.com/pysat/pysatCDF +author = Russell A. Stoneback, et al. +author_email = pysat.developers@gmail.com +description = 'Simple NASA Common Data Format (CDF) File reader.' +long_description = file: README.md, CHANGELOG.md +long_description_content_type = text/markdown +classifiers = + Development Status :: 5 - Production/Stable + Topic :: Scientific/Engineering :: Physics + Intended Audience :: Science/Research + License :: OSI Approved :: BSD License + Natural Language :: English + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + Operating System :: POSIX :: Linux + Operating System :: POSIX :: MacOS X +keywords = + CDF + NASA + pysat + pandas +license_file = LICENSE + +[options] +python_requires = >= 3.5 +setup_requires = setuptools >= 38.6; pip >= 10 +packages = pysatCDF +include_package_data = True +zip_safe = False +install_requires = numpy + pandas + xarray + +[coverage:run] + +[coverage:report] + +[flake8] +max-line-length = 80 +ignore = + D200 + D202 + W503 + pysatCDF/__init__.py F401 + +exclude = conf.py + tests + __init__.py + +[tool:pytest] +markers = + all_inst: tests all instruments + download: tests for downloadable instruments + no_download: tests for instruments without download support + load_options: tests for instruments including optional load kwargs + first: first tests to run + second: second tests to run diff --git a/setup.py b/setup.py index a02e416..39dcab0 100644 --- a/setup.py +++ b/setup.py @@ -2,9 +2,7 @@ import os import sys -# import setuptools from setuptools import setup -# from setuptools.command.install import install import numpy as np import numpy.distutils @@ -14,29 +12,30 @@ from numpy.distutils.command.build import build from subprocess import call -# import setuptools - -# path to base CDF directory if CDF library already installed and you want to use it -# leave to None to install CDF library -# system will look for installed packed before installing a pysatCDF specific CDF install +# Path to base CDF directory if CDF library already installed and you +# want to use it. Leave to None to install CDF library. +# System will look for installed package before installing a +# pysatCDF specific CDF install base_cdf = None build_cdf_flag = True -# leave items below to None +# Leave items below to None # name of library, e.g. for mac os x, libcdf.a lib_name = None -# shared library name, needed for some systems ( do not use Mac OS X) +# Shared library name, needed for some systems (do not use on Mac OS X) shared_lib_name = None # CDF compile options -# note that the shared library name will be appended to extra_link_args automatically +# Note that the shared library name will be appended to +# extra_link_args automatically. 
extra_link_args = None # OS and ENV comes from CDF installation instructions os_name = None env_name = None # manual f2py command for Mac OS X -# f2py -c --include-paths $CDF_INC -I$CDF_INC $CDF_LIB/libcdf.a -m fortran_cdf fortran_cdf.f -lm -lc +# f2py -c --include-paths $CDF_INC -I$CDF_INC $CDF_LIB/libcdf.a -m +# fortran_cdf fortran_cdf.f -lm -lc # some solutions in creating this file come from # https://github.com/Turbo87/py-xcsoar/blob/master/setup.py @@ -49,31 +48,30 @@ lib_name = 'libcdf.a' # including shared lib in mac breaks things shared_lib_name = None # 'libcdf.dylib' - extra_link_args = ['-lm', '-lc'] # , '-Wl,-undefined', '-Wl,dynamic_lookup'] #'-export_dynamic'] + extra_link_args = ['-lm', '-lc'] elif (platform == 'linux') | (platform == 'linux2'): os_name = 'linux' env_name = 'gnu' lib_name = 'libcdf.a' shared_lib_name = None # 'libcdf.so' - extra_link_args = ['-lm', '-lc'] # , '-Wl,-undefined', '-Wl,dynamic_lookup'] #'-export_dynamic'] + extra_link_args = ['-lm', '-lc'] elif (platform == 'win32'): os_name = 'mingw' env_name = 'gnu' lib_name = 'libcdf.a' shared_lib_name = None # extra_link_args = ['/nodefaultlib:libcd'] - extra_link_args = [] # , '-Wl,-undefined', '-Wl,dynamic_lookup'] #'-export_dynamic'] + extra_link_args = [] else: - if (lib_name is None) or ((base_cdf is None) and ((os_name is None) - or (env_name is None) or (extra_link_args is None))): - raise ValueError('Unknown platform, please set setup.py parameters manually.') - - + check = ((os_name is None) or (env_name is None) + or (extra_link_args is None)) + if (lib_name is None) or ((base_cdf is None) and check): + estr = 'Unknown platform, please set setup.py parameters manually.' + raise ValueError(estr) BASEPATH = os.path.dirname(os.path.abspath(__file__)) CDF_PATH = os.path.join(BASEPATH, 'cdf36_3-dist') -# print (BASEPATH, CDF_PATH) class CDFBuild(build): def run(self): @@ -87,7 +85,8 @@ def CDF_build(self, ppath): # build CDF Library build_path = os.path.abspath(ppath) if platform == 'win32': - # Replace backslashes with forward slashes to avoid path being mangled by escape sequences + # Replace backslashes with forward slashes to avoid path being + # mangled by escape sequences build_path = build_path.replace('\\', '/') # print (' ') # print ("In CDF_build ", build_path, CDF_PATH, ppath) @@ -139,11 +138,13 @@ def compile2(): os.path.join(self.build_lib, 'pysatCDF', 'include')) self.mkpath(os.path.join(self.build_lib, 'pysatCDF', 'lib')) self.copy_file(os.path.join(ppath, 'lib', lib_name), - os.path.join(self.build_lib, 'pysatCDF', 'lib', lib_name)) + os.path.join(self.build_lib, 'pysatCDF', 'lib', + lib_name)) if shared_lib_name is not None: self.copy_file(os.path.join(ppath, 'lib', shared_lib_name), - os.path.join(self.build_lib, 'pysatCDF', 'lib', shared_lib_name)) + os.path.join(self.build_lib, 'pysatCDF', 'lib', + shared_lib_name)) # run original build code # build.run(self) @@ -159,25 +160,28 @@ def run(self): lib_path = os.path.abspath(os.path.join(self.build_lib, 'pysatCDF')) # set directories for the CDF library installed with pysatCDF self.extensions[0].include_dirs = [os.path.join(lib_path, 'include')] - self.extensions[0].f2py_options = ['--include-paths', os.path.join(lib_path, 'include'), '--quiet'] - self.extensions[0].extra_objects = [os.path.join(lib_path, 'lib', lib_name)] + self.extensions[0].f2py_options = ['--include-paths', + os.path.join(lib_path, 'include'), + '--quiet'] + self.extensions[0].extra_objects = [os.path.join(lib_path, 'lib', + lib_name)] # add shared library, if 
provided if shared_lib_name is not None: - self.extensions[0].extra_link_args.append(os.path.join(lib_path, - 'lib', - shared_lib_name)) + self.extensions[0].extra_link_args.append( + os.path.join(lib_path, 'lib', shared_lib_name)) build_src.run(self) return + # almost to building if not build_cdf_flag: - print (' '.join(('Using CDF installation at', base_cdf))) + print(' '.join(('Using CDF installation at', base_cdf))) f2py_cdf_include_path = os.path.join(base_cdf, 'include') f2py_cdf_lib_path = os.path.join(base_cdf, 'lib', lib_name) cmdclass = {} else: - print ('Building CDF for pysatCDF.') + print('Building CDF for pysatCDF.') cmdclass = {'build': CDFBuild, 'build_src': ExtensionBuild, } f2py_cdf_include_path = '' @@ -190,7 +194,7 @@ def run(self): name='pysatCDF.fortran_cdf', sources=[os.path.join('pysatCDF', 'fortran_cdf.f')], include_dirs=[f2py_cdf_include_path], - f2py_options=['--quiet', '--include-paths', f2py_cdf_include_path], # '--Wall', 'n', '--Wno-tabs', 'n'], + f2py_options=['--quiet', '--include-paths', f2py_cdf_include_path], extra_objects=[f2py_cdf_lib_path], extra_f77_compile_args=['--std=legacy'], extra_link_args=extra_link_args) @@ -198,47 +202,4 @@ def run(self): # call setup # -------------------------------------------------------------------------- -numpy.distutils.core.setup( - name='pysatCDF', - version='0.3.1', - packages=['pysatCDF'], - cmdclass=cmdclass, - ext_modules=[ext1, ], - description='Simple NASA Common Data Format (CDF) File reader.', - long_description=('pysatCDF is a reader for CDF files and provides ' - 'additional support for exporting to pysat data formats (not required). ' - 'The NASA CDF library is included.'), - url='http://github.com/rstoneback/pysatCDF', - # Author details - author='Russell Stoneback', - author_email='rstoneba@utdallas.edu', - # data_files = [('', ['cdf36_1-dist/CDF_copyright.txt'])], - - # Choose your license - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - # How mature is this project? Common values are - # 3 - Alpha - # 4 - Beta - # 5 - Production/Stable - 'Development Status :: 4 - Beta', - - # Indicate who your project is intended for - 'Intended Audience :: Science/Research', - 'Topic :: Scientific/Engineering :: Astronomy', - 'Topic :: Scientific/Engineering :: Physics', - 'Topic :: Scientific/Engineering :: Atmospheric Science', - - # Pick your license as you wish (should match "license" above) - 'License :: OSI Approved :: BSD License', - - # Specify the Python versions you support here. In particular, ensure - # that you indicate whether you support Python 2, Python 3 or both. - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - ], - install_requires=['numpy', 'pandas'], -) +numpy.distutils.core.setup(ext_modules=[ext1], cmdclass=cmdclass) diff --git a/test_requirements.txt b/test_requirements.txt new file mode 100644 index 0000000..5b59aa8 --- /dev/null +++ b/test_requirements.txt @@ -0,0 +1,10 @@ +coveralls +ipython +m2r2 +nose +numpydoc +pytest-cov +pytest-flake8 +pytest-ordering +sphinx +sphinx_rtd_theme
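
For reference, a minimal usage sketch of the interfaces exercised by the updated tests: dictionary-style variable access and the `to_pysat()` export added to the test suite above. It reads the VEFI test file bundled with the package and assumes pysat >= 3.0 is installed (per `requirements.txt`); this is an illustrative sketch, not part of the diff itself.

```python
import os

import pysatCDF

# Bundled VEFI test file used by the unit tests.
fname = os.path.join(pysatCDF.__path__[0], 'tests', 'test_data',
                     'cnofs_vefi_bfield_1sec_20080601_v05.cdf')

with pysatCDF.CDF(fname) as cdf:
    # Dictionary-style access to variables and their attributes,
    # matching the spacepy-like interface covered by the tests.
    year = cdf['year'][...]
    fill = cdf['year'].attrs['FILLVAL']

    # Export to pysat-compatible data and metadata (requires pysat >= 3.0).
    data, meta = cdf.to_pysat()

print(year[0], fill, data['year'][0])
```

The context-manager form mirrors the README example and ensures the underlying Fortran/C CDF handle is closed on exit.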