
Commit

Merge 2a20ef9 into f0112c0
hover2pi committed Nov 8, 2018
2 parents f0112c0 + 2a20ef9 commit 1aead8c
Showing 10 changed files with 172 additions and 392 deletions.
169 changes: 25 additions & 144 deletions .travis.yml
@@ -1,154 +1,35 @@
# We set the language to c because python isn't supported on the MacOS X nodes
# on Travis. However, the language ends up being irrelevant anyway, since we
# install Python ourselves using conda.
language: c
language: generic

os:
- linux

# Setting sudo to false opts in to Travis-CI container-based builds.
sudo: false

# The apt packages below are needed for sphinx builds. A full list of packages
# that can be included can be found here:
#
# https://github.com/travis-ci/apt-package-whitelist/blob/master/ubuntu-precise

addons:
apt:
packages:
- graphviz
- texlive-latex-extra
- dvipng

env:
global:

# The following versions are the 'default' for tests, unless
# overridden underneath. They are defined here in order to save having
# to repeat them for all configurations.
- PYTHON_VERSION=3.6
- NUMPY_VERSION=stable
- ASTROPY_VERSION=stable
- MAIN_CMD='python setup.py'
- SETUP_CMD='test'
- PIP_DEPENDENCIES=''
- EVENT_TYPE='pull_request push'


# For this package-template, we include examples of Cython modules,
# so Cython is required for testing. If your package does not include
# Cython code, you can set CONDA_DEPENDENCIES=''
- CONDA_DEPENDENCIES='Cython'

# List other runtime dependencies for the package that are available as
# pip packages here.
# - PIP_DEPENDENCIES=''

# Conda packages for affiliated packages are hosted in channel
# "astropy" while builds for astropy LTS with recent numpy versions
# are in astropy-ci-extras. If your package uses either of these,
# add the channels to CONDA_CHANNELS along with any other channels
# you want to use.
- CONDA_CHANNELS='astropy-ci-extras astropy'

# If there are matplotlib or other GUI tests, uncomment the following
# line to use the X virtual framebuffer.
# - SETUP_XVFB=True

matrix:
# Make sure that egg_info works without dependencies
- PYTHON_VERSION=2.7 SETUP_CMD='egg_info'
- PYTHON_VERSION=3.4 SETUP_CMD='egg_info'
- PYTHON_VERSION=3.5 SETUP_CMD='egg_info'
- PYTHON_VERSION=3.6 SETUP_CMD='egg_info'

matrix:

# Don't wait for allowed failures
fast_finish: true
cache:
directories:
- $HOME/miniconda3

include:
# Try MacOS X
- os: osx
env: SETUP_CMD='test'
before_cache:
- rm -rf $HOME/miniconda3/pkgs/cache
- rm -rf $HOME/miniconda3/envs/lcl

# Do a coverage test.
- os: linux
env: SETUP_CMD='test --coverage'

# Check for sphinx doc build warnings - we do this first because it
# may run for a long time
- os: linux
env: SETUP_CMD='build_docs -w'

# Now try Astropy dev with the latest Python and LTS with Python 2.7 and 3.x.
- os: linux
env: ASTROPY_VERSION=development
EVENT_TYPE='pull_request push cron'
- os: linux
env: PYTHON_VERSION=2.7 ASTROPY_VERSION=lts
- os: linux
env: ASTROPY_VERSION=lts

# Try all python versions and Numpy versions. Since we can assume that
# the Numpy developers have taken care of testing Numpy with different
# versions of Python, we can vary Python and Numpy versions at the same
# time.

- os: linux
env: PYTHON_VERSION=2.7 NUMPY_VERSION=1.9
- os: linux
env: PYTHON_VERSION=3.4 NUMPY_VERSION=1.10
- os: linux
env: PYTHON_VERSION=3.5 NUMPY_VERSION=1.11
- os: linux
env: NUMPY_VERSION=1.12

# Try numpy pre-release
- os: linux
env: NUMPY_VERSION=prerelease
EVENT_TYPE='pull_request push cron'
os:
- linux
- osx

# Do a PEP8 test with pycodestyle
- os: linux
env: MAIN_CMD='pycodestyle packagename --count' SETUP_CMD=''
env:
- PYTHON_VERSION="3.5"
- PYTHON_VERSION="3.6"
- PYTHON_VERSION="3.7"

allow_failures:
# Do a PEP8 test with pycodestyle
# (allowed to fail unless your code is completely compliant)
- os: linux
env: MAIN_CMD='pycodestyle packagename --count' SETUP_CMD=''
before_install:
- export PATH="$HOME/miniconda3/bin:$PATH"
- chmod +x ci/install_conda.sh
- chmod +x ci/setup_conda_env.sh

install:

# We now use the ci-helpers package to set up our testing environment.
# This is done by using Miniconda and then using conda and pip to install
# dependencies. Which dependencies are installed using conda and pip is
# determined by the CONDA_DEPENDENCIES and PIP_DEPENDENCIES variables,
# which should be space-delimited lists of package names. See the README
# in https://github.com/astropy/ci-helpers for information about the full
# list of environment variables that can be used to customize your
# environment. In some cases, ci-helpers may not offer enough flexibility
# in how to install a package, in which case you can have additional
# commands in the install: section below.

- git clone --depth 1 git://github.com/astropy/ci-helpers.git
- source ci-helpers/travis/setup_conda.sh

# As described above, using ci-helpers, you should be able to set up an
# environment with dependencies installed using conda and pip, but in some
# cases this may not provide enough flexibility in how to install a
# specific dependency (and it will not be able to install non-Python
# dependencies). Therefore, you can also include commands below (as
# well as at the start of the install section or in the before_install
# section if they are needed before setting up conda) to install any
# other dependencies.

- ci/install_conda.sh
- ci/setup_conda_env.sh
- source activate lcl
- python setup.py install

script:
- $MAIN_CMD $SETUP_CMD

after_success:
# If coveralls.io is set up for this package, uncomment the line below.
# The coveragerc file may be customized as needed for your package.
# - if [[ $SETUP_CMD == *coverage* ]]; then coveralls --rcfile='packagename/tests/coveragerc'; fi
- pytest --cov
- coveralls
10 changes: 7 additions & 3 deletions README.md
@@ -1,16 +1,20 @@

# How to use `locals`

[![Build Status](https://travis-ci.org/hover2pi/locals.svg?branch=master)](https://travis-ci.org/hover2pi/locals)
[![Coverage Status](https://coveralls.io/repos/github/hover2pi/locals/badge.svg?branch=master&service=github)](https://coveralls.io/github/hover2pi/locals?branch=master&service=github)
[![Documentation Status](https://readthedocs.org/projects/locals/badge/?version=latest)](https://locals.readthedocs.io/en/latest/?badge=latest)

`locals` is a pure Python package that ingests JWST Wide-Field Slitless Spectroscopy data and returns a source catalog of all the low-mass stars in the field along with their calculated fundamental and secondary parameters.

### Requirements
- pip install SEDkit
- pip install sedkit
- pip install bokeh


```python
# Imports
from locals import source, catalog
from SEDkit import sed, spectrum, synphot
from sedkit import sed, spectrum, synphot
import astropy.units as q
import astropy.table as at
import numpy as np
```
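
The README example above is truncated in this diff view. As a rough illustration of where those imports lead, here is a minimal usage sketch based on the `SourceCatalog` class in `locals/catalog.py` below; it is not part of the commit, the directory path is a placeholder, and the `results` attribute is assumed from the `sedkit` `Catalog` base class referenced in this change.

```python
# Minimal usage sketch (not part of the commit). The directory is a
# placeholder and must contain the JWST pipeline products SourceCatalog
# expects: an .ecsv source list, *_x1d.fits spectra, and *_phot.csv
# photometry files.
from locals.catalog import SourceCatalog

cat = SourceCatalog('path/to/pipeline_output', verbose=True)

# SourceCatalog inherits from sedkit's Catalog, so the assembled results
# table can be inspected directly.
print(cat.results)
```
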
26 changes: 26 additions & 0 deletions ci/install_conda.sh
@@ -0,0 +1,26 @@
#!/bin/bash

if [ -d "$HOME/miniconda3" ] && [ -e "$HOME/miniconda3/bin/conda" ]; then
echo "Miniconda install already present from cache: $HOME/miniconda3"
rm -rf $HOME/miniconda3/envs/hosts # Just in case...
else
echo "Installing Miniconda..."
rm -rf $HOME/miniconda3 # Just in case...

if [ "${TRAVIS_OS_NAME}" == "osx" ]; then
wget http://repo.continuum.io/miniconda/Miniconda3-latest-MacOSX-x86_64.sh -O miniconda.sh || exit 1
else
wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh || exit 1
fi

bash miniconda.sh -b -p "$HOME/miniconda3" || exit 1
fi

echo "Configuring Miniconda..."
conda config --set ssl_verify false || exit 1
conda config --set always_yes true --set changeps1 false || exit 1

echo "Updating Miniconda"
conda update conda
conda update --all
conda info -a || exit 1
9 changes: 9 additions & 0 deletions ci/setup_conda_env.sh
@@ -0,0 +1,9 @@
#!/bin/bash

echo "Creating a Python $PYTHON_VERSION environment"
conda create -n lcl python=$PYTHON_VERSION || exit 1
source activate lcl

echo "Installing packages..."
conda install flake8 beautifulsoup4 lxml numpy astropy h5py
pip install astroquery sedkit svo_filters pytest pytest-cov coveralls
47 changes: 23 additions & 24 deletions locals/catalog.py
@@ -9,30 +9,31 @@
- add point source to the output catalog of all collected data and derived fundamental parameters.
"""
import os
import numpy as np
import glob
import pkg_resources
import h5py

import astropy.units as q
import astropy.table as at
import astropy.coordinates as coord
import astropy.io.ascii as ii
from astropy.io import fits
from astroquery.vizier import Vizier
from SEDkit import SED, SEDCatalog, BTSettl
import numpy as np
from sedkit import SED, Catalog, BTSettl

from . import colors
from . import make_data


class SourceCatalog(SEDCatalog):

class SourceCatalog(Catalog):
"""
A class to ingest a JWST pipeline output to produce a source catalog
"""

def __init__(self, dirpath, color_cut=None, verbose=False):
"""
Initialize the SourceCatalog object
Parameters
----------
dirpath: str
@@ -42,79 +43,77 @@ def __init__(self, dirpath, color_cut=None, verbose=False):
"""
# Inherit from sedkit.Catalog
super().__init__()

# The path to the pipeline output directory
self.dirpath = dirpath
self.verbose = verbose

# Get the source catalog (_cat.ecsv)
self.cat_file = glob.glob(os.path.join(self.dirpath,'*.ecsv'))[0]
self.source_list = at.Table.read(self.cat_file, format='ascii.ecsv')
self.x1d_files = glob.glob(os.path.join(self.dirpath,'*_x1d.fits'))
self.phot_files = glob.glob(os.path.join(self.dirpath,'*_phot.csv'))

# Put all photometry into one table
self.photometry = at.vstack([ii.read(f) for f in self.phot_files])

# Load BT Settl grid
bt = BTSettl(resolution=1000, trim=(0.3*q.um, 3*q.um))

# Make a Source object for each row in the source_list
for n,row in enumerate(self.source_list):
ra = row['icrs_centroid'].ra
dec = row['icrs_centroid'].dec
name = 'Source {}'.format(row['id'])
src = SED(ra=ra, dec=dec, name=name, verbose=self.verbose,
**{k:row[k] for k in row.colnames})

# Add the JWST photometry for this source
for phot in self.photometry:
if phot['id'] == row['id']:
src.add_photometry(phot['band'], phot['magnitude'],
phot['magnitude_unc'])

# # Look for photometry (Need real coordinates for this)
# src.find_SDSS()
# src.find_2MASS()
# src.find_WISE()
# src.find_PanSTARRS()

# Look for distance
# src.find_Gaia()

# Check to see if the source makes the color cut
src.photometry.add_index('band')
keep = colors.in_color_range(src.photometry, color_cut)
if keep:

# Add observed WFSS spectra to the source
for x1d in self.x1d_files:
header = fits.getheader(x1d, ext=n+1)
funit = q.erg/q.s/q.cm**2/q.AA
src.add_spectrum_file(x1d, q.um, funit, ext=n+1,
name=header['PUPIL'])

# Save the params for verification
src.Teff_model = header['TEFF']
src.logg_model = header['LOGG']
src.FeH_model = header['FEH']

# Fit a blackbody
# src.fit_blackbody()

# Fit spectral type
src.fit_spectral_type()
# src.fit_spectral_index()

# Fit model grid
src.fit_modelgrid(bt)

# Add the source to the catalog
self.add_SED(src)

print("{}/{} sources added to catalog{}"\
.format(len(self.results), len(self.source_list),
" after applying '{}' color cuts".format(color_cut)
if color_cut is not None else ''))
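
Building on the sketch after the README section, the `color_cut` path through the constructor above could be exercised as follows; the accepted cut values come from `locals.colors.in_color_range`, which is not part of this diff, so the cut name below is purely hypothetical.

```python
from locals.catalog import SourceCatalog

# 'M_dwarf' is a hypothetical cut name; valid values are defined by
# locals.colors.in_color_range, which this diff does not show.
cat = SourceCatalog('path/to/pipeline_output', color_cut='M_dwarf')

# The constructor reports how many sources survive the cut, e.g.
# "12/40 sources added to catalog after applying 'M_dwarf' color cuts"
print(len(cat.results), 'sources kept after the color cut')
```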

