Merge pull request #64 from knaaptime/simplify_deps
Simplify deps
sjsrey committed Apr 18, 2020
2 parents daf18da + 07fdb79 commit 3b82fc7
Showing 11 changed files with 10 additions and 84 deletions.
1 change: 0 additions & 1 deletion .travis.yml
@@ -32,7 +32,6 @@ install:
 
 script:
   - if [[ $TRAVIS_JOB_NAME == python-* ]]; then
-      python -c "import os; from tobler.data import store_rasters; store_rasters(os.getcwd())";
       travis_wait 45 pytest --cov tobler ;
     fi

2 changes: 0 additions & 2 deletions environment.yml
@@ -3,7 +3,6 @@ channels:
   - conda-forge
   - defaults
 dependencies:
-  - python-dateutil<=2.8.0
   - jupyterlab
   - numpy
   - geopandas
@@ -15,6 +14,5 @@ dependencies:
   - scikit-learn
   - scipy
   - libpysal
-  - quilt3 >=3.1.11
   - tqdm
   - pip
2 changes: 0 additions & 2 deletions requirements.txt
@@ -5,7 +5,5 @@ rasterio
 scipy
 statsmodels
 rasterstats
-python-dateutil<=2.8.0
-quilt3==3.1.8
 libpysal
 tqdm
3 changes: 2 additions & 1 deletion requirements_tests.txt
@@ -14,4 +14,5 @@ pytest-mpl
 pytest-cov
 twine
 xgboost
-shap >=0.33
+shap >=0.33
+quilt3 >=3.1.11
1 change: 0 additions & 1 deletion tobler/__init__.py
@@ -5,6 +5,5 @@
 """
 from . import area_weighted
-from . import data
 from . import dasymetric
 from . import model
6 changes: 1 addition & 5 deletions tobler/area_weighted/vectorized_raster_interpolation.py
@@ -23,7 +23,6 @@
 
 import statsmodels.formula.api as smf
 from statsmodels.genmod.families import Poisson, Gaussian, NegativeBinomial
-from ..data import fetch_quilt_path
 
 __all__ = [
     "getFeatures",
@@ -70,7 +69,6 @@ def fast_append_profile_in_gdf(geodataframe, raster_path, force_crs_match=True):
     """
 
     _check_presence_of_crs(geodataframe)
-    raster_path = fetch_quilt_path(raster_path)
     if force_crs_match:
         with rasterio.open(raster_path) as raster:
             # raster =
@@ -237,7 +235,6 @@ def return_weights_from_xgboost(
     2) The pixel value, usually, ranges from 0 to 255. That is why the default of 'n_pixels_option_values' is 256.
     3) The returning weights represent the average of the Shapley's values from each feature.
     """
-    raster_path = fetch_quilt_path(raster_path)
     try:
         import xgboost as xgb
         import shap
@@ -378,7 +375,7 @@ def create_non_zero_population_by_pixels_locations(
         )
 
     else:
-        with rasterio.open(fetch_quilt_path(raster)) as raster:
+        with rasterio.open(raster) as raster:
             with warnings.catch_warnings():
                 warnings.simplefilter("ignore")
                 geodataframe_projected = geodataframe.to_crs(crs=raster.crs.data)
@@ -527,7 +524,6 @@ def calculate_interpolated_population_from_correspondence_table(
 
     final_geodataframe = geodataframe.copy()[["geometry"]]
     pop_final = np.empty(len(geodataframe))
-    raster = fetch_quilt_path(raster)
     with rasterio.open(raster) as raster:
 
         pbar = tqdm(total=len(geodataframe), desc="Estimating target polygon values")
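With the `fetch_quilt_path` indirection removed, these helpers now expect a plain local path that rasterio can open. A minimal sketch of a call after this change, assuming the module path matches the file location and using hypothetical file names:

```python
# Hedged sketch (hypothetical file names): callers now pass a local raster
# path directly; nothing is resolved through quilt3 anymore.
import geopandas as gpd
from tobler.area_weighted.vectorized_raster_interpolation import fast_append_profile_in_gdf

tracts = gpd.read_file("tracts.shp")            # hypothetical vector data
raster_path = "nlcd_2011_landcover.tif"         # hypothetical local raster

# Appends raster summary columns to the GeoDataFrame, reprojecting if needed.
profiled = fast_append_profile_in_gdf(tracts, raster_path, force_crs_match=True)
```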
8 changes: 3 additions & 5 deletions tobler/dasymetric/masked_area_interpolate.py
@@ -23,9 +23,7 @@ def masked_area_interpolate(
     target_df : geopandas.GeoDataFrame
         target geometries that will form the new representation of the input data
     raster : str
-        path to raster file that contains ancillary data.
-        alternatively a user can pass `ncld_2001` or `nlcd_2011` to use built-in data from the
-        National Land Cover Database
+        path to raster file that contains ancillary data
     codes : list of ints
         list of pixel values that should be considered part of the mask (the default is None).
         If no codes are passed, this defaults to [21, 22, 23, 24] which are the developed land use
@@ -44,14 +42,14 @@
     Returns
     -------
     geopandas.GeoDataFrame
-        GeoDataFrame with geometries matching the target_df and extensive and intensive
+        GeoDataFrame with geometries matching the target_df and extensive and intensive
         variables as the columns
     """
     if not codes:
         codes = [21, 22, 23, 24]
     if not raster:
-        raster = 'nlcd_2011'
+        raise IOError('You must pass the path to a raster that can be read with rasterio')
 
     if not tables:
         tables = area_tables_raster(
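Because `raster` no longer defaults to a bundled `'nlcd_2011'` dataset, callers must pass a path explicitly or the function raises `IOError`. A hedged call sketch; the file names are hypothetical and the `extensive_variables` keyword is assumed from tobler's interpolation API rather than shown in this diff:

```python
# Hedged sketch (hypothetical file names; extensive_variables is an assumed keyword).
import geopandas as gpd
from tobler.dasymetric import masked_area_interpolate

source = gpd.read_file("source_tracts.shp")     # hypothetical source geometries
target = gpd.read_file("target_zones.shp")      # hypothetical target geometries

# Omitting `raster` now raises IOError instead of silently using built-in NLCD data.
interpolated = masked_area_interpolate(
    source,
    target,
    raster="nlcd_2011_landcover.tif",           # local raster readable by rasterio
    codes=[21, 22, 23, 24],                     # developed land-cover classes
    extensive_variables=["population"],         # assumed keyword name
)
```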
57 changes: 0 additions & 57 deletions tobler/data.py

This file was deleted.
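With `tobler/data.py` removed, `store_rasters` and `fetch_quilt_path` no longer exist; users who still want quilt-hosted rasters can pull them with quilt3 directly and then hand the local path to tobler. A hedged sketch using generic quilt3 calls; the package name and registry below are placeholders, not values confirmed by this commit:

```python
# Hedged sketch: package name and registry are placeholders.
import quilt3

pkg = quilt3.Package.browse("rasters/nlcd", registry="s3://example-bucket")  # placeholders
pkg.fetch("./nlcd_rasters")  # download the package contents to a local directory

# The downloaded GeoTIFF path is then passed to tobler functions directly.
```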

7 changes: 4 additions & 3 deletions tobler/model/models.py
@@ -17,7 +17,7 @@
 def glm_pixel_adjusted(
     source_df=None,
     target_df=None,
-    raster="nlcd_2011",
+    raster=None,
     raster_codes=None,
     variable=None,
     formula=None,
@@ -37,10 +37,9 @@ def glm_pixel_adjusted(
         geodataframe containing source original data to be represented by another geometry
     target_df : geopandas.GeoDataFrame, required
         geodataframe containing target boundaries that will be used to represent the source data
-    raster : str, required (default="nlcd_2011")
+    raster : str, required
         path to raster file that will be used to input data to the regression model.
         i.e. a coefficients refer to the relationship between pixel counts and population counts.
-        Defaults to 2011 NLCD
     raster_codes : list, required (default =[21, 22, 23, 24])
         list of integers that represent different types of raster cells.
         Defaults to [21, 22, 23, 24] whichare considered developed land types in the NLCD
@@ -60,6 +59,8 @@
     """
     if not raster_codes:
         raster_codes = [21, 22, 23, 24]
+    if not raster:
+        raise IOError('You must provide the path to a raster that can be read with rasterio')
 
     # build weights from raster and vector data
     weights = return_weights_from_regression(
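`glm_pixel_adjusted` likewise now requires an explicit raster path. A hedged call sketch built from the parameters visible in this diff (`source_df`, `target_df`, `raster`, `raster_codes`, `variable`); file and column names are hypothetical:

```python
# Hedged sketch (hypothetical file and column names).
import geopandas as gpd
from tobler.model import glm_pixel_adjusted

source = gpd.read_file("source_tracts.shp")    # hypothetical
target = gpd.read_file("target_zones.shp")     # hypothetical

estimates = glm_pixel_adjusted(
    source_df=source,
    target_df=target,
    raster="nlcd_2011_landcover.tif",          # now required; no NLCD default
    raster_codes=[21, 22, 23, 24],             # developed land-cover classes
    variable="population",                     # column in source_df (assumed)
)
```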
5 changes: 0 additions & 5 deletions tobler/tests/test_download.py

This file was deleted.

2 changes: 0 additions & 2 deletions tobler/tests/test_pysal_integration.py
@@ -1,8 +1,6 @@
"""lightweight test for pysal metapckage that functions import."""

def test_imports():
import quilt3
from tobler.dasymetric import masked_area_interpolate
from tobler.area_weighted import area_interpolate
from tobler.data import store_rasters
from tobler.model import glm, glm_pixel_adjusted
