removed shapely for now, #7
sckott committed Nov 3, 2015
1 parent 02bc78e commit 75adb5a
Showing 8 changed files with 5 additions and 32 deletions.
Binary file modified docs/_build/doctrees/environment.pickle
Binary file modified docs/_build/doctrees/index.doctree
Binary file modified docs/_build/doctrees/species.doctree
2 changes: 0 additions & 2 deletions docs/conf.py
@@ -37,8 +37,6 @@
    'sphinx.ext.autodoc'
]

-autodoc_mock_imports = ['shapely', 'requests']
-
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

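A possible follow-up, not part of this commit: requests is still a runtime dependency, so a docs build environment that cannot import it could keep the mock for requests alone, e.g. a single line in docs/conf.py (a sketch, not what the commit does):

    # hypothetical: keep mocking only the remaining runtime dependency
    autodoc_mock_imports = ['requests']
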
10 changes: 4 additions & 6 deletions pygbif/gbifutils.py
@@ -1,15 +1,13 @@
import requests
-import shapely
-from shapely import geometry

class NoResultException(Exception):
    pass

def gbif_search_GET(url, args, **kwargs):
-    if args['geometry'] != None:
-        if args['geometry'].__class__ == list:
-            b = args['geometry']
-            args['geometry'] = geometry.box(b[0], b[1], b[2], b[3]).wkt
+    # if args['geometry'] != None:
+    # if args['geometry'].__class__ == list:
+    # b = args['geometry']
+    # args['geometry'] = geometry.box(b[0], b[1], b[2], b[3]).wkt
    out = requests.get(url, params=args, **kwargs)
    out.raise_for_status()
    stopifnot(out.headers['content-type'])
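The commented-out block above is the only place shapely was used: turning a [minx, miny, maxx, maxy] bounding box into WKT. If that behavior comes back without the shapely dependency, a dependency-free sketch could look like the following (the helper name bbox_to_wkt is hypothetical, not part of pygbif):

    def bbox_to_wkt(b):
        # Build a closed WKT polygon ring from [minx, miny, maxx, maxy],
        # replacing geometry.box(...).wkt with plain string formatting.
        minx, miny, maxx, maxy = b
        return 'POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' % (
            minx, miny, maxx, miny, maxx, maxy, minx, maxy, minx, miny)

    # bbox_to_wkt([-125.0, 38.4, -121.8, 40.9])
    # => 'POLYGON((-125.0 38.4, -121.8 38.4, -121.8 40.9, -125.0 40.9, -125.0 38.4))'
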
21 changes: 0 additions & 21 deletions pygbif/occurrences/search.py
@@ -143,9 +143,6 @@ def search(taxonKey=None, scientificName=None, country=None,
>>> occurrences.search(geometry='POLYGON((30.1 10.1, 10 20, 20 40, 40 40, 30.1 10.1))', limit=20)
>>> key = species.name_suggest(q='Aesculus hippocastanum')[0]['key']
>>> occurrences.search(taxonKey=key, geometry='POLYGON((30.1 10.1, 10 20, 20 40, 40 40, 30.1 10.1))', limit=20)
->>> ## or using bounding box, converted to WKT internally
->>> ### doesn't work yet
->>> occurrences.search(geometry=[-125.0,38.4,-121.8,40.9], limit=20)
>>>
>>> # Search on country
>>> occurrences.search(country='US', fields=['name','country'], limit=20)
@@ -235,24 +232,6 @@ def search(taxonKey=None, scientificName=None, country=None,
>>> occurrences.search(datasetKey='84c0e1a0-f762-11e1-a439-00145eb45e9a',
>>>    issue=['TAXON_MATCH_NONE','TAXON_MATCH_HIGHERRANK'])
>>>
->>> # Parsing output by issue
->>> (res = occurrences.search(geometry='POLYGON((30.1 10.1, 10 20, 20 40, 40 40, 30.1 10.1))', limit = 50))
->>> ## what do issues mean, can print whole table, or search for matches
->>> head(gbif_issues())
->>> gbif_issues()[ gbif_issues()$code %in% ['cdround','cudc','gass84','txmathi'], ]
->>> ## or parse issues in various ways
->>> ### remove data rows with certain issue classes
->>> library('magrittr')
->>> res %>% occ_issues(gass84)
->>> ### split issues into separate columns
->>> res %>% occ_issues(mutate = "split")
->>> ### expand issues to more descriptive names
->>> res %>% occ_issues(mutate = "expand")
->>> ### split and expand
->>> res %>% occ_issues(mutate = "split_expand")
->>> ### split, expand, and remove an issue class
->>> res %>% occ_issues(-cudc, mutate = "split_expand")
->>>
>>> # If you try multiple values for two different parameters you are wacked on the hand
>>> # occurrences.search(taxonKey=[2482598,2492010], collectorName=["smith","BJ Stacey"))
>>>
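The deleted docstring lines were R examples (rgbif's occ_issues/magrittr workflow) plus the not-yet-working bounding-box call; the surviving examples still show spatial searches with WKT strings. A usage sketch for the same area as the removed [-125.0,38.4,-121.8,40.9] example, with the WKT written out by hand (assuming the usual from pygbif import occurrences entry point):

    from pygbif import occurrences

    # geometry accepts a WKT polygon string; this ring is the hand-built
    # equivalent of the bounding box [-125.0, 38.4, -121.8, 40.9]
    wkt = 'POLYGON((-125.0 38.4, -121.8 38.4, -121.8 40.9, -125.0 40.9, -125.0 38.4))'
    res = occurrences.search(geometry=wkt, limit=20)
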
2 changes: 0 additions & 2 deletions requirements.txt

This file was deleted.

2 changes: 1 addition & 1 deletion setup.py
@@ -8,7 +8,7 @@
    url='http://github.com/sckott/pygbif',
    license="MIT",
    packages=['pygbif'],
-    install_requires=['requests>2.7', 'shapely>=1.5.13'],
+    install_requires=['requests>2.7'],
    classifiers=(
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Science/Research',
