From ad845a46bb17e435e6349dafb3f5e3ec1ef511f9 Mon Sep 17 00:00:00 2001 From: Alice Rottersman Date: Wed, 6 Sep 2017 16:24:05 -0400 Subject: [PATCH 1/3] Geoprocessing API: Make snappingOn option for RWD --- src/mmw/apps/geoprocessing_api/views.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/mmw/apps/geoprocessing_api/views.py b/src/mmw/apps/geoprocessing_api/views.py index 7b695a9f6..6756e9880 100644 --- a/src/mmw/apps/geoprocessing_api/views.py +++ b/src/mmw/apps/geoprocessing_api/views.py @@ -28,8 +28,7 @@ def start_rwd(request, format=None): created = now() location = request.data['location'] data_source = request.data.get('dataSource', 'drb') - - snapping = request.data['snappingOn'] + snapping = request.data.get('snappingOn', False) job = Job.objects.create(created_at=created, result='', error='', traceback='', user=user, status='started') From 684a95efee91215d32b1565ce6368a79e3f127d1 Mon Sep 17 00:00:00 2001 From: Alice Rottersman Date: Wed, 6 Sep 2017 16:26:21 -0400 Subject: [PATCH 2/3] Geoprocessing API: Add django-rest-swagger and markdown packages * add django-rest-swagger to document the public API * add markdown to allow using markdown in the docstrings that show up in swagger * setup an initial swagger config with docs available at /api/docs --- src/mmw/apps/geoprocessing_api/urls.py | 3 ++- src/mmw/mmw/settings/base.py | 21 +++++++++++++++++++++ src/mmw/mmw/urls.py | 12 ++++++++---- src/mmw/requirements/base.txt | 2 ++ 4 files changed, 33 insertions(+), 5 deletions(-) diff --git a/src/mmw/apps/geoprocessing_api/urls.py b/src/mmw/apps/geoprocessing_api/urls.py index 602b865de..21e4d3d6e 100644 --- a/src/mmw/apps/geoprocessing_api/urls.py +++ b/src/mmw/apps/geoprocessing_api/urls.py @@ -3,7 +3,7 @@ from __future__ import unicode_literals from __future__ import division -from django.conf.urls import patterns, url +from django.conf.urls import include, patterns, url from apps.modeling.views import get_job from apps.modeling.urls 
import uuid_regex @@ -11,6 +11,7 @@ urlpatterns = patterns( '', + url(r'^docs/', include('rest_framework_swagger.urls')), url(r'analyze/land/$', views.start_analyze_land, name='start_analyze_land'), url(r'analyze/soil/$', views.start_analyze_soil, diff --git a/src/mmw/mmw/settings/base.py b/src/mmw/mmw/settings/base.py index a18a613a9..cebaef50e 100644 --- a/src/mmw/mmw/settings/base.py +++ b/src/mmw/mmw/settings/base.py @@ -291,6 +291,7 @@ def get_env_setting(setting): THIRD_PARTY_APPS = ( 'rest_framework', + 'rest_framework_swagger', 'registration', ) @@ -305,6 +306,26 @@ def get_env_setting(setting): ] } +SWAGGER_SETTINGS = { + 'exclude_namespaces': ['bigcz', + 'mmw', + 'user'], + 'doc_expansion': 'list', + 'info': { + 'description': 'The Model My Watershed API allows ' + 'you to delineate watersheds and analyze ' + 'geo-data for watersheds and arbitrary areas. ' + 'You can read more about the work at ' + '' + 'WikiWatershed ' + 'or use the ' + 'web app.', + 'license': 'Apache 2.0', + 'licenseUrl': 'http://www.apache.org/licenses/LICENSE-2.0.html', + 'title': 'Model My Watershed API', + }, +} + # registration ACCOUNT_ACTIVATION_DAYS = 7 # One-week activation window. 
diff --git a/src/mmw/mmw/urls.py b/src/mmw/mmw/urls.py index 54e3042d7..512efca31 100644 --- a/src/mmw/mmw/urls.py +++ b/src/mmw/mmw/urls.py @@ -29,10 +29,14 @@ namespace='rest_framework')), url(r'^admin/', include(admin.site.urls)), url(r'^accounts/', include(registration.backends.default.urls)), - url(r'^bigcz/', include(apps.bigcz.urls)), - url(r'^mmw/geocode/', include(apps.geocode.urls)), - url(r'^mmw/modeling/', include(apps.modeling.urls)), + url(r'^bigcz/', include(apps.bigcz.urls, + namespace='bigcz')), + url(r'^mmw/geocode/', include(apps.geocode.urls, + namespace='mmw')), + url(r'^mmw/modeling/', include(apps.modeling.urls, + namespace='mmw')), url(r'^api/', include(apps.geoprocessing_api.urls)), url(r'^micro/', include(apps.water_balance.urls)), - url(r'^user/', include(apps.user.urls)) + url(r'^user/', include(apps.user.urls, + namespace='user')) ) diff --git a/src/mmw/requirements/base.txt b/src/mmw/requirements/base.txt index 0c8235888..4a8ffe991 100644 --- a/src/mmw/requirements/base.txt +++ b/src/mmw/requirements/base.txt @@ -9,6 +9,8 @@ django-registration-redux==1.2 python-omgeo==2.0.0 rauth==0.7.1 djangorestframework-gis==0.8.2 +django-rest-swagger==0.3.10 +markdown==2.6.9 tr55==1.3.0 gwlf-e==0.6.2 requests==2.9.1 From c89db8720931047b226c0895816d19925e0cad40 Mon Sep 17 00:00:00 2001 From: Alice Rottersman Date: Wed, 6 Sep 2017 16:50:58 -0400 Subject: [PATCH 3/3] Geoprocessing API: Document API views * Write up an initial pass for the API views' documentation. Includes job descriptions as well as example requests, request types and formats, and example responses. * NB: The example responses make the docstrings incredibly lengthy. While they appear nicely in Swagger under
elements, we may need to consider declaring them outside the views file --- src/mmw/apps/geoprocessing_api/views.py | 659 ++++++++++++++++++++++++ src/mmw/apps/modeling/views.py | 26 + 2 files changed, 685 insertions(+) diff --git a/src/mmw/apps/geoprocessing_api/views.py b/src/mmw/apps/geoprocessing_api/views.py index 6756e9880..148d62c5a 100644 --- a/src/mmw/apps/geoprocessing_api/views.py +++ b/src/mmw/apps/geoprocessing_api/views.py @@ -23,6 +23,131 @@ def start_rwd(request, format=None): """ Starts a job to run Rapid Watershed Delineation on a point-based location. + + Selects the nearest downhill point on the medium resolution flow lines of + either the Delaware River Basin high resolution stream network or the + National Hydrography Dataset (NHDplus v2). The watershed area upstream of + this point is automatically delineated using the 10m resolution national + elevation model or the 30m resolution flow direction grid. + + For more information, see the + [technical documentation](https://wikiwatershed.org/ + documentation/mmw-tech/#delineate-watershed). + + ## Request Body + + **Required** + + `location` (`array[number]`): The point to delineate. + Format is `[lat, lng]` + + **Optional** + + `dataSource` (`string`): Which resolution to delineate with. Either + "drb" to use Delaware High Resolution (10m) + or "nhd" to use Continental US High Resolution (30m). + Default is "drb". Points must be in the Delaware River + Basin to use "drb", and in the Continental US to use "nhd" + + `snappingOn` (`boolean`): Snap to the nearest stream? Default is false + + **Example** + + { + "location": [39.97185812402583,-75.16742706298828], + "snappingOn": true, + "dataSource":"nhd" + } + + ## Response + + You can use the URL provided in the response's `Location` header + to poll for the job's results. + + + **Example of a completed job's `result`** + + +
+ + { + "watershed": { + "type": "FeatureCollection", + "features": [ + { + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + -75.24776006176894, + 39.98166667527191 + ], + [ + -75.24711191361516, + 39.98166667527191 + ] + ], ... + ] + }, + "type": "Feature", + "properties": { + "Avgslp": 0.053271397948265, + "BasinLen": 0.596192138671875, + "RR": 0.022393757477403, + "Area": 0.06322861, + "AvgOLF": 0.10568827427475, + "DrnDen": 4.730893785815718, + "BR": 13.350982666015625, + "Strord": 1, + "StrLen": 0.299127838134766, + "GRIDCODE": 1 + } + } + ] + }, + "input_pt": { + "type": "FeatureCollection", + "features": [ + { + "geometry": { + "type": "Point", + "coordinates": [ + -75.24938043215342, + 39.97875000854888 + ] + }, + "type": "Feature", + "properties": { + "Lat": 39.978697, + "Dist_moved": 1, + "Lon": -75.24931, + "ID": 1 + } + } + ] + } + } + +
+ --- + type: + job: + required: true + type: string + status: + required: true + type: string + + omit_serializer: true + parameters: + - name: body + required: true + paramType: body + consumes: + - application/json + produces: + - application/json """ user = request.user if request.user.is_authenticated() else None created = now() @@ -51,6 +176,179 @@ def start_rwd(request, format=None): @decorators.api_view(['POST']) @decorators.permission_classes((AllowAny, )) def start_analyze_land(request, format=None): + """ + Starts a job to produce a land-use histogram for a given area. + + Uses the National Land Cover Database (NLCD 2011) + + For more information, see the + [technical documentation](https://wikiwatershed.org/ + documentation/mmw-tech/#overlays-tab-coverage). + + ## Response + + You can use the URL provided in the response's `Location` + header to poll for the job's results. + + + **Example of a completed job's `result`** + + +
+ + { + "survey": { + "displayName": "Land", + "name": "land", + "categories": [ + { + "nlcd": 43, + "code": "mixed_forest", + "type": "Mixed Forest", + "coverage": 0, + "area": 0 + }, + { + "nlcd": 71, + "code": "grassland", + "type": "Grassland/Herbaceous", + "coverage": 0, + "area": 0 + }, + { + "nlcd": 41, + "code": "deciduous_forest", + "type": "Deciduous Forest", + "coverage": 0, + "area": 0 + }, + { + "nlcd": 42, + "code": "evergreen_forest", + "type": "Evergreen Forest", + "coverage": 0, + "area": 0 + }, + { + "nlcd": 11, + "code": "open_water", + "type": "Open Water", + "coverage": 0, + "area": 0 + }, + { + "nlcd": 12, + "code": "perennial_ice", + "type": "Perennial Ice/Snow", + "coverage": 0, + "area": 0 + }, + { + "nlcd": 81, + "code": "pasture", + "type": "Pasture/Hay", + "coverage": 0, + "area": 0 + }, + { + "nlcd": 82, + "code": "cultivated_crops", + "type": "Cultivated Crops", + "coverage": 0, + "area": 0 + }, + { + "nlcd": 52, + "code": "shrub", + "type": "Shrub/Scrub", + "coverage": 0, + "area": 0 + }, + { + "nlcd": 21, + "code": "developed_open", + "type": "Developed, Open Space", + "coverage": 0.030303030303030304, + "area": 2691.709835265247 + }, + { + "nlcd": 22, + "code": "developed_low", + "type": "Developed, Low Intensity", + "coverage": 0.18181818181818182, + "area": 16150.259011591483 + }, + { + "nlcd": 23, + "code": "developed_med", + "type": "Developed, Medium Intensity", + "coverage": 0.5151515151515151, + "area": 45759.0671995092 + }, + { + "nlcd": 24, + "code": "developed_high", + "type": "Developed, High Intensity", + "coverage": 0.2727272727272727, + "area": 24225.388517387222 + }, + { + "nlcd": 90, + "code": "woody_wetlands", + "type": "Woody Wetlands", + "coverage": 0, + "area": 0 + }, + { + "nlcd": 95, + "code": "herbaceous_wetlands", + "type": "Emergent Herbaceous Wetlands", + "coverage": 0, + "area": 0 + }, + { + "nlcd": 31, + "code": "barren_land", + "type": "Barren Land (Rock/Sand/Clay)", + "coverage": 0, + "area": 0 + } + ] 
+ } + } + +
+ --- + type: + job: + required: true + type: string + status: + required: true + type: string + + omit_serializer: true + parameters: + - name: body + description: A valid single-ringed Multipolygon GeoJSON + representation of the shape to analyze. + See the GeoJSON spec + https://tools.ietf.org/html/rfc7946#section-3.1.7 + paramType: body + type: object + - name: wkaoi + description: The table and ID for a well-known area of interest, + such as a HUC. + Format "table__id", eg. "huc12__55174" will analyze + the HUC-12 City of Philadelphia-Schuylkill River. + type: string + paramType: query + + consumes: + - application/json + produces: + - application/json + """ user = request.user if request.user.is_authenticated() else None wkaoi = request.query_params.get('wkaoi', None) @@ -67,6 +365,110 @@ def start_analyze_land(request, format=None): @decorators.api_view(['POST']) @decorators.permission_classes((AllowAny, )) def start_analyze_soil(request, format=None): + """ + Starts a job to produce a soil-type histogram for a given area. + + Uses the Hydrologic Soil Groups From USDA gSSURGO 2016 + + For more information, see the + [technical documentation](https://wikiwatershed.org/ + documentation/mmw-tech/#overlays-tab-coverage). + + ## Response + + You can use the URL provided in the response's `Location` + header to poll for the job's results. + + + **Example of a completed job's `result`** + + +
+ + { + "survey": { + "displayName": "Soil", + "name": "soil", + "categories": [ + { + "code": "a", + "type": "A - High Infiltration", + "coverage": 0.000010505194818837915, + "area": 897.253981351988 + }, + { + "code": "b", + "type": "B - Moderate Infiltration", + "coverage": 0.036474036411005245, + "area": 3115265.8232541024 + }, + { + "code": "c", + "type": "C - Slow Infiltration", + "coverage": 0.9465810843462092, + "area": 80847967.24370223 + }, + { + "code": "d", + "type": "D - Very Slow Infiltration", + "coverage": 0.00012606233782605497, + "area": 10767.047776223857 + }, + { + "code": "ad", + "type": "A/D - High/Very Slow Infiltration", + "coverage": 0, + "area": 0 + }, + { + "code": "bd", + "type": "B/D - Medium/Very Slow Infiltration", + "coverage": 0.0017753779243836077, + "area": 151635.92284848596 + }, + { + "code": "cd", + "type": "C/D - Medium/Very Slow Infiltration", + "coverage": 0.015032933785757057, + "area": 1283970.4473146948 + } + ] + } + } + +
+ + --- + type: + job: + required: true + type: string + status: + required: true + type: string + + omit_serializer: true + parameters: + - name: body + description: A valid single-ringed Multipolygon GeoJSON + representation of the shape to analyze. + See the GeoJSON spec + https://tools.ietf.org/html/rfc7946#section-3.1.7 + paramType: body + type: object + - name: wkaoi + description: The table and ID for a well-known area of interest, + such as a HUC. + Format "table__id", eg. "huc12__55174" will analyze + the HUC-12 City of Philadelphia-Schuylkill River. + type: string + paramType: query + + consumes: + - application/json + produces: + - application/json + """ user = request.user if request.user.is_authenticated() else None wkaoi = request.query_params.get('wkaoi', None) @@ -83,6 +485,95 @@ def start_analyze_soil(request, format=None): @decorators.api_view(['POST']) @decorators.permission_classes((AllowAny, )) def start_analyze_animals(request, format=None): + """ + Starts a job to produce counts for animals in a given area. + + Source USDA + + For more information, see + the [technical documentation](https://wikiwatershed.org/documentation/ + mmw-tech/#additional-data-layers) + + ## Response + + You can use the URL provided in the response's `Location` header + to poll for the job's results. + + + **Example of a completed job's `result`** + + +
+ + { + "survey": { + "displayName": "Animals", + "name": "animals", + "categories": [ + { + "aeu": 0, + "type": "Sheep" + }, + { + "aeu": 0, + "type": "Horses" + }, + { + "aeu": 0, + "type": "Turkeys" + }, + { + "aeu": 0, + "type": "Chickens, Layers" + }, + { + "aeu": 0, + "type": "Cows, Beef" + }, + { + "aeu": 0, + "type": "Pigs/Hogs/Swine" + }, + { + "aeu": 0, + "type": "Cows, Dairy" + }, + { + "aeu": 0, + "type": "Chickens, Broilers" + } + ] + } + } +
+ --- + type: + job: + required: true + type: string + status: + required: true + type: string + + omit_serializer: true + parameters: + - name: body + description: A valid single-ringed Multipolygon GeoJSON + representation of the shape to analyze. + See the GeoJSON spec + https://tools.ietf.org/html/rfc7946#section-3.1.7 + paramType: body + type: object + - name: wkaoi + description: The table and ID for a well-known area of interest, + such as a HUC. + Format "table__id", eg. "huc12__55174" will analyze + the HUC-12 City of Philadelphia-Schuylkill River. + consumes: + - application/json + produces: + - application/json + """ user = request.user if request.user.is_authenticated() else None wkaoi = request.query_params.get('wkaoi', None) @@ -96,6 +587,76 @@ def start_analyze_animals(request, format=None): @decorators.api_view(['POST']) @decorators.permission_classes((AllowAny, )) def start_analyze_pointsource(request, format=None): + """ + Starts a job to analyze the discharge monitoring report annual + averages for a given area. + + Source EPA NPDES + + For more information, see the + [technical documentation](https://wikiwatershed.org/ + documentation/mmw-tech/#additional-data-layers) + + ## Response + + You can use the URL provided in the response's `Location` + header to poll for the job's results. + + + **Example of a completed job's `result`** + + +
+ + { + "survey": { + "displayName": "Point Source", + "name": "pointsource", + "categories": [ + { + "city": "PHILADELPHIA", + "kgp_yr": 16937.8, + "mgd": 4.0835, + "npdes_id": "0011533", + "longitude": -75.209722, + "state": "PA", + "facilityname": "GIRARD POINT PROCESSING AREA", + "latitude": 39.909722, + "kgn_yr": 1160.76 + } + ], ... + } + } + +
+ --- + type: + job: + required: true + type: string + status: + required: true + type: string + + omit_serializer: true + parameters: + - name: body + description: A valid single-ringed Multipolygon GeoJSON + representation of the shape to analyze. + See the GeoJSON spec + https://tools.ietf.org/html/rfc7946#section-3.1.7 + paramType: body + type: object + - name: wkaoi + description: The table and ID for a well-known area of interest, + such as a HUC. + Format "table__id", eg. "huc12__55174" will analyze + the HUC-12 City of Philadelphia-Schuylkill River. + consumes: + - application/json + produces: + - application/json + """ user = request.user if request.user.is_authenticated() else None wkaoi = request.query_params.get('wkaoi', None) @@ -109,6 +670,104 @@ def start_analyze_pointsource(request, format=None): @decorators.api_view(['POST']) @decorators.permission_classes((AllowAny, )) def start_analyze_catchment_water_quality(request, format=None): + """ + Starts a job to calculate the calibrated GWLF-E (MapShed) model + estimates for a given area + (Delaware River Basin only) + + Source Stream Reach Tool Assessment (SRAT) + + For more information, see + the [technical documentation](https://wikiwatershed.org/ + documentation/mmw-tech/#overlays-tab-coverage) + + ## Response + + You can use the URL provided in the response's `Location` + header to poll for the job's results. + + + **Example of a completed job's `result`** + + +
+ + { + "survey": { + "displayName": "Water Quality", + "name": "catchment_water_quality", + "categories": [ + { + "tss_urban_": 49.115354321566734, + "tn_riparia": 0.3730090214, + "tn_pt_kgyr": null, + "tp_urban_k": 0.5043825, + "tss_tot_kg": 336.49653266840215, + "geom": { + "type": "MultiPolygon", + "coordinates": [ + [ + [ + [ + -74.9780151302813, + 40.0646039341582 + ], ... + ], ... + ] + ] + }, + "nord": 4793, + "tss_concmg": 124.5634, + "tp_ag_kgyr": 0.493174, + "tp_yr_avg_": 0.0929, + "tn_yr_avg_": 1.461, + "tn_ag_kgyr": 8.74263, + "tss_natura": 3.912097242622951, + "tp_pt_kgyr": null, + "tn_natural": 2.622789, + "areaha": 375.27, + "tp_tot_kgy": 0.51148576021895, + "tn_urban_k": 8.428792, + "tp_natural": 0.0560425, + "tp_riparia": 0.1240888899, + "tn_tot_kgy": 7.745244945328085, + "tss_ag_kgy": 66.71350648852459, + "tss_rip_kg": 545.9289658316266 + } + ] + } + } + +
+ + --- + type: + job: + required: true + type: string + status: + required: true + type: string + + omit_serializer: true + parameters: + - name: body + description: A valid single-ringed Multipolygon GeoJSON + representation of the shape to analyze. + See the GeoJSON spec + https://tools.ietf.org/html/rfc7946#section-3.1.7 + paramType: body + type: object + - name: wkaoi + description: The table and ID for a well-known area of interest, + such as a HUC. + Format "table__id", eg. "huc12__55174" will analyze + the HUC-12 City of Philadelphia-Schuylkill River. + consumes: + - application/json + produces: + - application/json + """ user = request.user if request.user.is_authenticated() else None wkaoi = request.query_params.get('wkaoi', None) diff --git a/src/mmw/apps/modeling/views.py b/src/mmw/apps/modeling/views.py index a238c0236..dd057d667 100644 --- a/src/mmw/apps/modeling/views.py +++ b/src/mmw/apps/modeling/views.py @@ -484,6 +484,32 @@ def drb_point_sources(request): @decorators.api_view(['GET']) @decorators.permission_classes((AllowAny, )) def get_job(request, job_uuid, format=None): + """ + Get a job's status. If it's complete, get its result. + + --- + type: + job_uuid: + required: true + type: string + status: + required: true + type: string + started: + required: true + type: datetime + finished: + required: true + type: datetime + result: + required: true + type: object + error: + required: true + type: string + + omit_serializer: true + """ # TODO consider if we should have some sort of session id check to ensure # you can only view your own jobs. try: