Merge pull request #2 from davethau/master
Added KML reports
javisantana committed Jan 24, 2012
2 parents ef0d667 + 6e58742 commit 262afab
Showing 12 changed files with 303 additions and 117 deletions.
5 changes: 4 additions & 1 deletion README.md
@@ -27,7 +27,10 @@ The tool is intended to be use directly online (still pending the final URL) so
* Go to the src folder: `cd src`
* Run it using the following script: `tools/start`. Leave the window open, the application should be running.
6. Create an initial report
* Open a new Terminal window, leaving the other open, and run `curl -d '' "http://localhost:8080/_ah/cmd/create_report?year=2011&month=7&day=15"`
* Open a new Terminal window, leaving the other open,
* Initialize fusion tables: `curl "http://localhost:8080/_ah/cmd/fusion_tables_names"`
* Create an unclosed report: `curl -d '' "http://localhost:8080/_ah/cmd/create_report?year=2011&month=7&day=15"`
* If you'd like, create a closed report: `curl -d '' "http://localhost:8080/_ah/cmd/create_report?year=2011&month=8&day=15&fyear=2011&fmonth=9&fday=15&assetid=SAD_VALIDATED/SAD_2010_05"`
7. Start using the app.
* You should now be able to go to http://localhost:8080 and start using the application locally.
* When logging in, don't forget to set yourself as admin.
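For reference, step 6 can also be scripted instead of typed by hand. A minimal sketch using urllib2, in the same spirit as generate_reports.py later in this commit; the host, dates, and the `create_report` helper name are illustrative, not part of the repository:

```python
# Hypothetical helper mirroring the curl calls in step 6: an empty POST body
# to /_ah/cmd/create_report creates a report on the local dev server.
import urllib2

HOST = "localhost:8080"  # assumption: the dev server started in step 5

def create_report(year, month, day, fyear=None, fmonth=None, fday=None, assetid=None):
    """Create an open report, or a closed one when the f*/assetid params are given."""
    url = "http://%s/_ah/cmd/create_report?year=%d&month=%d&day=%d" % (HOST, year, month, day)
    if assetid:
        url += "&fyear=%d&fmonth=%d&fday=%d&assetid=%s" % (fyear, fmonth, fday, assetid)
    return urllib2.urlopen(url, data="").read()  # data="" makes it a POST, like `curl -d ''`

create_report(2011, 7, 15)                                            # unclosed report
create_report(2011, 8, 15, 2011, 9, 15, "SAD_VALIDATED/SAD_2010_05")  # closed report
```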
114 changes: 41 additions & 73 deletions src/application/api.py
@@ -14,6 +14,8 @@
from flask import jsonify, request, abort, Response
from app import app
import settings
from report_types import ReportType, CSVReportType, KMLReportType
from kml import path_to_kml

from models import Area, Note, Report, StatsStore, FustionTablesNames
from ee import NDFI, EELandsat, Stats
@@ -44,9 +46,10 @@


#TODO: this function needs a huge refactor
@app.route('/api/v0/stats/<table>/<zone>')
@app.route('/api/v0/stats/<table>/<format>/<zone>')
@app.route('/api/v0/stats/<table>/<format>')
@app.route('/api/v0/stats/<table>')
def stats(table, zone=None):
def stats(table, zone=None, format="csv"):

reports = request.args.get('reports', None)
if not reports:
@@ -56,67 +59,33 @@ def stats(table, zone=None):
except ValueError:
logging.error("bad format for report id")
abort(400)

this_report = ReportType.factory(format)
this_report.init(zone)
this_report.write_header()

f = StringIO()
csv_file = csv.writer(f)
logging.info("table id is %s ", table)
logging.info("and we see %s ", FustionTablesNames.all().filter('table_id =', table).fetch(1))
logging.info("and zone %s ", zone)
logging.info("and format %s ", format)

table_names = FustionTablesNames.all().filter('table_id =', table).fetch(1)[0].as_dict()
reports = [Report.get_by_id(x) for x in reports]
for r in reports:
if not r:
logging.error("report not found")
abort(404)

stats = this_report.get_stats(r, table)

# return the stats for each zone
if not zone:
csv_file.writerow(('report_id', 'start_date', 'end_date','zone_id', 'deforested', 'degraded'))
reports = [Report.get_by_id(x) for x in reports]
for r in reports:
if not r:
logging.error("report not found")
abort(404)
st = StatsStore.get_for_report(str(r.key()))
if not st:
logging.error("no stats for report")
abort(404)
stats = st.for_table(table)
for s in stats:
name = table_names.get(s['id'], s['id'])
csv_file.writerow((str(r.key().id()),
r.start.isoformat(),
r.end.isoformat(),
name,
s['def'],
s['deg']))

else:
csv_file.writerow(('report_id', 'start_date', 'end_date', 'deforested', 'degraded'))
reports = [Report.get_by_id(x) for x in reports]
for r in reports:
if not r:
abort(404)
report_id = str(r.key())
st = StatsStore.get_for_report(report_id)

if not st:
logging.error("no cached stats for %s" % report_id)
abort(404)

stats = st.table_accum(table, zone)
if not stats:
logging.error("no stats for %s" % report_id)
abort(404)

csv_file.writerow((str(r.key().id()),
r.start.isoformat(),
r.end.isoformat(),
stats['def'],
stats['deg']))

return Response(f.getvalue(),
headers={
"Content-Disposition": "attachment; filename=\"report_%s.csv\"" % table
},
mimetype='text/csv')


@app.route('/api/v0/stats/polygon/csv')
def polygon_stats_csv():
for s in stats:
this_report.write_row(r, s, table)

this_report.write_footer()
return this_report.response("report_%s" % table)
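The bulk of the deleted CSV-writing code moves into the new report_types module, which is not part of this diff. Its interface can be read off the call sites (factory, init, write_header, get_stats, write_row, write_footer, response); a minimal CSV-only sketch of what such a module could look like, with every body inferred rather than copied from the real code:

```python
# Hypothetical reconstruction of the report_types interface used by stats();
# the real module also provides a KMLReportType, omitted here.
import csv
from StringIO import StringIO
from flask import Response

from models import StatsStore


class ReportType(object):
    @staticmethod
    def factory(format):
        # the real factory presumably also maps "kml" to KMLReportType
        return CSVReportType()

    def init(self, zone):
        self.zone = zone

    def get_stats(self, report, table):
        st = StatsStore.get_for_report(str(report.key()))
        if not st:
            return []  # the real code may abort(404) instead
        if self.zone:
            return st.table_accum(table, self.zone) or []  # now a list, see models.py below
        return st.for_table(table)


class CSVReportType(ReportType):
    def init(self, zone):
        ReportType.init(self, zone)
        self.buffer = StringIO()
        self.writer = csv.writer(self.buffer)

    def write_header(self):
        self.writer.writerow(('report_id', 'start_date', 'end_date',
                              'zone_id', 'deforested', 'degraded'))

    def write_row(self, report, stat, table, kml=None):
        self.writer.writerow((str(report.key().id()),
                              report.start.isoformat(),
                              report.end.isoformat(),
                              stat.get('id', self.zone),
                              stat['def'],
                              stat['deg']))

    def write_footer(self):
        pass  # only the KML writer needs a closing element

    def response(self, filename):
        return Response(self.buffer.getvalue(),
                        headers={"Content-Disposition":
                                 'attachment; filename="%s.csv"' % filename},
                        mimetype='text/csv')
```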


@app.route('/api/v0/stats/polygon/<format>')
def polygon_stats(format=None):
reports = request.args.get('reports', None)
if not reports:
abort(400)
@@ -131,30 +100,29 @@ def polygon_stats_csv():
except ValueError:
logging.error("can't find some report")
abort(404)

#TODO: test if polygon is ccw
# exchange lat, lon -> lon, lat
polygon = json.loads(request.args.get('polygon', None))
polygon.append(polygon[0])
logging.info(polygon)
logging.info(path_to_kml([polygon]))
if not polygon:
abort(404)
ee = Stats()
normalized_poly = [(coord[1], coord[0]) for coord in polygon]
stats = ee.get_stats_for_polygon([(str(r.key().id()), r.assetid) for r in reports], [normalized_poly])

this_report = ReportType.factory(format)
this_report.init("custom polygon")
try:
f = StringIO()
csv_file = csv.writer(f)
csv_file.writerow(('report_id', 'start_date', 'end_date', 'deforested', 'degraded'))
this_report.write_header()
for i,s in enumerate(stats):
r = reports[i]
csv_file.writerow((str(r.key().id()),
r.start.isoformat(),
r.end.isoformat(),
s['def'],
s['deg']))
return Response(f.getvalue(),
headers={
"Content-Disposition": "attachment; filename=\"polygon.csv\""
},
mimetype='text/csv')
this_report.write_row(r, s, None, path_to_kml([polygon]))

this_report.write_footer()
return this_report.response("report_polygon")
except (KeyError, ValueError, IndexError):
abort(404)

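polygon_stats logs path_to_kml([polygon]) and passes it to write_row so the KML writer can embed the queried polygon in its output. kml.path_to_kml itself is not shown in this diff; a plausible sketch, assuming it takes (lat, lon) rings and emits a KML Polygon fragment with lon,lat coordinate order:

```python
# Hypothetical sketch of kml.path_to_kml. Element names follow the KML spec;
# the real helper may structure its output differently.
def path_to_kml(paths):
    rings = []
    for path in paths:
        # KML coordinates are "lon,lat,alt" triplets separated by spaces
        coords = " ".join("%f,%f,0" % (lon, lat) for lat, lon in path)
        rings.append("<LinearRing><coordinates>%s</coordinates></LinearRing>" % coords)
    outer = "<outerBoundaryIs>%s</outerBoundaryIs>" % rings[0]
    holes = "".join("<innerBoundaryIs>%s</innerBoundaryIs>" % r for r in rings[1:])
    return "<Polygon>%s%s</Polygon>" % (outer, holes)

# closed ring around a small square, in the (lat, lon) order the API receives
path_to_kml([[(0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)]])
```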
1 change: 0 additions & 1 deletion src/application/ft.py
@@ -51,7 +51,6 @@ def create_table(self, table):
def sql(self, sql):
logging.debug("FT:SQL: %s" % sql)
r = self.client.query(sql)
logging.debug("-> %s" % r)
return r


52 changes: 27 additions & 25 deletions src/application/generate_reports.py
@@ -7,34 +7,36 @@
import urllib2
from time_utils import month_range

assets_id = ['SAD_VALIDATED/SAD_2009_08',
'SAD_VALIDATED/SAD_2009_09',
'SAD_VALIDATED/SAD_2009_10',
'SAD_VALIDATED/SAD_2009_11',
'SAD_VALIDATED/SAD_2009_12',
'SAD_VALIDATED/SAD_2010_01',
'SAD_VALIDATED/SAD_2010_02',
'SAD_VALIDATED/SAD_2010_05',
'SAD_VALIDATED/SAD_2010_06',
'SAD_VALIDATED/SAD_2010_07',
'SAD_VALIDATED/SAD_2010_08',
'SAD_VALIDATED/SAD_2010_09',
'SAD_VALIDATED/SAD_2010_10',
'SAD_VALIDATED/SAD_2010_11',
'SAD_VALIDATED/SAD_2010_12',
assets_id = [
#'SAD_VALIDATED/SAD_2009_08',
#'SAD_VALIDATED/SAD_2009_09',
#'SAD_VALIDATED/SAD_2009_10',
#'SAD_VALIDATED/SAD_2009_11',
#'SAD_VALIDATED/SAD_2009_12',
#'SAD_VALIDATED/SAD_2010_01',
#'SAD_VALIDATED/SAD_2010_02',
#'SAD_VALIDATED/SAD_2010_05',
#'SAD_VALIDATED/SAD_2010_06',
#'SAD_VALIDATED/SAD_2010_07',
#'SAD_VALIDATED/SAD_2010_08',
#'SAD_VALIDATED/SAD_2010_09',
#'SAD_VALIDATED/SAD_2010_10',
#'SAD_VALIDATED/SAD_2010_11',
#'SAD_VALIDATED/SAD_2010_12',
'SAD_VALIDATED/SAD_2011_01',
'SAD_VALIDATED/SAD_2011_02',
'SAD_VALIDATED/SAD_2011_03',
'SAD_VALIDATED/SAD_2011_04',
'SAD_VALIDATED/SAD_2011_05',
'SAD_VALIDATED/SAD_2011_06',
'SAD_VALIDATED/SAD_2011_07',
'SAD_VALIDATED/SAD_2011_08',
'SAD_VALIDATED/SAD_2011_09']

#'SAD_VALIDATED/SAD_2011_02',
#'SAD_VALIDATED/SAD_2011_03',
#'SAD_VALIDATED/SAD_2011_04',
#'SAD_VALIDATED/SAD_2011_05',
#'SAD_VALIDATED/SAD_2011_06',
#'SAD_VALIDATED/SAD_2011_07',
#'SAD_VALIDATED/SAD_2011_08',
#'SAD_VALIDATED/SAD_2011_09'
]

print "assetid,report_id"
for r in assets_id[-1:]:
#for r in assets_id[-1:]:
for r in assets_id:
first, last = month_range(*reversed(map(int, r.split('_')[-2:])))
url = "http://%s/_ah/cmd/create_report?year=%d&month=%d&day=%d&assetid=%s&fyear=%d&fmonth=%d&fday=%d""" % (sys.argv[1], first.year, first.month, first.day, r, last.year, last.month, last.day)
print url
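The loop above derives each report's date window from the asset name: the last two underscore-separated fields are year and month, reversed into month_range(month, year). That helper lives in time_utils and is not shown in this commit; a minimal sketch under the assumption that it returns the first and last day of the month:

```python
# Hypothetical sketch of time_utils.month_range as called by generate_reports.py:
# month_range(month, year) -> (first_day, last_day) of that calendar month.
import calendar
from datetime import date

def month_range(month, year):
    days_in_month = calendar.monthrange(year, month)[1]
    return date(year, month, 1), date(year, month, days_in_month)

# 'SAD_VALIDATED/SAD_2011_01'.split('_')[-2:] == ['2011', '01'] -> month_range(1, 2011)
first, last = month_range(*reversed(map(int, 'SAD_VALIDATED/SAD_2011_01'.split('_')[-2:])))
# first == date(2011, 1, 1), last == date(2011, 1, 31)
```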
6 changes: 3 additions & 3 deletions src/application/models.py
@@ -512,15 +512,15 @@ def table_accum(self, table, zone=None):
if not table_stats:
logging.info("no stats for %s on %s" % (table, self.report_id))
return None
return {
return [{
'id': zone,
'def': reduce(operator.add, map(float, (x['def'] for x in table_stats))),
'deg': reduce(operator.add, map(float, (x['deg'] for x in table_stats)))
}
}]

class FustionTablesNames(db.Model):
table_id = db.StringProperty()
json = db.TextProperty()

def as_dict(self):
return json.loads(self.json)
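The table_accum change wraps the accumulated totals in a one-element list, so callers (ReportType.get_stats in the sketch above) receive the same shape whether they asked for one zone or for the whole table. A self-contained illustration of that reduction and the resulting uniform loop; the names here are illustrative:

```python
# Same reduction StatsStore.table_accum now performs, returned as a list
# so the caller can iterate over it unconditionally.
import operator

def accumulate(zone, table_stats):
    return [{
        'id': zone,
        'def': reduce(operator.add, map(float, (x['def'] for x in table_stats))),
        'deg': reduce(operator.add, map(float, (x['deg'] for x in table_stats))),
    }]

stats = accumulate('zone_7', [{'def': '10.0', 'deg': '2.5'},
                              {'def': '4.0', 'deg': '0.0'}])
for s in stats:  # one loop now covers both the per-zone and table-wide cases
    print s['id'], s['def'], s['deg']
```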
