114 changes: 114 additions & 0 deletions enacts/calc.py
@@ -2,6 +2,11 @@
import pandas as pd
import xarray as xr
import datetime
import psycopg2
from psycopg2 import sql
import shapely
from shapely import wkb
from shapely.geometry.multipolygon import MultiPolygon

# Date Reading functions

@@ -121,6 +126,115 @@ def synthesize_enacts(variable, time_res):
var_name = variable
return xrds[var_name]


def sql2GeoJSON(shapes_sql, db_config):
""" Form a GeoJSON dict from sql request to a database

Parameters
----------
shapes_sql: str
SQL request
db_config: dict
dictionary with host, port, user and dbname information

Returns
-------
features: dict
dictionary with "features" as key and GeoJSON of the shapes selected by `shapes_sql` as value

See Also
--------
sql2geom, geom2GeoJSON

Examples
--------
shapes_sql: select id_1 as key, name_1 as label,
ST_AsBinary(the_geom) as the_geom from sen_adm1
db_config:
host: postgres
port: 5432
user: ingrid
dbname: iridb
"""
return geom2GeoJSON(sql2geom(shapes_sql, db_config))
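
For instance, with the docstring's example query against a hypothetical database (host, user and dbname are placeholders), a minimal usage sketch:

admin1_sql = (
    "select id_1 as key, name_1 as label, "
    "ST_AsBinary(the_geom) as the_geom from sen_adm1"
)
db_config = {"host": "postgres", "port": 5432, "user": "ingrid", "dbname": "iridb"}
adm_geojson = sql2GeoJSON(admin1_sql, db_config)
# adm_geojson["features"] is a pandas.Series of GeoJSON-like mappings, each
# carrying its "label", suitable to pass as data= to a dash_leaflet GeoJSON layer.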


def geom2GeoJSON(df):
""" Form a GeoJSON dict from a geometric object

Parameters
----------
df: geometric object
shapely geometric object

Returns
-------
features: dict
dictionary with features as key and GeoJSON of `geom` as value

See Also
--------
sql2geom, shapely.MultiPolygon, shapely.geometry.mapping
"""
df["the_geom"] = df["the_geom"].apply(
lambda x: x if isinstance(x, MultiPolygon) else MultiPolygon([x])
)
shapes = df["the_geom"].apply(shapely.geometry.mapping)
for i in df.index: #this adds the district layer as a label in the dict
shapes[i]['label'] = df['label'][i]
return {"features": shapes}


def sql2geom(shapes_sql, db_config):
""" Form a geometric object from sql query to a database

Parameters
----------
shapes_sql: str
SQL query
db_config: dict
dictionary with host, port, user and dbname information

Returns
-------
df : pandas.DataFrame
a pandas.DataFrame with columns "label" (dtype=string),
"key" (string or int depending on the table),
and "the_geom" (shapely.Geometry)

See Also
--------
psycopg2.connect, psycopg2.sql, pandas.read_sql, shapely.wkb

Examples
--------
shapes_sql: select id_1 as key, name_1 as label,
ST_AsBinary(the_geom) as the_geom from sen_adm1
db_config:
host: postgres
port: 5432
user: ingrid
dbname: iridb
"""
with psycopg2.connect(**db_config) as conn:
s = sql.Composed(
[
sql.SQL("with g as ("),
sql.SQL(shapes_sql),
sql.SQL(
"""
)
select
g.label, g.key, g.the_geom
from g
"""
),
]
)
df = pd.read_sql(s, conn)
df["the_geom"] = df["the_geom"].apply(lambda x: wkb.loads(x.tobytes()))
return df
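
Used on its own, sql2geom is handy for pulling a single clipping geometry, as the maprooms below do (same hypothetical admin1_sql and db_config as above):

df = sql2geom(admin1_sql, db_config)
# df has columns "label", "key" and "the_geom";
# the first geometry can serve e.g. as a clipping shape for pingrid.tile
clip_shape = df["the_geom"][0]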

# Growing season functions

def water_balance_step(sm_yesterday, peffective, et, taw):
82 changes: 14 additions & 68 deletions enacts/crop_suitability/maproom_crop_suit.py
@@ -12,17 +12,13 @@
from pingrid import CMAPS, BROWN, YELLOW, ORANGE, PALEGREEN, GREEN, DARKGREEN
from . import layout_crop_suit
import calc
import maproom_utilities as mapr_u
import plotly.graph_objects as pgo
import plotly.express as px
import pandas as pd
import numpy as np
import urllib
import math
import psycopg2
from psycopg2 import sql
import shapely
from shapely import wkb
from shapely.geometry.multipolygon import MultiPolygon
import datetime
import xarray as xr

@@ -41,6 +37,8 @@
"variable": "tmax", "time_res": "daily", "ds_conf": GLOBAL_CONFIG["datasets"]
}

ADMIN_CONFIG = GLOBAL_CONFIG["datasets"]["shapes_adm"]
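
Each ADMIN_CONFIG entry is expected to provide at least the keys read at the call sites below; a hypothetical entry (placeholder values, the real ones live in the deployment configuration) might look like:

example_adm = {
    "name": "Admin level 1",
    "sql": "select id_1 as key, name_1 as label, "
           "ST_AsBinary(the_geom) as the_geom from sen_adm1",
    "color": "black",
    "is_checked": False,
}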

CROP_SUIT_COLORMAP = pingrid.ColorScale(
"crop_suit",
[BROWN, BROWN, ORANGE, ORANGE, YELLOW, YELLOW,
@@ -70,55 +68,6 @@ def register(FLASK, config):

APP.layout = layout_crop_suit.app_layout(config)


def adm_borders(shapes):
with psycopg2.connect(**GLOBAL_CONFIG["db"]) as conn:
s = sql.Composed(
[
sql.SQL("with g as ("),
sql.SQL(shapes),
sql.SQL(
"""
)
select
g.label, g.key, g.the_geom
from g
"""
),
]
)
df = pd.read_sql(s, conn)

df["the_geom"] = df["the_geom"].apply(lambda x: wkb.loads(x.tobytes()))
df["the_geom"] = df["the_geom"].apply(
lambda x: x if isinstance(x, MultiPolygon) else MultiPolygon([x])
)
shapes = df["the_geom"].apply(shapely.geometry.mapping)
for i in df.index: #this adds the district layer as a label in the dict
shapes[i]['label'] = df['label'][i]
return {"features": shapes}


def make_adm_overlay(
adm_name, adm_sql, adm_color, adm_lev, adm_weight, is_checked=False
):
border_id = {"type": "borders_adm", "index": adm_lev}
return dlf.Overlay(
dlf.GeoJSON(
id=border_id,
data=adm_borders(adm_sql),
options={
"fill": True,
"color": adm_color,
"weight": adm_weight,
"fillOpacity": 0,
},
),
name=adm_name,
checked=is_checked,
)
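
The replacement helper mapr_u.make_adm_overlay is not part of this diff; judging from the removed inline version above and the keyword arguments at the new call sites, it presumably looks roughly like this sketch, with the SQL-to-GeoJSON conversion now handled by calc.sql2GeoJSON at the call site:

import dash_leaflet as dlf

def make_adm_overlay(
    adm_name, adm_geojson, adm_clor, adm_lev, adm_weight, is_checked=False
):
    # same behavior as the removed helper, but takes a ready-made GeoJSON
    # dict (e.g. from calc.sql2GeoJSON) instead of an SQL string
    border_id = {"type": "borders_adm", "index": adm_lev}
    return dlf.Overlay(
        dlf.GeoJSON(
            id=border_id,
            data=adm_geojson,
            options={
                "fill": True,
                "color": adm_clor,
                "weight": adm_weight,
                "fillOpacity": 0,
            },
        ),
        name=adm_name,
        checked=is_checked,
    )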


@APP.callback(
Output("lat_input", "min"),
Output("lat_input", "max"),
@@ -209,15 +158,15 @@ def make_map(
checked=True,
),
] + [
make_adm_overlay(
adm["name"],
adm["sql"],
adm["color"],
i+1,
len(GLOBAL_CONFIG["datasets"]["shapes_adm"])-i,
is_checked=adm["is_checked"]
mapr_u.make_adm_overlay(
adm_name=adm["name"],
adm_geojson=calc.sql2GeoJSON(adm["sql"], GLOBAL_CONFIG["db"]),
adm_clor=adm["color"],
adm_lev=i+1,
adm_weight=len(ADMIN_CONFIG)-i,
is_checked=adm["is_checked"],
)
for i, adm in enumerate(GLOBAL_CONFIG["datasets"]["shapes_adm"])
for i, adm in enumerate(ADMIN_CONFIG)
] + [
dlf.Overlay(
dlf.TileLayer(
@@ -625,12 +574,9 @@ def cropSuit_layers(tz, tx, ty):
map = map.rename(X="lon", Y="lat")
map.attrs["scale_min"] = map_min
map.attrs["scale_max"] = map_max
with psycopg2.connect(**GLOBAL_CONFIG["db"]) as conn:
s = sql.Composed(
[sql.SQL(GLOBAL_CONFIG["datasets"]['shapes_adm'][0]['sql'])]
)
df = pd.read_sql(s, conn)
clip_shape = df["the_geom"].apply(lambda x: wkb.loads(x.tobytes()))[0]
clip_shape = calc.sql2geom(
ADMIN_CONFIG[0]['sql'], GLOBAL_CONFIG["db"]
)["the_geom"][0]
result = pingrid.tile(map.astype('float64'), tx, ty, tz, clip_shape)

return result
77 changes: 14 additions & 63 deletions enacts/flex_fcst/maproom.py
@@ -11,15 +11,13 @@
import pandas as pd
from . import predictions
from . import cpt
import maproom_utilities as mapr_u
import urllib
import dash_leaflet as dlf
import psycopg2
from psycopg2 import sql
import shapely
from shapely import wkb
from shapely.geometry.multipolygon import MultiPolygon
from globals_ import FLASK, GLOBAL_CONFIG

ADMIN_CONFIG = GLOBAL_CONFIG["datasets"]["shapes_adm"]

def register(FLASK, config):
PFX = f"{GLOBAL_CONFIG['url_path_prefix']}/{config['core_path']}"
TILE_PFX = f"{PFX}/tile"
@@ -42,52 +40,6 @@ def register(FLASK, config):

APP.layout = layout.app_layout()

def adm_borders(shapes):
with psycopg2.connect(**GLOBAL_CONFIG["db"]) as conn:
s = sql.Composed(
[
sql.SQL("with g as ("),
sql.SQL(shapes),
sql.SQL(
"""
)
select
g.label, g.key, g.the_geom
from g
"""
),
]
)
df = pd.read_sql(s, conn)

df["the_geom"] = df["the_geom"].apply(lambda x: wkb.loads(x.tobytes()))
df["the_geom"] = df["the_geom"].apply(
lambda x: x if isinstance(x, MultiPolygon) else MultiPolygon([x])
)
shapes = df["the_geom"].apply(shapely.geometry.mapping)
for i in df.index: #this adds the district layer as a label in the dict
shapes[i]['label'] = df['label'][i]
return {"features": shapes}


def make_adm_overlay(adm_name, adm_sql, adm_color, adm_lev, adm_weight, is_checked=False):
border_id = {"type": "borders_adm", "index": adm_lev}
return dlf.Overlay(
dlf.GeoJSON(
id=border_id,
data=adm_borders(adm_sql),
options={
"fill": True,
"color": adm_color,
"weight": adm_weight,
"fillOpacity": 0,
},
),
name=adm_name,
checked=is_checked,
)


#Should I move this function into the predictions.py file where I put the other funcs?
#if we do so maybe I should redo the func to be more flexible since it is hard coded to read each file separately..
def read_cptdataset(lead_time, start_date, y_transform=config["y_transform"]):
@@ -702,15 +654,15 @@ def make_map(proba, variable, percentile, threshold, start_date, lead_time):
checked=True,
),
] + [
make_adm_overlay(
adm["name"],
adm["sql"],
adm["color"],
i+1,
len(GLOBAL_CONFIG["datasets"]["shapes_adm"])-i,
is_checked=adm["is_checked"]
mapr_u.make_adm_overlay(
adm_name=adm["name"],
adm_geojson=calc.sql2GeoJSON(adm["sql"], GLOBAL_CONFIG["db"]),
adm_clor=adm["color"],
adm_lev=i+1,
adm_weight=len(ADMIN_CONFIG)-i,
is_checked=adm["is_checked"],
)
for i, adm in enumerate(GLOBAL_CONFIG["datasets"]["shapes_adm"])
for i, adm in enumerate(ADMIN_CONFIG)
] + [
dlf.Overlay(
dlf.TileLayer(
@@ -790,10 +742,9 @@ def fcst_tiles(tz, tx, ty, proba, variable, percentile, threshold, start_date, l
# probabilities symmetry around percentile threshold
# choice of colorscale (dry to wet, wet to dry, or correlation)
fcst_cdf = to_flexible(fcst_cdf, proba, variable, percentile,)
with psycopg2.connect(**GLOBAL_CONFIG["db"]) as conn:
s = sql.Composed([sql.SQL(GLOBAL_CONFIG['datasets']['shapes_adm'][0]['sql'])])
df = pd.read_sql(s, conn)
clip_shape = df["the_geom"].apply(lambda x: wkb.loads(x.tobytes()))[0]
clip_shape = calc.sql2geom(
ADMIN_CONFIG[0]['sql'], GLOBAL_CONFIG["db"]
)["the_geom"][0]

resp = pingrid.tile(fcst_cdf, tx, ty, tz, clip_shape)
return resp