Skip to content
Permalink
Browse files

Rasterize multilabel support, add --type argument, Cf #8.

Centralize error handling cf #23.
  • Loading branch information...
ocourtin committed Apr 17, 2019
1 parent 68e573a commit ee9d0fe5cdd093174dec99ff7c379bd1593ebc07
@@ -57,10 +57,10 @@ it_pre:
@rsp cover --zoom 18 --bbox 4.8,45.7,4.82,45.72 it/cover
@rsp download --rate 20 --type WMS 'https://download.data.grandlyon.com/wms/grandlyon?SERVICE=WMS&REQUEST=GetMap&VERSION=1.3.0&LAYERS=Ortho2015_vue_ensemble_16cm_CC46&WIDTH=512&HEIGHT=512&CRS=EPSG:3857&BBOX={xmin},{ymin},{xmax},{ymax}&FORMAT=image/jpeg' it/cover it/images
@echo "Download GeoJSON" && wget --show-progress -q -nc -O it/lyon_roofprint.json 'https://download.data.grandlyon.com/wfs/grandlyon?SERVICE=WFS&REQUEST=GetFeature&TYPENAME=ms:fpc_fond_plan_communaut.fpctoit&VERSION=1.1.0&srsName=EPSG:4326&BBOX=4.8,45.7,4.82,45.72&outputFormat=application/json; subtype=geojson' | true
@rsp rasterize --geojson it/lyon_roofprint.json --config config.toml --cover it/cover it/labels
@rsp rasterize --type Building --geojson it/lyon_roofprint.json --config config.toml --cover it/cover it/labels
@echo "Download PBF" && wget --show-progress -q -O it/lyon.pbf http://datapink.tools/rsp/it/lyon.pbf
@rsp extract --type Building it/lyon.pbf it/osm_lyon_footprint.json
@rsp rasterize --geojson it/lyon_roofprint.json --config config.toml --cover it/cover it/labels_osm
@rsp rasterize --type Building --geojson it/lyon_roofprint.json --config config.toml --cover it/cover it/labels_osm
@rsp cover --dir it/images --splits 80/20 it/training/cover it/validation/cover
@rsp subset --dir it/images --cover it/training/cover it/training/images
@rsp subset --dir it/labels --cover it/training/cover it/training/labels
@@ -70,7 +70,7 @@ it_pre:
@rsp tile --zoom 18 it/tanzania.tif it/prediction/images
@rsp cover --zoom 18 --dir it/prediction/images it/prediction/cover
@wget -nc -O it/tanzania.geojson http://datapink.tools/rsp/it/tanzania.geojson
@rsp rasterize --geojson it/tanzania.geojson --config config.toml --cover it/prediction/cover it/prediction/labels
@rsp rasterize --type Building --geojson it/tanzania.geojson --config config.toml --cover it/prediction/cover it/prediction/labels



@@ -152,7 +152,7 @@ rsp cover --bbox 4.8,45.7,4.83,45.73 --zoom 18 ds/cover
rsp download --type WMS 'https://download.data.grandlyon.com/wms/grandlyon?SERVICE=WMS&REQUEST=GetMap&VERSION=1.3.0&LAYERS=Ortho2015_vue_ensemble_16cm_CC46&WIDTH=512&HEIGHT=512&CRS=EPSG:3857&BBOX={xmin},{ymin},{xmax},{ymax}&FORMAT=image/jpeg' ds/cover ds/images
wget -nc -O ds/lyon_roofprint.json 'https://download.data.grandlyon.com/wfs/grandlyon?SERVICE=WFS&REQUEST=GetFeature&TYPENAME=ms:fpc_fond_plan_communaut.fpctoit&VERSION=1.1.0&srsName=EPSG:4326&BBOX=4.79,45.69,4.84,45.74&outputFormat=application/json; subtype=geojson'
rsp rasterize --geojson ds/lyon_roofprint.json --cover ds/cover ds/labels
rsp rasterize --type Building --geojson ds/lyon_roofprint.json --cover ds/cover ds/labels
rsp cover --dir ds/images --splits 70/20/10 ds/training/cover ds/validation/cover ds/prediction/cover
rsp subset --dir ds/images --cover ds/training/cover ds/training/images
@@ -75,7 +75,7 @@ Prepare DataSet
To transform the vector roofprints to raster labels:

```bash
rsp rasterize --geojson ds/lyon_roofprint.json --cover ds/cover ds/labels
rsp rasterize --type Building --geojson ds/lyon_roofprint.json --cover ds/cover ds/labels
```

<a href="http://www.datapink.tools/rsp/opendata_to_opendataset/labels/"><img src="img/from_opendata_to_opendataset/labels.png" /></a>
@@ -111,7 +111,7 @@ optional arguments:
-h, --help show this help message and exit
Inputs:
--type TYPE type of feature to extract (e.g building, road) [required]
--type TYPE type of feature to extract (e.g Building, Road) [required]
pbf path to .osm.pbf file [required]
Output:
@@ -147,7 +147,7 @@ Web UI:
```
## rsp rasterize
```
usage: rsp rasterize [-h] [--cover COVER] [--pg_dsn PG_DSN]
usage: rsp rasterize [-h] [--cover COVER] [--pg_dsn PG_DSN] --type TYPE
[--postgis POSTGIS] [--geojson GEOJSON [GEOJSON ...]]
[--config CONFIG] [--ts TS]
[--web_ui_base_url WEB_UI_BASE_URL]
@@ -160,6 +160,7 @@ optional arguments:
Inputs [either --postgis or --geojson is required]:
--cover COVER path to csv tiles cover file [required]
--pg_dsn PG_DSN PostgreSQL connection dsn using psycopg2 syntax [required with --postgis]
--type TYPE type of feature to rasterize (e.g Building, Road) [required]
--postgis POSTGIS SELECT query to retrieve geometry features [e.g SELECT geom FROM table]
--geojson GEOJSON [GEOJSON ...] path to GeoJSON features files
--config CONFIG path to config file [required]
@@ -16,10 +16,8 @@
# Import module
#
def load_module(module):
    """Dynamically import and return the module named by *module*.

    An ImportError from import_module propagates to the caller, where the
    centralized handler in main() reports it (cf #23).
    Raises AssertionError if the import machinery returns a falsy value.
    """
    # Bind to a fresh local instead of rebinding the parameter, so the
    # assertion message still names the module that was requested.
    loaded = import_module(module)
    assert loaded, "Unable to import module {}".format(module)
    return loaded


@@ -36,11 +34,8 @@ def load_config(path):
if not path:
sys.exit("CONFIG ERROR: Either ~/.rsp_config or RSP_CONFIG env var or --config parameter, is required.")

try:
config = toml.load(os.path.expanduser(path))
except:
sys.exit("CONFIG ERROR: Unable to load config file from: {}, check both path and syntax.".format(path))

config = toml.load(os.path.expanduser(path))
assert config, "Unable to parse config file"
config["classes"].insert(0, {"title": "Background", "color": "white"}) # Insert white Background

# Set default values
@@ -57,8 +52,7 @@ def load_config(path):


def check_channels(config):
    """Check that the config declares at least one channel.

    Raises AssertionError otherwise; the centralized handler in main()
    turns it into a user-facing error (cf #23).
    """
    assert "channels" in config.keys(), "At least one Channel is mandatory"

    # TODO Add name check

@@ -70,27 +64,18 @@ def check_channels(config):
def check_classes(config):
    """Check if config file classes subpart is consistent.

    Raises AssertionError on error; the centralized handler in main()
    reports it (cf #23). The former binary-only restriction (exactly two
    classes) was removed to support multilabel rasterization (cf #8).
    """

    assert "classes" in config.keys(), "At least one class is mandatory"

    for classe in config["classes"]:
        assert "title" in classe.keys() and len(classe["title"]), "Missing or Empty classes.title.value"
        assert "color" in classe.keys() and check_color(classe["color"]), "Missing or Invalid classes.color value"


def check_model(config):
    """Check model hyper-parameters: each required key must be present
    and of the expected type. Raises AssertionError on the first failure.
    """

    # Required hyper-parameters mapped to the expected type name.
    hps = {"nn": "str", "pretrained": "bool", "loss": "str", "da": "str"}

    for hp in hps:
        # Include the failing key in the message so the user knows which
        # model.* value to fix (the generic message lost that detail).
        assert hp in config["model"].keys() and type(config["model"][hp]).__name__ == hps[hp], (
            "Missing or Invalid model.{} value".format(hp)
        )


#
@@ -102,25 +87,19 @@ def __init__(self, path, out=sys.stdout):

self.fp = None
self.out = out
try:
if path:
if not os.path.isdir(os.path.dirname(path)):
os.makedirs(os.path.dirname(path), exist_ok=True)
self.fp = open(path, mode="a")
except:
sys.exit("Unable to write in logs directory")
if path:
if not os.path.isdir(os.path.dirname(path)):
os.makedirs(os.path.dirname(path), exist_ok=True)
self.fp = open(path, mode="a")

def log(self, msg):
    """Log a new message to the opened logs file, and optionally on stdout or stderr too.

    Any I/O error propagates to the caller, where the centralized handler
    in main() reports it (cf #23).
    """
    if self.fp:
        self.fp.write(msg + os.linesep)
        self.fp.flush()  # flush per message so logs survive a crash

    if self.out:
        print(msg, file=self.out)


#
@@ -34,4 +34,13 @@ def main():
module.add_parser(subparser, formatter_class=fc)

args = parser.parse_args()
args.func(args)

if "RSP_DEBUG" in os.environ and os.environ["RSP_DEBUG"] == "1":
args.func(args)

else:

try:
args.func(args)
except (Exception) as error:
sys.exit("{}ERROR: {}".format(os.linesep, error))
@@ -41,19 +41,12 @@ def add_parser(subparser, formatter_class):

def main(args):

try:
tiles = list(tiles_from_csv(args.cover))
except:
sys.exit("ERROR: unable to read cover file: {}".format(args.cover))
tiles = list(tiles_from_csv(args.cover))
os.makedirs(os.path.expanduser(args.out), exist_ok=True)

if not args.workers:
args.workers = max(1, math.floor(os.cpu_count() * 0.5))

try:
os.makedirs(os.path.expanduser(args.out), exist_ok=True)
except:
sys.exit("ERROR: unable to create output dir: {}".format(args.out))

log = Logs(os.path.join(args.out, "log"), out=sys.stderr)
log.log("RoboSat.pink - download with {} workers, at max {} req/s, from: {}".format(args.workers, args.rate, args.url))

@@ -1,5 +1,3 @@
import sys

import torch
import torch.onnx
import torch.autograd
@@ -21,11 +19,7 @@ def add_parser(subparser, formatter_class):

def main(args):

try:
chkpt = torch.load(args.checkpoint, map_location=torch.device("cpu"))
assert chkpt["producer_name"] == "RoboSat.pink"
except:
sys.exit("ERROR: Unable to load checkpoint: {}".format(args.checkpoint))
chkpt = torch.load(args.checkpoint, map_location=torch.device("cpu"))

model_module = load_module("robosat_pink.models.{}".format(chkpt["nn"].lower()))
nn = getattr(model_module, chkpt["nn"])(chkpt["shape_in"], chkpt["shape_out"]).to("cpu")
@@ -39,12 +33,9 @@ def main(args):
except AttributeError:
nn.state_dict(chkpt["state_dict"])

try:
batch = torch.rand(1, *chkpt["shape_in"])
if args.type == "onnx":
torch.onnx.export(nn, torch.autograd.Variable(batch), args.out)
batch = torch.rand(1, *chkpt["shape_in"])
if args.type == "onnx":
torch.onnx.export(nn, torch.autograd.Variable(batch), args.out)

if args.type == "jit":
torch.jit.trace(nn, batch).save(args.out)
except:
sys.exit("ERROR: Unable to export model {} in {}.".format(chkpt["uuid"]), args.type)
if args.type == "jit":
torch.jit.trace(nn, batch).save(args.out)
@@ -1,5 +1,4 @@
import os
import sys

from robosat_pink.core import load_module

@@ -8,7 +7,7 @@ def add_parser(subparser, formatter_class):
parser = subparser.add_parser("extract", help="Extracts GeoJSON features from OSM .pbf", formatter_class=formatter_class)

inp = parser.add_argument_group("Inputs")
inp.add_argument("--type", type=str, required=True, help="type of feature to extract (e.g building, road) [required]")
inp.add_argument("--type", type=str, required=True, help="type of feature to extract (e.g Building, Road) [required]")
inp.add_argument("pbf", type=str, help="path to .osm.pbf file [required]")

out = parser.add_argument_group("Output")
@@ -23,13 +22,5 @@ def main(args):

module = load_module("robosat_pink.osm.{}".format(args.type.lower()))
osmium_handler = getattr(module, "{}Handler".format(args.type))()

try:
osmium_handler.apply_file(filename=os.path.expanduser(args.pbf), locations=True)
except:
sys.exit("ERROR: Unable to extract {} from {}".format(args.type, args.pbf))

try:
osmium_handler.save(os.path.expanduser(args.out))
except:
sys.exit("ERROR: Unable to save {} in {}".format(args.type, args.out))
osmium_handler.apply_file(filename=os.path.expanduser(args.pbf), locations=True)
osmium_handler.save(os.path.expanduser(args.out))
@@ -1,5 +1,4 @@
import os
import sys
from tqdm import tqdm

import numpy as np
@@ -56,49 +55,34 @@ def main(args):
log.log("RoboSat.pink - predict on CPU, with {} workers".format(args.workers))
device = torch.device("cpu")

try:
chkpt = torch.load(args.checkpoint, map_location=device)
assert chkpt["producer_name"] == "RoboSat.pink"
model_module = load_module("robosat_pink.models.{}".format(chkpt["nn"].lower()))
nn = getattr(model_module, chkpt["nn"])(chkpt["shape_in"], chkpt["shape_out"]).to(device)
nn = torch.nn.DataParallel(nn)
nn.load_state_dict(chkpt["state_dict"])
nn.eval()
except:
sys.exit("ERROR: Unable to load {} checkpoint.".format(args.checkpoint))
chkpt = torch.load(args.checkpoint, map_location=device)
model_module = load_module("robosat_pink.models.{}".format(chkpt["nn"].lower()))
nn = getattr(model_module, chkpt["nn"])(chkpt["shape_in"], chkpt["shape_out"]).to(device)
nn = torch.nn.DataParallel(nn)
nn.load_state_dict(chkpt["state_dict"])
nn.eval()

log.log("Model {} - UUID: {}".format(chkpt["nn"], chkpt["uuid"]))

loader_module = load_module("robosat_pink.loaders.{}".format(chkpt["loader"].lower()))
loader_predict = getattr(loader_module, chkpt["loader"])(config, chkpt["shape_in"][1:3], args.dataset, mode="predict")

loader = DataLoader(loader_predict, batch_size=args.bs, num_workers=args.workers)
if not len(loader):
sys.exit("ERROR: Empty predict dataset directory. Check your path.")
assert len(loader), "Empty predict dataset directory. Check your path."

with torch.no_grad(): # don't track tensors with autograd during prediction

for images, tiles in tqdm(loader, desc="Eval", unit="batch", ascii=True):

images = images.to(device)

try:
outputs = nn(images)
probs = torch.nn.functional.softmax(outputs, dim=1).data.cpu().numpy()
except:
log.log("WARNING: Skipping batch:")
for tile, prob in zip(tiles, probs):
log.log(" - {}".format(str(tile)))
continue
outputs = nn(images)
probs = torch.nn.functional.softmax(outputs, dim=1).data.cpu().numpy()

for tile, prob in zip(tiles, probs):

try:
x, y, z = list(map(int, tile))
mask = np.around(prob[1:, :, :]).astype(np.uint8).squeeze()
tile_label_to_file(args.out, mercantile.Tile(x, y, z), palette, mask)
except:
log.log("WARNING: Skipping tile {}".format(str(tile)))
x, y, z = list(map(int, tile))
mask = np.around(prob[1:, :, :]).astype(np.uint8).squeeze()
tile_label_to_file(args.out, mercantile.Tile(x, y, z), palette, mask)

if not args.no_web_ui:
template = "leaflet.html" if not args.web_ui_template else args.web_ui_template
Oops, something went wrong.

0 comments on commit ee9d0fe

Please sign in to comment.
You can’t perform that action at this time.