-
Notifications
You must be signed in to change notification settings - Fork 23
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
5 changed files
with
317 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,145 @@ | ||
from functools import partial
from typing import Dict, List

import pandas as pd
import pyproj
from shapely.geometry import LineString, MultiPoint, Point, shape
from shapely.ops import linemerge, split, transform

from .paths import _generate_random_name
|
||
|
||
def generate_meter_projected_chunks(
        route_shape: LineString,
        stop_distance_distribution: int) -> List[LineString]:
    """Split a lat/lon route shape into roughly equal-length segments.

    Reprojects the EPSG:4326 route into a meter-based projection
    (EPSG:2163), then splits it at evenly spaced interpolation points so
    each chunk is approximately ``stop_distance_distribution`` meters long.

    Args:
        route_shape: Route geometry in EPSG:4326 (lon/lat) coordinates.
        stop_distance_distribution: Target distance between stops, in meters.

    Returns:
        List of LineString chunks in projected (meter) coordinates.
    """
    # Reproject 4326 lat/lon coordinates to equal area
    # NOTE(review): pyproj's init= keyword is deprecated in pyproj >= 2;
    # kept as-is here to avoid changing behavior on the pinned version.
    project = partial(
        pyproj.transform,
        pyproj.Proj(init='epsg:4326'),  # source coordinate system
        pyproj.Proj(init='epsg:2163'))  # destination coordinate system

    rs2 = transform(project, route_shape)  # apply projection
    stop_count = round(rs2.length / stop_distance_distribution)

    # Create the array of break points/joints
    mp_array = []
    for i in range(1, stop_count):
        fr = (i / stop_count)
        mp_array.append(rs2.interpolate(fr, normalized=True))

    # Cast array as a Shapely object
    splitter = MultiPoint(mp_array)

    # 1 meter buffer to address floating point discrepancies
    chunks = split(rs2, splitter.buffer(1))

    # TODO: Potential for length errors with this 1 meter
    #       threshold check

    # Take chunks and merge in the small lines
    # from intersection inside of the buffered circles
    # and attach to nearest larger line
    clean_chunks = [chunks[0]]
    r = len(chunks)
    for c in range(1, r):
        latest = clean_chunks[-1]
        current = chunks[c]
        # Again, this is a weak point of the 1 meter buffer method
        if latest.length <= 2:
            # Merge in the small chunks with the larger chunks
            clean_chunks[-1] = linemerge([latest, current])
        else:
            clean_chunks.append(current)

    # Bug fix: the original fell off the end and implicitly returned
    # None, contradicting the List[LineString] return annotation.
    return clean_chunks
|
||
|
||
def generate_stop_points(
        route_shape: LineString,
        stop_distance_distribution: int):
    """Return stop locations along a route shape.

    Produces the route's first and last points plus interior points
    interpolated at even fractions of the total length, spaced roughly
    ``stop_distance_distribution`` units apart.
    """
    # Rename variable for brevity
    rs = route_shape

    # Interior break points/joints at even fractions of route length
    stop_count = round(rs.length / stop_distance_distribution)
    interior = [
        rs.interpolate(i / stop_count, normalized=True)
        for i in range(1, stop_count)
    ]

    # Resulting array is composed of the first and last point, plus
    # the splitter points in the middle
    return [Point(rs.coords[0])] + interior + [Point(rs.coords[-1])]
|
||
|
||
def generate_stop_ids(stops_count: int) -> List[str]:
    """Generate ``stops_count`` unique stop ids of the form '<name>_<i>'.

    A single random 5-character name is drawn once, and each stop id is
    that name suffixed with its positional index.

    Args:
        stops_count: Number of stop ids to generate.

    Returns:
        List of stop id strings.
    """
    shape_name = _generate_random_name(5)
    stop_names = []
    for i in range(stops_count):
        # Bug fix: str.join requires strings; joining the int index
        # directly raised TypeError in the original.
        stop_names.append('_'.join([shape_name, str(i)]))
    return stop_names
|
||
|
||
def generate_nodes_df(
        stop_ids: List[str],
        all_points: List[Point],
        headway: float) -> pd.DataFrame:
    """Build the nodes DataFrame for a synthetic route.

    Args:
        stop_ids: One id per stop, aligned positionally with all_points.
        all_points: Stop locations as points where x is longitude and
            y is latitude (standard shapely lon/lat convention).
        headway: Service headway; the average wait cost at each stop is
            modeled as half the headway (uniform arrivals).

    Returns:
        DataFrame with stop_id, avg_cost, stop_lat, stop_lon columns.
    """
    avg_costs = []
    stop_lats = []
    stop_lons = []

    # Average wait under uniform passenger arrivals is half the headway
    default_avg_cost = headway / 2

    for point in all_points:
        avg_costs.append(default_avg_cost)
        # Bug fix: points are (x=lon, y=lat); the original wrote x into
        # stop_lat and y into stop_lon, swapping latitude and longitude.
        stop_lats.append(point.y)
        stop_lons.append(point.x)

    nodes_df = pd.DataFrame({
        'stop_id': stop_ids,
        'avg_cost': avg_costs,
        'stop_lat': stop_lats,
        'stop_lon': stop_lons,
    })

    return nodes_df
|
||
|
||
def generate_edges_df(
        stop_ids: List[str],
        all_points: List[Point],
        chunks: List[LineString],
        avg_speed: float) -> pd.DataFrame:
    """Build the edges DataFrame connecting consecutive stops.

    Args:
        stop_ids: Ordered stop ids along the route.
        all_points: Stop locations (kept for interface compatibility).
        chunks: Route sub-segments between consecutive stops; lengths
            are assumed to be in meters.
        avg_speed: Average traversal speed in kmph.

    Returns:
        DataFrame with from_stop_id, to_stop_id, edge_cost columns,
        where edge_cost is the traversal time in hours.

    Raises:
        Exception: If the number of chunks does not match the number of
            consecutive stop pairs.
    """
    from_stop_ids = []
    to_stop_ids = []
    edge_costs = []

    paired_nodes = list(zip(stop_ids[:-1], stop_ids[1:]))

    # Sanity check: exactly one chunk per consecutive pair of stops
    if not len(chunks) == len(paired_nodes):
        raise Exception('Chunking operation did not result '
                        'correct route shape subdivisions.')

    for i, nodes in enumerate(paired_nodes):
        point_a = nodes[0]
        point_b = nodes[1]
        from_stop_ids.append(point_a)
        to_stop_ids.append(point_b)

        # Estimate the amount of time it would take to traverse this
        # portion of the route path given the default speed.
        # Bug fix: the original referenced the undefined name
        # `clean_chunks` (NameError); the segments are the `chunks`
        # parameter. Also renamed the ambiguous local `l`.
        km_len = chunks[i].length / 1000  # distance in km
        # Note: Average speed is to be supplied in kmph
        edge_costs.append(km_len / avg_speed)

    edges_df = pd.DataFrame({
        'from_stop_id': from_stop_ids,
        'to_stop_id': to_stop_ids,
        'edge_cost': edge_costs,
    })

    return edges_df
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,55 @@ | ||
{ | ||
"type": "FeatureCollection", | ||
"features": [ | ||
{ | ||
"type": "Feature", | ||
"properties": { | ||
"headway": 15 * 60, # 15 min in seconds | ||
"average_speed": 16, # 16 kmph, approx. 10 mph | ||
"stop_distance_distribution": 402, # 1/4 mile in meters | ||
}, | ||
"geometry": { | ||
"type": "LineString", | ||
"coordinates": [ | ||
[ -122.29469776153564, 37.8044860626114 ], | ||
[ -122.29392528533934, 37.80426566213625 ], | ||
[ -122.29358196258545, 37.80513878323706 ], | ||
[ -122.29306697845459, 37.80623228927919 ], | ||
[ -122.29246616363524, 37.807647889331456 ], | ||
[ -122.29159712791443, 37.810165386451494 ], | ||
[ -122.29047060012817, 37.81318287932694 ], | ||
[ -122.28938698768616, 37.81620872446383 ], | ||
[ -122.28916168212889, 37.816912194500276 ], | ||
[ -122.28907585144043, 37.81802247694201 ], | ||
[ -122.28902220726013, 37.819548028632376 ], | ||
[ -122.28882908821106, 37.82224309289828 ], | ||
[ -122.28875935077666, 37.823077864855556 ], | ||
[ -122.28878617286682, 37.82341049978138 ], | ||
[ -122.28884518146513, 37.823683810158975 ], | ||
[ -122.2890356183052, 37.82415415588092 ], | ||
[ -122.2892314195633, 37.82452068447801 ], | ||
[ -122.28957742452623, 37.82532576980349 ], | ||
[ -122.28974103927614, 37.82598889927767 ], | ||
[ -122.28969007730483, 37.82657575329904 ], | ||
[ -122.28945404291152, 37.82708209652004 ], | ||
[ -122.28891491889954, 37.827615977659555 ], | ||
[ -122.28801369667053, 37.82809477253903 ], | ||
[ -122.28726267814636, 37.828234597006194 ], | ||
[ -122.28492379188538, 37.82870491372413 ], | ||
[ -122.28534221649169, 37.82985315185641 ], | ||
[ -122.27834701538086, 37.83131066825093 ], | ||
[ -122.27716684341429, 37.83116661268019 ], | ||
[ -122.27407693862915, 37.83062428330309 ], | ||
[ -122.27100849151611, 37.83016668979049 ], | ||
[ -122.26765036582945, 37.829607404976734 ], | ||
[ -122.26542949676514, 37.82929386466607 ], | ||
[ -122.26442098617554, 37.82910743466058 ], | ||
[ -122.26525783538818, 37.82648041632065 ], | ||
[ -122.26620197296141, 37.826751596736095 ], | ||
[ -122.26907730102539, 37.82719226278575 ], | ||
[ -122.26847648620605, 37.82971756747229 ] | ||
] | ||
} | ||
} | ||
] | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters