make optional args keyword-only #1134

Merged (8 commits) on Feb 18, 2024
7 changes: 5 additions & 2 deletions CHANGELOG.md
@@ -2,20 +2,23 @@

## 2.0.0 (in development)

Read the v2 [migration guide](https://github.com/gboeing/osmnx/issues/1123).
Read the v2 [migration guide](https://github.com/gboeing/osmnx/issues/1123)

- add type annotations to all public and private functions throughout package (#1107)
- remove functionality previously deprecated in v1 (#1113 #1122)
- drop Python 3.8 support (#1106)
- improve docstrings throughout package (#1116)
- improve logging and warnings throughout package (#1125)
- improve error messages throughout package (#1131)
- increase add_node_elevations_google default batch_size to 512 to match Google's limit (#1115)
- make optional function parameters keyword-only throughout package (#1134)
- make dist function parameter required rather than optional throughout package (#1134)
- make which_result function parameter consistently able to accept a list throughout package (#1113)
- make utils_geo.bbox_from_point function return a tuple of floats for consistency with rest of package (#1113)
- change add_node_elevations_google default batch_size to 512 to match Google's limit (#1115)
- fix bug in \_downloader.\_save_to_cache function usage (#1107)
- fix bug in handling requests ConnectionError when querying Overpass status endpoint (#1113)
- fix minor bugs throughout to address inconsistencies revealed by type enforcement (#1107 #1114)
- rename truncate.truncate_graph_dist max_dist argument to dist (#1134)
- rename settings module's default_accept_language, default_referer, and default_user_agent (#1129)
- rename osm_xml module to \_osm_xml to make it private, as all its functions are private (#1113)
- rename private \_downloader module to \_http (#1114)
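For context on the changelog entry "make optional function parameters keyword-only throughout package (#1134)": Python marks keyword-only parameters with a bare `*` in the signature. A minimal, generic sketch of the pattern (the function name and parameters here are illustrative, not osmnx's):

```python
def fetch_features(query: str, *, dist: float = 1000, retain_all: bool = False) -> dict:
    """Everything after the bare * can only be passed by keyword."""
    return {"query": query, "dist": dist, "retain_all": retain_all}

fetch_features("cafes", dist=500)  # OK: optional args passed by keyword
# fetch_features("cafes", 500)     # TypeError: takes 1 positional argument but 2 were given
```

Passing options by keyword keeps call sites readable and lets the library add or reorder parameters without silently breaking positional callers.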
23 changes: 9 additions & 14 deletions osmnx/_http.py
@@ -25,7 +25,7 @@
def _save_to_cache(
url: str,
response_json: dict[str, Any] | list[dict[str, Any]],
ok: bool,
ok: bool, # noqa: FBT001
) -> None:
"""
Save a HTTP response JSON object to a file in the cache folder.
@@ -100,25 +100,22 @@ def _url_in_cache(url: str) -> Path | None:
return cache_filepath if cache_filepath.is_file() else None


def _retrieve_from_cache(
url: str,
check_remark: bool = True,
) -> dict[str, Any] | list[dict[str, Any]] | None:
def _retrieve_from_cache(url: str) -> dict[str, Any] | list[dict[str, Any]] | None:
"""
Retrieve a HTTP response JSON object from the cache if it exists.
Returns None if there is a server remark in the cached response.
Parameters
----------
url
The URL of the request.
check_remark
If True, only return filepath if cached response does not have a
remark key indicating a server warning.
Returns
-------
response_json
Cached response for `url` if it exists in the cache, otherwise None.
Cached response for `url` if it exists in the cache and does not
contain a server remark, otherwise None.
"""
# if the tool is configured to use the cache
if settings.use_cache:
@@ -129,11 +126,8 @@ def _retrieve_from_cache(
cache_filepath.read_text(encoding="utf-8"),
)

# return None if check_remark is True and there is a server
# remark in the cached response
if (
check_remark and isinstance(response_json, dict) and "remark" in response_json
): # pragma: no cover
# return None if there is a server remark in the cached response
if isinstance(response_json, dict) and ("remark" in response_json): # pragma: no cover
msg = (
f"Ignoring cache file {str(cache_filepath)!r} because "
f"it contains a remark: {response_json['remark']!r}"
@@ -149,6 +143,7 @@


def _get_http_headers(
*,
user_agent: str | None = None,
referer: str | None = None,
accept_language: str | None = None,
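The `# noqa: FBT001` comments added above appear to suppress Ruff's flake8-boolean-trap rule (FBT001, boolean-typed positional argument) for the few private functions whose booleans intentionally stay positional. A small illustrative sketch of the "boolean trap" that rule targets (not osmnx code):

```python
def save_to_cache(url: str, payload: dict, ok: bool) -> None:  # a linter would flag `ok` here
    print(f"{url}: ok={ok}, items={len(payload)}")

save_to_cache("https://example.com", {}, False)     # what does False control, at a glance?
save_to_cache("https://example.com", {}, ok=False)  # the keyword call makes the intent clear
```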
9 changes: 8 additions & 1 deletion osmnx/_nominatim.py
@@ -17,6 +17,7 @@

def _download_nominatim_element(
query: str | dict[str, str],
*,
by_osmid: bool = False,
limit: int = 1,
polygon_geojson: bool = True,
@@ -77,6 +78,7 @@ def _download_nominatim_element(

def _nominatim_request(
params: OrderedDict[str, int | str],
*,
request_type: str = "search",
pause: float = 1,
error_pause: float = 60,
@@ -141,7 +143,12 @@ def _nominatim_request(
)
utils.log(msg, level=lg.WARNING)
time.sleep(error_pause)
return _nominatim_request(params, request_type, pause, error_pause)
return _nominatim_request(
params,
request_type=request_type,
pause=pause,
error_pause=error_pause,
)

response_json = _http._parse_response(response)
if not isinstance(response_json, list):
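The `_nominatim_request` hunk shows the knock-on effect of keyword-only parameters on retry-by-recursion: the recursive call must now forward its options by name. A self-contained toy sketch of that pattern (the rate-limit condition is simulated and none of these names are osmnx's):

```python
import time

def fetch(params: dict, *, pause: float = 1.0, error_pause: float = 5.0, _attempt: int = 0) -> dict:
    """Pretend the first attempt is rate-limited, then sleep and retry via recursion."""
    if _attempt == 0:
        time.sleep(error_pause)
        # keyword-only options must be forwarded by name in the recursive retry call
        return fetch(params, pause=pause, error_pause=error_pause, _attempt=_attempt + 1)
    return {"ok": True, "params": params}

print(fetch({"q": "test"}, error_pause=0.1))
```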
6 changes: 3 additions & 3 deletions osmnx/_osm_xml.py
@@ -126,8 +126,8 @@ def _save_graph_xml( # noqa: PLR0913
node_attrs: list[str],
edge_tags: list[str],
edge_attrs: list[str],
oneway: bool,
merge_edges: bool,
oneway: bool, # noqa: FBT001
merge_edges: bool, # noqa: FBT001
edge_tag_aggs: list[tuple[str, str]] | None,
api_version: str,
precision: int,
@@ -416,7 +416,7 @@ def _append_edges_xml_tree(
edge_attrs: list[str],
edge_tags: list[str],
edge_tag_aggs: list[tuple[str, str]] | None,
merge_edges: bool,
merge_edges: bool, # noqa: FBT001
) -> Element:
"""
Append edges to an XML tree.
4 changes: 3 additions & 1 deletion osmnx/_overpass.py
@@ -116,6 +116,7 @@ def _get_network_filter(network_type: str) -> str:

def _get_overpass_pause(
base_endpoint: str,
*,
recursive_delay: float = 5,
default_duration: float = 60,
) -> float:
@@ -390,6 +391,7 @@ def _download_overpass_features(

def _overpass_request(
data: OrderedDict[str, Any],
*,
pause: float | None = None,
error_pause: float = 60,
) -> dict[str, Any]:
@@ -449,7 +451,7 @@ def _overpass_request(
)
utils.log(msg, level=lg.WARNING)
time.sleep(this_pause)
return _overpass_request(data, pause, error_pause)
return _overpass_request(data, pause=pause, error_pause=error_pause)

response_json = _http._parse_response(response)
if not isinstance(response_json, dict): # pragma: no cover
11 changes: 6 additions & 5 deletions osmnx/bearing.py
@@ -125,6 +125,7 @@ def add_edge_bearings(G: nx.MultiDiGraph) -> nx.MultiDiGraph:

def orientation_entropy(
Gu: nx.MultiGraph,
*,
num_bins: int = 36,
min_length: float = 0,
weight: str | None = None,
@@ -148,7 +149,7 @@ def orientation_entropy(
Number of bins. For example, if `num_bins=36` is provided, then each
bin will represent 10 degrees around the compass.
min_length
Ignore edges with `length` attributes less than `min_length`. Useful
Ignore edges with "length" attributes less than `min_length`. Useful
to ignore the noise of many very short edges.
weight
If not None, weight edges' bearings by this (non-null) edge attribute.
@@ -171,8 +172,8 @@

def _extract_edge_bearings(
Gu: nx.MultiGraph,
min_length: float = 0,
weight: str | None = None,
min_length: float,
weight: str | None,
) -> npt.NDArray[np.float64]:
"""
Extract undirected graph's bidirectional edge bearings.
@@ -222,8 +223,8 @@ def _extract_edge_bearings(
def _bearings_distribution(
Gu: nx.MultiGraph,
num_bins: int,
min_length: float = 0,
weight: str | None = None,
min_length: float,
weight: str | None,
) -> tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]:
"""
Compute distribution of bearings across evenly spaced bins.
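The bearing.py hunks illustrate a split this PR appears to apply repeatedly: the public function (`orientation_entropy`) keeps keyword-only options with defaults, while the private helpers (`_extract_edge_bearings`, `_bearings_distribution`) now require those values, so each default is defined in exactly one place. A generic, runnable sketch of that division of labor (a toy entropy over angle bins, not osmnx's implementation):

```python
import math

def entropy(bearings: list[float], *, num_bins: int = 36) -> float:
    """Public API: the option is keyword-only and carries the default."""
    return _entropy(bearings, num_bins)

def _entropy(bearings: list[float], num_bins: int) -> float:
    """Private helper: the option is required, so the default lives in one place."""
    counts = [0] * num_bins
    for b in bearings:
        counts[int(b // (360 / num_bins)) % num_bins] += 1
    total = sum(counts)
    probs = [c / total for c in counts if c > 0]
    return -sum(p * math.log(p) for p in probs)

print(entropy([10.0, 95.0, 185.0, 275.0], num_bins=36))  # Shannon entropy of the binned angles
```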
8 changes: 7 additions & 1 deletion osmnx/distance.py
@@ -172,6 +172,7 @@ def euclidean(

def add_edge_lengths(
G: nx.MultiDiGraph,
*,
edges: Iterable[tuple[int, int, int]] | None = None,
) -> nx.MultiDiGraph:
"""
@@ -240,7 +241,7 @@ def nearest_nodes(G: nx.MultiDiGraph, X: float, Y: float) -> int:

# if X and Y are floats and return_dist is provided/False
@overload # pragma: no cover
def nearest_nodes(G: nx.MultiDiGraph, X: float, Y: float, return_dist: Literal[False]) -> int:
def nearest_nodes(G: nx.MultiDiGraph, X: float, Y: float, *, return_dist: Literal[False]) -> int:
...


@@ -250,6 +251,7 @@ def nearest_nodes(
G: nx.MultiDiGraph,
X: float,
Y: float,
*,
return_dist: Literal[True],
) -> tuple[npt.NDArray[np.int64], npt.NDArray[np.float64]]:
...
@@ -271,6 +273,7 @@
G: nx.MultiDiGraph,
X: Iterable[float],
Y: Iterable[float],
*,
return_dist: Literal[False],
) -> npt.NDArray[np.int64]:
...
@@ -282,6 +285,7 @@ def nearest_nodes(
G: nx.MultiDiGraph,
X: Iterable[float],
Y: Iterable[float],
*,
return_dist: Literal[True],
) -> tuple[npt.NDArray[np.int64], npt.NDArray[np.float64]]:
...
@@ -291,6 +295,7 @@
G: nx.MultiDiGraph,
X: float | Iterable[float],
Y: float | Iterable[float],
*,
return_dist: bool = False,
) -> (
int
@@ -453,6 +458,7 @@ def nearest_edges(
G: nx.MultiDiGraph,
X: float | Iterable[float],
Y: float | Iterable[float],
*,
return_dist: bool = False,
) -> (
tuple[int, int, int]
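In distance.py the `*` is also threaded through the `typing.overload` stubs, so each overload's `Literal` type for `return_dist` still matches the keyword-only runtime signature. A condensed, generic sketch of why that pairing matters to a type checker (a toy function, not the real `nearest_nodes`):

```python
from __future__ import annotations

from typing import Literal, overload

@overload
def nearest(x: float, *, return_dist: Literal[True]) -> tuple[int, float]: ...
@overload
def nearest(x: float, *, return_dist: Literal[False] = ...) -> int: ...

def nearest(x: float, *, return_dist: bool = False) -> int | tuple[int, float]:
    """The Literal overloads let a type checker narrow the return type per call."""
    node = round(x)
    return (node, abs(x - node)) if return_dist else node

n = nearest(3.7)                        # checker infers int
n2, d = nearest(3.7, return_dist=True)  # checker infers tuple[int, float]
print(n, n2, d)
```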
4 changes: 3 additions & 1 deletion osmnx/elevation.py
@@ -33,7 +33,7 @@
gdal = None


def add_edge_grades(G: nx.MultiDiGraph, add_absolute: bool = True) -> nx.MultiDiGraph:
def add_edge_grades(G: nx.MultiDiGraph, *, add_absolute: bool = True) -> nx.MultiDiGraph:
"""
Calculate and add `grade` attributes to all graph edges.
@@ -108,6 +108,7 @@ def _query_raster(
def add_node_elevations_raster(
G: nx.MultiDiGraph,
filepath: str | Path | Iterable[str | Path],
*,
band: int = 1,
cpus: int | None = None,
) -> nx.MultiDiGraph:
@@ -173,6 +174,7 @@ def add_node_elevations_raster(

def add_node_elevations_google(
G: nx.MultiDiGraph,
*,
api_key: str | None = None,
batch_size: int = 512,
pause: float = 0,
26 changes: 14 additions & 12 deletions osmnx/features.py
@@ -122,14 +122,14 @@ def features_from_bbox(
gdf
"""
# convert bbox to polygon then create GeoDataFrame of features within it
polygon = utils_geo.bbox_to_poly(bbox=bbox)
return features_from_polygon(polygon, tags=tags)
polygon = utils_geo.bbox_to_poly(bbox)
return features_from_polygon(polygon, tags)


def features_from_point(
center_point: tuple[float, float],
tags: dict[str, bool | str | list[str]],
dist: float = 1000,
dist: float,
) -> gpd.GeoDataFrame:
"""
Create GeoDataFrame of OSM features within some distance N, S, E, W of a point.
@@ -168,13 +168,13 @@
"""
# create bbox from point and dist, then create gdf of features within it
bbox = utils_geo.bbox_from_point(center_point, dist)
return features_from_bbox(bbox, tags=tags)
return features_from_bbox(bbox, tags)


def features_from_address(
address: str,
tags: dict[str, bool | str | list[str]],
dist: float = 1000,
dist: float,
) -> gpd.GeoDataFrame:
"""
Create GeoDataFrame of OSM features within some distance N, S, E, W of address.
@@ -210,13 +210,14 @@
gdf
"""
# geocode the address to a point, then create gdf of features around it
center_point = geocoder.geocode(query=address)
return features_from_point(center_point, tags=tags, dist=dist)
center_point = geocoder.geocode(address)
return features_from_point(center_point, tags, dist)


def features_from_place(
query: str | dict[str, str] | list[str | dict[str, str]],
tags: dict[str, bool | str | list[str]],
*,
which_result: int | None | list[int | None] = None,
) -> gpd.GeoDataFrame:
"""
@@ -331,8 +332,9 @@

def features_from_xml(
filepath: str | Path,
polygon: Polygon | MultiPolygon | None = None,
*,
tags: dict[str, bool | str | list[str]] | None = None,
polygon: Polygon | MultiPolygon | None = None,
encoding: str = "utf-8",
) -> gpd.GeoDataFrame:
"""
@@ -349,8 +351,6 @@
----------
filepath
Path to file containing OSM XML data.
polygon
Optional spatial boundaries to filter elements.
tags
Optional dict of tags for filtering elements from the XML. Results
returned are the union, not intersection of each individual tag.
@@ -363,6 +363,8 @@
the area. `tags = {'amenity':True, 'landuse':['retail','commercial'],
'highway':'bus_stop'}` would return all amenities, landuse=retail,
landuse=commercial, and highway=bus_stop.
polygon
Optional spatial boundaries to filter elements.
encoding
The XML file's character encoding.
@@ -372,7 +374,7 @@
"""
# transmogrify OSM XML file to JSON then create GeoDataFrame from it
response_jsons = [_osm_xml._overpass_json_from_file(filepath, encoding)]
return _create_gdf(response_jsons, polygon=polygon, tags=tags)
return _create_gdf(response_jsons, polygon, tags)


def _create_gdf( # noqa: PLR0912
@@ -395,7 +397,7 @@
polygon
Optional spatial boundaries to filter final GeoDataFrame.
tags
Optioanl dict of tags to filter the final GeoDataFrame.
Optional dict of tags to filter the final GeoDataFrame.
Returns
-------
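In features.py, `dist` loses its default on `features_from_point` and `features_from_address` (the changelog's "make dist function parameter required"), and the internal pass-through calls drop their redundant keyword forms. A hedged usage sketch of the resulting call, assuming Overpass network access and the signature shown in this diff:

```python
import osmnx as ox

tags = {"amenity": "cafe"}
center = (37.7749, -122.4194)  # (lat, lng)

# dist must now be given explicitly; there is no longer a 1000 m default
gdf = ox.features_from_point(center, tags, dist=500)
print(len(gdf), "features found")
```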
3 changes: 2 additions & 1 deletion osmnx/geocoder.py
@@ -62,6 +62,7 @@ def geocode(query: str) -> tuple[float, float]:

def geocode_to_gdf(
query: str | dict[str, str] | list[str | dict[str, str]],
*,
which_result: int | None | list[int | None] = None,
by_osmid: bool = False,
) -> gpd.GeoDataFrame:
@@ -131,7 +132,7 @@ def geocode_to_gdf(
def _geocode_query_to_gdf(
query: str | dict[str, str],
which_result: int | None,
by_osmid: bool,
by_osmid: bool, # noqa: FBT001
) -> gpd.GeoDataFrame:
"""
Geocode a single place query to a GeoDataFrame.
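Correspondingly, `geocode_to_gdf`'s `which_result` and `by_osmid` become keyword-only, while the private `_geocode_query_to_gdf` keeps them positional (with a noqa for the boolean). A hedged usage sketch, assuming Nominatim network access:

```python
import osmnx as ox

# options must be passed by keyword after this PR
gdf = ox.geocode_to_gdf("Manhattan, New York, USA", which_result=1)
print(gdf.loc[0, "display_name"])

# querying by OSM ID: by_osmid is keyword-only too ("R1234567" is a placeholder id)
# gdf2 = ox.geocode_to_gdf("R1234567", by_osmid=True)
```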