Rehab or remove some of the older notebooks; remove sys.path hack. (#231)

vengroff committed Jan 26, 2024
1 parent 6572e52 commit 1c2b0b0
Showing 29 changed files with 690 additions and 1,408 deletions.
censusdis/geography.py (12 changes: 6 additions & 6 deletions)

@@ -206,9 +206,9 @@ def _fetch_path_specs(dataset: str, year: int) -> Dict[str, "PathSpec"]:
             f"Census API request to {request.url} failed with status {request.status_code}. {request.text}"
         )
 
-    _PATH_SPECS_BY_DATASET_YEAR: DefaultDict[
-        str, Dict[int, Dict[str, "PathSpec"]]
-    ] = defaultdict(dict)
+    _PATH_SPECS_BY_DATASET_YEAR: DefaultDict[str, Dict[int, Dict[str, "PathSpec"]]] = (
+        defaultdict(dict)
+    )
 
     _PATH_SPEC_SNAKE_MAP: DefaultDict[str, Dict[int, Dict[str, str]]] = defaultdict(
         dict
@@ -221,9 +221,9 @@ def _fetch_path_specs(dataset: str, year: int) -> Dict[str, "PathSpec"]:
     def get_path_specs(dataset: str, vintage: int) -> Dict[str, "PathSpec"]:
         """Get all the path specifications for the given dataset and vintage."""
         if vintage not in PathSpec._PATH_SPECS_BY_DATASET_YEAR[dataset]:
-            PathSpec._PATH_SPECS_BY_DATASET_YEAR[dataset][
-                vintage
-            ] = PathSpec._fetch_path_specs(dataset, vintage)
+            PathSpec._PATH_SPECS_BY_DATASET_YEAR[dataset][vintage] = (
+                PathSpec._fetch_path_specs(dataset, vintage)
+            )
             PathSpec._PATH_SPEC_SNAKE_MAP[dataset][vintage] = {
                 component.replace(" ", "_")
                 .replace("/", "_")
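
Both hunks above are formatting-only rewrites: instead of splitting the annotated assignment target across several lines, the right-hand side is wrapped in parentheses. This matches the assignment style that newer formatter releases (e.g. Black 24.x, current around the date of this commit) produce, so these hunks look like a formatter re-run rather than a behavior change. A minimal sketch of the pattern, using a hypothetical `_CACHE` name:

```python
from collections import defaultdict
from typing import DefaultDict, Dict

# Old style: the annotated target is exploded across lines.
_CACHE_OLD: DefaultDict[
    str, Dict[int, str]
] = defaultdict(dict)

# New style: the target stays on one line and the right-hand side
# is wrapped in parentheses instead.
_CACHE_NEW: DefaultDict[str, Dict[int, str]] = (
    defaultdict(dict)
)

# The two spellings compile to the same thing.
assert _CACHE_OLD == _CACHE_NEW
```
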
censusdis/impl/geometry.py (10 changes: 5 additions & 5 deletions)

@@ -119,11 +119,11 @@ def drop_slivers_from_geo_series(
         The series with all slivers removed.
     """
     return gs_geo.map(
-        lambda s: drop_slivers_multi_polygon(s, threshold)
-        if isinstance(s, MultiPolygon)
-        else drop_polygon_if_sliver(s, threshold)
-        if isinstance(s, Polygon)
-        else s
+        lambda s: (
+            drop_slivers_multi_polygon(s, threshold)
+            if isinstance(s, MultiPolygon)
+            else drop_polygon_if_sliver(s, threshold) if isinstance(s, Polygon) else s
+        )
     )
 
 
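
The reformatted lambda chains two conditional expressions to dispatch on geometry type. Unrolled into a named function, the same logic reads as below; this is a sketch that assumes the two `drop_slivers_*` helpers are importable from `censusdis.impl.geometry`, where the diff shows them being called:

```python
from shapely.geometry import MultiPolygon, Polygon

# Assumption: these helpers live in the same module the diff touches.
from censusdis.impl.geometry import (
    drop_polygon_if_sliver,
    drop_slivers_multi_polygon,
)


def drop_slivers(s, threshold: float):
    """Spelled-out equivalent of the lambda in the diff above."""
    if isinstance(s, MultiPolygon):
        return drop_slivers_multi_polygon(s, threshold)
    if isinstance(s, Polygon):
        return drop_polygon_if_sliver(s, threshold)
    return s  # Points, lines, and other geometries pass through unchanged.
```
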
censusdis/impl/us_census_shapefiles.py (8 changes: 5 additions & 3 deletions)

@@ -149,9 +149,11 @@
             None,
             "vtd",
             ["STATE", "COUNTY", "VOTING_DISTRICT"],
-            ["STATEFP20", "COUNTYFP20", "VTDST20"]
-            if year >= 2020
-            else ["STATEFP10", "COUNTYFP10", "VTDST10"],
+            (
+                ["STATEFP20", "COUNTYFP20", "VTDST20"]
+                if year >= 2020
+                else ["STATEFP10", "COUNTYFP10", "VTDST10"]
+            ),
         ),
         # This one could be a little dangerous if subminor civil
         # divisions exist in states and are not mapped as subbarrios.
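
The parenthesized conditional selects shapefile columns by vintage: TIGER/Line voting-district files suffix their column names with the census year, so 2020-era files carry STATEFP20/COUNTYFP20/VTDST20 while 2010-era files carry the ...10 equivalents. A standalone sketch of the selection (the `vtd_columns` helper is hypothetical, not part of censusdis):

```python
from typing import List


def vtd_columns(year: int) -> List[str]:
    """Pick voting-district column names for a shapefile vintage."""
    return (
        ["STATEFP20", "COUNTYFP20", "VTDST20"]
        if year >= 2020
        else ["STATEFP10", "COUNTYFP10", "VTDST10"]
    )


assert vtd_columns(2022) == ["STATEFP20", "COUNTYFP20", "VTDST20"]
assert vtd_columns(2016) == ["STATEFP10", "COUNTYFP10", "VTDST10"]
```
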
censusdis/impl/varcache.py (20 changes: 11 additions & 9 deletions)

@@ -31,12 +31,12 @@ def __init__(self, *, variable_source: Optional[VariableSource] = None):
             variable_source = CensusApiVariableSource()
 
         self._variable_source = variable_source
-        self._variable_cache: DefaultDict[
-            str, DefaultDict[int, Dict[str, Any]]
-        ] = defaultdict(lambda: defaultdict(dict))
-        self._group_cache: DefaultDict[
-            str, DefaultDict[int, Dict[str, Any]]
-        ] = defaultdict(lambda: defaultdict(dict))
+        self._variable_cache: DefaultDict[str, DefaultDict[int, Dict[str, Any]]] = (
+            defaultdict(lambda: defaultdict(dict))
+        )
+        self._group_cache: DefaultDict[str, DefaultDict[int, Dict[str, Any]]] = (
+            defaultdict(lambda: defaultdict(dict))
+        )
 
         self._all_data_sets_cache: Optional[pd.DataFrame] = None
         self._data_sets_by_year_cache: Dict[int, pd.DataFrame] = {}
@@ -385,9 +385,11 @@ def _datasets_from_source_dict(datasets) -> pd.DataFrame:
                 "DATASET": "/".join(dataset["c_dataset"]),
                 "TITLE": dataset.get("title", None),
                 "DESCRIPTION": dataset.get("description", None),
-                "API BASE URL": dataset["distribution"][0].get("accessURL", None)
-                if dataset.get("distribution")
-                else None,
+                "API BASE URL": (
+                    dataset["distribution"][0].get("accessURL", None)
+                    if dataset.get("distribution")
+                    else None
+                ),
             }
             for dataset in datasets
         ]
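
Both caches in the first hunk have the same two-level shape: dataset name, then vintage, then a plain dict of entries. Because each level is a defaultdict, a lookup like `cache[dataset][vintage]` materializes empty inner containers instead of raising KeyError. A quick sketch of that behavior (the dataset and variable names are only illustrative):

```python
from collections import defaultdict
from typing import Any, DefaultDict, Dict

cache: DefaultDict[str, DefaultDict[int, Dict[str, Any]]] = defaultdict(
    lambda: defaultdict(dict)
)

# Illustrative keys in the style of Census dataset/variable names.
cache["acs/acs5"][2020]["B01001_001E"] = {"label": "Total population"}

# Missing datasets and vintages materialize as empty dicts instead of raising.
assert cache["dec/pl"][2020] == {}
```
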
censusdis/maps.py (12 changes: 6 additions & 6 deletions)

@@ -1145,12 +1145,12 @@ def sjoin_mostly_contains(
 
     # Keep the original geos around in EPSG 3857 so
     # we can check intersection areas.
-    gdf_large_geos[
-        f"_original_large_geos_{area_epsg}"
-    ] = gdf_large_geos.geometry.to_crs(epsg=area_epsg)
-    gdf_small_geos[
-        f"_original_small_geos_{area_epsg}"
-    ] = gdf_small_geos.geometry.to_crs(epsg=area_epsg)
+    gdf_large_geos[f"_original_large_geos_{area_epsg}"] = (
+        gdf_large_geos.geometry.to_crs(epsg=area_epsg)
+    )
+    gdf_small_geos[f"_original_small_geos_{area_epsg}"] = (
+        gdf_small_geos.geometry.to_crs(epsg=area_epsg)
+    )
 
     # Do an intersection join.
     gdf_intersection = gdf_small_geos.sjoin(
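
The columns touched here hold projected copies of the geometries because area comparisons only make sense in a planar CRS; calling `.area` on EPSG:4326 longitude/latitude geometries yields square degrees, which are not comparable across latitudes. A small sketch of the reprojection step, assuming geopandas (with pyproj) is installed; the frame and column name are made up for illustration:

```python
import geopandas as gpd
from shapely.geometry import box

# A toy GeoDataFrame in lon/lat.
gdf = gpd.GeoDataFrame(geometry=[box(-74.0, 40.7, -73.9, 40.8)], crs="EPSG:4326")

# Keep a projected copy of the geometry so intersection areas can be compared.
area_epsg = 3857
gdf[f"_original_geos_{area_epsg}"] = gdf.geometry.to_crs(epsg=area_epsg)

# Areas are now in the projection's square meters rather than square degrees.
print(gdf[f"_original_geos_{area_epsg}"].area)
```
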
docs/conf.py (1 change: 1 addition & 0 deletions)

@@ -1,4 +1,5 @@
 """Configure sphinx."""
+
 # Configuration file for the Sphinx documentation builder.
 #
 # For the full list of built-in configuration values, see the documentation:
notebooks/ACS Comparison Profile.ipynb (37 changes: 8 additions & 29 deletions)

Large diffs are not rendered by default.

notebooks/ACS Data Profile.ipynb (40 changes: 8 additions & 32 deletions)

Large diffs are not rendered by default.

notebooks/ACS Subject Table.ipynb (37 changes: 8 additions & 29 deletions)

Large diffs are not rendered by default.
notebooks/Block Groups in CBSAs.ipynb (36 changes: 9 additions & 27 deletions)

@@ -18,24 +18,6 @@
     "some additional [GeoPandas](https://geopandas.org) spatial operations."
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "66fded3e-f576-4848-a8ab-7eff3f10f6a6",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# So we can run from within the censusdis project and find the packages we need.\n",
-    "import os\n",
-    "import sys\n",
-    "\n",
-    "sys.path.append(\n",
-    "    os.path.join(os.path.abspath(os.path.join(os.path.curdir, os.path.pardir)))\n",
-    ")"
-   ]
-  },
   {
    "cell_type": "markdown",
    "id": "0be695ac-3484-474d-bc85-8aa3d0ea4be1",
@@ -48,7 +30,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 1,
    "id": "9a0fdca1-d022-4224-878e-a2da5ecd969a",
    "metadata": {},
    "outputs": [],
@@ -64,7 +46,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 2,
    "id": "1673a2a4-cc6e-4e9c-9721-b599d97e9e19",
    "metadata": {},
    "outputs": [],
@@ -75,7 +57,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 3,
    "id": "fe2f4bcd-beaf-4823-bc4d-ad27702a9acd",
    "metadata": {},
    "outputs": [],
@@ -87,7 +69,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 4,
    "id": "e38abd6d-d69b-4a8d-b09a-f1f7755ea123",
    "metadata": {},
    "outputs": [],
@@ -112,7 +94,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 6,
+   "execution_count": 5,
    "id": "405395e5-dbf6-4a24-8c68-44ef4a040f84",
    "metadata": {},
    "outputs": [],
@@ -138,7 +120,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 6,
    "id": "c2a9e355-89cd-4bf5-8346-fbd913ac3f7c",
    "metadata": {},
    "outputs": [],
@@ -168,7 +150,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": 7,
    "id": "709d396e-a2aa-4aad-b334-541fa8d76b8a",
    "metadata": {},
    "outputs": [],
@@ -199,7 +181,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 10,
+   "execution_count": 8,
    "id": "d2182ec6-e1e8-4d2b-a620-bdaa8ad1389a",
    "metadata": {},
    "outputs": [],
@@ -215,7 +197,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 15,
+   "execution_count": 9,
    "id": "587aec27-e0df-4a3b-aa45-0da6c598dc0a",
    "metadata": {},
    "outputs": [
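
The cell deleted at the top of this notebook diff is the sys.path hack named in the commit message: it appended the repository root to the import path so the notebook could import censusdis from a source checkout without installing it. With the hack gone, the surviving cells renumber from 1. For reference, here is the deleted code alongside the plain import that replaces it; the `ced` alias follows the convention the censusdis documentation appears to use:

```python
# The deleted cell, verbatim: append the repo root so `import censusdis`
# resolves against the source tree the notebook lives in.
import os
import sys

sys.path.append(
    os.path.join(os.path.abspath(os.path.join(os.path.curdir, os.path.pardir)))
)

# With the package installed (e.g. `pip install censusdis`), none of the
# above is needed; a normal import works from any working directory.
import censusdis.data as ced
```
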
(The remaining changed files were not loaded in this view.)
