Prepare for release (#497)
* update precommit and pyproject

* add suggested changes

* increase versions

* resolve some issues

* resolve remaining issues
zigaLuksic committed Nov 13, 2023
1 parent 3a8f431 commit 4d19a60
Showing 19 changed files with 110 additions and 127 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -13,13 +13,13 @@ repos:
- id: debug-statements

- repo: https://github.com/psf/black
-    rev: 23.10.1
+    rev: 23.11.0
hooks:
- id: black
language_version: python3

- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: "v0.1.3"
rev: "v0.1.5"
hooks:
- id: ruff

5 changes: 5 additions & 0 deletions CHANGELOG.MD
@@ -1,3 +1,8 @@
+ ## [Version 3.9.4] - 2023-11-13
+
+ - Fixed a problem with `dataclasses_json 0.6.2` that broke BYOC functionalities.
+ - Removed AWS examples from the docs since the functionality is no longer maintained.
+
## [Version 3.9.3] - 2023-11-03

- `SHConfig` now correctly initializes a default profile in the file even if the first initialization call is done with a custom profile.
2 changes: 1 addition & 1 deletion examples/batch_statistical.ipynb
@@ -114,7 +114,7 @@
"outputs": [],
"source": [
"AWS_ID = \"my-aws-access-id\"\n",
"AWS_SECRET = \"my-aws-secret-key\"\n"
"AWS_SECRET = \"my-aws-secret-key\""
]
},
{
6 changes: 2 additions & 4 deletions examples/byoc_request.ipynb
@@ -723,7 +723,7 @@
" for day in month_result.get(\"CommonPrefixes\")[:day_count]:\n",
" day_result = client.list_objects(Bucket=bucket, Delimiter=\"/\", Prefix=day.get(\"Prefix\"))\n",
" for tile in day_result.get(\"CommonPrefixes\"):\n",
" tiles_path.append(tile.get(\"Prefix\"))\n",
" tiles_path.append(tile.get(\"Prefix\")) # noqa: PERF401\n",
" return tiles_path"
]
},
@@ -978,9 +978,7 @@
"metadata": {},
"outputs": [],
"source": [
"tiles_for_visualized = []\n",
"for _ in range(100):\n",
" tiles_for_visualized.append(ByocTile.from_dict(next(tile_iterator)))\n",
"tiles_for_visualized = [ByocTile.from_dict(next(tile_iterator)) for _ in range(100)]\n",
"\n",
"tiles_gdf = gpd.GeoDataFrame(\n",
" tiles_for_visualized,\n",
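Both notebook edits above answer to the PERF (perflint) rules that this commit enables in pyproject.toml further down. PERF401 flags `list.append` calls inside a loop and suggests a comprehension; where the rewrite reads well (the `tiles_for_visualized` cell) the loop is converted, and where it would hurt readability (the nested listing loop here, and a similar loop in fis_request.ipynb below) the rule is silenced with `# noqa: PERF401`. A minimal runnable sketch of the pattern, with hypothetical `items`/`transform` stand-ins rather than names from this repo:

items = ["a", "b", "c"]

def transform(value: str) -> str:
    return value.upper()

# Before: append in a loop -- this is what PERF401 flags.
results = []
for item in items:
    results.append(transform(item))

# After: the list comprehension ruff suggests instead.
results = [transform(item) for item in items]
print(results)  # ['A', 'B', 'C']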
18 changes: 8 additions & 10 deletions examples/fis_request.ipynb
@@ -291,7 +291,7 @@
" row = [int(channel[1:]), parse_time(stat[\"date\"], force_datetime=True)]\n",
"\n",
" for column in columns[2:]:\n",
" row.append(stat[\"basicStats\"][column])\n",
" row.append(stat[\"basicStats\"][column]) # noqa: PERF401\n",
"\n",
" data.append(row)\n",
"\n",
@@ -432,15 +432,13 @@
"\n",
"geometry1 = Geometry(Polygon([(-5.13, 48), (-5.23, 48.09), (-5.13, 48.17), (-5.03, 48.08), (-5.13, 48)]), CRS.WGS84)\n",
"geometry2 = Geometry(\n",
" Polygon(\n",
" [\n",
" (1292344.0, 5205055.5),\n",
" (1301479.5, 5195920.0),\n",
" (1310615.0, 5205055.5),\n",
" (1301479.5, 5214191.0),\n",
" (1292344.0, 5205055.5),\n",
" ]\n",
" ),\n",
" Polygon([\n",
" (1292344.0, 5205055.5),\n",
" (1301479.5, 5195920.0),\n",
" (1310615.0, 5205055.5),\n",
" (1301479.5, 5214191.0),\n",
" (1292344.0, 5205055.5),\n",
" ]),\n",
" CRS.POP_WEB,\n",
")"
]
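The `Polygon` reformatting above is mechanical rather than hand-written: this commit bumps `black` from 23.10.1 to 23.11.0, and since pyproject.toml sets `preview = true`, black's preview style hugs the brackets of a sole collection argument against the call's parentheses. The same style appears to drive the `remove_undefined({...})` reformattings in the files below. A before/after sketch with a hypothetical `describe` helper; both forms are equivalent, only the formatting differs:

def describe(points: list) -> str:
    return f"{len(points)} points"

# Old formatting: the sole list argument opens an extra indentation level.
print(describe(
    [
        (1292344.0, 5205055.5),
        (1301479.5, 5195920.0),
    ]
))

# Preview "hug brackets" formatting: the parentheses hug the brackets.
print(describe([
    (1292344.0, 5205055.5),
    (1301479.5, 5195920.0),
]))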
10 changes: 4 additions & 6 deletions examples/session_sharing.ipynb
@@ -393,12 +393,10 @@
"session = SentinelHubSession(config)\n",
"\n",
"# For the duration of \"with\" statement this will run a thread that will share the given Sentinel Hub session\n",
"with SessionSharing(session):\n",
" # Run parallelization process\n",
" with ProcessPoolExecutor(max_workers=3) as executor:\n",
" futures = [executor.submit(remote_function, EXAMPLE_URL, config) for _ in range(10)]\n",
" for future in futures:\n",
" future.result()"
"with SessionSharing(session), ProcessPoolExecutor(max_workers=3) as executor:\n",
" futures = [executor.submit(remote_function, EXAMPLE_URL, config) for _ in range(10)]\n",
" for future in futures:\n",
" future.result()"
]
},
{
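Combining the two `with` statements matches the removal of SIM117 ("wants to always combine `with` statements") from ruff's ignore list in this commit's pyproject.toml changes. A runnable sketch of the same refactor, using `contextlib.nullcontext` as a stand-in for `SessionSharing` and `ProcessPoolExecutor`:

from contextlib import nullcontext

# Nested form -- what SIM117 flags:
with nullcontext("session"):
    with nullcontext("executor") as executor:
        print(executor)

# Combined form -- one statement, one level of indentation less:
with nullcontext("session"), nullcontext("executor") as executor:
    print(executor)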
2 changes: 1 addition & 1 deletion examples/utils.py
@@ -14,7 +14,7 @@ def plot_image(
image: np.ndarray, factor: float = 1.0, clip_range: tuple[float, float] | None = None, **kwargs: Any
) -> None:
"""Utility function for plotting RGB images."""
- fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(15, 15))
+ _, ax = plt.subplots(nrows=1, ncols=1, figsize=(15, 15))
if clip_range is not None:
ax.imshow(np.clip(image * factor, *clip_range), **kwargs)
else:
64 changes: 31 additions & 33 deletions pyproject.toml
@@ -112,35 +112,36 @@ preview = true
line-length = 120
target-version = "py38"
select = [
"F", # pyflakes
"E", # pycodestyle
"W", # pycodestyle
"C90", # mccabe
"N", # naming
"YTT", # flake-2020
"B", # bugbear
"A", # built-ins
"COM", # commas
"C4", # comprehensions
"T10", # debugger statements
"ISC", # implicit string concatenation
"ICN", # import conventions
"G", # logging format
"PIE", # flake8-pie
"T20", # print statements
"PT", # pytest style
"RET", # returns
"SLF", # private member access
"SIM", # simplifications
"ARG", # unused arguments
"PD", # pandas
"PGH", # pygrep hooks (useless noqa comments, eval statements etc.)
"FLY", # flynt
"RUF", # ruff rules
"NPY", # numpy
"I", # isort
"UP", # pyupgrade
"FA", # checks where future import of annotations would make types nicer
"F", # pyflakes
"E", # pycodestyle
"W", # pycodestyle
"C90", # mccabe
"I", # isort
"N", # naming
"UP", # pyupgrade
"YTT", # flake-2020
"B", # bugbear
"A", # built-ins
"COM", # commas
"C4", # comprehensions
"T10", # debugger statements
"FA", # checks where future import of annotations would make types nicer
"ISC", # implicit string concatenation
"ICN", # import conventions
"G", # logging format
"PIE", # flake8-pie
"T20", # print statements
"PT", # pytest style
"RET", # returns
"SLF", # private member access
"SIM", # simplifications
"ARG", # unused arguments
"PD", # pandas
"PGH", # pygrep hooks (useless noqa comments, eval statements etc.)
"FLY", # flynt
"NPY", # numpy
"PERF", # perflint, performance improvements
"RUF", # ruff rules
]
fix = true
fixable = [
@@ -153,17 +154,14 @@
]
ignore = [
"C408", # complains about `dict()` calls, we use them to avoid too many " in the code
"SIM117", # wants to always combine `with` statements, gets ugly for us
"SIM108", # tries to aggresively inline `if`, not always readable
"A003", # complains when ATTRIBUTES shadow builtins, we have objects that implement `filter` and such
"COM812", # trailing comma missing, fights with black
"PD011", # suggests `.to_numpy` instead of `.values`, also does this for non-pandas objects...
# potentially fixable
"N818", # we use the 'Exception' suffix but PEP suggests 'Error'
"B904", # want `raise ... from None` instead of just `raise ...`
"B028", # always demands a stacklevel argument when warning
"PT011", # complains for `pytest.raises(ValueError)` but we use it a lot
"UP024", # wants to switch IOError with OSError
]
per-file-ignores = { "__init__.py" = ["F401"], "conf.py" = ["FA100"] }
exclude = [".git", "__pycache__", "build", "dist", "sentinelhub/aws/*"]
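Besides reordering the `select` list, the config gains the PERF (perflint) category, which is what the `# noqa: PERF401` and `# noqa: PERF203` comments elsewhere in this commit answer to. PERF203 flags a `try`/`except` inside a loop body because of its per-iteration overhead; when each item may fail independently the handler has to stay inside the loop, so the commit suppresses the rule rather than restructure. A minimal sketch of that situation, with illustrative data:

values = ["1", "2", "x", "4"]

parsed = []
for value in values:
    try:
        parsed.append(int(value))
    except ValueError:  # noqa: PERF203  (a per-item failure must not abort the loop)
        parsed.append(0)

print(parsed)  # [1, 2, 0, 4]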
2 changes: 1 addition & 1 deletion sentinelhub/_version.py
@@ -1,3 +1,3 @@
"""Version of the sentinelhub package."""

__version__ = "3.9.3"
__version__ = "3.9.4"
12 changes: 5 additions & 7 deletions sentinelhub/api/base_request.py
@@ -134,13 +134,11 @@ def _get_base_url(self) -> str:
settings from config object. In case different collections have different restrictions then
`SHConfig.sh_base_url` breaks the tie in case it matches one of the data collection URLs.
"""
- data_collection_urls = tuple(
- {
- input_data_dict.service_url.rstrip("/")
- for input_data_dict in self.payload["input"]["data"]
- if isinstance(input_data_dict, InputDataDict) and input_data_dict.service_url is not None
- }
- )
+ data_collection_urls = tuple({
+ input_data_dict.service_url.rstrip("/")
+ for input_data_dict in self.payload["input"]["data"]
+ if isinstance(input_data_dict, InputDataDict) and input_data_dict.service_url is not None
+ })
config_base_url = self.config.sh_base_url.rstrip("/")

if not data_collection_urls:
24 changes: 11 additions & 13 deletions sentinelhub/api/batch/process.py
@@ -150,19 +150,17 @@ def output(
:param kwargs: Any other arguments to be added to a dictionary of parameters
:return: A dictionary of output parameters
"""
- return remove_undefined(
- {
- "defaultTilePath": default_tile_path,
- "overwrite": overwrite,
- "skipExisting": skip_existing,
- "cogOutput": cog_output,
- "cogParameters": cog_parameters,
- "createCollection": create_collection,
- "collectionId": collection_id,
- "responses": responses,
- **kwargs,
- }
- )
+ return remove_undefined({
+ "defaultTilePath": default_tile_path,
+ "overwrite": overwrite,
+ "skipExisting": skip_existing,
+ "cogOutput": cog_output,
+ "cogParameters": cog_parameters,
+ "createCollection": create_collection,
+ "collectionId": collection_id,
+ "responses": responses,
+ **kwargs,
+ })

def iter_tiling_grids(self, **kwargs: Any) -> SentinelHubFeatureIterator:
"""An iterator over tiling grids
12 changes: 5 additions & 7 deletions sentinelhub/api/byoc.py
@@ -220,13 +220,11 @@ def update_tile(self, collection: CollectionType, tile: TileType) -> Json:
headers = {"Content-Type": MimeType.JSON.get_string()}

_tile = self._to_dict(tile)
- updates = remove_undefined(
- {
- "path": _tile["path"],
- "coverGeometry": _tile.get("coverGeometry"),
- "sensingTime": _tile.get("sensingTime"),
- }
- )
+ updates = remove_undefined({
+ "path": _tile["path"],
+ "coverGeometry": _tile.get("coverGeometry"),
+ "sensingTime": _tile.get("sensingTime"),
+ })

return self.client.get_json(
url=url, request_type=RequestType.PUT, post_values=updates, headers=headers, use_session=True
30 changes: 14 additions & 16 deletions sentinelhub/api/catalog.py
@@ -140,22 +140,20 @@ def search(
if geometry and geometry.crs is not CRS.WGS84:
geometry = geometry.transform(CRS.WGS84)

- payload = remove_undefined(
- {
- "collections": [collection_id],
- "datetime": f"{start_time}/{end_time}" if time else None,
- "bbox": list(bbox) if bbox else None,
- "intersects": geometry.get_geojson(with_crs=False) if geometry else None,
- "ids": ids,
- "filter": self._prepare_filters(filter, collection, filter_lang),
- "filter-lang": filter_lang,
- "filter-crs": filter_crs,
- "fields": fields,
- "distinct": distinct,
- "limit": limit,
- **kwargs,
- }
- )
+ payload = remove_undefined({
+ "collections": [collection_id],
+ "datetime": f"{start_time}/{end_time}" if time else None,
+ "bbox": list(bbox) if bbox else None,
+ "intersects": geometry.get_geojson(with_crs=False) if geometry else None,
+ "ids": ids,
+ "filter": self._prepare_filters(filter, collection, filter_lang),
+ "filter-lang": filter_lang,
+ "filter-crs": filter_crs,
+ "fields": fields,
+ "distinct": distinct,
+ "limit": limit,
+ **kwargs,
+ })

return CatalogSearchIterator(self.client, url, payload)

2 changes: 1 addition & 1 deletion sentinelhub/api/process.py
@@ -308,7 +308,7 @@ def get_async_running_status(ids: Iterable[str], config: SHConfig | None = None)
client.get_json_dict(f"{config.sh_base_url}/api/v1/async/process/{request_id}", use_session=True)
# A successful request means it's running
result[request_id] = True
- except DownloadFailedException as exception:
+ except DownloadFailedException as exception: # noqa: PERF203
# A 404 means it's not running
if exception.request_exception is not None and exception.request_exception.response is not None:
if exception.request_exception.response.status_code == requests.status_codes.codes.NOT_FOUND:
17 changes: 7 additions & 10 deletions sentinelhub/areas.py
@@ -572,7 +572,7 @@ def _get_utm_polygons(self) -> list[tuple[BaseGeometry, dict[str, Any]]]:
utm_grid_filename = os.path.join(os.path.dirname(__file__), ".utmzones.geojson")

if not os.path.isfile(utm_grid_filename):
- raise IOError(f"UTM grid definition file does not exist: {os.path.abspath(utm_grid_filename)}")
+ raise OSError(f"UTM grid definition file does not exist: {os.path.abspath(utm_grid_filename)}")

with open(utm_grid_filename) as utm_grid_file:
utm_grid = json.load(utm_grid_file)["features"]
@@ -606,17 +606,14 @@ def _get_utm_polygons(self) -> list[tuple[BaseGeometry, dict[str, Any]]]:
utm_geom_list = []
for lat in [(self.LAT_EQ, self.LAT_MAX), (self.LAT_MIN, self.LAT_EQ)]:
for lng in range(self.LNG_MIN, self.LNG_MAX, self.LNG_UTM):
- points = []
# A new point is added per each degree - this is in line with geometries used by UtmGridSplitter
# In the future the number of points will be calculated according to bbox_size parameter
- for degree in range(lat[0], lat[1]):
- points.append((lng, degree))
- for degree in range(lng, lng + self.LNG_UTM):
- points.append((degree, lat[1]))
- for degree in range(lat[1], lat[0], -1):
- points.append((lng + self.LNG_UTM, degree))
- for degree in range(lng + self.LNG_UTM, lng, -1):
- points.append((degree, lat[0]))
+ points = (
+ [(lng, degree) for degree in range(lat[0], lat[1])]
+ + [(degree, lat[1]) for degree in range(lng, lng + self.LNG_UTM)]
+ + [(lng + self.LNG_UTM, degree) for degree in range(lat[1], lat[0], -1)]
+ + [(degree, lat[0]) for degree in range(lng + self.LNG_UTM, lng, -1)]
+ )

utm_geom_list.append(Polygon(points))

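The `IOError` to `OSError` swap here (and in decoding.py below) follows from dropping UP024 from the ignore list: since Python 3.3, `IOError` is a plain alias of `OSError`, so the rename changes spelling only, not behavior. A quick runnable check:

# IOError and OSError are the same class at runtime.
assert IOError is OSError

try:
    raise IOError("demo")
except OSError as error:
    print(type(error).__name__)  # prints "OSError"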
2 changes: 1 addition & 1 deletion sentinelhub/decoding.py
@@ -183,7 +183,7 @@ def fix_jp2_image(image: np.ndarray, bit_depth: int) -> np.ndarray:
try:
return image >> 1
except TypeError as exception:
- raise IOError(
+ raise OSError(
"Failed to read JPEG2000 image correctly. Most likely reason is that Pillow did not "
"install OpenJPEG library correctly. Try reinstalling Pillow from a wheel"
) from exception
2 changes: 1 addition & 1 deletion sentinelhub/download/handlers.py
@@ -66,7 +66,7 @@ def new_download_func(self: SelfWithConfig, request: DownloadRequest) -> T:
try:
return download_func(self, request)

- except requests.RequestException as exception:
+ except requests.RequestException as exception: # noqa: PERF203
attempts_left = download_attempts - (attempt_idx + 1)
if not (
_is_temporary_problem(exception)
7 changes: 3 additions & 4 deletions tests/api/batch/test_utils.py
@@ -109,10 +109,9 @@ def _tile_status_counts_to_tiles(tile_status_counts: dict[BatchTileStatus, int])
Each payload should be approximately what Sentinel Hub service returns but because we don't need all parameters we
just return status. At the end we randomly shuffle the list just to make it more general.
"""
- tiles: list[dict[str, str]] = []
- for tile_status, count in tile_status_counts.items():
- for _ in range(count):
- tiles.append({"status": tile_status.value})
+ tiles: list[dict[str, str]] = [
+ {"status": tile_status.value} for tile_status, count in tile_status_counts.items() for _ in range(count)
+ ]

random.shuffle(tiles)
return tiles
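The flattened comprehension keeps the original loop order: `for` clauses read left to right exactly as the nested loops did, outer loop first. A small self-contained check of the pattern, using illustrative status strings rather than real BatchTileStatus members:

tile_status_counts = {"PROCESSED": 2, "FAILED": 1}

tiles = [{"status": status} for status, count in tile_status_counts.items() for _ in range(count)]
print(tiles)  # [{'status': 'PROCESSED'}, {'status': 'PROCESSED'}, {'status': 'FAILED'}]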
