Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

relax httpx dep version and remove broken experimental BlockhashCache #408

Merged
merged 8 commits
Mar 29, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .bumpversion.cfg
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.32.0
current_version = 0.33.0
commit = True
tag = True

Expand Down
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
# Changelog

## [0.33.0] - 2024-03-29

### Changed

- Relax httpx dependency [(#408)](https://github.com/michaelhly/solana-py/pull/408)
- Remove experimental and flawed BlockhashCache [(#408)](https://github.com/michaelhly/solana-py/pull/408). This feature was error-prone and frequently caused transaction failures, so it has been removed rather than fixed.
- Upgrade to Solders 0.21.0 (fixes aarch64 compatibility issues) [(#402)](https://github.com/michaelhly/solana-py/pull/402)

## [0.32.0] - 2024-02-12

### Changed
Expand Down
274 changes: 124 additions & 150 deletions poetry.lock

Large diffs are not rendered by default.

6 changes: 2 additions & 4 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "solana"
version = "0.32.0"
version = "0.33.0"
description = "Solana Python API"
authors = [
"Michael Huang <michaelhly@gmail.com>",
Expand All @@ -27,10 +27,8 @@ packages = [
[tool.poetry.dependencies]
python = "^3.8"
construct-typing = "^0.5.2"
httpx = "^0.23.0"
httpx = ">=0.23.0"
typing-extensions = ">=4.2.0"
cachetools = "^4.2.2"
types-cachetools = "^4.2.4"
websockets = ">=9.0,<12.0"
solders = "^0.21.0"

Expand Down
51 changes: 0 additions & 51 deletions src/solana/blockhash.py

This file was deleted.

38 changes: 4 additions & 34 deletions src/solana/rpc/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,6 @@
from solders.signature import Signature
from solders.transaction import VersionedTransaction

from solana.blockhash import BlockhashCache
from solana.rpc import types
from solana.transaction import Transaction

Expand All @@ -83,22 +82,6 @@ class Client(_ClientCore): # pylint: disable=too-many-public-methods
Args:
endpoint: URL of the RPC endpoint.
commitment: Default bank state to query. It can be either "finalized", "confirmed" or "processed".
blockhash_cache: (Experimental) If True, keep a cache of recent blockhashes to make
`send_transaction` calls faster.
You can also pass your own BlockhashCache object to customize its parameters.

The cache works as follows:

1. Retrieve the oldest unused cached blockhash that is younger than `ttl` seconds,
where `ttl` is defined in the BlockhashCache (we prefer unused blockhashes because
reusing blockhashes can cause errors in some edge cases, and we prefer slightly
older blockhashes because they're more likely to be accepted by every validator).
2. If there are no unused blockhashes in the cache, take the oldest used
blockhash that is younger than `ttl` seconds.
3. Fetch a new recent blockhash *after* sending the transaction. This is to keep the cache up-to-date.

If you want something tailored to your use case, run your own loop that fetches the recent blockhash,
and pass that value in your `.send_transaction` calls.
timeout: HTTP request timeout in seconds.
extra_headers: Extra headers to pass for HTTP request.

Expand All @@ -108,12 +91,11 @@ def __init__(
self,
endpoint: Optional[str] = None,
commitment: Optional[Commitment] = None,
blockhash_cache: Union[BlockhashCache, bool] = False,
timeout: float = 10,
extra_headers: Optional[Dict[str, str]] = None,
):
"""Init API client."""
super().__init__(commitment, blockhash_cache)
super().__init__(commitment)
self._provider = http.HTTPProvider(endpoint, timeout=timeout, extra_headers=extra_headers)

def is_connected(self) -> bool:
Expand Down Expand Up @@ -1039,18 +1021,9 @@ def send_transaction(
return self.send_raw_transaction(bytes(txn), opts=versioned_tx_opts)
last_valid_block_height = None
if recent_blockhash is None:
if self.blockhash_cache:
try:
recent_blockhash = self.blockhash_cache.get()
except ValueError:
blockhash_resp = self.get_latest_blockhash(Finalized)
recent_blockhash = self._process_blockhash_resp(blockhash_resp, used_immediately=True)
last_valid_block_height = blockhash_resp.value.last_valid_block_height

else:
blockhash_resp = self.get_latest_blockhash(Finalized)
recent_blockhash = self.parse_recent_blockhash(blockhash_resp)
last_valid_block_height = blockhash_resp.value.last_valid_block_height
blockhash_resp = self.get_latest_blockhash(Finalized)
recent_blockhash = self.parse_recent_blockhash(blockhash_resp)
last_valid_block_height = blockhash_resp.value.last_valid_block_height

txn.recent_blockhash = recent_blockhash

Expand All @@ -1065,9 +1038,6 @@ def send_transaction(
)

txn_resp = self.send_raw_transaction(txn.serialize(), opts=opts_to_use)
if self.blockhash_cache:
blockhash_resp = self.get_latest_blockhash(Finalized)
self._process_blockhash_resp(blockhash_resp, used_immediately=False)
return txn_resp

def simulate_transaction(
Expand Down
37 changes: 4 additions & 33 deletions src/solana/rpc/async_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,6 @@
from solders.signature import Signature
from solders.transaction import VersionedTransaction

from solana.blockhash import BlockhashCache
from solana.rpc import types
from solana.transaction import Transaction

Expand All @@ -80,22 +79,6 @@ class AsyncClient(_ClientCore): # pylint: disable=too-many-public-methods
Args:
endpoint: URL of the RPC endpoint.
commitment: Default bank state to query. It can be either "finalized", "confirmed" or "processed".
blockhash_cache: (Experimental) If True, keep a cache of recent blockhashes to make
`send_transaction` calls faster.
You can also pass your own BlockhashCache object to customize its parameters.

The cache works as follows:

1. Retrieve the oldest unused cached blockhash that is younger than `ttl` seconds,
where `ttl` is defined in the BlockhashCache (we prefer unused blockhashes because
reusing blockhashes can cause errors in some edge cases, and we prefer slightly
older blockhashes because they're more likely to be accepted by every validator).
2. If there are no unused blockhashes in the cache, take the oldest used
blockhash that is younger than `ttl` seconds.
3. Fetch a new recent blockhash *after* sending the transaction. This is to keep the cache up-to-date.

If you want something tailored to your use case, run your own loop that fetches the recent blockhash,
and pass that value in your `.send_transaction` calls.
timeout: HTTP request timeout in seconds.
extra_headers: Extra headers to pass for HTTP request.
"""
Expand All @@ -104,12 +87,11 @@ def __init__(
self,
endpoint: Optional[str] = None,
commitment: Optional[Commitment] = None,
blockhash_cache: Union[BlockhashCache, bool] = False,
timeout: float = 10,
extra_headers: Optional[Dict[str, str]] = None,
) -> None:
"""Init API client."""
super().__init__(commitment, blockhash_cache)
super().__init__(commitment)
self._provider = async_http.AsyncHTTPProvider(endpoint, timeout=timeout, extra_headers=extra_headers)

async def __aenter__(self) -> "AsyncClient":
Expand Down Expand Up @@ -1050,17 +1032,9 @@ async def send_transaction(
return await self.send_raw_transaction(bytes(txn), opts=versioned_tx_opts)
last_valid_block_height = None
if recent_blockhash is None:
if self.blockhash_cache:
try:
recent_blockhash = self.blockhash_cache.get()
except ValueError:
blockhash_resp = await self.get_latest_blockhash(Finalized)
recent_blockhash = self._process_blockhash_resp(blockhash_resp, used_immediately=True)
last_valid_block_height = blockhash_resp.value.last_valid_block_height
else:
blockhash_resp = await self.get_latest_blockhash(Finalized)
recent_blockhash = self.parse_recent_blockhash(blockhash_resp)
last_valid_block_height = blockhash_resp.value.last_valid_block_height
blockhash_resp = await self.get_latest_blockhash(Finalized)
recent_blockhash = self.parse_recent_blockhash(blockhash_resp)
last_valid_block_height = blockhash_resp.value.last_valid_block_height

txn.recent_blockhash = recent_blockhash

Expand All @@ -1074,9 +1048,6 @@ async def send_transaction(
else opts
)
txn_resp = await self.send_raw_transaction(txn.serialize(), opts=opts_to_use)
if self.blockhash_cache:
blockhash_resp = await self.get_latest_blockhash(Finalized)
self._process_blockhash_resp(blockhash_resp, used_immediately=False)
return txn_resp

async def simulate_transaction(
Expand Down
21 changes: 1 addition & 20 deletions src/solana/rpc/core.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,6 @@
# pylint: disable=too-many-arguments
"""Helper code for api.py and async_api.py."""
from typing import List, Optional, Sequence, Tuple, Union, cast, overload

try:
from typing import Literal # type: ignore
except ImportError:
from typing_extensions import Literal # type: ignore
from typing import List, Optional, Sequence, Tuple, Union, overload

from solders.account_decoder import UiAccountEncoding, UiDataSliceConfig
from solders.commitment_config import CommitmentLevel
Expand Down Expand Up @@ -84,7 +79,6 @@
from solders.transaction import VersionedTransaction
from solders.transaction_status import UiTransactionEncoding

from solana.blockhash import BlockhashCache
from solana.rpc import types
from solana.transaction import Transaction

Expand Down Expand Up @@ -154,14 +148,8 @@ class _ClientCore: # pylint: disable=too-few-public-methods
def __init__(
self,
commitment: Optional[Commitment] = None,
blockhash_cache: Union[BlockhashCache, bool] = False,
):
self._commitment = commitment or Finalized
self.blockhash_cache: Union[BlockhashCache, Literal[False]] = (
BlockhashCache()
if blockhash_cache is True
else cast(Union[BlockhashCache, Literal[False]], blockhash_cache)
)

@property
def commitment(self) -> Commitment:
Expand Down Expand Up @@ -520,10 +508,3 @@ def _post_send(resp: SendTransactionResp) -> SendTransactionResp:
def parse_recent_blockhash(blockhash_resp: GetLatestBlockhashResp) -> Blockhash:
"""Extract blockhash from JSON RPC result."""
return blockhash_resp.value.blockhash

def _process_blockhash_resp(self, blockhash_resp: GetLatestBlockhashResp, used_immediately: bool) -> Blockhash:
recent_blockhash = self.parse_recent_blockhash(blockhash_resp)
if self.blockhash_cache:
slot = blockhash_resp.context.slot
self.blockhash_cache.set(recent_blockhash, slot, used_immediately=used_immediately)
return recent_blockhash
51 changes: 0 additions & 51 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,12 +51,6 @@ def stubbed_receiver_prefetched_blockhash() -> Pubkey:
return Pubkey.from_string("J3dxNj7nDRRqRRXuEMynDG57DkZK4jYRuv3Garmb1i97")


@pytest.fixture(scope="session")
def stubbed_receiver_cached_blockhash() -> Pubkey:
"""Arbitrary known public key to be used as receiver."""
return Pubkey.from_string("J3dxNj7nDRRqRRXuEMynDG57DkZK4jYRuv3Garmb1i95")


@pytest.fixture(scope="session")
def async_stubbed_receiver() -> Pubkey:
"""Arbitrary known public key to be used as receiver."""
Expand All @@ -69,12 +63,6 @@ def async_stubbed_receiver_prefetched_blockhash() -> Pubkey:
return Pubkey.from_string("J3dxNj7nDRRqRRXuEMynDG57DkZK4jYRuv3Garmb1i96")


@pytest.fixture(scope="session")
def async_stubbed_receiver_cached_blockhash() -> Pubkey:
"""Arbitrary known public key to be used as receiver."""
return Pubkey.from_string("J3dxNj7nDRRqRRXuEMynDG57DkZK4jYRuv3Garmb1i94")


@pytest.fixture(scope="session")
def stubbed_sender() -> Keypair:
"""Arbitrary known account to be used as sender."""
Expand All @@ -87,12 +75,6 @@ def stubbed_sender_prefetched_blockhash() -> Keypair:
return Keypair.from_seed(bytes([9] * Pubkey.LENGTH))


@pytest.fixture(scope="session")
def stubbed_sender_cached_blockhash() -> Keypair:
"""Arbitrary known account to be used as sender."""
return Keypair.from_seed(bytes([4] * Pubkey.LENGTH))


@pytest.fixture(scope="session")
def stubbed_sender_for_token() -> Keypair:
"""Arbitrary known account to be used as sender."""
Expand All @@ -111,12 +93,6 @@ def async_stubbed_sender_prefetched_blockhash() -> Keypair:
return Keypair.from_seed(bytes([5] * Pubkey.LENGTH))


@pytest.fixture(scope="session")
def async_stubbed_sender_cached_blockhash() -> Keypair:
"""Another arbitrary known account to be used as sender."""
return Keypair.from_seed(bytes([3] * Pubkey.LENGTH))


@pytest.fixture(scope="session")
def freeze_authority() -> Keypair:
"""Arbitrary known account to be used as freeze authority."""
Expand Down Expand Up @@ -152,17 +128,6 @@ def test_http_client(docker_services, _sleep_for_first_blocks) -> Client: # pyl
return http_client


@pytest.mark.integration
@pytest.fixture(scope="session")
def test_http_client_cached_blockhash(
docker_services, _sleep_for_first_blocks # pylint: disable=redefined-outer-name
) -> Client:
"""Test http_client.is_connected."""
http_client = Client(commitment=Processed, blockhash_cache=True)
docker_services.wait_until_responsive(timeout=15, pause=1, check=http_client.is_connected)
return http_client


@pytest.mark.integration
@pytest.fixture(scope="session")
def test_http_client_async(
Expand All @@ -179,22 +144,6 @@ def check() -> bool:
event_loop.run_until_complete(http_client.close())


@pytest.mark.integration
@pytest.fixture(scope="session")
def test_http_client_async_cached_blockhash(
docker_services, event_loop, _sleep_for_first_blocks # pylint: disable=redefined-outer-name
) -> AsyncClient:
"""Test http_client.is_connected."""
http_client = AsyncClient(commitment=Processed, blockhash_cache=True)

def check() -> bool:
return event_loop.run_until_complete(http_client.is_connected())

docker_services.wait_until_responsive(timeout=15, pause=1, check=check)
yield http_client
event_loop.run_until_complete(http_client.close())


@pytest.mark.integration
@pytest.fixture(scope="function")
def random_funded_keypair(test_http_client: Client) -> Keypair:
Expand Down
Loading
Loading