Merged
14 changes: 8 additions & 6 deletions pyproject.toml
@@ -43,7 +43,7 @@ dependencies = [
 [project.optional-dependencies]
 dev = [
     "build ~= 1.2.0",
-    "filelock ~=3.15.1",
+    "filelock ~= 3.15.0",
     "mypy ~= 1.10.0",
     "pre-commit ~= 3.5.0",
     "pydoc-markdown ~= 4.8.0",
@@ -54,12 +54,12 @@ dev = [
     "pytest-timeout ~= 2.3.0",
     "pytest-xdist ~= 3.6.0",
     "respx ~= 0.21.0",
-    "ruff ~= 0.4.0",
-    "setuptools ~= 70.1.0", # setuptools are used by pytest, but not explicitly required
+    "ruff ~= 0.5.0",
+    "setuptools ~= 70.3.0", # setuptools are used by pytest, but not explicitly required
     "twine ~= 5.1.0",
-    "types-aiofiles ~= 23.2.0.20240403",
+    "types-aiofiles ~= 24.1.0.20240626",
     "types-colorama ~= 0.4.15.20240311",
-    "types-psutil ~= 5.9.5.20240516",
+    "types-psutil ~= 6.0.0.20240621",
 ]
 scrapy = [
     "scrapy >= 2.11.0",
@@ -74,7 +74,7 @@ scrapy = [
 "Apify Homepage" = "https://apify.com"
 
 [build-system]
-requires = ["setuptools ~= 70.1.0", "wheel"]
+requires = ["setuptools ~= 70.3.0", "wheel"]
 build-backend = "setuptools.build_meta"
 
 [tool.setuptools.packages.find]
@@ -86,6 +86,8 @@ apify = ["py.typed"]
 
 [tool.ruff]
 line-length = 150
+
+[tool.ruff.lint]
 select = ["ALL"]
 ignore = [
     "ANN401", # Dynamically typed expressions (typing.Any) are disallowed in {filename}
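Newer Ruff releases deprecate putting lint-specific options such as `select` and `ignore` directly under `[tool.ruff]`, so the bump to `ruff ~= 0.5.0` comes with the two added lines that move them under the nested `[tool.ruff.lint]` table. A minimal, illustrative sketch (not part of the PR) of how the new nesting can be checked with the standard library:

```python
import tomllib  # Python 3.11+

# Load pyproject.toml and confirm the lint settings now sit under the
# nested [tool.ruff.lint] table rather than directly under [tool.ruff].
with open('pyproject.toml', 'rb') as f:
    config = tomllib.load(f)

lint_config = config['tool']['ruff']['lint']
print(config['tool']['ruff']['line-length'])   # 150
print(lint_config['select'])                   # ['ALL']
print('ANN401' in lint_config['ignore'])       # True
```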
@@ -50,7 +50,7 @@ def _get_storages_dir(cls: type[BaseResourceClient], memory_storage_client: Memo
     @classmethod
     @abstractmethod
     def _get_storage_client_cache(
-        cls,  # noqa: ANN102 # type annotated cls does not work with Self as a return type
+        cls,
         memory_storage_client: MemoryStorageClient,
     ) -> list[Self]:
         raise NotImplementedError('You must override this method in the subclass!')
@@ -62,7 +62,7 @@ def _to_resource_info(self: BaseResourceClient) -> dict:
     @classmethod
     @abstractmethod
     def _create_from_directory(
-        cls,  # noqa: ANN102 # type annotated cls does not work with Self as a return type
+        cls,
         storage_directory: str,
         memory_storage_client: MemoryStorageClient,
         id: str | None = None,  # noqa: A002
@@ -72,7 +72,7 @@ def _create_from_directory(
 
     @classmethod
     def _find_or_create_client_by_id_or_name(
-        cls,  # noqa: ANN102 # type annotated cls does not work with Self as a return type
+        cls,
         memory_storage_client: MemoryStorageClient,
         id: str | None = None,  # noqa: A002
         name: str | None = None,
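The dropped `# noqa: ANN102` comments were only silencing Ruff's missing-annotation-for-`cls` rule, which the original comment noted clashes with a `Self` return type; with the upgraded Ruff the suppressions are no longer needed, presumably because ANN102 is deprecated there. A minimal, self-contained sketch of the pattern (the `Storage`/`Dataset` names are illustrative, not the SDK's actual classes):

```python
from typing import Self  # Python 3.11+; use typing_extensions.Self on older versions


class Storage:
    def __init__(self, name: str) -> None:
        self.name = name

    @classmethod
    def create(cls, name: str) -> Self:
        # `cls` needs no annotation here: the Self return type already
        # binds to whichever subclass the classmethod is called on.
        return cls(name)


class Dataset(Storage):
    pass


print(type(Dataset.create('default')).__name__)  # Dataset
```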
4 changes: 1 addition & 3 deletions src/apify/_utils.py
@@ -208,9 +208,7 @@ def get_memory_usage_bytes() -> int:
 
 
 def maybe_parse_bool(val: str | None) -> bool:
-    if val in {'true', 'True', '1'}:
-        return True
-    return False
+    return val in {'true', 'True', '1'}
 
 
 def maybe_parse_datetime(val: str) -> datetime | str:
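The rewrite above collapses the if/return pair into a single membership test with identical behaviour: the `in` expression already evaluates to a bool, and anything outside the accepted set (including `None`) yields `False`. A small usage sketch:

```python
def maybe_parse_bool(val: str | None) -> bool:
    # True only for the accepted string spellings; None and everything
    # else fall through to False, exactly like the old branches.
    return val in {'true', 'True', '1'}


assert maybe_parse_bool('true') is True
assert maybe_parse_bool('1') is True
assert maybe_parse_bool('false') is False
assert maybe_parse_bool(None) is False
```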
2 changes: 1 addition & 1 deletion src/apify/log.py
@@ -76,7 +76,7 @@ def _get_extra_fields(self: ActorLogFormatter, record: logging.LogRecord) -> dic
         extra_fields: dict[str, Any] = {}
         for key, value in record.__dict__.items():
             if key not in self.empty_record.__dict__:
-                extra_fields[key] = value
+                extra_fields[key] = value  # noqa: PERF403
 
         return extra_fields
 
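The added `# noqa: PERF403` silences the perflint rule that suggests building the dictionary with a comprehension instead of assigning keys inside the loop; the loop itself is kept unchanged. A self-contained sketch of what the rule would prefer (the log-record setup is illustrative only, not the formatter's real code path):

```python
import logging

# `empty_record` mirrors the formatter's baseline record; `record` carries
# one extra attribute, as if it had been passed via logger.info(..., extra=...).
empty_record = logging.LogRecord('demo', logging.INFO, __file__, 1, '', None, None)
record = logging.LogRecord('demo', logging.INFO, __file__, 1, 'message', None, None)
record.custom_field = 'value'

# The comprehension form PERF403 points to, equivalent to the loop in the diff.
extra_fields = {
    key: value
    for key, value in record.__dict__.items()
    if key not in empty_record.__dict__
}
print(extra_fields)  # {'custom_field': 'value'}
```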
2 changes: 1 addition & 1 deletion src/apify/proxy_configuration.py
@@ -325,7 +325,7 @@ async def _check_access(self: ProxyConfiguration) -> None:
         proxy_status_url = f'{self._actor_config.proxy_status_url}/?format=json'
 
         status = None
-        async with httpx.AsyncClient(proxies=await self.new_url()) as client:
+        async with httpx.AsyncClient(proxies=await self.new_url(), timeout=10) as client:
             for _ in range(2):
                 try:
                     response = await client.get(proxy_status_url)
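Passing `timeout=10` puts an explicit ten-second limit on the proxy-status requests instead of relying on httpx's default timeout. A minimal sketch of the same client option outside the SDK (the URL is just a placeholder):

```python
import asyncio

import httpx


async def check_url(url: str) -> int:
    # timeout=10 applies a single 10-second limit to connect, read, write and
    # pool acquisition; httpx.Timeout(...) could set each phase separately.
    async with httpx.AsyncClient(timeout=10) as client:
        response = await client.get(url)
        return response.status_code


print(asyncio.run(check_url('https://example.com')))
```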
4 changes: 2 additions & 2 deletions tests/integration/test_actor_api_helpers.py
@@ -374,12 +374,12 @@ async def main_server() -> None:
     async with Actor:
 
         class WebhookHandler(BaseHTTPRequestHandler):
-            def do_GET(self) -> None:  # noqa: N802, ANN101
+            def do_GET(self) -> None:  # noqa: N802
                 self.send_response(200)
                 self.end_headers()
                 self.wfile.write(bytes('Hello, world!', encoding='utf-8'))
 
-            def do_POST(self) -> None:  # noqa: N802, ANN101
+            def do_POST(self) -> None:  # noqa: N802
                 nonlocal webhook_body
                 content_length = self.headers.get('content-length')
                 length = int(content_length) if content_length else 0
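Only the ANN101 code (Ruff's missing-annotation-for-`self` rule) is dropped from these suppressions; N802 has to stay because `do_GET` and `do_POST` are names dictated by `BaseHTTPRequestHandler`. A stripped-down, standalone version of the handler pattern (host, port and payloads are illustrative, not taken from the test):

```python
from http.server import BaseHTTPRequestHandler, HTTPServer


class WebhookHandler(BaseHTTPRequestHandler):
    def do_GET(self) -> None:  # noqa: N802
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b'Hello, world!')

    def do_POST(self) -> None:  # noqa: N802
        length = int(self.headers.get('content-length') or 0)
        body = self.rfile.read(length)
        self.send_response(200)
        self.end_headers()
        self.wfile.write(body)  # echo the webhook payload back


if __name__ == '__main__':
    HTTPServer(('127.0.0.1', 8080), WebhookHandler).serve_forever()
```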
4 changes: 2 additions & 2 deletions tests/unit/actor/test_actor_log.py
@@ -69,7 +69,7 @@ async def test_actor_log(self: TestActorLog, caplog: pytest.LogCaptureFixture) -
         assert caplog.records[7].levelno == logging.ERROR
         assert caplog.records[7].message == 'Exception message'
         assert caplog.records[7].exc_info is not None
-        assert caplog.records[7].exc_info[0] == ValueError
+        assert caplog.records[7].exc_info[0] is ValueError
         assert isinstance(caplog.records[7].exc_info[1], ValueError)
         assert str(caplog.records[7].exc_info[1]) == 'Dummy ValueError'
 
@@ -79,7 +79,7 @@ async def test_actor_log(self: TestActorLog, caplog: pytest.LogCaptureFixture) -
         assert caplog.records[9].levelno == logging.ERROR
         assert caplog.records[9].message == 'Actor failed with an exception'
         assert caplog.records[9].exc_info is not None
-        assert caplog.records[9].exc_info[0] == RuntimeError
+        assert caplog.records[9].exc_info[0] is RuntimeError
         assert isinstance(caplog.records[9].exc_info[1], RuntimeError)
         assert str(caplog.records[9].exc_info[1]) == 'Dummy RuntimeError'
 
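Switching from `==` to `is` makes the intent explicit: `exc_info[0]` holds the exception class object itself, so an identity check is the precise comparison (for classes the two usually agree, but `is` cannot be fooled by a custom `__eq__`). A minimal standalone illustration:

```python
import sys

try:
    raise ValueError('Dummy ValueError')
except ValueError:
    exc_type, exc_value, _ = sys.exc_info()
    # The first element is the class itself, so identity is the exact check.
    assert exc_type is ValueError
    assert isinstance(exc_value, ValueError)
    assert str(exc_value) == 'Dummy ValueError'
```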
@@ -178,7 +178,7 @@ async def test_get_and_set_record(tmp_path: Path, key_value_store_client: KeyVal
     assert bytes_record_info['value'].decode('utf-8') == bytes_value.decode('utf-8')
 
     # Test using file descriptor
-    with open(os.path.join(tmp_path, 'test.json'), 'w+', encoding='utf-8') as f:  # noqa: ASYNC101
+    with open(os.path.join(tmp_path, 'test.json'), 'w+', encoding='utf-8') as f:  # noqa: ASYNC230
         f.write('Test')
     with pytest.raises(NotImplementedError, match='File-like values are not supported in local memory storage'):
         await key_value_store_client.set_record('file', f)
@@ -272,11 +272,11 @@ async def test_writes_correct_metadata(memory_storage_client: MemoryStorageClien
     assert os.path.exists(item_path)
     assert os.path.exists(metadata_path)
 
-    with open(item_path, 'rb') as item_file:  # noqa: ASYNC101
+    with open(item_path, 'rb') as item_file:  # noqa: ASYNC230
         actual_value = maybe_parse_body(item_file.read(), expected_output['contentType'])
         assert actual_value == test_input['value']
 
-    with open(metadata_path, encoding='utf-8') as metadata_file:  # noqa: ASYNC101
+    with open(metadata_path, encoding='utf-8') as metadata_file:  # noqa: ASYNC230
         metadata = json.load(metadata_file)
         assert metadata['key'] == expected_output['key']
         assert expected_output['contentType'] in metadata['contentType']
@@ -364,12 +364,12 @@ async def test_reads_correct_metadata(memory_storage_client: MemoryStorageClient
 
     # Write the store metadata to disk
     store_metadata_path = os.path.join(storage_path, '__metadata__.json')
-    with open(store_metadata_path, mode='wb') as store_metadata_file:  # noqa: ASYNC101
+    with open(store_metadata_path, mode='wb') as store_metadata_file:  # noqa: ASYNC230
         store_metadata_file.write(json_dumps(store_metadata).encode('utf-8'))
 
     # Write the test input item to the disk
     item_path = os.path.join(storage_path, test_input['filename'])
-    with open(item_path, 'wb') as item_file:  # noqa: ASYNC101
+    with open(item_path, 'wb') as item_file:  # noqa: ASYNC230
         if isinstance(test_input['value'], bytes):
             item_file.write(test_input['value'])
         elif isinstance(test_input['value'], str):
@@ -380,7 +380,7 @@ async def test_reads_correct_metadata(memory_storage_client: MemoryStorageClient
     # Optionally write the metadata to disk if there is some
     if test_input['metadata'] is not None:
         metadata_path = os.path.join(storage_path, test_input['filename'] + '.__metadata__.json')
-        with open(metadata_path, 'w', encoding='utf-8') as metadata_file:  # noqa: ASYNC101
+        with open(metadata_path, 'w', encoding='utf-8') as metadata_file:  # noqa: ASYNC230
             metadata_file.write(
                 json_dumps(
                     {
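All of the `ASYNC101` suppressions in the tests become `ASYNC230`, which appears to be the new code the flake8-async rule for blocking `open()` calls inside async functions received in the upgraded Ruff; only the noqa codes change, not the test behaviour. For reference, a hedged sketch of what the rule guards against and the usual workaround in production async code:

```python
import asyncio


async def read_text(path: str) -> str:
    # Calling the blocking built-in open() directly in an async function is
    # what ASYNC230 flags, since it can stall the event loop. Offloading it
    # to a thread (or using a library such as aiofiles) keeps the loop free.
    def _read() -> str:
        with open(path, encoding='utf-8') as f:
            return f.read()

    return await asyncio.to_thread(_read)


print(asyncio.run(read_text('pyproject.toml')))
```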
6 changes: 3 additions & 3 deletions tests/unit/test_utils.py
@@ -177,7 +177,7 @@ async def test__force_remove(tmp_path: Path) -> None:
     assert os.path.exists(test_file_path) is False
 
     # Removes the file if it exists
-    with open(test_file_path, 'a', encoding='utf-8'):  # noqa: ASYNC101
+    with open(test_file_path, 'a', encoding='utf-8'):  # noqa: ASYNC230
         pass
     assert os.path.exists(test_file_path) is True
     await force_remove(test_file_path)
@@ -228,11 +228,11 @@ async def test__force_rename(tmp_path: Path) -> None:
     # Will remove dst_dir if it exists (also covers normal case)
     # Create the src_dir with a file in it
     await mkdir(src_dir)
-    with open(src_file, 'a', encoding='utf-8'):  # noqa: ASYNC101
+    with open(src_file, 'a', encoding='utf-8'):  # noqa: ASYNC230
         pass
     # Create the dst_dir with a file in it
     await mkdir(dst_dir)
-    with open(dst_file, 'a', encoding='utf-8'):  # noqa: ASYNC101
+    with open(dst_file, 'a', encoding='utf-8'):  # noqa: ASYNC230
         pass
     assert os.path.exists(src_file) is True
     assert os.path.exists(dst_file) is True