Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,10 @@ Changelog

- option to add event handlers which accept no arguments

### Fixed

- started enforcing that local storage always uses the UTF-8 encoding

[1.0.0](../../releases/tag/v1.0.0) - 2022-03-13
-----------------------------------------------

Expand Down
2 changes: 1 addition & 1 deletion docs/res/format_docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
subs.append((fr'`({custom_type})\.([A-Z_]+)`', lambda match: f'[{match.group(0)}](#{match.group(1).lower()}-{match.group(2).lower()})'))

# Load the api_reference.md generated by Sphinx
with open('api_reference.md', 'r+') as api_reference:
with open('api_reference.md', 'r+', encoding='utf-8') as api_reference:
api_reference_content = api_reference.read()

# Do the above defined replacements
Expand Down
2 changes: 1 addition & 1 deletion scripts/check_version_in_changelog.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
if not CHANGELOG_PATH.is_file():
raise RuntimeError('Unable to find CHANGELOG.md file')

with open(CHANGELOG_PATH) as changelog_file:
with open(CHANGELOG_PATH, encoding='utf-8') as changelog_file:
for line in changelog_file:
# The heading for the changelog entry for the given version can start with either the version number, or the version number in a link
if re.match(fr'\[?{current_package_version}([\] ]|$)', line):
Expand Down
4 changes: 2 additions & 2 deletions scripts/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
# Load the current version number from src/package_name/_version.py
# It is on a line in the format __version__ = 1.2.3
def get_current_package_version() -> str:
with open(VERSION_FILE_PATH, 'r') as version_file:
with open(VERSION_FILE_PATH, 'r', encoding='utf-8') as version_file:
for line in version_file:
if line.startswith('__version__'):
delim = '"' if '"' in line else "'"
Expand All @@ -21,7 +21,7 @@ def get_current_package_version() -> str:
# Write the given version number to src/package_name/_version.py
# It replaces the version number on the line with the format __version__ = 1.2.3
def set_current_package_version(version: str) -> None:
with open(VERSION_FILE_PATH, 'r+') as version_file:
with open(VERSION_FILE_PATH, 'r+', encoding='utf-8') as version_file:
updated_version_file_lines = []
version_string_found = False
for line in version_file:
Expand Down
1 change: 1 addition & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,7 @@
'flake8-comprehensions ~= 3.10.1',
'flake8-datetimez ~= 20.10.0',
'flake8-docstrings ~= 1.7.0',
'flake8-encodings ~= 0.5.0',
'flake8-isort ~= 6.0.0',
'flake8-noqa ~= 1.3.0',
'flake8-pytest-style ~= 1.7.2',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ def _find_or_create_client_by_id_or_name(
metadata_path = os.path.join(entry.path, '__metadata__.json')
if not os.access(metadata_path, os.F_OK):
continue
with open(metadata_path) as metadata_file:
with open(metadata_path, encoding='utf-8') as metadata_file:
metadata = json.load(metadata_file)
if id and id == metadata.get('id'):
storage_path = entry.path
Expand Down
4 changes: 2 additions & 2 deletions src/apify/_memory_storage/resource_clients/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -412,7 +412,7 @@ def _create_from_directory(
has_seen_metadata_file = True

# We have found the dataset's metadata file, build out information based on it
with open(os.path.join(storage_directory, entry.name)) as f:
with open(os.path.join(storage_directory, entry.name), encoding='utf-8') as f:
metadata = json.load(f)
id = metadata['id']
name = metadata['name']
Expand All @@ -423,7 +423,7 @@ def _create_from_directory(

continue

with open(os.path.join(storage_directory, entry.name)) as f:
with open(os.path.join(storage_directory, entry.name), encoding='utf-8') as f:
entry_content = json.load(f)
entry_name = entry.name.split('.')[0]

Expand Down
6 changes: 3 additions & 3 deletions src/apify/_memory_storage/resource_clients/key_value_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -383,7 +383,7 @@ def _create_from_directory(
if entry.is_file():
if entry.name == '__metadata__.json':
# We have found the store metadata file, build out information based on it
with open(os.path.join(storage_directory, entry.name), encoding='utf8') as f:
with open(os.path.join(storage_directory, entry.name), encoding='utf-8') as f:
metadata = json.load(f)
id = metadata['id']
name = metadata['name']
Expand All @@ -395,7 +395,7 @@ def _create_from_directory(

if '.__metadata__.' in entry.name:
# This is an entry's metadata file, we can use it to create/extend the record
with open(os.path.join(storage_directory, entry.name), encoding='utf8') as f:
with open(os.path.join(storage_directory, entry.name), encoding='utf-8') as f:
metadata = json.load(f)

new_record = {
Expand Down Expand Up @@ -429,7 +429,7 @@ def _create_from_directory(
elif 'application/json' in content_type:
try:
# Try parsing the JSON ahead of time (not ideal but solves invalid files being loaded into stores)
json.loads(file_content)
json.loads(file_content.decode('utf-8'))
except json.JSONDecodeError:
# We need to override and then restore the warnings filter so that the warning gets printed out,
# Otherwise it would be silently swallowed
Expand Down
4 changes: 2 additions & 2 deletions src/apify/_memory_storage/resource_clients/request_queue.py
Original file line number Diff line number Diff line change
Expand Up @@ -423,7 +423,7 @@ def _create_from_directory(
if entry.is_file():
if entry.name == '__metadata__.json':
# We have found the queue's metadata file, build out information based on it
with open(os.path.join(storage_directory, entry.name)) as f:
with open(os.path.join(storage_directory, entry.name), encoding='utf-8') as f:
metadata = json.load(f)
id = metadata['id']
name = metadata['name']
Expand All @@ -435,7 +435,7 @@ def _create_from_directory(

continue

with open(os.path.join(storage_directory, entry.name)) as f:
with open(os.path.join(storage_directory, entry.name), encoding='utf-8') as f:
request = json.load(f)
if request.get('orderNo'):
request['orderNo'] = Decimal(request.get('orderNo'))
Expand Down
2 changes: 1 addition & 1 deletion src/apify/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -323,7 +323,7 @@ def _is_file_or_bytes(value: Any) -> bool:

def _maybe_parse_body(body: bytes, content_type: str) -> Any:
if _is_content_type_json(content_type):
return json.loads(body) # Returns any
return json.loads(body.decode('utf-8')) # Returns any
elif _is_content_type_xml(content_type) or _is_content_type_text(content_type):
return body.decode('utf-8')
return body
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ async def test_get_and_set_record(tmp_path: Path, key_value_store_client: KeyVal
assert bytes_record_info['value'].decode('utf-8') == bytes_value.decode('utf-8')

# Test using file descriptor
with open(os.path.join(tmp_path, 'test.json'), 'w+') as f:
with open(os.path.join(tmp_path, 'test.json'), 'w+', encoding='utf-8') as f:
f.write('Test')
with pytest.raises(NotImplementedError, match='File-like values are not supported in local memory storage'):
await key_value_store_client.set_record('file', f)
Expand Down
6 changes: 3 additions & 3 deletions tests/unit/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -253,7 +253,7 @@ async def test__force_remove(tmp_path: Path) -> None:
assert os.path.exists(test_file_path) is False

# Removes the file if it exists
open(test_file_path, 'a').close()
open(test_file_path, 'a', encoding='utf-8').close()
assert os.path.exists(test_file_path) is True
await _force_remove(test_file_path)
assert os.path.exists(test_file_path) is False
Expand Down Expand Up @@ -323,10 +323,10 @@ async def test__force_rename(tmp_path: Path) -> None:
# Will remove dst_dir if it exists (also covers normal case)
# Create the src_dir with a file in it
await mkdir(src_dir)
open(src_file, 'a').close()
open(src_file, 'a', encoding='utf-8').close()
# Create the dst_dir with a file in it
await mkdir(dst_dir)
open(dst_file, 'a').close()
open(dst_file, 'a', encoding='utf-8').close()
assert os.path.exists(src_file) is True
assert os.path.exists(dst_file) is True
await _force_rename(src_dir, dst_dir)
Expand Down