
Commit

chore: Prefer type() to __class__
jpmckinney committed Jul 16, 2024
1 parent b6e3d3f commit 80f85f9
Showing 2 changed files with 2 additions and 2 deletions.
2 changes: 1 addition & 1 deletion kingfisher_scrapy/items.py
@@ -47,7 +47,7 @@ class DataResource(Resource, arbitrary_types_allowed=True, use_enum_values=True)
     @pydantic.validator('data', pre=True)  # `pre` is needed to prevent pydantic from type casting
     def check_data(cls, v):
         # pydantic has no `condict()` to set `strict=True` or `min_properties=1`. pydantic/pydantic#1277
-        assert isinstance(v, (Data, bytes)), f'{v.__class__.__name__} is not a valid type'
+        assert isinstance(v, (Data, bytes)), f'{type(v).__name__} is not a valid type'
         assert v, 'ensure this value is non-empty'
         return v

2 changes: 1 addition & 1 deletion kingfisher_scrapy/pipelines.py
@@ -56,7 +56,7 @@ def process_item(self, item, spider):

         # Drop FileError items, so that we keep trying to get data.
         if not isinstance(item, (File, FileItem)):
-            raise DropItem(f'Sample: Item is a {item.__class__.__name__}, not a File or FileItem')
+            raise DropItem(f'Sample: Item is a {type(item).__name__}, not a File or FileItem')
         if self.item_count >= spider.sample:
             spider.crawler.engine.close_spider(spider, 'sample')
             raise DropItem('Sample: Maximum sample size reached')
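The change is behavior-preserving: type(obj) and obj.__class__ normally return the same class, but type() is the more idiomatic spelling and cannot be fooled by a class that overrides __class__. A minimal, standalone sketch (hypothetical classes Plain and Liar, not from this repository) illustrating the difference:

class Plain:
    pass

class Liar:
    # A class can override __class__ (as unittest.mock does), so the
    # attribute may not reflect the real runtime type; type() always does.
    @property
    def __class__(self):
        return Plain

obj = Liar()
print(obj.__class__.__name__)  # 'Plain' -- the overridden attribute
print(type(obj).__name__)      # 'Liar'  -- the actual type

For ordinary instances like those handled in the diffs above, both spellings produce the same name, so the change is purely stylistic.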
