Skip to content

Commit

Permalink
test: increase test coverage to 100%
Browse files Browse the repository at this point in the history
This is an effort to raise the code quality in the project by ensuring that most, if not all, of the code is tested. This will be the new standard moving forward.
  • Loading branch information
kennedykori committed Aug 27, 2022
1 parent 7c3456a commit e785028
Show file tree
Hide file tree
Showing 15 changed files with 886 additions and 47 deletions.
4 changes: 4 additions & 0 deletions app/imp/sql_data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
SQLDataSource,
SQLDataSourceType,
SQLExtractMetadata,
SQLUploadChunk,
SQLUploadMetadata,
SupportedDBVendors,
)
from .exceptions import SQLDataError, SQLDataSourceDisposedError
Expand All @@ -12,5 +14,7 @@
"SQLDataSourceDisposedError",
"SQLDataSourceType",
"SQLExtractMetadata",
"SQLUploadChunk",
"SQLUploadMetadata",
"SupportedDBVendors",
]
2 changes: 1 addition & 1 deletion app/lib/tasks/concurrent.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ def execute(self, an_input: _IN) -> _RT:
lambda _partial, _tsk: self._accumulator(
_partial, _tsk.execute(an_input)
),
self._tasks,
self.tasks,
self._initial_value,
)

Expand Down
12 changes: 6 additions & 6 deletions app/use_cases/fetch_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
# =============================================================================


class DoFetchDataSourceTypeSources(Task[DataSourceType, Sequence[DataSource]]):
class DoFetchDataSources(Task[DataSourceType, Sequence[DataSource]]):
"""Fetches all the data sources of a given data source type."""

def __init__(self, data_source_type: DataSourceType):
Expand All @@ -45,7 +45,7 @@ def execute(self, an_input: Transport) -> Sequence[DataSource]:
return data_sources


class DoFetchDataSourceExtracts(Task[DataSource, Sequence[ExtractMetadata]]):
class DoFetchExtractMetadata(Task[DataSource, Sequence[ExtractMetadata]]):
"""Fetch all the extract metadata of a given data source."""

def __init__(self, data_source: DataSource):
Expand Down Expand Up @@ -98,9 +98,9 @@ def execute(
@staticmethod
def _data_source_types_to_tasks(
data_source_types: Iterable[DataSourceType],
) -> Sequence[DoFetchDataSourceTypeSources]:
) -> Sequence[DoFetchDataSources]:
return tuple(
DoFetchDataSourceTypeSources(data_source_type=_data_source_type)
DoFetchDataSources(data_source_type=_data_source_type)
for _data_source_type in data_source_types
)

Expand Down Expand Up @@ -133,8 +133,8 @@ def execute(
@staticmethod
def _data_sources_to_tasks(
data_sources: Iterable[DataSource],
) -> Sequence[DoFetchDataSourceExtracts]:
) -> Sequence[DoFetchExtractMetadata]:
return tuple(
DoFetchDataSourceExtracts(data_source=_data_source)
DoFetchExtractMetadata(data_source=_data_source)
for _data_source in data_sources
)
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ strictSetInference = true
typeCheckingMode = "basic"

[tool.pytest.ini_options]
addopts = "--cov=app --cov-fail-under=85 --cov-report=html --cov-report=term-missing -n auto --junitxml='junitxml_report/report.xml' -v --durations=10 --cache-clear -p no:sugar"
addopts = "--cov=app --cov-fail-under=100 --cov-report=html --cov-report=term-missing -n auto --junitxml='junitxml_report/report.xml' -v --durations=10 --cache-clear"
console_output_style = "progress"
log_cli = 1
log_cli_date_format = "%Y-%m-%d %H:%M:%S"
Expand Down
65 changes: 51 additions & 14 deletions tests/core/factories.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,10 +113,17 @@ def execute(self, an_input: Any) -> Any:


class FakeTransport(Transport):
"""A fake transport that returns empty results."""
"""A fake transport that returns dummy data."""

def __init__(self):
self._is_closed: bool = False
def __init__(
self,
is_closed: bool = False,
fetch_data_source_extracts_count: int = 0,
fetch_data_sources_count: int = 0,
):
self._is_closed: bool = is_closed
self._data_sources_count: int = fetch_data_sources_count
self._extracts_count: int = fetch_data_source_extracts_count

@property
def is_disposed(self) -> bool:
Expand All @@ -131,12 +138,21 @@ def fetch_data_source_extracts(
data_source: DataSource,
**options: TransportOptions,
) -> Sequence[ExtractMetadata]:
return tuple()
return tuple(
FakeExtractMetadataFactory.create_batch(
size=self._extracts_count, data_source=data_source
)
)

def fetch_data_sources(
self, data_source_type: DataSourceType, **options: TransportOptions
) -> Sequence[DataSource]:
return tuple()
return tuple(
FakeDataSourceFactory.create_batch(
size=self._data_sources_count,
data_source_type=data_source_type,
)
)

def mark_upload_as_complete(
self, upload_metadata: UploadMetadata, **options: TransportOptions
Expand All @@ -151,7 +167,9 @@ def post_upload_chunk(
extra_init_kwargs: Optional[Mapping[str, Any]] = None,
**options: TransportOptions,
) -> UploadChunk:
return FakeUploadChunkFactory()
return FakeUploadChunkFactory(
chunk_index=chunk_index, chunk_content=chunk_content
)

def post_upload_metadata(
self,
Expand All @@ -162,7 +180,12 @@ def post_upload_metadata(
extra_init_kwargs: Optional[Mapping[str, Any]] = None,
**options: TransportOptions,
) -> UploadMetadata:
return FakeUploadMetadataFactory()
return FakeUploadMetadataFactory(
extract_metadata=extract_metadata,
content_type=content_type,
org_unit_code=org_unit_code,
org_unit_name=org_unit_name,
)


class FakeUploadChunk(UploadChunk):
Expand All @@ -174,27 +197,34 @@ class FakeUploadChunk(UploadChunk):
class FakeUploadMetadata(UploadMetadata[Any]):
"""A mock upload metadata implementation."""

def __init__(self, **kwargs):
def __init__(self, chunk_count: int = 0, **kwargs):
extract_metadata: FakeExtractMetadata = kwargs.pop("extract_metadata")
super().__init__(**kwargs)
self._chunk_count: int = chunk_count
self._extract_metadata: FakeExtractMetadata = extract_metadata

@property
def extract_metadata(self) -> FakeExtractMetadata:
return self._extract_metadata

def to_task(self) -> Task[Any, Sequence[bytes]]:
return self._FakeUploadTask()
return self._FakeUploadTask(chunk_count=self._chunk_count)

@classmethod
def get_content_type(cls) -> str:
return "text/csv"

class _FakeUploadTask(Task[Any, Sequence[Any]]):
"""A fake task that returns an empty list."""
class _FakeUploadTask(Task[Any, Sequence[bytes]]):
"""A fake task that returns a sequence of random bytes."""

def __init__(self, chunk_count: int):
self._chunk_count: int = chunk_count

def execute(self, an_input: Any) -> Sequence[Any]:
return []
def execute(self, an_input: Any) -> Sequence[bytes]:
return tuple(
f"Bla bla bla {_index} ...".encode()
for _index in range(self._chunk_count)
)


# =============================================================================
Expand Down Expand Up @@ -306,7 +336,7 @@ class FakeExtractMetadataFactory(ExtractMetadataFactory):
preferred_uploads_name = factory.LazyAttribute(
lambda _o: "%s" % _o.name.lower().replace(" ", "_")
)
data_source = factory.SubFactory(FakeDataSource)
data_source = factory.SubFactory(FakeDataSourceFactory)

class Meta:
model = FakeExtractMetadata
Expand All @@ -317,20 +347,27 @@ class FakeTransportFactory(factory.Factory):
A factory for creating fake transport instances that return empty results.
"""

is_closed: bool = False
fetch_data_source_extracts_count: int = 0
fetch_data_sources_count: int = 0

class Meta:
model = FakeTransport


class FakeUploadChunkFactory(UploadChunkFactory):
"""A factory for creating fake upload chunk instances."""

chunk_content = b"Bla bla bla ..."

class Meta:
model = FakeUploadChunk


class FakeUploadMetadataFactory(UploadMetadataFactory):
"""A factory for creating fake upload metadata instances."""

chunk_count = 0
extract_metadata = factory.SubFactory(FakeExtractMetadataFactory)

class Meta:
Expand Down
Loading

0 comments on commit e785028

Please sign in to comment.