diff --git a/src/ragas/metrics/base.py b/src/ragas/metrics/base.py
index d51682edb..2da4ab995 100644
--- a/src/ragas/metrics/base.py
+++ b/src/ragas/metrics/base.py
@@ -63,11 +63,13 @@ def get_required_columns(
 class Metric(ABC):
     @property
     @abstractmethod
-    def name(self) -> str: ...
+    def name(self) -> str:
+        ...
 
     @property
     @abstractmethod
-    def evaluation_mode(self) -> EvaluationMode: ...
+    def evaluation_mode(self) -> EvaluationMode:
+        ...
 
     @abstractmethod
     def init(self, run_config: RunConfig):
@@ -136,9 +138,8 @@ async def ascore(
         return score
 
     @abstractmethod
-    async def _ascore(
-        self, row: t.Dict, callbacks: Callbacks, is_async: bool
-    ) -> float: ...
+    async def _ascore(self, row: t.Dict, callbacks: Callbacks, is_async: bool) -> float:
+        ...
 
 
 @dataclass
diff --git a/src/ragas/testset/generator.py b/src/ragas/testset/generator.py
index 092b461b2..056ee3fe5 100644
--- a/src/ragas/testset/generator.py
+++ b/src/ragas/testset/generator.py
@@ -218,10 +218,10 @@ def generate_with_langchain_docs(
     def init_evolution(self, evolution: Evolution) -> None:
         evolution.docstore = self.docstore
-
+
         if evolution.generator_llm is None:
             evolution.generator_llm = self.generator_llm
-
+
         if evolution.question_filter is None:
             evolution.question_filter = QuestionFilter(llm=self.critic_llm)
         if evolution.node_filter is None:
             evolution.node_filter = NodeFilter(llm=self.critic_llm)
diff --git a/src/ragas/utils.py b/src/ragas/utils.py
index c48de4291..1f961899f 100644
--- a/src/ragas/utils.py
+++ b/src/ragas/utils.py
@@ -148,8 +148,8 @@ def emit_warning(*args, **kwargs):
 
 def get_or_init(
     dictionary: t.Dict[str, t.Any], key: str, default: t.Callable[[], t.Any]
-) -> t.Any: 
+) -> t.Any:
     _value = dictionary.get("key")
     value = _value if _value is not None else default()
-    
-    return value
\ No newline at end of file
+
+    return value
diff --git a/tests/benchmarks/benchmark_eval.py b/tests/benchmarks/benchmark_eval.py
index 151f702ea..15dfa76d3 100644
--- a/tests/benchmarks/benchmark_eval.py
+++ b/tests/benchmarks/benchmark_eval.py
@@ -34,28 +34,15 @@
 ]
 
 # os.environ["PYTHONASYNCIODEBUG"] = "1"
-IGNORE_THREADS = True
 IGNORE_ASYNCIO = False
 
 if __name__ == "__main__":
     # asyncio
-    if not IGNORE_ASYNCIO:
-        print("Starting [Asyncio]")
-        start = time.time()
-        _ = evaluate(
-            eval_dataset,
-            metrics=metrics,
-            is_async=True,
-        )
-        print(f"Time taken [Asyncio]: {time.time() - start:.2f}s")
-
-    # Threads
-    if not IGNORE_THREADS:
-        print("Starting [Threads]")
-        start = time.time()
-        _ = evaluate(
-            eval_dataset,
-            metrics=metrics,
-            is_async=False,
-        )
-        print(f"Time taken [Threads]: {time.time() - start:.2f}s")
+    print("Starting [Asyncio]")
+    start = time.time()
+    _ = evaluate(
+        eval_dataset,
+        metrics=metrics,
+        is_async=True,
+    )
+    print(f"Time taken [Asyncio]: {time.time() - start:.2f}s")
diff --git a/tests/benchmarks/benchmark_testsetgen.py b/tests/benchmarks/benchmark_testsetgen.py
index 90414ebdb..7b569efde 100644
--- a/tests/benchmarks/benchmark_testsetgen.py
+++ b/tests/benchmarks/benchmark_testsetgen.py
@@ -26,7 +26,6 @@ def get_documents():
     return documents
 
 
-IGNORE_THREADS = False
 IGNORE_ASYNCIO = False
 # os.environ["PYTHONASYNCIODEBUG"] = "1"
 
@@ -34,25 +33,12 @@ def get_documents():
     documents = get_documents()
 
     # asyncio
-    if not IGNORE_ASYNCIO:
-        print("Starting [Asyncio]")
-        start = time.time()
-        generator.generate_with_llamaindex_docs(
-            documents=documents,
-            test_size=50,
-            distributions=distributions,
-            is_async=True,
-        )
-        print(f"Time taken: {time.time() - start:.2f}s")
-
-    # Threads
-    if not IGNORE_THREADS:
-        print("Starting [Threads]")
-        start = time.time()
-        generator.generate_with_llamaindex_docs(
-            documents=documents,
-            test_size=50,
-            distributions=distributions,
-            is_async=False,
-        )
-        print(f"Time taken [Threads]: {time.time() - start:.2f}s")
+    print("Starting [Asyncio]")
+    start = time.time()
+    generator.generate_with_llamaindex_docs(
+        documents=documents,
+        test_size=50,
+        distributions=distributions,
+        is_async=True,
+    )
+    print(f"Time taken: {time.time() - start:.2f}s")
diff --git a/tests/unit/test_metric.py b/tests/unit/test_metric.py
index 00ff6afa7..6f1fcf516 100644
--- a/tests/unit/test_metric.py
+++ b/tests/unit/test_metric.py
@@ -1,5 +1,5 @@
-from ragas.metrics.utils import get_available_metrics
 from ragas.metrics.base import EvaluationMode
+from ragas.metrics.utils import get_available_metrics
 
 
 def test_get_available_metrics():
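
A note on the src/ragas/utils.py hunk above: the change there is whitespace-only, but the unchanged context line `_value = dictionary.get("key")` looks up the literal string "key" rather than the `key` parameter, so `get_or_init` ignores its `key` argument and calls `default()` whenever the dictionary has no entry literally named "key". A minimal corrected sketch of the helper, assuming the lookup was meant to use the parameter (this fix is not part of the patch):

    import typing as t

    def get_or_init(
        dictionary: t.Dict[str, t.Any], key: str, default: t.Callable[[], t.Any]
    ) -> t.Any:
        # Look up the caller-supplied key, not the literal string "key".
        _value = dictionary.get(key)
        # Only invoke the default factory when the key is absent.
        return _value if _value is not None else default()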