diff --git a/codeflash/api/aiservice.py b/codeflash/api/aiservice.py
index d206a00b6..20c478eb4 100644
--- a/codeflash/api/aiservice.py
+++ b/codeflash/api/aiservice.py
@@ -4,7 +4,6 @@
 import os
 import platform
 import time
-from pathlib import Path
 from typing import TYPE_CHECKING, Any, cast
 
 import requests
@@ -24,6 +23,8 @@
 from codeflash.version import __version__ as codeflash_version
 
 if TYPE_CHECKING:
+    from pathlib import Path
+
     from codeflash.discovery.functions_to_optimize import FunctionToOptimize
     from codeflash.models.ExperimentMetadata import ExperimentMetadata
     from codeflash.models.models import AIServiceRefinerRequest
@@ -557,7 +558,6 @@ def get_optimization_review(
         function_trace_id: str,
         coverage_message: str,
         replay_tests: str,
-        root_dir: Path,
         concolic_tests: str,  # noqa: ARG002
         calling_fn_details: str,
     ) -> str:
@@ -583,18 +583,13 @@ def get_optimization_review(
         """
         diff_str = "\n".join(
             [
-                unified_diff_strings(
-                    code1=original_code[p],
-                    code2=new_code[p],
-                    fromfile=Path(p).relative_to(root_dir).as_posix(),
-                    tofile=Path(p).relative_to(root_dir).as_posix(),
-                )
+                unified_diff_strings(code1=original_code[p], code2=new_code[p])
                 for p in original_code
                 if not is_zero_diff(original_code[p], new_code[p])
             ]
         )
         code_diff = f"```diff\n{diff_str}\n```"
-        logger.info("!lsp|Computing Optimization Review…")
+        logger.info("loading|Reviewing Optimization…")
         payload = {
             "code_diff": code_diff,
             "explanation": explanation.raw_explanation_message,
diff --git a/codeflash/lsp/features/perform_optimization.py b/codeflash/lsp/features/perform_optimization.py
index cd5b36d8b..9825c5802 100644
--- a/codeflash/lsp/features/perform_optimization.py
+++ b/codeflash/lsp/features/perform_optimization.py
@@ -134,4 +134,5 @@ def sync_perform_optimization(server: CodeflashLanguageServer, cancel_event: thr
         "patch_file": str(patch_path),
         "task_id": params.task_id,
         "explanation": best_optimization.explanation_v2,
+        "optimizationReview": function_optimizer.optimization_review.capitalize(),
     }
diff --git a/codeflash/optimization/function_optimizer.py b/codeflash/optimization/function_optimizer.py
index 1861bbe0a..a0dedb570 100644
--- a/codeflash/optimization/function_optimizer.py
+++ b/codeflash/optimization/function_optimizer.py
@@ -246,6 +246,7 @@ def __init__(
         self.executor = concurrent.futures.ThreadPoolExecutor(
             max_workers=n_tests + 3 if self.experiment_id is None else n_tests + 4
         )
+        self.optimization_review = ""
 
     def can_be_optimized(self) -> Result[tuple[bool, CodeOptimizationContext, dict[Path, str]], str]:
         should_run_experiment = self.experiment_id is not None
@@ -1517,17 +1518,17 @@ def process_review(
         raise_pr = not self.args.no_pr
         staging_review = self.args.staging_review
         opt_review_response = ""
-        # Skip optimization review for async functions for now
-        if (raise_pr or staging_review) and not self.function_to_optimize.is_async:
-            data["root_dir"] = git_root_dir()
-            try:
-                opt_review_response = self.aiservice_client.get_optimization_review(
-                    **data, calling_fn_details=function_references
-                )
-            except Exception as e:
-                logger.debug(f"optimization review response failed, investigate {e}")
-        # Always set optimization_review in data (empty string for async functions)
+        # this will now run regardless of pr, staging review flags
+        try:
+            opt_review_response = self.aiservice_client.get_optimization_review(
+                **data, calling_fn_details=function_references
+            )
+        except Exception as e:
+            logger.debug(f"optimization review response failed, investigate {e}")
         data["optimization_review"] = opt_review_response
+        self.optimization_review = opt_review_response
+        if raise_pr or staging_review:
+            data["root_dir"] = git_root_dir()
         if raise_pr and not staging_review and opt_review_response != "low":
             # Ensure root_dir is set for PR creation (needed for async functions that skip opt_review)
             if "root_dir" not in data: