2 changes: 1 addition & 1 deletion prompting/rewards/exact_match.py
@@ -22,7 +22,7 @@
NO_EOS_PENALTY = 0


-class LogitsRewardModel(BaseRewardModel): #
+class LogitsRewardModel(BaseRewardModel):
async def reward( # noqa: C901
self,
reference: str,
27 changes: 18 additions & 9 deletions validator_api/chat_completion.py
@@ -5,6 +5,8 @@
import time
from typing import Any, AsyncGenerator, Callable, List, Optional

+import httpcore
+import httpx
from fastapi import HTTPException
from fastapi.responses import StreamingResponse
from loguru import logger
@@ -225,15 +227,22 @@ async def collect_remaining_responses(
logger.error(f"Error collecting response from uid {uids[i+1]}: {response}")
continue

-            async for chunk in response:
-                if not chunk.choices or not chunk.choices[0].delta:
-                    continue
-                content = getattr(chunk.choices[0].delta, "content", None)
-                if content is None:
-                    continue
-                timings_list[i + 1].append(time.monotonic() - response_start_time)
-                collected_chunks_list[i + 1].append(content)
-                collected_chunks_raw_list[i + 1].append(chunk)
+            try:
+                async for chunk in response:
+                    if not chunk.choices or not chunk.choices[0].delta:
+                        continue
+                    content = getattr(chunk.choices[0].delta, "content", None)
+                    if content is None:
+                        continue
+
+                    timings_list[i + 1].append(time.monotonic() - response_start_time)
+                    collected_chunks_list[i + 1].append(content)
+                    collected_chunks_raw_list[i + 1].append(chunk)
+
+            except (httpx.ReadTimeout, httpcore.ReadTimeout) as e:
+                logger.warning(f"Stream timeout for index {i}: partial results collected. {e}")
+            except Exception as e:
+                logger.error(f"Unexpected error collecting stream for index {i}: {e}")

except Exception as e:
logger.exception(f"Error collecting remaining responses: {e}")
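The try/except added above means a miner that stalls mid-stream no longer discards the chunks it already sent. A minimal sketch of that pattern in isolation, assuming an OpenAI-style async stream whose chunks expose choices[0].delta.content; the consume_stream name and the plain list-of-strings return are illustrative, not from the PR:

import httpcore
import httpx


async def consume_stream(stream) -> list[str]:
    """Drain an async chunk stream, keeping partial results on a read timeout."""
    chunks: list[str] = []
    try:
        async for chunk in stream:
            # Skip keep-alive or malformed chunks that carry no delta content.
            if not chunk.choices or not chunk.choices[0].delta:
                continue
            content = getattr(chunk.choices[0].delta, "content", None)
            if content is not None:
                chunks.append(content)
    except (httpx.ReadTimeout, httpcore.ReadTimeout):
        # The peer stalled mid-stream; whatever arrived so far is still usable.
        pass
    return chunks

The hunk catches both httpx.ReadTimeout and httpcore.ReadTimeout, presumably because the lower-level httpcore exception can leak through unwrapped depending on the client code path.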
10 changes: 9 additions & 1 deletion validator_api/deep_research/orchestrator_v2.py
@@ -61,7 +61,15 @@ async def search_web(question: str, n_results: int = 2, completions=None) -> dic
)

# Perform web search
-    search_results = await web_retrieval(WebRetrievalRequest(search_query=optimized_query, n_results=n_results))
+    search_results = None
+    for i in range(3):
+        try:
+            search_results = await web_retrieval(WebRetrievalRequest(search_query=optimized_query, n_results=n_results))
+            if search_results.results:
+                break
+        except Exception as e:
+            logger.warning(f"Web retrieval attempt {i + 1} failed: {e}")
+    if search_results is None or not search_results.results:
+        search_results = {"results": []}

# Generate referenced answer
answer_prompt = f"""Based on the provided search results, generate a comprehensive answer to the question.
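The retry loop above is a bounded-attempts pattern; if it recurs elsewhere, it could be factored into a small helper. A hypothetical sketch, not part of the PR — retry_async, its signature, and the accept predicate are invented for illustration:

from typing import Awaitable, Callable, Optional, TypeVar

from loguru import logger

T = TypeVar("T")


async def retry_async(
    fn: Callable[[], Awaitable[T]],
    attempts: int = 3,
    accept: Callable[[T], bool] = bool,
) -> Optional[T]:
    # Run fn up to `attempts` times; return the first result that passes accept().
    for i in range(attempts):
        try:
            result = await fn()
            if accept(result):
                return result
        except Exception as e:
            logger.warning(f"Attempt {i + 1}/{attempts} failed: {e}")
    return None

search_web would then reduce to a single call such as await retry_async(lambda: web_retrieval(WebRetrievalRequest(search_query=optimized_query, n_results=n_results)), accept=lambda r: bool(r.results)), with the empty-result fallback applied when it returns None.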
2 changes: 1 addition & 1 deletion validator_api/web_retrieval.py
@@ -94,7 +94,7 @@ async def web_retrieval(
],
}

-    timeout_seconds = 30 # TODO: We need to scale down this timeout
+    timeout_seconds = 15 # TODO: We need to scale down this timeout
logger.debug(f"🔍 Querying miners: {uids} for web retrieval")
stream_results = await query_miners(uids, body, timeout_seconds)
results = [
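timeout_seconds is handed straight to query_miners, the module's own helper, presumably as a per-query budget. Purely as an illustration (nothing below is in the PR), a caller that also wanted a hard ceiling could wrap the call with asyncio.wait_for; query_miners here is assumed to be the same coroutine the endpoint calls, and the +5.0 grace period is an arbitrary choice:

import asyncio


async def query_with_ceiling(uids, body, timeout_seconds: float = 15.0):
    try:
        # Small grace period on top of the soft timeout query_miners applies itself.
        return await asyncio.wait_for(
            query_miners(uids, body, timeout_seconds),
            timeout=timeout_seconds + 5.0,
        )
    except asyncio.TimeoutError:
        return []  # treat a hard timeout as "no miner responses"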