26 changes: 16 additions & 10 deletions backend/app/api/routes/responses.py
@@ -106,21 +106,27 @@ def process_response(
f"Starting generating response for assistant_id={request.assistant_id}, project_id={request.project_id}, organization_id={organization_id}"
)
try:
response = client.responses.create(
model=assistant.model,
previous_response_id=request.response_id,
instructions=assistant.instructions,
tools=[
# Create response with or without tools based on vector_store_id
params = {
"model": assistant.model,
"previous_response_id": request.response_id,
"instructions": assistant.instructions,
"temperature": assistant.temperature,
"input": [{"role": "user", "content": request.question}],
}

if assistant.vector_store_id:
params["tools"] = [
{
"type": "file_search",
"vector_store_ids": [assistant.vector_store_id],
"max_num_results": assistant.max_num_results,
}
],
temperature=assistant.temperature,
input=[{"role": "user", "content": request.question}],
include=["file_search_call.results"],
)
]
params["include"] = ["file_search_call.results"]

response = client.responses.create(**params)

response_chunks = get_file_search_results(response)
logger.info(
f"Successfully generated response: response_id={response.id}, assistant={request.assistant_id}, project_id={request.project_id}, organization_id={organization_id}"
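For readers skimming the diff, the change simply builds the kwargs dict first and only attaches the file_search tool when a vector store is configured. The sketch below restates that branching in isolation; it is illustrative only, not code from this PR: build_response_params is a hypothetical helper invented here, and the assistant object is faked with SimpleNamespace.

# Hypothetical standalone sketch of the conditional-params pattern (not part of this PR).
from types import SimpleNamespace


def build_response_params(assistant, question, previous_response_id=None):
    """Build kwargs for client.responses.create(); add file_search only when a vector store is set."""
    params = {
        "model": assistant.model,
        "previous_response_id": previous_response_id,
        "instructions": assistant.instructions,
        "temperature": assistant.temperature,
        "input": [{"role": "user", "content": question}],
    }
    if assistant.vector_store_id:
        params["tools"] = [
            {
                "type": "file_search",
                "vector_store_ids": [assistant.vector_store_id],
                "max_num_results": assistant.max_num_results,
            }
        ]
        params["include"] = ["file_search_call.results"]
    return params


# Without a vector store, no tools/include keys are sent to the API.
plain = SimpleNamespace(model="gpt-4", instructions="Test instructions", temperature=0.1,
                        vector_store_id=None, max_num_results=None)
assert "tools" not in build_response_params(plain, "What is Glific?")
assert "include" not in build_response_params(plain, "What is Glific?")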
69 changes: 69 additions & 0 deletions backend/app/tests/api/routes/test_responses.py
@@ -70,3 +70,72 @@
assert response_json["success"] is True
assert response_json["data"]["status"] == "processing"
assert response_json["data"]["message"] == "Response creation started"


@patch("app.api.routes.responses.OpenAI")
@patch("app.api.routes.responses.get_provider_credential")
@patch("app.api.routes.responses.get_assistant_by_id")
def test_responses_endpoint_without_vector_store(
mock_get_assistant,
mock_get_credential,
mock_openai,
db,
):
"""Test the /responses endpoint when assistant has no vector store configured."""
# Setup mock credentials
mock_get_credential.return_value = {"api_key": "test_api_key"}

# Setup mock assistant without vector store
mock_assistant = MagicMock()
mock_assistant.model = "gpt-4"
mock_assistant.instructions = "Test instructions"
mock_assistant.temperature = 0.1
mock_assistant.vector_store_id = None # No vector store configured
mock_get_assistant.return_value = mock_assistant

# Setup mock OpenAI client
mock_client = MagicMock()
mock_openai.return_value = mock_client

# Setup the mock response object
mock_response = MagicMock()
mock_response.id = "mock_response_id"
mock_response.output_text = "Test output"
mock_response.model = "gpt-4"
mock_response.usage.input_tokens = 10
mock_response.usage.output_tokens = 5
mock_response.usage.total_tokens = 15
# No output attribute since there are no tool calls
mock_client.responses.create.return_value = mock_response

# Get the Glific project ID
glific_project = db.exec(select(Project).where(Project.name == "Glific")).first()
if not glific_project:
pytest.skip("Glific project not found in the database")

+    # Use the original API key from seed data
+    original_api_key = "ApiKey No3x47A5qoIGhm0kVKjQ77dhCqEdWRIQZlEPzzzh7i8"
+
+    headers = {"X-API-KEY": original_api_key}
+    request_data = {
+        "project_id": glific_project.id,
+        "assistant_id": "assistant_123",
+        "question": "What is Glific?",
+        "callback_url": "http://example.com/callback",
+    }
+
+    response = client.post("/responses", json=request_data, headers=headers)
+    assert response.status_code == 200
+    response_json = response.json()
+    assert response_json["success"] is True
+    assert response_json["data"]["status"] == "processing"
+    assert response_json["data"]["message"] == "Response creation started"
+
+    # Verify OpenAI client was called without tools
+    mock_client.responses.create.assert_called_once_with(
+        model=mock_assistant.model,
+        previous_response_id=None,
+        instructions=mock_assistant.instructions,
+        temperature=mock_assistant.temperature,
+        input=[{"role": "user", "content": "What is Glific?"}],
+    )
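As a side note, the same property can also be checked by inspecting the mock's recorded kwargs directly. The assert_called_once_with above already guarantees that no tools or include arguments were passed, so the snippet below is only an alternative phrasing, shown as a self-contained sketch with a plain MagicMock rather than the app's fixtures; it is not part of this PR.

# Minimal sketch: verifying that optional kwargs were not passed to a mocked client.
from unittest.mock import MagicMock

mock_client = MagicMock()
mock_client.responses.create(
    model="gpt-4",
    previous_response_id=None,
    instructions="Test instructions",
    temperature=0.1,
    input=[{"role": "user", "content": "What is Glific?"}],
)
kwargs = mock_client.responses.create.call_args.kwargs
assert "tools" not in kwargs and "include" not in kwargs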