Skip to content

fix: use 127.0.0.1 instead of localhost for Ollama connections#156

Merged
kevin-mindverse merged 1 commit into mindverse:master from
CXL-edu:fix/ollama-ip-address
Apr 7, 2025
Merged

fix: use 127.0.0.1 instead of localhost for Ollama connections#156
kevin-mindverse merged 1 commit into mindverse:master from
CXL-edu:fix/ollama-ip-address

Conversation

@CXL-edu
Copy link
Copy Markdown
Contributor

@CXL-edu CXL-edu commented Apr 6, 2025

Use 127.0.0.1 instead of localhost for Ollama local services

Suggested Change

When deploying Ollama as a local service, it is recommended to use 127.0.0.1 as the IP address rather than localhost. This is because in environments with an active VPN, DNS resolution for localhost may not resolve to 127.0.0.1, leading to connection issues.

Technical Explanation

This distinction behaves differently across client libraries:

  • requests library: When using the requests library to connect to Ollama local services, 127.0.0.1 and localhost are typically equivalent since these requests are generally not routed through VPN to external networks, allowing DNS to correctly resolve localhost to 127.0.0.1.

  • OpenAI SDK: When using the OpenAI SDK, VPN may route traffic to external networks, causing localhost to resolve to an address other than 127.0.0.1, preventing proper connection to local services. This is a common issue in regions like China where VPN usage is widespread for accessing international services and APIs.

Recommendation

To ensure consistency and stability across various network environments, it is recommended to always use 127.0.0.1 rather than localhost when connecting to Ollama local services.

Test Code

import openai
import logging
import requests
import json
import time

# Define model name here for easy replacement.
# This tag is passed as the `model` field in every test request below.
MODEL_NAME = "qwen2.5:1.5b"

# Enable detailed logging so each request attempt and its outcome are visible.
# Module-level logger per the standard logging convention.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def test_openai_sdk():
    """Test OpenAI SDK using both 127.0.0.1 and localhost.

    Sends the same chat-completion request through two clients whose only
    difference is the host in ``base_url``. Successes and failures are
    logged; nothing is returned.
    """
    logger.info("\n===== Testing OpenAI SDK =====")

    test_message = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Briefly introduce yourself"}
    ]

    # Probe 127.0.0.1 first, then localhost, so the log output mirrors the
    # comparison described in the PR (IP succeeding while the hostname fails
    # under VPN-affected DNS).
    for host in ("127.0.0.1", "localhost"):
        client = openai.OpenAI(base_url=f"http://{host}:11434/v1", api_key="fake-key")
        try:
            logger.info(f"Sending OpenAI SDK request using {host}...")
            start_time = time.time()
            response = client.chat.completions.create(
                model=MODEL_NAME,
                messages=test_message,
                temperature=0.7,
            )
            elapsed = time.time() - start_time
            logger.info(f"{host} request successful! Time: {elapsed:.2f} seconds")
            logger.info(f"Response: {response.choices[0].message.content[:50]}...\n")
        except Exception as e:
            # Connection failures are expected for localhost under VPN; log
            # and continue so the other host is still exercised.
            logger.error(f"{host} request failed: {e}\n")

def test_requests_library():
    """Test requests library using both 127.0.0.1 and localhost.

    Posts an identical chat payload to the Ollama /api/chat endpoint via
    each host form and logs timing plus a response preview; errors are
    logged rather than raised.
    """
    logger.info("\n===== Testing requests library =====")

    payload = {
        "model": MODEL_NAME,
        "messages": [{"role": "user", "content": "Briefly introduce yourself"}],
        "stream": False
    }

    # Same host order as the SDK test: IP first, hostname second.
    for host in ("127.0.0.1", "localhost"):
        try:
            logger.info(f"Sending requests library request using {host}...")
            start_time = time.time()
            response = requests.post(
                headers={"Content-Type": "application/json"},
                url=f"http://{host}:11434/api/chat",
                json=payload
            )
            elapsed = time.time() - start_time
            logger.info(f"{host} request successful! Time: {elapsed:.2f} seconds")
            logger.info(f"Response: {response.json().get('message', {}).get('content', '')[:50]}...\n")
        except Exception as e:
            # Keep going so both hosts are always attempted.
            logger.error(f"{host} request failed: {e}\n")

def run_tests():
    """Run all tests.

    Executes the requests-library check first (typically more reliable),
    then the OpenAI SDK check (more likely to be affected by VPN DNS
    routing), and prints guidance for interpreting the results.
    """
    logger.info(f"Starting tests - Using model: {MODEL_NAME}")
    logger.info("Note: If you have VPN enabled, you may see issues with localhost connections")

    for check in (test_requests_library, test_openai_sdk):
        check()

    logger.info("Tests completed!")
    logger.info("If you see localhost requests failing while 127.0.0.1 succeeds, this confirms the issue in the PR")
    logger.info("If both addresses work, your environment has normal DNS resolution or is not affected by VPN")

# Run the connectivity comparison only when executed as a script,
# not when imported as a module.
if __name__ == "__main__":
    run_tests()

@kevin-mindverse kevin-mindverse self-requested a review April 7, 2025 02:08
@kevin-mindverse kevin-mindverse merged commit 0dd613d into mindverse:master Apr 7, 2025
1 check passed
Heterohabilis pushed a commit to Heterohabilis/Second-Me that referenced this pull request May 29, 2025
EOMZON pushed a commit to EOMZON/Second-Me that referenced this pull request Feb 1, 2026
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment

Labels

None yet

Projects

None yet

Development

Successfully merging this pull request may close these issues.

3 participants