diff --git a/ChatQnA/tests/test_compose_milvus_on_xeon.sh b/ChatQnA/tests/test_compose_milvus_on_xeon.sh
index 06e0fe1db3..19beb81752 100644
--- a/ChatQnA/tests/test_compose_milvus_on_xeon.sh
+++ b/ChatQnA/tests/test_compose_milvus_on_xeon.sh
@@ -41,7 +41,6 @@ function build_docker_images() {
 }
 function start_services() {
     cd $WORKPATH/docker_compose/intel/cpu/xeon/
-    export no_proxy=${no_proxy},${ip_address}
     export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
     export RERANK_MODEL_ID="BAAI/bge-reranker-base"
     export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct"
diff --git a/CodeTrans/tests/test_compose_on_gaudi.sh b/CodeTrans/tests/test_compose_on_gaudi.sh
index 8d4691f849..a9bf0c0f79 100644
--- a/CodeTrans/tests/test_compose_on_gaudi.sh
+++ b/CodeTrans/tests/test_compose_on_gaudi.sh
@@ -43,9 +43,6 @@ function build_docker_images() {
 
 function start_services() {
     cd $WORKPATH/docker_compose/intel/hpu/gaudi
-
-    export http_proxy=${http_proxy}
-    export https_proxy=${http_proxy}
     export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3"
     export LLM_ENDPOINT="http://${ip_address}:8008"
     export LLM_COMPONENT_NAME="OpeaTextGenService"
diff --git a/CodeTrans/tests/test_compose_on_rocm.sh b/CodeTrans/tests/test_compose_on_rocm.sh
index b0cbbd62a1..16b25c78d0 100644
--- a/CodeTrans/tests/test_compose_on_rocm.sh
+++ b/CodeTrans/tests/test_compose_on_rocm.sh
@@ -42,8 +42,6 @@ function build_docker_images() {
 
 function start_services() {
     cd $WORKPATH/docker_compose/amd/gpu/rocm/
-    export http_proxy=${http_proxy}
-    export https_proxy=${http_proxy}
     export CODETRANS_TGI_SERVICE_PORT=8008
     export CODETRANS_LLM_SERVICE_PORT=9000
     export CODETRANS_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct"
diff --git a/CodeTrans/tests/test_compose_on_xeon.sh b/CodeTrans/tests/test_compose_on_xeon.sh
index 8b279b2f2e..7b27375682 100644
--- a/CodeTrans/tests/test_compose_on_xeon.sh
+++ b/CodeTrans/tests/test_compose_on_xeon.sh
@@ -45,8 +45,6 @@ function build_docker_images() {
 
 function start_services() {
     cd $WORKPATH/docker_compose/intel/cpu/xeon/
-    export http_proxy=${http_proxy}
-    export https_proxy=${http_proxy}
     export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3"
     export LLM_ENDPOINT="http://${ip_address}:8008"
     export LLM_COMPONENT_NAME="OpeaTextGenService"
diff --git a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh
index 1c0404d397..c0f5e1e714 100644
--- a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh
+++ b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh
@@ -41,8 +41,6 @@ function build_docker_images() {
 
 function start_services() {
     cd $WORKPATH/docker_compose/intel/hpu/gaudi/
-    export http_proxy=${http_proxy}
-    export https_proxy=${http_proxy}
     export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3"
     export LLM_ENDPOINT="http://${ip_address}:8008"
     export LLM_COMPONENT_NAME="OpeaTextGenService"
diff --git a/CodeTrans/tests/test_compose_tgi_on_xeon.sh b/CodeTrans/tests/test_compose_tgi_on_xeon.sh
index 95154c7c9d..be7aec935d 100644
--- a/CodeTrans/tests/test_compose_tgi_on_xeon.sh
+++ b/CodeTrans/tests/test_compose_tgi_on_xeon.sh
@@ -41,8 +41,6 @@ function build_docker_images() {
 
 function start_services() {
     cd $WORKPATH/docker_compose/intel/cpu/xeon/
-    export http_proxy=${http_proxy}
-    export https_proxy=${http_proxy}
     export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3"
     export LLM_ENDPOINT="http://${ip_address}:8008"
     export LLM_COMPONENT_NAME="OpeaTextGenService"
diff --git a/CodeTrans/tests/test_compose_vllm_on_rocm.sh b/CodeTrans/tests/test_compose_vllm_on_rocm.sh
index 4574da774b..5279336ba4 100644
--- a/CodeTrans/tests/test_compose_vllm_on_rocm.sh
+++ b/CodeTrans/tests/test_compose_vllm_on_rocm.sh
@@ -40,8 +40,6 @@ function build_docker_images() {
 
 function start_services() {
     cd $WORKPATH/docker_compose/amd/gpu/rocm/
-    export http_proxy=${http_proxy}
-    export https_proxy=${http_proxy}
     export HOST_IP=${ip_address}
     export CODETRANS_VLLM_SERVICE_PORT=8008
     export CODETRANS_LLM_SERVICE_PORT=9000