@@ -20,12 +20,12 @@ function build_docker_images() {
2020 docker build -t opea/llm-tgi:latest -f comps/llms/text-generation/tgi/Dockerfile .
2121 docker build -t opea/dataprep-redis:latest -f comps/dataprep/redis/langchain/docker/Dockerfile .
2222
23- cd ..
24- git clone https://github.com/huggingface/tei-gaudi
25- cd tei-gaudi/
26- docker build --no-cache -f Dockerfile-hpu -t opea/tei-gaudi:latest .
23+ # cd ..
24+ # git clone https://github.com/huggingface/tei-gaudi
25+ # cd tei-gaudi/
26+ # docker build --no-cache -f Dockerfile-hpu -t opea/tei-gaudi:latest .
2727
28- docker pull ghcr.io/huggingface/tgi-gaudi:2.0.0
28+ docker pull ghcr.io/huggingface/tgi-gaudi:2.0.1
2929 docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.2
3030
3131 cd $WORKPATH/docker
@@ -38,8 +38,13 @@ function build_docker_images() {
3838}
3939
4040function start_services() {
41- cd $WORKPATH/docker/gaudi
41+ # build tei-gaudi for each test instead of pull from local registry
42+ cd $WORKPATH
43+ git clone https://github.com/huggingface/tei-gaudi
44+ cd tei-gaudi/
45+ docker build --no-cache -f Dockerfile-hpu -t opea/tei-gaudi:latest .
4246
47+ cd $WORKPATH/docker/gaudi
4348 export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
4449 export RERANK_MODEL_ID="BAAI/bge-reranker-base"
4550 export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
@@ -66,6 +71,9 @@ function start_services() {
6671 sed -i "s#image: opea/chatqna-ui:latest#image: opea/chatqna-ui:${IMAGE_TAG}#g" docker_compose.yaml
6772 sed -i "s#image: opea/chatqna-conversation-ui:latest#image: opea/chatqna-conversation-ui:${IMAGE_TAG}#g" docker_compose.yaml
6873 sed -i "s#image: opea/*#image: ${IMAGE_REPO}opea/#g" docker_compose.yaml
74+ sed -i "s#image: ${IMAGE_REPO}opea/tei-gaudi:latest#image: opea/tei-gaudi:latest#g" docker_compose.yaml
75+ echo "cat docker_compose.yaml"
76+ cat docker_compose.yaml
6977 fi
7078
7179 # Start Docker Containers
0 commit comments