@@ -10,9 +10,9 @@ ANSIBLE_CHATBOT_INFERENCE_MODEL_FILTER ?=
1010LLAMA_STACK_PORT ?= 8321
1111LOCAL_DB_PATH ?= .
1212CONTAINER_DB_PATH ?= /.llama/data/distributions/ansible-chatbot
13- # quay.io/ansible/aap-rag-content:latest does not work with lightspeed-stack:latest
14- # aap-rag-content uses llama-stack:0.2.14 whereas lightspeed-stack:latest uses 0.2.13.
15- RAG_CONTENT_IMAGE ?= quay.io/ansible/aap-rag-content:1.0.1751985495
13+ RAG_CONTENT_IMAGE ?= quay.io/ansible/aap-rag-content:latest
14+ LIGHTSPEED_STACK_CONFIG ?= lightspeed-stack.yaml
15+ LLAMA_STACK_RUN_CONFIG ?= ansible-chatbot-run.yaml
1616# Colors for terminal output
1717RED := \033[0;31m
1818NC := \033[0m # No Color
@@ -23,6 +23,8 @@ NC := \033[0m # No Color
2323
2424UV_HTTP_TIMEOUT =120
2525
26+ PLATFORM ?= "linux/amd64"
27+
2628help:
2729 @echo "Makefile for Ansible Chatbot Stack"
2830 @echo "Available targets:"
@@ -61,7 +63,7 @@ setup-vector-db:
6163 @echo "Setting up vector db and embedding image..."
6264 rm -rf ./vector_db ./embeddings_model
6365 mkdir -p ./vector_db
64- docker run -d --rm --name rag-content $(RAG_CONTENT_IMAGE) sleep infinity
66+ docker run --platform $(PLATFORM) -d --rm --name rag-content $(RAG_CONTENT_IMAGE) sleep infinity
6567 docker cp rag-content:/rag/llama_stack_vector_db/faiss_store.db.gz ./vector_db/aap_faiss_store.db.gz
6668 docker cp rag-content:/rag/embeddings_model .
6769 docker kill rag-content
@@ -76,8 +78,9 @@ check-env-build:
7678
7779build: check-env-build
7880 @echo "Building customized Ansible Chatbot Stack image from lightspeed-core/lightspeed-stack..."
79- docker build -f ./Containerfile \
81+ docker build --platform $(PLATFORM) -f ./Containerfile \
8082 --build-arg ANSIBLE_CHATBOT_VERSION=$(ANSIBLE_CHATBOT_VERSION) \
83+ --build-arg LLAMA_STACK_RUN_CONFIG=$(LLAMA_STACK_RUN_CONFIG) \
8184 -t ansible-chatbot-stack:$(ANSIBLE_CHATBOT_VERSION) .
8285 @printf "Custom image $(RED)ansible-chatbot-stack:$(ANSIBLE_CHATBOT_VERSION)$(NC) built successfully.\n"
8386
@@ -104,10 +107,11 @@ run: check-env-run
104107 @echo "Running Ansible Chatbot Stack container..."
105108 @echo "Using vLLM URL: $(ANSIBLE_CHATBOT_VLLM_URL)"
106109 @echo "Using inference model: $(ANSIBLE_CHATBOT_INFERENCE_MODEL)"
107- docker run --security-opt label=disable -it -p $(LLAMA_STACK_PORT):8080 \
110+ docker run --platform $(PLATFORM) --security-opt label=disable -it -p $(LLAMA_STACK_PORT):8080 \
108111 -v ./embeddings_model:/.llama/data/embeddings_model \
109112 -v ./vector_db/aap_faiss_store.db:$(CONTAINER_DB_PATH)/aap_faiss_store.db \
110- -v ./lightspeed-stack.yaml:/.llama/data/lightspeed-stack.yaml \
113+ -v ./$(LIGHTSPEED_STACK_CONFIG):/.llama/distributions/ansible-chatbot/config/lightspeed-stack.yaml \
114+ -v ./$(LLAMA_STACK_RUN_CONFIG):/.llama/distributions/llama-stack/config/ansible-chatbot-run.yaml \
111115 -v ./ansible-chatbot-system-prompt.txt:/.llama/distributions/ansible-chatbot/system-prompts/default.txt \
112116 --env VLLM_URL=$(ANSIBLE_CHATBOT_VLLM_URL) \
113117 --env VLLM_API_TOKEN=$(ANSIBLE_CHATBOT_VLLM_API_TOKEN) \
@@ -140,11 +144,13 @@ run-local-db: check-env-run-local-db
140144 @echo "Using inference model: $(ANSIBLE_CHATBOT_INFERENCE_MODEL)"
141145 @echo "Using inference model for tools filtering: $(ANSIBLE_CHATBOT_INFERENCE_MODEL_FILTER)"
142146 @echo "Mapping local DB from $(LOCAL_DB_PATH) to $(CONTAINER_DB_PATH)"
143- docker run --security-opt label=disable -it -p $(LLAMA_STACK_PORT):8080 \
147+ docker run --platform $(PLATFORM) --security-opt label=disable -it -p $(LLAMA_STACK_PORT):8080 \
144148 -v $(LOCAL_DB_PATH):$(CONTAINER_DB_PATH) \
145149 -v ./embeddings_model:/app/embeddings_model \
146150 -v ./vector_db/aap_faiss_store.db:$(CONTAINER_DB_PATH)/aap_faiss_store.db \
147- -v ./lightspeed-stack.yaml:/.llama/data/lightspeed-stack.yaml \
151+ -v ./$(LIGHTSPEED_STACK_CONFIG):/.llama/distributions/ansible-chatbot/config/lightspeed-stack.yaml \
152+ -v ./$(LLAMA_STACK_RUN_CONFIG):/.llama/distributions/llama-stack/config/ansible-chatbot-run.yaml \
153+ -v ./ansible-chatbot-system-prompt.txt:/.llama/distributions/ansible-chatbot/system-prompts/default.txt \
148154 --env VLLM_URL=$(ANSIBLE_CHATBOT_VLLM_URL) \
149155 --env VLLM_API_TOKEN=$(ANSIBLE_CHATBOT_VLLM_API_TOKEN) \
150156 --env INFERENCE_MODEL=$(ANSIBLE_CHATBOT_INFERENCE_MODEL) \
0 commit comments