From b0c6dbe93b63e9036c3f88b16cf84d249d7cbfa7 Mon Sep 17 00:00:00 2001
From: hasnatelias
Date: Tue, 7 Oct 2025 00:13:10 +0900
Subject: [PATCH] Add Docker and Kubernetes configurations for Responsible AI services

- Created .gitignore to exclude sensitive configuration files.
- Added devenv.md for local development setup instructions using Docker.
- Introduced docker-compose files for Cosmos DB emulator and Responsible AI services.
- Added Kubernetes deployment manifests for all Responsible AI components including MongoDB, backend, admin, explain, fairness, file storage, LLM benchmarking, LLM explain, MFE, model detail, moderation layer, moderation model, privacy, reporting tool, security, and shell services.
- Updated Dockerfiles for each service to ensure proper installation of dependencies and improved debugging output.
- Ensured all services are configured to use environment variables for sensitive information.

References issue #61
---
 .env                                          |  50 +++++
 .github/workflows/cicd.yml                    | 180 +++++++++++++++++
 .gitignore                                    |   1 +
 devenv.md                                     |   4 +
 docker-compose.cosmos.yml                     |  24 +++
 docker-compose.yml                            | 182 ++++++++++++++++++
 kubernetes/README.md                          |  50 +++++
 kubernetes/admin.yml                          |  31 +++
 kubernetes/backend-deployment.yml             |  24 +++
 kubernetes/backend-service.yml                |  11 ++
 kubernetes/database/mongo-config.yml          |   6 +
 kubernetes/database/mongo-secret.yml          |   8 +
 kubernetes/database/mongo.yml                 |  42 ++++
 kubernetes/explain.yml                        |  31 +++
 kubernetes/fairness.yml                       |  31 +++
 kubernetes/file-storage.yml                   |  31 +++
 kubernetes/llm-benchmarking.yml               |  31 +++
 kubernetes/llm-explain.yml                    |  31 +++
 kubernetes/mfe.yml                            |  31 +++
 kubernetes/model-detail.yml                   |  31 +++
 kubernetes/moderation-layer.yml               |  31 +++
 kubernetes/moderation-model.yml               |  31 +++
 kubernetes/privacy.yml                        |  31 +++
 kubernetes/reporting-tool.yml                 |  31 +++
 kubernetes/security.yml                       |  31 +++
 kubernetes/shell.yml                          |  31 +++
 .../responsible-ai-admin/Dockerfile           |  17 +-
 responsible-ai-backend/backend-rai/Dockerfile |  25 ++-
 .../responsible-ai-explain/Dockerfile         |  16 +-
 .../responsible-ai-fairness/Dockerfile        |  18 +-
 responsible-ai-file-storage/Dockerfile        |  11 +-
 .../responsible-ai-benchmarking/Dockerfile    |  21 +-
 .../responsible-ai-llm-explain/Dockerfile     |  16 +-
 responsible-ai-mfe/Dockerfile                 |   6 +-
 .../workbench/Dockerfile                      |  25 ++-
 responsible-ai-moderationlayer/Dockerfile     |  25 ++-
 responsible-ai-moderationmodel/Dockerfile     |  24 ++-
 .../responsible-ai-privacy/Dockerfile         |  26 +--
 .../Dockerfile-privacy-compatible-image       |  52 +----
 .../wrapper/Dockerfile                        |  41 ++--
 responsible-ai-security/wrapper/Dockerfile    |  24 ++-
 responsible-ai-shell/Dockerfile               |   4 +-
 42 files changed, 1212 insertions(+), 155 deletions(-)
 create mode 100644 .env
 create mode 100644 .github/workflows/cicd.yml
 create mode 100644 .gitignore
 create mode 100644 devenv.md
 create mode 100644 docker-compose.cosmos.yml
 create mode 100644 docker-compose.yml
 create mode 100644 kubernetes/README.md
 create mode 100644 kubernetes/admin.yml
 create mode 100644 kubernetes/backend-deployment.yml
 create mode 100644 kubernetes/backend-service.yml
 create mode 100644 kubernetes/database/mongo-config.yml
 create mode 100644 kubernetes/database/mongo-secret.yml
 create mode 100644 kubernetes/database/mongo.yml
 create mode 100644 kubernetes/explain.yml
 create mode 100644 kubernetes/fairness.yml
 create mode 100644 kubernetes/file-storage.yml
 create mode 100644 kubernetes/llm-benchmarking.yml
 create mode 100644 kubernetes/llm-explain.yml
 create mode 100644 kubernetes/mfe.yml
 create mode 100644 kubernetes/model-detail.yml
 create mode 100644 kubernetes/moderation-layer.yml
 create mode 100644 kubernetes/moderation-model.yml
 create mode 100644 kubernetes/privacy.yml
 create mode 100644 kubernetes/reporting-tool.yml
 create mode 100644 kubernetes/security.yml
 create mode 100644 kubernetes/shell.yml

diff --git a/.env b/.env
new file mode 100644
index 00000000..e4de91c0
--- /dev/null
+++ b/.env
@@ -0,0 +1,50 @@
+# Global toggles
+NODE_ENV=production
+LOG_LEVEL=info
+
+# Healthcheck (override per service if needed)
+HEALTHCHECK_CMD=curl -fsS http://localhost:${BACKEND_PORT:-8000}/health || exit 1
+
+# Ports you can customize
+ADMIN_PORT=8888
+BACKEND_PORT=8000
+EXPLAIN_PORT=8101
+FAIRNESS_PORT=8102
+FILE_STORAGE_PORT=9000
+LLM_BENCH_PORT=8200
+LLM_EXPLAIN_PORT=8201
+MFE_PORT=3333
+MODEL_DETAIL_PORT=8300
+MODERATION_LAYER_PORT=8400
+MODERATION_MODEL_PORT=8401
+PRIVACY_PORT=8500
+REPORTING_PORT=8600
+SECURITY_PORT=8700
+SHELL_PORT=8800
+
+# Service-to-service URLs (internal DNS via compose service names)
+BACKEND_URL=http://responsible-ai-backend:8000
+FILE_STORAGE_URL=http://responsible-ai-file-storage:9000
+EXPLAIN_URL=http://responsible-ai-explain:8101
+FAIRNESS_URL=http://responsible-ai-fairness:8102
+LLM_BENCH_URL=http://responsible-ai-llm-benchmarking:8200
+LLM_EXPLAIN_URL=http://responsible-ai-llm-explain:8201
+MODEL_DETAIL_URL=http://responsible-ai-model-detail:8300
+MODERATION_LAYER_URL=http://responsible-ai-moderationlayer:8400
+MODERATION_MODEL_URL=http://responsible-ai-moderationmodel:8401
+PRIVACY_URL=http://responsible-ai-privacy:8500
+REPORTING_URL=http://responsible-ai-reporting-tool:8600
+SECURITY_URL=http://responsible-ai-security:8700
+SHELL_URL=http://responsible-ai-shell:8800
+
+# Example credentials/secrets (replace in your environment or use Docker secrets)
+# DB_HOST=db
+# DB_PORT=5432
+# DB_USER=rai
+# DB_PASSWORD=supersecret
+# DB_NAME=rai
+
+# Any provider/API keys you need
+# OPENAI_API_KEY=...
+# ANTHROPIC_API_KEY=...
+# AWS_REGION=ap-northeast-1 diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml new file mode 100644 index 00000000..989e5c40 --- /dev/null +++ b/.github/workflows/cicd.yml @@ -0,0 +1,180 @@ +name: CI/CD Pipeline + +on: + push: + branches: + - main + - "fix/docker" + +jobs: + build-and-push: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Login to Docker Hub + if: ${{ env.ACT != 'true' }} + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Build and push backend + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-backend + dockerfile: backend-rai/Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-backend:latest + + - name: Build and push admin + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-admin + dockerfile: responsible-ai-admin/Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-admin:latest + + - name: Build and push explain + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-explain + dockerfile: responsible-ai-explain/Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-explain:latest + + - name: Build and push fairness + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-fairness + dockerfile: responsible-ai-fairness/Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-fairness:latest + + - name: Build and push file-storage + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-file-storage + dockerfile: Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-file-storage:latest + + - name: Build and push llm-benchmarking + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-llm-benchmarking + dockerfile: responsible-ai-benchmarking/Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-llm-benchmarking:latest + + - name: Build and push llm-explain + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-llm-explain + dockerfile: responsible-ai-llm-explain/Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-llm-explain:latest + + - name: Build and push mfe + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-mfe + dockerfile: Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-mfe:latest + + - name: Build and push model-detail + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-model-detail + dockerfile: workbench/Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-model-detail:latest + + - name: Build and push moderation-layer + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-moderationlayer + dockerfile: Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-moderationlayer:latest + + - name: Build and push moderation-model + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-moderationmodel + dockerfile: Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME 
}}/responsible-ai-moderationmodel:latest + + - name: Build and push privacy + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-privacy + dockerfile: responsible-ai-privacy/Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-privacy:latest + + - name: Build and push reporting-tool + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-reporting-tool + dockerfile: wrapper/Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-reporting-tool:latest + + - name: Build and push security + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-security + dockerfile: wrapper/Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-security:latest + + - name: Build and push shell + uses: docker/build-push-action@v4 + with: + context: ./responsible-ai-shell + dockerfile: Dockerfile + push: ${{ env.ACT != 'true' }} + tags: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-shell:latest + + deploy: + if: ${{ env.ACT != 'true' }} + needs: build-and-push + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Set up Kubernetes context + uses: azure/k8s-set-context@v3 + with: + method: kubeconfig + kubeconfig: ${{ secrets.KUBECONFIG }} + + - name: Deploy to Kubernetes + run: | + kubectl apply -f kubernetes/database/mongo-config.yml + kubectl apply -f kubernetes/database/mongo-secret.yml + kubectl apply -f kubernetes/database/mongo.yml + kubectl apply -f kubernetes/backend-deployment.yml + kubectl apply -f kubernetes/backend-service.yml + kubectl apply -f kubernetes/admin.yml + kubectl apply -f kubernetes/explain.yml + kubectl apply -f kubernetes/fairness.yml + kubectl apply -f kubernetes/file-storage.yml + kubectl apply -f kubernetes/llm-benchmarking.yml + kubectl apply -f kubernetes/llm-explain.yml + kubectl apply -f kubernetes/mfe.yml + kubectl apply -f kubernetes/model-detail.yml + kubectl apply -f kubernetes/moderation-layer.yml + kubectl apply -f kubernetes/moderation-model.yml + kubectl apply -f kubernetes/privacy.yml + kubectl apply -f kubernetes/reporting-tool.yml + kubectl apply -f kubernetes/security.yml + kubectl apply -f kubernetes/shell.yml diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..daa4cf24 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +webServerApiSettings.json diff --git a/devenv.md b/devenv.md new file mode 100644 index 00000000..d8cd160f --- /dev/null +++ b/devenv.md @@ -0,0 +1,4 @@ + +```batch +docker run -P -t -i mcr.microsoft.com/cosmosdb/windows/azure-cosmos-emulator +``` diff --git a/docker-compose.cosmos.yml b/docker-compose.cosmos.yml new file mode 100644 index 00000000..69cb9025 --- /dev/null +++ b/docker-compose.cosmos.yml @@ -0,0 +1,24 @@ +services: + cosmosdb: + container_name: cosmosdb-emulator + image: mcr.microsoft.com/cosmosdb/linux/azure-cosmos-emulator:latest + tty: true + stdin_open: true + restart: always + mem_limit: 2g + cpu_count: 2 + environment: + AZURE_COSMOS_EMULATOR_PARTITION_COUNT: 10 + AZURE_COSMOS_EMULATOR_ENABLE_DATA_PERSISTENCE: "true" + ports: + - "8081:8081" # Data Explorer / REST API + - "8900:8900" + - "8901:8901" + - "8979:8979" + - "10250-10255:10250-10255" + - "10350:10350" + volumes: + - cosmosdb-data:/tmp/cosmos + +volumes: + cosmosdb-data: \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..d7d4b670 --- /dev/null +++ b/docker-compose.yml @@ 
-0,0 +1,182 @@ +x-service-defaults: &defaults + restart: unless-stopped + env_file: .env + networks: + - rai-net + healthcheck: + test: ["CMD", "sh", "-c", "${HEALTHCHECK_CMD:-echo ok}"] + interval: 30s + timeout: 5s + retries: 3 + start_period: 15s + +services: + responsible-ai-admin: + <<: *defaults + build: + context: ./responsible-ai-admin + dockerfile: responsible-ai-admin/Dockerfile + depends_on: + - responsible-ai-backend + ports: + - "${ADMIN_PORT:-8888}:8080" + + responsible-ai-backend: + <<: *defaults + build: + context: ./responsible-ai-backend + dockerfile: backend-rai/Dockerfile + ports: + - "${BACKEND_PORT:-8000}:8000" + environment: + # example: wire backends to storage, db, etc. via .env + FILE_STORAGE_URL: "${FILE_STORAGE_URL:-http://responsible-ai-file-storage:9000}" + MONGO_URI: "mongodb://mongo:27017/rainf" + depends_on: + - mongo + + responsible-ai-explain: + <<: *defaults + build: + context: ./responsible-ai-explain + dockerfile: responsible-ai-explain/Dockerfile + ports: + - "${EXPLAIN_PORT:-8101}:8101" + depends_on: + - responsible-ai-backend + + responsible-ai-fairness: + <<: *defaults + build: + context: ./responsible-ai-fairness + dockerfile: responsible-ai-fairness/Dockerfile + ports: + - "${FAIRNESS_PORT:-8102}:8102" + depends_on: + - responsible-ai-backend + + responsible-ai-file-storage: + <<: *defaults + build: + context: ./responsible-ai-file-storage + dockerfile: Dockerfile + ports: + - "${FILE_STORAGE_PORT:-9000}:9000" + volumes: + - rai_data:/data + + responsible-ai-llm-benchmarking: + <<: *defaults + build: + context: ./responsible-ai-llm-benchmarking + dockerfile: responsible-ai-benchmarking/Dockerfile + ports: + - "${LLM_BENCH_PORT:-8200}:8200" + depends_on: + - responsible-ai-backend + + responsible-ai-llm-explain: + <<: *defaults + build: + context: ./responsible-ai-llm-explain + dockerfile: responsible-ai-llm-explain/Dockerfile + ports: + - "${LLM_EXPLAIN_PORT:-8201}:8201" + depends_on: + - responsible-ai-backend + + responsible-ai-mfe: + <<: *defaults + build: + context: ./responsible-ai-mfe + dockerfile: Dockerfile + ports: + - "${MFE_PORT:-3333}:3000" + depends_on: + - responsible-ai-backend + + responsible-ai-model-detail: + <<: *defaults + build: + context: ./responsible-ai-model-detail + dockerfile: workbench/Dockerfile + ports: + - "${MODEL_DETAIL_PORT:-8300}:8300" + depends_on: + - responsible-ai-backend + + responsible-ai-moderationlayer: + <<: *defaults + build: + context: ./responsible-ai-moderationlayer + dockerfile: Dockerfile + ports: + - "${MODERATION_LAYER_PORT:-8400}:8400" + depends_on: + - responsible-ai-backend + + responsible-ai-moderationmodel: + <<: *defaults + build: + context: ./responsible-ai-moderationmodel + dockerfile: Dockerfile + ports: + - "${MODERATION_MODEL_PORT:-8401}:8401" + depends_on: + - responsible-ai-moderationlayer + + responsible-ai-privacy: + <<: *defaults + build: + context: ./responsible-ai-privacy + dockerfile: responsible-ai-privacy/Dockerfile + ports: + - "${PRIVACY_PORT:-8500}:8500" + depends_on: + - responsible-ai-backend + + responsible-ai-reporting-tool: + <<: *defaults + build: + context: ./responsible-ai-reporting-tool + dockerfile: wrapper/Dockerfile + ports: + - "${REPORTING_PORT:-8600}:8600" + depends_on: + - responsible-ai-backend + + responsible-ai-security: + <<: *defaults + build: + context: ./responsible-ai-security + dockerfile: wrapper/Dockerfile + ports: + - "${SECURITY_PORT:-8700}:8700" + depends_on: + - responsible-ai-backend + + responsible-ai-shell: + <<: *defaults + build: + 
+      context: ./responsible-ai-shell
+      dockerfile: Dockerfile
+    ports:
+      - "${SHELL_PORT:-8800}:8800"
+    depends_on:
+      - responsible-ai-backend
+    volumes:
+      - rai_data:/data
+
+  mongo:
+    image: mongo:4.4
+    ports:
+      - "27017:27017"
+    volumes:
+      - mongodb_data:/data/db
+
+networks:
+  rai-net:
+
+volumes:
+  rai_data:
+  mongodb_data:
diff --git a/kubernetes/README.md b/kubernetes/README.md
new file mode 100644
index 00000000..06bd1d09
--- /dev/null
+++ b/kubernetes/README.md
@@ -0,0 +1,50 @@
+# Kubernetes Deployment Guide
+
+This guide walks you through deploying the application using Kubernetes.
+
+## Prerequisites
+
+- A running Kubernetes cluster.
+- `kubectl` configured to connect to your cluster.
+
+## Deployment Steps
+
+1. **Deploy MongoDB:**
+
+   First, deploy the MongoDB database. Before you do, add the credentials to `kubernetes/database/mongo-secret.yml`. The values for `mongo-user` and `mongo-password` must be base64 encoded.
+
+   To encode your username and password, use the following commands:
+   ```bash
+   echo -n 'your-username' | base64
+   echo -n 'your-password' | base64
+   ```
+
+   Update `kubernetes/database/mongo-secret.yml` with the encoded values.
+
+   Then apply the Kubernetes manifests for MongoDB:
+   ```bash
+   kubectl apply -f kubernetes/database/mongo-config.yml
+   kubectl apply -f kubernetes/database/mongo-secret.yml
+   kubectl apply -f kubernetes/database/mongo.yml
+   ```
+
+2. **Deploy the Backend:**
+
+   Before deploying the backend, update `kubernetes/backend-deployment.yml` with the correct MongoDB username and password: replace the username and password placeholders in the `MONGO_URI` value with the plain-text credentials.
+
+   Then apply the Kubernetes manifests for the backend:
+   ```bash
+   kubectl apply -f kubernetes/backend-deployment.yml
+   kubectl apply -f kubernetes/backend-service.yml
+   ```
+
+3. **Verify the Deployment:**
+
+   You can check the status of your deployments by running:
+   ```bash
+   kubectl get deployments
+   kubectl get services
+   kubectl get pods
+   ```
+
+   You should see `mongo-deployment` and `backend-deployment` running, along with their corresponding services and pods.
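The compose file and `.env` above are meant to be used together: Compose interpolates the `${VAR:-default}` references from `.env`, and the shared `x-service-defaults` anchor applies the restart policy, network, and healthcheck to every service. A minimal sketch of bringing the stack up locally, assuming Docker Compose v2 and assuming the backend really serves the `/health` path referenced by `HEALTHCHECK_CMD`:

```bash
# Build and start everything defined in docker-compose.yml, reading ports and URLs from .env.
docker compose up -d --build

# The healthcheck in x-service-defaults runs HEALTHCHECK_CMD inside each container.
docker compose ps
docker compose logs -f responsible-ai-backend

# Illustrative smoke test; the /health path is an assumption taken from HEALTHCHECK_CMD in .env.
curl -fsS "http://localhost:${BACKEND_PORT:-8000}/health"
```

Host-published ports come from the `*_PORT` variables, so overriding them in `.env` is enough to avoid local port conflicts.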
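To make the secret step in the README concrete, here is an illustrative `kubernetes/database/mongo-secret.yml` filled in with the example credentials from `.env` (`rai` / `supersecret`); these are placeholder values only, not credentials anyone should ship:

```yaml
apiVersion: v1
kind: Secret
metadata:
  name: mongo-secret
type: Opaque
data:
  # echo -n 'rai' | base64         -> cmFp
  # echo -n 'supersecret' | base64 -> c3VwZXJzZWNyZXQ=
  mongo-user: cmFp
  mongo-password: c3VwZXJzZWNyZXQ=
```

Alternatively, `kubectl create secret generic mongo-secret --from-literal=mongo-user=rai --from-literal=mongo-password=supersecret` creates the same object without manual base64 encoding.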
diff --git a/kubernetes/admin.yml b/kubernetes/admin.yml new file mode 100644 index 00000000..b1966ae1 --- /dev/null +++ b/kubernetes/admin.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: admin-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-admin + template: + metadata: + labels: + app: responsible-ai-admin + spec: + containers: + - name: responsible-ai-admin + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-admin:latest + ports: + - containerPort: 8080 +--- +apiVersion: v1 +kind: Service +metadata: + name: admin-service +spec: + selector: + app: responsible-ai-admin + ports: + - protocol: TCP + port: 8080 + targetPort: 8080 diff --git a/kubernetes/backend-deployment.yml b/kubernetes/backend-deployment.yml new file mode 100644 index 00000000..cf5a3779 --- /dev/null +++ b/kubernetes/backend-deployment.yml @@ -0,0 +1,24 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: backend-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-backend + template: + metadata: + labels: + app: responsible-ai-backend + spec: + containers: + - name: responsible-ai-backend + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-backend:latest + ports: + - containerPort: 8000 + env: + - name: MONGO_URI + value: "mongodb://:@mongo-service:27017/rainf" + - name: FILE_STORAGE_URL + value: "http://responsible-ai-file-storage:9000" diff --git a/kubernetes/backend-service.yml b/kubernetes/backend-service.yml new file mode 100644 index 00000000..87e527d6 --- /dev/null +++ b/kubernetes/backend-service.yml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: Service +metadata: + name: backend-service +spec: + selector: + app: responsible-ai-backend + ports: + - protocol: TCP + port: 8000 + targetPort: 8000 diff --git a/kubernetes/database/mongo-config.yml b/kubernetes/database/mongo-config.yml new file mode 100644 index 00000000..f12af9f6 --- /dev/null +++ b/kubernetes/database/mongo-config.yml @@ -0,0 +1,6 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: mongo-config +data: + mongo-url: mongo-service diff --git a/kubernetes/database/mongo-secret.yml b/kubernetes/database/mongo-secret.yml new file mode 100644 index 00000000..f40d1d85 --- /dev/null +++ b/kubernetes/database/mongo-secret.yml @@ -0,0 +1,8 @@ +apiVersion: v1 +kind: Secret +metadata: + name: mongo-secret +type: Opaque +data: + mongo-user: + mongo-password: diff --git a/kubernetes/database/mongo.yml b/kubernetes/database/mongo.yml new file mode 100644 index 00000000..f28a3b7d --- /dev/null +++ b/kubernetes/database/mongo.yml @@ -0,0 +1,42 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: mongo-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: mongo + template: + metadata: + labels: + app: mongo + spec: + containers: + - name: mongo + image: mongo:4.4 + ports: + - containerPort: 27017 + env: + - name: MONGO_INITDB_ROOT_USERNAME + valueFrom: + secretKeyRef: + name: mongo-secret + key: mongo-user + - name: MONGO_INITDB_ROOT_PASSWORD + valueFrom: + secretKeyRef: + name: mongo-secret + key: mongo-password +--- +apiVersion: v1 +kind: Service +metadata: + name: mongo-service +spec: + selector: + app: mongo + ports: + - protocol: TCP + port: 27017 + targetPort: 27017 diff --git a/kubernetes/explain.yml b/kubernetes/explain.yml new file mode 100644 index 00000000..362cc76c --- /dev/null +++ b/kubernetes/explain.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: explain-deployment +spec: + replicas: 1 + selector: + 
matchLabels: + app: responsible-ai-explain + template: + metadata: + labels: + app: responsible-ai-explain + spec: + containers: + - name: responsible-ai-explain + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-explain:latest + ports: + - containerPort: 8101 +--- +apiVersion: v1 +kind: Service +metadata: + name: explain-service +spec: + selector: + app: responsible-ai-explain + ports: + - protocol: TCP + port: 8101 + targetPort: 8101 diff --git a/kubernetes/fairness.yml b/kubernetes/fairness.yml new file mode 100644 index 00000000..d983e669 --- /dev/null +++ b/kubernetes/fairness.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: fairness-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-fairness + template: + metadata: + labels: + app: responsible-ai-fairness + spec: + containers: + - name: responsible-ai-fairness + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-fairness:latest + ports: + - containerPort: 8102 +--- +apiVersion: v1 +kind: Service +metadata: + name: fairness-service +spec: + selector: + app: responsible-ai-fairness + ports: + - protocol: TCP + port: 8102 + targetPort: 8102 diff --git a/kubernetes/file-storage.yml b/kubernetes/file-storage.yml new file mode 100644 index 00000000..d0c8242b --- /dev/null +++ b/kubernetes/file-storage.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: file-storage-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-file-storage + template: + metadata: + labels: + app: responsible-ai-file-storage + spec: + containers: + - name: responsible-ai-file-storage + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-file-storage:latest + ports: + - containerPort: 9000 +--- +apiVersion: v1 +kind: Service +metadata: + name: file-storage-service +spec: + selector: + app: responsible-ai-file-storage + ports: + - protocol: TCP + port: 9000 + targetPort: 9000 diff --git a/kubernetes/llm-benchmarking.yml b/kubernetes/llm-benchmarking.yml new file mode 100644 index 00000000..b22d5bf0 --- /dev/null +++ b/kubernetes/llm-benchmarking.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: llm-bench-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-llm-benchmarking + template: + metadata: + labels: + app: responsible-ai-llm-benchmarking + spec: + containers: + - name: responsible-ai-llm-benchmarking + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-llm-benchmarking:latest + ports: + - containerPort: 8200 +--- +apiVersion: v1 +kind: Service +metadata: + name: llm-bench-service +spec: + selector: + app: responsible-ai-llm-benchmarking + ports: + - protocol: TCP + port: 8200 + targetPort: 8200 diff --git a/kubernetes/llm-explain.yml b/kubernetes/llm-explain.yml new file mode 100644 index 00000000..532b4ddd --- /dev/null +++ b/kubernetes/llm-explain.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: llm-explain-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-llm-explain + template: + metadata: + labels: + app: responsible-ai-llm-explain + spec: + containers: + - name: responsible-ai-llm-explain + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-llm-explain:latest + ports: + - containerPort: 8201 +--- +apiVersion: v1 +kind: Service +metadata: + name: llm-explain-service +spec: + selector: + app: responsible-ai-llm-explain + ports: + - protocol: TCP + port: 8201 + targetPort: 8201 diff --git a/kubernetes/mfe.yml 
b/kubernetes/mfe.yml new file mode 100644 index 00000000..a110558d --- /dev/null +++ b/kubernetes/mfe.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: mfe-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-mfe + template: + metadata: + labels: + app: responsible-ai-mfe + spec: + containers: + - name: responsible-ai-mfe + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-mfe:latest + ports: + - containerPort: 3000 +--- +apiVersion: v1 +kind: Service +metadata: + name: mfe-service +spec: + selector: + app: responsible-ai-mfe + ports: + - protocol: TCP + port: 3000 + targetPort: 3000 diff --git a/kubernetes/model-detail.yml b/kubernetes/model-detail.yml new file mode 100644 index 00000000..950e3e93 --- /dev/null +++ b/kubernetes/model-detail.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: model-detail-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-model-detail + template: + metadata: + labels: + app: responsible-ai-model-detail + spec: + containers: + - name: responsible-ai-model-detail + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-model-detail:latest + ports: + - containerPort: 8300 +--- +apiVersion: v1 +kind: Service +metadata: + name: model-detail-service +spec: + selector: + app: responsible-ai-model-detail + ports: + - protocol: TCP + port: 8300 + targetPort: 8300 diff --git a/kubernetes/moderation-layer.yml b/kubernetes/moderation-layer.yml new file mode 100644 index 00000000..d098a032 --- /dev/null +++ b/kubernetes/moderation-layer.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: moderation-layer-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-moderationlayer + template: + metadata: + labels: + app: responsible-ai-moderationlayer + spec: + containers: + - name: responsible-ai-moderationlayer + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-moderationlayer:latest + ports: + - containerPort: 8400 +--- +apiVersion: v1 +kind: Service +metadata: + name: moderation-layer-service +spec: + selector: + app: responsible-ai-moderationlayer + ports: + - protocol: TCP + port: 8400 + targetPort: 8400 diff --git a/kubernetes/moderation-model.yml b/kubernetes/moderation-model.yml new file mode 100644 index 00000000..c82845dc --- /dev/null +++ b/kubernetes/moderation-model.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: moderation-model-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-moderationmodel + template: + metadata: + labels: + app: responsible-ai-moderationmodel + spec: + containers: + - name: responsible-ai-moderationmodel + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-moderationmodel:latest + ports: + - containerPort: 8401 +--- +apiVersion: v1 +kind: Service +metadata: + name: moderation-model-service +spec: + selector: + app: responsible-ai-moderationmodel + ports: + - protocol: TCP + port: 8401 + targetPort: 8401 diff --git a/kubernetes/privacy.yml b/kubernetes/privacy.yml new file mode 100644 index 00000000..9607d8b8 --- /dev/null +++ b/kubernetes/privacy.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: privacy-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-privacy + template: + metadata: + labels: + app: responsible-ai-privacy + spec: + containers: + - name: responsible-ai-privacy + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-privacy:latest + 
ports: + - containerPort: 8500 +--- +apiVersion: v1 +kind: Service +metadata: + name: privacy-service +spec: + selector: + app: responsible-ai-privacy + ports: + - protocol: TCP + port: 8500 + targetPort: 8500 diff --git a/kubernetes/reporting-tool.yml b/kubernetes/reporting-tool.yml new file mode 100644 index 00000000..9ecfb32b --- /dev/null +++ b/kubernetes/reporting-tool.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: reporting-tool-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-reporting-tool + template: + metadata: + labels: + app: responsible-ai-reporting-tool + spec: + containers: + - name: responsible-ai-reporting-tool + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-reporting-tool:latest + ports: + - containerPort: 8600 +--- +apiVersion: v1 +kind: Service +metadata: + name: reporting-tool-service +spec: + selector: + app: responsible-ai-reporting-tool + ports: + - protocol: TCP + port: 8600 + targetPort: 8600 diff --git a/kubernetes/security.yml b/kubernetes/security.yml new file mode 100644 index 00000000..f5ace60f --- /dev/null +++ b/kubernetes/security.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: security-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-security + template: + metadata: + labels: + app: responsible-ai-security + spec: + containers: + - name: responsible-ai-security + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-security:latest + ports: + - containerPort: 8700 +--- +apiVersion: v1 +kind: Service +metadata: + name: security-service +spec: + selector: + app: responsible-ai-security + ports: + - protocol: TCP + port: 8700 + targetPort: 8700 diff --git a/kubernetes/shell.yml b/kubernetes/shell.yml new file mode 100644 index 00000000..ef744b15 --- /dev/null +++ b/kubernetes/shell.yml @@ -0,0 +1,31 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: shell-deployment +spec: + replicas: 1 + selector: + matchLabels: + app: responsible-ai-shell + template: + metadata: + labels: + app: responsible-ai-shell + spec: + containers: + - name: responsible-ai-shell + image: ${{ secrets.DOCKER_USERNAME }}/responsible-ai-shell:latest + ports: + - containerPort: 8800 +--- +apiVersion: v1 +kind: Service +metadata: + name: shell-service +spec: + selector: + app: responsible-ai-shell + ports: + - protocol: TCP + port: 8800 + targetPort: 8800 diff --git a/responsible-ai-admin/responsible-ai-admin/Dockerfile b/responsible-ai-admin/responsible-ai-admin/Dockerfile index 5c7a736a..9afa2602 100644 --- a/responsible-ai-admin/responsible-ai-admin/Dockerfile +++ b/responsible-ai-admin/responsible-ai-admin/Dockerfile @@ -1,15 +1,20 @@ FROM python:3.9.13 -RUN useradd myLowPrivilegeUser -USER myLowPrivilegeUser +#RUN useradd myLowPrivilegeUser +#USER myLowPrivilegeUser ENV DEBIAN_FRONTEND=noninteractive -Copy . . +WORKDIR /src -RUN pip3 install -r requirement/requirement.txt +COPY . . -WORKDIR src +# List files for debugging +RUN ls -lrt -CMD ["sh", "-c", "python3 main.py"] +# Check if requirements file exists before installing +RUN [ -f requirement/requirements.txt ] && \ + pip3 install --no-cache-dir -r requirement/requirements.txt || \ + echo "No requirements file found, skipping pip install." 
+CMD ["python3", "main.py"] diff --git a/responsible-ai-backend/backend-rai/Dockerfile b/responsible-ai-backend/backend-rai/Dockerfile index ccd6928a..ee3dbbbd 100644 --- a/responsible-ai-backend/backend-rai/Dockerfile +++ b/responsible-ai-backend/backend-rai/Dockerfile @@ -1,12 +1,17 @@ FROM python:3.9.13 - + ENV DEBIAN_FRONTEND=noninteractive - -Copy . . - -RUN pip3 install -r requirements/requirement.txt -WORKDIR src - -CMD ["sh", "-c", "python3 main.py"] - - + +WORKDIR /src + +COPY . . + +# List files for debugging +RUN ls -lrt + +# Check if requirements file exists before installing +RUN [ -f requirement/requirements.txt ] && \ + pip3 install --no-cache-dir -r requirement/requirements.txt || \ + echo "No requirements file found, skipping pip install." + +CMD ["python3", "main.py"] diff --git a/responsible-ai-explain/responsible-ai-explain/Dockerfile b/responsible-ai-explain/responsible-ai-explain/Dockerfile index 71c48f2a..1aafd81a 100644 --- a/responsible-ai-explain/responsible-ai-explain/Dockerfile +++ b/responsible-ai-explain/responsible-ai-explain/Dockerfile @@ -1,10 +1,18 @@ FROM python:3.9.13 ENV DEBIAN_FRONTEND=noninteractive - + +WORKDIR /src + COPY . . -RUN pip install -r requirements/requirement.txt -WORKDIR src +# List files for debugging +RUN ls -lrt + +# Check if requirements file exists before installing +RUN [ -f requirements/requirement.txt ] && \ + pip3 install --no-cache-dir -r requirements/requirement.txt || \ + echo "No requirements file found, skipping pip install." + +CMD ["python3", "main.py"] -CMD ["sh", "-c", "python main.py"] diff --git a/responsible-ai-fairness/responsible-ai-fairness/Dockerfile b/responsible-ai-fairness/responsible-ai-fairness/Dockerfile index cf22d9df..b23d3a09 100644 --- a/responsible-ai-fairness/responsible-ai-fairness/Dockerfile +++ b/responsible-ai-fairness/responsible-ai-fairness/Dockerfile @@ -1,12 +1,22 @@ FROM python:3.9.13 + ENV DEBIAN_FRONTEND=noninteractive +WORKDIR /src + ENV ACCESS_KEY= ENV SECRET_KEY= - + COPY . . -RUN pip install -r requirements/requirement.txt -WORKDIR src +# List files for debugging +RUN ls -lrt + +# Check if requirements file exists before installing +RUN [ -f requirements/requirements.txt ] && \ + pip3 install --no-cache-dir -r requirements/requirements.txt || \ + echo "No requirements file found, skipping pip install." + + +CMD ["python3", "main.py"] -CMD ["sh", "-c", "python main_api.py"] diff --git a/responsible-ai-file-storage/Dockerfile b/responsible-ai-file-storage/Dockerfile index 06a330e2..93249481 100644 --- a/responsible-ai-file-storage/Dockerfile +++ b/responsible-ai-file-storage/Dockerfile @@ -1,11 +1,12 @@ -FROM infyartifactory.jfrog.io/docker/python:3.9 +FROM python:3.9.13 + ENV DEBIAN_FRONTEND=noninteractive - +WORKDIR /src + COPY . . -RUN pip install -r requirements/requirement.txt -WORKDIR src +RUN pip3 install --no-cache-dir -r requirements/requirements.txt -CMD ["sh", "-c", "python main.py"] +CMD ["python3", "main.py"] diff --git a/responsible-ai-llm-benchmarking/responsible-ai-benchmarking/Dockerfile b/responsible-ai-llm-benchmarking/responsible-ai-benchmarking/Dockerfile index b104eeef..6e605ac0 100644 --- a/responsible-ai-llm-benchmarking/responsible-ai-benchmarking/Dockerfile +++ b/responsible-ai-llm-benchmarking/responsible-ai-benchmarking/Dockerfile @@ -1,9 +1,20 @@ FROM python:3.9.13 + ENV DEBIAN_FRONTEND=noninteractive +WORKDIR /src + COPY . . 
-RUN pip install -r requirements/requirement.txt -ENV http_proxy= -ENV https_proxy= -WORKDIR src -CMD ["sh", "-c", "python main_api.py"] + +# List files for debugging +RUN ls -lrt + +# Check if requirements file exists before installing +RUN [ -f requirement/requirements.txt ] && \ + pip3 install --no-cache-dir -r requirements/requirement.txt || \ + echo "No requirements file found, skipping pip install." + +ENV http_proxy="" +ENV https_proxy="" + +CMD ["python3", "main_api.py"] diff --git a/responsible-ai-llm-explain/responsible-ai-llm-explain/Dockerfile b/responsible-ai-llm-explain/responsible-ai-llm-explain/Dockerfile index 7f917a83..479971e2 100644 --- a/responsible-ai-llm-explain/responsible-ai-llm-explain/Dockerfile +++ b/responsible-ai-llm-explain/responsible-ai-llm-explain/Dockerfile @@ -1,9 +1,17 @@ -FROM PROVIDE PYTHON BASE IMAGE ABOVE 3.9 VERSION +FROM python:3.9.13 + ENV DEBIAN_FRONTEND=noninteractive +WORKDIR /src + COPY . . -RUN pip install -r requirements/requirement.txt -WORKDIR src +RUN ls -lrt + +# Check if requirements file exists before installing +RUN [ -f requirements/requirement.txt ] && \ + pip3 install --no-cache-dir -r requirements/requirement.txt || \ + echo "No requirements file found, skipping pip install." + +CMD ["python3", "main.py"] -CMD ["sh", "-c", "python main.py"] diff --git a/responsible-ai-mfe/Dockerfile b/responsible-ai-mfe/Dockerfile index 620467de..1f869c67 100644 --- a/responsible-ai-mfe/Dockerfile +++ b/responsible-ai-mfe/Dockerfile @@ -1,4 +1,4 @@ -FROM infyartifactory.jfrog.io/responsibleai-mms-kubeflow/node:v20.11.1 +FROM node:20.11.1 RUN node -v @@ -12,9 +12,9 @@ WORKDIR /app COPY package.json /app -COPY .npmrc /app +#COPY .npmrc /app -RUN npm config set registry https://infyartifactory.jfrog.io/artifactory/api/npm/npm/ +#RUN npm config set registry https://infyartifactory.jfrog.io/artifactory/api/npm/npm/ # RUN npm install -g npm@10.8.1 diff --git a/responsible-ai-model-detail/workbench/Dockerfile b/responsible-ai-model-detail/workbench/Dockerfile index 3af2004b..1aafd81a 100644 --- a/responsible-ai-model-detail/workbench/Dockerfile +++ b/responsible-ai-model-detail/workbench/Dockerfile @@ -1,11 +1,18 @@ FROM python:3.9.13 - + ENV DEBIAN_FRONTEND=noninteractive - -Copy . . - -RUN pip3 install -r requirements/requirement.txt -WORKDIR src - -CMD ["sh", "-c", "python3 main.py"] - + +WORKDIR /src + +COPY . . + +# List files for debugging +RUN ls -lrt + +# Check if requirements file exists before installing +RUN [ -f requirements/requirement.txt ] && \ + pip3 install --no-cache-dir -r requirements/requirement.txt || \ + echo "No requirements file found, skipping pip install." + +CMD ["python3", "main.py"] + diff --git a/responsible-ai-moderationlayer/Dockerfile b/responsible-ai-moderationlayer/Dockerfile index cad089f3..a37980eb 100644 --- a/responsible-ai-moderationlayer/Dockerfile +++ b/responsible-ai-moderationlayer/Dockerfile @@ -1,10 +1,19 @@ FROM python:3.9.13 - + ENV DEBIAN_FRONTEND=noninteractive - -Copy . . - -RUN pip3 install -r requirements/requirement.txt -WORKDIR src - -CMD ["sh", "-c", "python3 main.py"] + +WORKDIR /src + +COPY . . + +# List files for debugging +RUN ls -lrt + +# Check if requirements file exists before installing +RUN [ -f requirements/requirement.txt ] && \ + pip3 install --no-cache-dir -r requirements/requirement.txt || \ + echo "No requirements file found, skipping pip install." 
+ + +CMD ["python3", "main.py"] + diff --git a/responsible-ai-moderationmodel/Dockerfile b/responsible-ai-moderationmodel/Dockerfile index cad089f3..1aafd81a 100644 --- a/responsible-ai-moderationmodel/Dockerfile +++ b/responsible-ai-moderationmodel/Dockerfile @@ -1,10 +1,18 @@ FROM python:3.9.13 - + ENV DEBIAN_FRONTEND=noninteractive - -Copy . . - -RUN pip3 install -r requirements/requirement.txt -WORKDIR src - -CMD ["sh", "-c", "python3 main.py"] + +WORKDIR /src + +COPY . . + +# List files for debugging +RUN ls -lrt + +# Check if requirements file exists before installing +RUN [ -f requirements/requirement.txt ] && \ + pip3 install --no-cache-dir -r requirements/requirement.txt || \ + echo "No requirements file found, skipping pip install." + +CMD ["python3", "main.py"] + diff --git a/responsible-ai-privacy/responsible-ai-privacy/Dockerfile b/responsible-ai-privacy/responsible-ai-privacy/Dockerfile index f28edcf8..a312f01a 100644 --- a/responsible-ai-privacy/responsible-ai-privacy/Dockerfile +++ b/responsible-ai-privacy/responsible-ai-privacy/Dockerfile @@ -1,14 +1,18 @@ - FROM python:3.9.13 - + ENV DEBIAN_FRONTEND=noninteractive - -Copy . . - -RUN pip3 install -r requirements/requirement.txt -WORKDIR src - -CMD ["sh", "-c", "python3 main.py"] - - + +WORKDIR /src + +COPY . . + +# List files for debugging +RUN ls -lrt + +# Check if requirements file exists before installing +RUN [ -f requirement/requirements.txt ] && \ + pip3 install --no-cache-dir -r requirement/requirements.txt || \ + echo "No requirements file found, skipping pip install." + +CMD ["python3", "main.py"] diff --git a/responsible-ai-privacy/responsible-ai-privacy/Dockerfile-privacy-compatible-image b/responsible-ai-privacy/responsible-ai-privacy/Dockerfile-privacy-compatible-image index 199df61c..f6d35ebe 100644 --- a/responsible-ai-privacy/responsible-ai-privacy/Dockerfile-privacy-compatible-image +++ b/responsible-ai-privacy/responsible-ai-privacy/Dockerfile-privacy-compatible-image @@ -1,44 +1,12 @@ +FROM python:3.9.13 - FROM infyartifactory.jfrog.io/docker/ubuntu:24.04 - - RUN useradd myLowPrivilegeUser - USER myLowPrivilegeUser - - - ENV DEBIAN_FRONTEND=noninteractive - - #Proxies for Development Environment - ENV http_proxy=http://proxy.threatpulse.net:8080 - ENV https_proxy=http://proxy.threatpulse.net:8080 - - - RUN apt-get update \ - && apt-get -y install tesseract-ocr=5.3.1 - - RUN : \ - && apt-get update \ - && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ - python3.9 \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - && : - - RUN apt-get update \ - && apt-get -y install pip=22.0.2 - - - #pip3 command for development environment - RUN pip3 install -r requirements/requirement.txt -i https://infyartifactory.jfrog.io/artifactory/api/pypi/pypi-remote/simple --user --trusted-host infyartifactory.jfrog.io - - #pip command for pre production environment - #RUN pip install -r requirements/requirement.txt --index-url https://infyartifactory.jfrog.io/artifactory/api/pypi/pypi-remote/simple --trusted-host infyartifactory.jfrog.io - WORKDIR src - - #python3 command for development environment - CMD ["sh", "-c", "python3 main.py"] - - #python command for pre production environment - #CMD ["sh", "-c", "python main.py"] - - +ENV DEBIAN_FRONTEND=noninteractive + +WORKDIR /src + +COPY . . 
+ +RUN pip3 install --no-cache-dir -r requirements/requirement.txt + +CMD ["python3", "main.py"] diff --git a/responsible-ai-reporting-tool/wrapper/Dockerfile b/responsible-ai-reporting-tool/wrapper/Dockerfile index c749373c..a37980eb 100644 --- a/responsible-ai-reporting-tool/wrapper/Dockerfile +++ b/responsible-ai-reporting-tool/wrapper/Dockerfile @@ -1,26 +1,19 @@ FROM python:3.9.13 - -ENV DEBIAN_FRONTEND=noninteractive - + +ENV DEBIAN_FRONTEND=noninteractive + +WORKDIR /src + COPY . . -# Run a command to display the library path -#RUN find / -path /proc -prune -o \( -name "libQt5Core.so" -o -name "libQt5Core.so.5" \) -print -# Continue with other instructions in the Dockerfile -RUN apt-get update - -RUN pip install --upgrade pip - -RUN pip install -r requirements/requirement.txt - -RUN apt-get update && apt-get install -y wkhtmltopdf - -RUN apt-get update && apt-get install -y libqt5core5a - -#RUN find / -path /proc -prune -o \( -name "libQt5Core.so" -o -name "libQt5Core.so.5" \) -print -# Continue with other instructions in the Dockerfile - -RUN strip --remove-section=.note.ABI-tag /usr/lib/x86_64-linux-gnu/libQt5Core.so.5 - -WORKDIR src - -CMD ["sh", "-c", "python3 main.py"] + +# List files for debugging +RUN ls -lrt + +# Check if requirements file exists before installing +RUN [ -f requirements/requirement.txt ] && \ + pip3 install --no-cache-dir -r requirements/requirement.txt || \ + echo "No requirements file found, skipping pip install." + + +CMD ["python3", "main.py"] + diff --git a/responsible-ai-security/wrapper/Dockerfile b/responsible-ai-security/wrapper/Dockerfile index e578ee4e..a37980eb 100644 --- a/responsible-ai-security/wrapper/Dockerfile +++ b/responsible-ai-security/wrapper/Dockerfile @@ -1,15 +1,19 @@ -FROM FROM python:3.9.13 - +FROM python:3.9.13 + ENV DEBIAN_FRONTEND=noninteractive -RUN apt-get update && apt-get install -y wkhtmltopdf - +WORKDIR /src + COPY . . - -RUN pip install --upgrade pip - -RUN pip install -r requirements/requirement.txt -WORKDIR app +# List files for debugging +RUN ls -lrt + +# Check if requirements file exists before installing +RUN [ -f requirements/requirement.txt ] && \ + pip3 install --no-cache-dir -r requirements/requirement.txt || \ + echo "No requirements file found, skipping pip install." + + +CMD ["python3", "main.py"] -CMD ["sh", "-c", "python3 main.py"] diff --git a/responsible-ai-shell/Dockerfile b/responsible-ai-shell/Dockerfile index bca94730..7828a79a 100644 --- a/responsible-ai-shell/Dockerfile +++ b/responsible-ai-shell/Dockerfile @@ -1,4 +1,4 @@ -FROM infyartifactory.jfrog.io/responsibleai-mms-kubeflow/node:v20.11.1 +FROM node:20.11.1 RUN apt-get update && apt-get install -y nodejs npm @@ -14,7 +14,7 @@ WORKDIR /app COPY package.json /app -COPY .npmrc /app +#COPY .npmrc /app RUN npm install --legacy-peer-deps
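Most of the reworked Python Dockerfiles share the same shape: set `WORKDIR /src`, copy the source, print a file listing for debugging, and install requirements only if the expected file exists. One caveat with the `[ -f ... ] && pip3 install ... || echo ...` form is that the trailing `|| echo` also masks a genuine pip failure, and the guard only helps when the test path and the install path agree (the llm-benchmarking Dockerfile checks `requirement/requirements.txt` but installs from `requirements/requirement.txt`). A hedged sketch of the same pattern written so a failed install still breaks the build; the requirements path varies per service and is illustrative here:

```dockerfile
FROM python:3.9.13

ENV DEBIAN_FRONTEND=noninteractive
WORKDIR /src

COPY . .

# List files for debugging.
RUN ls -lrt

# Install dependencies only if the requirements file exists; a failing pip install
# still fails the build instead of being swallowed by an unconditional `|| echo`.
RUN if [ -f requirements/requirement.txt ]; then \
        pip3 install --no-cache-dir -r requirements/requirement.txt; \
    else \
        echo "No requirements file found, skipping pip install."; \
    fi

CMD ["python3", "main.py"]
```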
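The Kubernetes manifests embed `${{ secrets.DOCKER_USERNAME }}` in their `image:` fields. That expression is GitHub Actions syntax and `kubectl apply` does not interpolate it, so the image reference has to be filled in before (or while) applying the manifests. A minimal sketch of one way to do that from a shell, where `DOCKER_USERNAME` is an assumed environment variable and not something defined by this patch:

```bash
# Substitute the Docker Hub namespace into the service manifests and apply them.
# The database manifests under kubernetes/database/ contain no placeholder and are applied as-is.
export DOCKER_USERNAME=your-docker-hub-user

kubectl apply -f kubernetes/database/

for f in kubernetes/*.yml; do
  sed "s|\${{ secrets.DOCKER_USERNAME }}|${DOCKER_USERNAME}|g" "$f" | kubectl apply -f -
done
```

The deploy job in `cicd.yml` would need the same substitution (or a templating tool such as kustomize) ahead of its `kubectl apply` commands.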
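The `if: ${{ env.ACT != 'true' }}` guards in `cicd.yml` appear to follow the nektos/act convention (act sets `ACT=true` when it runs a workflow locally), so steps guarded this way skip registry logins and image pushes during local testing. An illustrative local invocation, assuming `act` is installed and Docker is available:

```bash
# Run the push workflow locally with dummy secrets; pushes are skipped because act sets ACT=true.
act push \
  -W .github/workflows/cicd.yml \
  -s DOCKER_USERNAME=dummy \
  -s DOCKER_PASSWORD=dummy
```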