From f49ef75ec6bb397d7c7b144bbbe0cb69f876d5cc Mon Sep 17 00:00:00 2001
From: are-ces <195810094+are-ces@users.noreply.github.com>
Date: Tue, 16 Dec 2025 16:25:15 +0100
Subject: [PATCH] Separate inference provider e2e tests from the CI e2e tests

---
 .github/workflows/e2e_tests.yaml           |  80 +-----
 .github/workflows/e2e_tests_providers.yaml | 289 +++++++++++++++++++++
 2 files changed, 290 insertions(+), 79 deletions(-)
 create mode 100644 .github/workflows/e2e_tests_providers.yaml

diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml
index a80de744..9450def8 100644
--- a/.github/workflows/e2e_tests.yaml
+++ b/.github/workflows/e2e_tests.yaml
@@ -10,16 +10,13 @@ jobs:
       fail-fast: false
       matrix:
         mode: ["server", "library"]
-        environment: ["ci", "azure", "vertexai"]
+        environment: ["ci"]
 
     name: "E2E: ${{ matrix.mode }} mode / ${{ matrix.environment }}"
 
     env:
       OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
       E2E_OPENAI_MODEL: ${{ vars.E2E_OPENAI_MODEL }}
-      CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
-      CLIENT_ID: ${{ secrets.CLIENT_ID }}
-      TENANT_ID: ${{ secrets.TENANT_ID }}
       E2E_DEPLOYMENT_MODE: ${{ matrix.mode }}
 
     steps:
@@ -64,71 +61,6 @@ jobs:
           cp "${CONFIG_FILE}" lightspeed-stack.yaml
           echo "✅ Configuration loaded successfully"
 
-      - name: Get Azure API key (access token)
-        if: matrix.environment == 'azure'
-        id: azure_token
-        env:
-          CLIENT_ID: ${{ secrets.CLIENT_ID }}
-          CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
-          TENANT_ID: ${{ secrets.TENANT_ID }}
-        run: |
-          echo "Requesting Azure API token..."
-          RESPONSE=$(curl -s -X POST \
-            -H "Content-Type: application/x-www-form-urlencoded" \
-            -d "client_id=$CLIENT_ID&scope=https://cognitiveservices.azure.com/.default&client_secret=$CLIENT_SECRET&grant_type=client_credentials" \
-            "https://login.microsoftonline.com/$TENANT_ID/oauth2/v2.0/token")
-
-          echo "Response received. Extracting access_token..."
-          ACCESS_TOKEN=$(echo "$RESPONSE" | jq -r '.access_token')
-
-          if [ -z "$ACCESS_TOKEN" ] || [ "$ACCESS_TOKEN" == "null" ]; then
-            echo "❌ Failed to obtain Azure access token. Response:"
-            echo "$RESPONSE"
-            exit 1
-          fi
-
-          echo "✅ Successfully obtained Azure access token."
-          echo "AZURE_API_KEY=$ACCESS_TOKEN" >> $GITHUB_ENV
-
-      - name: Save VertexAI service account key to file
-        if: matrix.environment == 'vertexai'
-        env:
-          GOOGLE_SA_KEY: ${{ secrets.GOOGLE_SA_KEY }}
-        run: |
-          echo "Setting up Google Cloud service account credentials..."
-
-          if [ -z "$GOOGLE_SA_KEY" ]; then
-            echo "❌ GOOGLE_SA_KEY is not set. Please configure the secret in GitHub repository settings."
-            exit 1
-          fi
-
-          GCP_KEYS_PATH=./tmp/.gcp-keys
-          echo "GCP_KEYS_PATH=$GCP_KEYS_PATH" >> $GITHUB_ENV
-
-          mkdir -p $GCP_KEYS_PATH
-
-          echo "Writing service account key to file..."
-
-          # Decode from base64, needed because GH changes the key if using the raw key
-          printf '%s' "$GOOGLE_SA_KEY" | base64 -d > $GCP_KEYS_PATH/gcp-key.json
-
-          # Verify the file was created and is valid JSON
-          if [ ! -f "$GCP_KEYS_PATH/gcp-key.json" ]; then
-            echo "❌ Failed to create gcp-key.json file"
-            exit 1
-          fi
-
-          if ! jq empty "$GCP_KEYS_PATH/gcp-key.json" 2>/dev/null; then
-            echo "❌ gcp-key.json is not valid JSON"
-            exit 1
-          fi
-          echo "✅ gcp-key.json is valid JSON"
-
-          # Set proper permissions (readable by all, needed for container user 1001)
-          chmod 644 $GCP_KEYS_PATH/gcp-key.json
-
-          echo "GOOGLE_APPLICATION_CREDENTIALS=/opt/app-root/.gcp-keys/gcp-key.json" >> $GITHUB_ENV
-
       - name: Select and configure run.yaml
         env:
           CONFIG_ENVIRONMENT: ${{ matrix.environment || 'ci' }}
@@ -195,11 +127,6 @@ jobs:
         if: matrix.mode == 'server'
         env:
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
-          AZURE_API_KEY: ${{ env.AZURE_API_KEY }}
-          VERTEX_AI_LOCATION: ${{ secrets.VERTEX_AI_LOCATION }}
-          VERTEX_AI_PROJECT: ${{ secrets.VERTEX_AI_PROJECT }}
-          GOOGLE_APPLICATION_CREDENTIALS: ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
-          GCP_KEYS_PATH: ${{ env.GCP_KEYS_PATH }}
         run: |
           # Debug: Check if environment variable is available for docker-compose
           echo "OPENAI_API_KEY is set: $([ -n "$OPENAI_API_KEY" ] && echo 'YES' || echo 'NO')"
@@ -221,11 +148,6 @@ jobs:
         if: matrix.mode == 'library'
         env:
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
-          AZURE_API_KEY: ${{ env.AZURE_API_KEY }}
-          VERTEX_AI_LOCATION: ${{ secrets.VERTEX_AI_LOCATION }}
-          VERTEX_AI_PROJECT: ${{ secrets.VERTEX_AI_PROJECT }}
-          GOOGLE_APPLICATION_CREDENTIALS: ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
-          GCP_KEYS_PATH: ${{ env.GCP_KEYS_PATH }}
         run: |
           echo "Starting service in library mode (1 container)"
           docker compose -f docker-compose-library.yaml up -d
diff --git a/.github/workflows/e2e_tests_providers.yaml b/.github/workflows/e2e_tests_providers.yaml
new file mode 100644
index 00000000..e826a1cf
--- /dev/null
+++ b/.github/workflows/e2e_tests_providers.yaml
@@ -0,0 +1,289 @@
+# .github/workflows/e2e_tests_providers.yaml
+name: E2E Inference Provider Tests
+
+on:
+  schedule:
+    - cron: "0 0 * * *" # Runs once a day at midnight UTC
+  workflow_dispatch:
+
+jobs:
+  e2e_tests:
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        mode: ["server", "library"]
+        environment: ["azure", "vertexai"]
+
+    name: "E2E: ${{ matrix.mode }} mode / ${{ matrix.environment }}"
+
+    env:
+      OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+      E2E_OPENAI_MODEL: ${{ vars.E2E_OPENAI_MODEL }}
+      CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
+      CLIENT_ID: ${{ secrets.CLIENT_ID }}
+      TENANT_ID: ${{ secrets.TENANT_ID }}
+      E2E_DEPLOYMENT_MODE: ${{ matrix.mode }}
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          # On PR_TARGET → the fork (or same repo) that opened the PR.
+          # On push → falls back to the current repository.
+          repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}
+
+          # On PR_TARGET → the PR head *commit* (reproducible).
+          # On push → the pushed commit that triggered the workflow.
+          ref: ${{ github.event.pull_request.head.ref || github.sha }}
+
+          # Don’t keep credentials when running untrusted PR code under PR_TARGET.
+          persist-credentials: ${{ github.event_name != 'pull_request_target' }}
+
+      - name: Verify actual git checkout result
+        run: |
+          echo "=== Git Status After Checkout ==="
+          echo "Remote URLs:"
+          git remote -v
+          echo ""
+          echo "Current branch: $(git branch --show-current 2>/dev/null || echo 'detached HEAD')"
+          echo "Current commit: $(git rev-parse HEAD)"
+          echo "Current commit message: $(git log -1 --oneline)"
+          echo ""
+          echo "=== Recent commits ==="
+          git log --oneline -5
+
+      - name: Load lightspeed-stack.yaml configuration
+        run: |
+          MODE="${{ matrix.mode }}"
+          CONFIG_FILE="tests/e2e/configuration/${MODE}-mode/lightspeed-stack.yaml"
+          echo "Loading configuration for ${MODE} mode"
+          echo "Source: ${CONFIG_FILE}"
+
+          if [ ! -f "${CONFIG_FILE}" ]; then
+            echo "❌ Configuration file not found: ${CONFIG_FILE}"
+            exit 1
+          fi
+
+          cp "${CONFIG_FILE}" lightspeed-stack.yaml
+          echo "✅ Configuration loaded successfully"
+
+      - name: Get Azure API key (access token)
+        if: matrix.environment == 'azure'
+        id: azure_token
+        env:
+          CLIENT_ID: ${{ secrets.CLIENT_ID }}
+          CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
+          TENANT_ID: ${{ secrets.TENANT_ID }}
+        run: |
+          echo "Requesting Azure API token..."
+          RESPONSE=$(curl -s -X POST \
+            -H "Content-Type: application/x-www-form-urlencoded" \
+            -d "client_id=$CLIENT_ID&scope=https://cognitiveservices.azure.com/.default&client_secret=$CLIENT_SECRET&grant_type=client_credentials" \
+            "https://login.microsoftonline.com/$TENANT_ID/oauth2/v2.0/token")
+
+          echo "Response received. Extracting access_token..."
+          ACCESS_TOKEN=$(echo "$RESPONSE" | jq -r '.access_token')
+
+          if [ -z "$ACCESS_TOKEN" ] || [ "$ACCESS_TOKEN" == "null" ]; then
+            echo "❌ Failed to obtain Azure access token. Response:"
+            echo "$RESPONSE"
+            exit 1
+          fi
+
+          echo "✅ Successfully obtained Azure access token."
+          echo "AZURE_API_KEY=$ACCESS_TOKEN" >> $GITHUB_ENV
+
+      - name: Save VertexAI service account key to file
+        if: matrix.environment == 'vertexai'
+        env:
+          GOOGLE_SA_KEY: ${{ secrets.GOOGLE_SA_KEY }}
+        run: |
+          echo "Setting up Google Cloud service account credentials..."
+
+          if [ -z "$GOOGLE_SA_KEY" ]; then
+            echo "❌ GOOGLE_SA_KEY is not set. Please configure the secret in GitHub repository settings."
+            exit 1
+          fi
+
+          GCP_KEYS_PATH=./tmp/.gcp-keys
+          echo "GCP_KEYS_PATH=$GCP_KEYS_PATH" >> $GITHUB_ENV
+
+          mkdir -p $GCP_KEYS_PATH
+
+          echo "Writing service account key to file..."
+
+          # Decode from base64, needed because GH changes the key if using the raw key
+          printf '%s' "$GOOGLE_SA_KEY" | base64 -d > $GCP_KEYS_PATH/gcp-key.json
+
+          # Verify the file was created and is valid JSON
+          if [ ! -f "$GCP_KEYS_PATH/gcp-key.json" ]; then
+            echo "❌ Failed to create gcp-key.json file"
+            exit 1
+          fi
+
+          if ! jq empty "$GCP_KEYS_PATH/gcp-key.json" 2>/dev/null; then
+            echo "❌ gcp-key.json is not valid JSON"
+            exit 1
+          fi
+          echo "✅ gcp-key.json is valid JSON"
+
+          # Set proper permissions (readable by all, needed for container user 1001)
+          chmod 644 $GCP_KEYS_PATH/gcp-key.json
+
+          echo "GOOGLE_APPLICATION_CREDENTIALS=/opt/app-root/.gcp-keys/gcp-key.json" >> $GITHUB_ENV
+
+      - name: Select and configure run.yaml
+        env:
+          CONFIG_ENVIRONMENT: ${{ matrix.environment || 'ci' }}
+        run: |
+          CONFIGS_DIR="tests/e2e/configs"
+          ENVIRONMENT="$CONFIG_ENVIRONMENT"
+
+          echo "Looking for configurations in $CONFIGS_DIR/"
+
+          # List available configurations
+          if [ -d "$CONFIGS_DIR" ]; then
+            echo "Available configurations:"
+            ls -la "$CONFIGS_DIR"/*.yaml 2>/dev/null || echo "No YAML files found in $CONFIGS_DIR/"
+          else
+            echo "Configs directory '$CONFIGS_DIR' not found!"
+            exit 1
+          fi
+
+          # Determine which config file to use
+          CONFIG_FILE="$CONFIGS_DIR/run-$ENVIRONMENT.yaml"
+
+          echo "Looking for: $CONFIG_FILE"
+
+          if [ -f "$CONFIG_FILE" ]; then
+            echo "✅ Found config for environment: $ENVIRONMENT"
+            cp "$CONFIG_FILE" run.yaml
+            echo "✅ Config copied to run.yaml"
+          else
+            echo "❌ Configuration file not found: $CONFIG_FILE"
+            echo "Available files in $CONFIGS_DIR:"
+            ls -la "$CONFIGS_DIR/"
+            exit 1
+          fi
+
+      - name: Show final configuration
+        run: |
+          echo "=== Configuration Summary ==="
+          echo "Deployment mode: ${{ matrix.mode }}"
+          echo "Environment: ${{ matrix.environment }}"
+          echo "Source config: tests/e2e/configs/run-${{ matrix.environment }}.yaml"
+          echo ""
+          echo "=== Configuration Preview ==="
+          echo "Providers: $(grep -c "provider_id:" run.yaml)"
+          echo "Models: $(grep -c "model_id:" run.yaml)"
+          echo ""
+          echo "=== lightspeed-stack.yaml ==="
+          grep -A 3 "llama_stack:" lightspeed-stack.yaml
+
+      - name: Docker Login for quay access
+        env:
+          QUAY_ROBOT_USERNAME: ${{ secrets.QUAY_DOWNSTREAM_USERNAME }}
+          QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_DOWNSTREAM_TOKEN }}
+        run: |
+          echo $QUAY_ROBOT_TOKEN | docker login quay.io -u=$QUAY_ROBOT_USERNAME --password-stdin
+
+      - name: Create dummy GCP keys directory
+        if: matrix.environment != 'vertexai'
+        run: |
+          echo "Creating dummy GCP keys directory for non-VertexAI environment..."
+          mkdir -p ./tmp/.gcp-keys-dummy
+          echo "✅ Dummy directory created."
+
+      - name: Run services (Server Mode)
+        if: matrix.mode == 'server'
+        env:
+          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+          AZURE_API_KEY: ${{ env.AZURE_API_KEY }}
+          VERTEX_AI_LOCATION: ${{ secrets.VERTEX_AI_LOCATION }}
+          VERTEX_AI_PROJECT: ${{ secrets.VERTEX_AI_PROJECT }}
+          GOOGLE_APPLICATION_CREDENTIALS: ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
+          GCP_KEYS_PATH: ${{ env.GCP_KEYS_PATH }}
+        run: |
+          # Debug: Check if environment variable is available for docker-compose
+          echo "OPENAI_API_KEY is set: $([ -n "$OPENAI_API_KEY" ] && echo 'YES' || echo 'NO')"
+          echo "OPENAI_API_KEY length: ${#OPENAI_API_KEY}"
+
+          docker compose version
+          docker compose up -d
+
+          # Check for errors and show logs if any services failed
+          if docker compose ps | grep -E 'Exit|exited|stopped'; then
+            echo "Some services failed to start - showing logs:"
+            docker compose logs
+            exit 1
+          else
+            echo "All services started successfully"
+          fi
+
+      - name: Run services (Library Mode)
+        if: matrix.mode == 'library'
+        env:
+          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+          AZURE_API_KEY: ${{ env.AZURE_API_KEY }}
+          VERTEX_AI_LOCATION: ${{ secrets.VERTEX_AI_LOCATION }}
+          VERTEX_AI_PROJECT: ${{ secrets.VERTEX_AI_PROJECT }}
+          GOOGLE_APPLICATION_CREDENTIALS: ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
+          GCP_KEYS_PATH: ${{ env.GCP_KEYS_PATH }}
+        run: |
+          echo "Starting service in library mode (1 container)"
+          docker compose -f docker-compose-library.yaml up -d
+
+          if docker compose -f docker-compose-library.yaml ps | grep -E 'Exit|exited|stopped'; then
+            echo "Service failed to start - showing logs:"
+            docker compose -f docker-compose-library.yaml logs
+            exit 1
+          else
+            echo "Service started successfully"
+          fi
+
+      - name: Wait for services
+        run: |
+          echo "Waiting for services to be healthy..."
+          sleep 20
+
+      - name: Quick connectivity test
+        run: |
+          echo "Testing basic connectivity before full test suite..."
+          curl -f http://localhost:8080/v1/models || {
+            echo "❌ Basic connectivity failed - showing logs"
+            if [ "${{ matrix.mode }}" == "server" ]; then
+              docker compose logs --tail=30
+            else
+              docker compose -f docker-compose-library.yaml logs --tail=30
+            fi
+            exit 1
+          }
+
+      - name: Run e2e tests
+        env:
+          TERM: xterm-256color
+          FORCE_COLOR: 1
+          E2E_DEPLOYMENT_MODE: ${{ matrix.mode }}
+        run: |
+          echo "Installing test dependencies..."
+          pip install uv
+          uv sync
+
+          echo "Running comprehensive e2e test suite..."
+          make test-e2e
+
+      - name: Show logs on failure
+        if: failure()
+        run: |
+          echo "=== Test failure logs ==="
+
+          if [ "${{ matrix.mode }}" == "server" ]; then
+            echo "=== llama-stack logs ==="
+            docker compose logs llama-stack
+            echo ""
+            echo "=== lightspeed-stack logs ==="
+            docker compose logs lightspeed-stack
+          else
+            echo "=== lightspeed-stack (library mode) logs ==="
+            docker compose -f docker-compose-library.yaml logs lightspeed-stack
+          fi
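
Note for reviewers: the new workflow decodes GOOGLE_SA_KEY with base64 -d, so the secret is expected to hold the base64-encoded service account JSON rather than the raw key, and the job only runs on the nightly cron or a manual workflow_dispatch. Below is a minimal sketch of how the secret could be seeded and the workflow exercised on demand; it assumes the GitHub CLI (gh) is authenticated, and the local file name service-account.json plus the <owner>/<repo> placeholder are illustrative only, not part of this patch.

    # Encode the service account key without line wrapping and store it as the repo secret
    base64 -w0 service-account.json | gh secret set GOOGLE_SA_KEY -R <owner>/<repo>

    # Trigger the provider e2e run immediately instead of waiting for the midnight cron
    gh workflow run e2e_tests_providers.yaml -R <owner>/<repo>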