32 changes: 32 additions & 0 deletions .github/workflows/lambda-do-release-runners.yml
@@ -179,6 +179,38 @@ jobs:
tag: ${{ inputs.tag }}
updateOnlyUnreleased: true

release-benchmark-results-uploader:
name: Upload Release for benchmark-results-uploader lambda
runs-on: ubuntu-latest
permissions:
contents: write
env:
REF: ${{ inputs.tag }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ inputs.tag }}

- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: '3.12'

- name: Build deployment.zip
working-directory: aws/lambda/benchmark-results-uploader
run: make deployment.zip

- name: Copy deployment.zip to root
run: cp aws/lambda/benchmark-results-uploader/deployment.zip benchmark-results-uploader.zip

- uses: ncipollo/release-action@v1
with:
artifacts: "benchmark-results-uploader.zip"
allowUpdates: true
draft: true
name: ${{ inputs.tag }}
tag: ${{ inputs.tag }}
updateOnlyUnreleased: true

finish-release:
needs:
18 changes: 18 additions & 0 deletions aws/lambda/benchmark-results-uploader/Makefile
@@ -0,0 +1,18 @@
FUNCTION_NAME=benchmark-results-uploader
PROJECT_NAME=pytorch
REGION=us-east-1

.PHONY: prepare deploy clean

prepare:
rm -rf ./packages
mkdir -p packages
pip install -r requirements.txt -t packages
cd packages && zip -r9 ../function.zip .
zip -g function.zip lambda_function.py
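
# The release workflow above runs `make deployment.zip`; provide that target
# here (assumed fix) by copying the built archive under the expected name.
deployment.zip: prepare
	cp function.zip deployment.zip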

deploy: prepare
aws lambda update-function-code --function-name $(PROJECT_NAME)-$(FUNCTION_NAME) --zip-file fileb://function.zip --region $(REGION)

clean:
rm -rf packages function.zip
56 changes: 56 additions & 0 deletions aws/lambda/benchmark-results-uploader/README.md
@@ -0,0 +1,56 @@
# Benchmark Results Uploader

This AWS Lambda function uploads benchmark result files to S3, authenticating each request against credentials configured in the function's environment variables.

## Functionality

This Lambda:

1. Accepts an S3 bucket name, a destination path, and the file content
2. Authenticates the request against the username/password stored in environment variables
3. Checks whether the specified path already exists in the S3 bucket
4. Uploads the content to that path if it does not already exist
5. Returns an appropriate HTTP status code and message (see the local invocation sketch after this list)
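
For a quick end-to-end check, the handler can be called directly with a hand-built event. The sketch below is illustrative only: the bucket, path, and credential values are placeholders, and the call performs real S3 requests, so local AWS credentials with access to the bucket are assumed.

```python
import json
import os

from lambda_function import lambda_handler

# Must match the credentials the Lambda expects in its environment.
os.environ["AUTH_USERNAME"] = "your_username"
os.environ["AUTH_PASSWORD"] = "your_password"

event = {
    "username": "your_username",
    "password": "your_password",
    "bucket_name": "your-benchmark-bucket",  # placeholder bucket name
    "path": "benchmarks/run-123.json",       # placeholder object key
    "content": json.dumps({"benchmark": "example", "value": 1.23}),
}

# The handler does not use the context argument, so None is fine here.
response = lambda_handler(event, None)
print(response["statusCode"], json.loads(response["body"])["message"])
```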

## Input Parameters

The Lambda expects the following input parameters in the event object (a sample invocation follows the list):

- `username`: Username for authentication
- `password`: Password for authentication
- `bucket_name`: Name of the S3 bucket
- `path`: Path within the bucket where content will be stored
- `content`: The content to upload
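
As an illustration, a caller could invoke the deployed function with boto3 as sketched below. The function name is an assumption based on the Makefile's `$(PROJECT_NAME)-$(FUNCTION_NAME)` convention, and the bucket, path, and credentials are placeholders.

```python
import json

import boto3

lambda_client = boto3.client("lambda", region_name="us-east-1")

payload = {
    "username": "your_username",
    "password": "your_password",
    "bucket_name": "your-benchmark-bucket",  # placeholder bucket name
    "path": "benchmarks/run-123.json",       # placeholder object key
    "content": json.dumps({"benchmark": "example", "value": 1.23}),
}

response = lambda_client.invoke(
    FunctionName="pytorch-benchmark-results-uploader",  # assumed deployed name
    Payload=json.dumps(payload),
)

# The handler's return value is carried in the invoke response payload.
result = json.loads(response["Payload"].read())
print(result["statusCode"], json.loads(result["body"])["message"])
```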

## Environment Variables

The Lambda requires two environment variables (a configuration sketch follows the list):

- `AUTH_USERNAME`: Username for authentication
- `AUTH_PASSWORD`: Password for authentication
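
One way to set these on the deployed function is through `update_function_configuration`; the sketch below assumes the same function name as above, and the literal values stand in for credentials that would normally come from a secrets store.

```python
import boto3

lambda_client = boto3.client("lambda", region_name="us-east-1")

lambda_client.update_function_configuration(
    FunctionName="pytorch-benchmark-results-uploader",  # assumed deployed name
    Environment={
        "Variables": {
            "AUTH_USERNAME": "your_username",  # placeholder credential
            "AUTH_PASSWORD": "your_password",  # placeholder credential
        }
    },
)
```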

## Deployment

To deploy the Lambda function:

```bash
make deploy
```

This will:
1. Install dependencies
2. Package the Lambda function
3. Deploy to AWS

## Testing

To test the Lambda function locally:

```bash
# Setup environment variables
export AUTH_USERNAME=your_username
export AUTH_PASSWORD=your_password

# Run test
python test_lambda_function.py
```
136 changes: 136 additions & 0 deletions aws/lambda/benchmark-results-uploader/lambda_function.py
@@ -0,0 +1,136 @@
import os
import json
import boto3
from botocore.exceptions import ClientError
from typing import Dict, Any

# Configure AWS S3 client
s3_client = boto3.client("s3")


def authenticate(username: str, password: str) -> bool:
"""
Authenticate request using environment variable credentials.

Args:
username (str): Username provided in the request
password (str): Password provided in the request

Returns:
bool: True if authentication is successful, False otherwise
"""
return username == os.environ.get("AUTH_USERNAME") and password == os.environ.get(
"AUTH_PASSWORD"
)


def check_path_exists(bucket: str, path: str) -> bool:
"""
Check if a specific path exists in the S3 bucket.

Args:
bucket (str): The name of the S3 bucket
path (str): The path to check within the bucket

Returns:
bool: True if the path exists, False otherwise
"""
try:
s3_client.head_object(Bucket=bucket, Key=path)
return True
except ClientError as e:
# If the error code is 404, the path doesn't exist
if e.response["Error"]["Code"] == "404":
return False
# For other errors, raise the exception
raise


def upload_to_s3(bucket: str, path: str, content: str) -> Dict[str, Any]:
"""
Upload content to a specific path in the S3 bucket.

Args:
bucket (str): The name of the S3 bucket
path (str): The path within the bucket where content will be stored
content (str): The content to upload

Returns:
Dict[str, Any]: Response from S3 upload
"""
try:
response = s3_client.put_object(
Bucket=bucket, Key=path, Body=content, ContentType="application/json"
)
return {
"statusCode": 200,
"body": json.dumps(
{
"message": f"File uploaded successfully to {bucket}/{path}",
"etag": response.get("ETag", ""),
}
),
}
except Exception as e:
return {
"statusCode": 500,
"body": json.dumps({"message": f"Error uploading file: {str(e)}"}),
}


def lambda_handler(event: Dict[str, Any], context: Any) -> Dict[str, Any]:
"""
Main Lambda handler function.

Args:
event (Dict[str, Any]): Contains input data for the Lambda function
Required fields:
- bucket_name: The name of the S3 bucket
- path: The path within the bucket where content will be stored
- content: The content to upload
- username: Username for authentication
- password: Password for authentication
context (Any): Provides runtime information about the Lambda function

Returns:
Dict[str, Any]: Response containing status and result information
"""
# Extract authentication parameters
Collaborator: The code below imo is pretty clear. I don't think the comment here is necessary.

try:
username = event["username"]
password = event["password"]
except KeyError:
return {
"statusCode": 401,
"body": json.dumps({"message": "Authentication credentials are required"}),
}

# Validate authentication
Collaborator: The code below imo is pretty clear. I don't think the comment here is necessary.

if not authenticate(username, password):
return {
"statusCode": 403,
"body": json.dumps({"message": "Invalid authentication credentials"}),
}

# Extract input parameters from the event
Collaborator: I think in this case the comment gives less detail than the code it's describing. I'd just remove the comment altogether.

try:
bucket_name = event["bucket_name"]
path = event["path"]
content = event["content"]
except KeyError as e:
return {
"statusCode": 400,
"body": json.dumps({"message": f"Missing required parameter: {str(e)}"}),
}

# Check if the path already exists in the bucket
Collaborator: IMO this comment is unnecessary since check_path_exists() is already a self-describing function name.

if check_path_exists(bucket_name, path):
return {
"statusCode": 409, # Conflict status code
"body": json.dumps(
{"message": f"Path {path} already exists in bucket {bucket_name}"}
),
}

# Upload the content to S3
Collaborator: IMO this comment is unnecessary since the function name it's commenting on is already descriptive.

return upload_to_s3(bucket_name, path, content)
1 change: 1 addition & 0 deletions aws/lambda/benchmark-results-uploader/requirements.txt
@@ -0,0 +1 @@
boto3==1.36.21
137 changes: 137 additions & 0 deletions aws/lambda/benchmark-results-uploader/test_lambda_function.py
@@ -0,0 +1,137 @@
import os
import unittest
import json
from unittest.mock import patch
from botocore.exceptions import ClientError
from lambda_function import (
lambda_handler,
check_path_exists,
upload_to_s3,
authenticate,
)


class TestBenchmarkResultsUploader(unittest.TestCase):
def setUp(self):
# Set up test environment variables
os.environ["AUTH_USERNAME"] = "test_user"
os.environ["AUTH_PASSWORD"] = "test_password"

# Test event with valid credentials
self.valid_event = {
"username": "test_user",
"password": "test_password",
"bucket_name": "test-bucket",
"path": "test/path.json",
"content": '{"test": "data"}',
}

# Test event with invalid credentials
self.invalid_auth_event = {
"username": "wrong_user",
"password": "wrong_password",
"bucket_name": "test-bucket",
"path": "test/path.json",
"content": '{"test": "data"}',
}

# Test event missing required fields
self.incomplete_event = {
"username": "test_user",
"password": "test_password",
"bucket_name": "test-bucket",
}

@patch("lambda_function.authenticate")
def test_authentication_failure(self, mock_authenticate):
mock_authenticate.return_value = False
response = lambda_handler(self.invalid_auth_event, {})
self.assertEqual(response["statusCode"], 403)
self.assertIn(
"Invalid authentication credentials",
json.loads(response["body"])["message"],
)

@patch("lambda_function.authenticate")
@patch("lambda_function.check_path_exists")
@patch("lambda_function.upload_to_s3")
def test_successful_upload(
self, mock_upload_to_s3, mock_check_path_exists, mock_authenticate
):
mock_authenticate.return_value = True
mock_check_path_exists.return_value = False

expected_response = {
"statusCode": 200,
"body": json.dumps(
{
"message": "File uploaded successfully to test-bucket/test/path.json",
"etag": "test-etag",
}
),
}
mock_upload_to_s3.return_value = expected_response

response = lambda_handler(self.valid_event, {})
self.assertEqual(response["statusCode"], 200)
mock_upload_to_s3.assert_called_once_with(
"test-bucket", "test/path.json", '{"test": "data"}'
)

@patch("lambda_function.authenticate")
@patch("lambda_function.check_path_exists")
def test_path_already_exists(self, mock_check_path_exists, mock_authenticate):
mock_authenticate.return_value = True
mock_check_path_exists.return_value = True

response = lambda_handler(self.valid_event, {})
self.assertEqual(response["statusCode"], 409)
self.assertIn("already exists", json.loads(response["body"])["message"])

@patch("lambda_function.authenticate")
def test_missing_parameters(self, mock_authenticate):
mock_authenticate.return_value = True

response = lambda_handler(self.incomplete_event, {})
self.assertEqual(response["statusCode"], 400)
self.assertIn(
"Missing required parameter", json.loads(response["body"])["message"]
)

@patch("lambda_function.s3_client")
def test_check_path_exists_true(self, mock_s3_client):
mock_s3_client.head_object.return_value = {}
self.assertTrue(check_path_exists("test-bucket", "test/path.json"))

@patch("lambda_function.s3_client")
def test_check_path_exists_false(self, mock_s3_client):
error_response = {"Error": {"Code": "404"}}
mock_s3_client.head_object.side_effect = ClientError(
error_response, "HeadObject"
)
self.assertFalse(check_path_exists("test-bucket", "test/path.json"))

@patch("lambda_function.s3_client")
def test_upload_to_s3_success(self, mock_s3_client):
mock_s3_client.put_object.return_value = {"ETag": "test-etag"}
response = upload_to_s3("test-bucket", "test/path.json", '{"test": "data"}')
self.assertEqual(response["statusCode"], 200)
body = json.loads(response["body"])
self.assertIn("File uploaded successfully", body["message"])
self.assertEqual(body["etag"], "test-etag")

@patch("lambda_function.s3_client")
def test_upload_to_s3_failure(self, mock_s3_client):
mock_s3_client.put_object.side_effect = Exception("Test error")
response = upload_to_s3("test-bucket", "test/path.json", '{"test": "data"}')
self.assertEqual(response["statusCode"], 500)
self.assertIn("Error uploading file", json.loads(response["body"])["message"])

def test_authenticate(self):
self.assertTrue(authenticate("test_user", "test_password"))
self.assertFalse(authenticate("wrong_user", "test_password"))
self.assertFalse(authenticate("test_user", "wrong_password"))


if __name__ == "__main__":
unittest.main()