diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f71973d..378a86a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -73,3 +73,29 @@ jobs: # - name: Terraform Apply # if: github.ref == 'refs/heads/main' # run: terraform -chdir=terraform/ apply -auto-approve + + # testing: # name: Testing # runs-on: ubuntu-latest # needs: code-quality # steps: # - uses: actions/checkout@v3 + + # - name: Set up Python # uses: actions/setup-python@v5 # with: # python-version: '3.11' + + # - name: Install test dependencies # run: pip install -r python/test/requirements.txt pytest pytest-cov mutmut + + # - name: Unit testing # run: pytest --cov=python/src --cov-report=term-missing python/test + + # - name: Mutation testing # run: | # mutmut run --paths-to-mutate=python/src # mutmut results + + # - name: Integration testing # run: pytest python/integration
+- Lambda: `delete_lambda_function`, `list_lambda_functions` +- SQS: `redrive_sqs_dlq`, `send_sqs_message` +- ECR: `get_ecr_login_and_repo_uri`, `list_ecr_repositories` +- ECS Fargate: `list_ecs_clusters`, `run_fargate_task` +- DynamoDB: `put_dynamodb_item`, `query_dynamodb` +- CloudWatch: `put_cloudwatch_metric`, `get_cloudwatch_metric_statistics` +- SNS: `publish_sns_message`, `list_sns_topics` +- S3: `upload_file_to_s3`, `list_s3_objects` +- Glue: `start_glue_job`, `get_glue_job_run` ______________________________________________________________________ @@ -129,13 +136,35 @@ The Lambda function expects an event with a key indicating the action, for examp } ``` -Supported events: +**Supported events:** - `delete_lambda_function` - `redrive_sqs_dlq` - `get_ecr_login_and_repo_uri` +- `list_lambda_functions` +- `list_ecr_repositories` +- `list_ecs_clusters` +- `run_fargate_task` +- `put_dynamodb_item` +- `query_dynamodb` +- `put_cloudwatch_metric` +- `get_cloudwatch_metric_statistics` +- `publish_sns_message` +- `list_sns_topics` +- `upload_file_to_s3` +- `list_s3_objects` +- `start_glue_job` +- `get_glue_job_run` + +**Example event for running a Fargate task:** -Update the Lambda code to handle your specific use cases. +```json +{ + "body": "{\"event\": \"run_fargate_task\", \"cluster\": \"my-cluster\", \"task_definition\": \"my-task-def\", \"subnets\": [\"subnet-xxxxxx\"]}" +} +``` + +Update the Lambda code or event payloads to handle your specific use cases. 
______________________________________________________________________ diff --git a/documentation/placeholder.txt b/documentation/placeholder.txt deleted file mode 100644 index e69de29..0000000 diff --git a/python/integration/integration_base_lambda.py b/python/integration/integration_base_lambda.py new file mode 100644 index 0000000..07aa1b3 --- /dev/null +++ b/python/integration/integration_base_lambda.py @@ -0,0 +1,41 @@ +import json +from aws_lambda_powertools.utilities.typing import LambdaContext +import src.base_lambda as base_lambda + + +class DummyContext(LambdaContext): + function_name = "test" + memory_limit_in_mb = 128 + invoked_function_arn = "arn:aws:lambda:us-east-1:123456789012:function:test" + aws_request_id = "test-request-id" + + +def test_lambda_handler_list_lambda_functions(monkeypatch): + # Patch the list_lambda_functions to return a known value + monkeypatch.setattr( + base_lambda, "list_lambda_functions", lambda: {"Functions": ["f1", "f2"]} + ) + payload = {"body": json.dumps({"event": "list_lambda_functions"})} + context = DummyContext() + result = base_lambda.lambda_handler(payload, context) + assert result == {"Functions": ["f1", "f2"]} + + +def test_lambda_handler_query_dynamodb(monkeypatch): + # Patch the query_dynamodb to return a known value + monkeypatch.setattr( + base_lambda, + "query_dynamodb", + lambda table, key_expr, expr_attr_values: {"Items": [{"id": {"S": "123"}}]}, + ) + payload = {"body": json.dumps({"event": "query_dynamodb"})} + context = DummyContext() + result = base_lambda.lambda_handler(payload, context) + assert result == {"Items": [{"id": {"S": "123"}}]} + + +def test_lambda_handler_invalid_event(): + payload = {"body": json.dumps({"event": "not_a_real_event"})} + context = DummyContext() + result = base_lambda.lambda_handler(payload, context) + assert result == "Invalid or no event received" diff --git a/python/src/base_lambda.py b/python/src/base_lambda.py index ef33bfd..6c64330 100644 --- 
a/python/src/base_lambda.py +++ b/python/src/base_lambda.py @@ -5,19 +5,24 @@ from botocore.config import Config -# Boto3 Retry Formula: seconds_to_sleep_i = min(b * r^i, MAX_BACKOFF=20 seconds) -# https://docs.aws.amazon.com/sdkref/latest/guide/feature-retry-behavior.html aws_client_config = Config( connect_timeout=10, read_timeout=10, retries={"max_attempts": 4, "mode": "standard"} ) +# Lambda def delete_lambda_function(function_name: str): client = boto3.client("lambda", config=aws_client_config) response = client.delete_function(FunctionName=function_name) return response +def list_lambda_functions(): + client = boto3.client("lambda", config=aws_client_config) + return client.list_functions() + + +# SQS def redrive_sqs_dlq(source_queue_url: str, dlq_url: str, max_messages: int = 10): sqs = boto3.client("sqs", config=aws_client_config) messages = sqs.receive_message( @@ -30,6 +35,12 @@ def redrive_sqs_dlq(source_queue_url: str, dlq_url: str, max_messages: int = 10) return {"redriven": len(messages)} +def send_sqs_message(queue_url: str, message_body: str): + sqs = boto3.client("sqs", config=aws_client_config) + return sqs.send_message(QueueUrl=queue_url, MessageBody=message_body) + + +# ECR def get_ecr_login_and_repo_uri(repository_name: str): ecr = boto3.client("ecr", config=aws_client_config) auth = ecr.get_authorization_token() @@ -44,6 +55,106 @@ def get_ecr_login_and_repo_uri(repository_name: str): } +def list_ecr_repositories(): + ecr = boto3.client("ecr", config=aws_client_config) + return ecr.describe_repositories() + + +# ECS Fargate +def list_ecs_clusters(): + ecs = boto3.client("ecs", config=aws_client_config) + return ecs.list_clusters() + + +def run_fargate_task(cluster: str, task_definition: str, subnets: list): + ecs = boto3.client("ecs", config=aws_client_config) + return ecs.run_task( + cluster=cluster, + launchType="FARGATE", + taskDefinition=task_definition, + networkConfiguration={ + "awsvpcConfiguration": {"subnets": subnets, 
"assignPublicIp": "ENABLED"} + }, + ) + + +# DynamoDB +def put_dynamodb_item(table_name: str, item: dict): + dynamodb = boto3.client("dynamodb", config=aws_client_config) + return dynamodb.put_item(TableName=table_name, Item=item) + + +def query_dynamodb(table_name: str, key_expr: str, expr_attr_values: dict): + dynamodb = boto3.client("dynamodb", config=aws_client_config) + return dynamodb.query( + TableName=table_name, + KeyConditionExpression=key_expr, + ExpressionAttributeValues=expr_attr_values, + ) + + +# CloudWatch +def put_cloudwatch_metric(namespace: str, metric_name: str, value: float): + cloudwatch = boto3.client("cloudwatch", config=aws_client_config) + return cloudwatch.put_metric_data( + Namespace=namespace, MetricData=[{"MetricName": metric_name, "Value": value}] + ) + + +def get_cloudwatch_metric_statistics( + namespace: str, + metric_name: str, + dimensions: list, + start_time, + end_time, + period: int, + statistics: list, +): + cloudwatch = boto3.client("cloudwatch", config=aws_client_config) + return cloudwatch.get_metric_statistics( + Namespace=namespace, + MetricName=metric_name, + Dimensions=dimensions, + StartTime=start_time, + EndTime=end_time, + Period=period, + Statistics=statistics, + ) + + +# SNS +def publish_sns_message(topic_arn: str, message: str): + sns = boto3.client("sns", config=aws_client_config) + return sns.publish(TopicArn=topic_arn, Message=message) + + +def list_sns_topics(): + sns = boto3.client("sns", config=aws_client_config) + return sns.list_topics() + + +# S3 +def upload_file_to_s3(local_file: str, bucket: str, key: str): + s3 = boto3.client("s3", config=aws_client_config) + return s3.upload_file(local_file, bucket, key) + + +def list_s3_objects(bucket: str): + s3 = boto3.client("s3", config=aws_client_config) + return s3.list_objects_v2(Bucket=bucket) + + +# Glue +def start_glue_job(job_name: str): + glue = boto3.client("glue", config=aws_client_config) + return glue.start_job_run(JobName=job_name) + + +def 
get_glue_job_run(job_name: str, run_id: str): + glue = boto3.client("glue", config=aws_client_config) + return glue.get_job_run(JobName=job_name, RunId=run_id) + + def lambda_handler(payload: JSONType, context: LambdaContext): - event = json.loads(payload["body"]) + event = json.loads(payload["body"]).get("event") @@ -53,5 +164,45 @@ def lambda_handler(payload: JSONType, context: LambdaContext): redrive_sqs_dlq("source_queue_url_here", "dlq_url_here") elif event == "get_ecr_login_and_repo_uri": get_ecr_login_and_repo_uri("repository_name_here") + elif event == "list_lambda_functions": + return list_lambda_functions() + elif event == "list_ecr_repositories": + return list_ecr_repositories() + elif event == "list_ecs_clusters": + return list_ecs_clusters() + elif event == "run_fargate_task": + run_fargate_task( + "cluster_name_here", "task_definition_here", ["subnet_id_here"] + ) + elif event == "put_dynamodb_item": + put_dynamodb_item("table_name_here", {"key": {"S": "value"}}) + elif event == "query_dynamodb": + return query_dynamodb( + "table_name_here", "key_expr_here", {"key": {"S": "value"}} + ) + elif event == "put_cloudwatch_metric": + put_cloudwatch_metric("namespace_here", "metric_name_here", 1.0) + elif event == "get_cloudwatch_metric_statistics": + return get_cloudwatch_metric_statistics( + "namespace_here", + "metric_name_here", + [], + "start_time_here", + "end_time_here", + 60, + ["Average"], + ) + elif event == "publish_sns_message": + publish_sns_message("topic_arn_here", "message_here") + elif event == "list_sns_topics": + return list_sns_topics() + elif event == "upload_file_to_s3": + upload_file_to_s3("local_file_path_here", "bucket_name_here", "key_here") + elif event == "list_s3_objects": + return list_s3_objects("bucket_name_here") + elif event == "start_glue_job": + start_glue_job("job_name_here") + elif event == "get_glue_job_run": + return get_glue_job_run("job_name_here", "run_id_here") else: return "Invalid or no event received" diff --git a/python/src/requirements.txt
b/python/src/requirements.txt index abc7d7b..1db657b 100644 --- a/python/src/requirements.txt +++ b/python/src/requirements.txt @@ -1,2 +1 @@ -boto3 -json \ No newline at end of file +boto3 \ No newline at end of file diff --git a/python/test/__pycache__/test_base_lambda.cpython-311-pytest-8.1.1.pyc b/python/test/__pycache__/test_base_lambda.cpython-311-pytest-8.1.1.pyc index dc6212b..daa0a2c 100644 Binary files a/python/test/__pycache__/test_base_lambda.cpython-311-pytest-8.1.1.pyc and b/python/test/__pycache__/test_base_lambda.cpython-311-pytest-8.1.1.pyc differ diff --git a/python/test/requirements.txt b/python/test/requirements.txt index 5173710..18ed95d 100644 --- a/python/test/requirements.txt +++ b/python/test/requirements.txt @@ -4,7 +4,6 @@ aws-xray-sdk boto3 boto3-stubs dataclasses_json -json mdformat mdformat-gfm mdformat-black diff --git a/python/test/test_base_lambda.py b/python/test/test_base_lambda.py index f7bdf6b..77afe51 100644 --- a/python/test/test_base_lambda.py +++ b/python/test/test_base_lambda.py @@ -4,6 +4,7 @@ import src.base_lambda as base_lambda +# Lambda @mock.patch("src.base_lambda.boto3.client") def test_delete_lambda_function_calls_boto3(mock_boto_client): mock_lambda = mock.Mock() @@ -20,6 +21,20 @@ def test_delete_lambda_function_calls_boto3(mock_boto_client): assert resp == {"ResponseMetadata": {"HTTPStatusCode": 204}} +@mock.patch("src.base_lambda.boto3.client") +def test_list_lambda_functions(mock_boto_client): + mock_lambda = mock.Mock() + mock_boto_client.return_value = mock_lambda + mock_lambda.list_functions.return_value = {"Functions": []} + resp = base_lambda.list_lambda_functions() + mock_boto_client.assert_called_once_with( + "lambda", config=base_lambda.aws_client_config + ) + mock_lambda.list_functions.assert_called_once_with() + assert resp == {"Functions": []} + + +# SQS @mock.patch("src.base_lambda.boto3.client") def test_redrive_sqs_dlq_moves_messages(mock_boto_client): mock_sqs = mock.Mock() @@ -59,6 +74,22 @@ def 
test_redrive_sqs_dlq_no_messages(mock_boto_client): mock_sqs.delete_message.assert_not_called() +@mock.patch("src.base_lambda.boto3.client") +def test_send_sqs_message(mock_boto_client): + mock_sqs = mock.Mock() + mock_boto_client.return_value = mock_sqs + mock_sqs.send_message.return_value = {"MessageId": "abc"} + resp = base_lambda.send_sqs_message("queue_url", "msg") + mock_boto_client.assert_called_once_with( + "sqs", config=base_lambda.aws_client_config + ) + mock_sqs.send_message.assert_called_once_with( + QueueUrl="queue_url", MessageBody="msg" + ) + assert resp == {"MessageId": "abc"} + + +# ECR @mock.patch("src.base_lambda.boto3.client") def test_get_ecr_login_and_repo_uri(mock_boto_client): mock_ecr = mock.Mock() @@ -82,40 +113,76 @@ def test_get_ecr_login_and_repo_uri(mock_boto_client): } +@mock.patch("src.base_lambda.boto3.client") +def test_list_ecr_repositories(mock_boto_client): + mock_ecr = mock.Mock() + mock_boto_client.return_value = mock_ecr + mock_ecr.describe_repositories.return_value = {"repositories": []} + resp = base_lambda.list_ecr_repositories() + mock_boto_client.assert_called_once_with( + "ecr", config=base_lambda.aws_client_config + ) + mock_ecr.describe_repositories.assert_called_once_with() + assert resp == {"repositories": []} + + +# Lambda handler dispatch @mock.patch("src.base_lambda.delete_lambda_function") @mock.patch("src.base_lambda.json") def test_lambda_handler_delete_lambda_function(mock_json, mock_delete): - payload = {"body": json.dumps("delete_lambda_function")} - mock_json.loads.return_value = "delete_lambda_function" + payload = { + "body": json.dumps( + {"event": "delete_lambda_function", "function_name": "my-func"} + ) + } + mock_json.loads.return_value = { + "event": "delete_lambda_function", + "function_name": "my-func", + } context = mock.Mock() base_lambda.lambda_handler(payload, context) - mock_delete.assert_called_once_with("function_name_here") + mock_delete.assert_called_once_with("my-func") 
@mock.patch("src.base_lambda.redrive_sqs_dlq") @mock.patch("src.base_lambda.json") def test_lambda_handler_redrive_sqs_dlq(mock_json, mock_redrive): - payload = {"body": json.dumps("redrive_sqs_dlq")} - mock_json.loads.return_value = "redrive_sqs_dlq" + payload = { + "body": json.dumps( + {"event": "redrive_sqs_dlq", "source_queue_url": "src", "dlq_url": "dlq"} + ) + } + mock_json.loads.return_value = { + "event": "redrive_sqs_dlq", + "source_queue_url": "src", + "dlq_url": "dlq", + } context = mock.Mock() base_lambda.lambda_handler(payload, context) - mock_redrive.assert_called_once_with("source_queue_url_here", "dlq_url_here") + mock_redrive.assert_called_once_with("src", "dlq") @mock.patch("src.base_lambda.get_ecr_login_and_repo_uri") @mock.patch("src.base_lambda.json") def test_lambda_handler_get_ecr_login_and_repo_uri(mock_json, mock_get_ecr): - payload = {"body": json.dumps("get_ecr_login_and_repo_uri")} - mock_json.loads.return_value = "get_ecr_login_and_repo_uri" + payload = { + "body": json.dumps( + {"event": "get_ecr_login_and_repo_uri", "repository_name": "repo"} + ) + } + mock_json.loads.return_value = { + "event": "get_ecr_login_and_repo_uri", + "repository_name": "repo", + } context = mock.Mock() base_lambda.lambda_handler(payload, context) - mock_get_ecr.assert_called_once_with("repository_name_here") + mock_get_ecr.assert_called_once_with("repo") @mock.patch("src.base_lambda.json") def test_lambda_handler_invalid_event(mock_json): - payload = {"body": json.dumps("unknown_event")} - mock_json.loads.return_value = "unknown_event" + payload = {"body": json.dumps({"event": "unknown_event"})} + mock_json.loads.return_value = {"event": "unknown_event"} context = mock.Mock() result = base_lambda.lambda_handler(payload, context) assert result == "Invalid or no event received" diff --git a/scripts/build.sh b/scripts/build.sh new file mode 100644 index 0000000..cd1a06b --- /dev/null +++ b/scripts/build.sh @@ -0,0 +1,13 @@ +#!/bin/bash +set -e + +# 
Clean previous build +rm -rf build +mkdir -p build + +# Package the Lambda function code +cd python/src +zip -r ../../build/example_lambda.zip . -x "__pycache__/*" "*.pyc" +cd ../../ + +echo "Build complete: build/example_lambda.zip" \ No newline at end of file diff --git a/scripts/placeholder.txt b/scripts/placeholder.txt deleted file mode 100644 index e69de29..0000000