diff --git a/.doc_gen/metadata/cloudwatch-logs_metadata.yaml b/.doc_gen/metadata/cloudwatch-logs_metadata.yaml
index beac1ce1a8a..6aaa4a733e1 100644
--- a/.doc_gen/metadata/cloudwatch-logs_metadata.yaml
+++ b/.doc_gen/metadata/cloudwatch-logs_metadata.yaml
@@ -288,6 +288,14 @@ cloudwatch-logs_PutSubscriptionFilter:
cloudwatch-logs: {PutSubscriptionFilter}
cloudwatch-logs_GetQueryResults:
languages:
+ .NET:
+ versions:
+ - sdk_version: 4
+ github: dotnetv4/CloudWatchLogs/LargeQuery
+ excerpts:
+ - description:
+ snippet_tags:
+ - CloudWatchLogs.dotnetv4.GetQueryResults
JavaScript:
versions:
- sdk_version: 3
@@ -308,6 +316,14 @@ cloudwatch-logs_GetQueryResults:
cloudwatch-logs: {GetQueryResults}
cloudwatch-logs_StartQuery:
languages:
+ .NET:
+ versions:
+ - sdk_version: 4
+ github: dotnetv4/CloudWatchLogs/LargeQuery
+ excerpts:
+ - description:
+ snippet_tags:
+ - CloudWatchLogs.dotnetv4.StartQuery
JavaScript:
versions:
- sdk_version: 3
@@ -332,6 +348,14 @@ cloudwatch-logs_Scenario_BigQuery:
synopsis: use &CWL; to query more than 10,000 records.
category: Scenarios
languages:
+ .NET:
+ versions:
+ - sdk_version: 4
+ github: dotnetv4/CloudWatchLogs/LargeQuery
+ excerpts:
+ - description: This is the main workflow that demonstrates the large query scenario.
+ snippet_tags:
+ - CloudWatchLogs.dotnetv4.LargeQueryWorkflow
JavaScript:
versions:
- sdk_version: 3
diff --git a/.doc_gen/validation.yaml b/.doc_gen/validation.yaml
index 3e77aa5e973..c165ea0b62c 100644
--- a/.doc_gen/validation.yaml
+++ b/.doc_gen/validation.yaml
@@ -215,6 +215,7 @@ allow_list:
- "EnablePropagateAdditionalUserContextData"
- "StopQueryWorkloadInsightsTopContributors"
- "com/location/latest/APIReference/Welcome"
+ - "LargeQuery/Actions/CloudWatchLogsWrapper"
sample_files:
- "README.md"
- "chat_sfn_state_machine.json"
diff --git a/.gitignore b/.gitignore
index 0b25f6593e2..657ad41f507 100644
--- a/.gitignore
+++ b/.gitignore
@@ -38,3 +38,6 @@ kotlin/services/**/gradlew
kotlin/services/**/gradlew.bat
kotlin/services/**/.kotlin/
/.local/
+.kiro/settings/
+.kiro/steering/
+
diff --git a/.tools/readmes/config.py b/.tools/readmes/config.py
index 35bd076a6ac..25e4b7cff7d 100644
--- a/.tools/readmes/config.py
+++ b/.tools/readmes/config.py
@@ -129,6 +129,7 @@
"auto-scaling": "dotnetv4/AutoScaling",
"cloudformation": "dotnetv4/CloudFormation",
"cloudwatch": "dotnetv4/CloudWatch",
+ "cloudwatch-logs": "dotnetv4/CloudWatchLogs",
"cognito-identity-provider": "dotnetv4/Cognito",
"ec2": "dotnetv4/EC2",
"ecs": "dotnetv4/ECS",
diff --git a/dotnetv3/CloudWatchLogs/README.md b/dotnetv3/CloudWatchLogs/README.md
index 33a4867e9dd..21e3d28fc10 100644
--- a/dotnetv3/CloudWatchLogs/README.md
+++ b/dotnetv3/CloudWatchLogs/README.md
@@ -99,4 +99,4 @@ in the `dotnetv3` folder.
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-SPDX-License-Identifier: Apache-2.0
\ No newline at end of file
+SPDX-License-Identifier: Apache-2.0
diff --git a/dotnetv4/CloudWatchLogs/LargeQuery/Actions/CloudWatchLogsActions.csproj b/dotnetv4/CloudWatchLogs/LargeQuery/Actions/CloudWatchLogsActions.csproj
new file mode 100644
index 00000000000..4934ade0dec
--- /dev/null
+++ b/dotnetv4/CloudWatchLogs/LargeQuery/Actions/CloudWatchLogsActions.csproj
@@ -0,0 +1,18 @@
+
+
+
+ Library
+ net8.0
+ enable
+ enable
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnetv4/CloudWatchLogs/LargeQuery/Actions/CloudWatchLogsWrapper.cs b/dotnetv4/CloudWatchLogs/LargeQuery/Actions/CloudWatchLogsWrapper.cs
new file mode 100644
index 00000000000..a1fd885cdc7
--- /dev/null
+++ b/dotnetv4/CloudWatchLogs/LargeQuery/Actions/CloudWatchLogsWrapper.cs
@@ -0,0 +1,148 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[CloudWatchLogs.dotnetv4.CloudWatchLogsWrapper]
+using Amazon.CloudWatchLogs;
+using Amazon.CloudWatchLogs.Model;
+using Microsoft.Extensions.Logging;
+
+namespace CloudWatchLogsActions;
+
+///
+/// Wrapper class for Amazon CloudWatch Logs operations.
+///
+public class CloudWatchLogsWrapper
+{
+ private readonly IAmazonCloudWatchLogs _amazonCloudWatchLogs;
+ private readonly ILogger<CloudWatchLogsWrapper> _logger;
+
+ ///
+ /// Constructor for the CloudWatchLogsWrapper class.
+ ///
+ /// The injected CloudWatch Logs client.
+ /// The injected logger.
+ public CloudWatchLogsWrapper(IAmazonCloudWatchLogs amazonCloudWatchLogs, ILogger<CloudWatchLogsWrapper> logger)
+ {
+ _amazonCloudWatchLogs = amazonCloudWatchLogs;
+ _logger = logger;
+ }
+
+ // snippet-start:[CloudWatchLogs.dotnetv4.StartQuery]
+ ///
+ /// Starts a CloudWatch Logs Insights query.
+ ///
+ /// The name of the log group to query.
+ /// The CloudWatch Logs Insights query string.
+ /// The start time for the query (seconds since epoch).
+ /// The end time for the query (seconds since epoch).
+ /// The maximum number of results to return.
+ /// The query ID if successful, null otherwise.
+ public async Task<string?> StartQueryAsync(
+ string logGroupName,
+ string queryString,
+ long startTime,
+ long endTime,
+ int limit = 10000)
+ {
+ try
+ {
+ var request = new StartQueryRequest
+ {
+ LogGroupName = logGroupName,
+ QueryString = queryString,
+ StartTime = startTime,
+ EndTime = endTime,
+ Limit = limit
+ };
+
+ var response = await _amazonCloudWatchLogs.StartQueryAsync(request);
+ return response.QueryId;
+ }
+ catch (InvalidParameterException ex)
+ {
+ _logger.LogError($"Invalid parameter for query: {ex.Message}");
+ return null;
+ }
+ catch (ResourceNotFoundException ex)
+ {
+ _logger.LogError($"Log group not found: {ex.Message}");
+ return null;
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError($"An error occurred while starting query: {ex.Message}");
+ return null;
+ }
+ }
+ // snippet-end:[CloudWatchLogs.dotnetv4.StartQuery]
+
+ // snippet-start:[CloudWatchLogs.dotnetv4.GetQueryResults]
+ ///
+ /// Gets the results of a CloudWatch Logs Insights query.
+ ///
+ /// The ID of the query.
+ /// The query results response.
+ public async Task<GetQueryResultsResponse?> GetQueryResultsAsync(string queryId)
+ {
+ try
+ {
+ var request = new GetQueryResultsRequest
+ {
+ QueryId = queryId
+ };
+
+ var response = await _amazonCloudWatchLogs.GetQueryResultsAsync(request);
+ return response;
+ }
+ catch (ResourceNotFoundException ex)
+ {
+ _logger.LogError($"Query not found: {ex.Message}");
+ return null;
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError($"An error occurred while getting query results: {ex.Message}");
+ return null;
+ }
+ }
+ // snippet-end:[CloudWatchLogs.dotnetv4.GetQueryResults]
+
+ // snippet-start:[CloudWatchLogs.dotnetv4.PutLogEvents]
+ ///
+ /// Puts log events to a CloudWatch Logs log stream.
+ ///
+ /// The name of the log group.
+ /// The name of the log stream.
+ /// The list of log events to put.
+ /// True if successful, false otherwise.
+ public async Task<bool> PutLogEventsAsync(
+ string logGroupName,
+ string logStreamName,
+ List<InputLogEvent> logEvents)
+ {
+ try
+ {
+ var request = new PutLogEventsRequest
+ {
+ LogGroupName = logGroupName,
+ LogStreamName = logStreamName,
+ LogEvents = logEvents
+ };
+
+ await _amazonCloudWatchLogs.PutLogEventsAsync(request);
+ return true;
+ }
+ catch (ResourceNotFoundException ex)
+ {
+ _logger.LogError($"Log group or stream not found: {ex.Message}");
+ return false;
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError($"An error occurred while putting log events: {ex.Message}");
+ return false;
+ }
+ }
+ // snippet-end:[CloudWatchLogs.dotnetv4.PutLogEvents]
+}
+// snippet-end:[CloudWatchLogs.dotnetv4.CloudWatchLogsWrapper]
\ No newline at end of file
diff --git a/dotnetv4/CloudWatchLogs/LargeQuery/CloudWatchLogsLargeQuery.sln b/dotnetv4/CloudWatchLogs/LargeQuery/CloudWatchLogsLargeQuery.sln
new file mode 100644
index 00000000000..eb27a092342
--- /dev/null
+++ b/dotnetv4/CloudWatchLogs/LargeQuery/CloudWatchLogsLargeQuery.sln
@@ -0,0 +1,31 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.0.31903.59
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CloudWatchLogsActions", "Actions\CloudWatchLogsActions.csproj", "{A1B2C3D4-E5F6-4A5B-8C9D-0E1F2A3B4C5D}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CloudWatchLogsScenario", "Scenarios\CloudWatchLogsScenario.csproj", "{B2C3D4E5-F6A7-5B6C-9D0E-1F2A3B4C5D6E}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CloudWatchLogsTests", "Tests\CloudWatchLogsTests.csproj", "{C3D4E5F6-A7B8-6C7D-0E1F-2A3B4C5D6E7F}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {A1B2C3D4-E5F6-4A5B-8C9D-0E1F2A3B4C5D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {A1B2C3D4-E5F6-4A5B-8C9D-0E1F2A3B4C5D}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {A1B2C3D4-E5F6-4A5B-8C9D-0E1F2A3B4C5D}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {A1B2C3D4-E5F6-4A5B-8C9D-0E1F2A3B4C5D}.Release|Any CPU.Build.0 = Release|Any CPU
+ {B2C3D4E5-F6A7-5B6C-9D0E-1F2A3B4C5D6E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {B2C3D4E5-F6A7-5B6C-9D0E-1F2A3B4C5D6E}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {B2C3D4E5-F6A7-5B6C-9D0E-1F2A3B4C5D6E}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {B2C3D4E5-F6A7-5B6C-9D0E-1F2A3B4C5D6E}.Release|Any CPU.Build.0 = Release|Any CPU
+ {C3D4E5F6-A7B8-6C7D-0E1F-2A3B4C5D6E7F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {C3D4E5F6-A7B8-6C7D-0E1F-2A3B4C5D6E7F}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {C3D4E5F6-A7B8-6C7D-0E1F-2A3B4C5D6E7F}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {C3D4E5F6-A7B8-6C7D-0E1F-2A3B4C5D6E7F}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+EndGlobal
diff --git a/dotnetv4/CloudWatchLogs/LargeQuery/README.md b/dotnetv4/CloudWatchLogs/LargeQuery/README.md
new file mode 100644
index 00000000000..bfdd6fdbf3c
--- /dev/null
+++ b/dotnetv4/CloudWatchLogs/LargeQuery/README.md
@@ -0,0 +1,147 @@
+# CloudWatch Logs Large Query Example
+
+This folder contains a .NET feature scenario that demonstrates how to perform large-scale queries on Amazon CloudWatch Logs using recursive binary search to retrieve more than the 10,000 result limit.
+
+## Overview
+
+CloudWatch Logs Insights queries have a maximum result limit of 10,000 records per query. This example demonstrates how to overcome this limitation by using a recursive binary search algorithm that splits the time range into smaller segments when the limit is reached.
+
+The scenario performs the following steps:
+
+1. **Setup**: Deploys a CloudFormation stack with a log group and log stream
+2. **Data Generation**: Creates and uploads 50,000 sample log entries
+3. **Query Execution**: Performs recursive queries to retrieve all logs using binary search
+4. **Cleanup**: Removes all created resources
+
+## Project Structure
+
+```
+LargeQuery/
+├── Actions/
+│ ├── CloudWatchLogsWrapper.cs # Wrapper class for CloudWatch Logs operations
+│ └── CloudWatchLogsActions.csproj # Actions project file
+├── Scenarios/
+│ ├── LargeQueryWorkflow.cs # Main workflow implementation
+│ ├── README.md # Detailed scenario documentation
+│ └── CloudWatchLogsScenario.csproj # Scenario project file
+├── Tests/
+│ ├── LargeQueryWorkflowTests.cs # Integration tests
+│ ├── Usings.cs # Global usings
+│ └── CloudWatchLogsTests.csproj # Test project file
+└── CloudWatchLogsLargeQuery.sln # Solution file
+```
+
+## What This Example Demonstrates
+
+- Deploying AWS resources using CloudFormation
+- Generating and ingesting large volumes of log data using PutLogEvents
+- Performing CloudWatch Logs Insights queries with StartQuery and GetQueryResults
+- Using recursive binary search to retrieve more than 10,000 results
+- Handling timestamp precision for accurate query splitting
+- Cleaning up resources after completion
+
+## Running the Example
+
+### Interactive Mode
+
+1. Navigate to the solution directory:
+ ```
+ cd dotnetv4/CloudWatchLogs/LargeQuery
+ ```
+
+2. Build the solution:
+ ```
+ dotnet build
+ ```
+
+3. Run the scenario:
+ ```
+ dotnet run --project Scenarios/CloudWatchLogsScenario.csproj
+ ```
+
+4. Follow the prompts to:
+ - Deploy the CloudFormation stack
+ - Generate sample logs
+ - Execute the recursive query
+ - View sample results
+ - Clean up resources
+
+### Non-Interactive Mode (Testing)
+
+Run the integration tests to execute the scenario without user prompts:
+
+```
+dotnet test
+```
+
+The test verifies that the scenario completes without errors and successfully retrieves all 50,000 log entries.
+
+## Prerequisites
+
+- .NET 8.0 or later
+- AWS credentials configured
+- Permissions for:
+ - CloudWatch Logs (CreateLogGroup, CreateLogStream, PutLogEvents, StartQuery, GetQueryResults, DeleteLogGroup)
+ - CloudFormation (CreateStack, DescribeStacks, DeleteStack)
+
+## How It Works
+
+### Recursive Query Algorithm
+
+The key to retrieving more than 10,000 results is the recursive binary search algorithm:
+
+1. Execute a query with the full date range
+2. If results < 10,000, return them (we have all logs in this range)
+3. If results = 10,000, there may be more logs:
+ - Get the timestamp of the last result
+ - Calculate the midpoint between the last timestamp and end date
+ - Recursively query the first half (last timestamp to midpoint)
+ - Recursively query the second half (midpoint to end date)
+ - Combine all results
+
+This approach ensures all logs are retrieved by progressively narrowing the time ranges until each segment contains fewer than 10,000 results.
+
+### Timestamp Precision
+
+The algorithm uses millisecond precision for timestamps to ensure accurate splitting and prevent duplicate or missing log entries. Each query adjusts the start time by 1 millisecond to avoid overlapping results.
+
+## Expected Output
+
+When running the scenario, you'll see output similar to:
+
+```
+--------------------------------------------------------------------------------
+Welcome to the CloudWatch Logs Large Query Scenario.
+--------------------------------------------------------------------------------
+Preparing the application...
+Deploying CloudFormation stack: CloudWatchLargeQueryStack
+CloudFormation stack creation started: CloudWatchLargeQueryStack
+Waiting for CloudFormation stack creation to complete...
+CloudFormation stack creation complete.
+Stack output RoleARN: arn:aws:iam::123456789012:role/...
+Generating 50,000 sample log entries...
+Batch 1/5: Created 10,000 log entries
+Batch 2/5: Created 10,000 log entries
+...
+Waiting 5 minutes for logs to be fully ingested...
+--------------------------------------------------------------------------------
+Starting recursive query to retrieve all logs...
+Query date range: 2024-01-15T10:00:00.000Z to 2024-01-15T10:05:00.000Z. Found 10000 logs.
+Query date range: 2024-01-15T10:02:30.000Z to 2024-01-15T10:03:45.000Z. Found 10000 logs.
+...
+Queries finished in 8.234 seconds.
+Total logs found: 50000
+--------------------------------------------------------------------------------
+```
+
+## Related Resources
+
+- [CloudWatch Logs Documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/)
+- [CloudWatch Logs Insights Query Syntax](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/CWL_QuerySyntax.html)
+- [AWS SDK for .NET](https://aws.amazon.com/sdk-for-net/)
+- [CloudWatch Logs API Reference](https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/)
+
+---
+
+Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+SPDX-License-Identifier: Apache-2.0
diff --git a/dotnetv4/CloudWatchLogs/LargeQuery/Scenarios/CloudWatchLogsScenario.csproj b/dotnetv4/CloudWatchLogs/LargeQuery/Scenarios/CloudWatchLogsScenario.csproj
new file mode 100644
index 00000000000..fcf70daf1c3
--- /dev/null
+++ b/dotnetv4/CloudWatchLogs/LargeQuery/Scenarios/CloudWatchLogsScenario.csproj
@@ -0,0 +1,22 @@
+
+
+
+ Exe
+ net8.0
+ enable
+ enable
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnetv4/CloudWatchLogs/LargeQuery/Scenarios/LargeQueryWorkflow.cs b/dotnetv4/CloudWatchLogs/LargeQuery/Scenarios/LargeQueryWorkflow.cs
new file mode 100644
index 00000000000..c602ab9d41b
--- /dev/null
+++ b/dotnetv4/CloudWatchLogs/LargeQuery/Scenarios/LargeQueryWorkflow.cs
@@ -0,0 +1,838 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[CloudWatchLogs.dotnetv4.LargeQueryWorkflow]
+using System.Diagnostics;
+using System.Text.RegularExpressions;
+using Amazon.CloudFormation;
+using Amazon.CloudFormation.Model;
+using Amazon.CloudWatchLogs;
+using Amazon.CloudWatchLogs.Model;
+using CloudWatchLogsActions;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Hosting;
+using Microsoft.Extensions.Logging;
+
+namespace CloudWatchLogsScenario;
+
+public class LargeQueryWorkflow
+{
+ /*
+ Before running this .NET code example, set up your development environment, including your credentials.
+ This .NET code example performs the following tasks for the CloudWatch Logs Large Query workflow:
+
+ 1. Prepare the Application:
+ - Prompt the user to deploy CloudFormation stack and generate sample logs.
+ - Deploy the CloudFormation template for resource creation.
+ - Generate 50,000 sample log entries using CloudWatch Logs API.
+ - Wait 5 minutes for logs to be fully ingested.
+
+ 2. Execute Large Query:
+ - Perform recursive queries to retrieve all logs using binary search.
+ - Display progress for each query executed.
+ - Show total execution time and logs found.
+
+ 3. Clean up:
+ - Prompt the user to delete the CloudFormation stack and all resources.
+ - Destroy the CloudFormation stack and wait until removed.
+ */
+
+ public static ILogger<LargeQueryWorkflow> _logger = null!;
+ public static CloudWatchLogsWrapper _wrapper = null!;
+ public static IAmazonCloudFormation _amazonCloudFormation = null!;
+
+ private static string _logGroupName = "/workflows/cloudwatch-logs/large-query";
+ private static string _logStreamName = "stream1";
+ private static long _queryStartDate;
+ private static long _queryEndDate;
+
+ public static bool _interactive = true;
+ public static string _stackName = "CloudWatchLargeQueryStack";
+ private static string _stackResourcePath = "../../../../../../../scenarios/features/cloudwatch_logs_large_query/resources/stack.yaml";
+
+ public static async Task Main(string[] args)
+ {
+ using var host = Host.CreateDefaultBuilder(args)
+ .ConfigureLogging(logging =>
+ logging.AddFilter("System", LogLevel.Debug)
+ .AddFilter("Microsoft", LogLevel.Information))
+ .ConfigureServices((_, services) =>
+ services.AddAWSService<IAmazonCloudWatchLogs>()
+ .AddAWSService<IAmazonCloudFormation>()
+ .AddTransient<CloudWatchLogsWrapper>()
+ )
+ .Build();
+
+ if (_interactive)
+ {
+ _logger = LoggerFactory.Create(builder => { builder.AddConsole(); })
+ .CreateLogger<LargeQueryWorkflow>();
+
+ _wrapper = host.Services.GetRequiredService<CloudWatchLogsWrapper>();
+ _amazonCloudFormation = host.Services.GetRequiredService<IAmazonCloudFormation>();
+ }
+
+ Console.WriteLine(new string('-', 80));
+ Console.WriteLine("Welcome to the CloudWatch Logs Large Query Scenario.");
+ Console.WriteLine(new string('-', 80));
+ Console.WriteLine("This scenario demonstrates how to perform large-scale queries on");
+ Console.WriteLine("CloudWatch Logs using recursive binary search to retrieve more than");
+ Console.WriteLine("the 10,000 result limit.");
+ Console.WriteLine();
+
+ try
+ {
+ Console.WriteLine(new string('-', 80));
+ var prepareSuccess = await PrepareApplication();
+ Console.WriteLine(new string('-', 80));
+
+ if (prepareSuccess)
+ {
+ Console.WriteLine(new string('-', 80));
+ await ExecuteLargeQuery();
+ Console.WriteLine(new string('-', 80));
+ }
+
+ Console.WriteLine(new string('-', 80));
+ await Cleanup();
+ Console.WriteLine(new string('-', 80));
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "There was a problem with the scenario, initiating cleanup...");
+ _interactive = false;
+ await Cleanup();
+ }
+
+ Console.WriteLine("CloudWatch Logs Large Query scenario completed.");
+ }
+
+ ///
+ /// Runs the scenario workflow. Used for testing.
+ ///
+ public static async Task RunScenario()
+ {
+ Console.WriteLine(new string('-', 80));
+ Console.WriteLine("Welcome to the CloudWatch Logs Large Query Scenario.");
+ Console.WriteLine(new string('-', 80));
+ Console.WriteLine("This scenario demonstrates how to perform large-scale queries on");
+ Console.WriteLine("CloudWatch Logs using recursive binary search to retrieve more than");
+ Console.WriteLine("the 10,000 result limit.");
+ Console.WriteLine();
+
+ try
+ {
+ Console.WriteLine(new string('-', 80));
+ var prepareSuccess = await PrepareApplication();
+ Console.WriteLine(new string('-', 80));
+
+ if (prepareSuccess)
+ {
+ Console.WriteLine(new string('-', 80));
+ await ExecuteLargeQuery();
+ Console.WriteLine(new string('-', 80));
+ }
+
+ Console.WriteLine(new string('-', 80));
+ await Cleanup();
+ Console.WriteLine(new string('-', 80));
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "There was a problem with the scenario, initiating cleanup...");
+ _interactive = false;
+ await Cleanup();
+ }
+
+ Console.WriteLine("CloudWatch Logs Large Query scenario completed.");
+ }
+
+ ///
+ /// Prepares the application by creating the necessary resources.
+ ///
+ /// True if the application was prepared successfully.
+ public static async Task PrepareApplication()
+ {
+ Console.WriteLine("Preparing the application...");
+ Console.WriteLine();
+
+ try
+ {
+ var deployStack = !_interactive || GetYesNoResponse(
+ "Would you like to deploy the CloudFormation stack and generate sample logs? (y/n) ");
+
+ if (deployStack)
+ {
+ _stackName = PromptUserForStackName();
+
+ var deploySuccess = await DeployCloudFormationStack(_stackName);
+
+ if (deploySuccess)
+ {
+ Console.WriteLine();
+ Console.WriteLine("Generating 50,000 sample log entries...");
+ var generateSuccess = await GenerateSampleLogs();
+
+ if (generateSuccess)
+ {
+ Console.WriteLine();
+ Console.WriteLine("Sample logs created. Waiting 5 minutes for logs to be fully ingested...");
+ await WaitWithCountdown(300);
+
+ Console.WriteLine("Application preparation complete.");
+ return true;
+ }
+ }
+ }
+ else
+ {
+ _logGroupName = PromptUserForInput("Enter the log group name ", _logGroupName);
+ _logStreamName = PromptUserForInput("Enter the log stream name ", _logStreamName);
+
+ var startDateMs = PromptUserForLong("Enter the query start date (milliseconds since epoch): ");
+ var endDateMs = PromptUserForLong("Enter the query end date (milliseconds since epoch): ");
+
+ _queryStartDate = startDateMs / 1000;
+ _queryEndDate = endDateMs / 1000;
+
+ Console.WriteLine("Application preparation complete.");
+ return true;
+ }
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "An error occurred while preparing the application.");
+ }
+
+ Console.WriteLine("Application preparation failed.");
+ return false;
+ }
+
+ ///
+ /// Deploys the CloudFormation stack with the necessary resources.
+ ///
+ /// The name of the CloudFormation stack.
+ /// True if the stack was deployed successfully.
+ private static async Task DeployCloudFormationStack(string stackName)
+ {
+ Console.WriteLine($"\nDeploying CloudFormation stack: {stackName}");
+
+ try
+ {
+ var request = new CreateStackRequest
+ {
+ StackName = stackName,
+ TemplateBody = await File.ReadAllTextAsync(_stackResourcePath)
+ };
+
+ var response = await _amazonCloudFormation.CreateStackAsync(request);
+
+ if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
+ {
+ Console.WriteLine($"CloudFormation stack creation started: {stackName}");
+
+ bool stackCreated = await WaitForStackCompletion(response.StackId);
+
+ if (stackCreated)
+ {
+ Console.WriteLine("CloudFormation stack created successfully.");
+ return true;
+ }
+ else
+ {
+ _logger.LogError($"CloudFormation stack creation failed: {stackName}");
+ return false;
+ }
+ }
+ else
+ {
+ _logger.LogError($"Failed to create CloudFormation stack: {stackName}");
+ return false;
+ }
+ }
+ catch (AlreadyExistsException)
+ {
+ _logger.LogWarning($"CloudFormation stack '{stackName}' already exists. Please provide a unique name.");
+ var newStackName = PromptUserForStackName();
+ return await DeployCloudFormationStack(newStackName);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, $"An error occurred while deploying the CloudFormation stack: {stackName}");
+ return false;
+ }
+ }
+
+ ///
+ /// Waits for the CloudFormation stack to be in the CREATE_COMPLETE state.
+ ///
+ /// The ID of the CloudFormation stack.
+ /// True if the stack was created successfully.
+ private static async Task WaitForStackCompletion(string stackId)
+ {
+ int retryCount = 0;
+ const int maxRetries = 30;
+ const int retryDelay = 10000;
+
+ while (retryCount < maxRetries)
+ {
+ var describeStacksRequest = new DescribeStacksRequest
+ {
+ StackName = stackId
+ };
+
+ var describeStacksResponse = await _amazonCloudFormation.DescribeStacksAsync(describeStacksRequest);
+
+ if (describeStacksResponse.Stacks.Count > 0)
+ {
+ if (describeStacksResponse.Stacks[0].StackStatus == StackStatus.CREATE_COMPLETE)
+ {
+ return true;
+ }
+ if (describeStacksResponse.Stacks[0].StackStatus == StackStatus.CREATE_FAILED ||
+ describeStacksResponse.Stacks[0].StackStatus == StackStatus.ROLLBACK_COMPLETE)
+ {
+ return false;
+ }
+ }
+
+ Console.WriteLine("Waiting for CloudFormation stack creation to complete...");
+ await Task.Delay(retryDelay);
+ retryCount++;
+ }
+
+ _logger.LogError("Timed out waiting for CloudFormation stack creation to complete.");
+ return false;
+ }
+
+ ///
+ /// Generates sample logs directly using CloudWatch Logs API.
+ /// Creates 50,000 log entries spanning 5 minutes.
+ ///
+ /// True if logs were generated successfully.
+ private static async Task GenerateSampleLogs()
+ {
+ const int totalEntries = 50000;
+ const int entriesPerBatch = 10000;
+ const int fiveMinutesMs = 5 * 60 * 1000;
+
+ try
+ {
+ // Calculate timestamps
+ var startTimeMs = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds();
+ var timestampIncrement = fiveMinutesMs / totalEntries;
+
+ Console.WriteLine($"Generating {totalEntries} log entries...");
+
+ var entryCount = 0;
+ var currentTimestamp = startTimeMs;
+ var numBatches = totalEntries / entriesPerBatch;
+
+ // Generate and upload logs in batches
+ for (int batchNum = 0; batchNum < numBatches; batchNum++)
+ {
+ var logEvents = new List<InputLogEvent>();
+
+ for (int i = 0; i < entriesPerBatch; i++)
+ {
+ logEvents.Add(new InputLogEvent
+ {
+ Timestamp = DateTimeOffset.FromUnixTimeMilliseconds(currentTimestamp).UtcDateTime,
+ Message = $"Entry {entryCount}"
+ });
+
+ entryCount++;
+ currentTimestamp += timestampIncrement;
+ }
+
+ // Upload batch
+ var success = await _wrapper.PutLogEventsAsync(_logGroupName, _logStreamName, logEvents);
+ if (!success)
+ {
+ _logger.LogError($"Failed to upload batch {batchNum + 1}/{numBatches}");
+ return false;
+ }
+
+ Console.WriteLine($"Uploaded batch {batchNum + 1}/{numBatches}");
+ }
+
+ // Set query date range (convert milliseconds to seconds for query API)
+ _queryStartDate = startTimeMs / 1000;
+ _queryEndDate = (currentTimestamp - timestampIncrement) / 1000;
+
+ Console.WriteLine($"Query start date: {DateTimeOffset.FromUnixTimeSeconds(_queryStartDate):yyyy-MM-ddTHH:mm:ss.fffZ}");
+ Console.WriteLine($"Query end date: {DateTimeOffset.FromUnixTimeSeconds(_queryEndDate):yyyy-MM-ddTHH:mm:ss.fffZ}");
+ Console.WriteLine($"Successfully uploaded {totalEntries} log entries");
+
+ return true;
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "An error occurred while generating sample logs.");
+ return false;
+ }
+ }
+
+ ///
+ /// Executes the large query workflow.
+ ///
+ public static async Task ExecuteLargeQuery()
+ {
+ Console.WriteLine("Starting recursive query to retrieve all logs...");
+ Console.WriteLine();
+
+ var queryLimit = PromptUserForInteger("Enter the query limit (max 10000) ", 10000);
+ if (queryLimit > 10000) queryLimit = 10000;
+
+ var queryString = "fields @timestamp, @message | sort @timestamp asc";
+
+ var stopwatch = Stopwatch.StartNew();
+ var allResults = await PerformLargeQuery(_logGroupName, queryString, _queryStartDate, _queryEndDate, queryLimit);
+ stopwatch.Stop();
+
+ Console.WriteLine();
+ Console.WriteLine($"Queries finished in {stopwatch.Elapsed.TotalSeconds:F3} seconds.");
+ Console.WriteLine($"Total logs found: {allResults.Count}");
+
+ // Check for duplicates
+ Console.WriteLine();
+ Console.WriteLine("Checking for duplicate logs...");
+ var duplicates = FindDuplicateLogs(allResults);
+ if (duplicates.Count > 0)
+ {
+ Console.WriteLine($"WARNING: Found {duplicates.Count} duplicate log entries!");
+ Console.WriteLine("Duplicate entries (showing first 10):");
+ foreach (var dup in duplicates.Take(10))
+ {
+ Console.WriteLine($" [{dup.Timestamp}] {dup.Message} (appears {dup.Count} times)");
+ }
+
+ var uniqueCount = allResults.Count - duplicates.Sum(d => d.Count - 1);
+ Console.WriteLine($"Unique logs: {uniqueCount}");
+ }
+ else
+ {
+ Console.WriteLine("No duplicates found. All logs are unique.");
+ }
+ Console.WriteLine();
+
+ var viewSample = !_interactive || GetYesNoResponse("Would you like to see a sample of the logs? (y/n) ");
+ if (viewSample)
+ {
+ Console.WriteLine();
+ Console.WriteLine($"Sample logs (first 10 of {allResults.Count}):");
+ for (int i = 0; i < Math.Min(10, allResults.Count); i++)
+ {
+ var timestamp = allResults[i].Find(f => f.Field == "@timestamp")?.Value ?? "N/A";
+ var message = allResults[i].Find(f => f.Field == "@message")?.Value ?? "N/A";
+ Console.WriteLine($"[{timestamp}] {message}");
+ }
+ }
+ }
+
+ ///
+ /// Performs a large query using recursive binary search.
+ ///
+ private static async Task<List<List<ResultField>>> PerformLargeQuery(
+ string logGroupName,
+ string queryString,
+ long startTime,
+ long endTime,
+ int limit)
+ {
+ var queryId = await _wrapper.StartQueryAsync(logGroupName, queryString, startTime, endTime, limit);
+ if (queryId == null)
+ {
+ return new List<List<ResultField>>();
+ }
+
+ var results = await PollQueryResults(queryId);
+ if (results == null || results.Count == 0)
+ {
+ return new List<List<ResultField>>();
+ }
+
+ var startDate = DateTimeOffset.FromUnixTimeSeconds(startTime).ToString("yyyy-MM-ddTHH:mm:ss.fffZ");
+ var endDate = DateTimeOffset.FromUnixTimeSeconds(endTime).ToString("yyyy-MM-ddTHH:mm:ss.fffZ");
+ Console.WriteLine($"Query date range: {startDate} ({startTime}s) to {endDate} ({endTime}s). Found {results.Count} logs.");
+
+ if (results.Count < limit)
+ {
+ Console.WriteLine($" -> Returning {results.Count} logs (less than limit of {limit})");
+ return results;
+ }
+
+ Console.WriteLine($" -> Hit limit of {limit}. Need to split and recurse.");
+
+ // Get the timestamp of the last log (sorted to find the actual last one)
+ var lastLogTimestamp = GetLastLogTimestamp(results);
+ if (lastLogTimestamp == null)
+ {
+ Console.WriteLine($" -> No timestamp found in results. Returning {results.Count} logs.");
+ return results;
+ }
+
+ Console.WriteLine($" -> Last log timestamp: {lastLogTimestamp}");
+
+ // Parse the timestamp and add 1 millisecond to avoid querying the same log again
+ var lastLogDate = DateTimeOffset.Parse(lastLogTimestamp + " +0000");
+ Console.WriteLine($" -> Last log as DateTimeOffset: {lastLogDate:yyyy-MM-ddTHH:mm:ss.fffZ} ({lastLogDate.ToUnixTimeSeconds()}s)");
+
+ var offsetLastLogDate = lastLogDate.AddMilliseconds(1);
+ Console.WriteLine($" -> Offset timestamp (last + 1ms): {offsetLastLogDate:yyyy-MM-ddTHH:mm:ss.fffZ} ({offsetLastLogDate.ToUnixTimeSeconds()}s)");
+
+ // Convert to seconds, but round UP to the next second to avoid overlapping with logs in the same second
+ // This ensures we don't re-query logs that share the same second as the last log
+ var offsetLastLogTime = offsetLastLogDate.ToUnixTimeSeconds();
+ if (offsetLastLogDate.Millisecond > 0)
+ {
+ offsetLastLogTime++; // Move to the next full second
+ Console.WriteLine($" -> Adjusted to next full second: {offsetLastLogTime}s ({DateTimeOffset.FromUnixTimeSeconds(offsetLastLogTime):yyyy-MM-ddTHH:mm:ss.fffZ})");
+ }
+
+ Console.WriteLine($" -> Comparing: offsetLastLogTime={offsetLastLogTime}s vs endTime={endTime}s");
+ Console.WriteLine($" -> End time as date: {DateTimeOffset.FromUnixTimeSeconds(endTime):yyyy-MM-ddTHH:mm:ss.fffZ}");
+
+ // Check if there's any time range left to query
+ if (offsetLastLogTime >= endTime)
+ {
+ Console.WriteLine($" -> No time range left to query. Offset time ({offsetLastLogTime}s) >= end time ({endTime}s)");
+ return results;
+ }
+
+ // Split the remaining date range in half
+ var (range1Start, range1End, range2Start, range2End) = SplitDateRange(offsetLastLogTime, endTime);
+
+ var range1StartDate = DateTimeOffset.FromUnixTimeSeconds(range1Start).ToString("yyyy-MM-ddTHH:mm:ss.fffZ");
+ var range1EndDate = DateTimeOffset.FromUnixTimeSeconds(range1End).ToString("yyyy-MM-ddTHH:mm:ss.fffZ");
+ var range2StartDate = DateTimeOffset.FromUnixTimeSeconds(range2Start).ToString("yyyy-MM-ddTHH:mm:ss.fffZ");
+ var range2EndDate = DateTimeOffset.FromUnixTimeSeconds(range2End).ToString("yyyy-MM-ddTHH:mm:ss.fffZ");
+
+ Console.WriteLine($" -> Splitting remaining range:");
+ Console.WriteLine($" Range 1: {range1StartDate} ({range1Start}s) to {range1EndDate} ({range1End}s)");
+ Console.WriteLine($" Range 2: {range2StartDate} ({range2Start}s) to {range2EndDate} ({range2End}s)");
+
+ // Query both halves recursively
+ Console.WriteLine($" -> Querying range 1...");
+ var results1 = await PerformLargeQuery(logGroupName, queryString, range1Start, range1End, limit);
+ Console.WriteLine($" -> Range 1 returned {results1.Count} logs");
+
+ Console.WriteLine($" -> Querying range 2...");
+ var results2 = await PerformLargeQuery(logGroupName, queryString, range2Start, range2End, limit);
+ Console.WriteLine($" -> Range 2 returned {results2.Count} logs");
+
+ // Combine all results
+ var allResults = new List>(results);
+ allResults.AddRange(results1);
+ allResults.AddRange(results2);
+
+ Console.WriteLine($" -> Combined total: {allResults.Count} logs ({results.Count} + {results1.Count} + {results2.Count})");
+
+ return allResults;
+ }
+
/// <summary>
/// Gets the timestamp string of the most recent log from a list of logs.
/// </summary>
/// <param name="logs">The query results, one list of result fields per log entry.</param>
/// <returns>The latest "@timestamp" value, or null if no entry has a timestamp.</returns>
private static string? GetLastLogTimestamp(List<List<ResultField>> logs)
{
    // Max() finds the latest value in O(n), avoiding the full sort the
    // previous implementation used. ISO 8601 timestamps order correctly
    // as strings, and Max() returns null for an empty sequence, which
    // matches the previous "no timestamp found" behavior.
    return logs
        .Select(log => log.Find(f => f.Field == "@timestamp")?.Value)
        .Where(t => !string.IsNullOrEmpty(t))
        .Max();
}
+
/// <summary>
/// Splits a date range (Unix seconds) into two non-overlapping halves.
/// The second range begins one second after the midpoint so no second
/// is queried twice.
/// </summary>
/// <param name="startTime">Range start, in Unix seconds.</param>
/// <param name="endTime">Range end, in Unix seconds.</param>
/// <returns>The two half ranges as (start1, end1, start2, end2).</returns>
private static (long range1Start, long range1End, long range2Start, long range2End) SplitDateRange(long startTime, long endTime)
{
    // Overflow-safe midpoint computation.
    long halfSpan = (endTime - startTime) / 2;
    long mid = startTime + halfSpan;

    return (range1Start: startTime, range1End: mid, range2Start: mid + 1, range2End: endTime);
}
+
/// <summary>
/// Polls for query results until the query completes, fails, or the
/// retry budget (60 attempts, 1 second apart) is exhausted.
/// </summary>
/// <param name="queryId">The ID of the query to poll.</param>
/// <returns>The query results, or null on failure or timeout.</returns>
private static async Task<List<List<ResultField>>?> PollQueryResults(string queryId)
{
    const int maxRetries = 60;
    const int retryDelay = 1000;

    for (int attempt = 0; attempt < maxRetries; attempt++)
    {
        var response = await _wrapper.GetQueryResultsAsync(queryId);
        if (response == null)
        {
            return null;
        }

        if (response.Status == QueryStatus.Complete)
        {
            return response.Results;
        }

        // Any terminal non-success status means the query will never complete.
        bool terminalFailure =
            response.Status == QueryStatus.Failed ||
            response.Status == QueryStatus.Cancelled ||
            response.Status == QueryStatus.Timeout ||
            response.Status == QueryStatus.Unknown;

        if (terminalFailure)
        {
            _logger.LogError($"Query failed with status: {response.Status}");
            return null;
        }

        await Task.Delay(retryDelay);
    }

    _logger.LogError("Timed out waiting for query results.");
    return null;
}
+
/// <summary>
/// Cleans up the resources created during the scenario. In interactive
/// mode the user is asked first; in non-interactive mode cleanup always runs.
/// </summary>
/// <returns>True if cleanup succeeded or was intentionally skipped.</returns>
public static async Task<bool> Cleanup()
{
    var cleanup = !_interactive || GetYesNoResponse(
        "Do you want to delete the CloudFormation stack and all resources? (y/n) ");

    if (!cleanup)
    {
        // The user opted out; report where the resources live so they can
        // remove them later.
        Console.WriteLine($"Resources will remain. Stack name: {_stackName}, Log group: {_logGroupName}");
        _logger.LogInformation("CloudWatch Logs Large Query scenario is complete.");
        return true;
    }

    try
    {
        return await DeleteCloudFormationStack(_stackName, false);
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "An error occurred while cleaning up the resources.");
        return false;
    }
}
+
/// <summary>
/// Deletes the CloudFormation stack and waits for the deletion to finish.
/// </summary>
/// <param name="stackName">The name of the stack to delete.</param>
/// <param name="forceDelete">When true, uses FORCE_DELETE_STACK mode.</param>
/// <returns>True if the stack was deleted.</returns>
private static async Task<bool> DeleteCloudFormationStack(string stackName, bool forceDelete)
{
    var request = new DeleteStackRequest { StackName = stackName };

    if (forceDelete)
    {
        request.DeletionMode = DeletionMode.FORCE_DELETE_STACK;
    }

    await _amazonCloudFormation.DeleteStackAsync(request);
    Console.WriteLine($"CloudFormation stack '{stackName}' is being deleted. This may take a few minutes.");

    if (await WaitForStackDeletion(stackName, forceDelete))
    {
        Console.WriteLine($"CloudFormation stack '{stackName}' has been deleted.");
        return true;
    }

    _logger.LogError($"Failed to delete CloudFormation stack '{stackName}'.");
    return false;
}
+
/// <summary>
/// Waits for the stack to be deleted, polling up to 30 times at
/// 10-second intervals.
/// </summary>
/// <param name="stackName">The name of the stack being deleted.</param>
/// <param name="forceDelete">Whether this wait follows a force delete.</param>
/// <returns>True once the stack is gone; false on timeout.</returns>
private static async Task<bool> WaitForStackDeletion(string stackName, bool forceDelete)
{
    const int maxRetries = 30;
    const int retryDelay = 10000;

    for (int attempt = 0; attempt < maxRetries; attempt++)
    {
        var describeStacksRequest = new DescribeStacksRequest
        {
            StackName = stackName
        };

        try
        {
            var describeStacksResponse = await _amazonCloudFormation.DescribeStacksAsync(describeStacksRequest);
            var stacks = describeStacksResponse.Stacks;

            if (stacks.Count == 0 || stacks[0].StackStatus == StackStatus.DELETE_COMPLETE)
            {
                return true;
            }

            // A normal delete can end in DELETE_FAILED; retry once with a
            // force delete before giving up.
            if (!forceDelete && stacks[0].StackStatus == StackStatus.DELETE_FAILED)
            {
                return await DeleteCloudFormationStack(stackName, true);
            }
        }
        catch (AmazonCloudFormationException ex) when (ex.ErrorCode == "ValidationError")
        {
            // DescribeStacks throws ValidationError once the stack no longer exists.
            return true;
        }

        Console.WriteLine($"Waiting for CloudFormation stack '{stackName}' to be deleted...");
        await Task.Delay(retryDelay);
    }

    _logger.LogError($"Timed out waiting for CloudFormation stack '{stackName}' to be deleted.");
    return false;
}
+
/// <summary>
/// Waits for the given number of seconds, updating a single console line
/// with the remaining time.
/// </summary>
/// <param name="seconds">How many seconds to wait.</param>
private static async Task WaitWithCountdown(int seconds)
{
    for (var remaining = seconds; remaining > 0; remaining--)
    {
        // \r rewrites the same console line instead of scrolling.
        Console.Write($"\rWaiting: {remaining} seconds remaining... ");
        await Task.Delay(1000);
    }

    Console.WriteLine("\rWait complete. ");
}
+
/// <summary>
/// Helper method to get a yes or no response from the user.
/// </summary>
/// <param name="question">The question to display.</param>
/// <returns>True only if the user enters "y" (case-insensitive).</returns>
private static bool GetYesNoResponse(string question)
{
    Console.WriteLine(question);
    var answer = Console.ReadLine();

    // string.Equals handles a null answer (end of input) by returning false.
    return string.Equals(answer, "y", StringComparison.InvariantCultureIgnoreCase);
}
+
/// <summary>
/// Prompts the user for a CloudFormation stack name, falling back to the
/// default when non-interactive, on empty input, or on invalid input.
/// </summary>
/// <returns>The validated stack name, or the default.</returns>
private static string PromptUserForStackName()
{
    if (_interactive)
    {
        Console.Write($"Enter a name for the CloudFormation stack (press Enter for default '{_stackName}'): ");
        string? input = Console.ReadLine();
        if (!string.IsNullOrWhiteSpace(input))
        {
            // Anchored with ^ and $ so the ENTIRE input must match.
            // The previous unanchored pattern accepted any input containing
            // a valid substring (e.g. "123abc" passed because "abc" matched).
            var regex = "^[a-zA-Z][-a-zA-Z0-9]*$";
            if (!Regex.IsMatch(input, regex))
            {
                Console.WriteLine($"Invalid stack name. Using default: {_stackName}");
                return _stackName;
            }
            return input;
        }
    }
    return _stackName;
}
+
/// <summary>
/// Prompts the user for input with a default value. In non-interactive
/// mode, or on empty input, the default is returned.
/// </summary>
/// <param name="prompt">The prompt to display.</param>
/// <param name="defaultValue">The value used when no input is given.</param>
/// <returns>The user's input or the default value.</returns>
private static string PromptUserForInput(string prompt, string defaultValue)
{
    if (!_interactive)
    {
        return defaultValue;
    }

    Console.Write($"{prompt}(press Enter for default '{defaultValue}'): ");
    string? input = Console.ReadLine();
    return string.IsNullOrWhiteSpace(input) ? defaultValue : input;
}
+
/// <summary>
/// Prompts the user for an integer value. In non-interactive mode, or when
/// the input is empty or not a valid integer, the default is returned.
/// </summary>
/// <param name="prompt">The prompt to display.</param>
/// <param name="defaultValue">The value used when parsing fails.</param>
/// <returns>The parsed integer or the default value.</returns>
private static int PromptUserForInteger(string prompt, int defaultValue)
{
    if (!_interactive)
    {
        return defaultValue;
    }

    Console.Write($"{prompt}(press Enter for default '{defaultValue}'): ");
    string? input = Console.ReadLine();
    return !string.IsNullOrWhiteSpace(input) && int.TryParse(input, out var parsed)
        ? parsed
        : defaultValue;
}
+
/// <summary>
/// Prompts the user for a long value. Returns 0 in non-interactive mode
/// or when the input cannot be parsed.
/// </summary>
/// <param name="prompt">The prompt to display.</param>
/// <returns>The parsed long value, or 0.</returns>
private static long PromptUserForLong(string prompt)
{
    if (!_interactive)
    {
        return 0;
    }

    Console.Write(prompt);
    string? input = Console.ReadLine();
    return long.TryParse(input, out var parsed) ? parsed : 0;
}
+
/// <summary>
/// Finds duplicate log entries based on timestamp and message.
/// </summary>
/// <param name="logs">The query results, one list of result fields per log entry.</param>
/// <returns>Duplicated (timestamp, message) pairs with their occurrence counts,
/// ordered by count descending.</returns>
private static List<(string Timestamp, string Message, int Count)> FindDuplicateLogs(List<List<ResultField>> logs)
{
    var logSignatures = new Dictionary<string, int>();

    foreach (var log in logs)
    {
        var timestamp = log.Find(f => f.Field == "@timestamp")?.Value ?? "";
        var message = log.Find(f => f.Field == "@message")?.Value ?? "";
        var signature = $"{timestamp}|{message}";

        // TryGetValue avoids the ContainsKey + indexer double lookup.
        logSignatures[signature] = logSignatures.TryGetValue(signature, out var count)
            ? count + 1
            : 1;
    }

    return logSignatures
        .Where(kvp => kvp.Value > 1)
        .Select(kvp =>
        {
            // Split into at most two parts so a message that itself contains
            // '|' is not truncated at its first separator.
            var parts = kvp.Key.Split('|', 2);
            return (Timestamp: parts[0], Message: parts.Length > 1 ? parts[1] : "", Count: kvp.Value);
        })
        .OrderByDescending(x => x.Count)
        .ToList();
}
+}
+// snippet-end:[CloudWatchLogs.dotnetv4.LargeQueryWorkflow]
\ No newline at end of file
diff --git a/dotnetv4/CloudWatchLogs/LargeQuery/Tests/CloudWatchLogsTests.csproj b/dotnetv4/CloudWatchLogs/LargeQuery/Tests/CloudWatchLogsTests.csproj
new file mode 100644
index 00000000000..8222f0ee7fa
--- /dev/null
+++ b/dotnetv4/CloudWatchLogs/LargeQuery/Tests/CloudWatchLogsTests.csproj
@@ -0,0 +1,32 @@
+
+
+
+ net8.0
+ enable
+ enable
+ false
+ true
+ $(NoWarn);NETSDK1206
+
+
+
+
+
+
+
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+
+
+
+
+
+
diff --git a/dotnetv4/CloudWatchLogs/LargeQuery/Tests/LargeQueryWorkflowTests.cs b/dotnetv4/CloudWatchLogs/LargeQuery/Tests/LargeQueryWorkflowTests.cs
new file mode 100644
index 00000000000..161e61f7fe3
--- /dev/null
+++ b/dotnetv4/CloudWatchLogs/LargeQuery/Tests/LargeQueryWorkflowTests.cs
@@ -0,0 +1,55 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+using Amazon.CloudFormation;
+using Amazon.CloudWatchLogs;
+using CloudWatchLogsActions;
+using CloudWatchLogsScenario;
+using Microsoft.Extensions.Logging;
+using Moq;
+
+namespace CloudWatchLogsTests;
+
+///
+/// Integration tests for the CloudWatch Logs Large Query workflow.
+///
+public class LargeQueryWorkflowTests
+{
+ ///
+ /// Verifies the scenario with an integration test. No errors should be logged.
+ ///
+ /// Async task.
+ [Fact]
+ [Trait("Category", "Integration")]
+ public async Task TestScenarioIntegration()
+ {
+ // Arrange
+ LargeQueryWorkflow._interactive = false;
+
+ var loggerScenarioMock = new Mock>();
+ loggerScenarioMock.Setup(logger => logger.Log(
+ It.Is(logLevel => logLevel == LogLevel.Error),
+ It.IsAny(),
+ It.Is((@object, @type) => true),
+ It.IsAny(),
+ It.IsAny>()));
+
+ // Act
+ LargeQueryWorkflow._logger = loggerScenarioMock.Object;
+ LargeQueryWorkflow._wrapper = new CloudWatchLogsWrapper(
+ new AmazonCloudWatchLogsClient(),
+ new Mock>().Object);
+ LargeQueryWorkflow._amazonCloudFormation = new AmazonCloudFormationClient();
+
+ await LargeQueryWorkflow.RunScenario();
+
+ // Assert no errors logged
+ loggerScenarioMock.Verify(logger => logger.Log(
+ It.Is(logLevel => logLevel == LogLevel.Error),
+ It.IsAny(),
+ It.Is((@object, @type) => true),
+ It.IsAny(),
+ It.IsAny>()),
+ Times.Never);
+ }
+}
\ No newline at end of file
diff --git a/dotnetv4/CloudWatchLogs/LargeQuery/Tests/Usings.cs b/dotnetv4/CloudWatchLogs/LargeQuery/Tests/Usings.cs
new file mode 100644
index 00000000000..24f9d54e547
--- /dev/null
+++ b/dotnetv4/CloudWatchLogs/LargeQuery/Tests/Usings.cs
@@ -0,0 +1,4 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+global using Xunit;
\ No newline at end of file
diff --git a/dotnetv4/CloudWatchLogs/README.md b/dotnetv4/CloudWatchLogs/README.md
new file mode 100644
index 00000000000..c0e1a4cf50b
--- /dev/null
+++ b/dotnetv4/CloudWatchLogs/README.md
@@ -0,0 +1,98 @@
+# CloudWatch Logs code examples for the SDK for .NET (v4)
+
+## Overview
+
+Shows how to use the AWS SDK for .NET (v4) to work with Amazon CloudWatch Logs.
+
+
+
+
+_CloudWatch Logs monitor, store, and access your log files from Amazon Elastic Compute Cloud instances, AWS CloudTrail, or other sources._
+
+## ⚠ Important
+
+* Running this code might result in charges to your AWS account. For more details, see [AWS Pricing](https://aws.amazon.com/pricing/) and [Free Tier](https://aws.amazon.com/free/).
+* Running the tests might result in charges to your AWS account.
+* We recommend that you grant your code least privilege. At most, grant only the minimum permissions required to perform the task. For more information, see [Grant least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege).
+* This code is not tested in every AWS Region. For more information, see [AWS Regional Services](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services).
+
+
+
+
+## Code examples
+
+### Prerequisites
+
+For prerequisites, see the [README](../README.md#Prerequisites) in the `dotnetv4` folder.
+
+
+
+
+
+### Single actions
+
+Code excerpts that show you how to call individual service functions.
+
+- [GetQueryResults](LargeQuery/Actions/CloudWatchLogsWrapper.cs#L79)
+- [StartQuery](LargeQuery/Actions/CloudWatchLogsWrapper.cs#L30)
+
+### Scenarios
+
+Code examples that show you how to accomplish a specific task by calling multiple
+functions within the same service.
+
+- [Run a large query](LargeQuery/Scenarios/LargeQueryWorkflow.cs)
+
+
+
+
+
+## Run the examples
+
+### Instructions
+
+
+
+
+
+
+
+#### Run a large query
+
+This example shows you how to use CloudWatch Logs to query more than 10,000 records.
+
+
+
+
+
+
+
+
+
+### Tests
+
+⚠ Running tests might result in charges to your AWS account.
+
+
+To find instructions for running these tests, see the [README](../README.md#Tests)
+in the `dotnetv4` folder.
+
+
+
+
+
+
+## Additional resources
+
+- [CloudWatch Logs User Guide](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/WhatIsCloudWatchLogs.html)
+- [CloudWatch Logs API Reference](https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/Welcome.html)
+- [SDK for .NET (v4) CloudWatch Logs reference](https://docs.aws.amazon.com/sdkfornet/v4/apidocs/items/Cloudwatch-logs/NCloudwatch-logs.html)
+
+
+
+
+---
+
+Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
diff --git a/dotnetv4/DotNetV4Examples.sln b/dotnetv4/DotNetV4Examples.sln
index e4e1cf6f809..62c2fd11911 100644
--- a/dotnetv4/DotNetV4Examples.sln
+++ b/dotnetv4/DotNetV4Examples.sln
@@ -63,7 +63,6 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Command_R_InvokeModelWithRe
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Command_R_InvokeModel", "Bedrock-runtime\Models\CohereCommand\Command_R_InvokeModel\Command_R_InvokeModel.csproj", "{6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716}"
EndProject
-
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AnthropicClaude", "AnthropicClaude", "{6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InvokeModelWithResponseStream", "Bedrock-runtime\Models\AnthropicClaude\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj", "{345DA0D1-C762-49EF-9953-6F4D57CB7FC7}"
@@ -76,7 +75,6 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Converse", "Bedrock-runtime
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AmazonTitanText", "AmazonTitanText", "{74979310-8A92-47DC-B5CA-EFA7970E1202}"
EndProject
-
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BedrockRuntimeActions", "Bedrock-runtime\Actions\BedrockRuntimeActions.csproj", "{05E93A3E-CFA0-4980-8EE5-CD25C7ED766D}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CloudFormation", "CloudFormation", "{5FBEAD92-9234-4824-9320-2052D236C9CD}"
@@ -93,6 +91,14 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CloudWatchScenario", "Cloud
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CloudWatchActions", "CloudWatch\Actions\CloudWatchActions.csproj", "{EAF4A3B8-5CD0-48ED-B848-0EA6D451B8D3}"
EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "CloudWatchLogs", "CloudWatchLogs", "{A1B2C3D4-E5F6-7890-1234-567890ABCDEF}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CloudWatchLogsTests", "CloudWatchLogs\LargeQuery\Tests\CloudWatchLogsTests.csproj", "{B1A4BF4B-D2AE-4E7D-AAA2-5D77877848B0}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CloudWatchLogsScenario", "CloudWatchLogs\LargeQuery\Scenarios\CloudWatchLogsScenario.csproj", "{EEDAB42C-1106-42C9-9601-D44F21B475DE}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CloudWatchLogsActions", "CloudWatchLogs\LargeQuery\Actions\CloudWatchLogsActions.csproj", "{7781E31F-CABB-484A-AD52-EBC02D2EB274}"
+EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "EC2", "EC2", "{9424FB14-B6DE-44CE-B675-AC2B57EC1E69}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "EC2Tests", "EC2\Tests\EC2Tests.csproj", "{C99A0F7C-9477-4985-90F6-8EED38ECAC10}"
@@ -237,7 +243,6 @@ Global
{6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716}.Debug|Any CPU.Build.0 = Debug|Any CPU
{6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716}.Release|Any CPU.ActiveCfg = Release|Any CPU
{6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716}.Release|Any CPU.Build.0 = Release|Any CPU
-
{345DA0D1-C762-49EF-9953-6F4D57CB7FC7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{345DA0D1-C762-49EF-9953-6F4D57CB7FC7}.Debug|Any CPU.Build.0 = Debug|Any CPU
{345DA0D1-C762-49EF-9953-6F4D57CB7FC7}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -254,7 +259,6 @@ Global
{874C7405-ED8D-477D-9362-0C69CF56F213}.Debug|Any CPU.Build.0 = Debug|Any CPU
{874C7405-ED8D-477D-9362-0C69CF56F213}.Release|Any CPU.ActiveCfg = Release|Any CPU
{874C7405-ED8D-477D-9362-0C69CF56F213}.Release|Any CPU.Build.0 = Release|Any CPU
-
{05E93A3E-CFA0-4980-8EE5-CD25C7ED766D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{05E93A3E-CFA0-4980-8EE5-CD25C7ED766D}.Debug|Any CPU.Build.0 = Debug|Any CPU
{05E93A3E-CFA0-4980-8EE5-CD25C7ED766D}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -279,6 +283,18 @@ Global
{EAF4A3B8-5CD0-48ED-B848-0EA6D451B8D3}.Debug|Any CPU.Build.0 = Debug|Any CPU
{EAF4A3B8-5CD0-48ED-B848-0EA6D451B8D3}.Release|Any CPU.ActiveCfg = Release|Any CPU
{EAF4A3B8-5CD0-48ED-B848-0EA6D451B8D3}.Release|Any CPU.Build.0 = Release|Any CPU
+ {B1A4BF4B-D2AE-4E7D-AAA2-5D77877848B0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {B1A4BF4B-D2AE-4E7D-AAA2-5D77877848B0}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {B1A4BF4B-D2AE-4E7D-AAA2-5D77877848B0}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {B1A4BF4B-D2AE-4E7D-AAA2-5D77877848B0}.Release|Any CPU.Build.0 = Release|Any CPU
+ {EEDAB42C-1106-42C9-9601-D44F21B475DE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {EEDAB42C-1106-42C9-9601-D44F21B475DE}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {EEDAB42C-1106-42C9-9601-D44F21B475DE}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {EEDAB42C-1106-42C9-9601-D44F21B475DE}.Release|Any CPU.Build.0 = Release|Any CPU
+ {7781E31F-CABB-484A-AD52-EBC02D2EB274}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {7781E31F-CABB-484A-AD52-EBC02D2EB274}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {7781E31F-CABB-484A-AD52-EBC02D2EB274}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {7781E31F-CABB-484A-AD52-EBC02D2EB274}.Release|Any CPU.Build.0 = Release|Any CPU
{C99A0F7C-9477-4985-90F6-8EED38ECAC10}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C99A0F7C-9477-4985-90F6-8EED38ECAC10}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C99A0F7C-9477-4985-90F6-8EED38ECAC10}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -378,14 +394,12 @@ Global
{81EA8494-176C-4178-A1C3-6FA3B1222B74} = {39EAAA32-53A8-4641-873C-976FD5963360}
{085F3A30-A788-48D6-8067-74D71C29A941} = {39EAAA32-53A8-4641-873C-976FD5963360}
{6FCC8A6C-A172-4AAF-A0FC-66C3BD9E8716} = {39EAAA32-53A8-4641-873C-976FD5963360}
-
{6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39} = {4429C078-35C8-4E2B-9C7B-F0C619741B67}
{345DA0D1-C762-49EF-9953-6F4D57CB7FC7} = {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39}
{C95689B5-C0A1-4C1F-9E97-369D3D397930} = {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39}
{8551C158-60B4-4594-8B1D-5BE851F90EE4} = {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39}
{874C7405-ED8D-477D-9362-0C69CF56F213} = {6FF2EDB6-D1B8-4EE0-B1F0-2BCE66972E39}
{74979310-8A92-47DC-B5CA-EFA7970E1202} = {4429C078-35C8-4E2B-9C7B-F0C619741B67}
-
{05E93A3E-CFA0-4980-8EE5-CD25C7ED766D} = {D859B39C-9106-4D3D-8C57-11B15FA8106B}
{AAFC86EB-49D7-4FD8-8C79-C42C129EB75A} = {5FBEAD92-9234-4824-9320-2052D236C9CD}
{98A11016-DD41-4848-A848-51D703951A91} = {5FBEAD92-9234-4824-9320-2052D236C9CD}
diff --git a/scenarios/features/cloudwatch_logs_large_query/README.md b/scenarios/features/cloudwatch_logs_large_query/README.md
index 1b76e5758c6..17d487f3e3f 100644
--- a/scenarios/features/cloudwatch_logs_large_query/README.md
+++ b/scenarios/features/cloudwatch_logs_large_query/README.md
@@ -47,6 +47,7 @@ A lot of logs are needed to make a robust example. If you happen to have a log g
This example is implemented in the following languages:
+- [.NET](../../../dotnetv4/CloudWatchLogs/LargeQuery/README.md)
- [JavaScript](../../../javascriptv3/example_code/cloudwatch-logs/scenarios/large-query/README.md)
- [Python](../../../python/example_code/cloudwatch-logs/scenarios/large-query/README.md)
diff --git a/scenarios/features/cloudwatch_logs_large_query/SPECIFICATION.md b/scenarios/features/cloudwatch_logs_large_query/SPECIFICATION.md
index 788d7859efc..055ac351f9c 100644
--- a/scenarios/features/cloudwatch_logs_large_query/SPECIFICATION.md
+++ b/scenarios/features/cloudwatch_logs_large_query/SPECIFICATION.md
@@ -1,54 +1,69 @@
-# CloudWatch Logs large query - Technical specification
+# CloudWatch Logs Large Query - Technical Specification
-This document contains the technical specifications for _CloudWatch Logs large query_,
-a feature scenario that showcases AWS services and SDKs. It is primarily intended for the AWS code
-examples team to use while developing this example in additional languages.
+## Overview
-This document explains the following:
+This feature scenario demonstrates how to perform large-scale queries on Amazon CloudWatch Logs using recursive binary search to retrieve more than the 10,000 result limit.
-- Deploying AWS resources.
-- Adding sample data.
-- Setting up a large query.
+**Important**: This is a complete, self-contained scenario that handles all setup and cleanup automatically. The scenario includes:
+
+1. Deploying CloudFormation resources (log group and stream)
+2. Generating and ingesting 50,000 sample log entries
+3. Performing recursive queries to retrieve all logs using binary search
+4. Cleaning up all resources
For an introduction, see the [README.md](README.md).
---
-### Table of contents
+## Table of Contents
-- [Architecture](#architecture)
-- [User input](#user-input)
-- [Common resources](#common-resources)
+- [API Actions Used](#api-actions-used)
+- [Resources](#resources)
+- [Variables](#variables)
- [Building the queries](#building-the-queries)
-- [Output](#output)
+- [Example Structure](#example-structure)
+- [Output Format](#output-format)
+- [Errors](#errors)
- [Metadata](#metadata)
-## Architecture
+---
-- Amazon CloudWatch Logs group
-- Amazon CloudWatch Logs stream
+## API Actions Used
----
+This scenario uses the following CloudWatch Logs API actions:
-## User input
+- `StartQuery` - Initiates a CloudWatch Logs Insights query
+- `GetQueryResults` - Retrieves results from a query, polling until complete
-The example should allow the configuration of a query start date, query end date, and results limit. It's up to you to decide how to allow this configuration.
+---
-### Suggested variable names
+## Resources
-- `QUERY_START_DATE` - The oldest date that will be queried.
-- `QUERY_END_DATE` - The newest date that will be queried.
-- `QUERY_LIMIT` - The maximum number of results to return. CloudWatch has a maximum of 10,000.
+### CloudFormation Template
----
+**Location**: `scenarios/features/cloudwatch_logs_large_query/resources/stack.yaml`
-## Common resources
+**Resources Created**:
+- CloudWatch Logs Log Group: `/workflows/cloudwatch-logs/large-query`
+- CloudWatch Logs Log Stream: `stream1`
-This example has a set of common resources that are stored in the [resources](resources) folder.
+### Helper files
+These files are for reference only. New versions of this example should create and upload logs as part of the scenario.
-- [stack.yaml](resources/stack.yaml) is an AWS CloudFormation template containing the resources needed to run this example.
-- [make-log-files.sh](resources/make-log-files.sh) is a bash script that creates log data. **Five minutes of logs, starting at the time of execution, will be created. Wait at least five minutes after running this script before attempting to query.**
- [put-log-events](resources/put-log-events.sh) is a bash script that ingests log data and uploads it to CloudWatch.
+- [make-log-files.sh](resources/make-log-files.sh) is a bash script that creates log data. **Five minutes of logs, starting at the time of execution, will be created. Wait at least five minutes after running this script before attempting to query.**
+---
+
+## Variables
+
+| Variable Name | Description | Type | Default |
+|--------------|-------------|------|---------|
+| `stackName` | CloudFormation stack name | String | "CloudWatchLargeQueryStack" |
+| `queryStartDate` | Query start timestamp | Long/Integer | From script output |
+| `queryEndDate` | Query end timestamp | Long/Integer | From script output |
+| `queryLimit` | Maximum results per query | Integer | 10000 |
+| `logGroupName` | Log group name (if not using stack) | String | "/workflows/cloudwatch-logs/large-query" |
+| `logStreamName` | Log stream name (if not using stack) | String | "stream1" |
---
@@ -88,14 +103,89 @@ func large_query(date_range):
return concat(query_results, large_query(d1), large_query(d2))
```
-## Output
-To illustrate the search, log the date ranges for each query made and the number of logs that were found.
+## Example Structure
+
+### Phase 1: Setup
+
+**Purpose**: Deploy resources and generate sample data as part of the scenario
+
+1. Welcome message explaining the scenario
+2. Prompt user: "Would you like to deploy the CloudFormation stack and generate sample logs? (y/n)"
+3. If yes:
+ - Prompt for CloudFormation stack name (default: "CloudWatchLargeQueryStack")
+ - Deploy CloudFormation stack from `resources/stack.yaml`
+ - Wait for stack creation to complete (status: CREATE_COMPLETE)
+ - Generate logs directly using CloudWatch Logs API:
+ - Create 50,000 log entries with timestamps spanning 5 minutes
+ - Upload in batches of 10,000 entries using PutLogEvents
+ - Display progress for each batch uploaded
+ - Capture start and end timestamps for query configuration
+ - Display message: "Sample logs created. Waiting 5 minutes for logs to be fully ingested..."
+ - Wait 5 minutes (300 seconds) for log ingestion with countdown display
+4. If no:
+ - Prompt user for existing log group name, or enter to use the default name
+ - Prompt user for log stream name, or enter to use the default name
+ - Prompt user for query start date (ISO 8601 format with milliseconds)
+ - Prompt user for query end date (ISO 8601 format with milliseconds)
+
+**Fully Self-Contained Behavior**:
+- Automatically deploys stack with default name
+- Automatically generates 50,000 sample logs
+- Waits 5 minutes for log ingestion
+- Uses default values for all configuration
+
+
+### Phase 2: Query Execution
+
+**Purpose**: Demonstrate recursive large query functionality
+
+**Steps**:
+1. Display message: "Starting recursive query to retrieve all logs..."
+2. Prompt user for query limit (default: 10000, max: 10000)
+3. Set query string: `fields @timestamp, @message | sort @timestamp asc`
+4. Execute recursive query function with:
+ - Log group name
+ - Query string
+ - Start date
+ - End date
+ - Limit
+5. Display progress for each query executed
+6. Display total execution time
+7. Display total logs found
+8. Prompt user: "Would you like to see a sample of the logs? (y/n)"
+9. If yes, display first 10 log entries with timestamps and messages
+
+### Phase 3: Cleanup
+
+**Purpose**: Remove created resources
+
+**Interactive Mode Steps**:
+1. Prompt user: "Would you like to delete the CloudFormation stack and all resources? (y/n)"
+2. If yes:
+ - Delete CloudFormation stack
+ - Wait for stack deletion to complete (status: DELETE_COMPLETE or stack not found)
+ - Display message: "Stack deleted successfully"
+3. If no:
+ - Display message: "Resources will remain. You can delete them later through the AWS Console."
+ - Display stack name and log group name for reference
+
+
+---
+
+## Output Format
+
+### Query Progress Output
-Example:
+Display each query execution with the following format:
```
-Starting a recursive query...
+Query date range: <start date> to <end date>. Found <count> logs.
+```
+
+**Example**:
+```
+Starting recursive query...
Query date range: 2023-12-22T19:08:42.000Z to 2023-12-22T19:13:41.994Z. Found 10000 logs.
Query date range: 2023-12-22T19:09:41.995Z to 2023-12-22T19:11:41.994Z. Found 10000 logs.
Query date range: 2023-12-22T19:11:41.995Z to 2023-12-22T19:13:41.994Z. Found 10000 logs.
@@ -107,6 +197,33 @@ Queries finished in 11.253 seconds.
Total logs found: 50000
```
+### Summary Output
+
+After all queries complete, display:
+- Total execution time (in seconds with 3 decimal places)
+- Total number of logs found
+
+### Sample Logs Output
+
+If user chooses to view sample logs, display first 10 entries:
+
+```
+Sample logs (first 10 of 50000):
+[2023-12-22T19:08:42.000Z] Entry 0
+[2023-12-22T19:08:42.006Z] Entry 1
+[2023-12-22T19:08:42.012Z] Entry 2
+...
+```
+
+---
+
+## Errors
+
+| Error Code | Error Message Pattern | Handling Strategy |
+|------------|----------------------|-------------------|
+| `InvalidParameterException` | "Query's end date and time" | Date range is out of bounds; inform user and adjust dates |
+| `ResourceNotFoundException` | Log group not found | Verify log group exists; prompt user to run setup |
+
---
## Metadata
@@ -115,4 +232,4 @@ Total logs found: 50000
| ----------------- | ----------------------------- | --------------------------------- |
| `GetQueryResults` | cloudwatch-logs_metadata.yaml | cloudwatch-logs_GetQueryResults |
| `StartQuery` | cloudwatch-logs_metadata.yaml | cloudwatch-logs_StartQuery |
-| `Large Query` | cloudwatch-logs_metadata.yaml | cloudwatch-logs_Scenario_LargeQuery |
+| `Large Query`     | cloudwatch-logs_metadata.yaml | cloudwatch-logs_Scenario_BigQuery |
diff --git a/steering_docs/dotnet-tech/scenario.md b/steering_docs/dotnet-tech/scenario.md
new file mode 100644
index 00000000000..53797c8c682
--- /dev/null
+++ b/steering_docs/dotnet-tech/scenario.md
@@ -0,0 +1,989 @@
+# .NET Feature Scenario Generation
+
+## Purpose
+Generate feature scenarios that demonstrate complete workflows using multiple service operations in a guided, educational manner. Implementation must be based on the service SPECIFICATION.md file.
+
+## Target Directory
+**IMPORTANT**: All new feature scenarios MUST be created in the `dotnetv4` directory, NOT `dotnetv3`.
+
+- **New scenarios**: `dotnetv4/{Service}/`
+
+## Requirements
+- **Specification-Driven**: MUST read the `scenarios/features/{service_feature}/SPECIFICATION.md`
+- **Interactive**: Use Console.WriteLine and Console.ReadLine for user input and guidance
+- **Educational**: Break complex workflows into logical phases
+- **Comprehensive**: Cover setup, demonstration, examination, and cleanup
+- **Error Handling**: Graceful error handling with user-friendly messages
+- **Wrapper Classes**: MUST use service wrapper classes for all operations
+- **CloudFormation**: Deploy resources using CloudFormation stacks when specified
+- **Namespaces**: MUST use file-level namespaces that match the project names
+- **Using Statements**: MUST cleanup unused using statements
+
+## Project Structure
+
+Feature scenarios use a multi-project structure with separate projects for actions, scenarios, and tests:
+
+```
+dotnetv4/{Service}/
+├── {Service}.sln # Solution file
+├── Actions/
+│ ├── {Service}Wrapper.cs # Wrapper class for service operations
+│ ├── Hello{Service}.cs # Hello world example (optional)
+│ └── {Service}Actions.csproj # Actions project file
+├── Scenarios/
+│ ├── {Service}Workflow.cs # Main workflow/scenario file
+│ ├── README.md # Scenario documentation
+│ └── {Service}Scenario.csproj # Scenario project file (references Actions)
+└── Tests/
+ ├── {Service}WorkflowTests.cs # Unit tests for workflow
+ ├── Usings.cs # Global usings for tests
+ └── {Service}Tests.csproj # Test project file (references Scenarios)
+```
+
+**Note**: Use `dotnetv4` for all new feature scenarios. The `dotnetv3` directory is for legacy examples only.
+
+## MANDATORY Pre-Implementation Steps
+
+### Step 1: Read Scenario Specification
+**CRITICAL**: Always read `scenarios/features/{service_feature}/SPECIFICATION.md` first to understand:
+- **API Actions Used**: Exact operations to implement
+- **Proposed Example Structure**: Setup, demonstration, examination, cleanup phases
+- **Error Handling**: Specific error codes and handling requirements
+- **Scenario Flow**: Step-by-step scenario description
+
+### Step 2: Extract Implementation Requirements
+From the specification, identify:
+- **Setup Phase**: What resources need to be created/configured
+- **Demonstration Phase**: What operations to demonstrate
+- **Examination Phase**: What data to display and how to filter/analyze
+- **Cleanup Phase**: What resources to clean up and user options
+
+## Workflow Class Pattern
+
+### Implementation Pattern Based on SPECIFICATION.md
+
+```csharp
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[{Service}.dotnetv4.{Service}Workflow]
+using Amazon.{Service};
+using Amazon.CloudFormation;
+using Amazon.CloudFormation.Model;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Hosting;
+using Microsoft.Extensions.Logging;
+using {Service}Actions;
+
+namespace {Service}Scenario;
+
+public class {Service}Workflow
+{
+ /*
+ Before running this .NET code example, set up your development environment, including your credentials.
+ This .NET code example performs the following tasks for the {AWS Service} workflow:
+
+ 1. Prepare the Application:
+ - {Setup step 1 from specification}
+ - {Setup step 2 from specification}
+ - Deploy the CloudFormation template for resource creation.
+ - Store the outputs of the stack into variables for use in the scenario.
+
+ 2. {Phase 2 Name}:
+ - {Phase 2 description from specification}
+
+ 3. {Phase 3 Name}:
+ - {Phase 3 description from specification}
+
+ 4. Clean up:
+ - Prompt the user for y/n answer if they want to destroy the stack and clean up all resources.
+ - Delete resources created during the workflow.
+ - Destroy the CloudFormation stack and wait until the stack has been removed.
+ */
+
+ public static ILogger<{Service}Workflow> _logger = null!;
+ public static {Service}Wrapper _wrapper = null!;
+ public static IAmazonCloudFormation _amazonCloudFormation = null!;
+
+ private static string _roleArn = null!;
+ private static string _targetArn = null!;
+
+ public static bool _interactive = true;
+ private static string _stackName = "default-{service}-scenario-stack-name";
+ private static string _stackResourcePath = "../../../../../../scenarios/features/{service_feature}/resources/cfn_template.yaml";
+
+ public static async Task Main(string[] args)
+ {
+ using var host = Host.CreateDefaultBuilder(args)
+ .ConfigureLogging(logging =>
+ logging.AddFilter("System", LogLevel.Debug)
+ .AddFilter("Microsoft", LogLevel.Information)
+ .AddFilter("Microsoft", LogLevel.Trace))
+ .ConfigureServices((_, services) =>
+ services.AddAWSService<IAmazon{Service}>()
+ .AddAWSService<IAmazonCloudFormation>()
+ .AddTransient<{Service}Wrapper>()
+ )
+ .Build();
+
+ if (_interactive)
+ {
+ _logger = LoggerFactory.Create(builder => { builder.AddConsole(); })
+ .CreateLogger<{Service}Workflow>();
+
+ _wrapper = host.Services.GetRequiredService<{Service}Wrapper>();
+ _amazonCloudFormation = host.Services.GetRequiredService<IAmazonCloudFormation>();
+ }
+
+ Console.WriteLine(new string('-', 80));
+ Console.WriteLine("Welcome to the {AWS Service Feature} Scenario.");
+ Console.WriteLine(new string('-', 80));
+
+ try
+ {
+ Console.WriteLine(new string('-', 80));
+ var prepareSuccess = await PrepareApplication();
+ Console.WriteLine(new string('-', 80));
+
+ if (prepareSuccess)
+ {
+ Console.WriteLine(new string('-', 80));
+ await Phase2();
+ Console.WriteLine(new string('-', 80));
+
+ Console.WriteLine(new string('-', 80));
+ await Phase3();
+ Console.WriteLine(new string('-', 80));
+ }
+
+ Console.WriteLine(new string('-', 80));
+ await Cleanup();
+ Console.WriteLine(new string('-', 80));
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "There was a problem with the scenario, initiating cleanup...");
+ _interactive = false;
+ await Cleanup();
+ }
+
+ Console.WriteLine("{AWS Service} feature scenario completed.");
+ }
+
+ /// <summary>
+ /// Prepares the application by creating the necessary resources.
+ /// </summary>
+ /// <returns>True if the application was prepared successfully.</returns>
+ public static async Task<bool> PrepareApplication()
+ {
+ Console.WriteLine("Preparing the application...");
+ try
+ {
+ // Prompt the user for required input (e.g., email, parameters)
+ Console.WriteLine("\nThis example creates resources in a CloudFormation stack.");
+
+ var userInput = PromptUserForInput();
+
+ // Prompt the user for a name for the CloudFormation stack
+ _stackName = PromptUserForStackName();
+
+ // Deploy the CloudFormation stack
+ var deploySuccess = await DeployCloudFormationStack(_stackName, userInput);
+
+ if (deploySuccess)
+ {
+ // Create additional resources if needed
+ Console.WriteLine("Application preparation complete.");
+ return true;
+ }
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "An error occurred while preparing the application.");
+ }
+ Console.WriteLine("Application preparation failed.");
+ return false;
+ }
+
+ /// <summary>
+ /// Deploys the CloudFormation stack with the necessary resources.
+ /// </summary>
+ /// <param name="stackName">The name of the CloudFormation stack.</param>
+ /// <param name="parameter">Parameter value for the stack.</param>
+ /// <returns>True if the stack was deployed successfully.</returns>
+ private static async Task<bool> DeployCloudFormationStack(string stackName, string parameter)
+ {
+ Console.WriteLine($"\nDeploying CloudFormation stack: {stackName}");
+
+ try
+ {
+ var request = new CreateStackRequest
+ {
+ StackName = stackName,
+ TemplateBody = await File.ReadAllTextAsync(_stackResourcePath),
+ Capabilities = { Capability.CAPABILITY_NAMED_IAM }
+ };
+
+ // If parameters are provided, set them
+ if (!string.IsNullOrWhiteSpace(parameter))
+ {
+ request.Parameters = new List<Parameter>()
+ {
+ new() { ParameterKey = "parameterName", ParameterValue = parameter }
+ };
+ }
+
+ var response = await _amazonCloudFormation.CreateStackAsync(request);
+
+ if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
+ {
+ Console.WriteLine($"CloudFormation stack creation started: {stackName}");
+
+ // Wait for the stack to be in CREATE_COMPLETE state
+ bool stackCreated = await WaitForStackCompletion(response.StackId);
+
+ if (stackCreated)
+ {
+ // Retrieve the output values
+ var success = await GetStackOutputs(response.StackId);
+ return success;
+ }
+ else
+ {
+ _logger.LogError($"CloudFormation stack creation failed: {stackName}");
+ return false;
+ }
+ }
+ else
+ {
+ _logger.LogError($"Failed to create CloudFormation stack: {stackName}");
+ return false;
+ }
+ }
+ catch (AlreadyExistsException)
+ {
+ _logger.LogWarning($"CloudFormation stack '{stackName}' already exists. Please provide a unique name.");
+ var newStackName = PromptUserForStackName();
+ return await DeployCloudFormationStack(newStackName, parameter);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, $"An error occurred while deploying the CloudFormation stack: {stackName}");
+ return false;
+ }
+ }
+
+ /// <summary>
+ /// Waits for the CloudFormation stack to be in the CREATE_COMPLETE state.
+ /// </summary>
+ /// <param name="stackId">The ID of the CloudFormation stack.</param>
+ /// <returns>True if the stack was created successfully.</returns>
+ private static async Task<bool> WaitForStackCompletion(string stackId)
+ {
+ int retryCount = 0;
+ const int maxRetries = 10;
+ const int retryDelay = 30000; // 30 seconds.
+
+ while (retryCount < maxRetries)
+ {
+ var describeStacksRequest = new DescribeStacksRequest
+ {
+ StackName = stackId
+ };
+
+ var describeStacksResponse = await _amazonCloudFormation.DescribeStacksAsync(describeStacksRequest);
+
+ if (describeStacksResponse.Stacks.Count > 0)
+ {
+ if (describeStacksResponse.Stacks[0].StackStatus == StackStatus.CREATE_COMPLETE)
+ {
+ Console.WriteLine("CloudFormation stack creation complete.");
+ return true;
+ }
+ if (describeStacksResponse.Stacks[0].StackStatus == StackStatus.CREATE_FAILED ||
+ describeStacksResponse.Stacks[0].StackStatus == StackStatus.ROLLBACK_COMPLETE)
+ {
+ Console.WriteLine("CloudFormation stack creation failed.");
+ return false;
+ }
+ }
+
+ Console.WriteLine("Waiting for CloudFormation stack creation to complete...");
+ await Task.Delay(retryDelay);
+ retryCount++;
+ }
+
+ _logger.LogError("Timed out waiting for CloudFormation stack creation to complete.");
+ return false;
+ }
+
+ ///
+ /// Retrieves the output values from the CloudFormation stack.
+ ///
+ /// The ID of the CloudFormation stack.
+ private static async Task GetStackOutputs(string stackId)
+ {
+ try
+ {
+ var describeStacksRequest = new DescribeStacksRequest { StackName = stackId };
+
+ var describeStacksResponse =
+ await _amazonCloudFormation.DescribeStacksAsync(describeStacksRequest);
+
+ if (describeStacksResponse.Stacks.Count > 0)
+ {
+ var stack = describeStacksResponse.Stacks[0];
+ _roleArn = GetStackOutputValue(stack, "RoleARN");
+ _targetArn = GetStackOutputValue(stack, "TargetARN");
+ return true;
+ }
+ else
+ {
+ _logger.LogError($"No stack found for stack outputs: {stackId}");
+ return false;
+ }
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(
+ ex, $"Failed to retrieve CloudFormation stack outputs: {stackId}");
+ return false;
+ }
+ }
+
+ ///
+ /// Get an output value by key from a CloudFormation stack.
+ ///
+ /// The CloudFormation stack.
+ /// The key of the output.
+ /// The value as a string.
+ private static string GetStackOutputValue(Stack stack, string outputKey)
+ {
+ var output = stack.Outputs.First(o => o.OutputKey == outputKey);
+ var outputValue = output.OutputValue;
+ Console.WriteLine($"Stack output {outputKey}: {outputValue}");
+ return outputValue;
+ }
+
+ /// <summary>
+ /// Cleans up the resources created during the scenario.
+ /// </summary>
+ /// <returns>True if the cleanup was successful.</returns>
+ public static async Task<bool> Cleanup()
+ {
+ // Prompt the user to confirm cleanup.
+ var cleanup = !_interactive || GetYesNoResponse(
+ "Do you want to delete all resources created by this scenario? (y/n) ");
+ if (cleanup)
+ {
+ try
+ {
+ // Delete scenario-specific resources first
+
+ // Destroy the CloudFormation stack and wait for it to be removed.
+ var stackDeleteSuccess = await DeleteCloudFormationStack(_stackName, false);
+
+ return stackDeleteSuccess;
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex,
+ "An error occurred while cleaning up the resources.");
+ return false;
+ }
+ }
+ _logger.LogInformation("{Service} scenario is complete.");
+ return true;
+ }
+
+ ///
+ /// Delete the resources in the stack and wait for confirmation.
+ ///
+ /// The name of the stack.
+ /// True to force delete the stack.
+ /// True if successful.
+ private static async Task DeleteCloudFormationStack(string stackName, bool forceDelete)
+ {
+ var request = new DeleteStackRequest
+ {
+ StackName = stackName,
+ };
+
+ if (forceDelete)
+ {
+ request.DeletionMode = DeletionMode.FORCE_DELETE_STACK;
+ }
+
+ await _amazonCloudFormation.DeleteStackAsync(request);
+ Console.WriteLine($"CloudFormation stack '{_stackName}' is being deleted. This may take a few minutes.");
+
+ bool stackDeleted = await WaitForStackDeletion(_stackName, forceDelete);
+
+ if (stackDeleted)
+ {
+ Console.WriteLine($"CloudFormation stack '{_stackName}' has been deleted.");
+ return true;
+ }
+ else
+ {
+ _logger.LogError($"Failed to delete CloudFormation stack '{_stackName}'.");
+ return false;
+ }
+ }
+
+ ///
+ /// Wait for the stack to be deleted.
+ ///
+ /// The name of the stack.
+ /// True to force delete the stack.
+ /// True if successful.
+ private static async Task WaitForStackDeletion(string stackName, bool forceDelete)
+ {
+ int retryCount = 0;
+ const int maxRetries = 10;
+ const int retryDelay = 30000; // 30 seconds
+
+ while (retryCount < maxRetries)
+ {
+ var describeStacksRequest = new DescribeStacksRequest
+ {
+ StackName = stackName
+ };
+
+ try
+ {
+ var describeStacksResponse = await _amazonCloudFormation.DescribeStacksAsync(describeStacksRequest);
+
+ if (describeStacksResponse.Stacks.Count == 0 || describeStacksResponse.Stacks[0].StackStatus == StackStatus.DELETE_COMPLETE)
+ {
+ return true;
+ }
+ if (!forceDelete && describeStacksResponse.Stacks[0].StackStatus == StackStatus.DELETE_FAILED)
+ {
+ // Try one time to force delete.
+ return await DeleteCloudFormationStack(stackName, true);
+ }
+ }
+ catch (AmazonCloudFormationException ex) when (ex.ErrorCode == "ValidationError")
+ {
+ // Stack does not exist, so it has been successfully deleted.
+ return true;
+ }
+
+ Console.WriteLine($"Waiting for CloudFormation stack '{stackName}' to be deleted...");
+ await Task.Delay(retryDelay);
+ retryCount++;
+ }
+
+ _logger.LogError($"Timed out waiting for CloudFormation stack '{stackName}' to be deleted.");
+ return false;
+ }
+
+ ///
+ /// Helper method to get a yes or no response from the user.
+ ///
+ /// The question string to print on the console.
+ /// True if the user responds with a yes.
+ private static bool GetYesNoResponse(string question)
+ {
+ Console.WriteLine(question);
+ var ynResponse = Console.ReadLine();
+ var response = ynResponse != null && ynResponse.Equals("y", StringComparison.InvariantCultureIgnoreCase);
+ return response;
+ }
+
+ ///
+ /// Prompt the user for a non-empty stack name.
+ ///
+ /// The valid stack name
+ private static string PromptUserForStackName()
+ {
+ Console.WriteLine("Enter a name for the AWS CloudFormation stack: ");
+ if (_interactive)
+ {
+ string stackName = Console.ReadLine()!;
+ var regex = "[a-zA-Z][-a-zA-Z0-9]|arn:[-a-zA-Z0-9:/._+]";
+ if (!Regex.IsMatch(stackName, regex))
+ {
+ Console.WriteLine(
+ $"Invalid stack name. Please use a name that matches the pattern {regex}.");
+ return PromptUserForStackName();
+ }
+
+ return stackName;
+ }
+ // Used when running without user prompts.
+ return _stackName;
+ }
+
+ ///
+ /// Prompt the user for required input.
+ ///
+ /// The user input value
+ private static string PromptUserForInput()
+ {
+ if (_interactive)
+ {
+ Console.WriteLine("Enter required input: ");
+ string input = Console.ReadLine()!;
+ // Add validation as needed
+ return input;
+ }
+ // Used when running without user prompts.
+ return "";
+ }
+}
+// snippet-end:[{Service}.dotnetv4.{Service}Workflow]
+```
+
+## Project Files
+
+### Actions Project (.csproj)
+
+```xml
+
+
+
+ Exe
+ net8.0
+ enable
+ enable
+
+
+
+
+
+
+
+
+
+
+```
+
+### Scenarios Project (.csproj)
+
+```xml
+
+
+
+ Exe
+ net8.0
+ enable
+ enable
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+```
+
+### Tests Project (.csproj)
+
+```xml
+
+
+
+ net8.0
+ enable
+ enable
+ false
+ true
+ $(NoWarn);NETSDK1206
+
+
+
+
+
+
+
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+
+
+
+ PreserveNewest
+ testsettings.json
+
+
+
+
+
+
+
+
+```
+
+## Workflow Phase Structure (Based on Specification)
+
+### Prepare Application Phase
+- **Read specification Setup section** for exact requirements
+- Prompt user for required input (email, parameters, etc.)
+- Prompt user for CloudFormation stack name
+- Deploy CloudFormation stack with resources
+- Wait for stack creation to complete
+- Retrieve stack outputs (ARNs, IDs, etc.)
+- Create additional resources if needed (schedule groups, etc.)
+- Verify setup completion
+
+### Demonstration Phases
+- **Follow specification phases** exactly
+- Implement each phase as a separate method
+- Use wrapper methods for all service operations
+- Prompt user for input as specified
+- Display progress and results
+- Handle errors gracefully
+- Allow user to proceed at their own pace
+
+### Cleanup Phase
+- **Follow specification Cleanup section** guidance
+- Prompt user to confirm cleanup
+- Delete scenario-specific resources first
+- Delete CloudFormation stack
+- Wait for stack deletion to complete
+- Handle deletion errors (retry with force delete if needed)
+- Confirm completion
+
+## CloudFormation Integration
+
+### Stack Deployment
+- Store CloudFormation template path in a constant
+- Template should be in `scenarios/features/{service_feature}/resources/cfn_template.yaml`
+- Use relative path from Scenarios project: `"../../../../../../scenarios/features/{service_feature}/resources/cfn_template.yaml"`
+- Deploy stack with `CAPABILITY_NAMED_IAM` capability
+- Pass user input as stack parameters
+- Handle `AlreadyExistsException` by prompting for new stack name
+
+### Stack Output Retrieval
+- Retrieve outputs after stack creation completes
+- Store output values in static fields for use throughout workflow
+- Common outputs: Role ARNs, Topic ARNs, Resource IDs
+- Display output values to console for user visibility
+
+### Stack Deletion
+- Delete stack during cleanup phase
+- Wait for deletion to complete
+- Handle `DELETE_FAILED` status by retrying with force delete
+- Catch `ValidationError` exception (indicates stack already deleted)
+
+## User Interaction Patterns
+
+### Question Types
+```csharp
+// Yes/No questions
+private static bool GetYesNoResponse(string question)
+{
+ Console.WriteLine(question);
+ var ynResponse = Console.ReadLine();
+ var response = ynResponse != null && ynResponse.Equals("y", StringComparison.InvariantCultureIgnoreCase);
+ return response;
+}
+
+// Text input with validation
+private static string PromptUserForResourceName(string prompt)
+{
+ if (_interactive)
+ {
+ Console.WriteLine(prompt);
+ string resourceName = Console.ReadLine()!;
+ var regex = "[0-9a-zA-Z-_.]+";
+ if (!Regex.IsMatch(resourceName, regex))
+ {
+ Console.WriteLine($"Invalid resource name. Please use a name that matches the pattern {regex}.");
+ return PromptUserForResourceName(prompt);
+ }
+ return resourceName!;
+ }
+ // Used when running without user prompts.
+ return "resource-" + Guid.NewGuid();
+}
+
+// Numeric input
+private static int PromptUserForInteger(string prompt)
+{
+ if (_interactive)
+ {
+ Console.WriteLine(prompt);
+ string stringResponse = Console.ReadLine()!;
+ if (string.IsNullOrWhiteSpace(stringResponse) ||
+ !Int32.TryParse(stringResponse, out var intResponse))
+ {
+ Console.WriteLine($"Invalid integer. ");
+ return PromptUserForInteger(prompt);
+ }
+ return intResponse!;
+ }
+ // Used when running without user prompts.
+ return 1;
+}
+```
+
+### Information Display
+```csharp
+// Section separators
+Console.WriteLine(new string('-', 80));
+
+// Progress indicators
+Console.WriteLine($"✓ Operation completed successfully");
+Console.WriteLine($"Waiting for operation to complete...");
+
+// Formatted output
+Console.WriteLine($"Found {count} items:");
+foreach (var item in items)
+{
+ Console.WriteLine($" - {item}");
+}
+```
+
+## Wrapper Class Pattern
+
+### Wrapper Class Structure
+```csharp
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+// snippet-start:[{Service}.dotnetv4.{Service}Wrapper]
+using Amazon.{Service};
+using Amazon.{Service}.Model;
+using Microsoft.Extensions.Logging;
+
+namespace {Service}Actions;
+
+///
+/// Wrapper class for {AWS Service} operations.
+///
+public class {Service}Wrapper
+{
+ private readonly IAmazon{Service} _amazon{Service};
+ private readonly ILogger<{Service}Wrapper> _logger;
+
+ ///
+ /// Constructor for the {Service}Wrapper class.
+ ///
+ /// The injected {Service} client.
+ /// The injected logger.
+ public {Service}Wrapper(IAmazon{Service} amazon{Service}, ILogger<{Service}Wrapper> logger)
+ {
+ _amazon{Service} = amazon{Service};
+ _logger = logger;
+ }
+
+ // snippet-start:[{Service}.dotnetv4.OperationName]
+ /// <summary>
+ /// Description of what this operation does.
+ /// </summary>
+ /// <param name="paramName">Description of parameter.</param>
+ /// <returns>Description of return value.</returns>
+ public async Task<bool> OperationAsync(string paramName)
+ {
+ try
+ {
+ var request = new OperationRequest
+ {
+ Parameter = paramName
+ };
+
+ var response = await _amazon{Service}.OperationAsync(request);
+
+ Console.WriteLine($"Successfully performed operation.");
+ return true;
+ }
+ catch (ConflictException ex)
+ {
+ _logger.LogError($"Failed to perform operation due to a conflict. {ex.Message}");
+ return false;
+ }
+ catch (ResourceNotFoundException ex)
+ {
+ _logger.LogError($"Resource not found: {ex.Message}");
+ return false;
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError($"An error occurred: {ex.Message}");
+ return false;
+ }
+ }
+ // snippet-end:[{Service}.dotnetv4.OperationName]
+}
+// snippet-end:[{Service}.dotnetv4.{Service}Wrapper]
+```
+
+### Wrapper Method Guidelines
+- Return `bool` for success/failure operations
+- Return specific types for data retrieval operations
+- Log errors using injected logger
+- Display success messages to console
+- Catch specific exceptions first, then general exceptions
+- Include XML documentation for all public methods
+- Use snippet tags for documentation extraction
+
+## Error Handling
+
+### Specification-Based Error Handling
+The specification includes an "Errors" section with specific error codes and handling:
+
+```csharp
+// Example error handling based on specification
+try
+{
+ var response = await _wrapper.CreateResourceAsync();
+ return response;
+}
+catch (ConflictException ex)
+{
+ // Handle as specified: Resource already exists
+ _logger.LogError($"Failed to create resource due to a conflict. {ex.Message}");
+ return false;
+}
+catch (ResourceNotFoundException ex)
+{
+ // Handle as specified: Resource not found
+ _logger.LogError($"Resource not found: {ex.Message}");
+ return true; // May return true if deletion was the goal
+}
+catch (Exception ex)
+{
+ _logger.LogError($"An error occurred: {ex.Message}");
+ return false;
+}
+```
+
+### Workflow Error Handling
+- Wrap main workflow in try-catch block
+- Log errors and initiate cleanup on failure
+- Set `_interactive = false` to skip prompts during error cleanup
+- Ensure cleanup runs in finally block or after error
+
+## Feature Scenario Requirements
+
+### MUST HAVE
+- ✅ Read and implement based on `scenarios/features/{service_feature}/SPECIFICATION.md`
+- ✅ Use multi-project structure (Actions, Scenarios, Tests)
+- ✅ Deploy CloudFormation stack for resource creation
+- ✅ Retrieve and use stack outputs
+- ✅ Use wrapper classes for all AWS operations
+- ✅ Implement proper cleanup with stack deletion
+- ✅ Break workflow into logical phases per specification
+- ✅ Include error handling per specification
+- ✅ Support non-interactive mode for testing
+- ✅ Use file-level namespaces
+- ✅ Include snippet tags for documentation
+
+### Implementation Workflow
+
+1. **Read Specification**: Study `scenarios/features/{service_feature}/SPECIFICATION.md`
+2. **Create Project Structure**: Set up Actions, Scenarios, and Tests projects
+3. **Implement Wrapper**: Create wrapper class with all required operations
+4. **Implement Workflow**: Create workflow class with phases from specification
+5. **Add CloudFormation**: Integrate stack deployment and deletion
+6. **Add User Interaction**: Implement prompts and validation
+7. **Test**: Create integration tests for workflow methods
+8. **Document**: Add README.md with scenario description
+
+## Integration Tests
+
+### Single Integration Test Pattern
+
+Integration tests should use a single test method that verifies no errors are logged:
+
+```csharp
+///
+/// Verifies the scenario with an integration test. No errors should be logged.
+///
+/// Async task.
+[Fact]
+[Trait("Category", "Integration")]
+public async Task TestScenarioIntegration()
+{
+ // Arrange
+ {Service}Workflow._interactive = false;
+
+ var loggerScenarioMock = new Mock<ILogger<{Service}Workflow>>();
+ loggerScenarioMock.Setup(logger => logger.Log(
+ It.Is<LogLevel>(logLevel => logLevel == LogLevel.Error),
+ It.IsAny<EventId>(),
+ It.Is<It.IsAnyType>((@object, @type) => true),
+ It.IsAny<Exception>(),
+ It.IsAny<Func<It.IsAnyType, Exception?, string>>()));
+
+ // Act
+ {Service}Workflow._logger = loggerScenarioMock.Object;
+ {Service}Workflow._wrapper = new {Service}Wrapper(
+ new Amazon{Service}Client(),
+ new Mock<ILogger<{Service}Wrapper>>().Object);
+ {Service}Workflow._amazonCloudFormation = new AmazonCloudFormationClient();
+
+ await {Service}Workflow.RunScenario();
+
+ // Assert no errors logged
+ loggerScenarioMock.Verify(logger => logger.Log(
+ It.Is<LogLevel>(logLevel => logLevel == LogLevel.Error),
+ It.IsAny<EventId>(),
+ It.Is<It.IsAnyType>((@object, @type) => true),
+ It.IsAny<Exception>(),
+ It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
+ Times.Never);
+}
+```
+
+### RunScenario Method
+
+The workflow must include a public RunScenario method for testing:
+
+```csharp
+///
+/// Runs the scenario workflow. Used for testing.
+///
+public static async Task RunScenario()
+{
+ Console.WriteLine(new string('-', 80));
+ Console.WriteLine("Welcome to the {Service} Scenario.");
+ Console.WriteLine(new string('-', 80));
+
+ try
+ {
+ var prepareSuccess = await PrepareApplication();
+
+ if (prepareSuccess)
+ {
+ await ExecutePhase2();
+ }
+
+ await Cleanup();
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "There was a problem with the scenario, initiating cleanup...");
+ _interactive = false;
+ await Cleanup();
+ }
+
+ Console.WriteLine("Scenario completed.");
+}
+```
+
+### Specification Sections to Implement
+- **API Actions Used**: All operations must be in wrapper class
+- **Proposed example structure**: Maps to workflow phases
+- **Setup**: CloudFormation deployment and resource creation
+- **Demonstration**: Core service operations
+- **Examination**: Data analysis and display
+- **Cleanup**: Resource and stack deletion
+- **Errors**: Specific error handling strategies
\ No newline at end of file