diff --git a/pom.xml b/pom.xml index f57c4e09..bf7c01df 100644 --- a/pom.xml +++ b/pom.xml @@ -357,6 +357,7 @@ ${project.build.directory}/jacoco.exec + ${skipTests} BUNDLE diff --git a/uat/testing-features/src/main/java/com/aws/greengrass/steps/CloudWatchSteps.java b/uat/testing-features/src/main/java/com/aws/greengrass/steps/CloudWatchSteps.java index 3f34988e..e21957e7 100644 --- a/uat/testing-features/src/main/java/com/aws/greengrass/steps/CloudWatchSteps.java +++ b/uat/testing-features/src/main/java/com/aws/greengrass/steps/CloudWatchSteps.java @@ -19,13 +19,15 @@ import software.amazon.awssdk.core.pagination.sync.SdkIterable; import software.amazon.awssdk.services.cloudwatchlogs.CloudWatchLogsClient; import software.amazon.awssdk.services.cloudwatchlogs.model.GetLogEventsRequest; -import software.amazon.awssdk.services.cloudwatchlogs.model.GetLogEventsResponse; import software.amazon.awssdk.services.cloudwatchlogs.model.LogStream; import software.amazon.awssdk.services.cloudwatchlogs.model.OutputLogEvent; import software.amazon.awssdk.services.cloudwatchlogs.model.ResourceNotFoundException; import software.amazon.awssdk.services.cloudwatchlogs.model.ServiceUnavailableException; +import software.amazon.awssdk.services.cloudwatchlogs.paginators.GetLogEventsIterable; +import java.nio.charset.StandardCharsets; import java.text.SimpleDateFormat; +import java.time.Instant; import java.util.ArrayList; import java.util.Comparator; import java.util.Date; @@ -33,6 +35,9 @@ import java.util.Locale; import java.util.TimeZone; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import static org.junit.jupiter.api.Assertions.assertTrue; @ScenarioScoped @@ -137,31 +142,61 @@ public void verifyLogs(int numberOfLogLines, String componentName, String compon } } + @Then("I verify that logs for {word} of type {word} uploaded with a rate greater than {double} MBps") + public void verifyLogRate(String componentName, String componentType, double 
desiredMBps) throws + Exception { + // Logs written by the log generator append a sequence number per log line along with the component name + GetLogEventsRequest request = GetLogEventsRequest.builder() + .logGroupName(getLogGroupName(componentType, componentName)) + .logStreamName(getLogStreamName()) + .startFromHead(true) + .endTime(Instant.now().toEpochMilli()) + .limit(10_000) // limit of 10000 logs + .build(); + GetLogEventsIterable response = cwClient.getLogEventsPaginator(request); + List events = response.events().stream().collect(Collectors.toCollection(ArrayList::new)); + events.sort(Comparator.comparingLong(OutputLogEvent::ingestionTime)); + + OutputLogEvent first = events.get(0); + OutputLogEvent last = events.get(events.size() - 1); + + long totalSizeBytes = events.stream().mapToLong(i -> i.message().getBytes(StandardCharsets.UTF_8).length).sum(); + long ingestionTimeSpanMs = last.ingestionTime() - first.ingestionTime(); + + double bytesPerSecond = totalSizeBytes / (ingestionTimeSpanMs / 1000.0); + + double gotMBps = bytesPerSecond / (1024.0 * 1024.0); + LOGGER.info("Found {} logs uploaded at an average rate of {} MBps", events.size(), gotMBps); + assertTrue(gotMBps >= desiredMBps, + String.format("Got %f MBps uploaded to CloudWatch, but wanted at least %f", gotMBps, desiredMBps)); + } + private boolean haveAllLogsBeenUploaded(int numberOfLogLines, String componentName, String componentType) { // Logs written by the log generator append a sequence number per log line along with the component name GetLogEventsRequest request = GetLogEventsRequest.builder() .logGroupName(getLogGroupName(componentType, componentName)) .logStreamName(getLogStreamName()) - .limit(numberOfLogLines) // limit of 10000 logs (this method could be optimized + .startFromHead(true) + .endTime(Instant.now().toEpochMilli()) + .limit(10_000) // limit of 10000 logs .build(); try { // The OTF watch steps check every 100ms; this avoids hammering the api. 
Ideally OTF // can allow us to configure the check interval rate TimeUnit.SECONDS.sleep(5L); - GetLogEventsResponse response = cwClient.getLogEvents(request); - List events = response.events(); + GetLogEventsIterable response = cwClient.getLogEventsPaginator(request); + List events = response.events().stream().collect(Collectors.toCollection(ArrayList::new)); if (events.size() != numberOfLogLines) { this.lastReceivedCloudWatchEvents = events; + LOGGER.info("Found {} events, not the expected {}", events.size(), numberOfLogLines); return false; } - // events is an unmodifiable list - List copy = new ArrayList<>(events); - copy.sort(Comparator.comparingLong(OutputLogEvent::timestamp)); - return wereThereDuplicatesOrMisses(numberOfLogLines, componentName, copy); + events.sort(Comparator.comparingLong(OutputLogEvent::timestamp)); + return wereThereDuplicatesOrMisses(numberOfLogLines, componentName, events); } catch (ServiceUnavailableException | InterruptedException e) { return false; } diff --git a/uat/testing-features/src/main/resources/greengrass/features/log-manager-1.feature b/uat/testing-features/src/main/resources/greengrass/features/log-manager-1.feature index 7cf5ccc7..b0d3230e 100644 --- a/uat/testing-features/src/main/resources/greengrass/features/log-manager-1.feature +++ b/uat/testing-features/src/main/resources/greengrass/features/log-manager-1.feature @@ -281,4 +281,47 @@ Feature: Greengrass V2 LogManager And the local Greengrass deployment is SUCCEEDED on the device after 180 seconds Then I verify that it created a log group of type GreengrassSystemComponent for component System, with streams within 300 seconds in CloudWatch And I verify that it created a log group of type UserComponent for component UserComponentW, with streams within 300 seconds in CloudWatch - And I verify 10000 logs for UserComponentW of type UserComponent have been uploaded to Cloudwatch within 120 seconds \ No newline at end of file + And I verify 10000 logs for UserComponentW of 
type UserComponent have been uploaded to Cloudwatch within 120 seconds + + Scenario: LogManager-1-T6: Log manager can upload 200,000 logs quickly + Given I create a log directory for component called UserComponentWLogDirectory + And I create a Greengrass deployment with components + | aws.greengrass.Cli | LATEST | + | aws.greengrass.LogManager | classpath:/greengrass/recipes/recipe.yaml | + When I update my Greengrass deployment configuration, setting the component aws.greengrass.LogManager configuration to: + """ + { + "MERGE": { + "logsUploaderConfiguration": { + "componentLogsConfiguration": [ + { + "logFileRegex": "UserComponentW(.*).log", + "logFileDirectoryPath": "${UserComponentWLogDirectory}", + "componentName": "UserComponentW" + } + ] + }, + "periodicUploadIntervalSec": "0.001", + "deprecatedVersionSupport": "false" + } + } + """ + And I deploy the Greengrass deployment configuration + And the Greengrass deployment is COMPLETED on the device after 3 minutes + And I install the component LogGenerator from local store with configuration + """ + { + "MERGE":{ + "LogFileName": "UserComponentW", + "WriteFrequencyMs": "0", + "LogsDirectory": "${UserComponentWLogDirectory}", + "NumberOfLogLines": "200000" + } + } + """ + And the local Greengrass deployment is SUCCEEDED on the device after 180 seconds + And I verify that it created a log group of type UserComponent for component UserComponentW, with streams within 300 seconds in CloudWatch + And I verify 200000 logs for UserComponentW of type UserComponent have been uploaded to Cloudwatch within 120 seconds + # Can achieve ~1.4MBps running on codebuild, presumably due to high upload speed and low ping time. + # This test may fail when run on your own computer as your ping time to CloudWatch will be higher. + And I verify that logs for UserComponentW of type UserComponent uploaded with a rate greater than 1.0 MBps