diff --git a/README.md b/README.md index 8342e39..86376a1 100644 --- a/README.md +++ b/README.md @@ -409,6 +409,42 @@ Thread-safety for the second use case is achieved by using a ReentrantReadWriteL With all the internal synchronization measures, however, there're still certain multi-threading use cases that are not covered by this library, which might require external synchronizations or other protection measures. This is due to the fact that the execution order of APIs are not determined in async contexts. For example, if user needs to associate a given set of properties with a metric in each thread, the results are not guaranteed since the execution order of `putProperty()` is not determined across threads. In such cases, we recommend using a different MetricsLogger instance for different threads, so that no resources are shared and no thread-safety problem would ever happen. Note that this can often be simplified by using a ThreadLocal variable. +## Aggregation + +### Built in Aggregation + +There are 3 types of aggregation implemented in this library: List, Statistic Sets, and Histograms. + +- List reports all values added to a metric as a list of values. +- Statistic Sets only reports the maximum, minimum, sum, and count of values added to a metric. +- Histograms use the Sparse Exponential Histogram Algorithm (SEH) to place each value added to a metric into a bin which keeps track of how many values have been added to it. A histogram will report the bin values, the count of values in each bin, and a statistic set about the provided values. Note: SEH only accepts values greater than 0. + +There are several ways to set the aggregation type of a metric: +1. Use the `AggregationType` parameter when calling `putMetric` on `MetricsLogger` +``` +MetricsLogger logger = new MetricsLogger(); +logger.putMetric("metric", 1, AggregationType.Histogram); +``` +2. 
By default, `MetricsLogger` will set all metrics that are added using `putMetric` without specifying an aggregation type to use List aggregation. This default behaviour can be changed to any of the other aggregation types by using a setter (it is recommended that this be done before any metrics are added to the logger because trying to change the aggregation type of an existing log will throw an error): +``` +MetricsLogger logger = new MetricsLogger(); +logger.setDefaultAggregationType(AggregationType.StatisticSet); +``` + +### Custom Histograms + +Custom histograms can also be created if the sparse exponential histogram algorithm is not the best for the given data. To do this use the `HistogramMetric` class. + +``` +List<Double> values = Arrays.asList(1.0, 1234.0, 7.0, 100.0); +List<Integer> counts = Arrays.asList(1, 2, 7, 10); + +HistogramMetric histogram = new HistogramMetric(values, counts); + +MetricsLogger logger = new MetricsLogger(); +logger.setMetric("myHistogram", histogram); +``` + ## Examples Check out the [examples](https://github.com/awslabs/aws-embedded-metrics-java/tree/master/examples) directory to get started. 
diff --git a/src/main/java/software/amazon/cloudwatchlogs/emf/logger/MetricsLogger.java b/src/main/java/software/amazon/cloudwatchlogs/emf/logger/MetricsLogger.java index 9d29c0a..6f57e07 100644 --- a/src/main/java/software/amazon/cloudwatchlogs/emf/logger/MetricsLogger.java +++ b/src/main/java/software/amazon/cloudwatchlogs/emf/logger/MetricsLogger.java @@ -359,6 +359,7 @@ public MetricsLogger putMetric(String key, double value, AggregationType aggrega * @throws InvalidMetricException if the metric is invalid */ public MetricsLogger setMetric(String key, Metric value) throws InvalidMetricException { + rwl.readLock().lock(); try { this.context.setMetric(key, value); return this; diff --git a/src/main/java/software/amazon/cloudwatchlogs/emf/model/AggregationType.java b/src/main/java/software/amazon/cloudwatchlogs/emf/model/AggregationType.java index 9b6826d..fc62a08 100644 --- a/src/main/java/software/amazon/cloudwatchlogs/emf/model/AggregationType.java +++ b/src/main/java/software/amazon/cloudwatchlogs/emf/model/AggregationType.java @@ -19,6 +19,7 @@ public enum AggregationType { LIST(0), STATISTIC_SET(1), + HISTOGRAM(2), UNKNOWN_TO_SDK_VERSION(-1); private final int value; diff --git a/src/main/java/software/amazon/cloudwatchlogs/emf/model/Histogram.java b/src/main/java/software/amazon/cloudwatchlogs/emf/model/Histogram.java new file mode 100644 index 0000000..167f5c1 --- /dev/null +++ b/src/main/java/software/amazon/cloudwatchlogs/emf/model/Histogram.java @@ -0,0 +1,160 @@ +/* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package software.amazon.cloudwatchlogs.emf.model; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import software.amazon.cloudwatchlogs.emf.Constants; +import software.amazon.cloudwatchlogs.emf.exception.InvalidMetricException; + +/** Histogram metric type */ +class Histogram extends Statistics { + Histogram(List values, List counts) throws IllegalArgumentException { + if (counts.size() != values.size()) { + throw new IllegalArgumentException("Counts and values must have the same size"); + } + + if (values.stream().anyMatch(n -> n == null) || counts.stream().anyMatch(n -> n == null)) { + throw new IllegalArgumentException("Values and counts cannot contain null values"); + } + + if (!validSize(counts.size())) { + throw new IllegalArgumentException( + String.format( + "Histogram provided with %d bins but CloudWatch will drop Histograms with more than %d bins", + counts.size(), Constants.MAX_DATAPOINTS_PER_METRIC)); + } + + this.max = Collections.max(values); + this.min = Collections.min(values); + this.count = counts.stream().mapToInt(Integer::intValue).sum(); + this.sum = 0d; + for (int i = 0; i < counts.size(); i++) { + this.sum += values.get(i) * counts.get(i); + } + this.counts = counts; + this.values = values; + } + + Histogram() { + count = 0; + sum = 0.; + values = new ArrayList<>(); + counts = new ArrayList<>(); + } + + 
@JsonProperty("Values") + public List values; + + @JsonProperty("Counts") + public List counts; + + @JsonIgnore private boolean reduced = false; + + @JsonIgnore private static final double EPSILON = 0.1; + @JsonIgnore private static final double BIN_SIZE = Math.log(1 + EPSILON); + @JsonIgnore private final Map buckets = new HashMap<>(); + + /** + * @param value the value to add to the histogram + * @throws InvalidMetricException if adding this value would increase the number of bins in the + * histogram to more than {@value Constants#MAX_DATAPOINTS_PER_METRIC} + * @see Constants#MAX_DATAPOINTS_PER_METRIC + */ + @Override + void addValue(double value) throws InvalidMetricException { + reduced = false; + super.addValue(value); + + double bucket = getBucket(value); + if (!buckets.containsKey(bucket) && !validSize(counts.size() + 1)) { + throw new InvalidMetricException( + String.format( + "Adding this value increases the number of bins in this histogram to %d" + + ", CloudWatch will drop any Histogram metrics with more than %d bins", + buckets.size() + 1, Constants.MAX_DATAPOINTS_PER_METRIC)); + } + // Add the value to the appropriate bucket (or create a new bucket if necessary) + buckets.compute( + bucket, + (k, v) -> { + if (v == null) { + return 1; + } else { + return v + 1; + } + }); + } + + /** + * Updates the Values and Counts lists to represent the buckets of this histogram. + * + * @return the reduced histogram + */ + Histogram reduce() { + if (reduced) { + return this; + } + + this.values = new ArrayList<>(buckets.size()); + this.counts = new ArrayList<>(buckets.size()); + + for (Map.Entry entry : buckets.entrySet()) { + this.values.add(entry.getKey()); + this.counts.add(entry.getValue()); + } + + reduced = true; + return this; + } + + /** + * Gets the value of the bucket for the given value. 
+ * + * @param value the value to find the closest bucket for + * @return the value of the bucket the given value goes in + */ + private static double getBucket(double value) { + short index = (short) Math.floor(Math.log(value) / BIN_SIZE); + return Math.exp((index + 0.5) * BIN_SIZE); + } + + private boolean validSize(int size) { + return size <= Constants.MAX_DATAPOINTS_PER_METRIC; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Histogram that = (Histogram) o; + return count == that.count + && that.sum.equals(sum) + && that.max.equals(max) + && that.min.equals(min) + && buckets.equals(that.buckets); + } + + @Override + public int hashCode() { + return super.hashCode() + buckets.hashCode(); + } +} diff --git a/src/main/java/software/amazon/cloudwatchlogs/emf/model/HistogramMetric.java b/src/main/java/software/amazon/cloudwatchlogs/emf/model/HistogramMetric.java new file mode 100644 index 0000000..83901db --- /dev/null +++ b/src/main/java/software/amazon/cloudwatchlogs/emf/model/HistogramMetric.java @@ -0,0 +1,131 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package software.amazon.cloudwatchlogs.emf.model; + +import java.util.LinkedList; +import java.util.List; +import java.util.Queue; +import software.amazon.cloudwatchlogs.emf.Constants; +import software.amazon.cloudwatchlogs.emf.exception.InvalidMetricException; + +/** Represents the Histogram of the EMF schema. */ +public class HistogramMetric extends Metric { + + HistogramMetric( + Unit unit, + StorageResolution storageResolution, + List values, + List counts) + throws IllegalArgumentException { + this(unit, storageResolution, new Histogram(values, counts)); + } + + protected HistogramMetric( + String name, Unit unit, StorageResolution storageResolution, Histogram histogram) { + this.unit = unit; + this.storageResolution = storageResolution; + this.values = histogram; + this.name = name; + } + + HistogramMetric(Unit unit, StorageResolution storageResolution, Histogram histogram) { + this.unit = unit; + this.storageResolution = storageResolution; + this.values = histogram; + } + + @Override + protected Queue> serialize() throws InvalidMetricException { + // Histograms will be rejected from CWL if they have more than + // Constants.MAX_DATAPOINTS_PER_METRIC number of bins. 
Unlike MetricDefinition histograms + // cannot be broken into multiple messages therefore an error is raised to let users know + // their message won't be sent otherwise only this metric will be sent + if (isOversized()) { + throw new InvalidMetricException( + String.format( + "Histogram metric, %s, has %d values which exceeds the maximum amount " + + "of bins allowed, %d, and Histograms cannot be broken into " + + "multiple metrics therefore it will not be published", + name, values.values.size(), Constants.MAX_DATAPOINTS_PER_METRIC)); + } + Queue> metrics = new LinkedList<>(); + metrics.offer(this); + return metrics; + } + + @Override + protected boolean isOversized() { + return values.values.size() > Constants.MAX_DATAPOINTS_PER_METRIC; + } + + @Override + public boolean hasValidValues() { + return values != null && values.count > 0 && !isOversized(); + } + + public static HistogramMetricBuilder builder() { + return new HistogramMetricBuilder(); + } + + public static class HistogramMetricBuilder + extends Metric.MetricBuilder { + + @Override + protected HistogramMetricBuilder getThis() { + return this; + } + + public HistogramMetricBuilder() { + this.values = new Histogram(); + } + + @Override + public Histogram getValues() { + rwl.readLock().lock(); + try { + return values.reduce(); + } finally { + rwl.readLock().unlock(); + } + } + + @Override + public HistogramMetricBuilder addValue(double value) { + rwl.readLock().lock(); + try { + values.addValue(value); + return this; + } finally { + rwl.readLock().unlock(); + } + } + + @Override + public HistogramMetric build() { + rwl.writeLock().lock(); + try { + values.reduce(); + if (name == null) { + return new HistogramMetric(unit, storageResolution, values); + } + return new HistogramMetric(name, unit, storageResolution, values); + } finally { + rwl.writeLock().unlock(); + } + } + } +} diff --git a/src/main/java/software/amazon/cloudwatchlogs/emf/model/Metric.java 
b/src/main/java/software/amazon/cloudwatchlogs/emf/model/Metric.java index 868f46f..3ca7606 100644 --- a/src/main/java/software/amazon/cloudwatchlogs/emf/model/Metric.java +++ b/src/main/java/software/amazon/cloudwatchlogs/emf/model/Metric.java @@ -21,7 +21,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import java.util.LinkedList; +import java.util.Queue; +import java.util.concurrent.locks.ReentrantReadWriteLock; import lombok.AccessLevel; import lombok.Getter; import lombok.NonNull; @@ -52,7 +53,9 @@ public abstract class Metric { @JsonSerialize(using = StorageResolutionSerializer.class) protected StorageResolution storageResolution = StorageResolution.STANDARD; - @JsonIgnore @Getter protected V values; + @JsonIgnore + @Getter(AccessLevel.PROTECTED) + protected V values; /** @return the values of this metric formatted to be flushed */ protected Object getFormattedValues() { @@ -72,10 +75,18 @@ protected Object getFormattedValues() { * @return a list of metrics based off of the values of this metric that aren't too large for * CWL */ - protected abstract LinkedList serialize(); + protected abstract Queue> serialize(); public abstract static class MetricBuilder> extends Metric { + /** + * This lock is used to create an internal sync context for build() method in multi-threaded + * situations. build() acquires write lock, other methods (accessing mutable shared data)) + * acquires read lock. This makes sure build() is executed exclusively, while other methods + * can be executed concurrently. 
+ */ + @JsonIgnore final ReentrantReadWriteLock rwl = new ReentrantReadWriteLock(); + protected abstract T getThis(); /** @@ -93,18 +104,33 @@ public abstract static class MetricBuilder> ext abstract Metric build(); protected T name(@NonNull String name) { - this.name = name; - return getThis(); + rwl.readLock().lock(); + try { + this.name = name; + return getThis(); + } finally { + rwl.readLock().unlock(); + } } public T unit(Unit unit) { - this.unit = unit; - return getThis(); + rwl.readLock().lock(); + try { + this.unit = unit; + return getThis(); + } finally { + rwl.readLock().unlock(); + } } public T storageResolution(StorageResolution storageResolution) { - this.storageResolution = storageResolution; - return getThis(); + rwl.readLock().lock(); + try { + this.storageResolution = storageResolution; + return getThis(); + } finally { + rwl.readLock().unlock(); + } } @Override @@ -113,7 +139,7 @@ public boolean hasValidValues() { } @Override - protected LinkedList serialize() { + protected Queue> serialize() { return build().serialize(); } diff --git a/src/main/java/software/amazon/cloudwatchlogs/emf/model/MetricDefinition.java b/src/main/java/software/amazon/cloudwatchlogs/emf/model/MetricDefinition.java index 9612562..dc996ea 100644 --- a/src/main/java/software/amazon/cloudwatchlogs/emf/model/MetricDefinition.java +++ b/src/main/java/software/amazon/cloudwatchlogs/emf/model/MetricDefinition.java @@ -19,6 +19,7 @@ import java.util.ArrayList; import java.util.LinkedList; import java.util.List; +import java.util.Queue; import lombok.NonNull; import software.amazon.cloudwatchlogs.emf.Constants; @@ -43,8 +44,8 @@ private MetricDefinition( } @Override - protected LinkedList serialize() { - LinkedList metrics = new LinkedList<>(); + protected Queue>> serialize() { + Queue>> metrics = new LinkedList<>(); MetricDefinition metric = this; while (metric != null) { metrics.add(metric.getFirstMetricBatch(Constants.MAX_DATAPOINTS_PER_METRIC)); @@ -117,21 +118,36 @@ public 
MetricDefinitionBuilder() { @Override public MetricDefinitionBuilder addValue(double value) { - this.values.add(value); - return this; + rwl.readLock().lock(); + try { + this.values.add(value); + return this; + } finally { + rwl.readLock().unlock(); + } } public MetricDefinitionBuilder values(@NonNull List values) { - this.values = values; - return this; + rwl.readLock().lock(); + try { + this.values = values; + return this; + } finally { + rwl.readLock().unlock(); + } } @Override public MetricDefinition build() { - if (name == null) { - return new MetricDefinition(unit, storageResolution, values); + rwl.writeLock().lock(); + try { + if (name == null) { + return new MetricDefinition(unit, storageResolution, values); + } + return new MetricDefinition(name, unit, storageResolution, values); + } finally { + rwl.writeLock().unlock(); } - return new MetricDefinition(name, unit, storageResolution, values); } } } diff --git a/src/main/java/software/amazon/cloudwatchlogs/emf/model/MetricDirective.java b/src/main/java/software/amazon/cloudwatchlogs/emf/model/MetricDirective.java index f8951a0..fd49cfb 100644 --- a/src/main/java/software/amazon/cloudwatchlogs/emf/model/MetricDirective.java +++ b/src/main/java/software/amazon/cloudwatchlogs/emf/model/MetricDirective.java @@ -18,10 +18,20 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; -import lombok.*; +import lombok.AccessLevel; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.Setter; +import lombok.With; import software.amazon.cloudwatchlogs.emf.exception.DimensionSetExceededException; import 
software.amazon.cloudwatchlogs.emf.exception.InvalidMetricException; @@ -33,7 +43,7 @@ class MetricDirective { @JsonProperty("Namespace") private String namespace; - @JsonIgnore @Setter @Getter @With private Map metrics; + @JsonIgnore @Setter @Getter @With private Map> metrics; @JsonIgnore @Getter(AccessLevel.PROTECTED) @@ -97,6 +107,9 @@ void putMetric( case STATISTIC_SET: builder = StatisticSet.builder(); break; + case HISTOGRAM: + builder = HistogramMetric.builder(); + break; case LIST: default: builder = MetricDefinition.builder(); @@ -124,13 +137,13 @@ void putMetric( * @param key the name of the metric * @param value the value of the metric */ - void setMetric(String key, Metric value) { + void setMetric(String key, Metric value) { value.setName(key); metrics.put(key, value); } @JsonProperty("Metrics") - Collection getAllMetrics() { + Collection> getAllMetrics() { return metrics.values(); } diff --git a/src/main/java/software/amazon/cloudwatchlogs/emf/model/MetricsContext.java b/src/main/java/software/amazon/cloudwatchlogs/emf/model/MetricsContext.java index 53a3154..a12b358 100644 --- a/src/main/java/software/amazon/cloudwatchlogs/emf/model/MetricsContext.java +++ b/src/main/java/software/amazon/cloudwatchlogs/emf/model/MetricsContext.java @@ -432,15 +432,15 @@ public MetricsContext createCopyWithContext(boolean preserveDimensions) { * @return the serialized strings. 
* @throws JsonProcessingException if there's any object that cannot be serialized */ - public List serialize() throws JsonProcessingException { + public List serialize() throws JsonProcessingException, InvalidMetricException { if (rootNode.metrics().size() <= Constants.MAX_METRICS_PER_EVENT && !anyMetricWithTooManyDataPoints(rootNode)) { return Arrays.asList(this.rootNode.serialize()); } else { List nodes = new ArrayList<>(); - Map metrics = new HashMap<>(); - ArrayList> remainingMetrics = new ArrayList<>(); - PriorityQueue> metricQueue = + Map> metrics = new HashMap<>(); + ArrayList>> remainingMetrics = new ArrayList<>(); + PriorityQueue>> metricQueue = new PriorityQueue<>((x, y) -> Integer.compare(x.size(), y.size())); for (Metric metric : rootNode.metrics().values()) { @@ -458,7 +458,7 @@ public List serialize() throws JsonProcessingException { remainingMetrics.clear(); } - Queue serializedMetrics = metricQueue.poll(); + Queue> serializedMetrics = metricQueue.poll(); Metric firstBatch = serializedMetrics.poll(); metrics.put(firstBatch.getName(), firstBatch); @@ -479,7 +479,7 @@ public List serialize() throws JsonProcessingException { } } - private RootNode buildRootNode(Map metrics) { + private RootNode buildRootNode(Map> metrics) { Metadata metadata = rootNode.getAws(); MetricDirective md = metadata.getCloudWatchMetrics().get(0); Metadata clonedMetadata = diff --git a/src/main/java/software/amazon/cloudwatchlogs/emf/model/RootNode.java b/src/main/java/software/amazon/cloudwatchlogs/emf/model/RootNode.java index 1de12aa..d4a4a73 100644 --- a/src/main/java/software/amazon/cloudwatchlogs/emf/model/RootNode.java +++ b/src/main/java/software/amazon/cloudwatchlogs/emf/model/RootNode.java @@ -83,7 +83,7 @@ Map getDimensions() throws DimensionSetExceededException { return dimensions; } - Map metrics() { + Map> metrics() { return aws.getCloudWatchMetrics().get(0).getMetrics(); } diff --git a/src/main/java/software/amazon/cloudwatchlogs/emf/model/StatisticSet.java 
b/src/main/java/software/amazon/cloudwatchlogs/emf/model/StatisticSet.java index b5b4699..d6231c2 100644 --- a/src/main/java/software/amazon/cloudwatchlogs/emf/model/StatisticSet.java +++ b/src/main/java/software/amazon/cloudwatchlogs/emf/model/StatisticSet.java @@ -17,6 +17,7 @@ package software.amazon.cloudwatchlogs.emf.model; import java.util.LinkedList; +import java.util.Queue; import lombok.NonNull; import software.amazon.cloudwatchlogs.emf.exception.InvalidMetricException; @@ -29,7 +30,8 @@ public class StatisticSet extends Metric { double max, double min, int count, - double sum) { + double sum) + throws IllegalArgumentException { this(unit, storageResolution, new Statistics(max, min, count, sum)); } @@ -51,10 +53,10 @@ protected StatisticSet( } @Override - protected LinkedList serialize() throws InvalidMetricException { + protected Queue> serialize() throws InvalidMetricException { // A statistic set is a complete metric that cannot be broken into smaller pieces therefore // this metric will be the only one in the returned list - LinkedList queue = new LinkedList<>(); + Queue> queue = new LinkedList<>(); queue.add(this); return queue; @@ -87,21 +89,36 @@ public StatisticSetBuilder() { @Override public StatisticSetBuilder addValue(double value) { - this.values.addValue(value); - return this; + rwl.readLock().lock(); + try { + this.values.addValue(value); + return this; + } finally { + rwl.readLock().unlock(); + } } - public StatisticSetBuilder values(@NonNull Statistics values) { - this.values = values; - return this; + StatisticSetBuilder values(@NonNull Statistics values) { + rwl.readLock().lock(); + try { + this.values = values; + return this; + } finally { + rwl.readLock().unlock(); + } } @Override public StatisticSet build() { - if (name == null) { - return new StatisticSet(unit, storageResolution, values); + rwl.writeLock().lock(); + try { + if (name == null) { + return new StatisticSet(unit, storageResolution, values); + } + return new 
StatisticSet(name, unit, storageResolution, values); + } finally { + rwl.writeLock().unlock(); } - return new StatisticSet(name, unit, storageResolution, values); } } } diff --git a/src/main/java/software/amazon/cloudwatchlogs/emf/model/Statistics.java b/src/main/java/software/amazon/cloudwatchlogs/emf/model/Statistics.java index dbf3d5e..2fa30b1 100644 --- a/src/main/java/software/amazon/cloudwatchlogs/emf/model/Statistics.java +++ b/src/main/java/software/amazon/cloudwatchlogs/emf/model/Statistics.java @@ -19,7 +19,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; class Statistics { - Statistics(double max, double min, int count, double sum) { + Statistics(double max, double min, int count, double sum) throws IllegalArgumentException { this.max = max; this.min = min; this.count = count; @@ -47,10 +47,6 @@ class Statistics { @JsonProperty("Sum") public Double sum; - int size() { - return 4; - } - void addValue(double value) { count++; sum += value; @@ -87,4 +83,9 @@ public boolean equals(Object o) { && that.max.equals(max) && that.min.equals(min); } + + @Override + public int hashCode() { + return count + sum.hashCode() + max.hashCode() + min.hashCode(); + } } diff --git a/src/test/java/software/amazon/cloudwatchlogs/emf/model/HistogramMetricTest.java b/src/test/java/software/amazon/cloudwatchlogs/emf/model/HistogramMetricTest.java new file mode 100644 index 0000000..395ef0a --- /dev/null +++ b/src/test/java/software/amazon/cloudwatchlogs/emf/model/HistogramMetricTest.java @@ -0,0 +1,164 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package software.amazon.cloudwatchlogs.emf.model; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.Arrays; +import org.junit.jupiter.api.Test; + +public class HistogramMetricTest { + @Test + public void testSerializeHistogramMetricWithoutUnitWithHighStorageResolution() + throws JsonProcessingException { + ObjectMapper objectMapper = new ObjectMapper(); + + HistogramMetric histogramMetric = + HistogramMetric.builder() + .storageResolution(StorageResolution.HIGH) + .addValue(10) + .name("Time") + .build(); + String metricString = objectMapper.writeValueAsString(histogramMetric); + + assertEquals("{\"Name\":\"Time\",\"Unit\":\"None\",\"StorageResolution\":1}", metricString); + } + + @Test + public void testSerializeHistogramMetricWithUnitWithoutStorageResolution() + throws JsonProcessingException { + ObjectMapper objectMapper = new ObjectMapper(); + HistogramMetric histogramMetric = + HistogramMetric.builder().unit(Unit.MILLISECONDS).name("Time").addValue(10).build(); + String metricString = objectMapper.writeValueAsString(histogramMetric); + + assertEquals("{\"Name\":\"Time\",\"Unit\":\"Milliseconds\"}", metricString); + } + + @Test + public void testSerializeHistogramMetricWithoutUnitWithStandardStorageResolution() + throws JsonProcessingException { + ObjectMapper objectMapper = new ObjectMapper(); + + HistogramMetric histogramMetric = + HistogramMetric.builder() + 
.storageResolution(StorageResolution.STANDARD) + .name("Time") + .addValue(10) + .build(); + String metricString = objectMapper.writeValueAsString(histogramMetric); + + assertEquals("{\"Name\":\"Time\",\"Unit\":\"None\"}", metricString); + } + + @Test + public void testSerializeHistogramMetricWithoutUnit() throws JsonProcessingException { + ObjectMapper objectMapper = new ObjectMapper(); + HistogramMetric histogramMetric = HistogramMetric.builder().name("Time").build(); + String metricString = objectMapper.writeValueAsString(histogramMetric); + + assertEquals("{\"Name\":\"Time\",\"Unit\":\"None\"}", metricString); + } + + @Test + public void testSerializeHistogramMetric() throws JsonProcessingException { + ObjectMapper objectMapper = new ObjectMapper(); + HistogramMetric histogramMetric = + HistogramMetric.builder() + .unit(Unit.MILLISECONDS) + .storageResolution(StorageResolution.HIGH) + .name("Time") + .addValue(10) + .build(); + String metricString = objectMapper.writeValueAsString(histogramMetric); + + assertEquals( + "{\"Name\":\"Time\",\"Unit\":\"Milliseconds\",\"StorageResolution\":1}", + metricString); + } + + @Test + public void testAddValues() { + HistogramMetric.HistogramMetricBuilder builder = HistogramMetric.builder(); + builder.addValue(10); + + assertEquals(1, builder.getValues().count); + assertEquals(10d, builder.getValues().max, 1e-5); + assertEquals(10d, builder.getValues().min, 1e-5); + assertEquals(10d, builder.getValues().sum, 1e-5); + assertEquals(1, builder.getValues().values.size()); + assertEquals(1, builder.getValues().counts.size()); + + builder.addValue(200); + assertEquals(2, builder.getValues().count); + assertEquals(200d, builder.getValues().max, 1e-5); + assertEquals(10d, builder.getValues().min, 1e-5); + assertEquals(210d, builder.getValues().sum, 1e-5); + assertEquals(2, builder.getValues().values.size()); + assertEquals(2, builder.getValues().counts.size()); + } + + @Test + public void testManyAddValues() { + 
HistogramMetric.HistogramMetricBuilder histBuilder = HistogramMetric.builder(); + for (int i = 1; i < 100; i++) { + histBuilder.addValue(i); + } + histBuilder.build(); + } + + @Test + public void testBuildBuilder() { + HistogramMetric histogramMetric = HistogramMetric.builder().addValue(10).build(); + assertEquals(histogramMetric.getValues(), histogramMetric.getValues()); + + assertEquals(histogramMetric.name, null); + histogramMetric.setName("test"); + assertEquals(histogramMetric.name, "test"); + } + + @Test + public void testCreateImmutableHistogramMetric() { + HistogramMetric histogram = + new HistogramMetric( + Unit.NONE, + StorageResolution.STANDARD, + Arrays.asList(10., 20., 30.), + Arrays.asList(1, 2, 3)); + assertEquals(6, histogram.getValues().count); + assertEquals(30d, histogram.getValues().max, 1e-5); + assertEquals(10d, histogram.getValues().min, 1e-5); + assertEquals(140d, histogram.getValues().sum, 1e-5); + assertEquals(3, histogram.getValues().values.size()); + assertEquals(3, histogram.getValues().counts.size()); + } + + @Test + public void testImpossibleHistogramMetric() { + assertThrows( + IllegalArgumentException.class, + () -> + new HistogramMetric( + Unit.NONE, + StorageResolution.STANDARD, + Arrays.asList(10., 20., 30.), + Arrays.asList(10, 20))); // Array Size mismatch + } +} diff --git a/src/test/java/software/amazon/cloudwatchlogs/emf/model/MetricDirectiveTest.java b/src/test/java/software/amazon/cloudwatchlogs/emf/model/MetricDirectiveTest.java index ed67224..db21554 100644 --- a/src/test/java/software/amazon/cloudwatchlogs/emf/model/MetricDirectiveTest.java +++ b/src/test/java/software/amazon/cloudwatchlogs/emf/model/MetricDirectiveTest.java @@ -21,8 +21,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import java.util.Arrays; import java.util.Collections; -import org.junit.Test; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import 
software.amazon.cloudwatchlogs.emf.exception.DimensionSetExceededException; import software.amazon.cloudwatchlogs.emf.exception.InvalidDimensionException; import software.amazon.cloudwatchlogs.emf.exception.InvalidMetricException; @@ -312,4 +312,52 @@ void testSetMetricStatisticSetThenPutMetric() metricDirective.putMetric("Metric", 1.); }); } + + @Test + void testSetMetricHistogramReplacement() + throws JsonProcessingException, InvalidDimensionException, + DimensionSetExceededException { + MetricDirective metricDirective = new MetricDirective(); + metricDirective.putMetric("Metric", 10.); + Assertions.assertEquals( + MetricDefinition.MetricDefinitionBuilder.class, + metricDirective.getMetrics().get("Metric").getClass()); + + HistogramMetric hist1 = HistogramMetric.builder().addValue(7.).addValue(2.5).build(); + + metricDirective.setMetric("Metric", hist1); + Assertions.assertEquals( + HistogramMetric.class, metricDirective.getMetrics().get("Metric").getClass()); + Assertions.assertEquals( + 2, ((Histogram) metricDirective.getMetrics().get("Metric").getValues()).count); + + HistogramMetric hist2 = HistogramMetric.builder().addValue(7.).build(); + + metricDirective.setMetric("Metric", hist2); + Assertions.assertEquals( + HistogramMetric.class, metricDirective.getMetrics().get("Metric").getClass()); + Assertions.assertEquals( + 1, ((Histogram) metricDirective.getMetrics().get("Metric").getValues()).count); + } + + @Test + void testSetMetricHistogramThenPutMetric() + throws JsonProcessingException, InvalidDimensionException, + DimensionSetExceededException { + MetricDirective metricDirective = new MetricDirective(); + + HistogramMetric hist1 = HistogramMetric.builder().addValue(7.).build(); + + metricDirective.setMetric("Metric", hist1); + Assertions.assertEquals( + HistogramMetric.class, metricDirective.getMetrics().get("Metric").getClass()); + Assertions.assertEquals( + 1, ((Histogram) metricDirective.getMetrics().get("Metric").getValues()).count); + + 
Assertions.assertThrows( + InvalidMetricException.class, + () -> { + metricDirective.putMetric("Metric", 1.); + }); + } } diff --git a/src/test/java/software/amazon/cloudwatchlogs/emf/model/MetricsContextTest.java b/src/test/java/software/amazon/cloudwatchlogs/emf/model/MetricsContextTest.java index e59a181..0ed2f5a 100644 --- a/src/test/java/software/amazon/cloudwatchlogs/emf/model/MetricsContextTest.java +++ b/src/test/java/software/amazon/cloudwatchlogs/emf/model/MetricsContextTest.java @@ -170,6 +170,27 @@ void testSerializeStatisticSetMetric() throws JsonProcessingException { new Statistics(99., 0., 100, 4950.), statisticSets.get(0).getValues()); } + @Test + void testSerializeHistogramMetric() throws JsonProcessingException { + MetricsContext mc = new MetricsContext(); + int dataPointCount = 100; + String metricName = "metric1"; + for (int i = 0; i < dataPointCount; i++) { + mc.putMetric(metricName, i, AggregationType.HISTOGRAM); + } + + List<String> events = mc.serialize(); + List<HistogramMetric> hists = parseHistogramMetrics(events.get(0)); + Assertions.assertEquals(1, hists.size()); + Histogram hist = hists.get(0).getValues(); + Assertions.assertEquals(100, hist.count); + Assertions.assertEquals(4950, hist.sum, 1e-5); + Assertions.assertEquals(99, hist.max, 1e-5); + Assertions.assertEquals(0., hist.min, 1e-5); + Assertions.assertEquals(34, hist.values.size()); + Assertions.assertEquals(34, hist.counts.size()); + } + @Test void testSetInvalidMetric() { MetricsContext mc = new MetricsContext(); @@ -179,6 +200,12 @@ void testSetInvalidMetric() { mc.setMetric("Metric", StatisticSet.builder().build()); }); + Assertions.assertThrows( + InvalidMetricException.class, + () -> { + mc.setMetric("Metric", HistogramMetric.builder().build()); + }); + Assertions.assertThrows( InvalidMetricException.class, () -> { @@ -290,6 +317,34 @@ private ArrayList<StatisticSet> parseStatisticSetMetrics(String event) return statisticSets; } + @SuppressWarnings("unchecked") + private ArrayList<HistogramMetric> parseHistogramMetrics(String 
event) + throws JsonProcessingException { + Map<String, Object> rootNode = parseRootNode(event); + Map<String, Object> metadata = (Map<String, Object>) rootNode.get("_aws"); + ArrayList<Map<String, Object>> metricDirectives = + (ArrayList<Map<String, Object>>) metadata.get("CloudWatchMetrics"); + ArrayList<Map<String, String>> metrics = + (ArrayList<Map<String, String>>) metricDirectives.get(0).get("Metrics"); + + ArrayList<HistogramMetric> histograms = new ArrayList<>(); + for (Map<String, String> metric : metrics) { + String name = metric.get("Name"); + Unit unit = Unit.fromValue(metric.get("Unit")); + Map<String, Object> value = (Map<String, Object>) rootNode.get(name); + Histogram hist = + new Histogram( + (List<Double>) value.get("Values"), + (List<Integer>) value.get("Counts")); + hist.sum = (Double) value.get("Sum"); + hist.count = (Integer) value.get("Count"); + hist.max = (Double) value.get("Max"); + hist.min = (Double) value.get("Min"); + histograms.add(new HistogramMetric(name, unit, StorageResolution.STANDARD, hist)); + } + return histograms; + } + private Map<String, Object> parseRootNode(String event) throws JsonProcessingException { return new JsonMapper().readValue(event, new TypeReference<Map<String, Object>>() {});