Changes to match the new style guidelines

Craig Bryant 2014-07-07 16:19:53 -06:00
parent 797c60f567
commit 44851750e6
50 changed files with 3148 additions and 2716 deletions

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
public class EventSpoutConfig extends KafkaSpoutConfig {

View File

@ -1,9 +1,27 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import java.io.Serializable;
import com.hpcloud.configuration.KafkaConsumerConfiguration;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.hpcloud.configuration.KafkaConsumerConfiguration;
import java.io.Serializable;
public class KafkaSpoutConfig implements Serializable {

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
public class MetricSpoutConfig extends KafkaSpoutConfig {

View File

@ -14,19 +14,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.infrastructure.thresholding.DataSourceFactory;
import org.hibernate.validator.constraints.NotEmpty;
import java.util.Set;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import org.hibernate.validator.constraints.NotEmpty;
import com.hpcloud.mon.infrastructure.thresholding.DataSourceFactory;
/**
* Thresholding configuration.
*/

View File

@ -14,16 +14,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.core.util.StatusPrinter;
import com.hpcloud.util.Injector;
import com.hpcloud.util.config.ConfigurationFactory;
import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.StormSubmitter;
import backtype.storm.generated.StormTopology;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.core.util.StatusPrinter;
import com.hpcloud.util.Injector;
import com.hpcloud.util.config.ConfigurationFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -33,7 +37,7 @@ import java.io.File;
* Alarm thresholding engine.
*/
public class ThresholdingEngine {
private static final Logger LOG = LoggerFactory.getLogger(ThresholdingEngine.class);
private static final Logger logger = LoggerFactory.getLogger(ThresholdingEngine.class);
private final ThresholdingConfiguration threshConfig;
private final String topologyName;
@ -44,13 +48,13 @@ public class ThresholdingEngine {
this.threshConfig = threshConfig;
this.topologyName = topologyName;
this.local = local;
LOG.info("local set to {}", local);
logger.info("local set to {}", local);
}
public static final ThresholdingConfiguration configFor(String configFileName) throws Exception {
return ConfigurationFactory.<ThresholdingConfiguration>forClass(ThresholdingConfiguration.class)
.build(new File(configFileName));
return ConfigurationFactory
.<ThresholdingConfiguration>forClass(ThresholdingConfiguration.class).build(
new File(configFileName));
}
public static void main(String... args) throws Exception {
@ -59,15 +63,15 @@ public class ThresholdingEngine {
StatusPrinter.print((LoggerContext) LoggerFactory.getILoggerFactory());
if (args.length < 2) {
LOG.error("Expected configuration file name and topology name arguments");
logger.error("Expected configuration file name and topology name arguments");
System.exit(1);
}
LOG.info("Instantiating ThresholdingEngine with config file: {}, topology: {}",
args[0], args[1]);
logger.info("Instantiating ThresholdingEngine with config file: {}, topology: {}", args[0],
args[1]);
ThresholdingEngine engine = new ThresholdingEngine(configFor(args[0]), args[1],
args.length > 2 ? true : false);
ThresholdingEngine engine =
new ThresholdingEngine(configFor(args[0]), args[1], args.length > 2 ? true : false);
engine.configure();
engine.run();
}
@ -82,10 +86,10 @@ public class ThresholdingEngine {
config.registerSerialization(com.hpcloud.mon.domain.model.SubAlarm.class);
if (local) {
LOG.info("submitting topology {} to local storm cluster", topologyName);
logger.info("submitting topology {} to local storm cluster", topologyName);
new LocalCluster().submitTopology(topologyName, config, topology);
} else {
LOG.info("submitting topology {} to non-local storm cluster", topologyName);
logger.info("submitting topology {} to non-local storm cluster", topologyName);
StormSubmitter.submitTopology(topologyName, config, topology);
}
}

View File

@ -14,9 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import javax.inject.Named;
import com.hpcloud.mon.infrastructure.thresholding.AlarmThresholdingBolt;
import com.hpcloud.mon.infrastructure.thresholding.EventProcessingBolt;
import com.hpcloud.mon.infrastructure.thresholding.EventSpout;
import com.hpcloud.mon.infrastructure.thresholding.MetricAggregationBolt;
import com.hpcloud.mon.infrastructure.thresholding.MetricFilteringBolt;
import com.hpcloud.mon.infrastructure.thresholding.MetricSpout;
import com.hpcloud.mon.infrastructure.thresholding.deserializer.EventDeserializer;
import com.hpcloud.util.Injector;
import backtype.storm.Config;
import backtype.storm.generated.StormTopology;
@ -26,14 +34,8 @@ import backtype.storm.tuple.Fields;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.hpcloud.mon.infrastructure.thresholding.AlarmThresholdingBolt;
import com.hpcloud.mon.infrastructure.thresholding.EventProcessingBolt;
import com.hpcloud.mon.infrastructure.thresholding.EventSpout;
import com.hpcloud.mon.infrastructure.thresholding.MetricAggregationBolt;
import com.hpcloud.mon.infrastructure.thresholding.MetricFilteringBolt;
import com.hpcloud.mon.infrastructure.thresholding.MetricSpout;
import com.hpcloud.mon.infrastructure.thresholding.deserializer.EventDeserializer;
import com.hpcloud.util.Injector;
import javax.inject.Named;
/**
* Configures types for the thresholding topology.
@ -57,8 +59,7 @@ public class TopologyModule extends AbstractModule {
}
@Override
protected void configure() {
}
protected void configure() {}
@Provides
Config stormConfig() {
@ -100,19 +101,19 @@ public class TopologyModule extends AbstractModule {
// MaaS Event -> Events
builder.setBolt("event-bolt", new EventProcessingBolt(), config.eventBoltThreads)
.shuffleGrouping("event-spout")
.setNumTasks(config.eventBoltTasks);
.shuffleGrouping("event-spout").setNumTasks(config.eventBoltTasks);
// Metrics / Event -> Filtering
builder.setBolt("filtering-bolt", new MetricFilteringBolt(config.database),
config.filteringBoltThreads)
.shuffleGrouping("metrics-spout")
builder
.setBolt("filtering-bolt", new MetricFilteringBolt(config.database),
config.filteringBoltThreads).shuffleGrouping("metrics-spout")
.allGrouping("event-bolt", EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID)
.allGrouping("event-bolt", EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID)
.setNumTasks(config.filteringBoltTasks);
// Filtering / Event -> Aggregation
builder.setBolt("aggregation-bolt",
builder
.setBolt("aggregation-bolt",
new MetricAggregationBolt(config.database, config.sporadicMetricNamespaces),
config.aggregationBoltThreads)
.fieldsGrouping("filtering-bolt", new Fields(MetricFilteringBolt.FIELDS[0]))
@ -124,7 +125,8 @@ public class TopologyModule extends AbstractModule {
.setNumTasks(config.aggregationBoltTasks);
// Aggregation / Event -> Thresholding
builder.setBolt("thresholding-bolt",
builder
.setBolt("thresholding-bolt",
new AlarmThresholdingBolt(config.database, config.kafkaProducerConfig),
config.thresholdingBoltThreads)
.fieldsGrouping("aggregation-bolt", new Fields(MetricAggregationBolt.FIELDS[0]))

View File

@ -14,19 +14,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.domain.common.AbstractEntity;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.domain.common.AbstractEntity;
/**
* An alarm comprised of sub-alarms.
*/
@ -43,8 +44,8 @@ public class Alarm extends AbstractEntity {
public Alarm() {
}
public Alarm(String id, String tenantId, String name, String description, AlarmExpression expression,
List<SubAlarm> subAlarms, AlarmState state, boolean actionsEnabled) {
public Alarm(String id, String tenantId, String name, String description,
AlarmExpression expression, List<SubAlarm> subAlarms, AlarmState state, boolean actionsEnabled) {
this.id = id;
this.tenantId = tenantId;
this.name = name;
@ -56,47 +57,59 @@ public class Alarm extends AbstractEntity {
}
static String buildStateChangeReason(AlarmState alarmState, List<String> subAlarmExpressions) {
if (AlarmState.UNDETERMINED.equals(alarmState))
if (AlarmState.UNDETERMINED.equals(alarmState)) {
return String.format("No data was present for the sub-alarms: %s", subAlarmExpressions);
else if (AlarmState.ALARM.equals(alarmState))
} else if (AlarmState.ALARM.equals(alarmState)) {
return String.format("Thresholds were exceeded for the sub-alarms: %s", subAlarmExpressions);
else
} else {
return "The alarm threshold(s) have not been exceeded";
}
}
@Override
public boolean equals(Object obj) {
if (this == obj)
if (this == obj) {
return true;
if (!super.equals(obj))
}
if (!super.equals(obj)) {
return false;
if (getClass() != obj.getClass())
}
if (getClass() != obj.getClass()) {
return false;
}
Alarm other = (Alarm) obj;
if (!compareObjects(expression, other.expression))
if (!compareObjects(expression, other.expression)) {
return false;
if (!compareObjects(name, other.name))
}
if (!compareObjects(name, other.name)) {
return false;
if (!compareObjects(description, other.description))
}
if (!compareObjects(description, other.description)) {
return false;
if (state != other.state)
}
if (state != other.state) {
return false;
if (actionsEnabled != other.actionsEnabled)
}
if (actionsEnabled != other.actionsEnabled) {
return false;
if (!compareObjects(subAlarms, other.subAlarms))
}
if (!compareObjects(subAlarms, other.subAlarms)) {
return false;
if (!compareObjects(tenantId, other.tenantId))
}
if (!compareObjects(tenantId, other.tenantId)) {
return false;
}
return true;
}
private boolean compareObjects(final Object o1,
final Object o2) {
private boolean compareObjects(final Object o1, final Object o2) {
if (o1 == null) {
if (o2 != null)
if (o2 != null) {
return false;
} else if (!o1.equals(o2))
}
} else if (!o1.equals(o2)) {
return false;
}
return true;
}
@ -115,35 +128,42 @@ public class Alarm extends AbstractEntity {
// Handle UNDETERMINED state
if (!unitializedSubAlarms.isEmpty()) {
if (AlarmState.UNDETERMINED.equals(initialState))
if (AlarmState.UNDETERMINED.equals(initialState)) {
return false;
}
state = AlarmState.UNDETERMINED;
stateChangeReason = buildStateChangeReason(state, unitializedSubAlarms);
return true;
}
Map<AlarmSubExpression, Boolean> subExpressionValues = new HashMap<AlarmSubExpression, Boolean>();
for (SubAlarm subAlarm : subAlarms.values())
Map<AlarmSubExpression, Boolean> subExpressionValues =
new HashMap<AlarmSubExpression, Boolean>();
for (SubAlarm subAlarm : subAlarms.values()) {
subExpressionValues.put(subAlarm.getExpression(),
AlarmState.ALARM.equals(subAlarm.getState()));
}
// Handle ALARM state
if (expression.evaluate(subExpressionValues)) {
if (AlarmState.ALARM.equals(initialState))
if (AlarmState.ALARM.equals(initialState)) {
return false;
}
List<String> subAlarmExpressions = new ArrayList<String>();
for (SubAlarm subAlarm : subAlarms.values())
if (AlarmState.ALARM.equals(subAlarm.getState()))
for (SubAlarm subAlarm : subAlarms.values()) {
if (AlarmState.ALARM.equals(subAlarm.getState())) {
subAlarmExpressions.add(subAlarm.getExpression().toString());
}
}
state = AlarmState.ALARM;
stateChangeReason = buildStateChangeReason(state, subAlarmExpressions);
return true;
}
if (AlarmState.OK.equals(initialState))
if (AlarmState.OK.equals(initialState)) {
return false;
}
state = AlarmState.OK;
stateChangeReason = buildStateChangeReason(state, null);
return true;
@ -224,9 +244,10 @@ public class Alarm extends AbstractEntity {
public void setSubAlarms(List<SubAlarm> subAlarms) {
this.subAlarms = new HashMap<String, SubAlarm>();
for (SubAlarm subAlarm : subAlarms)
for (SubAlarm subAlarm : subAlarms) {
this.subAlarms.put(subAlarm.getId(), subAlarm);
}
}
public void setTenantId(String tenantId) {
this.tenantId = tenantId;
@ -234,7 +255,8 @@ public class Alarm extends AbstractEntity {
@Override
public String toString() {
return String.format("Alarm [tenantId=%s, name=%s, description=%s, state=%s, actionsEnabled=%s]", tenantId,
return String.format(
"Alarm [tenantId=%s, name=%s, description=%s, state=%s, actionsEnabled=%s]", tenantId,
name, description, state, actionsEnabled);
}

View File

@ -14,12 +14,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import java.io.Serializable;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import java.io.Serializable;
public class MetricDefinitionAndTenantId implements Serializable {
private static final long serialVersionUID = -4224596705186481749L;
@ -27,8 +28,7 @@ public class MetricDefinitionAndTenantId implements Serializable {
public MetricDefinition metricDefinition;
public String tenantId;
public MetricDefinitionAndTenantId(MetricDefinition metricDefinition,
String tenantId) {
public MetricDefinitionAndTenantId(MetricDefinition metricDefinition, String tenantId) {
this.metricDefinition = metricDefinition;
this.tenantId = tenantId;
}
@ -36,42 +36,51 @@ public class MetricDefinitionAndTenantId implements Serializable {
@Override
public int hashCode() {
int result = 0;
if (this.metricDefinition != null)
if (this.metricDefinition != null) {
result += this.metricDefinition.hashCode();
if (this.tenantId != null)
}
if (this.tenantId != null) {
result = result * 31 + this.tenantId.hashCode();
}
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
if (this == obj) {
return true;
if (obj == null)
}
if (obj == null) {
return false;
if (getClass() != obj.getClass())
}
if (getClass() != obj.getClass()) {
return false;
}
final MetricDefinitionAndTenantId other = (MetricDefinitionAndTenantId) obj;
if (!compareObjects(this.tenantId, other.tenantId))
if (!compareObjects(this.tenantId, other.tenantId)) {
return false;
if (!compareObjects(this.metricDefinition, other.metricDefinition))
}
if (!compareObjects(this.metricDefinition, other.metricDefinition)) {
return false;
}
return true;
}
private boolean compareObjects(final Object o1,
final Object o2) {
private boolean compareObjects(final Object o1, final Object o2) {
if (o1 == null) {
if (o2 != null)
if (o2 != null) {
return false;
} else if (!o1.equals(o2))
}
} else if (!o1.equals(o2)) {
return false;
}
return true;
}
@Override
public String toString() {
return String.format("MetricDefinitionAndTenantId tenantId=%s metricDefinition=%s", this.tenantId, this.metricDefinition);
return String.format("MetricDefinitionAndTenantId tenantId=%s metricDefinition=%s",
this.tenantId, this.metricDefinition);
}
}

View File

@ -14,8 +14,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -24,17 +27,18 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
/**
* This class is used to find any matching MetricDefinitionAndTenantId instances that match a given MetricDefinitionAndTenantId. This class
* has no way of handling duplicate MetricDefinitionAndTenantIds so it assumes something else handles that issue.
* This class is used to find any matching MetricDefinitionAndTenantId instances that match a given
* MetricDefinitionAndTenantId. This class has no way of handling duplicate
* MetricDefinitionAndTenantIds so it assumes something else handles that issue.
*
* The actual MetricDefinitionAndTenantId is not kept in the last Map in order to save heap space. It is expected that possibly millions
* of metrics may be stored in the Matcher and so by only storing the DimensionPairs instead of the whole MetricDefinitionAndTenantId,
* a significant amount of heap space will be saved thus reducing swapping. The MetricDefinitionAndTenantId is recreated when returned but
* since it will be just sent on and then the reference dropped, the object will be quickly and easily garbage collected. Testing shows
* that this algorithm is faster than keeping the whole MetricDefinitionAndTenantId in the Map.
* The actual MetricDefinitionAndTenantId is not kept in the last Map in order to save heap space.
* It is expected that possibly millions of metrics may be stored in the Matcher and so by only
* storing the DimensionPairs instead of the whole MetricDefinitionAndTenantId, a significant amount
* of heap space will be saved thus reducing swapping. The MetricDefinitionAndTenantId is recreated
* when returned but since it will be just sent on and then the reference dropped, the object will
* be quickly and easily garbage collected. Testing shows that this algorithm is faster than keeping
* the whole MetricDefinitionAndTenantId in the Map.
*/
public class MetricDefinitionAndTenantIdMatcher {
final Map<String, Map<String, Map<DimensionSet, Object>>> byTenantId = new ConcurrentHashMap<>();
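
The javadoc above describes a tenant-id to metric-name to dimension-set lookup that stores only dimension pairs to keep the heap small. A minimal sketch of that nested-map idea follows, using plain JDK types; the class name and the string encoding of dimensions are illustrative assumptions and not part of this commit, which keys the innermost map on DimensionSet objects with a shared placeholder value.

import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

class NestedMetricIndexSketch {
  // tenantId -> metric name -> encoded dimension sets seen for that metric
  private final Map<String, Map<String, Set<String>>> byTenantId = new ConcurrentHashMap<>();

  void add(String tenantId, String metricName, String encodedDimensions) {
    // Same get-then-put pattern the class's own add() method uses.
    Map<String, Set<String>> byMetricName = byTenantId.get(tenantId);
    if (byMetricName == null) {
      byMetricName = new ConcurrentHashMap<>();
      byTenantId.put(tenantId, byMetricName);
    }
    Set<String> byDimensionSet = byMetricName.get(metricName);
    if (byDimensionSet == null) {
      byDimensionSet = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
      byMetricName.put(metricName, byDimensionSet);
    }
    byDimensionSet.add(encodedDimensions);
  }

  boolean contains(String tenantId, String metricName, String encodedDimensions) {
    final Map<String, Set<String>> byMetricName = byTenantId.get(tenantId);
    if (byMetricName == null) {
      return false;
    }
    final Set<String> byDimensionSet = byMetricName.get(metricName);
    return byDimensionSet != null && byDimensionSet.contains(encodedDimensions);
  }
}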
@ -44,17 +48,20 @@ public class MetricDefinitionAndTenantIdMatcher {
private final static List<MetricDefinitionAndTenantId> EMPTY_LIST = Collections.EMPTY_LIST;
public void add(MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
Map<String, Map<DimensionSet, Object>> byMetricName = byTenantId.get(metricDefinitionAndTenantId.tenantId);
Map<String, Map<DimensionSet, Object>> byMetricName =
byTenantId.get(metricDefinitionAndTenantId.tenantId);
if (byMetricName == null) {
byMetricName = new ConcurrentHashMap<>();
byTenantId.put(metricDefinitionAndTenantId.tenantId, byMetricName);
}
Map<DimensionSet, Object> byDimensionSet = byMetricName.get(metricDefinitionAndTenantId.metricDefinition.name);
Map<DimensionSet, Object> byDimensionSet =
byMetricName.get(metricDefinitionAndTenantId.metricDefinition.name);
if (byDimensionSet == null) {
byDimensionSet = new ConcurrentHashMap<>();
byMetricName.put(metricDefinitionAndTenantId.metricDefinition.name, byDimensionSet);
}
final DimensionSet dimensionSet = createDimensionSet(metricDefinitionAndTenantId.metricDefinition);
final DimensionSet dimensionSet =
createDimensionSet(metricDefinitionAndTenantId.metricDefinition);
byDimensionSet.put(dimensionSet, placeHolder);
}
@ -63,90 +70,109 @@ public class MetricDefinitionAndTenantIdMatcher {
}
public boolean remove(MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
final Map<String, Map<DimensionSet, Object>> byMetricName = byTenantId.get(metricDefinitionAndTenantId.tenantId);
if (byMetricName == null)
final Map<String, Map<DimensionSet, Object>> byMetricName =
byTenantId.get(metricDefinitionAndTenantId.tenantId);
if (byMetricName == null) {
return false;
}
final Map<DimensionSet, Object> byDimensionSet = byMetricName.get(metricDefinitionAndTenantId.metricDefinition.name);
if (byDimensionSet == null)
final Map<DimensionSet, Object> byDimensionSet =
byMetricName.get(metricDefinitionAndTenantId.metricDefinition.name);
if (byDimensionSet == null) {
return false;
}
final DimensionSet dimensionSet = createDimensionSet(metricDefinitionAndTenantId.metricDefinition);
final DimensionSet dimensionSet =
createDimensionSet(metricDefinitionAndTenantId.metricDefinition);
final boolean result = byDimensionSet.remove(dimensionSet) != null;
if (result) {
if (byDimensionSet.isEmpty()) {
byMetricName.remove(metricDefinitionAndTenantId.metricDefinition.name);
if (byMetricName.isEmpty())
if (byMetricName.isEmpty()) {
byTenantId.remove(metricDefinitionAndTenantId.tenantId);
}
}
}
return result;
}
public List<MetricDefinitionAndTenantId> match(final MetricDefinitionAndTenantId toMatch) {
final Map<String, Map<DimensionSet, Object>> byMetricName = byTenantId.get(toMatch.tenantId);
if (byMetricName == null)
if (byMetricName == null) {
return EMPTY_LIST;
}
final Map<DimensionSet, Object> byDimensionSet = byMetricName.get(toMatch.metricDefinition.name);
if (byDimensionSet == null)
final Map<DimensionSet, Object> byDimensionSet =
byMetricName.get(toMatch.metricDefinition.name);
if (byDimensionSet == null) {
return EMPTY_LIST;
final DimensionSet[] possibleDimensionSets = createPossibleDimensionPairs(toMatch.metricDefinition);
}
final DimensionSet[] possibleDimensionSets =
createPossibleDimensionPairs(toMatch.metricDefinition);
List<MetricDefinitionAndTenantId> matches = null;
for (final DimensionSet dimensionSet : possibleDimensionSets) {
if (byDimensionSet.containsKey(dimensionSet)) {
if (matches == null)
if (matches == null) {
matches = new ArrayList<>();
}
matches.add(createFromDimensionSet(toMatch, dimensionSet));
}
}
return matches == null ? EMPTY_LIST : matches;
}
private MetricDefinitionAndTenantId createFromDimensionSet(
MetricDefinitionAndTenantId toMatch,
private MetricDefinitionAndTenantId createFromDimensionSet(MetricDefinitionAndTenantId toMatch,
DimensionSet dimensionSet) {
final Map<String, String> dimensions = new HashMap<>(dimensionSet.pairs.length);
for (final DimensionPair pair : dimensionSet.pairs)
for (final DimensionPair pair : dimensionSet.pairs) {
dimensions.put(pair.key, pair.value);
return new MetricDefinitionAndTenantId(new MetricDefinition(toMatch.metricDefinition.name, dimensions), toMatch.tenantId);
}
return new MetricDefinitionAndTenantId(new MetricDefinition(toMatch.metricDefinition.name,
dimensions), toMatch.tenantId);
}
protected DimensionSet[] createPossibleDimensionPairs(MetricDefinition metricDefinition) {
final int dimensionSize = metricDefinition.dimensions == null ? 0 : metricDefinition.dimensions.size();
final int dimensionSize =
metricDefinition.dimensions == null ? 0 : metricDefinition.dimensions.size();
final int size = (int) Math.pow(2, dimensionSize);
final DimensionSet[] result = new DimensionSet[size];
int index = 0;
result[index++] = EMPTY_DIMENSION_SET;
if (dimensionSize == 0)
if (dimensionSize == 0) {
return result;
}
final DimensionPair[] pairs = createPairs(metricDefinition);
for (int i = 0; i < pairs.length; i++)
for (int i = 0; i < pairs.length; i++) {
index = addMore(pairs, i, EMPTY_DIMENSION_SET, result, index);
}
return result;
}
private int addMore(DimensionPair[] pairs, int start,
DimensionSet dimensionSet, DimensionSet[] result, int index) {
private int addMore(DimensionPair[] pairs, int start, DimensionSet dimensionSet,
DimensionSet[] result, int index) {
final DimensionPair[] newPairs = new DimensionPair[dimensionSet.pairs.length + 1];
if (dimensionSet.pairs.length > 0)
if (dimensionSet.pairs.length > 0) {
System.arraycopy(dimensionSet.pairs, 0, newPairs, 0, dimensionSet.pairs.length);
}
newPairs[dimensionSet.pairs.length] = pairs[start];
final DimensionSet thisDimensionSet = new DimensionSet(newPairs);
result[index++] = thisDimensionSet;
for (int i = start + 1; i < pairs.length; i++)
for (int i = start + 1; i < pairs.length; i++) {
index = addMore(pairs, i, thisDimensionSet, result, index);
}
return index;
}
private DimensionPair[] createPairs(MetricDefinition metricDefinition) {
final int dimensionSize = metricDefinition.dimensions == null ? 0 : metricDefinition.dimensions.size();
final int dimensionSize =
metricDefinition.dimensions == null ? 0 : metricDefinition.dimensions.size();
final DimensionPair[] pairs = new DimensionPair[dimensionSize];
if (dimensionSize > 0) { // metricDefinition.dimensions can be null
int index = 0;
for (final Map.Entry<String, String> entry : metricDefinition.dimensions.entrySet())
for (final Map.Entry<String, String> entry : metricDefinition.dimensions.entrySet()) {
pairs[index++] = new DimensionPair(entry.getKey(), entry.getValue());
}
}
return pairs;
}
@ -170,25 +196,32 @@ public class MetricDefinitionAndTenantIdMatcher {
public int hashCode() {
int result = 1;
final int prime = 31;
for (DimensionPair pair : pairs)
for (DimensionPair pair : pairs) {
result = result * prime + pair.hashCode();
}
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
if (this == obj) {
return true;
if (obj == null)
}
if (obj == null) {
return false;
if (getClass() != obj.getClass())
}
if (getClass() != obj.getClass()) {
return false;
}
final DimensionSet other = (DimensionSet) obj;
if (this.pairs.length != other.pairs.length)
if (this.pairs.length != other.pairs.length) {
return false;
for (int i = 0; i < this.pairs.length; i++)
if (!this.pairs[i].equals(other.pairs[i]))
}
for (int i = 0; i < this.pairs.length; i++) {
if (!this.pairs[i].equals(other.pairs[i])) {
return false;
}
}
return true;
}
@ -198,8 +231,9 @@ public class MetricDefinitionAndTenantIdMatcher {
builder.append("DimensionSet [");
boolean first = true;
for (DimensionPair pair : pairs) {
if (!first)
if (!first) {
builder.append(", ");
}
builder.append(pair.toString());
first = false;
}
@ -229,34 +263,39 @@ public class MetricDefinitionAndTenantIdMatcher {
@Override
public boolean equals(Object obj) {
if (this == obj)
if (this == obj) {
return true;
if (obj == null)
}
if (obj == null) {
return false;
if (getClass() != obj.getClass())
}
if (getClass() != obj.getClass()) {
return false;
}
DimensionPair other = (DimensionPair) obj;
return compareStrings(key, other.key) &&
compareStrings(value, other.value);
return compareStrings(key, other.key) && compareStrings(value, other.value);
}
private boolean compareStrings(final String s1,
final String s2) {
if (s1 == s2)
private boolean compareStrings(final String s1, final String s2) {
if (s1 == s2) {
return true;
if (s1 == null)
}
if (s1 == null) {
return false;
}
return s1.equals(s2);
}
@Override
public int compareTo(DimensionPair o) {
int c = this.key.compareTo(o.key);
if (c != 0)
if (c != 0) {
return c;
}
// Handle possible null values. An actual value is bigger than a null
if (this.value == null)
if (this.value == null) {
return o.value == null ? 0 : 1;
}
return this.value.compareTo(o.value);
}

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import com.hpcloud.mon.common.model.alarm.AlarmState;
@ -56,25 +57,33 @@ public class SubAlarm extends AbstractEntity implements Serializable {
@Override
public boolean equals(Object obj) {
if (this == obj)
if (this == obj) {
return true;
if (!super.equals(obj))
}
if (!super.equals(obj)) {
return false;
if (getClass() != obj.getClass())
}
if (getClass() != obj.getClass()) {
return false;
}
SubAlarm other = (SubAlarm) obj;
if (alarmId == null) {
if (other.alarmId != null)
if (other.alarmId != null) {
return false;
} else if (!alarmId.equals(other.alarmId))
}
} else if (!alarmId.equals(other.alarmId)) {
return false;
}
if (expression == null) {
if (other.expression != null)
if (other.expression != null) {
return false;
} else if (!expression.equals(other.expression))
}
} else if (!expression.equals(other.expression)) {
return false;
if (state != other.state)
}
if (state != other.state) {
return false;
}
return true;
}
@ -122,25 +131,30 @@ public class SubAlarm extends AbstractEntity implements Serializable {
@Override
public String toString() {
return String.format("SubAlarm [id=%s, alarmId=%s, expression=%s, state=%s noState=%s]", id, alarmId,
expression, state, noState);
return String.format("SubAlarm [id=%s, alarmId=%s, expression=%s, state=%s noState=%s]", id,
alarmId, expression, state, noState);
}
/**
* Determine if this SubAlarm and 'other' could reuse saved measurements. Reuse is only possible
* if the operator and/or threshold are the only properties of the expression that differ
* Determine if this SubAlarm and 'other' could reuse saved measurements. Reuse is only possible if
* the operator and/or threshold are the only properties of the expression that differ
*
* @param other SubAlarm to compare to
* @return true if 'other' is "compatible", false otherwise
*/
public boolean isCompatible(final SubAlarm other) {
if (!this.expression.getMetricDefinition().equals(other.expression.getMetricDefinition()))
if (!this.expression.getMetricDefinition().equals(other.expression.getMetricDefinition())) {
return false;
if (!this.expression.getFunction().equals(other.expression.getFunction()))
}
if (!this.expression.getFunction().equals(other.expression.getFunction())) {
return false;
if (this.expression.getPeriod() != other.expression.getPeriod())
}
if (this.expression.getPeriod() != other.expression.getPeriod()) {
return false;
if (this.expression.getPeriods() != other.expression.getPeriods())
}
if (this.expression.getPeriods() != other.expression.getPeriods()) {
return false;
}
// Operator and Threshold can vary
return true;
}

View File

@ -14,20 +14,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
package com.hpcloud.mon.domain.model;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.util.stats.SlidingWindowStats;
import com.hpcloud.util.time.TimeResolution;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Aggregates statistics for a specific SubAlarm.
*/
public class SubAlarmStats {
private static final Logger LOG = LoggerFactory.getLogger(SubAlarmStats.class);
private static final Logger logger = LoggerFactory.getLogger(SubAlarmStats.class);
/** Number of slots for future periods that we should collect metrics for. */
private static final int FUTURE_SLOTS = 2;
/** Helps determine how many empty window observations before transitioning to UNDETERMINED. */
@ -48,14 +49,15 @@ public class SubAlarmStats {
slotWidth = subAlarm.getExpression().getPeriod();
this.subAlarm = subAlarm;
this.subAlarm.setNoState(true);
this.stats = new SlidingWindowStats(subAlarm.getExpression().getFunction().toStatistic(),
this.stats =
new SlidingWindowStats(subAlarm.getExpression().getFunction().toStatistic(),
timeResolution, slotWidth, subAlarm.getExpression().getPeriods(), FUTURE_SLOTS,
viewEndTimestamp);
int period = subAlarm.getExpression().getPeriod();
int periodMinutes = period < 60 ? 1 : period / 60; // Assumes the period is in seconds so we
// convert to minutes
emptyWindowObservationThreshold = periodMinutes * subAlarm.getExpression().getPeriods()
* UNDETERMINED_COEFFICIENT;
emptyWindowObservationThreshold =
periodMinutes * subAlarm.getExpression().getPeriods() * UNDETERMINED_COEFFICIENT;
emptyWindowObservations = 0;
}
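
As a worked example of the threshold arithmetic above (hypothetical numbers, not taken from this commit):

// Hypothetical sub-alarm: period = 300 seconds, periods = 3.
int period = 300;
int periodMinutes = period < 60 ? 1 : period / 60; // 300 / 60 = 5
int periods = 3;
// emptyWindowObservationThreshold = periodMinutes * periods * UNDETERMINED_COEFFICIENT
//                                 = 5 * 3 * UNDETERMINED_COEFFICIENT
// The coefficient is declared elsewhere in SubAlarmStats and is not visible in these hunks,
// so the final observation count is left symbolic here.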
@ -69,7 +71,7 @@ public class SubAlarmStats {
try {
return evaluate();
} catch (Exception e) {
LOG.error("Failed to evaluate {}", this, e);
logger.error("Failed to evaluate {}", this, e);
return false;
} finally {
slideWindow(slideToTimestamp);
@ -77,8 +79,8 @@ public class SubAlarmStats {
}
/**
* Just slide the window. Either slideWindow or evaluateAndSlideWindow
* should be called for each time period, but never both
* Just slide the window. Either slideWindow or evaluateAndSlideWindow should be called for each
* time period, but never both
*
* @param slideToTimestamp
*/
@ -102,7 +104,8 @@ public class SubAlarmStats {
@Override
public String toString() {
return String.format(
return String
.format(
"SubAlarmStats [subAlarm=%s, stats=%s, emptyWindowObservations=%s, emptyWindowObservationThreshold=%s]",
subAlarm, stats, emptyWindowObservations, emptyWindowObservationThreshold);
}
@ -115,17 +118,17 @@ public class SubAlarmStats {
boolean thresholdExceeded = false;
boolean hasEmptyWindows = false;
for (double value : values) {
if (Double.isNaN(value))
if (Double.isNaN(value)) {
hasEmptyWindows = true;
else {
} else {
emptyWindowObservations = 0;
// Check if value is OK
if (!subAlarm.getExpression()
.getOperator()
if (!subAlarm.getExpression().getOperator()
.evaluate(value, subAlarm.getExpression().getThreshold())) {
if (!shouldSendStateChange(AlarmState.OK))
if (!shouldSendStateChange(AlarmState.OK)) {
return false;
}
setSubAlarmState(AlarmState.OK);
return true;
} else
@ -134,8 +137,9 @@ public class SubAlarmStats {
}
if (thresholdExceeded && !hasEmptyWindows) {
if (!shouldSendStateChange(AlarmState.ALARM))
if (!shouldSendStateChange(AlarmState.ALARM)) {
return false;
}
setSubAlarmState(AlarmState.ALARM);
return true;
}
@ -143,9 +147,8 @@ public class SubAlarmStats {
// Window is empty at this point
emptyWindowObservations++;
if ((emptyWindowObservations >= emptyWindowObservationThreshold) &&
shouldSendStateChange(AlarmState.UNDETERMINED) &&
!subAlarm.isSporadicMetric()) {
if ((emptyWindowObservations >= emptyWindowObservationThreshold)
&& shouldSendStateChange(AlarmState.UNDETERMINED) && !subAlarm.isSporadicMetric()) {
setSubAlarmState(AlarmState.UNDETERMINED);
return true;
}
@ -165,6 +168,7 @@ public class SubAlarmStats {
/**
* This MUST only be used for compatible SubAlarms, i.e. where
* this.subAlarm.isCompatible(subAlarm) is true
*
* @param subAlarm
*/
public void updateSubAlarm(final SubAlarm subAlarm) {

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.service;
import com.hpcloud.mon.common.model.alarm.AlarmState;

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.service;
import java.util.List;

View File

@ -14,13 +14,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.service;
import java.util.List;
package com.hpcloud.mon.domain.service;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
import java.util.List;
/**
* SubAlarm DAO.
*/

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.service;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
@ -41,34 +42,41 @@ public class SubAlarmMetricDefinition {
final int prime = 31;
int result = 1;
result = prime * result + ((subAlarmId == null) ? 0 : subAlarmId.hashCode());
result = prime * result + ((metricDefinitionAndTenantId == null) ? 0 : metricDefinitionAndTenantId.hashCode());
result =
prime * result
+ ((metricDefinitionAndTenantId == null) ? 0 : metricDefinitionAndTenantId.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
if (this == obj) {
return true;
if (obj == null)
}
if (obj == null) {
return false;
if (getClass() != obj.getClass())
}
if (getClass() != obj.getClass()) {
return false;
}
SubAlarmMetricDefinition other = (SubAlarmMetricDefinition) obj;
return compareObjects(subAlarmId, other.subAlarmId) &&
compareObjects(metricDefinitionAndTenantId, other.metricDefinitionAndTenantId);
return compareObjects(subAlarmId, other.subAlarmId)
&& compareObjects(metricDefinitionAndTenantId, other.metricDefinitionAndTenantId);
}
private boolean compareObjects(final Object o1, final Object o2) {
if (o1 == o2)
if (o1 == o2) {
return true;
if (o1 == null)
}
if (o1 == null) {
return false;
}
return o1.equals(o2);
}
@Override
public String toString() {
return String.format("SubAlarmMetricDefinition subAlarmId=%s metricDefinitionAndTenantId=%s", subAlarmId,
metricDefinitionAndTenantId);
return String.format("SubAlarmMetricDefinition subAlarmId=%s metricDefinitionAndTenantId=%s",
subAlarmId, metricDefinitionAndTenantId);
}
}

View File

@ -14,15 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.service;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.model.SubAlarmStats;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.model.SubAlarmStats;
/**
* SubAlarmStats repository.
*/
@ -34,9 +35,10 @@ public class SubAlarmStatsRepository {
* adds it to the repository.
*/
public void add(SubAlarm subAlarm, long viewEndTimestamp) {
if (!subAlarmStats.containsKey(subAlarm.getId()))
if (!subAlarmStats.containsKey(subAlarm.getId())) {
subAlarmStats.put(subAlarm.getId(), new SubAlarmStats(subAlarm, viewEndTimestamp));
}
}
public Collection<SubAlarmStats> get() {
return subAlarmStats.values();

View File

@ -14,17 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmState;
@ -36,6 +28,15 @@ import com.hpcloud.mon.domain.service.AlarmDAO;
import com.hpcloud.persistence.BeanMapper;
import com.hpcloud.persistence.SqlQueries;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
/**
* Alarm DAO implementation.
*/
@ -63,10 +64,12 @@ public class AlarmDAOImpl implements AlarmDAO {
String subAlarmId = (String) row.get("id");
Map<String, String> dimensions = findDimensionsById(handle, subAlarmId);
AggregateFunction function = AggregateFunction.valueOf((String) row.get("function"));
MetricDefinition metricDef = new MetricDefinition((String) row.get("metric_name"), dimensions);
MetricDefinition metricDef =
new MetricDefinition((String) row.get("metric_name"), dimensions);
AlarmOperator operator = AlarmOperator.valueOf((String) row.get("operator"));
AlarmSubExpression subExpression = new AlarmSubExpression(function, metricDef, operator,
(Double) row.get("threshold"), (Integer) row.get("period"), (Integer) row.get("periods"));
AlarmSubExpression subExpression =
new AlarmSubExpression(function, metricDef, operator, (Double) row.get("threshold"),
(Integer) row.get("period"), (Integer) row.get("periods"));
SubAlarm subAlarm = new SubAlarm(subAlarmId, (String) row.get("alarm_id"), subExpression);
subAlarms.add(subAlarm);
}
@ -79,18 +82,17 @@ public class AlarmDAOImpl implements AlarmDAO {
Handle h = db.open();
try {
Alarm alarm = h.createQuery("select * from alarm where id = :id and deleted_at is null")
.bind("id", id)
.map(new BeanMapper<Alarm>(Alarm.class))
.first();
if (alarm == null)
return alarm;
Alarm alarm =
h.createQuery("select * from alarm where id = :id and deleted_at is null").bind("id", id)
.map(new BeanMapper<Alarm>(Alarm.class)).first();
if (alarm == null) {
return null;
}
alarm.setSubAlarms(subAlarmsForRows(
h,
h.createQuery("select * from sub_alarm where alarm_id = :alarmId")
.bind("alarmId", alarm.getId())
.list()));
.bind("alarmId", alarm.getId()).list()));
return alarm;
} finally {
@ -104,9 +106,7 @@ public class AlarmDAOImpl implements AlarmDAO {
try {
h.createStatement("update alarm set state = :state, updated_at = NOW() where id = :id")
.bind("id", id)
.bind("state", state.toString())
.execute();
.bind("id", id).bind("state", state.toString()).execute();
} finally {
h.close();
}

View File

@ -14,8 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmMetricDefinition;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -23,19 +32,12 @@ import java.util.Map;
import javax.inject.Inject;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmMetricDefinition;
/**
* MetricDefinition DAO implementation.
*/
public class MetricDefinitionDAOImpl implements MetricDefinitionDAO {
private static final String METRIC_DEF_SQL = "select sa.id, a.tenant_id, sa.metric_name, sad.dimensions from alarm as a, sub_alarm as sa "
private static final String METRIC_DEF_SQL =
"select sa.id, a.tenant_id, sa.metric_name, sad.dimensions from alarm as a, sub_alarm as sa "
+ "left join (select sub_alarm_id, group_concat(dimension_name, '=', value) as dimensions from sub_alarm_dimension group by sub_alarm_id) as sad on sa.id = sad.sub_alarm_id "
+ "where a.id = sa.alarm_id and a.deleted_at is null";
@ -65,15 +67,16 @@ public class MetricDefinitionDAOImpl implements MetricDefinitionDAO {
for (String kvStr : dimensionSet.split(",")) {
String[] kv = kvStr.split("=");
if (kv.length > 1) {
if (dimensions == null)
if (dimensions == null) {
dimensions = new HashMap<String, String>();
}
dimensions.put(kv[0], kv[1]);
}
}
}
metricDefs.add(new SubAlarmMetricDefinition(subAlarmId,
new MetricDefinitionAndTenantId(new MetricDefinition(metric_name, dimensions), tenantId)));
metricDefs.add(new SubAlarmMetricDefinition(subAlarmId, new MetricDefinitionAndTenantId(
new MetricDefinition(metric_name, dimensions), tenantId)));
}
return metricDefs;

View File

@ -14,20 +14,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import javax.inject.Singleton;
import org.skife.jdbi.v2.DBI;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Scopes;
import com.hpcloud.mon.domain.service.AlarmDAO;
import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmDAO;
import com.hpcloud.mon.infrastructure.thresholding.DataSourceFactory;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Scopes;
import org.skife.jdbi.v2.DBI;
import javax.inject.Singleton;
/**
* Configures persistence related types.
*/

View File

@ -14,18 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
@ -35,6 +26,16 @@ import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.SubAlarmDAO;
import com.hpcloud.persistence.SqlStatements;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
/**
* SubAlarm DAO implementation.
*/
@ -44,11 +45,13 @@ public class SubAlarmDAOImpl implements SubAlarmDAO {
* table, grouping by the dimension id and counting them to ensure that the number of matched
* dimensions equals the number of actual dimensions in the table for the subscription.
*/
private static final String FIND_BY_METRIC_DEF_SQL = "select sa.* from sub_alarm sa, alarm a, sub_alarm_dimension d "
private static final String FIND_BY_METRIC_DEF_SQL =
"select sa.* from sub_alarm sa, alarm a, sub_alarm_dimension d "
+ "join (%s) v on d.dimension_name = v.dimension_name and d.value = v.value "
+ "where sa.id = d.sub_alarm_id and sa.metric_name = :metric_name and a.tenant_id = :tenant_id and a.id = sa.alarm_id and a.deleted_at is null "
+ "group by d.sub_alarm_id having count(d.sub_alarm_id) = %s";
private static final String FIND_BY_METRIC_DEF_NO_DIMS_SQL = "select sa.* from sub_alarm sa, alarm a where sa.metric_name = :metric_name "
private static final String FIND_BY_METRIC_DEF_NO_DIMS_SQL =
"select sa.* from sub_alarm sa, alarm a where sa.metric_name = :metric_name "
+ "and a.tenant_id = :tenant_id and a.id = sa.alarm_id and a.deleted_at is null and (select count(*) from sub_alarm_dimension where sub_alarm_id = sa.id) = 0";
private final DBI db;
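
To make the dimension matching concrete, here is a hedged sketch of how the formatted query might be assembled for a hypothetical metric with two dimensions; the exact row set produced by SqlStatements.unionAllStatementFor is not shown in this diff, so the union-all text below is an assumption about its shape.

// Hypothetical dimensions {hostname=web01, region=uswest} expanded into an inline row set.
String unionAll =
    "select 'hostname' as dimension_name, 'web01' as value "
        + "union all select 'region' as dimension_name, 'uswest' as value";
// Substituted into FIND_BY_METRIC_DEF_SQL along with the dimension count (2 here), so the
// "group by d.sub_alarm_id having count(d.sub_alarm_id) = 2" clause keeps only sub-alarms
// whose matched dimension rows equal the number of supplied pairs.
String sql = String.format(FIND_BY_METRIC_DEF_SQL, unionAll, 2);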
@ -65,17 +68,20 @@ public class SubAlarmDAOImpl implements SubAlarmDAO {
try {
final MetricDefinition metricDefinition = metricDefinitionTenantId.metricDefinition;
final String sql;
if (metricDefinition.dimensions == null || metricDefinition.dimensions.isEmpty())
if (metricDefinition.dimensions == null || metricDefinition.dimensions.isEmpty()) {
sql = FIND_BY_METRIC_DEF_NO_DIMS_SQL;
else {
String unionAllStatement = SqlStatements.unionAllStatementFor(metricDefinition.dimensions,
"dimension_name", "value");
sql = String.format(FIND_BY_METRIC_DEF_SQL, unionAllStatement,
} else {
String unionAllStatement =
SqlStatements.unionAllStatementFor(metricDefinition.dimensions, "dimension_name",
"value");
sql =
String.format(FIND_BY_METRIC_DEF_SQL, unionAllStatement,
metricDefinition.dimensions.size());
}
Query<Map<String, Object>> query = h.createQuery(sql).bind("metric_name",
metricDefinition.name).bind("tenant_id", metricDefinitionTenantId.tenantId);
Query<Map<String, Object>> query =
h.createQuery(sql).bind("metric_name", metricDefinition.name)
.bind("tenant_id", metricDefinitionTenantId.tenantId);
List<Map<String, Object>> rows = query.list();
List<SubAlarm> subAlarms = new ArrayList<SubAlarm>(rows.size());
@ -83,8 +89,9 @@ public class SubAlarmDAOImpl implements SubAlarmDAO {
String subAlarmId = (String) row.get("id");
AggregateFunction function = AggregateFunction.valueOf((String) row.get("function"));
AlarmOperator operator = AlarmOperator.valueOf((String) row.get("operator"));
AlarmSubExpression subExpression = new AlarmSubExpression(function, metricDefinition,
operator, (Double) row.get("threshold"), (Integer) row.get("period"),
AlarmSubExpression subExpression =
new AlarmSubExpression(function, metricDefinition, operator,
(Double) row.get("threshold"), (Integer) row.get("period"),
(Integer) row.get("periods"));
SubAlarm subAlarm = new SubAlarm(subAlarmId, (String) row.get("alarm_id"), subExpression);

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
public interface AlarmEventForwarder {

View File

@ -14,20 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Tuple;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.ThresholdingConfiguration;
import com.hpcloud.mon.common.event.AlarmStateTransitionedEvent;
@ -43,6 +32,18 @@ import com.hpcloud.streaming.storm.Streams;
import com.hpcloud.util.Injector;
import com.hpcloud.util.Serialization;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Tuple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
/**
* Determines whether an alarm threshold has been exceeded.
* <p/>
@ -57,7 +58,7 @@ import com.hpcloud.util.Serialization;
public class AlarmThresholdingBolt extends BaseRichBolt {
private static final long serialVersionUID = -4126465124017857754L;
private transient Logger LOG;
private transient Logger logger;
private DataSourceFactory dbConfig;
private KafkaProducerConfiguration producerConfiguration;
final Map<String, Alarm> alarms = new HashMap<String, Alarm>();
@ -67,8 +68,7 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
private transient AlarmEventForwarder alarmEventForwarder;
private OutputCollector collector;
public AlarmThresholdingBolt(DataSourceFactory dbConfig,
KafkaProducerConfiguration producerConfig) {
public AlarmThresholdingBolt(DataSourceFactory dbConfig, KafkaProducerConfiguration producerConfig) {
this.dbConfig = dbConfig;
this.producerConfiguration = producerConfig;
}
@ -80,19 +80,19 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
}
public void declareOutputFields(OutputFieldsDeclarer declarer) {}
@Override
public void execute(Tuple tuple) {
LOG.debug("tuple: {}", tuple);
logger.debug("tuple: {}", tuple);
try {
if (Streams.DEFAULT_STREAM_ID.equals(tuple.getSourceStreamId())) {
String alarmId = tuple.getString(0);
Alarm alarm = getOrCreateAlarm(alarmId);
if (alarm == null)
if (alarm == null) {
return;
}
SubAlarm subAlarm = (SubAlarm) tuple.getValue(1);
evaluateThreshold(alarm, subAlarm);
@ -100,13 +100,14 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
String eventType = tuple.getString(0);
String alarmId = tuple.getString(1);
if (EventProcessingBolt.DELETED.equals(eventType))
if (EventProcessingBolt.DELETED.equals(eventType)) {
handleAlarmDeleted(alarmId);
else if (EventProcessingBolt.UPDATED.equals(eventType))
} else if (EventProcessingBolt.UPDATED.equals(eventType)) {
handleAlarmUpdated(alarmId, (AlarmUpdatedEvent) tuple.getValue(2));
}
}
} catch (Exception e) {
LOG.error("Error processing tuple {}", tuple, e);
logger.error("Error processing tuple {}", tuple, e);
} finally {
collector.ack(tuple);
}
@ -115,8 +116,8 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
@Override
@SuppressWarnings("rawtypes")
public void prepare(Map config, TopologyContext context, OutputCollector collector) {
LOG = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
LOG.info("Preparing");
logger = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
logger.info("Preparing");
this.collector = collector;
alertExchange = (String) config.get(ThresholdingConfiguration.ALERTS_EXCHANGE);
alertRoutingKey = (String) config.get(ThresholdingConfiguration.ALERTS_ROUTING_KEY);
@ -126,13 +127,14 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
alarmDAO = Injector.getInstance(AlarmDAO.class);
}
if (alarmEventForwarder == null) {
Injector.registerIfNotBound(AlarmEventForwarder.class, new ProducerModule(this.producerConfiguration));
Injector.registerIfNotBound(AlarmEventForwarder.class, new ProducerModule(
this.producerConfiguration));
alarmEventForwarder = Injector.getInstance(AlarmEventForwarder.class);
}
}
void evaluateThreshold(Alarm alarm, SubAlarm subAlarm) {
LOG.debug("Received state change for {}", subAlarm);
logger.debug("Received state change for {}", subAlarm);
subAlarm.setNoState(false);
alarm.updateSubAlarm(subAlarm);
@ -152,18 +154,18 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
return true;
}
private void changeAlarmState(Alarm alarm, AlarmState initialState,
String stateChangeReason) {
private void changeAlarmState(Alarm alarm, AlarmState initialState, String stateChangeReason) {
alarmDAO.updateState(alarm.getId(), alarm.getState());
LOG.debug("Alarm {} transitioned from {} to {}", alarm, initialState, alarm.getState());
AlarmStateTransitionedEvent event = new AlarmStateTransitionedEvent(alarm.getTenantId(),
alarm.getId(), alarm.getName(), alarm.getDescription(), initialState, alarm.getState(),
alarm.isActionsEnabled(), stateChangeReason, getTimestamp());
logger.debug("Alarm {} transitioned from {} to {}", alarm, initialState, alarm.getState());
AlarmStateTransitionedEvent event =
new AlarmStateTransitionedEvent(alarm.getTenantId(), alarm.getId(), alarm.getName(),
alarm.getDescription(), initialState, alarm.getState(), alarm.isActionsEnabled(),
stateChangeReason, getTimestamp());
try {
alarmEventForwarder.send(alertExchange, alertRoutingKey, Serialization.toJson(event));
} catch (Exception ignore) {
LOG.debug("Failure sending alarm", ignore);
logger.debug("Failure sending alarm", ignore);
}
}
@ -172,14 +174,14 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
}
void handleAlarmDeleted(String alarmId) {
LOG.debug("Received AlarmDeletedEvent for alarm id {}", alarmId);
logger.debug("Received AlarmDeletedEvent for alarm id {}", alarmId);
alarms.remove(alarmId);
}
void handleAlarmUpdated(String alarmId, AlarmUpdatedEvent alarmUpdatedEvent) {
final Alarm oldAlarm = alarms.get(alarmId);
if (oldAlarm == null) {
LOG.debug("Updated Alarm {} not loaded, ignoring");
logger.debug("Updated Alarm {} not loaded, ignoring", alarmId);
return;
}
@ -191,17 +193,20 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
// Now handle the SubAlarms
// First remove the deleted SubAlarms so we don't have to consider them later
for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.oldAlarmSubExpressions.entrySet()) {
LOG.debug("Removing deleted SubAlarm {}", entry.getValue());
if (!oldAlarm.removeSubAlarmById(entry.getKey()))
LOG.error("Did not find removed SubAlarm {}", entry.getValue());
for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.oldAlarmSubExpressions
.entrySet()) {
logger.debug("Removing deleted SubAlarm {}", entry.getValue());
if (!oldAlarm.removeSubAlarmById(entry.getKey())) {
logger.error("Did not find removed SubAlarm {}", entry.getValue());
}
}
// Reuse what we can from the changed SubAlarms
for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.changedSubExpressions.entrySet()) {
for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.changedSubExpressions
.entrySet()) {
final SubAlarm oldSubAlarm = oldAlarm.getSubAlarm(entry.getKey());
if (oldSubAlarm == null) {
LOG.error("Did not find changed SubAlarm {}", entry.getValue());
logger.error("Did not find changed SubAlarm {}", entry.getValue());
continue;
}
final SubAlarm newSubAlarm = new SubAlarm(entry.getKey(), oldAlarm.getId(), entry.getValue());
@ -209,15 +214,16 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
if (!oldSubAlarm.isCompatible(newSubAlarm)) {
newSubAlarm.setNoState(true);
}
LOG.debug("Changing SubAlarm from {} to {}", oldSubAlarm, newSubAlarm);
logger.debug("Changing SubAlarm from {} to {}", oldSubAlarm, newSubAlarm);
oldAlarm.updateSubAlarm(newSubAlarm);
}
// Add the new SubAlarms
for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.newAlarmSubExpressions.entrySet()) {
for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.newAlarmSubExpressions
.entrySet()) {
final SubAlarm newSubAlarm = new SubAlarm(entry.getKey(), oldAlarm.getId(), entry.getValue());
newSubAlarm.setNoState(true);
LOG.debug("Adding SubAlarm {}", newSubAlarm);
logger.debug("Adding SubAlarm {}", newSubAlarm);
oldAlarm.updateSubAlarm(newSubAlarm);
}
@ -232,9 +238,9 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
Alarm alarm = alarms.get(alarmId);
if (alarm == null) {
alarm = alarmDAO.findById(alarmId);
if (alarm == null)
LOG.error("Failed to locate alarm for id {}", alarmId);
else {
if (alarm == null) {
logger.error("Failed to locate alarm for id {}", alarmId);
} else {
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
subAlarm.setNoState(true);
}


@ -14,15 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import java.io.Serializable;
import java.util.Properties;
/**
* This class replaces io.dropwizard.db.DataSourceFactory which currently can't be used
* with Storm because it is not marked Serializable. This class could be deleted and replaced
* by that class when and if io.dropwizard.db.DataSourceFactory is marked Serializable.
* This class replaces io.dropwizard.db.DataSourceFactory which currently can't be used with Storm
* because it is not marked Serializable. This class could be deleted and replaced by that class
* when and if io.dropwizard.db.DataSourceFactory is marked Serializable.
*/
public class DataSourceFactory implements Serializable {


@ -14,21 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
@ -38,6 +26,19 @@ import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.streaming.storm.Logging;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
/**
* Processes events by emitting tuples related to the event.
*
@ -60,7 +61,8 @@ public class EventProcessingBolt extends BaseRichBolt {
/** Stream for metric and sub-alarm specific events. */
public static final String METRIC_SUB_ALARM_EVENT_STREAM_ID = "metric-sub-alarm-events";
public static final String[] ALARM_EVENT_STREAM_FIELDS = new String[] {"eventType", "alarmId", "alarm"};
public static final String[] ALARM_EVENT_STREAM_FIELDS = new String[] {"eventType", "alarmId",
"alarm"};
public static final String[] METRIC_ALARM_EVENT_STREAM_FIELDS = new String[] {"eventType",
"metricDefinitionAndTenantId", "subAlarmId"};
public static final String[] METRIC_SUB_ALARM_EVENT_STREAM_FIELDS = new String[] {"eventType",
@ -71,29 +73,32 @@ public class EventProcessingBolt extends BaseRichBolt {
public static final String UPDATED = "updated";
public static final String RESEND = "resend";
private transient Logger LOG;
private transient Logger logger;
private OutputCollector collector;
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declareStream(ALARM_EVENT_STREAM_ID, new Fields(ALARM_EVENT_STREAM_FIELDS));
declarer.declareStream(METRIC_ALARM_EVENT_STREAM_ID, new Fields(METRIC_ALARM_EVENT_STREAM_FIELDS));
declarer.declareStream(METRIC_SUB_ALARM_EVENT_STREAM_ID, new Fields(METRIC_SUB_ALARM_EVENT_STREAM_FIELDS));
declarer.declareStream(METRIC_ALARM_EVENT_STREAM_ID, new Fields(
METRIC_ALARM_EVENT_STREAM_FIELDS));
declarer.declareStream(METRIC_SUB_ALARM_EVENT_STREAM_ID, new Fields(
METRIC_SUB_ALARM_EVENT_STREAM_FIELDS));
}
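For context, a minimal sketch of how a topology might subscribe to the streams declared above; the component ids, bolt instances and parallelism hints are assumptions, and the project's real wiring lives in TopologyModule, which is not part of this diff. The allGrouping choice mirrors the MetricFilteringBolt Javadoc further down: every bolt task must learn about every alarm event.

import backtype.storm.topology.TopologyBuilder;
import backtype.storm.topology.base.BaseRichBolt;

// Sketch only: ids and parallelism are hypothetical; EventProcessingBolt is assumed to be
// on the classpath (same package).
public final class StreamWiringSketch {
  public static TopologyBuilder wire(BaseRichBolt eventBolt, BaseRichBolt aggregationBolt) {
    final TopologyBuilder builder = new TopologyBuilder();
    // Events are read once and fanned out to every interested bolt task.
    builder.setBolt("event-bolt", eventBolt, 1).shuffleGrouping("event-spout");
    builder.setBolt("aggregation-bolt", aggregationBolt, 4)
        // allGrouping: every aggregation task must see every alarm event on these streams.
        .allGrouping("event-bolt", EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID)
        .allGrouping("event-bolt", EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
    return builder;
  }
}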
@Override
public void execute(Tuple tuple) {
try {
Object event = tuple.getValue(0);
LOG.trace("Received event for processing {}", event);
if (event instanceof AlarmCreatedEvent)
logger.trace("Received event for processing {}", event);
if (event instanceof AlarmCreatedEvent) {
handle((AlarmCreatedEvent) event);
else if (event instanceof AlarmDeletedEvent)
} else if (event instanceof AlarmDeletedEvent) {
handle((AlarmDeletedEvent) event);
else if (event instanceof AlarmUpdatedEvent)
} else if (event instanceof AlarmUpdatedEvent) {
handle((AlarmUpdatedEvent) event);
}
} catch (Exception e) {
LOG.error("Error processing tuple {}", tuple, e);
logger.error("Error processing tuple {}", tuple, e);
} finally {
collector.ack(tuple);
}
@ -102,34 +107,40 @@ public class EventProcessingBolt extends BaseRichBolt {
@Override
@SuppressWarnings("rawtypes")
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
LOG = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
LOG.info("Preparing");
logger = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
logger.info("Preparing");
this.collector = collector;
}
void handle(AlarmCreatedEvent event) {
for (Map.Entry<String, AlarmSubExpression> subExpressionEntry : event.alarmSubExpressions.entrySet()) {
sendAddSubAlarm(event.alarmId, subExpressionEntry.getKey(), event.tenantId, subExpressionEntry.getValue());
for (Map.Entry<String, AlarmSubExpression> subExpressionEntry : event.alarmSubExpressions
.entrySet()) {
sendAddSubAlarm(event.alarmId, subExpressionEntry.getKey(), event.tenantId,
subExpressionEntry.getValue());
}
}
private void sendAddSubAlarm(String alarmId, String subAlarmId, String tenantId, AlarmSubExpression alarmSubExpression) {
private void sendAddSubAlarm(String alarmId, String subAlarmId, String tenantId,
AlarmSubExpression alarmSubExpression) {
sendSubAlarm(CREATED, alarmId, subAlarmId, tenantId, alarmSubExpression);
}
private void sendUpdateSubAlarm(String alarmId, String subAlarmId, String tenantId, AlarmSubExpression alarmSubExpression) {
private void sendUpdateSubAlarm(String alarmId, String subAlarmId, String tenantId,
AlarmSubExpression alarmSubExpression) {
sendSubAlarm(UPDATED, alarmId, subAlarmId, tenantId, alarmSubExpression);
}
private void sendResendSubAlarm(String alarmId, String subAlarmId, String tenantId, AlarmSubExpression alarmSubExpression) {
private void sendResendSubAlarm(String alarmId, String subAlarmId, String tenantId,
AlarmSubExpression alarmSubExpression) {
sendSubAlarm(RESEND, alarmId, subAlarmId, tenantId, alarmSubExpression);
}
private void sendSubAlarm(String eventType, String alarmId, String subAlarmId, String tenantId,
AlarmSubExpression alarmSubExpression) {
MetricDefinition metricDef = alarmSubExpression.getMetricDefinition();
collector.emit(METRIC_SUB_ALARM_EVENT_STREAM_ID, new Values(eventType, new MetricDefinitionAndTenantId(metricDef, tenantId),
new SubAlarm(subAlarmId, alarmId, alarmSubExpression)));
collector.emit(METRIC_SUB_ALARM_EVENT_STREAM_ID, new Values(eventType,
new MetricDefinitionAndTenantId(metricDef, tenantId), new SubAlarm(subAlarmId, alarmId,
alarmSubExpression)));
}
void handle(AlarmDeletedEvent event) {
@ -146,9 +157,8 @@ public class EventProcessingBolt extends BaseRichBolt {
}
void handle(AlarmUpdatedEvent event) {
if ((!event.oldAlarmState.equals(event.alarmState) ||
!event.oldAlarmSubExpressions.isEmpty()) && event.changedSubExpressions.isEmpty() &&
event.newAlarmSubExpressions.isEmpty()) {
if ((!event.oldAlarmState.equals(event.alarmState) || !event.oldAlarmSubExpressions.isEmpty())
&& event.changedSubExpressions.isEmpty() && event.newAlarmSubExpressions.isEmpty()) {
for (Map.Entry<String, AlarmSubExpression> entry : event.unchangedSubExpressions.entrySet()) {
sendResendSubAlarm(event.alarmId, entry.getKey(), event.tenantId, entry.getValue());
}


@ -14,23 +14,24 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import java.io.Serializable;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.hpcloud.mon.EventSpoutConfig;
import com.hpcloud.mon.infrastructure.thresholding.deserializer.EventDeserializer;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.EventSpoutConfig;
import com.hpcloud.mon.infrastructure.thresholding.deserializer.EventDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
import java.util.List;
public class EventSpout extends KafkaSpout {
private static final Logger LOG = LoggerFactory.getLogger(EventSpout.class);
private static final Logger logger = LoggerFactory.getLogger(EventSpout.class);
private static final long serialVersionUID = 8457340455857276878L;
@ -39,7 +40,7 @@ public class EventSpout extends KafkaSpout {
public EventSpout(EventSpoutConfig configuration, EventDeserializer deserializer) {
super(configuration);
this.deserializer = deserializer;
LOG.info("EventSpout created");
logger.info("EventSpout created");
}
@Override
@ -54,7 +55,7 @@ public class EventSpout extends KafkaSpout {
for (final List<?> event : events) {
final Object eventToSend = event.get(0);
if (!(eventToSend instanceof Serializable)) {
LOG.error("Class {} is not Serializable: {}", eventToSend.getClass(), eventToSend);
logger.error("Class {} is not Serializable: {}", eventToSend.getClass(), eventToSend);
continue;
}
collector.emit(new Values(eventToSend));


@ -14,13 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.configuration.KafkaProducerProperties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -28,7 +31,7 @@ import java.util.Properties;
public class KafkaAlarmEventForwarder implements AlarmEventForwarder {
private static final Logger LOG = LoggerFactory.getLogger(KafkaAlarmEventForwarder.class);
private static final Logger logger = LoggerFactory.getLogger(KafkaAlarmEventForwarder.class);
private final Producer<String, String> producer;
@ -43,9 +46,10 @@ public class KafkaAlarmEventForwarder implements AlarmEventForwarder {
@Override
public void send(String alertExchange, String alertRoutingKey, String json) {
LOG.debug("sending alertExchange: {}, alertRoutingKey: {}, json: {}", alertExchange,
logger.debug("sending alertExchange: {}, alertRoutingKey: {}, json: {}", alertExchange,
alertRoutingKey, json);
final KeyedMessage<String, String> message = new KeyedMessage<String, String>(topic, alertRoutingKey, json);
final KeyedMessage<String, String> message =
new KeyedMessage<String, String>(topic, alertRoutingKey, json);
producer.send(message);
}
@ -53,6 +57,4 @@ public class KafkaAlarmEventForwarder implements AlarmEventForwarder {
public void close() {
producer.close();
}
}
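For orientation, a hedged sketch of how the Kafka 0.8 producer wrapped by this class is typically constructed; the broker list and property values here are placeholders, and the real forwarder derives its settings from the injected KafkaProducerConfiguration (visible in the test fixture near the end of this change) rather than hard-coding them.

import kafka.javaapi.producer.Producer;
import kafka.producer.ProducerConfig;

import java.util.Properties;

// Illustration only: values are assumptions, not this project's configuration path.
public final class ProducerSketch {
  public static Producer<String, String> create(String brokerList) {
    final Properties props = new Properties();
    props.put("metadata.broker.list", brokerList); // e.g. "192.168.10.10:9092"
    props.put("serializer.class", "kafka.serializer.StringEncoder");
    props.put("request.required.acks", "1");
    return new Producer<String, String>(new ProducerConfig(props));
  }
}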


@ -14,15 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import com.hpcloud.configuration.KafkaConsumerProperties;
import com.hpcloud.mon.KafkaSpoutConfig;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.base.BaseRichSpout;
import com.hpcloud.configuration.KafkaConsumerProperties;
import com.hpcloud.mon.KafkaSpoutConfig;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
@ -38,7 +39,7 @@ import java.util.Map;
import java.util.Properties;
public abstract class KafkaSpout extends BaseRichSpout implements Runnable {
private static final Logger LOG = LoggerFactory.getLogger(KafkaSpout.class);
private static final Logger logger = LoggerFactory.getLogger(KafkaSpout.class);
private static final long serialVersionUID = 744004533863562119L;
@ -66,24 +67,25 @@ public abstract class KafkaSpout extends BaseRichSpout implements Runnable {
@Override
public void activate() {
LOG.info("Activated");
logger.info("Activated");
if (streams == null) {
Map<String, Integer> topicCountMap = new HashMap<>();
topicCountMap.put(kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic(), new Integer(1));
Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumerConnector.createMessageStreams(topicCountMap);
Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
consumerConnector.createMessageStreams(topicCountMap);
streams = consumerMap.get(kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic());
}
}
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
LOG.info("Opened");
logger.info("Opened");
this.collector = collector;
LOG.info(" topic = " + kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic());
logger.info(" topic = " + kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic());
this.spoutName = String.format("%s-%d", context.getThisComponentId(), context.getThisTaskId());
Properties kafkaProperties = KafkaConsumerProperties.createKafkaProperties(
kafkaSpoutConfig.kafkaConsumerConfiguration);
Properties kafkaProperties =
KafkaConsumerProperties.createKafkaProperties(kafkaSpoutConfig.kafkaConsumerConfiguration);
// Have to use a different consumer.id for each spout so use the storm taskId. Otherwise,
// zookeeper complains about a conflicted ephemeral node when there is more than one spout
// reading from a topic
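The line that applies this comment falls outside the hunk; a hypothetical sketch of the approach it describes (the property name handling and naming scheme are assumptions, not the project's exact code):

import java.util.Properties;

// Hypothetical helper illustrating the comment above: one consumer.id per spout task
// avoids conflicting ephemeral nodes in ZooKeeper when several spouts read one topic.
final class ConsumerIdSketch {
  static void applyUniqueConsumerId(Properties kafkaProperties, String spoutName, int taskId) {
    kafkaProperties.setProperty("consumer.id", spoutName + "-" + taskId);
  }
}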
@ -94,7 +96,7 @@ public abstract class KafkaSpout extends BaseRichSpout implements Runnable {
@Override
public synchronized void deactivate() {
LOG.info("deactivated");
logger.info("deactivated");
this.consumerConnector.shutdown();
this.shouldContinue = false;
// Wake up the reader thread if it is waiting
@ -110,28 +112,29 @@ public abstract class KafkaSpout extends BaseRichSpout implements Runnable {
synchronized (this) {
this.message = message;
// Wake up getMessage() if it is waiting
if (this.waiting)
if (this.waiting) {
notify();
}
while (this.message != null && this.shouldContinue)
try {
wait();
} catch (InterruptedException e) {
LOG.info("Wait interrupted", e);
logger.info("Wait interrupted", e);
}
}
}
}
LOG.info("readerThread {} exited", this.readerThread.getName());
logger.info("readerThread {} exited", this.readerThread.getName());
this.readerThread = null;
}
@Override
public void nextTuple() {
LOG.debug("nextTuple called");
logger.debug("nextTuple called");
checkReaderRunning();
final byte[] message = getMessage();
if (message != null) {
LOG.debug("streams iterator has next");
logger.debug("streams iterator has next");
processMessage(message, collector);
}
}
@ -142,7 +145,7 @@ public abstract class KafkaSpout extends BaseRichSpout implements Runnable {
final String threadName = String.format("%s reader", this.spoutName);
this.readerThread = new Thread(this, threadName);
this.readerThread.start();
LOG.info("Started Reader Thread {}", this.readerThread.getName());
logger.info("Started Reader Thread {}", this.readerThread.getName());
}
}
@ -171,7 +174,7 @@ public abstract class KafkaSpout extends BaseRichSpout implements Runnable {
try {
wait(kafkaSpoutConfig.maxWaitTime);
} catch (InterruptedException e) {
LOG.info("Sleep interrupted", e);
logger.info("Sleep interrupted", e);
}
this.waiting = false;
return tryToGetMessage(); // We might have been woken up because there was a message


@ -14,26 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.Config;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.model.metric.Metric;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
@ -46,6 +29,24 @@ import com.hpcloud.streaming.storm.Streams;
import com.hpcloud.streaming.storm.Tuples;
import com.hpcloud.util.Injector;
import backtype.storm.Config;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Aggregates metrics for individual alarms. Receives metric/alarm tuples and tick tuples, and
* outputs alarm information whenever an alarm's state changes. Concerned with alarms that relate to
@ -71,8 +72,9 @@ public class MetricAggregationBolt extends BaseRichBolt {
public static final String[] METRIC_AGGREGATION_CONTROL_FIELDS = new String[] {"directive"};
public static final String METRICS_BEHIND = "MetricsBehind";
final Map<MetricDefinitionAndTenantId, SubAlarmStatsRepository> subAlarmStatsRepos = new HashMap<>();
private transient Logger LOG;
final Map<MetricDefinitionAndTenantId, SubAlarmStatsRepository> subAlarmStatsRepos =
new HashMap<>();
private transient Logger logger;
private DataSourceFactory dbConfig;
private transient SubAlarmDAO subAlarmDAO;
/** Namespaces for which metrics are received sporadically */
@ -96,38 +98,43 @@ public class MetricAggregationBolt extends BaseRichBolt {
@Override
public void execute(Tuple tuple) {
LOG.debug("tuple: {}", tuple);
logger.debug("tuple: {}", tuple);
try {
if (Tuples.isTickTuple(tuple)) {
evaluateAlarmsAndSlideWindows();
} else {
if (Streams.DEFAULT_STREAM_ID.equals(tuple.getSourceStreamId())) {
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) tuple.getValue(0);
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) tuple.getValue(0);
Metric metric = (Metric) tuple.getValueByField("metric");
aggregateValues(metricDefinitionAndTenantId, metric);
} else if (METRIC_AGGREGATION_CONTROL_STREAM.equals(tuple.getSourceStreamId())) {
processControl(tuple.getString(0));
} else {
String eventType = tuple.getString(0);
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) tuple.getValue(1);
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) tuple.getValue(1);
if (EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
String subAlarmId = tuple.getString(2);
if (EventProcessingBolt.DELETED.equals(eventType))
if (EventProcessingBolt.DELETED.equals(eventType)) {
handleAlarmDeleted(metricDefinitionAndTenantId, subAlarmId);
} else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
}
} else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple
.getSourceStreamId())) {
SubAlarm subAlarm = (SubAlarm) tuple.getValue(2);
if (EventProcessingBolt.CREATED.equals(eventType))
if (EventProcessingBolt.CREATED.equals(eventType)) {
handleAlarmCreated(metricDefinitionAndTenantId, subAlarm);
else if (EventProcessingBolt.UPDATED.equals(eventType))
} else if (EventProcessingBolt.UPDATED.equals(eventType)) {
handleAlarmUpdated(metricDefinitionAndTenantId, subAlarm);
else if (EventProcessingBolt.RESEND.equals(eventType))
} else if (EventProcessingBolt.RESEND.equals(eventType)) {
handleAlarmResend(metricDefinitionAndTenantId, subAlarm);
}
}
}
}
} catch (Exception e) {
LOG.error("Error processing tuple {}", tuple, e);
logger.error("Error processing tuple {}", tuple, e);
} finally {
collector.ack(tuple);
}
@ -135,11 +142,11 @@ public class MetricAggregationBolt extends BaseRichBolt {
private void processControl(final String directive) {
if (METRICS_BEHIND.equals(directive)) {
LOG.debug("Received {}", directive);
logger.debug("Received {}", directive);
this.upToDate = false;
} else {
logger.error("Unknown directive '{}'", directive);
}
else
LOG.error("Unknown directive '{}'", directive);
}
@Override
@ -153,8 +160,8 @@ public class MetricAggregationBolt extends BaseRichBolt {
@Override
@SuppressWarnings("rawtypes")
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
LOG = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
LOG.info("Preparing");
logger = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
logger.info("Preparing");
this.collector = collector;
if (subAlarmDAO == null) {
@ -167,17 +174,21 @@ public class MetricAggregationBolt extends BaseRichBolt {
* Aggregates values for the {@code metric} that are within the periods defined for the alarm.
*/
void aggregateValues(MetricDefinitionAndTenantId metricDefinitionAndTenantId, Metric metric) {
SubAlarmStatsRepository subAlarmStatsRepo = getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId);
if (subAlarmStatsRepo == null || metric == null)
SubAlarmStatsRepository subAlarmStatsRepo =
getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId);
if (subAlarmStatsRepo == null || metric == null) {
return;
}
for (SubAlarmStats stats : subAlarmStatsRepo.get()) {
if (stats.getStats().addValue(metric.value, metric.timestamp))
LOG.trace("Aggregated value {} at {} for {}. Updated {}", metric.value, metric.timestamp,
metricDefinitionAndTenantId, stats.getStats());
else
LOG.warn("Metric is too old, age {} seconds: timestamp {} for {}, {}", currentTimeSeconds() - metric.timestamp,
if (stats.getStats().addValue(metric.value, metric.timestamp)) {
logger.trace("Aggregated value {} at {} for {}. Updated {}", metric.value,
metric.timestamp, metricDefinitionAndTenantId, stats.getStats());
} else {
logger.warn("Metric is too old, age {} seconds: timestamp {} for {}, {}",
currentTimeSeconds() - metric.timestamp, metric.timestamp, metricDefinitionAndTenantId,
stats.getStats());
}
}
}
@ -186,30 +197,31 @@ public class MetricAggregationBolt extends BaseRichBolt {
* ago, then sliding the window to the current time.
*/
void evaluateAlarmsAndSlideWindows() {
LOG.debug("evaluateAlarmsAndSlideWindows called");
logger.debug("evaluateAlarmsAndSlideWindows called");
long newWindowTimestamp = currentTimeSeconds();
for (SubAlarmStatsRepository subAlarmStatsRepo : subAlarmStatsRepos.values())
for (SubAlarmStatsRepository subAlarmStatsRepo : subAlarmStatsRepos.values()) {
for (SubAlarmStats subAlarmStats : subAlarmStatsRepo.get()) {
if (upToDate) {
LOG.debug("Evaluating {}", subAlarmStats);
logger.debug("Evaluating {}", subAlarmStats);
if (subAlarmStats.evaluateAndSlideWindow(newWindowTimestamp)) {
LOG.debug("Alarm state changed for {}", subAlarmStats);
collector.emit(new Values(subAlarmStats.getSubAlarm().getAlarmId(),
subAlarmStats.getSubAlarm()));
logger.debug("Alarm state changed for {}", subAlarmStats);
collector.emit(new Values(subAlarmStats.getSubAlarm().getAlarmId(), subAlarmStats
.getSubAlarm()));
}
}
else {
} else {
subAlarmStats.slideWindow(newWindowTimestamp);
}
}
}
if (!upToDate) {
LOG.info("Did not evaluate SubAlarms because Metrics are not up to date");
logger.info("Did not evaluate SubAlarms because Metrics are not up to date");
upToDate = true;
}
}
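The tick tuples handled above are the standard Storm mechanism for periodic work. As a hedged illustration (the bolt's actual getComponentConfiguration is outside the hunks shown, and the test at the end of this change drives the interval through MetricAggregationBolt.TICK_TUPLE_SECONDS_KEY), a bolt usually requests them like this; the 60-second value is an assumption.

import backtype.storm.Config;
import backtype.storm.topology.base.BaseRichBolt;

import java.util.HashMap;
import java.util.Map;

// Sketch only: shows the standard tick-tuple request, not this bolt's real configuration.
public abstract class TickTupleSketch extends BaseRichBolt {
  @Override
  public Map<String, Object> getComponentConfiguration() {
    final Map<String, Object> conf = new HashMap<>();
    // Ask Storm to deliver a tick tuple to execute() every 60 seconds.
    conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 60);
    return conf;
  }
}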
/**
* Only used for testing.
*
* @return the current time in seconds
*/
protected long currentTimeSeconds() {
@ -217,21 +229,23 @@ public class MetricAggregationBolt extends BaseRichBolt {
}
/**
* Returns an existing or newly created SubAlarmStatsRepository for the {@code metricDefinitionAndTenantId}.
* Newly created SubAlarmStatsRepositories are initialized with stats whose view ends one minute
* from now.
* Returns an existing or newly created SubAlarmStatsRepository for the
* {@code metricDefinitionAndTenantId}. Newly created SubAlarmStatsRepositories are initialized
* with stats whose view ends one minute from now.
*/
SubAlarmStatsRepository getOrCreateSubAlarmStatsRepo(MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
SubAlarmStatsRepository getOrCreateSubAlarmStatsRepo(
MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
SubAlarmStatsRepository subAlarmStatsRepo = subAlarmStatsRepos.get(metricDefinitionAndTenantId);
if (subAlarmStatsRepo == null) {
List<SubAlarm> subAlarms = subAlarmDAO.find(metricDefinitionAndTenantId);
if (subAlarms.isEmpty())
LOG.warn("Failed to find sub alarms for {}", metricDefinitionAndTenantId);
else {
LOG.debug("Creating SubAlarmStats for {}", metricDefinitionAndTenantId);
if (subAlarms.isEmpty()) {
logger.warn("Failed to find sub alarms for {}", metricDefinitionAndTenantId);
} else {
logger.debug("Creating SubAlarmStats for {}", metricDefinitionAndTenantId);
for (SubAlarm subAlarm : subAlarms) {
// TODO should treat metric def name prefix like a namespace
subAlarm.setSporadicMetric(sporadicMetricNamespaces.contains(metricDefinitionAndTenantId.metricDefinition.name));
subAlarm.setSporadicMetric(sporadicMetricNamespaces
.contains(metricDefinitionAndTenantId.metricDefinition.name));
}
subAlarmStatsRepo = new SubAlarmStatsRepository();
for (SubAlarm subAlarm : subAlarms) {
@ -249,42 +263,50 @@ public class MetricAggregationBolt extends BaseRichBolt {
* Adds the {@code subAlarm} to the subAlarmStatsRepo for the {@code metricDefinitionAndTenantId}.
*/
void handleAlarmCreated(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm subAlarm) {
LOG.debug("Received AlarmCreatedEvent for {}", subAlarm);
logger.debug("Received AlarmCreatedEvent for {}", subAlarm);
addSubAlarm(metricDefinitionAndTenantId, subAlarm);
}
void handleAlarmResend(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm resendSubAlarm) {
final RepoAndStats repoAndStats = findExistingSubAlarmStats(metricDefinitionAndTenantId, resendSubAlarm);
if (repoAndStats == null)
void handleAlarmResend(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
SubAlarm resendSubAlarm) {
final RepoAndStats repoAndStats =
findExistingSubAlarmStats(metricDefinitionAndTenantId, resendSubAlarm);
if (repoAndStats == null) {
return;
}
final SubAlarmStats oldSubAlarmStats = repoAndStats.subAlarmStats;
final SubAlarm oldSubAlarm = oldSubAlarmStats.getSubAlarm();
resendSubAlarm.setState(oldSubAlarm.getState());
resendSubAlarm.setNoState(true); // Have it send its state again so the Alarm can be evaluated
LOG.debug("Forcing SubAlarm {} to send state at next evaluation", oldSubAlarm);
logger.debug("Forcing SubAlarm {} to send state at next evaluation", oldSubAlarm);
oldSubAlarmStats.updateSubAlarm(resendSubAlarm);
}
private RepoAndStats findExistingSubAlarmStats(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
SubAlarm oldSubAlarm) {
final SubAlarmStatsRepository oldSubAlarmStatsRepo = subAlarmStatsRepos.get(metricDefinitionAndTenantId);
private RepoAndStats findExistingSubAlarmStats(
MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm oldSubAlarm) {
final SubAlarmStatsRepository oldSubAlarmStatsRepo =
subAlarmStatsRepos.get(metricDefinitionAndTenantId);
if (oldSubAlarmStatsRepo == null) {
LOG.error("Did not find SubAlarmStatsRepository for MetricDefinition {}", metricDefinitionAndTenantId);
logger.error("Did not find SubAlarmStatsRepository for MetricDefinition {}",
metricDefinitionAndTenantId);
return null;
}
final SubAlarmStats oldSubAlarmStats = oldSubAlarmStatsRepo.get(oldSubAlarm.getId());
if (oldSubAlarmStats == null) {
LOG.error("Did not find existing SubAlarm {} in SubAlarmStatsRepository", oldSubAlarm);
logger.error("Did not find existing SubAlarm {} in SubAlarmStatsRepository", oldSubAlarm);
return null;
}
return new RepoAndStats(oldSubAlarmStatsRepo, oldSubAlarmStats);
}
private void addSubAlarm(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm subAlarm) {
SubAlarmStatsRepository subAlarmStatsRepo = getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId);
if (subAlarmStatsRepo == null)
private void addSubAlarm(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
SubAlarm subAlarm) {
SubAlarmStatsRepository subAlarmStatsRepo =
getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId);
if (subAlarmStatsRepo == null) {
return;
}
long viewEndTimestamp = currentTimeSeconds() + subAlarm.getExpression().getPeriod();
subAlarmStatsRepo.add(subAlarm, viewEndTimestamp);
@ -296,8 +318,9 @@ public class MetricAggregationBolt extends BaseRichBolt {
* MetricDefinition can't have changed, just how it is evaluated
*/
void handleAlarmUpdated(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm subAlarm) {
LOG.debug("Received AlarmUpdatedEvent for {}", subAlarm);
final RepoAndStats repoAndStats = findExistingSubAlarmStats(metricDefinitionAndTenantId, subAlarm);
logger.debug("Received AlarmUpdatedEvent for {}", subAlarm);
final RepoAndStats repoAndStats =
findExistingSubAlarmStats(metricDefinitionAndTenantId, subAlarm);
if (repoAndStats != null) {
// Clear the old SubAlarm, but save the SubAlarm state
final SubAlarmStats oldSubAlarmStats = repoAndStats.subAlarmStats;
@ -305,12 +328,14 @@ public class MetricAggregationBolt extends BaseRichBolt {
subAlarm.setState(oldSubAlarm.getState());
subAlarm.setNoState(true); // Sending too many state changes doesn't hurt; sending too few does
if (oldSubAlarm.isCompatible(subAlarm)) {
LOG.debug("Changing SubAlarm {} to SubAlarm {} and keeping measurements", oldSubAlarm, subAlarm);
logger.debug("Changing SubAlarm {} to SubAlarm {} and keeping measurements", oldSubAlarm,
subAlarm);
oldSubAlarmStats.updateSubAlarm(subAlarm);
return;
}
// Have to completely change the SubAlarmStats
LOG.debug("Changing SubAlarm {} to SubAlarm {} and flushing measurements", oldSubAlarm, subAlarm);
logger.debug("Changing SubAlarm {} to SubAlarm {} and flushing measurements", oldSubAlarm,
subAlarm);
repoAndStats.subAlarmStatsRepository.remove(subAlarm.getId());
}
addSubAlarm(metricDefinitionAndTenantId, subAlarm);
@ -321,21 +346,21 @@ public class MetricAggregationBolt extends BaseRichBolt {
* {@code metricDefinitionAndTenantId}.
*/
void handleAlarmDeleted(MetricDefinitionAndTenantId metricDefinitionAndTenantId, String subAlarmId) {
LOG.debug("Received AlarmDeletedEvent for subAlarm id {}", subAlarmId);
logger.debug("Received AlarmDeletedEvent for subAlarm id {}", subAlarmId);
SubAlarmStatsRepository subAlarmStatsRepo = subAlarmStatsRepos.get(metricDefinitionAndTenantId);
if (subAlarmStatsRepo != null) {
subAlarmStatsRepo.remove(subAlarmId);
if (subAlarmStatsRepo.isEmpty())
if (subAlarmStatsRepo.isEmpty()) {
subAlarmStatsRepos.remove(metricDefinitionAndTenantId);
}
}
}
private static class RepoAndStats {
public final SubAlarmStatsRepository subAlarmStatsRepository;
public final SubAlarmStats subAlarmStats;
public RepoAndStats(SubAlarmStatsRepository subAlarmStatsRepository,
SubAlarmStats subAlarmStats) {
public RepoAndStats(SubAlarmStatsRepository subAlarmStatsRepository, SubAlarmStats subAlarmStats) {
this.subAlarmStatsRepository = subAlarmStatsRepository;
this.subAlarmStats = subAlarmStats;
}


@ -14,24 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.model.metric.Metric;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantIdMatcher;
@ -44,32 +29,48 @@ import com.hpcloud.streaming.storm.Logging;
import com.hpcloud.streaming.storm.Streams;
import com.hpcloud.util.Injector;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Filters metrics for which there is no associated alarm and forwards metrics for which there is an
* alarm. Receives metric alarm and metric sub-alarm events to update metric definitions.
*
* METRIC_DEFS table and the matcher are shared between any bolts in the same worker process so that all of the
* MetricDefinitionAndTenantIds for existing SubAlarms only have to be read once and because it is not
* possible to predict which bolt gets which Metrics so all Bolts know about all starting
* MetricDefinitionAndTenantIds.
* METRIC_DEFS table and the matcher are shared between any bolts in the same worker process so that
* all of the MetricDefinitionAndTenantIds for existing SubAlarms only have to be read once and
* because it is not possible to predict which bolt gets which Metrics so all Bolts know about all
* starting MetricDefinitionAndTenantIds.
*
* The current topology uses shuffleGrouping for the incoming Metrics and allGrouping for the
* events. So, any Bolt may get any Metric so the METRIC_DEFS table and the matcher must be kept up to date
* for all MetricDefinitionAndTenantIds.
* events. So, any Bolt may get any Metric so the METRIC_DEFS table and the matcher must be kept up
* to date for all MetricDefinitionAndTenantIds.
*
* The METRIC_DEFS table contains a List of SubAlarms IDs that reference the same MetricDefinitionAndTenantId
* so if a SubAlarm is deleted, the MetricDefinitionAndTenantId will only be deleted from it and the matcher if no
* more SubAlarms reference it. Incrementing and decrementing the count is done under the static lock SENTINAL
* to ensure it is correct across all Bolts sharing the same METRIC_DEFS table and the matcher. The
* amount of adds and deletes will be very small compared to the number of Metrics so it shouldn't
* block the Metric handling.
* The METRIC_DEFS table contains a List of SubAlarm IDs that reference the same
* MetricDefinitionAndTenantId so if a SubAlarm is deleted, the MetricDefinitionAndTenantId will
* only be deleted from it and the matcher if no more SubAlarms reference it. Incrementing and
* decrementing the count is done under the static lock SENTINAL to ensure it is correct across all
* Bolts sharing the same METRIC_DEFS table and the matcher. The amount of adds and deletes will be
* very small compared to the number of Metrics so it shouldn't block the Metric handling.
*
* <ul>
* <li>Input: MetricDefinition metricDefinition, Metric metric
* <li>Input metric-alarm-events: String eventType, MetricDefinitionAndTenantId metricDefinitionAndTenantId, String
* alarmId
* <li>Input metric-sub-alarm-events: String eventType, MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm
* subAlarm
* <li>Input metric-alarm-events: String eventType, MetricDefinitionAndTenantId
* metricDefinitionAndTenantId, String alarmId
* <li>Input metric-sub-alarm-events: String eventType, MetricDefinitionAndTenantId
* metricDefinitionAndTenantId, SubAlarm subAlarm
* <li>Output: MetricDefinitionAndTenantId metricDefinitionAndTenantId, Metric metric
* </ul>
*/
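A condensed, self-contained sketch of the reference-counting scheme the paragraph above describes, with keys narrowed to String so the example stands alone; it simplifies, and is not a substitute for, the bolt's own addMetricDef/removeSubAlarm shown further down.

import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Simplified sketch of the shared-table bookkeeping described above.
final class SharedMetricDefsSketch {
  private static final Map<String, List<String>> METRIC_DEFS = new ConcurrentHashMap<>();
  private static final Object SENTINAL = new Object();

  static void add(String metricDefKey, String subAlarmId) {
    synchronized (SENTINAL) {
      List<String> subAlarmIds = METRIC_DEFS.get(metricDefKey);
      if (subAlarmIds == null) {
        subAlarmIds = new LinkedList<>();
        METRIC_DEFS.put(metricDefKey, subAlarmIds); // first reference: start tracking
      }
      if (!subAlarmIds.contains(subAlarmId)) {
        subAlarmIds.add(subAlarmId); // each SubAlarm id is counted once
      }
    }
  }

  static void remove(String metricDefKey, String subAlarmId) {
    synchronized (SENTINAL) {
      final List<String> subAlarmIds = METRIC_DEFS.get(metricDefKey);
      if (subAlarmIds != null) {
        subAlarmIds.remove(subAlarmId);
        if (subAlarmIds.isEmpty()) {
          METRIC_DEFS.remove(metricDefKey); // last reference gone: stop tracking
        }
      }
    }
  }
}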
@ -84,14 +85,19 @@ public class MetricFilteringBolt extends BaseRichBolt {
public static final int LAG_MESSAGE_PERIOD_DEFAULT = 30;
public static final String[] FIELDS = new String[] {"metricDefinitionAndTenantId", "metric"};
private static final int MIN_LAG_VALUE = PropertyFinder.getIntProperty(MIN_LAG_VALUE_KEY, MIN_LAG_VALUE_DEFAULT, 0, Integer.MAX_VALUE);
private static final int MAX_LAG_MESSAGES = PropertyFinder.getIntProperty(MAX_LAG_MESSAGES_KEY, MAX_LAG_MESSAGES_DEFAULT, 0, Integer.MAX_VALUE);
private static final int LAG_MESSAGE_PERIOD = PropertyFinder.getIntProperty(LAG_MESSAGE_PERIOD_KEY, LAG_MESSAGE_PERIOD_DEFAULT, 1, 600);
private static final Map<MetricDefinitionAndTenantId, List<String>> METRIC_DEFS = new ConcurrentHashMap<>();
private static final MetricDefinitionAndTenantIdMatcher matcher = new MetricDefinitionAndTenantIdMatcher();
private static final int MIN_LAG_VALUE = PropertyFinder.getIntProperty(MIN_LAG_VALUE_KEY,
MIN_LAG_VALUE_DEFAULT, 0, Integer.MAX_VALUE);
private static final int MAX_LAG_MESSAGES = PropertyFinder.getIntProperty(MAX_LAG_MESSAGES_KEY,
MAX_LAG_MESSAGES_DEFAULT, 0, Integer.MAX_VALUE);
private static final int LAG_MESSAGE_PERIOD = PropertyFinder.getIntProperty(
LAG_MESSAGE_PERIOD_KEY, LAG_MESSAGE_PERIOD_DEFAULT, 1, 600);
private static final Map<MetricDefinitionAndTenantId, List<String>> METRIC_DEFS =
new ConcurrentHashMap<>();
private static final MetricDefinitionAndTenantIdMatcher matcher =
new MetricDefinitionAndTenantIdMatcher();
private static final Object SENTINAL = new Object();
private transient Logger LOG;
private transient Logger logger;
private DataSourceFactory dbConfig;
private transient MetricDefinitionDAO metricDefDAO;
private OutputCollector collector;
@ -111,80 +117,88 @@ public class MetricFilteringBolt extends BaseRichBolt {
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields(FIELDS));
declarer.declareStream(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Fields(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_FIELDS));
declarer.declareStream(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM, new Fields(
MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_FIELDS));
}
@Override
public void execute(Tuple tuple) {
LOG.debug("tuple: {}", tuple);
logger.debug("tuple: {}", tuple);
try {
if (Streams.DEFAULT_STREAM_ID.equals(tuple.getSourceStreamId())) {
final MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) tuple.getValue(0);
final MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) tuple.getValue(0);
final Long timestamp = (Long) tuple.getValue(1);
final Metric metric = (Metric) tuple.getValue(2);
checkLag(timestamp);
LOG.debug("metric definition and tenant id: {}", metricDefinitionAndTenantId);
logger.debug("metric definition and tenant id: {}", metricDefinitionAndTenantId);
// Check for exact matches as well as inexact matches
final List<MetricDefinitionAndTenantId> matches = matcher.match(metricDefinitionAndTenantId);
for (final MetricDefinitionAndTenantId match : matches)
final List<MetricDefinitionAndTenantId> matches =
matcher.match(metricDefinitionAndTenantId);
for (final MetricDefinitionAndTenantId match : matches) {
collector.emit(new Values(match, metric));
}
} else {
String eventType = tuple.getString(0);
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) tuple.getValue(1);
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) tuple.getValue(1);
LOG.debug("Received {} for {}", eventType, metricDefinitionAndTenantId);
logger.debug("Received {} for {}", eventType, metricDefinitionAndTenantId);
// UPDATED events can be ignored because the MetricDefinitionAndTenantId doesn't change
if (EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
if (EventProcessingBolt.DELETED.equals(eventType))
if (EventProcessingBolt.DELETED.equals(eventType)) {
removeSubAlarm(metricDefinitionAndTenantId, tuple.getString(2));
} else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
if (EventProcessingBolt.CREATED.equals(eventType))
}
} else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple
.getSourceStreamId())) {
if (EventProcessingBolt.CREATED.equals(eventType)) {
synchronized (SENTINAL) {
final SubAlarm subAlarm = (SubAlarm) tuple.getValue(2);
addMetricDef(metricDefinitionAndTenantId, subAlarm.getId());
}
}
}
}
} catch (Exception e) {
LOG.error("Error processing tuple {}", tuple, e);
logger.error("Error processing tuple {}", tuple, e);
} finally {
collector.ack(tuple);
}
}
private void checkLag(Long apiTimeStamp) {
if (!lagging)
if (!lagging) {
return;
if ((apiTimeStamp == null) || (apiTimeStamp.longValue() == 0))
}
if ((apiTimeStamp == null) || (apiTimeStamp.longValue() == 0)) {
return; // Remove this code at some point, just to handle old metrics without an NPE
}
final long now = getCurrentTime();
final long lag = now - apiTimeStamp.longValue();
if (lag < minLag)
if (lag < minLag) {
minLag = lag;
}
if (minLag <= MIN_LAG_VALUE) {
lagging = false;
LOG.info("Metrics no longer lagging, minLag = {}", minLag);
}
else if (minLagMessageSent >= MAX_LAG_MESSAGES) {
LOG.info("Waited for {} seconds for Metrics to catch up. Giving up. minLag = {}",
logger.info("Metrics no longer lagging, minLag = {}", minLag);
} else if (minLagMessageSent >= MAX_LAG_MESSAGES) {
logger.info("Waited for {} seconds for Metrics to catch up. Giving up. minLag = {}",
MAX_LAG_MESSAGES * LAG_MESSAGE_PERIOD, minLag);
lagging = false;
}
else if (lastMinLagMessageSent == 0) {
} else if (lastMinLagMessageSent == 0) {
lastMinLagMessageSent = now;
}
else if ((now - lastMinLagMessageSent) >= LAG_MESSAGE_PERIOD) {
LOG.info("Sending {} message, minLag = {}", MetricAggregationBolt.METRICS_BEHIND, minLag);
collector.emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Values(MetricAggregationBolt.METRICS_BEHIND));
} else if ((now - lastMinLagMessageSent) >= LAG_MESSAGE_PERIOD) {
logger.info("Sending {} message, minLag = {}", MetricAggregationBolt.METRICS_BEHIND, minLag);
collector.emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM, new Values(
MetricAggregationBolt.METRICS_BEHIND));
lastMinLagMessageSent = now;
minLagMessageSent++;
}
}
private void removeSubAlarm(MetricDefinitionAndTenantId metricDefinitionAndTenantId, String subAlarmId) {
private void removeSubAlarm(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
String subAlarmId) {
synchronized (SENTINAL) {
final List<String> subAlarmIds = METRIC_DEFS.get(metricDefinitionAndTenantId);
if (subAlarmIds != null) {
@ -199,8 +213,8 @@ public class MetricFilteringBolt extends BaseRichBolt {
@Override
@SuppressWarnings("rawtypes")
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
LOG = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
LOG.info("Preparing");
logger = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
logger.info("Preparing");
this.collector = collector;
if (metricDefDAO == null) {
@ -213,16 +227,18 @@ public class MetricFilteringBolt extends BaseRichBolt {
synchronized (SENTINAL) {
if (METRIC_DEFS.isEmpty()) {
for (SubAlarmMetricDefinition subAlarmMetricDef : metricDefDAO.findForAlarms()) {
addMetricDef(subAlarmMetricDef.getMetricDefinitionAndTenantId(), subAlarmMetricDef.getSubAlarmId());
addMetricDef(subAlarmMetricDef.getMetricDefinitionAndTenantId(),
subAlarmMetricDef.getSubAlarmId());
}
// Iterate again to ensure we only emit each metricDef once
for (MetricDefinitionAndTenantId metricDefinitionAndTenantId : METRIC_DEFS.keySet())
for (MetricDefinitionAndTenantId metricDefinitionAndTenantId : METRIC_DEFS.keySet()) {
collector.emit(new Values(metricDefinitionAndTenantId, null));
LOG.info("Found {} Metric Definitions", METRIC_DEFS.size());
}
logger.info("Found {} Metric Definitions", METRIC_DEFS.size());
// Just output these here so they are only output once per JVM
LOG.info("MIN_LAG_VALUE set to {} seconds", MIN_LAG_VALUE);
LOG.info("MAX_LAG_MESSAGES set to {}", MAX_LAG_MESSAGES);
LOG.info("LAG_MESSAGE_PERIOD set to {} seconds", LAG_MESSAGE_PERIOD);
logger.info("MIN_LAG_VALUE set to {} seconds", MIN_LAG_VALUE);
logger.info("MAX_LAG_MESSAGES set to {}", MAX_LAG_MESSAGES);
logger.info("LAG_MESSAGE_PERIOD set to {} seconds", LAG_MESSAGE_PERIOD);
}
}
}
@ -236,15 +252,16 @@ public class MetricFilteringBolt extends BaseRichBolt {
return System.currentTimeMillis() / 1000;
}
private void addMetricDef(MetricDefinitionAndTenantId metricDefinitionAndTenantId, String subAlarmId) {
private void addMetricDef(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
String subAlarmId) {
List<String> subAlarmIds = METRIC_DEFS.get(metricDefinitionAndTenantId);
if (subAlarmIds == null) {
subAlarmIds = new LinkedList<>();
METRIC_DEFS.put(metricDefinitionAndTenantId, subAlarmIds);
matcher.add(metricDefinitionAndTenantId);
} else if (subAlarmIds.contains(subAlarmId)) {
return; // Make sure it is only added once. Multiple bolts process the same AlarmCreatedEvent
}
else if (subAlarmIds.contains(subAlarmId))
return; // Make sure it only gets added once. Multiple bolts process the same AlarmCreatedEvent
subAlarmIds.add(subAlarmId);
}


@ -14,32 +14,34 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
package com.hpcloud.mon.infrastructure.thresholding;
import com.hpcloud.mon.MetricSpoutConfig;
import com.hpcloud.mon.common.model.metric.MetricEnvelope;
import com.hpcloud.mon.common.model.metric.MetricEnvelopes;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MetricSpout extends KafkaSpout {
private static final Logger LOG = LoggerFactory.getLogger(MetricSpout.class);
private static final Logger logger = LoggerFactory.getLogger(MetricSpout.class);
private static final long serialVersionUID = 744004533863562119L;
public static final String[] FIELDS = new String[] { "metricDefinitionAndTenantId", "apiTimeStamp", "metric" };
public static final String[] FIELDS = new String[] {"metricDefinitionAndTenantId",
"apiTimeStamp", "metric"};
public static final String DEFAULT_TENANT_ID = "TENANT_ID_NOT_SET";
public MetricSpout(MetricSpoutConfig metricSpoutConfig) {
super(metricSpoutConfig);
LOG.info("Created");
logger.info("Created");
}
@Override
@ -47,19 +49,19 @@ public class MetricSpout extends KafkaSpout {
final MetricEnvelope metricEnvelope;
try {
metricEnvelope = MetricEnvelopes.fromJson(message);
LOG.debug("metric envelope: {}", metricEnvelope);
}
catch (RuntimeException re) {
LOG.warn("Error parsing MetricEnvelope", re);
logger.debug("metric envelope: {}", metricEnvelope);
} catch (RuntimeException re) {
logger.warn("Error parsing MetricEnvelope", re);
return;
}
String tenantId = (String) metricEnvelope.meta.get("tenantId");
if (tenantId == null) {
LOG.error("No tenantId so using default tenantId {} for Metric {}", DEFAULT_TENANT_ID, metricEnvelope.metric);
logger.error("No tenantId so using default tenantId {} for Metric {}", DEFAULT_TENANT_ID,
metricEnvelope.metric);
tenantId = DEFAULT_TENANT_ID;
}
collector.emit(new Values(new MetricDefinitionAndTenantId(metricEnvelope.metric.definition(), tenantId),
metricEnvelope.creationTime, metricEnvelope.metric));
collector.emit(new Values(new MetricDefinitionAndTenantId(metricEnvelope.metric.definition(),
tenantId), metricEnvelope.creationTime, metricEnvelope.metric));
}
@Override


@ -14,19 +14,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.hpcloud.configuration.KafkaProducerConfiguration;
public class ProducerModule extends AbstractModule {
private KafkaProducerConfiguration config;
private AlarmEventForwarder alarmEventForwarder;
@Override
protected void configure() {
}
protected void configure() {}
public ProducerModule(KafkaProducerConfiguration config) {
this.config = config;


@ -14,21 +14,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PropertyFinder {
private static final Logger LOG = LoggerFactory.getLogger(PropertyFinder.class);
private static final Logger logger = LoggerFactory.getLogger(PropertyFinder.class);
private PropertyFinder()
{
}
public static int getIntProperty(final String name,
final int defaultValue,
final int minValue,
public static int getIntProperty(final String name, final int defaultValue, final int minValue,
final int maxValue) {
final String valueString = System.getProperty(name);
if ((valueString != null) && !valueString.isEmpty()) {
@ -37,12 +36,12 @@ public class PropertyFinder {
if ((newValue >= minValue) && (newValue <= maxValue)) {
return newValue;
}
LOG.warn("Invalid value {} for property '{}' must be >= {} and <= {}, using default value of {}",
logger.warn(
"Invalid value {} for property '{}' must be >= {} and <= {}, using default value of {}",
valueString, name, minValue, maxValue, defaultValue);
}
catch (NumberFormatException nfe) {
LOG.warn("Not an integer value '{}' for property '{}', using default value of {}", valueString,
name, defaultValue);
} catch (NumberFormatException nfe) {
logger.warn("Not an integer value '{}' for property '{}', using default value of {}",
valueString, name, defaultValue);
}
}
return defaultValue;


@ -14,20 +14,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding.deserializer;
import java.io.Serializable;
import java.util.Collections;
import java.util.List;
import backtype.storm.tuple.Fields;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.streaming.storm.TupleDeserializer;
import com.hpcloud.util.Serialization;
import backtype.storm.tuple.Fields;
import java.io.Serializable;
import java.util.Collections;
import java.util.List;
/**
* Deserializes MaaS events using registered serialization types.
*
@ -49,7 +50,8 @@ public class EventDeserializer implements TupleDeserializer, Serializable {
@Override
public List<List<?>> deserialize(byte[] tuple) {
try {
return Collections.<List<?>>singletonList(Collections.singletonList(Serialization.fromJson(tuple)));
return Collections.<List<?>>singletonList(Collections.singletonList(Serialization
.fromJson(tuple)));
} catch (Exception ignore) {
return null;
}
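A hedged usage sketch of this deserializer: it assumes AlarmDeletedEvent is among the registered targets and that EventDeserializer has a no-argument constructor, neither of which is shown in this diff.

import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.util.Serialization;

import java.util.List;

// Usage sketch only; registration normally happens once at startup, not per call.
public final class EventDeserializerUsageSketch {
  public static List<List<?>> toTuples(byte[] jsonPayload) {
    Serialization.registerTarget(AlarmDeletedEvent.class);
    // Returns a single one-element tuple, or null when the payload cannot be deserialized;
    // EventSpout.processMessage() then emits each inner list as one Storm tuple.
    return new EventDeserializer().deserialize(jsonPayload);
  }
}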


@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import static org.testng.Assert.assertEquals;
@ -21,12 +22,15 @@ import static org.testng.Assert.fail;
public class Assert {
public static void assertArraysEqual(final double[] actual, final double[] expected) {
if (expected == actual)
if (expected == actual) {
return;
if (null == expected)
}
if (null == expected) {
fail("expected a null array, but not null found.");
if (null == actual)
}
if (null == actual) {
fail("expected not null array, but null found.");
}
assertEquals(actual.length, expected.length, "arrays don't have the same size.");
@ -39,12 +43,15 @@ public class Assert {
}
public static void assertArraysEqual(final long[] actual, final long[] expected) {
if (expected == actual)
if (expected == actual) {
return;
if (null == expected)
}
if (null == expected) {
fail("expected a null array, but not null found.");
if (null == actual)
}
if (null == actual) {
fail("expected not null array, but null found.");
}
assertEquals(actual.length, expected.length, "arrays don't have the same size.");


@ -14,35 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.doAnswer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotEquals;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.inject.AbstractModule;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmStateTransitionedEvent;
@ -67,8 +49,29 @@ import com.hpcloud.streaming.storm.TopologyTestCase;
import com.hpcloud.util.Injector;
import com.hpcloud.util.Serialization;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.inject.AbstractModule;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* Simulates a real'ish run of the thresholding engine with alarms being created, updated and deleted
* Simulates a real'ish run of the thresholding engine with alarms being created, updated and
* deleted
*/
@Test(groups = "integration")
public class ThresholdingEngineAlarmTest extends TopologyTestCase {
@ -92,7 +95,6 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
public ThresholdingEngineAlarmTest() {
// Fixtures
subAlarms = subAlarmsFor(TEST_ALARM_ID, expression);
// Mocks
@ -106,12 +108,15 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
});
subAlarmDAO = mock(SubAlarmDAO.class);
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(new Answer<List<SubAlarm>>() {
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(
new Answer<List<SubAlarm>>() {
@Override
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) invocation.getArguments()[0];
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) invocation.getArguments()[0];
for (final SubAlarm subAlarm : subAlarms) {
if (metricDefinitionAndTenantId.metricDefinition.equals(subAlarm.getExpression().getMetricDefinition())) {
if (metricDefinitionAndTenantId.metricDefinition.equals(subAlarm.getExpression()
.getMetricDefinition())) {
return Arrays.asList(subAlarm);
}
}
@ -138,22 +143,23 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
threshConfig.sporadicMetricNamespaces = new HashSet<String>();
Serialization.registerTarget(KafkaProducerConfiguration.class);
threshConfig.kafkaProducerConfig = Serialization.fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}");
threshConfig.kafkaProducerConfig =
Serialization
.fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}");
Config stormConfig = new Config();
stormConfig.setMaxTaskParallelism(1);
metricSpout = new FeederSpout(new Fields(MetricSpout.FIELDS));
eventSpout = new FeederSpout(new Fields("event"));
alarmEventForwarder = mock(AlarmEventForwarder.class);
Injector.registerModules(new TopologyModule(threshConfig, stormConfig,
metricSpout, eventSpout));
Injector
.registerModules(new TopologyModule(threshConfig, stormConfig, metricSpout, eventSpout));
Injector.registerModules(new ProducerModule(alarmEventForwarder));
// Evaluate alarm stats every 1 seconds
System.setProperty(MetricAggregationBolt.TICK_TUPLE_SECONDS_KEY, "5");
}
private List<SubAlarm> subAlarmsFor(final String alarmId,
final AlarmExpression expression,
private List<SubAlarm> subAlarmsFor(final String alarmId, final AlarmExpression expression,
final String... ids) {
final List<SubAlarm> result = new ArrayList<SubAlarm>(expression.getSubExpressions().size());
int index = 0;
@ -161,8 +167,7 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
final String id;
if ((index >= ids.length) || (ids[index] == null)) {
id = String.valueOf(nextSubAlarmId++);
}
else {
} else {
id = ids[index];
}
index++;
@ -171,7 +176,9 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
return result;
}
final AlarmState[] expectedStates = { AlarmState.ALARM, AlarmState.OK, AlarmState.ALARM, AlarmState.OK };
final AlarmState[] expectedStates = {AlarmState.ALARM, AlarmState.OK, AlarmState.ALARM,
AlarmState.OK};
public void shouldThreshold() throws Exception {
doAnswer(new Answer<Object>() {
public Object answer(InvocationOnMock invocation) {
@ -186,94 +193,102 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
assertEquals(event.newState, expectedStates[alarmsSent++]);
return null;
}
}
)
.when(alarmEventForwarder).send(anyString(), anyString(), anyString());
}).when(alarmEventForwarder).send(anyString(), anyString(), anyString());
int goodValueCount = 0;
boolean firstUpdate = true;
boolean secondUpdate = true;
boolean thirdUpdate = true;
final Alarm initialAlarm = new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME,
TEST_ALARM_DESCRIPTION, expression, subAlarms, AlarmState.UNDETERMINED, Boolean.TRUE);
final Alarm initialAlarm =
new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
expression, subAlarms, AlarmState.UNDETERMINED, Boolean.TRUE);
final int expectedAlarms = expectedStates.length;
AlarmExpression savedAlarmExpression = null;
for (int i = 1; alarmsSent != expectedAlarms && i < 300; i++) {
if (i == 5) {
final Map<String, AlarmSubExpression> exprs = createSubExpressionMap();
final AlarmCreatedEvent event = new AlarmCreatedEvent(TEST_ALARM_TENANT_ID, TEST_ALARM_ID, TEST_ALARM_NAME,
final AlarmCreatedEvent event =
new AlarmCreatedEvent(TEST_ALARM_TENANT_ID, TEST_ALARM_ID, TEST_ALARM_NAME,
expression.getExpression(), exprs);
eventSpout.feed(new Values(event));
System.out.printf("Send AlarmCreatedEvent for expression %s%n", expression.getExpression());
}
else if (alarmsSent == 1 && firstUpdate) {
} else if (alarmsSent == 1 && firstUpdate) {
firstUpdate = false;
final String originalExpression = expression.getExpression();
expression = new AlarmExpression(originalExpression.replace(">= 3", ">= 556"));
assertNotEquals(expression.getExpression(), originalExpression);
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
updatedSubAlarms.add(new SubAlarm(subAlarms.get(0).getId(), initialAlarm.getId(), expression.getSubExpressions().get(0)));
updatedSubAlarms.add(new SubAlarm(subAlarms.get(0).getId(), initialAlarm.getId(),
expression.getSubExpressions().get(0)));
for (int index = 1; index < subAlarms.size(); index++) {
final SubAlarm subAlarm = subAlarms.get(index);
updatedSubAlarms.add(new SubAlarm(subAlarm.getId(), initialAlarm.getId(), subAlarm.getExpression()));
updatedSubAlarms.add(new SubAlarm(subAlarm.getId(), initialAlarm.getId(), subAlarm
.getExpression()));
}
initialAlarm.setState(currentState);
final AlarmUpdatedEvent event = EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(), expression,
updatedSubAlarms);
final AlarmUpdatedEvent event =
EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(),
expression, updatedSubAlarms);
subAlarms = updatedSubAlarms;
initialAlarm.setSubAlarms(updatedSubAlarms);
eventSpout.feed(new Values(event));
System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression());
}
else if (alarmsSent == 2 && secondUpdate) {
} else if (alarmsSent == 2 && secondUpdate) {
secondUpdate = false;
savedAlarmExpression = expression;
expression = new AlarmExpression("max(hpcs.compute.load{id=5}) > 551 and (" + expression.getExpression().replace("556", "554") + ")");
expression =
new AlarmExpression("max(hpcs.compute.load{id=5}) > 551 and ("
+ expression.getExpression().replace("556", "554") + ")");
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
updatedSubAlarms.add(new SubAlarm(UUID.randomUUID().toString(), initialAlarm.getId(), expression.getSubExpressions().get(0)));
updatedSubAlarms.add(new SubAlarm(UUID.randomUUID().toString(), initialAlarm.getId(),
expression.getSubExpressions().get(0)));
for (int index = 0; index < subAlarms.size(); index++) {
updatedSubAlarms.add(new SubAlarm(subAlarms.get(index).getId(), initialAlarm.getId(), expression.getSubExpressions().get(index+1)));
updatedSubAlarms.add(new SubAlarm(subAlarms.get(index).getId(), initialAlarm.getId(),
expression.getSubExpressions().get(index + 1)));
}
initialAlarm.setState(currentState);
final AlarmUpdatedEvent event = EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(), expression,
updatedSubAlarms);
final AlarmUpdatedEvent event =
EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(),
expression, updatedSubAlarms);
subAlarms = updatedSubAlarms;
initialAlarm.setSubAlarms(updatedSubAlarms);
eventSpout.feed(new Values(event));
System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression());
}
else if (alarmsSent == 3 && thirdUpdate) {
} else if (alarmsSent == 3 && thirdUpdate) {
thirdUpdate = false;
expression = savedAlarmExpression;
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
int index = 1;
for (AlarmSubExpression subExpression : expression.getSubExpressions()) {
updatedSubAlarms.add(new SubAlarm(subAlarms.get(index).getId(), initialAlarm.getId(), subExpression));
updatedSubAlarms.add(new SubAlarm(subAlarms.get(index).getId(), initialAlarm.getId(),
subExpression));
index++;
}
initialAlarm.setState(currentState);
final AlarmUpdatedEvent event = EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(), expression,
updatedSubAlarms);
final AlarmUpdatedEvent event =
EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(),
expression, updatedSubAlarms);
subAlarms = updatedSubAlarms;
initialAlarm.setSubAlarms(updatedSubAlarms);
eventSpout.feed(new Values(event));
System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression());
}
else {
} else {
System.out.println("Feeding metrics...");
long time = System.currentTimeMillis() / 1000;
++goodValueCount;
for (final SubAlarm subAlarm : subAlarms) {
final MetricDefinitionAndTenantId metricDefinitionAndTenantId =
new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(), TEST_ALARM_TENANT_ID);
metricSpout.feed(new Values(metricDefinitionAndTenantId, time,
new Metric(metricDefinitionAndTenantId.metricDefinition, time, (double) (goodValueCount == 15 ? 1 : 555))));
new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(),
TEST_ALARM_TENANT_ID);
metricSpout.feed(new Values(metricDefinitionAndTenantId, time, new Metric(
metricDefinitionAndTenantId.metricDefinition, time,
(double) (goodValueCount == 15 ? 1 : 555))));
}
}
try {
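The doAnswer(...) stub near the top of shouldThreshold() above is the standard Mockito pattern for intercepting a void call and inspecting its arguments. Below is a small, standalone sketch of that pattern only; Forwarder is a made-up stand-in for AlarmEventForwarder, whose send(topic, key, json) shape is assumed from the anyString() matchers used in the test, and Mockito must be on the classpath.

import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import java.util.ArrayList;
import java.util.List;

public class DoAnswerCaptureSketch {
  /** Hypothetical stand-in for AlarmEventForwarder; only the send(topic, key, json) shape is assumed. */
  public interface Forwarder {
    void send(String topic, String key, String json);
  }

  public static void main(String[] args) {
    final List<String> captured = new ArrayList<String>();
    Forwarder forwarder = mock(Forwarder.class);
    doAnswer(new Answer<Object>() {
      @Override
      public Object answer(InvocationOnMock invocation) {
        // In this sketch the third argument of send(...) carries the payload.
        captured.add((String) invocation.getArguments()[2]);
        return null;
      }
    }).when(forwarder).send(anyString(), anyString(), anyString());

    forwarder.send("alarm-state-transitions", "key", "{\"newState\":\"ALARM\"}");
    System.out.println(captured); // prints [{"newState":"ALARM"}]
  }
}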

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import com.hpcloud.mon.ThresholdingEngine;

View File

@ -14,33 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.doAnswer;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.inject.AbstractModule;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.common.event.AlarmStateTransitionedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
@ -61,6 +45,24 @@ import com.hpcloud.streaming.storm.TopologyTestCase;
import com.hpcloud.util.Injector;
import com.hpcloud.util.Serialization;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.inject.AbstractModule;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
/**
* Simulates a real'ish run of the thresholding engine, using seconds instead of minutes for the
* evaluation timescale.
@ -86,8 +88,8 @@ public class ThresholdingEngineTest extends TopologyTestCase {
public ThresholdingEngineTest() {
// Fixtures
final AlarmExpression expression = new AlarmExpression(
"max(cpu{id=5}) >= 3 or max(mem{id=5}) >= 5");
final AlarmExpression expression =
new AlarmExpression("max(cpu{id=5}) >= 3 or max(mem{id=5}) >= 5");
cpuMetricDef = expression.getSubExpressions().get(0).getMetricDefinition();
memMetricDef = expression.getSubExpressions().get(1).getMetricDefinition();
@ -98,17 +100,22 @@ public class ThresholdingEngineTest extends TopologyTestCase {
@Override
public Alarm answer(InvocationOnMock invocation) throws Throwable {
return new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME,
TEST_ALARM_DESCRIPTION, expression, subAlarmsFor(expression), AlarmState.OK, Boolean.TRUE);
TEST_ALARM_DESCRIPTION, expression, subAlarmsFor(expression), AlarmState.OK,
Boolean.TRUE);
}
});
subAlarmDAO = mock(SubAlarmDAO.class);
final SubAlarm cpuMetricDefSubAlarm = new SubAlarm("123", TEST_ALARM_ID, expression.getSubExpressions().get(0));
final SubAlarm memMetricDefSubAlarm = new SubAlarm("456", TEST_ALARM_ID, expression.getSubExpressions().get(1));
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(new Answer<List<SubAlarm>>() {
final SubAlarm cpuMetricDefSubAlarm =
new SubAlarm("123", TEST_ALARM_ID, expression.getSubExpressions().get(0));
final SubAlarm memMetricDefSubAlarm =
new SubAlarm("456", TEST_ALARM_ID, expression.getSubExpressions().get(1));
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(
new Answer<List<SubAlarm>>() {
@Override
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) invocation.getArguments()[0];
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) invocation.getArguments()[0];
MetricDefinition metricDef = metricDefinitionAndTenantId.metricDefinition;
if (metricDef.equals(cpuMetricDef)) {
return Arrays.asList(cpuMetricDefSubAlarm);
@ -120,8 +127,8 @@ public class ThresholdingEngineTest extends TopologyTestCase {
});
metricDefinitionDAO = mock(MetricDefinitionDAO.class);
final List<SubAlarmMetricDefinition> metricDefs = Arrays.asList(
new SubAlarmMetricDefinition(cpuMetricDefSubAlarm.getId(),
final List<SubAlarmMetricDefinition> metricDefs =
Arrays.asList(new SubAlarmMetricDefinition(cpuMetricDefSubAlarm.getId(),
new MetricDefinitionAndTenantId(cpuMetricDef, TEST_ALARM_TENANT_ID)),
new SubAlarmMetricDefinition(memMetricDefSubAlarm.getId(),
new MetricDefinitionAndTenantId(memMetricDef, TEST_ALARM_TENANT_ID)));
@ -142,14 +149,16 @@ public class ThresholdingEngineTest extends TopologyTestCase {
threshConfig.sporadicMetricNamespaces = new HashSet<String>();
Serialization.registerTarget(KafkaProducerConfiguration.class);
threshConfig.kafkaProducerConfig = Serialization.fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}");
threshConfig.kafkaProducerConfig =
Serialization
.fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}");
Config stormConfig = new Config();
stormConfig.setMaxTaskParallelism(1);
metricSpout = new FeederSpout(new Fields(MetricSpout.FIELDS));
eventSpout = new FeederSpout(new Fields("event"));
alarmEventForwarder = mock(AlarmEventForwarder.class);
Injector.registerModules(new TopologyModule(threshConfig, stormConfig,
metricSpout, eventSpout));
Injector
.registerModules(new TopologyModule(threshConfig, stormConfig, metricSpout, eventSpout));
Injector.registerModules(new ProducerModule(alarmEventForwarder));
}
@ -174,15 +183,12 @@ public class ThresholdingEngineTest extends TopologyTestCase {
previousState = event.newState;
if (event.newState == AlarmState.UNDETERMINED) {
expectedState = AlarmState.ALARM;
}
else if (event.newState == AlarmState.ALARM) {
} else if (event.newState == AlarmState.ALARM) {
expectedState = AlarmState.UNDETERMINED;
}
return null;
}
}
)
.when(alarmEventForwarder).send(anyString(), anyString(), anyString());
}).when(alarmEventForwarder).send(anyString(), anyString(), anyString());
int waitCount = 0;
int feedCount = 5;
int goodValueCount = 0;
@ -193,21 +199,26 @@ public class ThresholdingEngineTest extends TopologyTestCase {
System.out.println("Feeding metrics...");
long time = System.currentTimeMillis() / 1000;
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(cpuMetricDef, TEST_ALARM_TENANT_ID), time,
new Metric(cpuMetricDef.name, cpuMetricDef.dimensions, time, (double) (++goodValueCount == 15 ? 1 : 555))));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(memMetricDef, TEST_ALARM_TENANT_ID), time,
new Metric(memMetricDef.name, extraMemMetricDefDimensions, time, (double) (goodValueCount == 15 ? 1 : 555))));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(cpuMetricDef,
TEST_ALARM_TENANT_ID), time, new Metric(cpuMetricDef.name, cpuMetricDef.dimensions,
time, (double) (++goodValueCount == 15 ? 1 : 555))));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(memMetricDef,
TEST_ALARM_TENANT_ID), time, new Metric(memMetricDef.name, extraMemMetricDefDimensions,
time, (double) (goodValueCount == 15 ? 1 : 555))));
if (--feedCount == 0)
if (--feedCount == 0) {
waitCount = 3;
}
if (goodValueCount == 15)
if (goodValueCount == 15) {
goodValueCount = 0;
}
} else {
System.out.println("Waiting...");
if (--waitCount == 0)
if (--waitCount == 0) {
feedCount = 5;
}
}
try {
Thread.sleep(1000);

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import static org.mockito.Matchers.any;
@ -21,21 +22,6 @@ import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.common.collect.ImmutableMap;
import com.google.inject.AbstractModule;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
@ -57,6 +43,22 @@ import com.hpcloud.mon.infrastructure.thresholding.ProducerModule;
import com.hpcloud.streaming.storm.TopologyTestCase;
import com.hpcloud.util.Injector;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.common.collect.ImmutableMap;
import com.google.inject.AbstractModule;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* Simulates a real'ish run of the thresholding engine, using seconds instead of minutes for the
* evaluation timescale.
@ -80,7 +82,8 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
public ThresholdingEngineTest1() {
// Fixtures
expression = new AlarmExpression(
expression =
new AlarmExpression(
"avg(hpcs.compute.cpu{id=5}, 3) >= 3 times 2 and avg(hpcs.compute.mem{id=5}, 3) >= 5 times 2");
customExpression = AlarmExpression.of("avg(my.test{id=4}, 3) > 10");
customSubExpression = customExpression.getSubExpressions().get(0);
@ -94,35 +97,39 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
when(alarmDAO.findById(anyString())).thenAnswer(new Answer<Alarm>() {
@Override
public Alarm answer(InvocationOnMock invocation) throws Throwable {
if (invocation.getArguments()[0].equals("1"))
return new Alarm("1", BOB_TENANT_ID, "test-alarm", "Descr of test-alarm", expression, Arrays.asList(createCpuSubAlarm(),
createMemSubAlarm()), AlarmState.OK, Boolean.TRUE);
else if (invocation.getArguments()[0].equals("2"))
return new Alarm("2", JOE_TENANT_ID, "joes-alarm", "Descr of joes-alarm", customExpression,
Arrays.asList(createCustomSubAlarm()), AlarmState.OK, Boolean.TRUE);
if (invocation.getArguments()[0].equals("1")) {
return new Alarm("1", BOB_TENANT_ID, "test-alarm", "Descr of test-alarm", expression,
Arrays.asList(createCpuSubAlarm(), createMemSubAlarm()), AlarmState.OK, Boolean.TRUE);
} else if (invocation.getArguments()[0].equals("2")) {
return new Alarm("2", JOE_TENANT_ID, "joes-alarm", "Descr of joes-alarm",
customExpression, Arrays.asList(createCustomSubAlarm()), AlarmState.OK, Boolean.TRUE);
}
return null;
}
});
subAlarmDAO = mock(SubAlarmDAO.class);
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(new Answer<List<SubAlarm>>() {
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(
new Answer<List<SubAlarm>>() {
@Override
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) invocation.getArguments()[0];
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) invocation.getArguments()[0];
MetricDefinition metricDef = metricDefinitionAndTenantId.metricDefinition;
if (metricDef.equals(cpuMetricDef))
if (metricDef.equals(cpuMetricDef)) {
return Arrays.asList(createCpuSubAlarm());
else if (metricDef.equals(memMetricDef))
} else if (metricDef.equals(memMetricDef)) {
return Arrays.asList(createMemSubAlarm());
else if (metricDef.equals(customMetricDef))
} else if (metricDef.equals(customMetricDef)) {
return Arrays.asList(createCustomSubAlarm());
}
return Collections.emptyList();
}
});
metricDefinitionDAO = mock(MetricDefinitionDAO.class);
final List<SubAlarmMetricDefinition> metricDefs = Arrays.asList(
new SubAlarmMetricDefinition(createCpuSubAlarm().getId(),
final List<SubAlarmMetricDefinition> metricDefs =
Arrays.asList(new SubAlarmMetricDefinition(createCpuSubAlarm().getId(),
new MetricDefinitionAndTenantId(cpuMetricDef, BOB_TENANT_ID)),
new SubAlarmMetricDefinition(createMemSubAlarm().getId(),
new MetricDefinitionAndTenantId(memMetricDef, BOB_TENANT_ID)),
@ -149,8 +156,8 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
eventSpout = new FeederSpout(new Fields("event"));
final AlarmEventForwarder alarmEventForwarder = mock(AlarmEventForwarder.class);
Injector.registerModules(new TopologyModule(threshConfig, stormConfig,
metricSpout, eventSpout));
Injector
.registerModules(new TopologyModule(threshConfig, stormConfig, metricSpout, eventSpout));
Injector.registerModules(new ProducerModule(alarmEventForwarder));
// Evaluate alarm stats every 1 seconds
@ -175,23 +182,27 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
while (true) {
long time = System.currentTimeMillis();
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(cpuMetricDef, BOB_TENANT_ID), new Metric(cpuMetricDef.name,
cpuMetricDef.dimensions, time, count % 10 == 0 ? 555 : 1)));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(memMetricDef, BOB_TENANT_ID), new Metric(memMetricDef.name,
cpuMetricDef.dimensions, time, count % 10 == 0 ? 555 : 1)));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(customMetricDef, JOE_TENANT_ID), new Metric(customMetricDef.name,
cpuMetricDef.dimensions, time, count % 20 == 0 ? 1 : 123)));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(cpuMetricDef, BOB_TENANT_ID),
new Metric(cpuMetricDef.name, cpuMetricDef.dimensions, time, count % 10 == 0 ? 555 : 1)));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(memMetricDef, BOB_TENANT_ID),
new Metric(memMetricDef.name, cpuMetricDef.dimensions, time, count % 10 == 0 ? 555 : 1)));
metricSpout
.feed(new Values(new MetricDefinitionAndTenantId(customMetricDef, JOE_TENANT_ID),
new Metric(customMetricDef.name, cpuMetricDef.dimensions, time, count % 20 == 0 ? 1
: 123)));
if (count % 5 == 0) {
Object event = null;
if (++eventCounter % 2 == 0)
event = new AlarmDeletedEvent(JOE_TENANT_ID, "2",
ImmutableMap.<String, MetricDefinition>builder().put("444", customMetricDef).build());
else
event = new AlarmCreatedEvent(JOE_TENANT_ID, "2", "foo", customSubExpression.getExpression(),
if (++eventCounter % 2 == 0) {
event =
new AlarmDeletedEvent(JOE_TENANT_ID, "2", ImmutableMap
.<String, MetricDefinition>builder().put("444", customMetricDef).build());
} else {
event =
new AlarmCreatedEvent(JOE_TENANT_ID, "2", "foo", customSubExpression.getExpression(),
ImmutableMap.<String, AlarmSubExpression>builder()
.put("444", customSubExpression)
.build());
.put("444", customSubExpression).build());
}
eventSpout.feed(new Values(event));
}

View File

@ -14,19 +14,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.testng.annotations.Test;
import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
@ -34,6 +28,13 @@ import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Test
public class AlarmTest {
private static final String TEST_ALARM_ID = "1";
@ -43,25 +44,30 @@ public class AlarmTest {
private static Boolean ALARM_ENABLED = Boolean.FALSE;
public void shouldBeUndeterminedIfAnySubAlarmIsUndetermined() {
AlarmExpression expr = new AlarmExpression(
AlarmExpression expr =
new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 AND avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0),
AlarmState.UNDETERMINED);
SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1), AlarmState.ALARM);
Alarm alarm = new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION, expr,
Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED);
SubAlarm subAlarm1 =
new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0), AlarmState.UNDETERMINED);
SubAlarm subAlarm2 =
new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1), AlarmState.ALARM);
Alarm alarm =
new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
expr, Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED);
assertFalse(alarm.evaluate());
assertEquals(alarm.getState(), AlarmState.UNDETERMINED);
}
public void shouldEvaluateExpressionWithBooleanAnd() {
AlarmExpression expr = new AlarmExpression(
AlarmExpression expr =
new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 AND avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0));
SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1));
Alarm alarm = new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
Alarm alarm =
new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
expr, Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED);
assertFalse(alarm.evaluate());
@ -90,12 +96,14 @@ public class AlarmTest {
}
public void shouldEvaluateExpressionWithBooleanOr() {
AlarmExpression expr = new AlarmExpression(
AlarmExpression expr =
new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0));
SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1));
Alarm alarm = new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
Alarm alarm =
new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
expr, Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED);
assertFalse(alarm.evaluate());
@ -131,12 +139,13 @@ public class AlarmTest {
}
public void shouldBuildStateChangeReason() {
AlarmExpression expr = new AlarmExpression(
AlarmExpression expr =
new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0));
SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1));
List<String> expressions = Arrays.asList(subAlarm1.getExpression().toString(),
subAlarm2.getExpression().toString());
List<String> expressions =
Arrays.asList(subAlarm1.getExpression().toString(), subAlarm2.getExpression().toString());
assertEquals(
Alarm.buildStateChangeReason(AlarmState.UNDETERMINED, expressions),
@ -149,16 +158,19 @@ public class AlarmTest {
/**
* This test is here because this case happened in the Threshold Engine. The AlarmExpression
* resulted in a MetricDefinition with null dimensions and SubAlarm had empty dimensions
* and that didn't match causing an IllegalArgumentException. MetricDefinition.equals() has
* been changed to consider those two values for dimensions the same
* resulted in a MetricDefinition with null dimensions while the SubAlarm had empty dimensions; the two
* did not match, causing an IllegalArgumentException. MetricDefinition.equals() has been changed to
* treat those two forms of dimensions as equal.
*/
public void testDimensions() {
final AlarmExpression expression = AlarmExpression.of("max(cpu_system_perc) > 1");
final MetricDefinition metricDefinition = new MetricDefinition("cpu_system_perc", new HashMap<String, String>());
final AlarmSubExpression ase = new AlarmSubExpression(AggregateFunction.MAX, metricDefinition, AlarmOperator.GT, 1, 60, 1);
final MetricDefinition metricDefinition =
new MetricDefinition("cpu_system_perc", new HashMap<String, String>());
final AlarmSubExpression ase =
new AlarmSubExpression(AggregateFunction.MAX, metricDefinition, AlarmOperator.GT, 1, 60, 1);
final SubAlarm subAlarm = new SubAlarm("123", "456", ase);
final Map<AlarmSubExpression, Boolean> subExpressionValues = new HashMap<AlarmSubExpression, Boolean>();
final Map<AlarmSubExpression, Boolean> subExpressionValues =
new HashMap<AlarmSubExpression, Boolean>();
subExpressionValues.put(subAlarm.getExpression(), true);
assertEquals(expression.getSubExpressions().get(0).getMetricDefinition().hashCode(),
metricDefinition.hashCode());
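The testDimensions() Javadoc above describes the convention that a null dimension map and an empty one should compare as equal. The following is a minimal, self-contained sketch of that rule using a hypothetical normalize() helper; it is an illustration of the convention, not the real MetricDefinition.equals().

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class DimensionEqualitySketch {
  /** Treat a null dimension map and an empty one as the same thing. */
  static Map<String, String> normalize(Map<String, String> dimensions) {
    return dimensions == null ? Collections.<String, String>emptyMap() : dimensions;
  }

  /** Hypothetical equality check mirroring the convention described in the Javadoc above. */
  static boolean sameDefinition(String nameA, Map<String, String> dimsA,
      String nameB, Map<String, String> dimsB) {
    return nameA.equals(nameB) && normalize(dimsA).equals(normalize(dimsB));
  }

  public static void main(String[] args) {
    // "cpu_system_perc" with no dimensions, written two different ways.
    boolean equal = sameDefinition("cpu_system_perc", null,
        "cpu_system_perc", new HashMap<String, String>());
    System.out.println(equal); // true under the null == empty convention
  }
}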

View File

@ -14,21 +14,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import static org.testng.Assert.assertEqualsNoOrder;
import static org.testng.Assert.assertTrue;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantIdMatcher.DimensionPair;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantIdMatcher.DimensionSet;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantIdMatcher.DimensionPair;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantIdMatcher.DimensionSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Test
public class MetricDefinitionAndTenantIdMatcherTest {
@ -52,24 +53,27 @@ public class MetricDefinitionAndTenantIdMatcherTest {
public void shouldNotFind() {
assertTrue(matcher.isEmpty());
final MetricDefinitionAndTenantId toMatch = new MetricDefinitionAndTenantId(metricDef, tenantId);
final MetricDefinitionAndTenantId toMatch =
new MetricDefinitionAndTenantId(metricDef, tenantId);
verifyNoMatch(toMatch);
final MetricDefinitionAndTenantId diffTenantId = new MetricDefinitionAndTenantId(metricDef, "Different");
final MetricDefinitionAndTenantId diffTenantId =
new MetricDefinitionAndTenantId(metricDef, "Different");
matcher.add(diffTenantId);
verifyNoMatch(toMatch);
matcher.add(toMatch);
verifyMatch(toMatch, toMatch);
final MetricDefinitionAndTenantId noMatchOnName = new MetricDefinitionAndTenantId(
new MetricDefinition("NotCpu", dimensions), tenantId);
final MetricDefinitionAndTenantId noMatchOnName =
new MetricDefinitionAndTenantId(new MetricDefinition("NotCpu", dimensions), tenantId);
verifyNoMatch(noMatchOnName);
final Map<String, String> hostDimensions = new HashMap<>(dimensions);
hostDimensions.put(HOST, "OtherHost");
final MetricDefinitionAndTenantId noMatchOnDimensions = new MetricDefinitionAndTenantId(
new MetricDefinition(CPU_METRIC_NAME, hostDimensions), tenantId);
final MetricDefinitionAndTenantId noMatchOnDimensions =
new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, hostDimensions),
tenantId);
verifyNoMatch(noMatchOnDimensions);
matcher.remove(toMatch);
@ -90,31 +94,36 @@ public class MetricDefinitionAndTenantIdMatcherTest {
public void shouldFind() {
assertTrue(matcher.isEmpty());
final MetricDefinitionAndTenantId toMatch = new MetricDefinitionAndTenantId(metricDef, tenantId);
final MetricDefinitionAndTenantId toMatch =
new MetricDefinitionAndTenantId(metricDef, tenantId);
final Map<String, String> nullDimensions = new HashMap<>(dimensions);
nullDimensions.put(HOST, null);
final MetricDefinitionAndTenantId nullMatch = new MetricDefinitionAndTenantId(
new MetricDefinition(CPU_METRIC_NAME, nullDimensions), tenantId);
final MetricDefinitionAndTenantId nullMatch =
new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, nullDimensions),
tenantId);
matcher.add(nullMatch);
verifyMatch(nullMatch, nullMatch);
final Map<String, String> noDimensions = new HashMap<>();
final MetricDefinitionAndTenantId noMatch = new MetricDefinitionAndTenantId(
new MetricDefinition(CPU_METRIC_NAME, noDimensions), tenantId);
final MetricDefinitionAndTenantId noMatch =
new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, noDimensions),
tenantId);
matcher.add(noMatch);
verifyMatch(noMatch, noMatch);
final Map<String, String> hostDimensions = new HashMap<>();
hostDimensions.put(HOST, dimensions.get(HOST));
final MetricDefinitionAndTenantId hostMatch = new MetricDefinitionAndTenantId(
new MetricDefinition(CPU_METRIC_NAME, hostDimensions), tenantId);
final MetricDefinitionAndTenantId hostMatch =
new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, hostDimensions),
tenantId);
matcher.add(hostMatch);
final Map<String, String> groupDimensions = new HashMap<>();
groupDimensions.put(LOAD_BALANCER_GROUP, dimensions.get(LOAD_BALANCER_GROUP));
final MetricDefinitionAndTenantId groupMatch = new MetricDefinitionAndTenantId(
new MetricDefinition(CPU_METRIC_NAME, groupDimensions), tenantId);
final MetricDefinitionAndTenantId groupMatch =
new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, groupDimensions),
tenantId);
matcher.add(groupMatch);
verifyMatch(toMatch, noMatch, hostMatch, groupMatch);
@ -135,8 +144,9 @@ public class MetricDefinitionAndTenantIdMatcherTest {
// doesn't exist
matcher.remove(toMatch);
final MetricDefinitionAndTenantId loadMetric = new MetricDefinitionAndTenantId(
new MetricDefinition("load", new HashMap<String, String>(dimensions)), tenantId);
final MetricDefinitionAndTenantId loadMetric =
new MetricDefinitionAndTenantId(new MetricDefinition("load", new HashMap<String, String>(
dimensions)), tenantId);
matcher.add(loadMetric);
matcher.remove(hostMatch);
@ -154,37 +164,48 @@ public class MetricDefinitionAndTenantIdMatcherTest {
public void shouldCreatePossiblePairs() {
final Map<String, String> dimensions = new HashMap<>();
DimensionSet[] actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
DimensionSet[] actual =
matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
DimensionSet[] expected = {new DimensionSet()};
assertEqualsNoOrder(actual, expected);
dimensions.put("1", "a");
actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected = new DimensionSet[] { new DimensionSet(), new DimensionSet(new DimensionPair("1", "a")) };
actual =
matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected =
new DimensionSet[] {new DimensionSet(), new DimensionSet(new DimensionPair("1", "a"))};
assertEqualsNoOrder(actual, expected);
dimensions.put("2", "b");
actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected = new DimensionSet[] { new DimensionSet(), new DimensionSet(new DimensionPair("1", "a")),
actual =
matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected =
new DimensionSet[] {new DimensionSet(), new DimensionSet(new DimensionPair("1", "a")),
new DimensionSet(new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"))};
assertEqualsNoOrder(actual, expected);
dimensions.put("3", "c");
actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected = new DimensionSet[] { new DimensionSet(),
actual =
matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected =
new DimensionSet[] {
new DimensionSet(),
new DimensionSet(new DimensionPair("1", "a")),
new DimensionSet(new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"), new DimensionPair("3", "c"))
};
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"),
new DimensionPair("3", "c"))};
dimensions.put("4", "d");
actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected = new DimensionSet[] { new DimensionSet(),
actual =
matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected =
new DimensionSet[] {
new DimensionSet(),
new DimensionSet(new DimensionPair("1", "a")),
new DimensionSet(new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("3", "c")),
@ -195,12 +216,17 @@ public class MetricDefinitionAndTenantIdMatcherTest {
new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("3", "c"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"), new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("3", "c"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("3", "c"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"), new DimensionPair("3", "c"), new DimensionPair("4", "d"))
};
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"),
new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"),
new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("3", "c"),
new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("3", "c"),
new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"),
new DimensionPair("3", "c"), new DimensionPair("4", "d"))};
assertEqualsNoOrder(actual, expected);
}
}
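The expected arrays in shouldCreatePossiblePairs() above enumerate every subset of a metric's dimension pairs (1, 2, 4, 8 and 16 entries as dimensions are added). The sketch below reproduces that subset enumeration with plain JDK maps instead of the matcher's DimensionSet and DimensionPair types, purely for orientation.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class DimensionSubsetsSketch {
  /** Returns every subset of the given dimensions, including the empty one. */
  static List<Map<String, String>> allSubsets(Map<String, String> dimensions) {
    List<Map<String, String>> subsets = new ArrayList<Map<String, String>>();
    subsets.add(new LinkedHashMap<String, String>()); // start with the empty set
    for (Map.Entry<String, String> entry : dimensions.entrySet()) {
      // For each dimension, double the collection: every existing subset with and without it.
      List<Map<String, String>> withEntry = new ArrayList<Map<String, String>>();
      for (Map<String, String> subset : subsets) {
        Map<String, String> copy = new LinkedHashMap<String, String>(subset);
        copy.put(entry.getKey(), entry.getValue());
        withEntry.add(copy);
      }
      subsets.addAll(withEntry);
    }
    return subsets;
  }

  public static void main(String[] args) {
    Map<String, String> dimensions = new LinkedHashMap<String, String>();
    dimensions.put("1", "a");
    dimensions.put("2", "b");
    dimensions.put("3", "c");
    System.out.println(allSubsets(dimensions).size()); // 8 subsets for 3 dimensions
  }
}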

View File

@ -14,18 +14,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@Test
public class SubAlarmStatsTest {
private AlarmSubExpression expression;
@ -106,8 +107,9 @@ public class SubAlarmStatsTest {
// equivalent to the behavior in CloudWatch for an alarm with 3 evaluation periods. 2 more
// slides to move the value outside of the window and 6 more to exceed the observation
// threshold.
for (int i = 0; i < 7; i++)
for (int i = 0; i < 7; i++) {
assertFalse(subAlarmStats.evaluateAndSlideWindow(initialTime += 60));
}
assertTrue(subAlarmStats.evaluateAndSlideWindow(initialTime += 60));
assertEquals(subAlarmStats.getSubAlarm().getState(), AlarmState.UNDETERMINED);
subAlarmStats.getStats().addValue(5, initialTime - 1);
@ -139,7 +141,8 @@ public class SubAlarmStatsTest {
}
public void checkLongPeriod() {
final AlarmSubExpression subExpr = AlarmSubExpression.of("sum(hpcs.compute.mem{id=5}, 120) >= 96");
final AlarmSubExpression subExpr =
AlarmSubExpression.of("sum(hpcs.compute.mem{id=5}, 120) >= 96");
final SubAlarm subAlarm = new SubAlarm("42", "4242", subExpr);
@ -150,14 +153,15 @@ public class SubAlarmStatsTest {
stats.getStats().addValue(1.0, t1);
if ((t1 % 60) == 0) {
stats.evaluateAndSlideWindow(t1);
if (i <= 60)
if (i <= 60) {
// First check will show it is OK. You could argue that this is incorrect
// as we have not waited for the whole period so we can't really evaluate it.
// That is true for sum and count
assertEquals(stats.getSubAlarm().getState(), AlarmState.OK);
else
} else {
assertEquals(stats.getSubAlarm().getState(), AlarmState.ALARM);
}
}
}
}
}
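The comments above (the CloudWatch-style three evaluation periods, and the first check reporting OK before a full period of data exists) describe sliding-window evaluation. Below is a deliberately simplified, self-contained model of that idea, a fixed number of per-period buckets that must all breach a threshold before the window alarms. It is a sketch under those assumptions, not the engine's actual SubAlarmStats logic.

import java.util.ArrayDeque;
import java.util.Deque;

public class SlidingWindowSketch {
  private final int periods;        // number of evaluation periods in the window
  private final double threshold;   // per-period threshold
  private final Deque<Double> window = new ArrayDeque<Double>();

  SlidingWindowSketch(int periods, double threshold) {
    this.periods = periods;
    this.threshold = threshold;
  }

  /** Slide the window by one period and report whether every period breached the threshold. */
  boolean slideAndEvaluate(double latestPeriodValue) {
    window.addLast(latestPeriodValue);
    if (window.size() > periods) {
      window.removeFirst();         // the oldest period falls out of the window
    }
    if (window.size() < periods) {
      return false;                 // not enough data yet to alarm
    }
    for (double value : window) {
      if (value <= threshold) {
        return false;
      }
    }
    return true;
  }

  public static void main(String[] args) {
    SlidingWindowSketch stats = new SlidingWindowSketch(3, 5.0); // roughly "> 5 times 3"
    System.out.println(stats.slideAndEvaluate(9.0)); // false: only 1 of 3 periods seen
    System.out.println(stats.slideAndEvaluate(9.0)); // false: only 2 of 3 periods seen
    System.out.println(stats.slideAndEvaluate(9.0)); // true: 3 consecutive breaching periods
    System.out.println(stats.slideAndEvaluate(1.0)); // false: newest period is below threshold
  }
}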

View File

@ -14,12 +14,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import static org.testng.Assert.assertEquals;
import java.nio.charset.Charset;
import java.util.Arrays;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.domain.model.Alarm;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.AlarmDAO;
import com.google.common.io.Resources;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
@ -28,13 +35,8 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.io.Resources;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.domain.model.Alarm;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.AlarmDAO;
import java.nio.charset.Charset;
import java.util.Arrays;
@Test
public class AlarmDAOImplTest {
@ -52,7 +54,8 @@ public class AlarmDAOImplTest {
protected void setupClass() throws Exception {
db = new DBI("jdbc:h2:mem:test;MODE=MySQL");
handle = db.open();
handle.execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
handle
.execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
dao = new AlarmDAOImpl(db);
}
@ -68,11 +71,15 @@ public class AlarmDAOImplTest {
handle.execute("truncate table sub_alarm_dimension");
handle.execute("truncate table alarm_action");
String sql = String.format("insert into alarm (id, tenant_id, name, description, expression, state, actions_enabled, created_at, updated_at) "
String sql =
String
.format(
"insert into alarm (id, tenant_id, name, description, expression, state, actions_enabled, created_at, updated_at) "
+ "values ('%s', '%s', '%s', '%s', 'avg(hpcs.compute{disk=vda, instance_id=123, metric_name=cpu}) > 10', 'UNDETERMINED', %d, NOW(), NOW())",
ALARM_ID, TENANT_ID, ALARM_NAME, ALARM_DESCR, ALARM_ENABLED ? 1 : 0);
handle.execute(sql);
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('111', '123', 'AVG', 'hpcs.compute', 'GT', 10, 60, 1, NOW(), NOW())");
handle.execute("insert into sub_alarm_dimension values ('111', 'instance_id', '123')");
handle.execute("insert into sub_alarm_dimension values ('111', 'disk', 'vda')");
@ -83,7 +90,8 @@ public class AlarmDAOImplTest {
public void shouldFindById() {
String expr = "avg(hpcs.compute{disk=vda, instance_id=123, metric_name=cpu}) > 10";
Alarm expected = new Alarm(ALARM_ID, TENANT_ID, ALARM_NAME, ALARM_DESCR, AlarmExpression.of(expr),
Alarm expected =
new Alarm(ALARM_ID, TENANT_ID, ALARM_NAME, ALARM_DESCR, AlarmExpression.of(expr),
Arrays.asList(new SubAlarm("111", ALARM_ID, AlarmSubExpression.of(expr))),
AlarmState.UNDETERMINED, Boolean.TRUE);
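As setupClass() above shows, these DAO tests run against an in-memory H2 database opened in MySQL compatibility mode and seeded from alarm.sql. Below is a minimal JDBC sketch of that setup, with a simplified one-off table standing in for the real schema; it assumes only that the H2 driver is on the classpath.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class H2MySqlModeSketch {
  public static void main(String[] args) throws Exception {
    // Same style of in-memory, MySQL-compatible database the tests open through DBI.
    Connection connection = DriverManager.getConnection("jdbc:h2:mem:test;MODE=MySQL");
    Statement statement = connection.createStatement();
    // Simplified stand-in table; the real tests load the full schema from alarm.sql.
    statement.execute("create table alarm (id varchar(36) primary key, state varchar(20))");
    statement.execute("insert into alarm values ('123', 'UNDETERMINED')");
    ResultSet results = statement.executeQuery("select state from alarm where id = '123'");
    while (results.next()) {
      System.out.println(results.getString(1)); // UNDETERMINED
    }
    connection.close();
  }
}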

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import static org.testng.Assert.assertTrue;
@ -36,8 +37,8 @@ import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmMetricDefinition;
/**
* Note: MySQL dependent test because of the group_concat() used in the SQL in MetricDefinitionDAOImpl.
* Depends on the MySQL in mini-mon.
* Note: MySQL-dependent test because of the group_concat() used in the SQL in
* MetricDefinitionDAOImpl. It depends on the MySQL in mini-mon.
*/
@Test(groups = "database")
public class MetricDefinitionDAOImplTest {
@ -64,38 +65,39 @@ public class MetricDefinitionDAOImplTest {
protected void beforeMethod() {
cleanUp();
handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('123', '" + TENANT_ID + "', 'Test Alarm', 'Test Alarm Description', 'Not real expr', 'OK', NOW(), NOW())");
handle
.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('123', '"
+ TENANT_ID
+ "', 'Test Alarm', 'Test Alarm Description', 'Not real expr', 'OK', NOW(), NOW())");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
+ "values ('111', '123', 'AVG', 'cpu', 'GT', 10, 60, 1, 'OK', NOW(), NOW())");
handle.execute("insert into sub_alarm_dimension values ('111', 'device', '1')");
handle.execute("insert into sub_alarm_dimension values ('111', 'instance_id', '777')");
handle.execute("insert into sub_alarm_dimension values ('111', 'image_id', '888')");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
+ "values ('222', '123', 'AVG', 'mem', 'GT', 10, 60, 1, 'OK', NOW(), NOW())");
handle.execute("insert into sub_alarm_dimension values ('222', 'instance_id', '123')");
handle.execute("insert into sub_alarm_dimension values ('222', 'az', '2')");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
+ "values ('333', '123', 'AVG', 'bar', 'GT', 10, 60, 1, 'OK', NOW(), NOW())");
SubAlarmMetricDefinition metricDef1 = new SubAlarmMetricDefinition("111",
new MetricDefinitionAndTenantId(new MetricDefinition("cpu",
ImmutableMap.<String, String>builder()
.put("device", "1")
.put("instance_id", "777")
.put("image_id", "888")
SubAlarmMetricDefinition metricDef1 =
new SubAlarmMetricDefinition("111", new MetricDefinitionAndTenantId(new MetricDefinition(
"cpu", ImmutableMap.<String, String>builder().put("device", "1")
.put("instance_id", "777").put("image_id", "888").build()), TENANT_ID));
SubAlarmMetricDefinition metricDef2 =
new SubAlarmMetricDefinition("222", new MetricDefinitionAndTenantId(new MetricDefinition(
"mem", ImmutableMap.<String, String>builder().put("az", "2").put("instance_id", "123")
.build()), TENANT_ID));
SubAlarmMetricDefinition metricDef2 = new SubAlarmMetricDefinition("222",
new MetricDefinitionAndTenantId(new MetricDefinition("mem",
ImmutableMap.<String, String>builder()
.put("az", "2")
.put("instance_id", "123")
.build()), TENANT_ID));
SubAlarmMetricDefinition metricDef3 = new SubAlarmMetricDefinition("333",
new MetricDefinitionAndTenantId(new MetricDefinition("bar",
null), TENANT_ID));
SubAlarmMetricDefinition metricDef3 =
new SubAlarmMetricDefinition("333", new MetricDefinitionAndTenantId(new MetricDefinition(
"bar", null), TENANT_ID));
expected = Arrays.asList(metricDef1, metricDef2, metricDef3);
}
@ -109,15 +111,17 @@ public class MetricDefinitionDAOImplTest {
List<SubAlarmMetricDefinition> found = dao.findForAlarms();
for (final SubAlarmMetricDefinition toFind : expected)
for (final SubAlarmMetricDefinition toFind : expected) {
assertTrue(found.contains(toFind), "Did not find " + toFind);
}
}
public void shouldNotFindDeletedAlarms() {
handle.execute("update alarm set deleted_at=NOW() where id in ('123')");
List<SubAlarmMetricDefinition> found = dao.findForAlarms();
for (final SubAlarmMetricDefinition toFind : expected)
for (final SubAlarmMetricDefinition toFind : expected) {
assertFalse(found.contains(toFind), "Should not have found " + toFind);
}
}
}
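The note above points out that MetricDefinitionDAOImpl relies on MySQL's group_concat(). The sketch below only illustrates the kind of aggregation group_concat() makes easy, collapsing each sub-alarm's dimension rows into a single string. The connection details and the column names are assumptions made for the illustration, and this is not the DAO's actual query.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class GroupConcatSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder host, database, user and password; point these at the mini-mon MySQL.
    Connection connection =
        DriverManager.getConnection("jdbc:mysql://localhost/mon", "user", "password");
    // Assumed column names for sub_alarm_dimension; the test's inserts do not name them.
    String sql = "select sub_alarm_id, group_concat(dimension_name, '=', value separator ',') "
        + "from sub_alarm_dimension group by sub_alarm_id";
    Statement statement = connection.createStatement();
    ResultSet results = statement.executeQuery(sql);
    while (results.next()) {
      // e.g. 111 -> instance_id=555,az=1,instance_uuid=555
      System.out.println(results.getString(1) + " -> " + results.getString(2));
    }
    connection.close();
  }
}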

View File

@ -14,13 +14,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import static org.testng.Assert.assertEquals;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.List;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.SubAlarmDAO;
import com.google.common.io.Resources;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
@ -29,13 +35,9 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.io.Resources;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.SubAlarmDAO;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.List;
@Test
public class SubAlarmDAOImplTest {
@ -48,7 +50,8 @@ public class SubAlarmDAOImplTest {
protected void setupClass() throws Exception {
db = new DBI("jdbc:h2:mem:test;MODE=MySQL");
handle = db.open();
handle.execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
handle
.execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
dao = new SubAlarmDAOImpl(db);
}
@ -64,75 +67,101 @@ public class SubAlarmDAOImplTest {
handle.execute("truncate table sub_alarm_dimension");
// These don't have the real Alarm expression because it doesn't matter for this test
handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('123', '" + TENANT_ID + "', 'Test Alarm', 'Test Alarm Description', 'Not real expr', 'OK', NOW(), NOW())");
handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('234', '" + TENANT_ID + "', 'Test Alarm2', 'Test Alarm2 Description', 'Not real expr', 'OK', NOW(), NOW())");
handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('345', '" + TENANT_ID + "', 'Test Alarm3', 'Test Alarm3 Description', 'Not real expr', 'OK', NOW(), NOW())");
handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('456', '" + TENANT_ID + "', 'Test Alarm4', 'Test Alarm4 Description', 'Not real expr', 'OK', NOW(), NOW())");
handle
.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('123', '"
+ TENANT_ID
+ "', 'Test Alarm', 'Test Alarm Description', 'Not real expr', 'OK', NOW(), NOW())");
handle
.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('234', '"
+ TENANT_ID
+ "', 'Test Alarm2', 'Test Alarm2 Description', 'Not real expr', 'OK', NOW(), NOW())");
handle
.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('345', '"
+ TENANT_ID
+ "', 'Test Alarm3', 'Test Alarm3 Description', 'Not real expr', 'OK', NOW(), NOW())");
handle
.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('456', '"
+ TENANT_ID
+ "', 'Test Alarm4', 'Test Alarm4 Description', 'Not real expr', 'OK', NOW(), NOW())");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('111', '123', 'AVG', 'cpu', 'GT', 10, 60, 1, NOW(), NOW())");
handle.execute("insert into sub_alarm_dimension values ('111', 'instance_id', '555')");
handle.execute("insert into sub_alarm_dimension values ('111', 'az', '1')");
handle.execute("insert into sub_alarm_dimension values ('111', 'instance_uuid', '555')");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('222', '234', 'AVG', 'cpu', 'GT', 10, 60, 1, NOW(), NOW())");
handle.execute("insert into sub_alarm_dimension values ('222', 'instance_id', '666')");
handle.execute("insert into sub_alarm_dimension values ('222', 'az', '1')");
handle.execute("insert into sub_alarm_dimension values ('222', 'instance_uuid', '666')");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('333', '345', 'AVG', 'disk', 'GT', 10, 60, 1, NOW(), NOW())");
handle.execute("insert into sub_alarm_dimension values ('333', 'instance_id', '777')");
handle.execute("insert into sub_alarm_dimension values ('333', 'az', '1')");
handle.execute("insert into sub_alarm_dimension values ('333', 'instance_uuid', '777')");
handle.execute("insert into sub_alarm_dimension values ('333', 'device', 'vda')");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('444', '456', 'AVG', 'cpu', 'GT', 10, 60, 1, NOW(), NOW())");
}
public void shouldFind() {
List<SubAlarm> expected = Arrays.asList(new SubAlarm("111", "123",
AlarmSubExpression.of("avg(cpu{instance_id=555,az=1}) > 10"),
AlarmState.UNDETERMINED));
List<SubAlarm> subAlarms = dao.find(new MetricDefinitionAndTenantId(expected.get(0).getExpression().getMetricDefinition(), TENANT_ID));
List<SubAlarm> expected =
Arrays.asList(new SubAlarm("111", "123", AlarmSubExpression
.of("avg(cpu{instance_id=555,az=1}) > 10"), AlarmState.UNDETERMINED));
List<SubAlarm> subAlarms =
dao.find(new MetricDefinitionAndTenantId(expected.get(0).getExpression()
.getMetricDefinition(), TENANT_ID));
assertEquals(subAlarms, expected);
expected = Arrays.asList(new SubAlarm("222", "234",
AlarmSubExpression.of("avg(cpu{instance_id=666,az=1}) > 10"),
AlarmState.UNDETERMINED));
subAlarms = dao.find(new MetricDefinitionAndTenantId(expected.get(0).getExpression().getMetricDefinition(), TENANT_ID));
expected =
Arrays.asList(new SubAlarm("222", "234", AlarmSubExpression
.of("avg(cpu{instance_id=666,az=1}) > 10"), AlarmState.UNDETERMINED));
subAlarms =
dao.find(new MetricDefinitionAndTenantId(expected.get(0).getExpression()
.getMetricDefinition(), TENANT_ID));
assertEquals(subAlarms, expected);
}
public void shouldNotFind() {
final String badTenantId = TENANT_ID + "42";
List<SubAlarm> subAlarms = dao.find(new MetricDefinitionAndTenantId(AlarmSubExpression.of("avg(cpu{instance_id=555,az=1}) > 10").getMetricDefinition(), badTenantId));
List<SubAlarm> subAlarms =
dao.find(new MetricDefinitionAndTenantId(AlarmSubExpression.of(
"avg(cpu{instance_id=555,az=1}) > 10").getMetricDefinition(), badTenantId));
assertEquals(subAlarms.size(), 0);
subAlarms = dao.find(new MetricDefinitionAndTenantId(AlarmSubExpression.of("avg(cpu{instance_id=666,az=1}) > 10").getMetricDefinition(), badTenantId));
subAlarms =
dao.find(new MetricDefinitionAndTenantId(AlarmSubExpression.of(
"avg(cpu{instance_id=666,az=1}) > 10").getMetricDefinition(), badTenantId));
assertEquals(subAlarms.size(), 0);
}
public void shouldFindWithSubject() {
List<SubAlarm> expected = Arrays.asList(new SubAlarm(
"333",
"345",
AlarmSubExpression.of("avg(disk{instance_id=777,az=1,device=vda}) > 10"),
AlarmState.UNDETERMINED));
List<SubAlarm> subAlarms = dao.find(new MetricDefinitionAndTenantId(expected.get(0).getExpression().getMetricDefinition(), TENANT_ID));
List<SubAlarm> expected =
Arrays.asList(new SubAlarm("333", "345", AlarmSubExpression
.of("avg(disk{instance_id=777,az=1,device=vda}) > 10"), AlarmState.UNDETERMINED));
List<SubAlarm> subAlarms =
dao.find(new MetricDefinitionAndTenantId(expected.get(0).getExpression()
.getMetricDefinition(), TENANT_ID));
assertEquals(subAlarms, expected);
}
public void shouldFindForNullDimensions() {
List<SubAlarm> expected = Arrays.asList(new SubAlarm("444", "456",
AlarmSubExpression.of("avg(cpu{}) > 10"), AlarmState.UNDETERMINED));
List<SubAlarm> subAlarms = dao.find(new MetricDefinitionAndTenantId(new MetricDefinition("cpu", null), TENANT_ID));
List<SubAlarm> expected =
Arrays.asList(new SubAlarm("444", "456", AlarmSubExpression.of("avg(cpu{}) > 10"),
AlarmState.UNDETERMINED));
List<SubAlarm> subAlarms =
dao.find(new MetricDefinitionAndTenantId(new MetricDefinition("cpu", null), TENANT_ID));
assertEquals(subAlarms, expected);
}
}
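
For orientation: the fixture stores each sub-alarm as one sub_alarm row plus zero or more sub_alarm_dimension rows, and the tests compare what dao.find() returns against SubAlarm values built directly from expression strings. The expectation for row '111' looks like this (a condensed view of shouldFind, reusing the test's own dao and TENANT_ID fields):

// Row '111' (alarm '123'): AVG over "cpu", GT 10, one 60-second period.
final List<SubAlarm> expected =
    Arrays.asList(new SubAlarm("111", "123",
        AlarmSubExpression.of("avg(cpu{instance_id=555,az=1}) > 10"), AlarmState.UNDETERMINED));

// Lookups are scoped by tenant as well as by metric definition, which is why the same
// expression under a different tenant id (see shouldNotFind) yields an empty list.
final List<SubAlarm> subAlarms =
    dao.find(new MetricDefinitionAndTenantId(
        expected.get(0).getExpression().getMetricDefinition(), TENANT_ID));
assertEquals(subAlarms, expected);
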

View File

@ -14,30 +14,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.times;
import static org.testng.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import com.hpcloud.mon.ThresholdingConfiguration;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
@ -48,6 +33,22 @@ import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.AlarmDAO;
import com.hpcloud.streaming.storm.Streams;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
@Test
public class AlarmThresholdingBoltTest {
@ -63,8 +64,7 @@ public class AlarmThresholdingBoltTest {
private AlarmDAO alarmDAO;
private AlarmThresholdingBolt bolt;
private OutputCollector collector;
private final String[] subExpressions = {
"avg(cpu{instance_id=123,device=42}, 1) > 5",
private final String[] subExpressions = {"avg(cpu{instance_id=123,device=42}, 1) > 5",
"max(load{instance_id=123,device=42}, 1) > 8",
"sum(diskio{instance_id=123,device=42}, 1) > 5000"};
@ -73,8 +73,9 @@ public class AlarmThresholdingBoltTest {
final String alarmId = "111111112222222222233333333334";
final StringBuilder builder = new StringBuilder();
for (final String subExpression : subExpressions) {
if (builder.length() > 0)
if (builder.length() > 0) {
builder.append(" or ");
}
builder.append(subExpression);
}
final String expression = builder.toString();
@ -89,7 +90,8 @@ public class AlarmThresholdingBoltTest {
final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
subAlarms = new ArrayList<SubAlarm>(subExpressions.size());
for (int i = 0; i < subExpressions.size(); i++) {
final SubAlarm subAlarm = new SubAlarm(UUID.randomUUID().toString(), alarmId, subExpressions.get(i));
final SubAlarm subAlarm =
new SubAlarm(UUID.randomUUID().toString(), alarmId, subExpressions.get(i));
subAlarms.add(subAlarm);
}
alarm.setSubAlarms(subAlarms);
@ -106,9 +108,8 @@ public class AlarmThresholdingBoltTest {
}
/**
* Create a simple Alarm with one sub expression.
* Send a SubAlarm with state set to ALARM.
* Ensure that the Alarm was triggered and sent
* Create a simple Alarm with one sub expression. Send a SubAlarm with state set to ALARM. Ensure
* that the Alarm was triggered and sent.
*/
public void simpleAlarmCreation() {
final SubAlarm subAlarm = subAlarms.get(0);
@ -118,12 +119,15 @@ public class AlarmThresholdingBoltTest {
for (int i = 1; i < subAlarms.size(); i++) {
emitSubAlarmStateChange(alarmId, subAlarms.get(i), AlarmState.OK);
}
final String alarmJson = "{\"alarm-transitioned\":{\"tenantId\":\"" + tenantId + "\"," +
"\"alarmId\":\"111111112222222222233333333334\",\"alarmName\":\"Test CPU Alarm\"," +
"\"alarmDescription\":\"Description of Alarm\",\"oldState\":\"OK\",\"newState\":\"ALARM\"," +
"\"actionsEnabled\":true," +
"\"stateChangeReason\":\"Thresholds were exceeded for the sub-alarms: [" + subAlarm.getExpression().getExpression() + "]\"," +
"\"timestamp\":1395587091}}";
final String alarmJson =
"{\"alarm-transitioned\":{\"tenantId\":\""
+ tenantId
+ "\","
+ "\"alarmId\":\"111111112222222222233333333334\",\"alarmName\":\"Test CPU Alarm\","
+ "\"alarmDescription\":\"Description of Alarm\",\"oldState\":\"OK\",\"newState\":\"ALARM\","
+ "\"actionsEnabled\":true,"
+ "\"stateChangeReason\":\"Thresholds were exceeded for the sub-alarms: ["
+ subAlarm.getExpression().getExpression() + "]\"," + "\"timestamp\":1395587091}}";
verify(alarmEventForwarder, times(1)).send(ALERTS_EXCHANGE, ALERT_ROUTING_KEY, alarmJson);
verify(alarmDAO, times(1)).updateState(alarmId, AlarmState.ALARM);
@ -133,11 +137,14 @@ public class AlarmThresholdingBoltTest {
final Tuple clearTuple = createSubAlarmStateChangeTuple(alarmId, subAlarm);
bolt.execute(clearTuple);
verify(collector, times(1)).ack(clearTuple);
final String okJson = "{\"alarm-transitioned\":{\"tenantId\":\"" + tenantId + "\"," +
"\"alarmId\":\"111111112222222222233333333334\",\"alarmName\":\"Test CPU Alarm\"," +
"\"alarmDescription\":\"Description of Alarm\",\"oldState\":\"ALARM\",\"newState\":\"OK\"," +
"\"actionsEnabled\":true," +
"\"stateChangeReason\":\"The alarm threshold(s) have not been exceeded\",\"timestamp\":1395587091}}";
final String okJson =
"{\"alarm-transitioned\":{\"tenantId\":\""
+ tenantId
+ "\","
+ "\"alarmId\":\"111111112222222222233333333334\",\"alarmName\":\"Test CPU Alarm\","
+ "\"alarmDescription\":\"Description of Alarm\",\"oldState\":\"ALARM\",\"newState\":\"OK\","
+ "\"actionsEnabled\":true,"
+ "\"stateChangeReason\":\"The alarm threshold(s) have not been exceeded\",\"timestamp\":1395587091}}";
verify(alarmEventForwarder, times(1)).send(ALERTS_EXCHANGE, ALERT_ROUTING_KEY, okJson);
verify(alarmDAO, times(1)).updateState(alarmId, AlarmState.OK);
}
@ -151,8 +158,10 @@ public class AlarmThresholdingBoltTest {
final String newDescription = "New Description";
final AlarmState newState = AlarmState.OK;
boolean newEnabled = false;
final AlarmUpdatedEvent event = new AlarmUpdatedEvent(tenantId, alarmId, newName, newDescription, alarm.getAlarmExpression().getExpression(),
alarm.getState(), newState, newEnabled, empty, empty, empty, empty);
final AlarmUpdatedEvent event =
new AlarmUpdatedEvent(tenantId, alarmId, newName, newDescription, alarm
.getAlarmExpression().getExpression(), alarm.getState(), newState, newEnabled, empty,
empty, empty, empty);
final Tuple updateTuple = createAlarmUpdateTuple(event);
bolt.execute(updateTuple);
verify(collector, times(1)).ack(updateTuple);
@ -169,26 +178,33 @@ public class AlarmThresholdingBoltTest {
final Map<String, AlarmSubExpression> oldSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> changedSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> unchangedSubExpressions = new HashMap<>();
final String newExpression = subExpressions[1] + " or " +
subExpressions[2].replace("max", "avg") + " or " +
"sum(diskio{instance_id=123,device=4242}, 1) > 5000";
final String newExpression =
subExpressions[1] + " or " + subExpressions[2].replace("max", "avg") + " or "
+ "sum(diskio{instance_id=123,device=4242}, 1) > 5000";
final AlarmExpression newAlarmExpression = new AlarmExpression(newExpression);
final SubAlarm newSubAlarm = new SubAlarm(UUID.randomUUID().toString(), alarmId, newAlarmExpression.getSubExpressions().get(2));
final SubAlarm newSubAlarm =
new SubAlarm(UUID.randomUUID().toString(), alarmId, newAlarmExpression.getSubExpressions()
.get(2));
newSubExpressions.put(newSubAlarm.getId(), newSubAlarm.getExpression());
final SubAlarm deletedSubAlarm = subAlarms.get(0);
oldSubExpressions.put(deletedSubAlarm.getId(), deletedSubAlarm.getExpression());
final SubAlarm changedSubAlarm = new SubAlarm(subAlarms.get(2).getId(), alarmId, newAlarmExpression.getSubExpressions().get(1));
final SubAlarm changedSubAlarm =
new SubAlarm(subAlarms.get(2).getId(), alarmId, newAlarmExpression.getSubExpressions().get(
1));
changedSubExpressions.put(changedSubAlarm.getId(), changedSubAlarm.getExpression());
final SubAlarm unChangedSubAlarm = new SubAlarm(subAlarms.get(1).getId(), alarmId, subAlarms.get(1).getExpression());
final SubAlarm unChangedSubAlarm =
new SubAlarm(subAlarms.get(1).getId(), alarmId, subAlarms.get(1).getExpression());
unchangedSubExpressions.put(unChangedSubAlarm.getId(), unChangedSubAlarm.getExpression());
emitSubAlarmStateChange(alarmId, changedSubAlarm, AlarmState.OK);
emitSubAlarmStateChange(alarmId, unChangedSubAlarm, AlarmState.OK);
unChangedSubAlarm.setState(AlarmState.OK);
final AlarmUpdatedEvent event = new AlarmUpdatedEvent(tenantId, alarmId, alarm.getName(), alarm.getDescription(), newExpression,
alarm.getState(), alarm.getState(), alarm.isActionsEnabled(), oldSubExpressions, changedSubExpressions, unchangedSubExpressions, newSubExpressions);
final AlarmUpdatedEvent event =
new AlarmUpdatedEvent(tenantId, alarmId, alarm.getName(), alarm.getDescription(),
newExpression, alarm.getState(), alarm.getState(), alarm.isActionsEnabled(),
oldSubExpressions, changedSubExpressions, unchangedSubExpressions, newSubExpressions);
final Tuple updateTuple = createAlarmUpdateTuple(event);
bolt.execute(updateTuple);
verify(collector, times(1)).ack(updateTuple);
@ -211,10 +227,10 @@ public class AlarmThresholdingBoltTest {
return alarmId;
}
private void emitSubAlarmStateChange(String alarmId,
final SubAlarm subAlarm, AlarmState state) {
private void emitSubAlarmStateChange(String alarmId, final SubAlarm subAlarm, AlarmState state) {
// Create a copy so changing the state doesn't directly update the ones in the bolt
final SubAlarm toEmit = new SubAlarm(subAlarm.getId(), subAlarm.getAlarmId(), subAlarm.getExpression());
final SubAlarm toEmit =
new SubAlarm(subAlarm.getId(), subAlarm.getAlarmId(), subAlarm.getExpression());
toEmit.setState(state);
final Tuple tuple = createSubAlarmStateChangeTuple(alarmId, toEmit);
bolt.execute(tuple);
@ -225,7 +241,9 @@ public class AlarmThresholdingBoltTest {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.ALARM_EVENT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(EventProcessingBolt.UPDATED, event.alarmId, event), tupleParam);
final Tuple tuple =
Testing.testTuple(Arrays.asList(EventProcessingBolt.UPDATED, event.alarmId, event),
tupleParam);
return tuple;
}
@ -241,8 +259,7 @@ public class AlarmThresholdingBoltTest {
private static final long serialVersionUID = 1L;
public MockAlarmThreshholdBolt(AlarmDAO alarmDAO,
AlarmEventForwarder alarmEventForwarder) {
public MockAlarmThreshholdBolt(AlarmDAO alarmDAO, AlarmEventForwarder alarmEventForwarder) {
super(alarmDAO, alarmEventForwarder);
}
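
The reformatting in this file applies two conventions that recur throughout the diff: single-statement if bodies now take braces, and long assignments break after the '=' onto a continuation line. The sub-expression join from the setup method shows the brace rule in context (names are the test's own; comments added here):

final StringBuilder builder = new StringBuilder();
for (final String subExpression : subExpressions) {
  if (builder.length() > 0) {
    builder.append(" or ");  // braces are required even for a one-line body
  }
  builder.append(subExpression);
}
// expression ends up as the three sub-expressions joined with " or "
final String expression = builder.toString();
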

View File

@ -14,35 +14,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Sets;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
@ -55,6 +33,30 @@ import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.streaming.storm.Streams;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Sets;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
@Test
public class EventProcessingBoltTest {
@ -77,26 +79,26 @@ public class EventProcessingBoltTest {
final String alarmId = "111111112222222222233333333334";
final String name = "Test CPU Alarm";
final String description = "Description of " + name;
final String expression = "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 " +
"and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 " +
"and max(hpcs.compute.load{instance_id=123,device=42}) > 5";
final String expression =
"avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 "
+ "and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 "
+ "and max(hpcs.compute.load{instance_id=123,device=42}) > 5";
alarmExpression = new AlarmExpression(expression);
subAlarms = createSubAlarms(alarmId, alarmExpression);
alarm = new Alarm(alarmId, TENANT_ID, name, description, alarmExpression, subAlarms,
alarm =
new Alarm(alarmId, TENANT_ID, name, description, alarmExpression, subAlarms,
AlarmState.UNDETERMINED, Boolean.TRUE);
}
private List<SubAlarm> createSubAlarms(final String alarmId,
final AlarmExpression alarmExpression,
String ... ids) {
final AlarmExpression alarmExpression, String... ids) {
final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
final List<SubAlarm> subAlarms = new ArrayList<SubAlarm>(subExpressions.size());
for (int i = 0; i < subExpressions.size(); i++) {
final String id;
if (i >= ids.length) {
id = UUID.randomUUID().toString();
}
else {
} else {
id = ids[i];
}
final SubAlarm subAlarm = new SubAlarm(id, alarmId, subExpressions.get(i));
@ -107,8 +109,9 @@ public class EventProcessingBoltTest {
public void testAlarmCreatedEvent() {
final Map<String, AlarmSubExpression> expressions = createAlarmSubExpressionMap(alarm);
final AlarmCreatedEvent event = new AlarmCreatedEvent(alarm.getTenantId(), alarm.getId(),
alarm.getName(), alarm.getAlarmExpression().getExpression(), expressions);
final AlarmCreatedEvent event =
new AlarmCreatedEvent(alarm.getTenantId(), alarm.getId(), alarm.getName(), alarm
.getAlarmExpression().getExpression(), expressions);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
for (final SubAlarm subAlarm : subAlarms) {
@ -130,8 +133,8 @@ public class EventProcessingBoltTest {
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
metricDefinitions.put(subAlarm.getId(), subAlarm.getExpression().getMetricDefinition());
}
final AlarmDeletedEvent event = new AlarmDeletedEvent(alarm.getTenantId(), alarm.getId(),
metricDefinitions);
final AlarmDeletedEvent event =
new AlarmDeletedEvent(alarm.getTenantId(), alarm.getId(), metricDefinitions);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
for (final SubAlarm subAlarm : subAlarms) {
@ -143,30 +146,33 @@ public class EventProcessingBoltTest {
}
private void verifyDeletedSubAlarm(final SubAlarm subAlarm) {
verify(collector, times(1)).emit(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.DELETED,
new MetricDefinitionAndTenantId(
subAlarm.getExpression().getMetricDefinition(), TENANT_ID), subAlarm.getId()));
verify(collector, times(1)).emit(
EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.DELETED, new MetricDefinitionAndTenantId(subAlarm
.getExpression().getMetricDefinition(), TENANT_ID), subAlarm.getId()));
}
public static AlarmUpdatedEvent createAlarmUpdatedEvent(final Alarm alarm,
final AlarmState newState,
final AlarmExpression updatedAlarmExpression,
final AlarmState newState, final AlarmExpression updatedAlarmExpression,
List<SubAlarm> updatedSubAlarms) {
final Map<String, AlarmSubExpression> oldAlarmSubExpressions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms())
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
oldAlarmSubExpressions.put(subAlarm.getId(), subAlarm.getExpression());
}
BiMap<String, AlarmSubExpression> oldExpressions = HashBiMap.create(oldAlarmSubExpressions);
Set<AlarmSubExpression> oldSet = oldExpressions.inverse().keySet();
Set<AlarmSubExpression> newSet = new HashSet<>();
for (final SubAlarm subAlarm : updatedSubAlarms)
for (final SubAlarm subAlarm : updatedSubAlarms) {
newSet.add(subAlarm.getExpression());
}
// Identify old or changed expressions
Set<AlarmSubExpression> oldOrChangedExpressions = new HashSet<>(Sets.difference(oldSet, newSet));
Set<AlarmSubExpression> oldOrChangedExpressions =
new HashSet<>(Sets.difference(oldSet, newSet));
// Identify new or changed expressions
Set<AlarmSubExpression> newOrChangedExpressions = new HashSet<>(Sets.difference(newSet, oldSet));
Set<AlarmSubExpression> newOrChangedExpressions =
new HashSet<>(Sets.difference(newSet, oldSet));
// Find changed expressions
Map<String, AlarmSubExpression> changedExpressions = new HashMap<>();
@ -192,15 +198,19 @@ public class EventProcessingBoltTest {
// Create IDs for new expressions
Map<String, AlarmSubExpression> newExpressions = new HashMap<>();
for (AlarmSubExpression expression : newOrChangedExpressions)
for (final SubAlarm subAlarm : updatedSubAlarms)
if (subAlarm.getExpression().equals(expression))
for (AlarmSubExpression expression : newOrChangedExpressions) {
for (final SubAlarm subAlarm : updatedSubAlarms) {
if (subAlarm.getExpression().equals(expression)) {
newExpressions.put(subAlarm.getId(), expression);
}
}
}
final AlarmUpdatedEvent event = new AlarmUpdatedEvent(alarm.getTenantId(), alarm.getId(),
alarm.getName(), alarm.getDescription(), updatedAlarmExpression.getExpression(), newState, alarm.getState(),
true, oldExpressions,
changedExpressions, unchangedExpressions, newExpressions);
final AlarmUpdatedEvent event =
new AlarmUpdatedEvent(alarm.getTenantId(), alarm.getId(), alarm.getName(),
alarm.getDescription(), updatedAlarmExpression.getExpression(), newState,
alarm.getState(), true, oldExpressions, changedExpressions, unchangedExpressions,
newExpressions);
return event;
}
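
createAlarmUpdatedEvent classifies sub-expressions by set comparison: anything only in the old set was deleted or changed, anything only in the new set was added or is the changed form, and the overlap is unchanged. A minimal sketch of that classification using Guava's set views (oldExpressionsById and newExpressionsById are illustrative names for the id-to-expression maps built above):

Set<AlarmSubExpression> oldSet = new HashSet<>(oldExpressionsById.values());
Set<AlarmSubExpression> newSet = new HashSet<>(newExpressionsById.values());

// Only in the old alarm: deleted, or about to be replaced by a changed form.
Set<AlarmSubExpression> oldOrChanged = new HashSet<>(Sets.difference(oldSet, newSet));
// Only in the updated alarm: brand new, or the changed form of an existing one.
Set<AlarmSubExpression> newOrChanged = new HashSet<>(Sets.difference(newSet, oldSet));
// Present in both: carried over untouched.
Set<AlarmSubExpression> unchanged = new HashSet<>(Sets.intersection(oldSet, newSet));
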
@ -215,19 +225,22 @@ public class EventProcessingBoltTest {
}
public void testAlarmUpdatedEvent() {
final String updatedExpression = "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 " +
"and max(hpcs.compute.mem{instance_id=123,device=42}) > 90 " +
"and max(hpcs.compute.newLoad{instance_id=123,device=42}) > 5";
final String updatedExpression =
"avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 "
+ "and max(hpcs.compute.mem{instance_id=123,device=42}) > 90 "
+ "and max(hpcs.compute.newLoad{instance_id=123,device=42}) > 5";
final AlarmExpression updatedAlarmExpression = new AlarmExpression(updatedExpression);
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
updatedSubAlarms.add(subAlarms.get(0));
updatedSubAlarms.add(new SubAlarm(subAlarms.get(1).getId(), alarm.getId(), updatedAlarmExpression.getSubExpressions().get(1)));
updatedSubAlarms.add(new SubAlarm(UUID.randomUUID().toString(), alarm.getId(), updatedAlarmExpression.getSubExpressions().get(2)));
updatedSubAlarms.add(new SubAlarm(subAlarms.get(1).getId(), alarm.getId(),
updatedAlarmExpression.getSubExpressions().get(1)));
updatedSubAlarms.add(new SubAlarm(UUID.randomUUID().toString(), alarm.getId(),
updatedAlarmExpression.getSubExpressions().get(2)));
final AlarmUpdatedEvent event = createAlarmUpdatedEvent(alarm, alarm.getState(), updatedAlarmExpression,
updatedSubAlarms);
final AlarmUpdatedEvent event =
createAlarmUpdatedEvent(alarm, alarm.getState(), updatedAlarmExpression, updatedSubAlarms);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
@ -249,14 +262,13 @@ public class EventProcessingBoltTest {
}
private void sendSubAlarm(final SubAlarm subAlarm, String eventType) {
verify(collector, times(1)).emit(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID,
new Values(eventType,
new MetricDefinitionAndTenantId(
subAlarm.getExpression().getMetricDefinition(), TENANT_ID), subAlarm));
verify(collector, times(1)).emit(
EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID,
new Values(eventType, new MetricDefinitionAndTenantId(subAlarm.getExpression()
.getMetricDefinition(), TENANT_ID), subAlarm));
}
private static Map<String, AlarmSubExpression> createAlarmSubExpressionMap(
Alarm alarm) {
private static Map<String, AlarmSubExpression> createAlarmSubExpressionMap(Alarm alarm) {
final Map<String, AlarmSubExpression> oldAlarmSubExpressions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
oldAlarmSubExpressions.put(subAlarm.getId(), subAlarm.getExpression());

View File

@ -14,38 +14,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.reset;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.assertFalse;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Constants;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmState;
@ -59,6 +42,24 @@ import com.hpcloud.mon.domain.service.SubAlarmDAO;
import com.hpcloud.mon.domain.service.SubAlarmStatsRepository;
import com.hpcloud.streaming.storm.Streams;
import backtype.storm.Constants;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@Test
public class MetricAggregationBoltTest {
private static final String TENANT_ID = "42";
@ -103,11 +104,15 @@ public class MetricAggregationBoltTest {
when(dao.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(new Answer<List<SubAlarm>>() {
@Override
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
final MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) invocation.getArguments()[0];
final MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) invocation.getArguments()[0];
final List<SubAlarm> result = new ArrayList<>();
for (final SubAlarm subAlarm : subAlarms)
if (subAlarm.getExpression().getMetricDefinition().equals(metricDefinitionAndTenantId.metricDefinition))
for (final SubAlarm subAlarm : subAlarms) {
if (subAlarm.getExpression().getMetricDefinition()
.equals(metricDefinitionAndTenantId.metricDefinition)) {
result.add(subAlarm);
}
}
return result;
}
});
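
The stub above keys its answer on the invocation argument instead of returning a canned list: given a MetricDefinitionAndTenantId, find() returns exactly those sub-alarms whose metric definition matches it. Distilled, the Answer boils down to a filter over the test's subAlarms list (key is an illustrative local name):

final MetricDefinitionAndTenantId key =
    (MetricDefinitionAndTenantId) invocation.getArguments()[0];
final List<SubAlarm> result = new ArrayList<>();
for (final SubAlarm subAlarm : subAlarms) {
  if (subAlarm.getExpression().getMetricDefinition().equals(key.metricDefinition)) {
    result.add(subAlarm);  // only sub-alarms watching this metric pass through
  }
}
return result;
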
@ -121,15 +126,23 @@ public class MetricAggregationBoltTest {
public void shouldAggregateValues() {
long t1 = System.currentTimeMillis() / 1000;
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID), new Metric(metricDef1.name, metricDef1.dimensions, t1, 100));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID), new Metric(metricDef1.name, metricDef1.dimensions, t1, 80));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), new Metric(metricDef2.name, metricDef2.dimensions, t1, 50));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), new Metric(metricDef2.name, metricDef2.dimensions, t1, 40));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID), new Metric(
metricDef1.name, metricDef1.dimensions, t1, 100));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID), new Metric(
metricDef1.name, metricDef1.dimensions, t1, 80));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), new Metric(
metricDef2.name, metricDef2.dimensions, t1, 50));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), new Metric(
metricDef2.name, metricDef2.dimensions, t1, 40));
SubAlarmStats alarmData = bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID)).get("123");
SubAlarmStats alarmData =
bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID))
.get("123");
assertEquals(alarmData.getStats().getValue(t1), 90.0);
alarmData = bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID)).get("456");
alarmData =
bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID))
.get("456");
assertEquals(alarmData.getStats().getValue(t1), 45.0);
}
@ -154,13 +167,15 @@ public class MetricAggregationBoltTest {
assertEquals(subAlarm3.getState(), AlarmState.UNDETERMINED);
verify(collector, times(1)).emit(new Values(subAlarm1.getAlarmId(), subAlarm1));
// Have to reset the mock so it can tell the difference when subAlarm2 and subAlarm3 are emitted again.
// Have to reset the mock so it can tell the difference when subAlarm2 and subAlarm3 are emitted
// again.
reset(collector);
// Drive subAlarm1 to ALARM
bolt.execute(createMetricTuple(metricDef1, new Metric(metricDef1, t1, 99)));
// Drive subAlarm2 to ALARM and subAlarm3 to OK since they use the same MetricDefinition
bolt.execute(createMetricTuple(metricDef2, new Metric(metricDef2, System.currentTimeMillis() / 1000, 94)));
bolt.execute(createMetricTuple(metricDef2, new Metric(metricDef2,
System.currentTimeMillis() / 1000, 94)));
bolt.execute(tickTuple);
verify(collector, times(1)).ack(tickTuple);
@ -192,7 +207,8 @@ public class MetricAggregationBoltTest {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
final Tuple resendTuple = Testing.testTuple(Arrays.asList(EventProcessingBolt.RESEND,
final Tuple resendTuple =
Testing.testTuple(Arrays.asList(EventProcessingBolt.RESEND,
new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), subAlarm2), tupleParam);
bolt.execute(resendTuple);
@ -241,7 +257,8 @@ public class MetricAggregationBoltTest {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setStream(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM);
final Tuple lagTuple = Testing.testTuple(Arrays.asList(MetricAggregationBolt.METRICS_BEHIND), tupleParam);
final Tuple lagTuple =
Testing.testTuple(Arrays.asList(MetricAggregationBolt.METRICS_BEHIND), tupleParam);
bolt.execute(lagTuple);
verify(collector, times(1)).ack(lagTuple);
@ -274,7 +291,8 @@ public class MetricAggregationBoltTest {
tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
MetricDefinitionAndTenantId metricDefinitionAndTenantId = new MetricDefinitionAndTenantId(metricDef1, TENANT_ID);
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
new MetricDefinitionAndTenantId(metricDef1, TENANT_ID);
assertNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId));
bolt.execute(Testing.testTuple(Arrays.asList(EventProcessingBolt.CREATED,
@ -284,12 +302,14 @@ public class MetricAggregationBoltTest {
}
public void validateMetricDefUpdatedThreshold() {
final SubAlarmStats stats = updateEnsureMeasurementsKept(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) >= 80");
final SubAlarmStats stats =
updateEnsureMeasurementsKept(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) >= 80");
assertEquals(stats.getSubAlarm().getExpression().getThreshold(), 80.0);
}
public void validateMetricDefUpdatedOperator() {
final SubAlarmStats stats = updateEnsureMeasurementsKept(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) < 80");
final SubAlarmStats stats =
updateEnsureMeasurementsKept(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) < 80");
assertEquals(stats.getSubAlarm().getExpression().getOperator(), AlarmOperator.LT);
}
@ -302,17 +322,20 @@ public class MetricAggregationBoltTest {
}
public void validateMetricDefReplacedFunction() {
final SubAlarmStats stats = updateEnsureMeasurementsFlushed(subExpr2, "max(hpcs.compute.mem{id=5}, 60) < 80");
final SubAlarmStats stats =
updateEnsureMeasurementsFlushed(subExpr2, "max(hpcs.compute.mem{id=5}, 60) < 80");
assertEquals(stats.getSubAlarm().getExpression().getOperator(), AlarmOperator.LT);
}
public void validateMetricDefReplacedPeriods() {
final SubAlarmStats stats = updateEnsureMeasurementsFlushed(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) >= 80 times 7");
final SubAlarmStats stats =
updateEnsureMeasurementsFlushed(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) >= 80 times 7");
assertEquals(stats.getSubAlarm().getExpression().getPeriods(), 7);
}
public void validateMetricDefReplacedPeriod() {
final SubAlarmStats stats = updateEnsureMeasurementsFlushed(subExpr2, "avg(hpcs.compute.mem{id=5}, 120) >= 80");
final SubAlarmStats stats =
updateEnsureMeasurementsFlushed(subExpr2, "avg(hpcs.compute.mem{id=5}, 120) >= 80");
assertEquals(stats.getSubAlarm().getExpression().getPeriod(), 120);
}
@ -324,18 +347,19 @@ public class MetricAggregationBoltTest {
return stats;
}
private SubAlarmStats updateSubAlarmsStats(AlarmSubExpression subExpr,
String newSubExpression) {
private SubAlarmStats updateSubAlarmsStats(AlarmSubExpression subExpr, String newSubExpression) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
final MetricDefinitionAndTenantId metricDefinitionAndTenantId = new MetricDefinitionAndTenantId(subExpr.getMetricDefinition(), TENANT_ID);
final MetricDefinitionAndTenantId metricDefinitionAndTenantId =
new MetricDefinitionAndTenantId(subExpr.getMetricDefinition(), TENANT_ID);
assertNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId));
bolt.execute(Testing.testTuple(Arrays.asList(EventProcessingBolt.CREATED,
metricDefinitionAndTenantId, new SubAlarm("123", "1", subExpr)), tupleParam));
final SubAlarmStats oldStats = bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123");
final SubAlarmStats oldStats =
bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123");
assertEquals(oldStats.getSubAlarm().getExpression().getThreshold(), 90.0);
assertTrue(oldStats.getStats().addValue(80.0, System.currentTimeMillis() / 1000));
assertFalse(Double.isNaN(oldStats.getStats().getWindowValues()[0]));
@ -353,7 +377,8 @@ public class MetricAggregationBoltTest {
MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID);
MetricDefinitionAndTenantId metricDefinitionAndTenantId = new MetricDefinitionAndTenantId(metricDef1, TENANT_ID);
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
new MetricDefinitionAndTenantId(metricDef1, TENANT_ID);
bolt.getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId);
assertNotNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123"));
@ -365,17 +390,20 @@ public class MetricAggregationBoltTest {
}
public void shouldGetOrCreateSameMetricData() {
SubAlarmStatsRepository data = bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID));
SubAlarmStatsRepository data =
bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID));
assertNotNull(data);
assertEquals(bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID)), data);
assertEquals(
bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID)),
data);
}
private Tuple createMetricTuple(final MetricDefinition metricDef,
final Metric metric) {
private Tuple createMetricTuple(final MetricDefinition metricDef, final Metric metric) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(MetricFilteringBolt.FIELDS);
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
return Testing.testTuple(Arrays.asList(new MetricDefinitionAndTenantId(metricDef, TENANT_ID), metric), tupleParam);
return Testing.testTuple(
Arrays.asList(new MetricDefinitionAndTenantId(metricDef, TENANT_ID), metric), tupleParam);
}
private static class MockMetricAggregationBolt extends MetricAggregationBolt {
@ -389,8 +417,9 @@ public class MetricAggregationBoltTest {
@Override
protected long currentTimeSeconds() {
if (currentTime != 0)
if (currentTime != 0) {
return currentTime;
}
return super.currentTimeSeconds();
}
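
MockMetricAggregationBolt controls time by overriding a protected clock hook, so the tests can advance time explicitly instead of sleeping; MockMetricFilteringBolt in the next file exposes getCurrentTime/setCurrentTime for the same reason. A self-contained sketch of the seam (both class names here are hypothetical, not part of the codebase):

// A component that reads time through an overridable hook.
class TimedComponent {
  protected long currentTimeSeconds() {
    return System.currentTimeMillis() / 1000;
  }
}

// A test double that pins the clock once a time has been set.
class FixedClockComponent extends TimedComponent {
  private long currentTime;  // 0 means "fall back to the real clock"

  void setCurrentTime(long currentTime) {
    this.currentTime = currentTime;
  }

  @Override
  protected long currentTimeSeconds() {
    if (currentTime != 0) {
      return currentTime;
    }
    return super.currentTimeSeconds();
  }
}
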

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Mockito.mock;
@ -23,24 +24,6 @@ import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.mockito.verification.VerificationMode;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.Metric;
@ -51,40 +34,62 @@ import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmMetricDefinition;
import com.hpcloud.streaming.storm.Streams;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.mockito.verification.VerificationMode;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
@Test
public class MetricFilteringBoltTest {
private List<SubAlarm> subAlarms;
private List<SubAlarm> duplicateMetricSubAlarms;
private final static String TEST_TENANT_ID = "42";
private long metricTimestamp = System.currentTimeMillis()/1000; // Make sure the metric timestamp is always unique
private long metricTimestamp = System.currentTimeMillis() / 1000; // Make sure the metric
// timestamp is always unique
@BeforeMethod
protected void beforeMethod() {
final String expression = "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 " +
"and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 " +
"and max(hpcs.compute.load{instance_id=123,device=42}) > 5";
final String expression =
"avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 "
+ "and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 "
+ "and max(hpcs.compute.load{instance_id=123,device=42}) > 5";
subAlarms = createSubAlarmsForAlarm("111111112222222222233333333334", expression);
duplicateMetricSubAlarms = createSubAlarmsForAlarm(UUID.randomUUID().toString(),
duplicateMetricSubAlarms =
createSubAlarmsForAlarm(UUID.randomUUID().toString(),
"max(hpcs.compute.load{instance_id=123,device=42}) > 8");
subAlarms.addAll(duplicateMetricSubAlarms);
}
private List<SubAlarm> createSubAlarmsForAlarm(final String alarmId,
final String expression) {
private List<SubAlarm> createSubAlarmsForAlarm(final String alarmId, final String expression) {
final AlarmExpression alarmExpression = new AlarmExpression(expression);
final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
final List<SubAlarm> result = new ArrayList<SubAlarm>(subExpressions.size());
for (int i = 0; i < subExpressions.size(); i++) {
final SubAlarm subAlarm = new SubAlarm(UUID.randomUUID().toString(), alarmId, subExpressions.get(i));
final SubAlarm subAlarm =
new SubAlarm(UUID.randomUUID().toString(), alarmId, subExpressions.get(i));
result.add(subAlarm);
}
return result;
}
private MockMetricFilteringBolt createBolt(List<SubAlarmMetricDefinition> initialMetricDefinitions,
final OutputCollector collector, boolean willEmit) {
private MockMetricFilteringBolt createBolt(
List<SubAlarmMetricDefinition> initialMetricDefinitions, final OutputCollector collector,
boolean willEmit) {
final MetricDefinitionDAO dao = mock(MetricDefinitionDAO.class);
when(dao.findForAlarms()).thenReturn(initialMetricDefinitions);
MockMetricFilteringBolt bolt = new MockMetricFilteringBolt(dao);
@ -96,7 +101,8 @@ public class MetricFilteringBoltTest {
if (willEmit) {
// Validate the prepare emits the initial Metric Definitions
for (final SubAlarmMetricDefinition metricDefinition : initialMetricDefinitions) {
verify(collector, times(1)).emit(new Values(metricDefinition.getMetricDefinitionAndTenantId(), null));
verify(collector, times(1)).emit(
new Values(metricDefinition.getMetricDefinitionAndTenantId(), null));
}
}
return bolt;
@ -105,23 +111,31 @@ public class MetricFilteringBoltTest {
public void testLagging() {
final OutputCollector collector = mock(OutputCollector.class);
final MockMetricFilteringBolt bolt = createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector, true);
final MockMetricFilteringBolt bolt =
createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector, true);
final long prepareTime = bolt.getCurrentTime();
final MetricDefinition metricDefinition = subAlarms.get(0).getExpression().getMetricDefinition();
final MetricDefinition metricDefinition =
subAlarms.get(0).getExpression().getMetricDefinition();
final long oldestTimestamp = prepareTime - MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT;
final Tuple lateMetricTuple = createMetricTuple(metricDefinition, oldestTimestamp, new Metric(metricDefinition, oldestTimestamp, 42.0));
final Tuple lateMetricTuple =
createMetricTuple(metricDefinition, oldestTimestamp, new Metric(metricDefinition,
oldestTimestamp, 42.0));
bolt.execute(lateMetricTuple);
verify(collector, times(1)).ack(lateMetricTuple);
bolt.setCurrentTime(prepareTime + MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT);
final Tuple lateMetricTuple2 = createMetricTuple(metricDefinition, prepareTime, new Metric(metricDefinition, prepareTime, 42.0));
final Tuple lateMetricTuple2 =
createMetricTuple(metricDefinition, prepareTime, new Metric(metricDefinition, prepareTime,
42.0));
bolt.execute(lateMetricTuple2);
verify(collector, times(1)).ack(lateMetricTuple2);
verify(collector, times(1)).emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Values(MetricAggregationBolt.METRICS_BEHIND));
bolt.setCurrentTime(prepareTime + 2 * MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT);
long caughtUpTimestamp = bolt.getCurrentTime() - MetricFilteringBolt.MIN_LAG_VALUE_DEFAULT;
final Tuple metricTuple = createMetricTuple(metricDefinition, caughtUpTimestamp, new Metric(metricDefinition, caughtUpTimestamp, 42.0));
final Tuple metricTuple =
createMetricTuple(metricDefinition, caughtUpTimestamp, new Metric(metricDefinition,
caughtUpTimestamp, 42.0));
bolt.execute(metricTuple);
// Metrics are caught up so there should not be another METRICS_BEHIND message
verify(collector, times(1)).ack(metricTuple);
@ -132,16 +146,22 @@ public class MetricFilteringBoltTest {
public void testLaggingTooLong() {
final OutputCollector collector = mock(OutputCollector.class);
final MockMetricFilteringBolt bolt = createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector, true);
final MockMetricFilteringBolt bolt =
createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector, true);
long prepareTime = bolt.getCurrentTime();
final MetricDefinition metricDefinition = subAlarms.get(0).getExpression().getMetricDefinition();
// Fake sending metrics for MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT * MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT seconds
final MetricDefinition metricDefinition =
subAlarms.get(0).getExpression().getMetricDefinition();
// Fake sending metrics for MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT *
// MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT seconds
boolean first = true;
// Need to send MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT + 1 metrics because the lag message is not
// Need to send MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT + 1 metrics because the lag message
// is not
// output on the first one.
for (int i = 0; i < MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT + 1; i++) {
final Tuple lateMetricTuple = createMetricTuple(metricDefinition, prepareTime, new Metric(metricDefinition, prepareTime, 42.0));
final Tuple lateMetricTuple =
createMetricTuple(metricDefinition, prepareTime, new Metric(metricDefinition,
prepareTime, 42.0));
bolt.setCurrentTime(prepareTime + MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT);
bolt.execute(lateMetricTuple);
verify(collector, times(1)).ack(lateMetricTuple);
@ -154,11 +174,14 @@ public class MetricFilteringBoltTest {
}
// One more
long timestamp = bolt.getCurrentTime() - MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT;
final Tuple metricTuple = createMetricTuple(metricDefinition, timestamp, new Metric(metricDefinition, timestamp, 42.0));
final Tuple metricTuple =
createMetricTuple(metricDefinition, timestamp,
new Metric(metricDefinition, timestamp, 42.0));
bolt.execute(metricTuple);
verify(collector, times(1)).ack(metricTuple);
// Won't be any more of these
verify(collector, times(MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT)).emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
verify(collector, times(MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT)).emit(
MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Values(MetricAggregationBolt.METRICS_BEHIND));
}
@ -184,11 +207,13 @@ public class MetricFilteringBoltTest {
MetricFilteringBolt.clearMetricDefinitions();
final OutputCollector collector1 = mock(OutputCollector.class);
final MetricFilteringBolt bolt1 = createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector1, true);
final MetricFilteringBolt bolt1 =
createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector1, true);
final OutputCollector collector2 = mock(OutputCollector.class);
final MetricFilteringBolt bolt2 = createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector2, false);
final MetricFilteringBolt bolt2 =
createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector2, false);
// First ensure metrics don't pass the filter
verifyMetricFiltered(collector1, bolt1);
@ -200,8 +225,7 @@ public class MetricFilteringBoltTest {
testDeleteSubAlarms(bolt1, collector1, bolt2, collector2);
}
private void sendMetricCreation(final OutputCollector collector1,
final MetricFilteringBolt bolt1) {
private void sendMetricCreation(final OutputCollector collector1, final MetricFilteringBolt bolt1) {
for (final SubAlarm subAlarm : subAlarms) {
final Tuple tuple = createMetricDefinitionTuple(subAlarm);
bolt1.execute(tuple);
@ -214,8 +238,7 @@ public class MetricFilteringBoltTest {
sendMetricsAndVerify(collector1, bolt1, never());
}
private void verifyMetricPassed(final OutputCollector collector1,
final MetricFilteringBolt bolt1) {
private void verifyMetricPassed(final OutputCollector collector1, final MetricFilteringBolt bolt1) {
sendMetricsAndVerify(collector1, bolt1, times(1));
}
@ -224,7 +247,9 @@ public class MetricFilteringBoltTest {
for (final SubAlarm subAlarm : subAlarms) {
// First do a MetricDefinition that is an exact match
final MetricDefinition metricDefinition = subAlarm.getExpression().getMetricDefinition();
final Tuple exactTuple = createMetricTuple(metricDefinition, metricTimestamp++, new Metric(metricDefinition, metricTimestamp, 42.0));
final Tuple exactTuple =
createMetricTuple(metricDefinition, metricTimestamp++, new Metric(metricDefinition,
metricTimestamp, 42.0));
bolt1.execute(exactTuple);
verify(collector1, times(1)).ack(exactTuple);
verify(collector1, howMany).emit(new Values(exactTuple.getValue(0), exactTuple.getValue(2)));
@ -232,9 +257,11 @@ public class MetricFilteringBoltTest {
// Now do a MetricDefinition with an extra dimension that should still match the SubAlarm
final Map<String, String> extraDimensions = new HashMap<>(metricDefinition.dimensions);
extraDimensions.put("group", "group_a");
final MetricDefinition inexactMetricDef = new MetricDefinition(metricDefinition.name, extraDimensions);
final MetricDefinition inexactMetricDef =
new MetricDefinition(metricDefinition.name, extraDimensions);
Metric inexactMetric = new Metric(inexactMetricDef, metricTimestamp, 42.0);
final Tuple inexactTuple = createMetricTuple(metricDefinition, metricTimestamp++, inexactMetric);
final Tuple inexactTuple =
createMetricTuple(metricDefinition, metricTimestamp++, inexactMetric);
bolt1.execute(inexactTuple);
verify(collector1, times(1)).ack(inexactTuple);
// We want the MetricDefinitionAndTenantId from the exact tuple, but the inexactMetric
@ -244,10 +271,12 @@ public class MetricFilteringBoltTest {
public void testAllInitial() {
MetricFilteringBolt.clearMetricDefinitions();
final List<SubAlarmMetricDefinition> initialMetricDefinitions = new ArrayList<>(subAlarms.size());
final List<SubAlarmMetricDefinition> initialMetricDefinitions =
new ArrayList<>(subAlarms.size());
for (final SubAlarm subAlarm : subAlarms) {
initialMetricDefinitions.add(new SubAlarmMetricDefinition(subAlarm.getId(),
new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(), TEST_TENANT_ID)));
new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(),
TEST_TENANT_ID)));
}
final OutputCollector collector1 = mock(OutputCollector.class);
@ -260,7 +289,8 @@ public class MetricFilteringBoltTest {
testDeleteSubAlarms(bolt1, collector1, bolt2, collector2);
}
private void testDeleteSubAlarms(MetricFilteringBolt bolt1, OutputCollector collector1, MetricFilteringBolt bolt2, OutputCollector collector2) {
private void testDeleteSubAlarms(MetricFilteringBolt bolt1, OutputCollector collector1,
MetricFilteringBolt bolt2, OutputCollector collector2) {
// Now ensure metrics pass the filter
verifyMetricPassed(collector1, bolt1);
verifyMetricPassed(collector2, bolt2);
@ -282,7 +312,8 @@ public class MetricFilteringBoltTest {
verifyMetricFiltered(collector2, bolt2);
}
private void deleteSubAlarms(MetricFilteringBolt bolt, OutputCollector collector, final List<SubAlarm> otherSubAlarms) {
private void deleteSubAlarms(MetricFilteringBolt bolt, OutputCollector collector,
final List<SubAlarm> otherSubAlarms) {
for (final SubAlarm subAlarm : otherSubAlarms) {
final Tuple tuple = createMetricDefinitionDeletionTuple(subAlarm);
bolt.execute(tuple);
@ -294,10 +325,10 @@ public class MetricFilteringBoltTest {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(EventProcessingBolt.CREATED,
new MetricDefinitionAndTenantId(
subAlarm.getExpression().getMetricDefinition(), TEST_TENANT_ID),
subAlarm), tupleParam);
final Tuple tuple =
Testing.testTuple(Arrays.asList(EventProcessingBolt.CREATED,
new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(),
TEST_TENANT_ID), subAlarm), tupleParam);
return tuple;
}
@ -305,23 +336,22 @@ public class MetricFilteringBoltTest {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(EventProcessingBolt.DELETED,
new MetricDefinitionAndTenantId(
subAlarm.getExpression().getMetricDefinition(), TEST_TENANT_ID),
subAlarm.getId()), tupleParam);
final Tuple tuple =
Testing.testTuple(Arrays.asList(EventProcessingBolt.DELETED,
new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(),
TEST_TENANT_ID), subAlarm.getId()), tupleParam);
return tuple;
}
private Tuple createMetricTuple(final MetricDefinition metricDefinition,
final long timestamp,
private Tuple createMetricTuple(final MetricDefinition metricDefinition, final long timestamp,
final Metric metric) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(MetricSpout.FIELDS);
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(
new MetricDefinitionAndTenantId(metricDefinition, TEST_TENANT_ID),
timestamp, metric), tupleParam);
final Tuple tuple =
Testing.testTuple(Arrays.asList(new MetricDefinitionAndTenantId(metricDefinition,
TEST_TENANT_ID), timestamp, metric), tupleParam);
return tuple;
}
}
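
Every tuple these bolt tests feed to execute() is built the same way: MkTupleParam fixes the declared field names and the stream id so the bolt reads the tuple exactly as it would inside a running topology, and Testing.testTuple supplies the values. The recurring shape, using the metric-stream fields from createMetricTuple above:

final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(MetricSpout.FIELDS);         // field names the receiving bolt expects
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);  // stream the bolt is subscribed to
final Tuple tuple =
    Testing.testTuple(
        Arrays.asList(new MetricDefinitionAndTenantId(metricDefinition, TEST_TENANT_ID),
            timestamp, metric), tupleParam);
bolt.execute(tuple);  // assertions then verify ack()/emit() on the mocked collector
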

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import static org.testng.Assert.assertEquals;
@ -34,24 +35,28 @@ public class PropertyFinderTest {
public void shouldUseNewValue() {
final int expectedValue = 45;
System.setProperty(PROPERTY_NAME, String.valueOf(expectedValue));
assertEquals(expectedValue, PropertyFinder.getIntProperty(PROPERTY_NAME, 30, 0, Integer.MAX_VALUE));
assertEquals(expectedValue,
PropertyFinder.getIntProperty(PROPERTY_NAME, 30, 0, Integer.MAX_VALUE));
}
public void shouldUseDefaultValueBecausePropertyNotSet() {
final int defaultValue = 45;
assertEquals(defaultValue, PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 0, Integer.MAX_VALUE));
assertEquals(defaultValue,
PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 0, Integer.MAX_VALUE));
}
public void shouldUseDefaultValueBecausePropertyNotANumber() {
final int defaultValue = 45;
System.setProperty(PROPERTY_NAME, "AAA");
assertEquals(defaultValue, PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 0, Integer.MAX_VALUE));
assertEquals(defaultValue,
PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 0, Integer.MAX_VALUE));
}
public void shouldUseDefaultValueBecausePropertyTooSmall() {
final int defaultValue = 45;
System.setProperty(PROPERTY_NAME, "0");
assertEquals(defaultValue, PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 1, Integer.MAX_VALUE));
assertEquals(defaultValue,
PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 1, Integer.MAX_VALUE));
}
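
Taken together, these cases pin down the contract of PropertyFinder.getIntProperty: return the system property when it is set, parses as an integer, and falls inside [minValue, maxValue]; otherwise return the default. A sketch of that contract, not the class's actual implementation:

// Hypothetical equivalent of the behaviour the tests assert.
static int getIntProperty(String name, int defaultValue, int minValue, int maxValue) {
  final String raw = System.getProperty(name);
  if (raw != null) {
    try {
      final int value = Integer.parseInt(raw);
      if (value >= minValue && value <= maxValue) {
        return value;
      }
    } catch (NumberFormatException e) {
      // fall through to the default
    }
  }
  return defaultValue;
}
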
public void shouldUseDefaultValueBecausePropertyTooLarge() {

View File

@ -14,24 +14,26 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding.deserializer;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import java.util.Collections;
import org.testng.annotations.Test;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.util.Serialization;
import org.testng.annotations.Test;
import java.util.Collections;
@Test
public class EventDeserializerTest {
private static final String ALARM_EXPRESSION = "avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3";
private static final String ALARM_EXPRESSION =
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3";
private static final String ALARM_NAME = "An Alarm";
private static final String ALARM_DESCRIPTION = "An Alarm Description";
private static final String ALARM_ID = "123";
@ -47,8 +49,8 @@ public class EventDeserializerTest {
}
public void shouldDeserializeAlarmUpdatedEvent() {
roundTrip(new AlarmUpdatedEvent(TENANT_ID, ALARM_ID, ALARM_NAME, ALARM_DESCRIPTION, ALARM_EXPRESSION,
AlarmState.OK, AlarmState.OK, false, null, null, null, null));
roundTrip(new AlarmUpdatedEvent(TENANT_ID, ALARM_ID, ALARM_NAME, ALARM_DESCRIPTION,
ALARM_EXPRESSION, AlarmState.OK, AlarmState.OK, false, null, null, null, null));
}
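
Each of these tests delegates to roundTrip, which presumably serializes the event, deserializes the result, and asserts equality with the original; that round trip is what gives the one-line tests their force. A generic sketch of the pattern, using Jackson as a stand-in for com.hpcloud.util.Serialization (whose API is not shown in this diff):

import com.fasterxml.jackson.databind.ObjectMapper;

static <T> void assertRoundTrips(T event, Class<T> type) throws Exception {
  final ObjectMapper mapper = new ObjectMapper();
  final String json = mapper.writeValueAsString(event);  // serialize to JSON
  final T copy = mapper.readValue(json, type);            // and back again
  assertEquals(copy, event);                              // relies on value-based equals()
}
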
private void roundTrip(Object event) {