Changes to match the new style guidelines

Craig Bryant 2014-07-07 16:19:53 -06:00
parent 797c60f567
commit 44851750e6
50 changed files with 3148 additions and 2716 deletions
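The hunks below apply the same handful of mechanical conventions throughout the tree. As a rough before/after sketch only (the class and member names here are invented for illustration and are not part of the commit), the new style braces every if body, renames static loggers from LOG to logger, groups project (com.hpcloud) imports ahead of other third-party imports with java/javax last, and wraps long lines at roughly 100 columns:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Example {
  private static final Logger logger = LoggerFactory.getLogger(Example.class);

  public boolean accept(final Object value) {
    // Old style: "if (value == null) return false;" with no braces and a logger named LOG.
    if (value == null) {
      return false;
    }
    logger.info("accepted {}", value);
    return true;
  }
}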


@@ -14,9 +14,10 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon;

public class EventSpoutConfig extends KafkaSpoutConfig {
  private static final long serialVersionUID = -8129774848323598123L;
}


@@ -1,16 +1,34 @@
/*
 * Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon;

import com.hpcloud.configuration.KafkaConsumerConfiguration;

import com.fasterxml.jackson.annotation.JsonProperty;

import java.io.Serializable;

public class KafkaSpoutConfig implements Serializable {
  private static final long serialVersionUID = -6477042435089264571L;

  @JsonProperty
  public Integer maxWaitTime = 100;

  public KafkaConsumerConfiguration kafkaConsumerConfiguration;
}


@@ -14,9 +14,10 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon;

public class MetricSpoutConfig extends KafkaSpoutConfig {
  private static final long serialVersionUID = -4285448019855024921L;
}


@@ -14,19 +14,19 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon;

import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.infrastructure.thresholding.DataSourceFactory;

import org.hibernate.validator.constraints.NotEmpty;

import java.util.Set;

import javax.validation.Valid;
import javax.validation.constraints.NotNull;

/**
 * Thresholding configuration.
 */


@@ -14,16 +14,20 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon;

import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.core.util.StatusPrinter;

import com.hpcloud.util.Injector;
import com.hpcloud.util.config.ConfigurationFactory;

import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.StormSubmitter;
import backtype.storm.generated.StormTopology;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -33,60 +37,60 @@ import java.io.File;
 * Alarm thresholding engine.
 */
public class ThresholdingEngine {
  private static final Logger logger = LoggerFactory.getLogger(ThresholdingEngine.class);

  private final ThresholdingConfiguration threshConfig;
  private final String topologyName;
  private final boolean local;

  public ThresholdingEngine(ThresholdingConfiguration threshConfig, String topologyName,
      boolean local) {
    this.threshConfig = threshConfig;
    this.topologyName = topologyName;
    this.local = local;
    logger.info("local set to {}", local);
  }

  public static final ThresholdingConfiguration configFor(String configFileName) throws Exception {
    return ConfigurationFactory
        .<ThresholdingConfiguration>forClass(ThresholdingConfiguration.class).build(
            new File(configFileName));
  }

  public static void main(String... args) throws Exception {

    // Let's show the logging status.
    StatusPrinter.print((LoggerContext) LoggerFactory.getILoggerFactory());

    if (args.length < 2) {
      logger.error("Expected configuration file name and topology name arguments");
      System.exit(1);
    }

    logger.info("Instantiating ThresholdingEngine with config file: {}, topology: {}", args[0],
        args[1]);

    ThresholdingEngine engine =
        new ThresholdingEngine(configFor(args[0]), args[1], args.length > 2 ? true : false);
    engine.configure();
    engine.run();
  }

  protected void configure() {
    Injector.registerModules(new TopologyModule(threshConfig));
  }

  protected void run() throws Exception {
    Config config = Injector.getInstance(Config.class);
    StormTopology topology = Injector.getInstance(StormTopology.class);
    config.registerSerialization(com.hpcloud.mon.domain.model.SubAlarm.class);

    if (local) {
      logger.info("submitting topology {} to local storm cluster", topologyName);
      new LocalCluster().submitTopology(topologyName, config, topology);
    } else {
      logger.info("submitting topology {} to non-local storm cluster", topologyName);
      StormSubmitter.submitTopology(topologyName, config, topology);
    }
  }
}


@@ -14,9 +14,17 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon;

import com.hpcloud.mon.infrastructure.thresholding.AlarmThresholdingBolt;
import com.hpcloud.mon.infrastructure.thresholding.EventProcessingBolt;
import com.hpcloud.mon.infrastructure.thresholding.EventSpout;
import com.hpcloud.mon.infrastructure.thresholding.MetricAggregationBolt;
import com.hpcloud.mon.infrastructure.thresholding.MetricFilteringBolt;
import com.hpcloud.mon.infrastructure.thresholding.MetricSpout;
import com.hpcloud.mon.infrastructure.thresholding.deserializer.EventDeserializer;
import com.hpcloud.util.Injector;

import backtype.storm.Config;
import backtype.storm.generated.StormTopology;
@@ -26,14 +34,8 @@ import backtype.storm.tuple.Fields;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;

import javax.inject.Named;

/**
 * Configures types for the thresholding topology.
 */
@@ -57,8 +59,7 @@ public class TopologyModule extends AbstractModule {
  }

  @Override
  protected void configure() {}

  @Provides
  Config stormConfig() {
@@ -100,21 +101,21 @@ public class TopologyModule extends AbstractModule {
    // MaaS Event -> Events
    builder.setBolt("event-bolt", new EventProcessingBolt(), config.eventBoltThreads)
        .shuffleGrouping("event-spout").setNumTasks(config.eventBoltTasks);

    // Metrics / Event -> Filtering
    builder
        .setBolt("filtering-bolt", new MetricFilteringBolt(config.database),
            config.filteringBoltThreads).shuffleGrouping("metrics-spout")
        .allGrouping("event-bolt", EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID)
        .allGrouping("event-bolt", EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID)
        .setNumTasks(config.filteringBoltTasks);

    // Filtering / Event -> Aggregation
    builder
        .setBolt("aggregation-bolt",
            new MetricAggregationBolt(config.database, config.sporadicMetricNamespaces),
            config.aggregationBoltThreads)
        .fieldsGrouping("filtering-bolt", new Fields(MetricFilteringBolt.FIELDS[0]))
        .allGrouping("filtering-bolt", MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM)
        .fieldsGrouping("event-bolt", EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID,
@@ -124,9 +125,10 @@ public class TopologyModule extends AbstractModule {
        .setNumTasks(config.aggregationBoltTasks);

    // Aggregation / Event -> Thresholding
    builder
        .setBolt("thresholding-bolt",
            new AlarmThresholdingBolt(config.database, config.kafkaProducerConfig),
            config.thresholdingBoltThreads)
        .fieldsGrouping("aggregation-bolt", new Fields(MetricAggregationBolt.FIELDS[0]))
        .fieldsGrouping("event-bolt", EventProcessingBolt.ALARM_EVENT_STREAM_ID,
            new Fields(EventProcessingBolt.ALARM_EVENT_STREAM_FIELDS[1]))


@@ -14,19 +14,20 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.domain.model;

import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.domain.common.AbstractEntity;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * An alarm comprised of sub-alarms.
 */
@@ -43,8 +44,8 @@ public class Alarm extends AbstractEntity {
  public Alarm() {
  }

  public Alarm(String id, String tenantId, String name, String description,
      AlarmExpression expression, List<SubAlarm> subAlarms, AlarmState state, boolean actionsEnabled) {
    this.id = id;
    this.tenantId = tenantId;
    this.name = name;
@@ -56,47 +57,59 @@ public class Alarm extends AbstractEntity {
  }

  static String buildStateChangeReason(AlarmState alarmState, List<String> subAlarmExpressions) {
    if (AlarmState.UNDETERMINED.equals(alarmState)) {
      return String.format("No data was present for the sub-alarms: %s", subAlarmExpressions);
    } else if (AlarmState.ALARM.equals(alarmState)) {
      return String.format("Thresholds were exceeded for the sub-alarms: %s", subAlarmExpressions);
    } else {
      return "The alarm threshold(s) have not been exceeded";
    }
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!super.equals(obj)) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    Alarm other = (Alarm) obj;
    if (!compareObjects(expression, other.expression)) {
      return false;
    }
    if (!compareObjects(name, other.name)) {
      return false;
    }
    if (!compareObjects(description, other.description)) {
      return false;
    }
    if (state != other.state) {
      return false;
    }
    if (actionsEnabled != other.actionsEnabled) {
      return false;
    }
    if (!compareObjects(subAlarms, other.subAlarms)) {
      return false;
    }
    if (!compareObjects(tenantId, other.tenantId)) {
      return false;
    }
    return true;
  }

  private boolean compareObjects(final Object o1, final Object o2) {
    if (o1 == null) {
      if (o2 != null) {
        return false;
      }
    } else if (!o1.equals(o2)) {
      return false;
    }
    return true;
  }
@@ -115,35 +128,42 @@ public class Alarm extends AbstractEntity {

    // Handle UNDETERMINED state
    if (!unitializedSubAlarms.isEmpty()) {
      if (AlarmState.UNDETERMINED.equals(initialState)) {
        return false;
      }
      state = AlarmState.UNDETERMINED;
      stateChangeReason = buildStateChangeReason(state, unitializedSubAlarms);
      return true;
    }

    Map<AlarmSubExpression, Boolean> subExpressionValues =
        new HashMap<AlarmSubExpression, Boolean>();
    for (SubAlarm subAlarm : subAlarms.values()) {
      subExpressionValues.put(subAlarm.getExpression(),
          AlarmState.ALARM.equals(subAlarm.getState()));
    }

    // Handle ALARM state
    if (expression.evaluate(subExpressionValues)) {
      if (AlarmState.ALARM.equals(initialState)) {
        return false;
      }
      List<String> subAlarmExpressions = new ArrayList<String>();
      for (SubAlarm subAlarm : subAlarms.values()) {
        if (AlarmState.ALARM.equals(subAlarm.getState())) {
          subAlarmExpressions.add(subAlarm.getExpression().toString());
        }
      }
      state = AlarmState.ALARM;
      stateChangeReason = buildStateChangeReason(state, subAlarmExpressions);
      return true;
    }

    if (AlarmState.OK.equals(initialState)) {
      return false;
    }
    state = AlarmState.OK;
    stateChangeReason = buildStateChangeReason(state, null);
    return true;
@@ -224,8 +244,9 @@ public class Alarm extends AbstractEntity {
  public void setSubAlarms(List<SubAlarm> subAlarms) {
    this.subAlarms = new HashMap<String, SubAlarm>();
    for (SubAlarm subAlarm : subAlarms) {
      this.subAlarms.put(subAlarm.getId(), subAlarm);
    }
  }

  public void setTenantId(String tenantId) {
@@ -234,8 +255,9 @@ public class Alarm extends AbstractEntity {
  @Override
  public String toString() {
    return String.format(
        "Alarm [tenantId=%s, name=%s, description=%s, state=%s, actionsEnabled=%s]", tenantId,
        name, description, state, actionsEnabled);
  }

  public void updateSubAlarm(SubAlarm subAlarm) {


@@ -14,64 +14,73 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.domain.model;

import com.hpcloud.mon.common.model.metric.MetricDefinition;

import java.io.Serializable;

public class MetricDefinitionAndTenantId implements Serializable {
  private static final long serialVersionUID = -4224596705186481749L;

  public MetricDefinition metricDefinition;
  public String tenantId;

  public MetricDefinitionAndTenantId(MetricDefinition metricDefinition, String tenantId) {
    this.metricDefinition = metricDefinition;
    this.tenantId = tenantId;
  }

  @Override
  public int hashCode() {
    int result = 0;
    if (this.metricDefinition != null) {
      result += this.metricDefinition.hashCode();
    }
    if (this.tenantId != null) {
      result = result * 31 + this.tenantId.hashCode();
    }
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final MetricDefinitionAndTenantId other = (MetricDefinitionAndTenantId) obj;
    if (!compareObjects(this.tenantId, other.tenantId)) {
      return false;
    }
    if (!compareObjects(this.metricDefinition, other.metricDefinition)) {
      return false;
    }
    return true;
  }

  private boolean compareObjects(final Object o1, final Object o2) {
    if (o1 == null) {
      if (o2 != null) {
        return false;
      }
    } else if (!o1.equals(o2)) {
      return false;
    }
    return true;
  }

  @Override
  public String toString() {
    return String.format("MetricDefinitionAndTenantId tenantId=%s metricDefinition=%s",
        this.tenantId, this.metricDefinition);
  }
}


@@ -14,8 +14,11 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.domain.model;

import com.hpcloud.mon.common.model.metric.MetricDefinition;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -24,245 +27,281 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * This class is used to find any matching MetricDefinitionAndTenantId instances that match a given
 * MetricDefinitionAndTenantId. This class has no way of handling duplicate
 * MetricDefinitionAndTenantIds so it assume some other handles that issue.
 *
 * The actual MetricDefinitionAndTenantId is not kept in the last Map in order to save heap space.
 * It is expected that possibly millions of metrics may be stored in the Matcher and so by only
 * storing the DiminsionPairs instead of the whole MetricDefinitionAndTenantId, a significant amount
 * of heap space will be saved thus reducing swapping. The MetricDefinitionAndTenantId is recreated
 * when returned but since it will be just sent on and then the reference dropped, the object will
 * be quickly and easily garbage collected. Testing shows that this algorithm is faster than keeping
 * the whole MetricDefinitionAndTenantId in the Map.
 */
public class MetricDefinitionAndTenantIdMatcher {
  final Map<String, Map<String, Map<DimensionSet, Object>>> byTenantId = new ConcurrentHashMap<>();
  private final static DimensionSet EMPTY_DIMENSION_SET = new DimensionSet(new DimensionPair[0]);
  private final static Object placeHolder = new Object();
  @SuppressWarnings("unchecked")
  private final static List<MetricDefinitionAndTenantId> EMPTY_LIST = Collections.EMPTY_LIST;

  public void add(MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
    Map<String, Map<DimensionSet, Object>> byMetricName =
        byTenantId.get(metricDefinitionAndTenantId.tenantId);
    if (byMetricName == null) {
      byMetricName = new ConcurrentHashMap<>();
      byTenantId.put(metricDefinitionAndTenantId.tenantId, byMetricName);
    }
    Map<DimensionSet, Object> byDimensionSet =
        byMetricName.get(metricDefinitionAndTenantId.metricDefinition.name);
    if (byDimensionSet == null) {
      byDimensionSet = new ConcurrentHashMap<>();
      byMetricName.put(metricDefinitionAndTenantId.metricDefinition.name, byDimensionSet);
    }
    final DimensionSet dimensionSet =
        createDimensionSet(metricDefinitionAndTenantId.metricDefinition);
    byDimensionSet.put(dimensionSet, placeHolder);
  }

  private DimensionSet createDimensionSet(MetricDefinition metricDefinition) {
    return new DimensionSet(createPairs(metricDefinition));
  }

  public boolean remove(MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
    final Map<String, Map<DimensionSet, Object>> byMetricName =
        byTenantId.get(metricDefinitionAndTenantId.tenantId);
    if (byMetricName == null) {
      return false;
    }

    final Map<DimensionSet, Object> byDimensionSet =
        byMetricName.get(metricDefinitionAndTenantId.metricDefinition.name);
    if (byDimensionSet == null) {
      return false;
    }
    final DimensionSet dimensionSet =
        createDimensionSet(metricDefinitionAndTenantId.metricDefinition);
    final boolean result = byDimensionSet.remove(dimensionSet) != null;
    if (result) {
      if (byDimensionSet.isEmpty()) {
        byMetricName.remove(metricDefinitionAndTenantId.metricDefinition.name);
        if (byMetricName.isEmpty()) {
          byTenantId.remove(metricDefinitionAndTenantId.tenantId);
        }
      }
    }
    return result;
  }

  public List<MetricDefinitionAndTenantId> match(final MetricDefinitionAndTenantId toMatch) {
    final Map<String, Map<DimensionSet, Object>> byMetricName = byTenantId.get(toMatch.tenantId);
    if (byMetricName == null) {
      return EMPTY_LIST;
    }

    final Map<DimensionSet, Object> byDimensionSet =
        byMetricName.get(toMatch.metricDefinition.name);
    if (byDimensionSet == null) {
      return EMPTY_LIST;
    }
    final DimensionSet[] possibleDimensionSets =
        createPossibleDimensionPairs(toMatch.metricDefinition);
    List<MetricDefinitionAndTenantId> matches = null;
    for (final DimensionSet dimensionSet : possibleDimensionSets) {
      if (byDimensionSet.containsKey(dimensionSet)) {
        if (matches == null) {
          matches = new ArrayList<>();
        }
        matches.add(createFromDimensionSet(toMatch, dimensionSet));
      }
    }
    return matches == null ? EMPTY_LIST : matches;
  }

  private MetricDefinitionAndTenantId createFromDimensionSet(MetricDefinitionAndTenantId toMatch,
      DimensionSet dimensionSet) {
    final Map<String, String> dimensions = new HashMap<>(dimensionSet.pairs.length);
    for (final DimensionPair pair : dimensionSet.pairs) {
      dimensions.put(pair.key, pair.value);
    }
    return new MetricDefinitionAndTenantId(new MetricDefinition(toMatch.metricDefinition.name,
        dimensions), toMatch.tenantId);
  }

  protected DimensionSet[] createPossibleDimensionPairs(MetricDefinition metricDefinition) {
    final int dimensionSize =
        metricDefinition.dimensions == null ? 0 : metricDefinition.dimensions.size();
    final int size = (int) Math.pow(2, dimensionSize);
    final DimensionSet[] result = new DimensionSet[size];
    int index = 0;
    result[index++] = EMPTY_DIMENSION_SET;
    if (dimensionSize == 0) {
      return result;
    }
    final DimensionPair[] pairs = createPairs(metricDefinition);
    for (int i = 0; i < pairs.length; i++) {
      index = addMore(pairs, i, EMPTY_DIMENSION_SET, result, index);
    }
    return result;
  }

  private int addMore(DimensionPair[] pairs, int start, DimensionSet dimensionSet,
      DimensionSet[] result, int index) {
    final DimensionPair[] newPairs = new DimensionPair[dimensionSet.pairs.length + 1];
    if (dimensionSet.pairs.length > 0) {
      System.arraycopy(dimensionSet.pairs, 0, newPairs, 0, dimensionSet.pairs.length);
    }
    newPairs[dimensionSet.pairs.length] = pairs[start];
    final DimensionSet thisDimensionSet = new DimensionSet(newPairs);
    result[index++] = thisDimensionSet;
    for (int i = start + 1; i < pairs.length; i++) {
      index = addMore(pairs, i, thisDimensionSet, result, index);
    }
    return index;
  }

  private DimensionPair[] createPairs(MetricDefinition metricDefinition) {
    final int dimensionSize =
        metricDefinition.dimensions == null ? 0 : metricDefinition.dimensions.size();
    final DimensionPair[] pairs = new DimensionPair[dimensionSize];
    if (dimensionSize > 0) { // metricDefinition.dimensions can be null
      int index = 0;
      for (final Map.Entry<String, String> entry : metricDefinition.dimensions.entrySet()) {
        pairs[index++] = new DimensionPair(entry.getKey(), entry.getValue());
      }
    }
    return pairs;
  }

  public boolean isEmpty() {
    return byTenantId.isEmpty();
  }

  public void clear() {
    byTenantId.clear();
  }

  protected static class DimensionSet {
    final DimensionPair[] pairs;

    public DimensionSet(DimensionPair... pairs) {
      Arrays.sort(pairs);
      this.pairs = pairs;
    }

    @Override
    public int hashCode() {
      int result = 1;
      final int prime = 31;
      for (DimensionPair pair : pairs) {
        result = result * prime + pair.hashCode();
      }
      return result;
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      final DimensionSet other = (DimensionSet) obj;
      if (this.pairs.length != other.pairs.length) {
        return false;
      }
      for (int i = 0; i < this.pairs.length; i++) {
        if (!this.pairs[i].equals(other.pairs[i])) {
          return false;
        }
      }
      return true;
    }

    @Override
    public String toString() {
      final StringBuilder builder = new StringBuilder(256);
      builder.append("DimensionSet [");
      boolean first = true;
      for (DimensionPair pair : pairs) {
        if (!first) {
          builder.append(", ");
        }
        builder.append(pair.toString());
        first = false;
      }
      builder.append("]");
      return builder.toString();
    }
  }

  protected static class DimensionPair implements Comparable<DimensionPair> {
    private String key;
    private String value;

    public DimensionPair(String key, String value) {
      this.key = key;
      this.value = value;
    }

    @Override
    public int hashCode() {
      int result = 1;
      final int prime = 31;
      result = prime * result + key.hashCode();
      result = prime * result + ((value == null) ? 0 : value.hashCode());
      return result;
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null) {
        return false;
      }
      if (getClass() != obj.getClass()) {
        return false;
      }
      DimensionPair other = (DimensionPair) obj;
      return compareStrings(key, other.key) && compareStrings(value, other.value);
    }

    private boolean compareStrings(final String s1, final String s2) {
      if (s1 == s2) {
        return true;
      }
      if (s1 == null) {
        return false;
      }
      return s1.equals(s2);
    }

    @Override
    public int compareTo(DimensionPair o) {
      int c = this.key.compareTo(o.key);
      if (c != 0) {
        return c;
      }
      // Handle possible null values. A actual value is bigger than a null
      if (this.value == null) {
        return o.value == null ? 0 : 1;
      }
      return this.value.compareTo(o.value);
    }

    @Override
    public String toString() {
      return String.format("DimensionPair %s=%s", key, value);
    }
  }
}
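The class comment above describes the matching strategy: for an incoming metric, every subset of its dimensions is expanded into a DimensionSet (2^n of them) and each is probed against the stored map. A small usage sketch, assuming it lives in the same com.hpcloud.mon.domain.model package as the classes in this commit; the tenant, metric name, and dimension values are made up for illustration:

package com.hpcloud.mon.domain.model;

import com.hpcloud.mon.common.model.metric.MetricDefinition;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MatcherExample {
  public static void main(String[] args) {
    MetricDefinitionAndTenantIdMatcher matcher = new MetricDefinitionAndTenantIdMatcher();

    // Register the metric an alarm was defined on: only the "service" dimension.
    Map<String, String> alarmDims = new HashMap<>();
    alarmDims.put("service", "compute");
    matcher.add(new MetricDefinitionAndTenantId(
        new MetricDefinition("cpu.idle", alarmDims), "tenant-1"));

    // An incoming metric carries an extra dimension; the matcher probes all
    // 2^2 dimension subsets and finds the stored {service=compute} entry.
    Map<String, String> metricDims = new HashMap<>();
    metricDims.put("service", "compute");
    metricDims.put("hostname", "host-42");
    List<MetricDefinitionAndTenantId> matches = matcher.match(new MetricDefinitionAndTenantId(
        new MetricDefinition("cpu.idle", metricDims), "tenant-1"));

    System.out.println(matches); // one MetricDefinitionAndTenantId recreated from the stored DimensionSet
  }
}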


@@ -14,6 +14,7 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.domain.model;

import com.hpcloud.mon.common.model.alarm.AlarmState;
@@ -26,122 +27,135 @@ import java.io.Serializable;
 * Sub-alarm. Decorates an AlarmSubExpression.
 */
public class SubAlarm extends AbstractEntity implements Serializable {
  private static final long serialVersionUID = -3946708553723868124L;

  private String alarmId;
  private AlarmSubExpression expression;
  private AlarmState state;
  private boolean noState;
  /**
   * Whether metrics for this sub-alarm are received sporadically.
   */
  private boolean sporadicMetric;

  public SubAlarm(String id, String alarmId, AlarmSubExpression expression) {
    this(id, alarmId, expression, AlarmState.UNDETERMINED);
  }

  // Need this for kryo serialization/deserialization. Fixes a bug in default java
  // serialization/deserialization where id was not being set. See resources/storm.yaml
  // file for how to handle serialization/deserialization with kryo.
  public SubAlarm() {
  }

  public SubAlarm(String id, String alarmId, AlarmSubExpression expression, AlarmState state) {
    this.id = id;
    this.alarmId = alarmId;
    this.expression = expression;
    this.state = state;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!super.equals(obj)) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    SubAlarm other = (SubAlarm) obj;
    if (alarmId == null) {
      if (other.alarmId != null) {
        return false;
      }
    } else if (!alarmId.equals(other.alarmId)) {
      return false;
    }
    if (expression == null) {
      if (other.expression != null) {
        return false;
      }
    } else if (!expression.equals(other.expression)) {
      return false;
    }
    if (state != other.state) {
      return false;
    }
    return true;
  }

  public String getAlarmId() {
    return alarmId;
  }

  public AlarmSubExpression getExpression() {
    return expression;
  }

  public AlarmState getState() {
    return state;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = super.hashCode();
    result = prime * result + ((alarmId == null) ? 0 : alarmId.hashCode());
    result = prime * result + ((expression == null) ? 0 : expression.hashCode());
    result = prime * result + ((state == null) ? 0 : state.hashCode());
    return result;
  }

  public boolean isSporadicMetric() {
    return sporadicMetric;
  }

  public void setSporadicMetric(boolean sporadicMetric) {
    this.sporadicMetric = sporadicMetric;
  }

  public void setState(AlarmState state) {
    this.state = state;
  }

  public boolean isNoState() {
    return noState;
  }

  public void setNoState(boolean noState) {
    this.noState = noState;
  }

  @Override
  public String toString() {
    return String.format("SubAlarm [id=%s, alarmId=%s, expression=%s, state=%s noState=%s]", id,
        alarmId, expression, state, noState);
  }

  /**
   * Determine if this SubAlarm and 'other' could reuse saved measurements. Only possible only
   * operator and/or threshold are the only properties from the expression that are different
   *
   * @param other SubAlarm to compare to
   * @return true if 'other' is "compatible", false otherwise
   */
  public boolean isCompatible(final SubAlarm other) {
    if (!this.expression.getMetricDefinition().equals(other.expression.getMetricDefinition())) {
      return false;
    }
    if (!this.expression.getFunction().equals(other.expression.getFunction())) {
      return false;
    }
    if (this.expression.getPeriod() != other.expression.getPeriod()) {
      return false;
    }
    if (this.expression.getPeriods() != other.expression.getPeriods()) {
      return false;
    }
    // Operator and Threshold can vary
    return true;
  }
}


@@ -14,20 +14,21 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.domain.model;

import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.util.stats.SlidingWindowStats;
import com.hpcloud.util.time.TimeResolution;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Aggregates statistics for a specific SubAlarm.
 */
public class SubAlarmStats {
  private static final Logger logger = LoggerFactory.getLogger(SubAlarmStats.class);

  /** Number of slots for future periods that we should collect metrics for. */
  private static final int FUTURE_SLOTS = 2;
  /** Helps determine how many empty window observations before transitioning to UNDETERMINED. */
@@ -48,28 +49,29 @@ public class SubAlarmStats {
    slotWidth = subAlarm.getExpression().getPeriod();
    this.subAlarm = subAlarm;
    this.subAlarm.setNoState(true);
    this.stats =
        new SlidingWindowStats(subAlarm.getExpression().getFunction().toStatistic(),
            timeResolution, slotWidth, subAlarm.getExpression().getPeriods(), FUTURE_SLOTS,
            viewEndTimestamp);
    int period = subAlarm.getExpression().getPeriod();
    int periodMinutes = period < 60 ? 1 : period / 60; // Assumes the period is in seconds so we
                                                       // convert to minutes
    emptyWindowObservationThreshold =
        periodMinutes * subAlarm.getExpression().getPeriods() * UNDETERMINED_COEFFICIENT;
    emptyWindowObservations = 0;
  }

  /**
   * Evaluates the {@link #subAlarm} for the current stats window, updating the sub-alarm's state if
   * necessary and sliding the window to the {@code slideToTimestamp}.
   *
   * @return true if the alarm's state changed, else false.
   */
  public boolean evaluateAndSlideWindow(long slideToTimestamp) {
    try {
      return evaluate();
    } catch (Exception e) {
      logger.error("Failed to evaluate {}", this, e);
      return false;
    } finally {
      slideWindow(slideToTimestamp);
@@ -77,8 +79,8 @@ public class SubAlarmStats {
  }

  /**
   * Just slide the window. Either slideWindow or evaluateAndSlideWindow should be called for each
   * time period, but never both
   *
   * @param slideToTimestamp
   */
@@ -102,9 +104,10 @@ public class SubAlarmStats {
  @Override
  public String toString() {
    return String
        .format(
            "SubAlarmStats [subAlarm=%s, stats=%s, emptyWindowObservations=%s, emptyWindowObservationThreshold=%s]",
            subAlarm, stats, emptyWindowObservations, emptyWindowObservationThreshold);
  }

  /**
@@ -115,17 +118,17 @@ public class SubAlarmStats {
    boolean thresholdExceeded = false;
    boolean hasEmptyWindows = false;
    for (double value : values) {
      if (Double.isNaN(value)) {
        hasEmptyWindows = true;
      } else {
        emptyWindowObservations = 0;

        // Check if value is OK
        if (!subAlarm.getExpression().getOperator()
            .evaluate(value, subAlarm.getExpression().getThreshold())) {
          if (!shouldSendStateChange(AlarmState.OK)) {
            return false;
          }
          setSubAlarmState(AlarmState.OK);
          return true;
        } else
@@ -134,8 +137,9 @@ public class SubAlarmStats {
    }

    if (thresholdExceeded && !hasEmptyWindows) {
      if (!shouldSendStateChange(AlarmState.ALARM)) {
        return false;
      }
      setSubAlarmState(AlarmState.ALARM);
      return true;
    }
@@ -143,10 +147,9 @@ public class SubAlarmStats {

    // Window is empty at this point
    emptyWindowObservations++;

    if ((emptyWindowObservations >= emptyWindowObservationThreshold)
        && shouldSendStateChange(AlarmState.UNDETERMINED) && !subAlarm.isSporadicMetric()) {
      setSubAlarmState(AlarmState.UNDETERMINED);
      return true;
    }
@@ -164,10 +167,11 @@ public class SubAlarmStats {
  /**
   * This MUST only be used for compatible SubAlarms, i.e. where
   * this.subAlarm.isCompatible(subAlarm) is true
   *
   * @param subAlarm
   */
  public void updateSubAlarm(final SubAlarm subAlarm) {
    this.subAlarm = subAlarm;
  }
}


@@ -14,6 +14,7 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.domain.service;

import com.hpcloud.mon.common.model.alarm.AlarmState;


@@ -14,6 +14,7 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.domain.service;

import java.util.List;


@@ -14,13 +14,14 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.domain.service;

import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;

import java.util.List;

/**
 * SubAlarm DAO.
 */

View File

@@ -14,61 +14,69 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.domain.service;

import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;

public class SubAlarmMetricDefinition {
  private final String subAlarmId;
  private final MetricDefinitionAndTenantId metricDefinitionAndTenantId;

  public SubAlarmMetricDefinition(String subAlarmId,
      MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
    this.subAlarmId = subAlarmId;
    this.metricDefinitionAndTenantId = metricDefinitionAndTenantId;
  }

  public String getSubAlarmId() {
    return subAlarmId;
  }

  public MetricDefinitionAndTenantId getMetricDefinitionAndTenantId() {
    return metricDefinitionAndTenantId;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((subAlarmId == null) ? 0 : subAlarmId.hashCode());
    result =
        prime * result
            + ((metricDefinitionAndTenantId == null) ? 0 : metricDefinitionAndTenantId.hashCode());
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    SubAlarmMetricDefinition other = (SubAlarmMetricDefinition) obj;
    return compareObjects(subAlarmId, other.subAlarmId)
        && compareObjects(metricDefinitionAndTenantId, other.metricDefinitionAndTenantId);
  }

  private boolean compareObjects(final Object o1, final Object o2) {
    if (o1 == o2) {
      return true;
    }
    if (o1 == null) {
      return false;
    }
    return o1.equals(o2);
  }

  @Override
  public String toString() {
    return String.format("SubAlarmMetricDefinition subAlarmId=%s metricDefinitionAndTenantId=%s",
        subAlarmId, metricDefinitionAndTenantId);
  }
}
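Because SubAlarmMetricDefinition is a value object with equals and hashCode, equal definitions collapse to a single entry in hash-based collections. A brief, hedged usage sketch: the ids, metric name, and tenant below are placeholders, and the final count assumes MetricDefinitionAndTenantId implements value equality as well.

// Hedged usage sketch; placeholder data only.
MetricDefinition metricDef = new MetricDefinition("cpu.idle_perc", null);
SubAlarmMetricDefinition first =
    new SubAlarmMetricDefinition("sub-1", new MetricDefinitionAndTenantId(metricDef, "tenant-a"));
SubAlarmMetricDefinition second =
    new SubAlarmMetricDefinition("sub-1", new MetricDefinitionAndTenantId(metricDef, "tenant-a"));

Set<SubAlarmMetricDefinition> unique = new HashSet<>();
unique.add(first);
unique.add(second);
// unique.size() == 1: the duplicate definition is dropped by the Set.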

View File

@@ -14,15 +14,16 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.domain.service;

import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.model.SubAlarmStats;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

/**
 * SubAlarmStats repository.
 */
@@ -34,8 +35,9 @@ public class SubAlarmStatsRepository {
   * adds it to the repository.
   */
  public void add(SubAlarm subAlarm, long viewEndTimestamp) {
    if (!subAlarmStats.containsKey(subAlarm.getId())) {
      subAlarmStats.put(subAlarm.getId(), new SubAlarmStats(subAlarm, viewEndTimestamp));
    }
  }

  public Collection<SubAlarmStats> get() {

View File

@@ -14,17 +14,9 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.persistence;

import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmState;
@@ -36,6 +28,15 @@ import com.hpcloud.mon.domain.service.AlarmDAO;
import com.hpcloud.persistence.BeanMapper;
import com.hpcloud.persistence.SqlQueries;

import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import javax.inject.Inject;

/**
 * Alarm DAO implementation.
 */
@@ -63,10 +64,12 @@ public class AlarmDAOImpl implements AlarmDAO {
        String subAlarmId = (String) row.get("id");
        Map<String, String> dimensions = findDimensionsById(handle, subAlarmId);
        AggregateFunction function = AggregateFunction.valueOf((String) row.get("function"));
        MetricDefinition metricDef =
            new MetricDefinition((String) row.get("metric_name"), dimensions);
        AlarmOperator operator = AlarmOperator.valueOf((String) row.get("operator"));
        AlarmSubExpression subExpression =
            new AlarmSubExpression(function, metricDef, operator, (Double) row.get("threshold"),
                (Integer) row.get("period"), (Integer) row.get("periods"));
        SubAlarm subAlarm = new SubAlarm(subAlarmId, (String) row.get("alarm_id"), subExpression);
        subAlarms.add(subAlarm);
      }
@@ -79,18 +82,17 @@ public class AlarmDAOImpl implements AlarmDAO {
    Handle h = db.open();

    try {
      Alarm alarm =
          h.createQuery("select * from alarm where id = :id and deleted_at is null").bind("id", id)
              .map(new BeanMapper<Alarm>(Alarm.class)).first();

      if (alarm == null) {
        return null;
      }

      alarm.setSubAlarms(subAlarmsForRows(
          h,
          h.createQuery("select * from sub_alarm where alarm_id = :alarmId")
              .bind("alarmId", alarm.getId()).list()));

      return alarm;
    } finally {
@@ -104,9 +106,7 @@ public class AlarmDAOImpl implements AlarmDAO {
    try {
      h.createStatement("update alarm set state = :state, updated_at = NOW() where id = :id")
          .bind("id", id).bind("state", state.toString()).execute();
    } finally {
      h.close();
    }
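The DAO keeps to one JDBI v2 pattern throughout: open a Handle, run the fluent query, and close the Handle in a finally block so the pooled connection is always returned. A condensed sketch of that pattern, assuming a DBI field named db and a bean class Alarm as in the code above:

Handle h = db.open();
try {
  Alarm found =
      h.createQuery("select * from alarm where id = :id and deleted_at is null")
          .bind("id", alarmId).map(new BeanMapper<Alarm>(Alarm.class)).first();
  // first() yields null when no row matches, hence the explicit null check in findById.
  return found;
} finally {
  h.close(); // always release the connection, even when the query throws
}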

View File

@@ -14,8 +14,17 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.persistence;

import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmMetricDefinition;

import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -23,21 +32,14 @@ import java.util.Map;

import javax.inject.Inject;

/**
 * MetricDefinition DAO implementation.
 */
public class MetricDefinitionDAOImpl implements MetricDefinitionDAO {
  private static final String METRIC_DEF_SQL =
      "select sa.id, a.tenant_id, sa.metric_name, sad.dimensions from alarm as a, sub_alarm as sa "
          + "left join (select sub_alarm_id, group_concat(dimension_name, '=', value) as dimensions from sub_alarm_dimension group by sub_alarm_id) as sad on sa.id = sad.sub_alarm_id "
          + "where a.id = sa.alarm_id and a.deleted_at is null";

  private final DBI db;
@@ -65,15 +67,16 @@ public class MetricDefinitionDAOImpl implements MetricDefinitionDAO {
        for (String kvStr : dimensionSet.split(",")) {
          String[] kv = kvStr.split("=");
          if (kv.length > 1) {
            if (dimensions == null) {
              dimensions = new HashMap<String, String>();
            }
            dimensions.put(kv[0], kv[1]);
          }
        }
      }

      metricDefs.add(new SubAlarmMetricDefinition(subAlarmId, new MetricDefinitionAndTenantId(
          new MetricDefinition(metric_name, dimensions), tenantId)));
    }

    return metricDefs;
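The dimensions column produced by group_concat arrives as a single "name=value,name=value" string, which the loop above splits into a map and leaves as null when a sub-alarm has no dimensions. A self-contained sketch of that parsing step:

// Sketch of the dimension parsing done above; entries without '=' are skipped.
import java.util.HashMap;
import java.util.Map;

public class DimensionParseSketch {
  static Map<String, String> parse(String dimensionSet) {
    Map<String, String> dimensions = null;
    for (String kvStr : dimensionSet.split(",")) {
      String[] kv = kvStr.split("=");
      if (kv.length > 1) {
        if (dimensions == null) {
          dimensions = new HashMap<String, String>();
        }
        dimensions.put(kv[0], kv[1]);
      }
    }
    return dimensions; // null when the sub-alarm has no dimensions
  }

  public static void main(String[] args) {
    System.out.println(parse("host=web-01,region=east")); // {host=web-01, region=east}
  }
}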

View File

@@ -14,20 +14,22 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.persistence;

import com.hpcloud.mon.domain.service.AlarmDAO;
import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmDAO;
import com.hpcloud.mon.infrastructure.thresholding.DataSourceFactory;

import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Scopes;

import org.skife.jdbi.v2.DBI;

import javax.inject.Singleton;

/**
 * Configures persistence related types.
 */

View File

@@ -14,18 +14,9 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.persistence;

import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
@@ -35,6 +26,16 @@ import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.SubAlarmDAO;
import com.hpcloud.persistence.SqlStatements;

import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import javax.inject.Inject;

/**
 * SubAlarm DAO implementation.
 */
@@ -44,12 +45,14 @@ public class SubAlarmDAOImpl implements SubAlarmDAO {
   * table, grouping by the dimension id and counting them to ensure that the number of matched
   * dimensions equals the number of actual dimensions in the table for the subscription.
   */
  private static final String FIND_BY_METRIC_DEF_SQL =
      "select sa.* from sub_alarm sa, alarm a, sub_alarm_dimension d "
          + "join (%s) v on d.dimension_name = v.dimension_name and d.value = v.value "
          + "where sa.id = d.sub_alarm_id and sa.metric_name = :metric_name and a.tenant_id = :tenant_id and a.id = sa.alarm_id and a.deleted_at is null "
          + "group by d.sub_alarm_id having count(d.sub_alarm_id) = %s";
  private static final String FIND_BY_METRIC_DEF_NO_DIMS_SQL =
      "select sa.* from sub_alarm sa, alarm a where sa.metric_name = :metric_name "
          + "and a.tenant_id = :tenant_id and a.id = sa.alarm_id and a.deleted_at is null and (select count(*) from sub_alarm_dimension where sub_alarm_id = sa.id) = 0";

  private final DBI db;
@@ -65,17 +68,20 @@ public class SubAlarmDAOImpl implements SubAlarmDAO {
    try {
      final MetricDefinition metricDefinition = metricDefinitionTenantId.metricDefinition;
      final String sql;
      if (metricDefinition.dimensions == null || metricDefinition.dimensions.isEmpty()) {
        sql = FIND_BY_METRIC_DEF_NO_DIMS_SQL;
      } else {
        String unionAllStatement =
            SqlStatements.unionAllStatementFor(metricDefinition.dimensions, "dimension_name",
                "value");
        sql =
            String.format(FIND_BY_METRIC_DEF_SQL, unionAllStatement,
                metricDefinition.dimensions.size());
      }

      Query<Map<String, Object>> query =
          h.createQuery(sql).bind("metric_name", metricDefinition.name)
              .bind("tenant_id", metricDefinitionTenantId.tenantId);

      List<Map<String, Object>> rows = query.list();

      List<SubAlarm> subAlarms = new ArrayList<SubAlarm>(rows.size());
@@ -83,9 +89,10 @@ public class SubAlarmDAOImpl implements SubAlarmDAO {
        String subAlarmId = (String) row.get("id");
        AggregateFunction function = AggregateFunction.valueOf((String) row.get("function"));
        AlarmOperator operator = AlarmOperator.valueOf((String) row.get("operator"));
        AlarmSubExpression subExpression =
            new AlarmSubExpression(function, metricDefinition, operator,
                (Double) row.get("threshold"), (Integer) row.get("period"),
                (Integer) row.get("periods"));
        SubAlarm subAlarm = new SubAlarm(subAlarmId, (String) row.get("alarm_id"), subExpression);
        subAlarms.add(subAlarm);
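FIND_BY_METRIC_DEF_SQL is completed at runtime by substituting a generated union-all sub-select plus the dimension count, and the group by / having clause then accepts a sub-alarm only when every one of its dimensions matches the incoming metric. The exact text produced by SqlStatements.unionAllStatementFor is not part of this change, so the inline sub-select below is only an assumed illustration of its shape:

// Assumed shape of the generated sub-select; the real unionAllStatementFor output may differ.
String unionAllStatement =
    "select 'hostname' as dimension_name, 'web-01' as value "
        + "union all select 'region' as dimension_name, 'east' as value";
String sql = String.format(FIND_BY_METRIC_DEF_SQL, unionAllStatement, 2);
// The trailing "having count(d.sub_alarm_id) = 2" keeps only sub-alarms whose two
// dimensions are both present in the metric's dimension set.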

View File

@@ -14,11 +14,12 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.thresholding;

public interface AlarmEventForwarder {
  void send(String alertExchange, String alertRoutingKey, String json);

  void close();
}

View File

@@ -14,20 +14,9 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.thresholding;

import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.ThresholdingConfiguration;
import com.hpcloud.mon.common.event.AlarmStateTransitionedEvent;
@@ -43,6 +32,18 @@ import com.hpcloud.streaming.storm.Streams;
import com.hpcloud.util.Injector;
import com.hpcloud.util.Serialization;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Tuple;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;

/**
 * Determines whether an alarm threshold has been exceeded.
 * <p/>
@@ -55,193 +56,198 @@ import com.hpcloud.util.Serialization;
 * </ul>
 */
public class AlarmThresholdingBolt extends BaseRichBolt {
  private static final long serialVersionUID = -4126465124017857754L;

  private transient Logger logger;
  private DataSourceFactory dbConfig;
  private KafkaProducerConfiguration producerConfiguration;
  final Map<String, Alarm> alarms = new HashMap<String, Alarm>();
  private String alertExchange;
  private String alertRoutingKey;
  private transient AlarmDAO alarmDAO;
  private transient AlarmEventForwarder alarmEventForwarder;
  private OutputCollector collector;

  public AlarmThresholdingBolt(DataSourceFactory dbConfig, KafkaProducerConfiguration producerConfig) {
    this.dbConfig = dbConfig;
    this.producerConfiguration = producerConfig;
  }

  public AlarmThresholdingBolt(final AlarmDAO alarmDAO,
      final AlarmEventForwarder alarmEventForwarder) {
    this.alarmDAO = alarmDAO;
    this.alarmEventForwarder = alarmEventForwarder;
  }

  @Override
  public void declareOutputFields(OutputFieldsDeclarer declarer) {}

  @Override
  public void execute(Tuple tuple) {
    logger.debug("tuple: {}", tuple);
    try {
      if (Streams.DEFAULT_STREAM_ID.equals(tuple.getSourceStreamId())) {
        String alarmId = tuple.getString(0);
        Alarm alarm = getOrCreateAlarm(alarmId);
        if (alarm == null) {
          return;
        }

        SubAlarm subAlarm = (SubAlarm) tuple.getValue(1);

        evaluateThreshold(alarm, subAlarm);
      } else if (EventProcessingBolt.ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
        String eventType = tuple.getString(0);
        String alarmId = tuple.getString(1);

        if (EventProcessingBolt.DELETED.equals(eventType)) {
          handleAlarmDeleted(alarmId);
        } else if (EventProcessingBolt.UPDATED.equals(eventType)) {
          handleAlarmUpdated(alarmId, (AlarmUpdatedEvent) tuple.getValue(2));
        }
      }
    } catch (Exception e) {
      logger.error("Error processing tuple {}", tuple, e);
    } finally {
      collector.ack(tuple);
    }
  }

  @Override
  @SuppressWarnings("rawtypes")
  public void prepare(Map config, TopologyContext context, OutputCollector collector) {
    logger = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
    logger.info("Preparing");
    this.collector = collector;
    alertExchange = (String) config.get(ThresholdingConfiguration.ALERTS_EXCHANGE);
    alertRoutingKey = (String) config.get(ThresholdingConfiguration.ALERTS_ROUTING_KEY);

    if (alarmDAO == null) {
      Injector.registerIfNotBound(AlarmDAO.class, new PersistenceModule(dbConfig));
      alarmDAO = Injector.getInstance(AlarmDAO.class);
    }

    if (alarmEventForwarder == null) {
      Injector.registerIfNotBound(AlarmEventForwarder.class, new ProducerModule(
          this.producerConfiguration));
      alarmEventForwarder = Injector.getInstance(AlarmEventForwarder.class);
    }
  }

  void evaluateThreshold(Alarm alarm, SubAlarm subAlarm) {
    logger.debug("Received state change for {}", subAlarm);
    subAlarm.setNoState(false);
    alarm.updateSubAlarm(subAlarm);

    AlarmState initialState = alarm.getState();
    // Wait for all sub alarms to have a state before evaluating to prevent flapping on startup
    if (allSubAlarmsHaveState(alarm) && alarm.evaluate()) {
      changeAlarmState(alarm, initialState, alarm.getStateChangeReason());
    }
  }

  private boolean allSubAlarmsHaveState(final Alarm alarm) {
    for (SubAlarm subAlarm : alarm.getSubAlarms()) {
      if (subAlarm.isNoState()) {
        return false;
      }
    }
    return true;
  }

  private void changeAlarmState(Alarm alarm, AlarmState initialState, String stateChangeReason) {
    alarmDAO.updateState(alarm.getId(), alarm.getState());

    logger.debug("Alarm {} transitioned from {} to {}", alarm, initialState, alarm.getState());
    AlarmStateTransitionedEvent event =
        new AlarmStateTransitionedEvent(alarm.getTenantId(), alarm.getId(), alarm.getName(),
            alarm.getDescription(), initialState, alarm.getState(), alarm.isActionsEnabled(),
            stateChangeReason, getTimestamp());
    try {
      alarmEventForwarder.send(alertExchange, alertRoutingKey, Serialization.toJson(event));
    } catch (Exception ignore) {
      logger.debug("Failure sending alarm", ignore);
    }
  }

  protected long getTimestamp() {
    return System.currentTimeMillis() / 1000;
  }

  void handleAlarmDeleted(String alarmId) {
    logger.debug("Received AlarmDeletedEvent for alarm id {}", alarmId);
    alarms.remove(alarmId);
  }

  void handleAlarmUpdated(String alarmId, AlarmUpdatedEvent alarmUpdatedEvent) {
    final Alarm oldAlarm = alarms.get(alarmId);
    if (oldAlarm == null) {
      logger.debug("Updated Alarm {} not loaded, ignoring");
      return;
    }

    oldAlarm.setName(alarmUpdatedEvent.alarmName);
    oldAlarm.setDescription(alarmUpdatedEvent.alarmDescription);
    oldAlarm.setExpression(alarmUpdatedEvent.alarmExpression);
    oldAlarm.setState(alarmUpdatedEvent.alarmState);
    oldAlarm.setActionsEnabled(alarmUpdatedEvent.alarmActionsEnabled);
    // Now handle the SubAlarms
    // First remove the deleted SubAlarms so we don't have to consider them later
    for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.oldAlarmSubExpressions
        .entrySet()) {
      logger.debug("Removing deleted SubAlarm {}", entry.getValue());
      if (!oldAlarm.removeSubAlarmById(entry.getKey())) {
        logger.error("Did not find removed SubAlarm {}", entry.getValue());
      }
    }
    // Reuse what we can from the changed SubAlarms
    for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.changedSubExpressions
        .entrySet()) {
      final SubAlarm oldSubAlarm = oldAlarm.getSubAlarm(entry.getKey());
      if (oldSubAlarm == null) {
        logger.error("Did not find changed SubAlarm {}", entry.getValue());
        continue;
      }
      final SubAlarm newSubAlarm = new SubAlarm(entry.getKey(), oldAlarm.getId(), entry.getValue());
      newSubAlarm.setState(oldSubAlarm.getState());
      if (!oldSubAlarm.isCompatible(newSubAlarm)) {
        newSubAlarm.setNoState(true);
      }
      logger.debug("Changing SubAlarm from {} to {}", oldSubAlarm, newSubAlarm);
      oldAlarm.updateSubAlarm(newSubAlarm);
    }
    // Add the new SubAlarms
    for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.newAlarmSubExpressions
        .entrySet()) {
      final SubAlarm newSubAlarm = new SubAlarm(entry.getKey(), oldAlarm.getId(), entry.getValue());
      newSubAlarm.setNoState(true);
      logger.debug("Adding SubAlarm {}", newSubAlarm);
      oldAlarm.updateSubAlarm(newSubAlarm);
    }
    alarms.put(alarmId, oldAlarm);
  }

  String buildStateChangeReason() {
    return null;
  }

  private Alarm getOrCreateAlarm(String alarmId) {
    Alarm alarm = alarms.get(alarmId);
    if (alarm == null) {
      alarm = alarmDAO.findById(alarmId);
      if (alarm == null) {
        logger.error("Failed to locate alarm for id {}", alarmId);
      } else {
        for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
          subAlarm.setNoState(true);
        }
        alarms.put(alarmId, alarm);
      }
    }

    return alarm;
  }
}
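For context, the bolt is meant to be wired into a Storm topology so that all tuples for one alarm id reach the same task, with alarm lifecycle events broadcast from the event stream. The sketch below is only an assumed wiring: the component ids, parallelism hints, the "alarmId" field name, and the upstream "aggregation-bolt" are placeholders rather than this project's actual topology definition.

// Hedged sketch of topology wiring; names and groupings are illustrative only.
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;

public class TopologyWiringSketch {
  public static TopologyBuilder sketch(DataSourceFactory dbConfig,
      KafkaProducerConfiguration producerConfig) {
    TopologyBuilder builder = new TopologyBuilder();
    builder.setBolt("event-bolt", new EventProcessingBolt(), 2);
    // "aggregation-bolt" stands in for whatever component emits sub-alarm state changes.
    builder.setBolt("thresholding-bolt", new AlarmThresholdingBolt(dbConfig, producerConfig), 2)
        .fieldsGrouping("aggregation-bolt", new Fields("alarmId"))
        .allGrouping("event-bolt", EventProcessingBolt.ALARM_EVENT_STREAM_ID);
    return builder;
  }
}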

View File

@@ -14,107 +14,108 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.thresholding;

import java.io.Serializable;
import java.util.Properties;

/**
 * This class replaces io.dropwizard.db.DataSourceFactory which currently can't be used with Storm
 * because it is not marked Serializable. This class could be deleted and replaced by that class
 * when and if io.dropwizard.db.DataSourceFactory is marked Serializable.
 */
public class DataSourceFactory implements Serializable {
  private static final long serialVersionUID = -1903552028062110222L;

  private String user;
  private String password;
  private String url;
  private String driverClass;
  private Properties properties;
  private String maxWaitForConnection;
  private String validationQuery;
  private String minSize;
  private String maxSize;

  public String getUser() {
    return user;
  }

  public void setUser(String user) {
    this.user = user;
  }

  public String getPassword() {
    return password;
  }

  public void setPassword(String password) {
    this.password = password;
  }

  public String getUrl() {
    return url;
  }

  public void setUrl(String url) {
    this.url = url;
  }

  public String getDriverClass() {
    return driverClass;
  }

  public void setDriverClass(String driverClass) {
    this.driverClass = driverClass;
  }

  public Properties getProperties() {
    return properties;
  }

  public void setProperties(Properties properties) {
    this.properties = properties;
  }

  public String getMaxWaitForConnection() {
    return maxWaitForConnection;
  }

  public void setMaxWaitForConnection(String maxWaitForConnection) {
    this.maxWaitForConnection = maxWaitForConnection;
  }

  public String getValidationQuery() {
    return validationQuery;
  }

  public void setValidationQuery(String validationQuery) {
    this.validationQuery = validationQuery;
  }

  public String getMinSize() {
    return minSize;
  }

  public void setMinSize(String minSize) {
    this.minSize = minSize;
  }

  public String getMaxSize() {
    return maxSize;
  }

  public void setMaxSize(String maxSize) {
    this.maxSize = maxSize;
  }
}
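Because DataSourceFactory is just a Serializable bag of settings, each Storm worker can rebuild its own database access from it after deserialization. One plausible way to do that with JDBI v2 is sketched below; the project's PersistenceModule may construct a pooled DataSource instead, so treat this as an assumption rather than the actual wiring.

// Hedged sketch: builds a plain (unpooled) DBI straight from the factory fields.
import org.skife.jdbi.v2.DBI;

public class DbiFromConfigSketch {
  public static DBI create(DataSourceFactory config) throws ClassNotFoundException {
    if (config.getDriverClass() != null) {
      Class.forName(config.getDriverClass()); // make sure the JDBC driver is registered
    }
    return new DBI(config.getUrl(), config.getUser(), config.getPassword());
  }
}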

View File

@@ -14,21 +14,9 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.thresholding;

import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
@@ -38,9 +26,22 @@ import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.streaming.storm.Logging;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;

/**
 * Processes events by emitting tuples related to the event.
 *
 * <ul>
 * <li>Input: Object event
 * <li>Output alarm-events: String eventType, String alarmId
@@ -60,7 +61,8 @@ public class EventProcessingBolt extends BaseRichBolt {
  /** Stream for metric and sub-alarm specific events. */
  public static final String METRIC_SUB_ALARM_EVENT_STREAM_ID = "metric-sub-alarm-events";

  public static final String[] ALARM_EVENT_STREAM_FIELDS = new String[] {"eventType", "alarmId",
      "alarm"};
  public static final String[] METRIC_ALARM_EVENT_STREAM_FIELDS = new String[] {"eventType",
      "metricDefinitionAndTenantId", "subAlarmId"};
  public static final String[] METRIC_SUB_ALARM_EVENT_STREAM_FIELDS = new String[] {"eventType",
@@ -71,29 +73,32 @@ public class EventProcessingBolt extends BaseRichBolt {
  public static final String UPDATED = "updated";
  public static final String RESEND = "resend";

  private transient Logger logger;
  private OutputCollector collector;

  @Override
  public void declareOutputFields(OutputFieldsDeclarer declarer) {
    declarer.declareStream(ALARM_EVENT_STREAM_ID, new Fields(ALARM_EVENT_STREAM_FIELDS));
    declarer.declareStream(METRIC_ALARM_EVENT_STREAM_ID, new Fields(
        METRIC_ALARM_EVENT_STREAM_FIELDS));
    declarer.declareStream(METRIC_SUB_ALARM_EVENT_STREAM_ID, new Fields(
        METRIC_SUB_ALARM_EVENT_STREAM_FIELDS));
  }

  @Override
  public void execute(Tuple tuple) {
    try {
      Object event = tuple.getValue(0);
      logger.trace("Received event for processing {}", event);
      if (event instanceof AlarmCreatedEvent) {
        handle((AlarmCreatedEvent) event);
      } else if (event instanceof AlarmDeletedEvent) {
        handle((AlarmDeletedEvent) event);
      } else if (event instanceof AlarmUpdatedEvent) {
        handle((AlarmUpdatedEvent) event);
      }
    } catch (Exception e) {
      logger.error("Error processing tuple {}", tuple, e);
    } finally {
      collector.ack(tuple);
    }
@@ -102,34 +107,40 @@ public class EventProcessingBolt extends BaseRichBolt {
  @Override
  @SuppressWarnings("rawtypes")
  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    logger = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
    logger.info("Preparing");
    this.collector = collector;
  }

  void handle(AlarmCreatedEvent event) {
    for (Map.Entry<String, AlarmSubExpression> subExpressionEntry : event.alarmSubExpressions
        .entrySet()) {
      sendAddSubAlarm(event.alarmId, subExpressionEntry.getKey(), event.tenantId,
          subExpressionEntry.getValue());
    }
  }

  private void sendAddSubAlarm(String alarmId, String subAlarmId, String tenantId,
      AlarmSubExpression alarmSubExpression) {
    sendSubAlarm(CREATED, alarmId, subAlarmId, tenantId, alarmSubExpression);
  }

  private void sendUpdateSubAlarm(String alarmId, String subAlarmId, String tenantId,
      AlarmSubExpression alarmSubExpression) {
    sendSubAlarm(UPDATED, alarmId, subAlarmId, tenantId, alarmSubExpression);
  }

  private void sendResendSubAlarm(String alarmId, String subAlarmId, String tenantId,
      AlarmSubExpression alarmSubExpression) {
    sendSubAlarm(RESEND, alarmId, subAlarmId, tenantId, alarmSubExpression);
  }

  private void sendSubAlarm(String eventType, String alarmId, String subAlarmId, String tenantId,
      AlarmSubExpression alarmSubExpression) {
    MetricDefinition metricDef = alarmSubExpression.getMetricDefinition();
    collector.emit(METRIC_SUB_ALARM_EVENT_STREAM_ID, new Values(eventType,
        new MetricDefinitionAndTenantId(metricDef, tenantId), new SubAlarm(subAlarmId, alarmId,
            alarmSubExpression)));
  }

  void handle(AlarmDeletedEvent event) {
@@ -142,25 +153,24 @@ public class EventProcessingBolt extends BaseRichBolt {
  private void sendDeletedSubAlarm(String subAlarmId, String tenantId, MetricDefinition metricDef) {
    collector.emit(METRIC_ALARM_EVENT_STREAM_ID, new Values(DELETED,
        new MetricDefinitionAndTenantId(metricDef, tenantId), subAlarmId));
  }

  void handle(AlarmUpdatedEvent event) {
    if ((!event.oldAlarmState.equals(event.alarmState) || !event.oldAlarmSubExpressions.isEmpty())
        && event.changedSubExpressions.isEmpty() && event.newAlarmSubExpressions.isEmpty()) {
      for (Map.Entry<String, AlarmSubExpression> entry : event.unchangedSubExpressions.entrySet()) {
        sendResendSubAlarm(event.alarmId, entry.getKey(), event.tenantId, entry.getValue());
      }
    }

    for (Map.Entry<String, AlarmSubExpression> entry : event.oldAlarmSubExpressions.entrySet()) {
      sendDeletedSubAlarm(entry.getKey(), event.tenantId, entry.getValue().getMetricDefinition());
    }
    for (Map.Entry<String, AlarmSubExpression> entry : event.changedSubExpressions.entrySet()) {
      sendUpdateSubAlarm(event.alarmId, entry.getKey(), event.tenantId, entry.getValue());
    }
    for (Map.Entry<String, AlarmSubExpression> entry : event.newAlarmSubExpressions.entrySet()) {
      sendAddSubAlarm(event.alarmId, entry.getKey(), event.tenantId, entry.getValue());
    }
    collector.emit(ALARM_EVENT_STREAM_ID, new Values(UPDATED, event.alarmId, event));
  }

View File

@@ -14,51 +14,52 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.thresholding;

import com.hpcloud.mon.EventSpoutConfig;
import com.hpcloud.mon.infrastructure.thresholding.deserializer.EventDeserializer;

import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Values;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.util.List;

public class EventSpout extends KafkaSpout {
  private static final Logger logger = LoggerFactory.getLogger(EventSpout.class);

  private static final long serialVersionUID = 8457340455857276878L;

  private final EventDeserializer deserializer;

  public EventSpout(EventSpoutConfig configuration, EventDeserializer deserializer) {
    super(configuration);
    this.deserializer = deserializer;
    logger.info("EventSpout created");
  }

  @Override
  public void declareOutputFields(OutputFieldsDeclarer declarer) {
    declarer.declare(deserializer.getOutputFields());
  }

  @Override
  protected void processMessage(byte[] message, SpoutOutputCollector collector) {
    List<List<?>> events = deserializer.deserialize(message);
    if (events != null) {
      for (final List<?> event : events) {
        final Object eventToSend = event.get(0);
        if (!(eventToSend instanceof Serializable)) {
          logger.error("Class {} is not Serializable: {}", eventToSend.getClass(), eventToSend);
          continue;
        }
        collector.emit(new Values(eventToSend));
      }
    }
  }
}
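EventSpout leaves both the tuple schema and the byte-to-event conversion to the deserializer: getOutputFields() supplies the declared fields, and deserialize(byte[]) returns one inner list per tuple whose first element must be Serializable. The sketch below is a hypothetical implementation written against that usage; the real EventDeserializer interface may declare different signatures.

// Hypothetical implementation; method shapes are inferred from how EventSpout calls them.
import backtype.storm.tuple.Fields;

import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;

public class PassThroughEventDeserializer implements EventDeserializer {
  @Override
  public Fields getOutputFields() {
    return new Fields("event"); // matches the single value emitted per tuple
  }

  @Override
  public List<List<?>> deserialize(byte[] bytes) {
    String text = new String(bytes, StandardCharsets.UTF_8);
    // One tuple whose first (and only) element is a Serializable event payload.
    return Collections.<List<?>>singletonList(Collections.<Object>singletonList(text));
  }
}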

View File

@@ -14,13 +14,16 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.thresholding;

import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.configuration.KafkaProducerProperties;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -28,31 +31,30 @@ import java.util.Properties;

public class KafkaAlarmEventForwarder implements AlarmEventForwarder {
  private static final Logger logger = LoggerFactory.getLogger(KafkaAlarmEventForwarder.class);

  private final Producer<String, String> producer;

  private final String topic;

  public KafkaAlarmEventForwarder(KafkaProducerConfiguration kafkaConfig) {
    this.topic = kafkaConfig.getTopic();

    Properties kafkaProperties = KafkaProducerProperties.createKafkaProperties(kafkaConfig);
    ProducerConfig consumerConfig = new ProducerConfig(kafkaProperties);
    producer = new Producer<String, String>(consumerConfig);
  }

  @Override
  public void send(String alertExchange, String alertRoutingKey, String json) {
    logger.debug("sending alertExchange: {}, alertRoutingKey: {}, json: {}", alertExchange,
        alertRoutingKey, json);
    final KeyedMessage<String, String> message =
        new KeyedMessage<String, String>(topic, alertRoutingKey, json);
    producer.send(message);
  }

  @Override
  public void close() {
    producer.close();
  }
}
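A short usage sketch, assuming a KafkaProducerConfiguration already populated from the thresholding configuration; the exchange, routing key, and JSON payload below are placeholders:

// Hedged usage fragment; kafkaConfig is assumed to exist and be fully configured.
AlarmEventForwarder forwarder = new KafkaAlarmEventForwarder(kafkaConfig);
try {
  forwarder.send("alarms", "alarm-state-transitions", "{\"alarmId\":\"example\"}");
} finally {
  forwarder.close(); // releases the underlying Kafka producer
}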

View File

@@ -14,15 +14,16 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.thresholding;

import com.hpcloud.configuration.KafkaConsumerProperties;
import com.hpcloud.mon.KafkaSpoutConfig;

import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.base.BaseRichSpout;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
@@ -38,144 +39,146 @@ import java.util.Map;
import java.util.Properties;

public abstract class KafkaSpout extends BaseRichSpout implements Runnable {
  private static final Logger logger = LoggerFactory.getLogger(KafkaSpout.class);

  private static final long serialVersionUID = 744004533863562119L;

  private final KafkaSpoutConfig kafkaSpoutConfig;

  private transient ConsumerConnector consumerConnector;

  private transient List<KafkaStream<byte[], byte[]>> streams = null;

  private SpoutOutputCollector collector;

  private volatile boolean shouldContinue;

  private byte[] message;

  private Thread readerThread;

  private String spoutName;

  private boolean waiting = false;

  protected KafkaSpout(KafkaSpoutConfig kafkaSpoutConfig) {
    this.kafkaSpoutConfig = kafkaSpoutConfig;
  }

  @Override
  public void activate() {
    logger.info("Activated");
    if (streams == null) {
      Map<String, Integer> topicCountMap = new HashMap<>();
      topicCountMap.put(kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic(), new Integer(1));
      Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
          consumerConnector.createMessageStreams(topicCountMap);
      streams = consumerMap.get(kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic());
    }
  }

  @Override
  public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
    logger.info("Opened");
    this.collector = collector;
    logger.info(" topic = " + kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic());
    this.spoutName = String.format("%s-%d", context.getThisComponentId(), context.getThisTaskId());

    Properties kafkaProperties =
        KafkaConsumerProperties.createKafkaProperties(kafkaSpoutConfig.kafkaConsumerConfiguration);
    // Have to use a different consumer.id for each spout so use the storm taskId. Otherwise,
    // zookeeper complains about a conflicted ephemeral node when there is more than one spout
    // reading from a topic
    kafkaProperties.setProperty("consumer.id", String.valueOf(context.getThisTaskId()));
    ConsumerConfig consumerConfig = new ConsumerConfig(kafkaProperties);
    this.consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);
  }

  @Override
  public synchronized void deactivate() {
    logger.info("deactivated");
    this.consumerConnector.shutdown();
    this.shouldContinue = false;
    // Wake up the reader thread if it is waiting
    notify();
  }
@Override @Override
public synchronized void deactivate() { public void run() {
LOG.info("deactivated"); while (this.shouldContinue) {
this.consumerConnector.shutdown(); final ConsumerIterator<byte[], byte[]> it = streams.get(0).iterator();
this.shouldContinue = false; if (it.hasNext()) {
// Wake up the reader thread if it is waiting final byte[] message = it.next().message();
notify(); synchronized (this) {
} this.message = message;
// Wake up getMessage() if it is waiting
@Override if (this.waiting) {
public void run() {
while (this.shouldContinue) {
final ConsumerIterator<byte[], byte[]> it = streams.get(0).iterator();
if (it.hasNext()) {
final byte[] message = it.next().message();
synchronized (this) {
this.message = message;
// Wake up getMessage() if it is waiting
if (this.waiting)
notify();
while (this.message != null && this.shouldContinue)
try {
wait();
} catch (InterruptedException e) {
LOG.info("Wait interrupted", e);
}
}
}
}
LOG.info("readerThread {} exited", this.readerThread.getName());
this.readerThread = null;
}
@Override
public void nextTuple() {
LOG.debug("nextTuple called");
checkReaderRunning();
final byte[] message = getMessage();
if (message != null) {
LOG.debug("streams iterator has next");
processMessage(message, collector);
}
}
private void checkReaderRunning() {
this.shouldContinue = true;
if (this.readerThread == null) {
final String threadName = String.format("%s reader", this.spoutName);
this.readerThread = new Thread(this, threadName);
this.readerThread.start();
LOG.info("Started Reader Thread {}", this.readerThread.getName());
}
}
/**
* Must only be called from a synchronized method
*
* @return
*/
private byte[] tryToGetMessage() {
final byte[] result = this.message;
if (result != null) {
this.message = null;
notify(); notify();
}
while (this.message != null && this.shouldContinue)
try {
wait();
} catch (InterruptedException e) {
logger.info("Wait interrupted", e);
}
} }
return result; }
} }
logger.info("readerThread {} exited", this.readerThread.getName());
this.readerThread = null;
}
private synchronized byte[] getMessage() { @Override
final byte[] result = tryToGetMessage(); public void nextTuple() {
if (result != null) { logger.debug("nextTuple called");
return result; checkReaderRunning();
} final byte[] message = getMessage();
// Storm docs recommend a short sleep but make the sleep time if (message != null) {
// configurable so we can lessen the load on dev systems logger.debug("streams iterator has next");
this.waiting = true; processMessage(message, collector);
try {
wait(kafkaSpoutConfig.maxWaitTime);
} catch (InterruptedException e) {
LOG.info("Sleep interrupted", e);
}
this.waiting = false;
return tryToGetMessage(); // We might have been woken up because there was a message
} }
}
protected abstract void processMessage(byte[] message, SpoutOutputCollector collector2); private void checkReaderRunning() {
this.shouldContinue = true;
if (this.readerThread == null) {
final String threadName = String.format("%s reader", this.spoutName);
this.readerThread = new Thread(this, threadName);
this.readerThread.start();
logger.info("Started Reader Thread {}", this.readerThread.getName());
}
}
/**
* Must only be called from a synchronized method
*
* @return
*/
private byte[] tryToGetMessage() {
final byte[] result = this.message;
if (result != null) {
this.message = null;
notify();
}
return result;
}
private synchronized byte[] getMessage() {
final byte[] result = tryToGetMessage();
if (result != null) {
return result;
}
// Storm docs recommend a short sleep but make the sleep time
// configurable so we can lessen the load on dev systems
this.waiting = true;
try {
wait(kafkaSpoutConfig.maxWaitTime);
} catch (InterruptedException e) {
logger.info("Sleep interrupted", e);
}
this.waiting = false;
return tryToGetMessage(); // We might have been woken up because there was a message
}
protected abstract void processMessage(byte[] message, SpoutOutputCollector collector2);
} }
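The reader thread and nextTuple() exchange at most one message at a time through the wait/notify protocol shown above. The stand-alone sketch below reproduces that single-slot handoff with plain Java threads; all names and the generated payloads are hypothetical, and it is meant only to make the synchronization easier to follow, not to mirror the spout exactly.

public class SingleSlotHandoff implements Runnable {
  private byte[] message;                      // the single slot shared with the consumer
  private volatile boolean shouldContinue = true;
  private boolean waiting = false;

  @Override
  public void run() {                          // "reader thread": fills the slot, then blocks
    int i = 0;
    while (shouldContinue) {
      final byte[] next = ("payload-" + i++).getBytes();
      synchronized (this) {
        message = next;
        if (waiting) {
          notify();                            // wake a consumer stuck in take()
        }
        while (message != null && shouldContinue) {
          try {
            wait();                            // block until the consumer empties the slot
          } catch (InterruptedException e) {
            return;
          }
        }
      }
    }
  }

  // Consumer side, analogous to getMessage(): wait briefly, then take the slot if it was filled.
  public synchronized byte[] take(long maxWaitMillis) throws InterruptedException {
    if (message == null) {
      waiting = true;
      wait(maxWaitMillis);
      waiting = false;
    }
    final byte[] result = message;
    if (result != null) {
      message = null;
      notify();                                // free the slot and wake the reader
    }
    return result;
  }

  public static void main(String[] args) throws InterruptedException {
    final SingleSlotHandoff handoff = new SingleSlotHandoff();
    new Thread(handoff, "reader").start();
    for (int n = 0; n < 3; n++) {
      final byte[] m = handoff.take(100);
      System.out.println(m == null ? "no message yet" : new String(m));
    }
    synchronized (handoff) {
      handoff.shouldContinue = false;          // stop the reader and wake it if it is waiting
      handoff.notify();
    }
  }
}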
View File
@ -14,26 +14,9 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon.infrastructure.thresholding; package com.hpcloud.mon.infrastructure.thresholding;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.Config;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.model.metric.Metric; import com.hpcloud.mon.common.model.metric.Metric;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId; import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm; import com.hpcloud.mon.domain.model.SubAlarm;
@ -46,14 +29,32 @@ import com.hpcloud.streaming.storm.Streams;
import com.hpcloud.streaming.storm.Tuples; import com.hpcloud.streaming.storm.Tuples;
import com.hpcloud.util.Injector; import com.hpcloud.util.Injector;
import backtype.storm.Config;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/** /**
* Aggregates metrics for individual alarms. Receives metric/alarm tuples and tick tuples, and * Aggregates metrics for individual alarms. Receives metric/alarm tuples and tick tuples, and
* outputs alarm information whenever an alarm's state changes. Concerned with alarms that relate to * outputs alarm information whenever an alarm's state changes. Concerned with alarms that relate to
* a specific metric. * a specific metric.
* *
* The TICK_TUPLE_SECONDS_KEY value should be no greater than the smallest possible window width. * The TICK_TUPLE_SECONDS_KEY value should be no greater than the smallest possible window width.
* This ensures that the window slides in time with the expected metrics. * This ensures that the window slides in time with the expected metrics.
* *
* <ul> * <ul>
* <li>Input: MetricDefinition metricDefinition, Metric metric * <li>Input: MetricDefinition metricDefinition, Metric metric
* <li>Input metric-alarm-events: String eventType, MetricDefinition metricDefinition, String * <li>Input metric-alarm-events: String eventType, MetricDefinition metricDefinition, String
@ -66,13 +67,14 @@ import com.hpcloud.util.Injector;
public class MetricAggregationBolt extends BaseRichBolt { public class MetricAggregationBolt extends BaseRichBolt {
private static final long serialVersionUID = 5624314196838090726L; private static final long serialVersionUID = 5624314196838090726L;
public static final String TICK_TUPLE_SECONDS_KEY = "com.hpcloud.mon.aggregation.tick.seconds"; public static final String TICK_TUPLE_SECONDS_KEY = "com.hpcloud.mon.aggregation.tick.seconds";
public static final String[] FIELDS = new String[] { "alarmId", "subAlarm" }; public static final String[] FIELDS = new String[] {"alarmId", "subAlarm"};
public static final String METRIC_AGGREGATION_CONTROL_STREAM = "MetricAggregationControl"; public static final String METRIC_AGGREGATION_CONTROL_STREAM = "MetricAggregationControl";
public static final String[] METRIC_AGGREGATION_CONTROL_FIELDS = new String[] { "directive" }; public static final String[] METRIC_AGGREGATION_CONTROL_FIELDS = new String[] {"directive"};
public static final String METRICS_BEHIND = "MetricsBehind"; public static final String METRICS_BEHIND = "MetricsBehind";
final Map<MetricDefinitionAndTenantId, SubAlarmStatsRepository> subAlarmStatsRepos = new HashMap<>(); final Map<MetricDefinitionAndTenantId, SubAlarmStatsRepository> subAlarmStatsRepos =
private transient Logger LOG; new HashMap<>();
private transient Logger logger;
private DataSourceFactory dbConfig; private DataSourceFactory dbConfig;
private transient SubAlarmDAO subAlarmDAO; private transient SubAlarmDAO subAlarmDAO;
/** Namespaces for which metrics are received sporadically */ /** Namespaces for which metrics are received sporadically */
@ -96,38 +98,43 @@ public class MetricAggregationBolt extends BaseRichBolt {
@Override @Override
public void execute(Tuple tuple) { public void execute(Tuple tuple) {
LOG.debug("tuple: {}", tuple); logger.debug("tuple: {}", tuple);
try { try {
if (Tuples.isTickTuple(tuple)) { if (Tuples.isTickTuple(tuple)) {
evaluateAlarmsAndSlideWindows(); evaluateAlarmsAndSlideWindows();
} else { } else {
if (Streams.DEFAULT_STREAM_ID.equals(tuple.getSourceStreamId())) { if (Streams.DEFAULT_STREAM_ID.equals(tuple.getSourceStreamId())) {
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) tuple.getValue(0); MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) tuple.getValue(0);
Metric metric = (Metric) tuple.getValueByField("metric"); Metric metric = (Metric) tuple.getValueByField("metric");
aggregateValues(metricDefinitionAndTenantId, metric); aggregateValues(metricDefinitionAndTenantId, metric);
} else if (METRIC_AGGREGATION_CONTROL_STREAM.equals(tuple.getSourceStreamId())) { } else if (METRIC_AGGREGATION_CONTROL_STREAM.equals(tuple.getSourceStreamId())) {
processControl(tuple.getString(0)); processControl(tuple.getString(0));
} else { } else {
String eventType = tuple.getString(0); String eventType = tuple.getString(0);
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) tuple.getValue(1); MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) tuple.getValue(1);
if (EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) { if (EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
String subAlarmId = tuple.getString(2); String subAlarmId = tuple.getString(2);
if (EventProcessingBolt.DELETED.equals(eventType)) if (EventProcessingBolt.DELETED.equals(eventType)) {
handleAlarmDeleted(metricDefinitionAndTenantId, subAlarmId); handleAlarmDeleted(metricDefinitionAndTenantId, subAlarmId);
} else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) { }
} else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple
.getSourceStreamId())) {
SubAlarm subAlarm = (SubAlarm) tuple.getValue(2); SubAlarm subAlarm = (SubAlarm) tuple.getValue(2);
if (EventProcessingBolt.CREATED.equals(eventType)) if (EventProcessingBolt.CREATED.equals(eventType)) {
handleAlarmCreated(metricDefinitionAndTenantId, subAlarm); handleAlarmCreated(metricDefinitionAndTenantId, subAlarm);
else if (EventProcessingBolt.UPDATED.equals(eventType)) } else if (EventProcessingBolt.UPDATED.equals(eventType)) {
handleAlarmUpdated(metricDefinitionAndTenantId, subAlarm); handleAlarmUpdated(metricDefinitionAndTenantId, subAlarm);
else if (EventProcessingBolt.RESEND.equals(eventType)) } else if (EventProcessingBolt.RESEND.equals(eventType)) {
handleAlarmResend(metricDefinitionAndTenantId, subAlarm); handleAlarmResend(metricDefinitionAndTenantId, subAlarm);
}
} }
} }
} }
} catch (Exception e) { } catch (Exception e) {
LOG.error("Error processing tuple {}", tuple, e); logger.error("Error processing tuple {}", tuple, e);
} finally { } finally {
collector.ack(tuple); collector.ack(tuple);
} }
@ -135,11 +142,11 @@ public class MetricAggregationBolt extends BaseRichBolt {
private void processControl(final String directive) { private void processControl(final String directive) {
if (METRICS_BEHIND.equals(directive)) { if (METRICS_BEHIND.equals(directive)) {
LOG.debug("Received {}", directive); logger.debug("Received {}", directive);
this.upToDate = false; this.upToDate = false;
} else {
logger.error("Unknown directive '{}'", directive);
} }
else
LOG.error("Unknown directive '{}'", directive);
} }
@Override @Override
@ -153,8 +160,8 @@ public class MetricAggregationBolt extends BaseRichBolt {
@Override @Override
@SuppressWarnings("rawtypes") @SuppressWarnings("rawtypes")
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) { public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
LOG = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context)); logger = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
LOG.info("Preparing"); logger.info("Preparing");
this.collector = collector; this.collector = collector;
if (subAlarmDAO == null) { if (subAlarmDAO == null) {
@ -167,17 +174,21 @@ public class MetricAggregationBolt extends BaseRichBolt {
* Aggregates values for the {@code metric} that are within the periods defined for the alarm. * Aggregates values for the {@code metric} that are within the periods defined for the alarm.
*/ */
void aggregateValues(MetricDefinitionAndTenantId metricDefinitionAndTenantId, Metric metric) { void aggregateValues(MetricDefinitionAndTenantId metricDefinitionAndTenantId, Metric metric) {
SubAlarmStatsRepository subAlarmStatsRepo = getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId); SubAlarmStatsRepository subAlarmStatsRepo =
if (subAlarmStatsRepo == null || metric == null) getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId);
if (subAlarmStatsRepo == null || metric == null) {
return; return;
}
for (SubAlarmStats stats : subAlarmStatsRepo.get()) { for (SubAlarmStats stats : subAlarmStatsRepo.get()) {
if (stats.getStats().addValue(metric.value, metric.timestamp)) if (stats.getStats().addValue(metric.value, metric.timestamp)) {
LOG.trace("Aggregated value {} at {} for {}. Updated {}", metric.value, metric.timestamp, logger.trace("Aggregated value {} at {} for {}. Updated {}", metric.value,
metricDefinitionAndTenantId, stats.getStats()); metric.timestamp, metricDefinitionAndTenantId, stats.getStats());
else } else {
LOG.warn("Metric is too old, age {} seconds: timestamp {} for {}, {}", currentTimeSeconds() - metric.timestamp, logger.warn("Metric is too old, age {} seconds: timestamp {} for {}, {}",
metric.timestamp, metricDefinitionAndTenantId, stats.getStats()); currentTimeSeconds() - metric.timestamp, metric.timestamp, metricDefinitionAndTenantId,
stats.getStats());
}
} }
} }
@ -186,30 +197,31 @@ public class MetricAggregationBolt extends BaseRichBolt {
* ago, then sliding the window to the current time. * ago, then sliding the window to the current time.
*/ */
void evaluateAlarmsAndSlideWindows() { void evaluateAlarmsAndSlideWindows() {
LOG.debug("evaluateAlarmsAndSlideWindows called"); logger.debug("evaluateAlarmsAndSlideWindows called");
long newWindowTimestamp = currentTimeSeconds(); long newWindowTimestamp = currentTimeSeconds();
for (SubAlarmStatsRepository subAlarmStatsRepo : subAlarmStatsRepos.values()) for (SubAlarmStatsRepository subAlarmStatsRepo : subAlarmStatsRepos.values()) {
for (SubAlarmStats subAlarmStats : subAlarmStatsRepo.get()) { for (SubAlarmStats subAlarmStats : subAlarmStatsRepo.get()) {
if (upToDate) { if (upToDate) {
LOG.debug("Evaluating {}", subAlarmStats); logger.debug("Evaluating {}", subAlarmStats);
if (subAlarmStats.evaluateAndSlideWindow(newWindowTimestamp)) { if (subAlarmStats.evaluateAndSlideWindow(newWindowTimestamp)) {
LOG.debug("Alarm state changed for {}", subAlarmStats); logger.debug("Alarm state changed for {}", subAlarmStats);
collector.emit(new Values(subAlarmStats.getSubAlarm().getAlarmId(), collector.emit(new Values(subAlarmStats.getSubAlarm().getAlarmId(), subAlarmStats
subAlarmStats.getSubAlarm())); .getSubAlarm()));
} }
} } else {
else {
subAlarmStats.slideWindow(newWindowTimestamp); subAlarmStats.slideWindow(newWindowTimestamp);
} }
} }
}
if (!upToDate) { if (!upToDate) {
LOG.info("Did not evaluate SubAlarms because Metrics are not up to date"); logger.info("Did not evaluate SubAlarms because Metrics are not up to date");
upToDate = true; upToDate = true;
} }
} }
/** /**
* Only used for testing. * Only used for testing.
*
* @return * @return
*/ */
protected long currentTimeSeconds() { protected long currentTimeSeconds() {
@ -217,26 +229,28 @@ public class MetricAggregationBolt extends BaseRichBolt {
} }
/** /**
* Returns an existing or newly created SubAlarmStatsRepository for the {@code metricDefinitionAndTenantId}. * Returns an existing or newly created SubAlarmStatsRepository for the
* Newly created SubAlarmStatsRepositories are initialized with stats whose view ends one minute * {@code metricDefinitionAndTenantId}. Newly created SubAlarmStatsRepositories are initialized
* from now. * with stats whose view ends one minute from now.
*/ */
SubAlarmStatsRepository getOrCreateSubAlarmStatsRepo(MetricDefinitionAndTenantId metricDefinitionAndTenantId) { SubAlarmStatsRepository getOrCreateSubAlarmStatsRepo(
MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
SubAlarmStatsRepository subAlarmStatsRepo = subAlarmStatsRepos.get(metricDefinitionAndTenantId); SubAlarmStatsRepository subAlarmStatsRepo = subAlarmStatsRepos.get(metricDefinitionAndTenantId);
if (subAlarmStatsRepo == null) { if (subAlarmStatsRepo == null) {
List<SubAlarm> subAlarms = subAlarmDAO.find(metricDefinitionAndTenantId); List<SubAlarm> subAlarms = subAlarmDAO.find(metricDefinitionAndTenantId);
if (subAlarms.isEmpty()) if (subAlarms.isEmpty()) {
LOG.warn("Failed to find sub alarms for {}", metricDefinitionAndTenantId); logger.warn("Failed to find sub alarms for {}", metricDefinitionAndTenantId);
else { } else {
LOG.debug("Creating SubAlarmStats for {}", metricDefinitionAndTenantId); logger.debug("Creating SubAlarmStats for {}", metricDefinitionAndTenantId);
for (SubAlarm subAlarm : subAlarms) { for (SubAlarm subAlarm : subAlarms) {
// TODO should treat metric def name prefix like a namespace // TODO should treat metric def name prefix like a namespace
subAlarm.setSporadicMetric(sporadicMetricNamespaces.contains(metricDefinitionAndTenantId.metricDefinition.name)); subAlarm.setSporadicMetric(sporadicMetricNamespaces
.contains(metricDefinitionAndTenantId.metricDefinition.name));
} }
subAlarmStatsRepo = new SubAlarmStatsRepository(); subAlarmStatsRepo = new SubAlarmStatsRepository();
for (SubAlarm subAlarm : subAlarms) { for (SubAlarm subAlarm : subAlarms) {
long viewEndTimestamp = currentTimeSeconds() + subAlarm.getExpression().getPeriod(); long viewEndTimestamp = currentTimeSeconds() + subAlarm.getExpression().getPeriod();
subAlarmStatsRepo.add(subAlarm, viewEndTimestamp); subAlarmStatsRepo.add(subAlarm, viewEndTimestamp);
} }
subAlarmStatsRepos.put(metricDefinitionAndTenantId, subAlarmStatsRepo); subAlarmStatsRepos.put(metricDefinitionAndTenantId, subAlarmStatsRepo);
} }
@ -249,42 +263,50 @@ public class MetricAggregationBolt extends BaseRichBolt {
* Adds the {@code subAlarm} subAlarmStatsRepo for the {@code metricDefinitionAndTenantId}. * Adds the {@code subAlarm} subAlarmStatsRepo for the {@code metricDefinitionAndTenantId}.
*/ */
void handleAlarmCreated(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm subAlarm) { void handleAlarmCreated(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm subAlarm) {
LOG.debug("Received AlarmCreatedEvent for {}", subAlarm); logger.debug("Received AlarmCreatedEvent for {}", subAlarm);
addSubAlarm(metricDefinitionAndTenantId, subAlarm); addSubAlarm(metricDefinitionAndTenantId, subAlarm);
} }
void handleAlarmResend(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm resendSubAlarm) { void handleAlarmResend(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
final RepoAndStats repoAndStats = findExistingSubAlarmStats(metricDefinitionAndTenantId, resendSubAlarm); SubAlarm resendSubAlarm) {
if (repoAndStats == null) final RepoAndStats repoAndStats =
findExistingSubAlarmStats(metricDefinitionAndTenantId, resendSubAlarm);
if (repoAndStats == null) {
return; return;
}
final SubAlarmStats oldSubAlarmStats = repoAndStats.subAlarmStats; final SubAlarmStats oldSubAlarmStats = repoAndStats.subAlarmStats;
final SubAlarm oldSubAlarm = oldSubAlarmStats.getSubAlarm(); final SubAlarm oldSubAlarm = oldSubAlarmStats.getSubAlarm();
resendSubAlarm.setState(oldSubAlarm.getState()); resendSubAlarm.setState(oldSubAlarm.getState());
resendSubAlarm.setNoState(true); // Have it send its state again so the Alarm can be evaluated resendSubAlarm.setNoState(true); // Have it send its state again so the Alarm can be evaluated
LOG.debug("Forcing SubAlarm {} to send state at next evaluation", oldSubAlarm); logger.debug("Forcing SubAlarm {} to send state at next evaluation", oldSubAlarm);
oldSubAlarmStats.updateSubAlarm(resendSubAlarm); oldSubAlarmStats.updateSubAlarm(resendSubAlarm);
} }
private RepoAndStats findExistingSubAlarmStats(MetricDefinitionAndTenantId metricDefinitionAndTenantId, private RepoAndStats findExistingSubAlarmStats(
SubAlarm oldSubAlarm) { MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm oldSubAlarm) {
final SubAlarmStatsRepository oldSubAlarmStatsRepo = subAlarmStatsRepos.get(metricDefinitionAndTenantId); final SubAlarmStatsRepository oldSubAlarmStatsRepo =
subAlarmStatsRepos.get(metricDefinitionAndTenantId);
if (oldSubAlarmStatsRepo == null) { if (oldSubAlarmStatsRepo == null) {
LOG.error("Did not find SubAlarmStatsRepository for MetricDefinition {}", metricDefinitionAndTenantId); logger.error("Did not find SubAlarmStatsRepository for MetricDefinition {}",
metricDefinitionAndTenantId);
return null; return null;
} }
final SubAlarmStats oldSubAlarmStats = oldSubAlarmStatsRepo.get(oldSubAlarm.getId()); final SubAlarmStats oldSubAlarmStats = oldSubAlarmStatsRepo.get(oldSubAlarm.getId());
if (oldSubAlarmStats == null) { if (oldSubAlarmStats == null) {
LOG.error("Did not find existing SubAlarm {} in SubAlarmStatsRepository", oldSubAlarm); logger.error("Did not find existing SubAlarm {} in SubAlarmStatsRepository", oldSubAlarm);
return null; return null;
} }
return new RepoAndStats(oldSubAlarmStatsRepo, oldSubAlarmStats); return new RepoAndStats(oldSubAlarmStatsRepo, oldSubAlarmStats);
} }
private void addSubAlarm(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm subAlarm) { private void addSubAlarm(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
SubAlarmStatsRepository subAlarmStatsRepo = getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId); SubAlarm subAlarm) {
if (subAlarmStatsRepo == null) SubAlarmStatsRepository subAlarmStatsRepo =
getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId);
if (subAlarmStatsRepo == null) {
return; return;
}
long viewEndTimestamp = currentTimeSeconds() + subAlarm.getExpression().getPeriod(); long viewEndTimestamp = currentTimeSeconds() + subAlarm.getExpression().getPeriod();
subAlarmStatsRepo.add(subAlarm, viewEndTimestamp); subAlarmStatsRepo.add(subAlarm, viewEndTimestamp);
@ -296,8 +318,9 @@ public class MetricAggregationBolt extends BaseRichBolt {
* MetricDefinition can't have changed, just how it is evaluated * MetricDefinition can't have changed, just how it is evaluated
*/ */
void handleAlarmUpdated(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm subAlarm) { void handleAlarmUpdated(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm subAlarm) {
LOG.debug("Received AlarmUpdatedEvent for {}", subAlarm); logger.debug("Received AlarmUpdatedEvent for {}", subAlarm);
final RepoAndStats repoAndStats = findExistingSubAlarmStats(metricDefinitionAndTenantId, subAlarm); final RepoAndStats repoAndStats =
findExistingSubAlarmStats(metricDefinitionAndTenantId, subAlarm);
if (repoAndStats != null) { if (repoAndStats != null) {
// Clear the old SubAlarm, but save the SubAlarm state // Clear the old SubAlarm, but save the SubAlarm state
final SubAlarmStats oldSubAlarmStats = repoAndStats.subAlarmStats; final SubAlarmStats oldSubAlarmStats = repoAndStats.subAlarmStats;
@ -305,12 +328,14 @@ public class MetricAggregationBolt extends BaseRichBolt {
subAlarm.setState(oldSubAlarm.getState()); subAlarm.setState(oldSubAlarm.getState());
subAlarm.setNoState(true); // Doesn't hurt to send too many state changes, just too few subAlarm.setNoState(true); // Doesn't hurt to send too many state changes, just too few
if (oldSubAlarm.isCompatible(subAlarm)) { if (oldSubAlarm.isCompatible(subAlarm)) {
LOG.debug("Changing SubAlarm {} to SubAlarm {} and keeping measurements", oldSubAlarm, subAlarm); logger.debug("Changing SubAlarm {} to SubAlarm {} and keeping measurements", oldSubAlarm,
subAlarm);
oldSubAlarmStats.updateSubAlarm(subAlarm); oldSubAlarmStats.updateSubAlarm(subAlarm);
return; return;
} }
// Have to completely change the SubAlarmStats // Have to completely change the SubAlarmStats
LOG.debug("Changing SubAlarm {} to SubAlarm {} and flushing measurements", oldSubAlarm, subAlarm); logger.debug("Changing SubAlarm {} to SubAlarm {} and flushing measurements", oldSubAlarm,
subAlarm);
repoAndStats.subAlarmStatsRepository.remove(subAlarm.getId()); repoAndStats.subAlarmStatsRepository.remove(subAlarm.getId());
} }
addSubAlarm(metricDefinitionAndTenantId, subAlarm); addSubAlarm(metricDefinitionAndTenantId, subAlarm);
@ -321,12 +346,13 @@ public class MetricAggregationBolt extends BaseRichBolt {
* {@code metricDefinitionAndTenantId}. * {@code metricDefinitionAndTenantId}.
*/ */
void handleAlarmDeleted(MetricDefinitionAndTenantId metricDefinitionAndTenantId, String subAlarmId) { void handleAlarmDeleted(MetricDefinitionAndTenantId metricDefinitionAndTenantId, String subAlarmId) {
LOG.debug("Received AlarmDeletedEvent for subAlarm id {}", subAlarmId); logger.debug("Received AlarmDeletedEvent for subAlarm id {}", subAlarmId);
SubAlarmStatsRepository subAlarmStatsRepo = subAlarmStatsRepos.get(metricDefinitionAndTenantId); SubAlarmStatsRepository subAlarmStatsRepo = subAlarmStatsRepos.get(metricDefinitionAndTenantId);
if (subAlarmStatsRepo != null) { if (subAlarmStatsRepo != null) {
subAlarmStatsRepo.remove(subAlarmId); subAlarmStatsRepo.remove(subAlarmId);
if (subAlarmStatsRepo.isEmpty()) if (subAlarmStatsRepo.isEmpty()) {
subAlarmStatsRepos.remove(metricDefinitionAndTenantId); subAlarmStatsRepos.remove(metricDefinitionAndTenantId);
}
} }
} }
@ -334,8 +360,7 @@ public class MetricAggregationBolt extends BaseRichBolt {
public final SubAlarmStatsRepository subAlarmStatsRepository; public final SubAlarmStatsRepository subAlarmStatsRepository;
public final SubAlarmStats subAlarmStats; public final SubAlarmStats subAlarmStats;
public RepoAndStats(SubAlarmStatsRepository subAlarmStatsRepository, public RepoAndStats(SubAlarmStatsRepository subAlarmStatsRepository, SubAlarmStats subAlarmStats) {
SubAlarmStats subAlarmStats) {
this.subAlarmStatsRepository = subAlarmStatsRepository; this.subAlarmStatsRepository = subAlarmStatsRepository;
this.subAlarmStats = subAlarmStats; this.subAlarmStats = subAlarmStats;
} }
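Window sliding in this bolt is driven by Storm tick tuples, with TICK_TUPLE_SECONDS_KEY expected to be no greater than the smallest window width. The sketch below shows the usual way a bolt requests tick tuples and recognizes them in execute(); the 60-second period and the bolt itself are illustrative, not part of this change.

import java.util.HashMap;
import java.util.Map;

import backtype.storm.Config;
import backtype.storm.Constants;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Tuple;

public class TickingBoltSketch extends BaseRichBolt {
  private static final long serialVersionUID = 1L;
  private OutputCollector collector;

  @Override
  public Map<String, Object> getComponentConfiguration() {
    // Ask Storm to send this bolt a tick tuple every 60 seconds (illustrative value);
    // in the threshold topology the period would come from TICK_TUPLE_SECONDS_KEY.
    Map<String, Object> conf = new HashMap<String, Object>();
    conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 60);
    return conf;
  }

  @Override
  @SuppressWarnings("rawtypes")
  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;
  }

  @Override
  public void execute(Tuple tuple) {
    // Tick tuples arrive from the system component on the system tick stream.
    if (Constants.SYSTEM_COMPONENT_ID.equals(tuple.getSourceComponent())
        && Constants.SYSTEM_TICK_STREAM_ID.equals(tuple.getSourceStreamId())) {
      // evaluate alarms and slide windows here
    } else {
      // aggregate the metric carried by the tuple here
    }
    collector.ack(tuple);
  }

  @Override
  public void declareOutputFields(OutputFieldsDeclarer declarer) {}
}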
View File
@ -14,24 +14,9 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon.infrastructure.thresholding; package com.hpcloud.mon.infrastructure.thresholding;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.model.metric.Metric; import com.hpcloud.mon.common.model.metric.Metric;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId; import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantIdMatcher; import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantIdMatcher;
@ -44,32 +29,48 @@ import com.hpcloud.streaming.storm.Logging;
import com.hpcloud.streaming.storm.Streams; import com.hpcloud.streaming.storm.Streams;
import com.hpcloud.util.Injector; import com.hpcloud.util.Injector;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/** /**
* Filters metrics for which there is no associated alarm and forwards metrics for which there is an * Filters metrics for which there is no associated alarm and forwards metrics for which there is an
* alarm. Receives metric alarm and metric sub-alarm events to update metric definitions. * alarm. Receives metric alarm and metric sub-alarm events to update metric definitions.
* *
* METRIC_DEFS table and the matcher are shared between any bolts in the same worker process so that all of the * METRIC_DEFS table and the matcher are shared between any bolts in the same worker process so that
* MetricDefinitionAndTenantIds for existing SubAlarms only have to be read once and because it is not * all of the MetricDefinitionAndTenantIds for existing SubAlarms only have to be read once and
* possible to predict which bolt gets which Metrics so all Bolts know about all starting * because it is not possible to predict which bolt gets which Metrics so all Bolts know about all
* MetricDefinitionAndTenantIds. * starting MetricDefinitionAndTenantIds.
* *
* The current topology uses shuffleGrouping for the incoming Metrics and allGrouping for the * The current topology uses shuffleGrouping for the incoming Metrics and allGrouping for the
* events. So, any Bolt may get any Metric so the METRIC_DEFS table and the matcher must be kept up to date * events. So, any Bolt may get any Metric so the METRIC_DEFS table and the matcher must be kept up
* for all MetricDefinitionAndTenantIds. * to date for all MetricDefinitionAndTenantIds.
* *
* The METRIC_DEFS table contains a List of SubAlarms IDs that reference the same MetricDefinitionAndTenantId * The METRIC_DEFS table contains a List of SubAlarms IDs that reference the same
* so if a SubAlarm is deleted, the MetricDefinitionAndTenantId will only be deleted from it and the matcher if no * MetricDefinitionAndTenantId so if a SubAlarm is deleted, the MetricDefinitionAndTenantId will
* more SubAlarms reference it. Incrementing and decrementing the count is done under the static lock SENTINAL * only be deleted from it and the matcher if no more SubAlarms reference it. Incrementing and
* to ensure it is correct across all Bolts sharing the same METRIC_DEFS table and the matcher. The * decrementing the count is done under the static lock SENTINAL to ensure it is correct across all
* amount of adds and deletes will be very small compared to the number of Metrics so it shouldn't * Bolts sharing the same METRIC_DEFS table and the matcher. The amount of adds and deletes will be
* block the Metric handling. * very small compared to the number of Metrics so it shouldn't block the Metric handling.
* *
* <ul> * <ul>
* <li>Input: MetricDefinition metricDefinition, Metric metric * <li>Input: MetricDefinition metricDefinition, Metric metric
* <li>Input metric-alarm-events: String eventType, MetricDefinitionAndTenantId metricDefinitionAndTenantId, String * <li>Input metric-alarm-events: String eventType, MetricDefinitionAndTenantId
* alarmId * metricDefinitionAndTenantId, String alarmId
* <li>Input metric-sub-alarm-events: String eventType, MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm * <li>Input metric-sub-alarm-events: String eventType, MetricDefinitionAndTenantId
* subAlarm * metricDefinitionAndTenantId, SubAlarm subAlarm
* <li>Output: MetricDefinitionAndTenantId metricDefinitionAndTenantId, Metric metric * <li>Output: MetricDefinitionAndTenantId metricDefinitionAndTenantId, Metric metric
* </ul> * </ul>
*/ */
@ -82,16 +83,21 @@ public class MetricFilteringBolt extends BaseRichBolt {
public static final int MAX_LAG_MESSAGES_DEFAULT = 10; public static final int MAX_LAG_MESSAGES_DEFAULT = 10;
public static final String LAG_MESSAGE_PERIOD_KEY = "com.hpcloud.mon.filtering.lagMessagePeriod"; public static final String LAG_MESSAGE_PERIOD_KEY = "com.hpcloud.mon.filtering.lagMessagePeriod";
public static final int LAG_MESSAGE_PERIOD_DEFAULT = 30; public static final int LAG_MESSAGE_PERIOD_DEFAULT = 30;
public static final String[] FIELDS = new String[] { "metricDefinitionAndTenantId", "metric" }; public static final String[] FIELDS = new String[] {"metricDefinitionAndTenantId", "metric"};
private static final int MIN_LAG_VALUE = PropertyFinder.getIntProperty(MIN_LAG_VALUE_KEY, MIN_LAG_VALUE_DEFAULT, 0, Integer.MAX_VALUE); private static final int MIN_LAG_VALUE = PropertyFinder.getIntProperty(MIN_LAG_VALUE_KEY,
private static final int MAX_LAG_MESSAGES = PropertyFinder.getIntProperty(MAX_LAG_MESSAGES_KEY, MAX_LAG_MESSAGES_DEFAULT, 0, Integer.MAX_VALUE); MIN_LAG_VALUE_DEFAULT, 0, Integer.MAX_VALUE);
private static final int LAG_MESSAGE_PERIOD = PropertyFinder.getIntProperty(LAG_MESSAGE_PERIOD_KEY, LAG_MESSAGE_PERIOD_DEFAULT, 1, 600); private static final int MAX_LAG_MESSAGES = PropertyFinder.getIntProperty(MAX_LAG_MESSAGES_KEY,
private static final Map<MetricDefinitionAndTenantId, List<String>> METRIC_DEFS = new ConcurrentHashMap<>(); MAX_LAG_MESSAGES_DEFAULT, 0, Integer.MAX_VALUE);
private static final MetricDefinitionAndTenantIdMatcher matcher = new MetricDefinitionAndTenantIdMatcher(); private static final int LAG_MESSAGE_PERIOD = PropertyFinder.getIntProperty(
LAG_MESSAGE_PERIOD_KEY, LAG_MESSAGE_PERIOD_DEFAULT, 1, 600);
private static final Map<MetricDefinitionAndTenantId, List<String>> METRIC_DEFS =
new ConcurrentHashMap<>();
private static final MetricDefinitionAndTenantIdMatcher matcher =
new MetricDefinitionAndTenantIdMatcher();
private static final Object SENTINAL = new Object(); private static final Object SENTINAL = new Object();
private transient Logger LOG; private transient Logger logger;
private DataSourceFactory dbConfig; private DataSourceFactory dbConfig;
private transient MetricDefinitionDAO metricDefDAO; private transient MetricDefinitionDAO metricDefDAO;
private OutputCollector collector; private OutputCollector collector;
@ -111,86 +117,94 @@ public class MetricFilteringBolt extends BaseRichBolt {
@Override @Override
public void declareOutputFields(OutputFieldsDeclarer declarer) { public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields(FIELDS)); declarer.declare(new Fields(FIELDS));
declarer.declareStream(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM, declarer.declareStream(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM, new Fields(
new Fields(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_FIELDS)); MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_FIELDS));
} }
@Override @Override
public void execute(Tuple tuple) { public void execute(Tuple tuple) {
LOG.debug("tuple: {}", tuple); logger.debug("tuple: {}", tuple);
try { try {
if (Streams.DEFAULT_STREAM_ID.equals(tuple.getSourceStreamId())) { if (Streams.DEFAULT_STREAM_ID.equals(tuple.getSourceStreamId())) {
final MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) tuple.getValue(0); final MetricDefinitionAndTenantId metricDefinitionAndTenantId =
final Long timestamp = (Long)tuple.getValue(1); (MetricDefinitionAndTenantId) tuple.getValue(0);
final Metric metric = (Metric)tuple.getValue(2); final Long timestamp = (Long) tuple.getValue(1);
final Metric metric = (Metric) tuple.getValue(2);
checkLag(timestamp); checkLag(timestamp);
LOG.debug("metric definition and tenant id: {}", metricDefinitionAndTenantId); logger.debug("metric definition and tenant id: {}", metricDefinitionAndTenantId);
// Check for exact matches as well as inexact matches // Check for exact matches as well as inexact matches
final List<MetricDefinitionAndTenantId> matches = matcher.match(metricDefinitionAndTenantId); final List<MetricDefinitionAndTenantId> matches =
for (final MetricDefinitionAndTenantId match : matches) matcher.match(metricDefinitionAndTenantId);
collector.emit(new Values(match, metric)); for (final MetricDefinitionAndTenantId match : matches) {
collector.emit(new Values(match, metric));
}
} else { } else {
String eventType = tuple.getString(0); String eventType = tuple.getString(0);
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) tuple.getValue(1); MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) tuple.getValue(1);
LOG.debug("Received {} for {}", eventType, metricDefinitionAndTenantId); logger.debug("Received {} for {}", eventType, metricDefinitionAndTenantId);
// UPDATED events can be ignored because the MetricDefinitionAndTenantId doesn't change // UPDATED events can be ignored because the MetricDefinitionAndTenantId doesn't change
if (EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) { if (EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
if (EventProcessingBolt.DELETED.equals(eventType)) if (EventProcessingBolt.DELETED.equals(eventType)) {
removeSubAlarm(metricDefinitionAndTenantId, tuple.getString(2)); removeSubAlarm(metricDefinitionAndTenantId, tuple.getString(2));
} else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) { }
if (EventProcessingBolt.CREATED.equals(eventType)) } else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple
synchronized(SENTINAL) { .getSourceStreamId())) {
if (EventProcessingBolt.CREATED.equals(eventType)) {
synchronized (SENTINAL) {
final SubAlarm subAlarm = (SubAlarm) tuple.getValue(2); final SubAlarm subAlarm = (SubAlarm) tuple.getValue(2);
addMetricDef(metricDefinitionAndTenantId, subAlarm.getId()); addMetricDef(metricDefinitionAndTenantId, subAlarm.getId());
} }
}
} }
} }
} catch (Exception e) { } catch (Exception e) {
LOG.error("Error processing tuple {}", tuple, e); logger.error("Error processing tuple {}", tuple, e);
} finally { } finally {
collector.ack(tuple); collector.ack(tuple);
} }
} }
private void checkLag(Long apiTimeStamp) { private void checkLag(Long apiTimeStamp) {
if (!lagging) if (!lagging) {
return; return;
if ((apiTimeStamp == null) || (apiTimeStamp.longValue() == 0)) }
if ((apiTimeStamp == null) || (apiTimeStamp.longValue() == 0)) {
return; // Remove this code at some point, just to handle old metrics without a NPE return; // Remove this code at some point, just to handle old metrics without a NPE
}
final long now = getCurrentTime(); final long now = getCurrentTime();
final long lag = now - apiTimeStamp.longValue(); final long lag = now - apiTimeStamp.longValue();
if (lag < minLag) if (lag < minLag) {
minLag = lag; minLag = lag;
}
if (minLag <= MIN_LAG_VALUE) { if (minLag <= MIN_LAG_VALUE) {
lagging = false; lagging = false;
LOG.info("Metrics no longer lagging, minLag = {}", minLag); logger.info("Metrics no longer lagging, minLag = {}", minLag);
} } else if (minLagMessageSent >= MAX_LAG_MESSAGES) {
else if (minLagMessageSent >= MAX_LAG_MESSAGES) { logger.info("Waited for {} seconds for Metrics to catch up. Giving up. minLag = {}",
LOG.info("Waited for {} seconds for Metrics to catch up. Giving up. minLag = {}", MAX_LAG_MESSAGES * LAG_MESSAGE_PERIOD, minLag);
MAX_LAG_MESSAGES * LAG_MESSAGE_PERIOD, minLag); lagging = false;
lagging = false; } else if (lastMinLagMessageSent == 0) {
}
else if (lastMinLagMessageSent == 0) {
lastMinLagMessageSent = now; lastMinLagMessageSent = now;
} } else if ((now - lastMinLagMessageSent) >= LAG_MESSAGE_PERIOD) {
else if ((now - lastMinLagMessageSent) >= LAG_MESSAGE_PERIOD) { logger.info("Sending {} message, minLag = {}", MetricAggregationBolt.METRICS_BEHIND, minLag);
LOG.info("Sending {} message, minLag = {}", MetricAggregationBolt.METRICS_BEHIND, minLag); collector.emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM, new Values(
collector.emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM, MetricAggregationBolt.METRICS_BEHIND));
new Values(MetricAggregationBolt.METRICS_BEHIND));
lastMinLagMessageSent = now; lastMinLagMessageSent = now;
minLagMessageSent++; minLagMessageSent++;
} }
} }
private void removeSubAlarm(MetricDefinitionAndTenantId metricDefinitionAndTenantId, String subAlarmId) { private void removeSubAlarm(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
synchronized(SENTINAL) { String subAlarmId) {
synchronized (SENTINAL) {
final List<String> subAlarmIds = METRIC_DEFS.get(metricDefinitionAndTenantId); final List<String> subAlarmIds = METRIC_DEFS.get(metricDefinitionAndTenantId);
if (subAlarmIds != null) { if (subAlarmIds != null) {
if (subAlarmIds.remove(subAlarmId) && subAlarmIds.isEmpty()) { if (subAlarmIds.remove(subAlarmId) && subAlarmIds.isEmpty()) {
METRIC_DEFS.remove(metricDefinitionAndTenantId); METRIC_DEFS.remove(metricDefinitionAndTenantId);
matcher.remove(metricDefinitionAndTenantId); matcher.remove(metricDefinitionAndTenantId);
} }
} }
} }
@ -199,8 +213,8 @@ public class MetricFilteringBolt extends BaseRichBolt {
@Override @Override
@SuppressWarnings("rawtypes") @SuppressWarnings("rawtypes")
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) { public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
LOG = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context)); logger = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
LOG.info("Preparing"); logger.info("Preparing");
this.collector = collector; this.collector = collector;
if (metricDefDAO == null) { if (metricDefDAO == null) {
@ -213,16 +227,18 @@ public class MetricFilteringBolt extends BaseRichBolt {
synchronized (SENTINAL) { synchronized (SENTINAL) {
if (METRIC_DEFS.isEmpty()) { if (METRIC_DEFS.isEmpty()) {
for (SubAlarmMetricDefinition subAlarmMetricDef : metricDefDAO.findForAlarms()) { for (SubAlarmMetricDefinition subAlarmMetricDef : metricDefDAO.findForAlarms()) {
addMetricDef(subAlarmMetricDef.getMetricDefinitionAndTenantId(), subAlarmMetricDef.getSubAlarmId()); addMetricDef(subAlarmMetricDef.getMetricDefinitionAndTenantId(),
subAlarmMetricDef.getSubAlarmId());
} }
// Iterate again to ensure we only emit each metricDef once // Iterate again to ensure we only emit each metricDef once
for (MetricDefinitionAndTenantId metricDefinitionAndTenantId : METRIC_DEFS.keySet()) for (MetricDefinitionAndTenantId metricDefinitionAndTenantId : METRIC_DEFS.keySet()) {
collector.emit(new Values(metricDefinitionAndTenantId, null)); collector.emit(new Values(metricDefinitionAndTenantId, null));
LOG.info("Found {} Metric Definitions", METRIC_DEFS.size()); }
logger.info("Found {} Metric Definitions", METRIC_DEFS.size());
// Just output these here so they are only output once per JVM // Just output these here so they are only output once per JVM
LOG.info("MIN_LAG_VALUE set to {} seconds", MIN_LAG_VALUE); logger.info("MIN_LAG_VALUE set to {} seconds", MIN_LAG_VALUE);
LOG.info("MAX_LAG_MESSAGES set to {}", MAX_LAG_MESSAGES); logger.info("MAX_LAG_MESSAGES set to {}", MAX_LAG_MESSAGES);
LOG.info("LAG_MESSAGE_PERIOD set to {} seconds", LAG_MESSAGE_PERIOD); logger.info("LAG_MESSAGE_PERIOD set to {} seconds", LAG_MESSAGE_PERIOD);
} }
} }
} }
@ -233,18 +249,19 @@ public class MetricFilteringBolt extends BaseRichBolt {
* Allow override of current time for testing. * Allow override of current time for testing.
*/ */
protected long getCurrentTime() { protected long getCurrentTime() {
return System.currentTimeMillis()/1000; return System.currentTimeMillis() / 1000;
} }
private void addMetricDef(MetricDefinitionAndTenantId metricDefinitionAndTenantId, String subAlarmId) { private void addMetricDef(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
String subAlarmId) {
List<String> subAlarmIds = METRIC_DEFS.get(metricDefinitionAndTenantId); List<String> subAlarmIds = METRIC_DEFS.get(metricDefinitionAndTenantId);
if (subAlarmIds == null) { if (subAlarmIds == null) {
subAlarmIds = new LinkedList<>(); subAlarmIds = new LinkedList<>();
METRIC_DEFS.put(metricDefinitionAndTenantId, subAlarmIds); METRIC_DEFS.put(metricDefinitionAndTenantId, subAlarmIds);
matcher.add(metricDefinitionAndTenantId); matcher.add(metricDefinitionAndTenantId);
} else if (subAlarmIds.contains(subAlarmId)) {
return; // Make sure it is only added once. Multiple bolts process the same AlarmCreatedEvent
} }
else if (subAlarmIds.contains(subAlarmId))
return; // Make sure it only gets added once. Multiple bolts process the same AlarmCreatedEvent
subAlarmIds.add(subAlarmId); subAlarmIds.add(subAlarmId);
} }
@ -252,14 +269,14 @@ public class MetricFilteringBolt extends BaseRichBolt {
* Only use for testing. * Only use for testing.
*/ */
static void clearMetricDefinitions() { static void clearMetricDefinitions() {
METRIC_DEFS.clear(); METRIC_DEFS.clear();
matcher.clear(); matcher.clear();
} }
/** /**
* Only use for testing. * Only use for testing.
*/ */
static int sizeMetricDefinitions() { static int sizeMetricDefinitions() {
return METRIC_DEFS.size(); return METRIC_DEFS.size();
} }
} }
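The class comment above describes the shared METRIC_DEFS table: a concurrent map guarded by the SENTINAL lock, with one entry per MetricDefinitionAndTenantId holding the sub-alarm ids that reference it; the entry is dropped only when the last id is removed. A reduced sketch of that bookkeeping, with String stand-ins for the real key and id types, looks roughly like this:

import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class SharedMetricTable {
  private static final Map<String, List<String>> METRIC_DEFS = new ConcurrentHashMap<>();
  private static final Object SENTINAL = new Object(); // spelling kept to match the bolt

  // Register a sub-alarm id against a metric definition key; idempotent per id.
  static void add(String metricDefKey, String subAlarmId) {
    synchronized (SENTINAL) {
      List<String> ids = METRIC_DEFS.get(metricDefKey);
      if (ids == null) {
        ids = new LinkedList<>();
        METRIC_DEFS.put(metricDefKey, ids);
      } else if (ids.contains(subAlarmId)) {
        return; // several bolts in the same worker see the same create event
      }
      ids.add(subAlarmId);
    }
  }

  // Drop a sub-alarm id; the key disappears only when no sub-alarms reference it.
  static void remove(String metricDefKey, String subAlarmId) {
    synchronized (SENTINAL) {
      final List<String> ids = METRIC_DEFS.get(metricDefKey);
      if (ids != null && ids.remove(subAlarmId) && ids.isEmpty()) {
        METRIC_DEFS.remove(metricDefKey);
      }
    }
  }
}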
View File
@ -14,56 +14,58 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon.infrastructure.thresholding;
import backtype.storm.spout.SpoutOutputCollector; package com.hpcloud.mon.infrastructure.thresholding;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.MetricSpoutConfig; import com.hpcloud.mon.MetricSpoutConfig;
import com.hpcloud.mon.common.model.metric.MetricEnvelope; import com.hpcloud.mon.common.model.metric.MetricEnvelope;
import com.hpcloud.mon.common.model.metric.MetricEnvelopes; import com.hpcloud.mon.common.model.metric.MetricEnvelopes;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId; import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
public class MetricSpout extends KafkaSpout { public class MetricSpout extends KafkaSpout {
private static final Logger LOG = LoggerFactory.getLogger(MetricSpout.class); private static final Logger logger = LoggerFactory.getLogger(MetricSpout.class);
private static final long serialVersionUID = 744004533863562119L; private static final long serialVersionUID = 744004533863562119L;
public static final String[] FIELDS = new String[] { "metricDefinitionAndTenantId", "apiTimeStamp", "metric" }; public static final String[] FIELDS = new String[] {"metricDefinitionAndTenantId",
public static final String DEFAULT_TENANT_ID = "TENANT_ID_NOT_SET"; "apiTimeStamp", "metric"};
public static final String DEFAULT_TENANT_ID = "TENANT_ID_NOT_SET";
public MetricSpout(MetricSpoutConfig metricSpoutConfig) { public MetricSpout(MetricSpoutConfig metricSpoutConfig) {
super(metricSpoutConfig); super(metricSpoutConfig);
LOG.info("Created"); logger.info("Created");
}
@Override
protected void processMessage(byte[] message, SpoutOutputCollector collector) {
final MetricEnvelope metricEnvelope;
try {
metricEnvelope = MetricEnvelopes.fromJson(message);
logger.debug("metric envelope: {}", metricEnvelope);
} catch (RuntimeException re) {
logger.warn("Error parsing MetricEnvelope", re);
return;
} }
String tenantId = (String) metricEnvelope.meta.get("tenantId");
@Override if (tenantId == null) {
protected void processMessage(byte[] message, SpoutOutputCollector collector) { logger.error("No tenantId so using default tenantId {} for Metric {}", DEFAULT_TENANT_ID,
final MetricEnvelope metricEnvelope; metricEnvelope.metric);
try { tenantId = DEFAULT_TENANT_ID;
metricEnvelope = MetricEnvelopes.fromJson(message);
LOG.debug("metric envelope: {}", metricEnvelope);
}
catch (RuntimeException re) {
LOG.warn("Error parsing MetricEnvelope", re);
return;
}
String tenantId = (String)metricEnvelope.meta.get("tenantId");
if (tenantId == null) {
LOG.error("No tenantId so using default tenantId {} for Metric {}", DEFAULT_TENANT_ID, metricEnvelope.metric);
tenantId = DEFAULT_TENANT_ID;
}
collector.emit(new Values(new MetricDefinitionAndTenantId(metricEnvelope.metric.definition(), tenantId),
metricEnvelope.creationTime, metricEnvelope.metric));
} }
collector.emit(new Values(new MetricDefinitionAndTenantId(metricEnvelope.metric.definition(),
tenantId), metricEnvelope.creationTime, metricEnvelope.metric));
}
@Override @Override
public void declareOutputFields(OutputFieldsDeclarer declarer) { public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields(FIELDS)); declarer.declare(new Fields(FIELDS));
} }
} }
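The spout's emitted tuples must line up positionally with the Fields declared in declareOutputFields(). A tiny sketch of that pairing, using placeholder values in place of the real MetricDefinitionAndTenantId and Metric objects:

import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;

public class FieldsValuesSketch {
  public static void main(String[] args) {
    // Declared field names and each emitted Values tuple must stay in the same order.
    Fields fields = new Fields("metricDefinitionAndTenantId", "apiTimeStamp", "metric");
    Values tuple = new Values("metric-def-and-tenant-placeholder", // would be a MetricDefinitionAndTenantId
        Long.valueOf(1404770393L),                                 // envelope creation time
        "metric-placeholder");                                     // would be a Metric
    System.out.println(fields.toList() + " -> " + tuple);
  }
}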
View File
@ -14,30 +14,31 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon.infrastructure.thresholding; package com.hpcloud.mon.infrastructure.thresholding;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.google.inject.AbstractModule; import com.google.inject.AbstractModule;
import com.google.inject.Provides; import com.google.inject.Provides;
import com.hpcloud.configuration.KafkaProducerConfiguration;
public class ProducerModule extends AbstractModule { public class ProducerModule extends AbstractModule {
private KafkaProducerConfiguration config; private KafkaProducerConfiguration config;
private AlarmEventForwarder alarmEventForwarder; private AlarmEventForwarder alarmEventForwarder;
@Override @Override
protected void configure() { protected void configure() {}
}
public ProducerModule(KafkaProducerConfiguration config) { public ProducerModule(KafkaProducerConfiguration config) {
this.config = config; this.config = config;
} }
public ProducerModule(AlarmEventForwarder alarmEventForwarder) { public ProducerModule(AlarmEventForwarder alarmEventForwarder) {
this.alarmEventForwarder = alarmEventForwarder; this.alarmEventForwarder = alarmEventForwarder;
} }
@Provides @Provides
AlarmEventForwarder alarmEventForwarder() { AlarmEventForwarder alarmEventForwarder() {
return alarmEventForwarder == null ? new KafkaAlarmEventForwarder(config) : alarmEventForwarder; return alarmEventForwarder == null ? new KafkaAlarmEventForwarder(config) : alarmEventForwarder;
} }
} }
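ProducerModule lets tests inject a pre-built AlarmEventForwarder while production wiring builds a KafkaAlarmEventForwarder from configuration. The sketch below shows the same either/or @Provides pattern with Guice, using stand-in types rather than the real forwarder classes:

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Provides;

public class ProducerWiringSketch {
  // Stand-in for the real AlarmEventForwarder interface.
  interface Forwarder {
    void send(String routingKey, String json);
  }

  // Mirrors ProducerModule: return the pre-built instance if one was supplied (tests),
  // otherwise build the "real" one (here just a console forwarder).
  static class SketchModule extends AbstractModule {
    private final Forwarder prebuilt;

    SketchModule(Forwarder prebuilt) {
      this.prebuilt = prebuilt;
    }

    @Override
    protected void configure() {}

    @Provides
    Forwarder forwarder() {
      if (prebuilt != null) {
        return prebuilt;
      }
      return new Forwarder() {
        @Override
        public void send(String routingKey, String json) {
          System.out.println(routingKey + " -> " + json);
        }
      };
    }
  }

  public static void main(String[] args) {
    Injector injector = Guice.createInjector(new SketchModule(null));
    injector.getInstance(Forwarder.class).send("alarm-1", "{\"newState\":\"ALARM\"}");
  }
}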
View File
@ -14,37 +14,36 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon.infrastructure.thresholding; package com.hpcloud.mon.infrastructure.thresholding;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
public class PropertyFinder { public class PropertyFinder {
private static final Logger LOG = LoggerFactory.getLogger(PropertyFinder.class); private static final Logger logger = LoggerFactory.getLogger(PropertyFinder.class);
private PropertyFinder() private PropertyFinder()
{ {
} }
public static int getIntProperty(final String name, public static int getIntProperty(final String name, final int defaultValue, final int minValue,
final int defaultValue, final int maxValue) {
final int minValue, final String valueString = System.getProperty(name);
final int maxValue) { if ((valueString != null) && !valueString.isEmpty()) {
final String valueString = System.getProperty(name); try {
if ((valueString != null) && !valueString.isEmpty()) { final int newValue = Integer.parseInt(valueString);
try { if ((newValue >= minValue) && (newValue <= maxValue)) {
final int newValue = Integer.parseInt(valueString); return newValue;
if ((newValue >= minValue) && (newValue <= maxValue)) {
return newValue;
}
LOG.warn("Invalid value {} for property '{}' must be >= {} and <= {}, using default value of {}",
valueString, name, minValue, maxValue, defaultValue);
}
catch (NumberFormatException nfe) {
LOG.warn("Not an integer value '{}' for property '{}', using default value of {}", valueString,
name, defaultValue);
}
} }
return defaultValue; logger.warn(
"Invalid value {} for property '{}' must be >= {} and <= {}, using default value of {}",
valueString, name, minValue, maxValue, defaultValue);
} catch (NumberFormatException nfe) {
logger.warn("Not an integer value '{}' for property '{}', using default value of {}",
valueString, name, defaultValue);
}
} }
return defaultValue;
}
} }
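For illustration only (not in the diff): how getIntProperty behaves for a present, a missing, and an out-of-range value; the property names here are invented.

System.setProperty("thresh.tick.seconds", "30");
int tick = PropertyFinder.getIntProperty("thresh.tick.seconds", 60, 1, 600); // 30
int miss = PropertyFinder.getIntProperty("thresh.not.set", 60, 1, 600);      // 60 (default)
int bad  = PropertyFinder.getIntProperty("thresh.tick.seconds", 60, 1, 10);  // 30 is out of range, so 60 and a warning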


@ -14,23 +14,24 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon.infrastructure.thresholding.deserializer; package com.hpcloud.mon.infrastructure.thresholding.deserializer;
import java.io.Serializable;
import java.util.Collections;
import java.util.List;
import backtype.storm.tuple.Fields;
import com.hpcloud.mon.common.event.AlarmCreatedEvent; import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent; import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent; import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.streaming.storm.TupleDeserializer; import com.hpcloud.streaming.storm.TupleDeserializer;
import com.hpcloud.util.Serialization; import com.hpcloud.util.Serialization;
import backtype.storm.tuple.Fields;
import java.io.Serializable;
import java.util.Collections;
import java.util.List;
/** /**
* Deserializes MaaS events using registered serialization types. * Deserializes MaaS events using registered serialization types.
* *
* <ul> * <ul>
* <li>Output: Object event * <li>Output: Object event
* </ul> * </ul>
@ -49,7 +50,8 @@ public class EventDeserializer implements TupleDeserializer, Serializable {
@Override @Override
public List<List<?>> deserialize(byte[] tuple) { public List<List<?>> deserialize(byte[] tuple) {
try { try {
return Collections.<List<?>>singletonList(Collections.singletonList(Serialization.fromJson(tuple))); return Collections.<List<?>>singletonList(Collections.singletonList(Serialization
.fromJson(tuple)));
} catch (Exception ignore) { } catch (Exception ignore) {
return null; return null;
} }
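A usage sketch, for illustration only; the JSON envelope and the registered target below are assumptions (any of the Alarm*Event types imported above follows the same pattern).

Serialization.registerTarget(AlarmDeletedEvent.class);
TupleDeserializer deserializer = new EventDeserializer();
byte[] payload = "{\"alarm-deleted\":{\"tenantId\":\"42\",\"alarmId\":\"1\"}}".getBytes();
List<List<?>> tuples = deserializer.deserialize(payload);
// Success: a single tuple whose single field is the deserialized event object.
// Any parse failure is swallowed by the catch block above and surfaces as a null result.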


@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon; package com.hpcloud.mon;
import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertEquals;
@ -21,12 +22,15 @@ import static org.testng.Assert.fail;
public class Assert { public class Assert {
public static void assertArraysEqual(final double[] actual, final double[] expected) { public static void assertArraysEqual(final double[] actual, final double[] expected) {
if (expected == actual) if (expected == actual) {
return; return;
if (null == expected) }
if (null == expected) {
fail("expected a null array, but not null found."); fail("expected a null array, but not null found.");
if (null == actual) }
if (null == actual) {
fail("expected not null array, but null found."); fail("expected not null array, but null found.");
}
assertEquals(actual.length, expected.length, "arrays don't have the same size."); assertEquals(actual.length, expected.length, "arrays don't have the same size.");
@ -39,12 +43,15 @@ public class Assert {
} }
public static void assertArraysEqual(final long[] actual, final long[] expected) { public static void assertArraysEqual(final long[] actual, final long[] expected) {
if (expected == actual) if (expected == actual) {
return; return;
if (null == expected) }
if (null == expected) {
fail("expected a null array, but not null found."); fail("expected a null array, but not null found.");
if (null == actual) }
if (null == actual) {
fail("expected not null array, but null found."); fail("expected not null array, but null found.");
}
assertEquals(actual.length, expected.length, "arrays don't have the same size."); assertEquals(actual.length, expected.length, "arrays don't have the same size.");
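For illustration only: how the helper above is called from a TestNG test.

Assert.assertArraysEqual(new long[] {1L, 2L, 3L}, new long[] {1L, 2L, 3L}); // passes
Assert.assertArraysEqual(new double[] {0.5}, new double[] {0.5});           // passes
Assert.assertArraysEqual((long[]) null, new long[] {1L});                   // fails: "expected not null array, but null found."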


@ -14,35 +14,17 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon; package com.hpcloud.mon;
import static org.mockito.Matchers.any; import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
import static org.mockito.Mockito.doAnswer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotEquals; import static org.testng.Assert.assertNotEquals;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.inject.AbstractModule;
import com.hpcloud.configuration.KafkaProducerConfiguration; import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.common.event.AlarmCreatedEvent; import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmStateTransitionedEvent; import com.hpcloud.mon.common.event.AlarmStateTransitionedEvent;
@ -67,8 +49,29 @@ import com.hpcloud.streaming.storm.TopologyTestCase;
import com.hpcloud.util.Injector; import com.hpcloud.util.Injector;
import com.hpcloud.util.Serialization; import com.hpcloud.util.Serialization;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.inject.AbstractModule;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/** /**
* Simulates a real'ish run of the thresholding engine with alarms being created, updated and deleted * Simulates a real'ish run of the thresholding engine with alarms being created, updated and
* deleted
*/ */
@Test(groups = "integration") @Test(groups = "integration")
public class ThresholdingEngineAlarmTest extends TopologyTestCase { public class ThresholdingEngineAlarmTest extends TopologyTestCase {
@ -85,14 +88,13 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
private int nextSubAlarmId = 4242; private int nextSubAlarmId = 4242;
private List<SubAlarm> subAlarms; private List<SubAlarm> subAlarms;
private AlarmExpression expression = new AlarmExpression( private AlarmExpression expression = new AlarmExpression(
"max(hpcs.compute.cpu{id=5}) >= 3 or max(hpcs.compute.mem{id=5}) >= 557"); "max(hpcs.compute.cpu{id=5}) >= 3 or max(hpcs.compute.mem{id=5}) >= 557");
private AlarmState currentState = AlarmState.UNDETERMINED; private AlarmState currentState = AlarmState.UNDETERMINED;
private volatile int alarmsSent = 0; private volatile int alarmsSent = 0;
public ThresholdingEngineAlarmTest() { public ThresholdingEngineAlarmTest() {
// Fixtures // Fixtures
subAlarms = subAlarmsFor(TEST_ALARM_ID, expression); subAlarms = subAlarmsFor(TEST_ALARM_ID, expression);
// Mocks // Mocks
@ -101,23 +103,26 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
@Override @Override
public Alarm answer(InvocationOnMock invocation) throws Throwable { public Alarm answer(InvocationOnMock invocation) throws Throwable {
return new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, return new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME,
TEST_ALARM_DESCRIPTION, expression, subAlarms, currentState, Boolean.TRUE); TEST_ALARM_DESCRIPTION, expression, subAlarms, currentState, Boolean.TRUE);
} }
}); });
subAlarmDAO = mock(SubAlarmDAO.class); subAlarmDAO = mock(SubAlarmDAO.class);
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(new Answer<List<SubAlarm>>() { when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(
@Override new Answer<List<SubAlarm>>() {
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable { @Override
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) invocation.getArguments()[0]; public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
for (final SubAlarm subAlarm : subAlarms) { MetricDefinitionAndTenantId metricDefinitionAndTenantId =
if (metricDefinitionAndTenantId.metricDefinition.equals(subAlarm.getExpression().getMetricDefinition())) { (MetricDefinitionAndTenantId) invocation.getArguments()[0];
for (final SubAlarm subAlarm : subAlarms) {
if (metricDefinitionAndTenantId.metricDefinition.equals(subAlarm.getExpression()
.getMetricDefinition())) {
return Arrays.asList(subAlarm); return Arrays.asList(subAlarm);
}
} }
} return Collections.emptyList();
return Collections.emptyList(); }
} });
});
metricDefinitionDAO = mock(MetricDefinitionDAO.class); metricDefinitionDAO = mock(MetricDefinitionDAO.class);
List<SubAlarmMetricDefinition> metricDefs = new ArrayList<>(0); List<SubAlarmMetricDefinition> metricDefs = new ArrayList<>(0);
@ -138,32 +143,32 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
threshConfig.sporadicMetricNamespaces = new HashSet<String>(); threshConfig.sporadicMetricNamespaces = new HashSet<String>();
Serialization.registerTarget(KafkaProducerConfiguration.class); Serialization.registerTarget(KafkaProducerConfiguration.class);
threshConfig.kafkaProducerConfig = Serialization.fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}"); threshConfig.kafkaProducerConfig =
Serialization
.fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}");
Config stormConfig = new Config(); Config stormConfig = new Config();
stormConfig.setMaxTaskParallelism(1); stormConfig.setMaxTaskParallelism(1);
metricSpout = new FeederSpout(new Fields(MetricSpout.FIELDS)); metricSpout = new FeederSpout(new Fields(MetricSpout.FIELDS));
eventSpout = new FeederSpout(new Fields("event")); eventSpout = new FeederSpout(new Fields("event"));
alarmEventForwarder = mock(AlarmEventForwarder.class); alarmEventForwarder = mock(AlarmEventForwarder.class);
Injector.registerModules(new TopologyModule(threshConfig, stormConfig, Injector
metricSpout, eventSpout)); .registerModules(new TopologyModule(threshConfig, stormConfig, metricSpout, eventSpout));
Injector.registerModules(new ProducerModule(alarmEventForwarder)); Injector.registerModules(new ProducerModule(alarmEventForwarder));
// Evaluate alarm stats every 5 seconds // Evaluate alarm stats every 5 seconds
System.setProperty(MetricAggregationBolt.TICK_TUPLE_SECONDS_KEY, "5"); System.setProperty(MetricAggregationBolt.TICK_TUPLE_SECONDS_KEY, "5");
} }
private List<SubAlarm> subAlarmsFor(final String alarmId, private List<SubAlarm> subAlarmsFor(final String alarmId, final AlarmExpression expression,
final AlarmExpression expression, final String... ids) {
final String ... ids) {
final List<SubAlarm> result = new ArrayList<SubAlarm>(expression.getSubExpressions().size()); final List<SubAlarm> result = new ArrayList<SubAlarm>(expression.getSubExpressions().size());
int index = 0; int index = 0;
for (final AlarmSubExpression expr : expression.getSubExpressions()) { for (final AlarmSubExpression expr : expression.getSubExpressions()) {
final String id; final String id;
if ((index >= ids.length) || (ids[index] == null)) { if ((index >= ids.length) || (ids[index] == null)) {
id = String.valueOf(nextSubAlarmId++); id = String.valueOf(nextSubAlarmId++);
} } else {
else { id = ids[index];
id = ids[index];
} }
index++; index++;
result.add(new SubAlarm(id, TEST_ALARM_ID, expr)); result.add(new SubAlarm(id, TEST_ALARM_ID, expr));
@ -171,109 +176,119 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
return result; return result;
} }
final AlarmState[] expectedStates = { AlarmState.ALARM, AlarmState.OK, AlarmState.ALARM, AlarmState.OK }; final AlarmState[] expectedStates = {AlarmState.ALARM, AlarmState.OK, AlarmState.ALARM,
AlarmState.OK};
public void shouldThreshold() throws Exception { public void shouldThreshold() throws Exception {
doAnswer(new Answer<Object>() { doAnswer(new Answer<Object>() {
public Object answer(InvocationOnMock invocation) { public Object answer(InvocationOnMock invocation) {
final Object[] args = invocation.getArguments(); final Object[] args = invocation.getArguments();
AlarmStateTransitionedEvent event = Serialization.fromJson((String)args[2]); AlarmStateTransitionedEvent event = Serialization.fromJson((String) args[2]);
System.out.printf("Alarm transitioned from %s to %s%n", event.oldState, event.newState); System.out.printf("Alarm transitioned from %s to %s%n", event.oldState, event.newState);
assertEquals(event.alarmName, TEST_ALARM_NAME); assertEquals(event.alarmName, TEST_ALARM_NAME);
assertEquals(event.alarmId, TEST_ALARM_ID); assertEquals(event.alarmId, TEST_ALARM_ID);
assertEquals(event.tenantId, TEST_ALARM_TENANT_ID); assertEquals(event.tenantId, TEST_ALARM_TENANT_ID);
assertEquals(event.oldState, currentState); assertEquals(event.oldState, currentState);
currentState = event.newState; currentState = event.newState;
assertEquals(event.newState, expectedStates[alarmsSent++]); assertEquals(event.newState, expectedStates[alarmsSent++]);
return null; return null;
}
} }
) }).when(alarmEventForwarder).send(anyString(), anyString(), anyString());
.when(alarmEventForwarder).send(anyString(), anyString(), anyString());
int goodValueCount = 0; int goodValueCount = 0;
boolean firstUpdate = true; boolean firstUpdate = true;
boolean secondUpdate = true; boolean secondUpdate = true;
boolean thirdUpdate = true; boolean thirdUpdate = true;
final Alarm initialAlarm = new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, final Alarm initialAlarm =
TEST_ALARM_DESCRIPTION, expression, subAlarms, AlarmState.UNDETERMINED, Boolean.TRUE); new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
expression, subAlarms, AlarmState.UNDETERMINED, Boolean.TRUE);
final int expectedAlarms = expectedStates.length; final int expectedAlarms = expectedStates.length;
AlarmExpression savedAlarmExpression = null; AlarmExpression savedAlarmExpression = null;
for (int i = 1; alarmsSent != expectedAlarms && i < 300; i++) { for (int i = 1; alarmsSent != expectedAlarms && i < 300; i++) {
if (i == 5) { if (i == 5) {
final Map<String, AlarmSubExpression> exprs = createSubExpressionMap(); final Map<String, AlarmSubExpression> exprs = createSubExpressionMap();
final AlarmCreatedEvent event = new AlarmCreatedEvent(TEST_ALARM_TENANT_ID, TEST_ALARM_ID, TEST_ALARM_NAME, final AlarmCreatedEvent event =
expression.getExpression(), exprs); new AlarmCreatedEvent(TEST_ALARM_TENANT_ID, TEST_ALARM_ID, TEST_ALARM_NAME,
eventSpout.feed(new Values(event)); expression.getExpression(), exprs);
System.out.printf("Send AlarmCreatedEvent for expression %s%n", expression.getExpression()); eventSpout.feed(new Values(event));
} System.out.printf("Send AlarmCreatedEvent for expression %s%n", expression.getExpression());
else if (alarmsSent == 1 && firstUpdate) { } else if (alarmsSent == 1 && firstUpdate) {
firstUpdate = false; firstUpdate = false;
final String originalExpression = expression.getExpression(); final String originalExpression = expression.getExpression();
expression = new AlarmExpression(originalExpression.replace(">= 3", ">= 556")); expression = new AlarmExpression(originalExpression.replace(">= 3", ">= 556"));
assertNotEquals(expression.getExpression(), originalExpression); assertNotEquals(expression.getExpression(), originalExpression);
final List<SubAlarm> updatedSubAlarms = new ArrayList<>(); final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
updatedSubAlarms.add(new SubAlarm(subAlarms.get(0).getId(), initialAlarm.getId(), expression.getSubExpressions().get(0))); updatedSubAlarms.add(new SubAlarm(subAlarms.get(0).getId(), initialAlarm.getId(),
for (int index = 1; index < subAlarms.size(); index++) { expression.getSubExpressions().get(0)));
final SubAlarm subAlarm = subAlarms.get(index); for (int index = 1; index < subAlarms.size(); index++) {
updatedSubAlarms.add(new SubAlarm(subAlarm.getId(), initialAlarm.getId(), subAlarm.getExpression())); final SubAlarm subAlarm = subAlarms.get(index);
} updatedSubAlarms.add(new SubAlarm(subAlarm.getId(), initialAlarm.getId(), subAlarm
.getExpression()));
}
initialAlarm.setState(currentState); initialAlarm.setState(currentState);
final AlarmUpdatedEvent event = EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(), expression, final AlarmUpdatedEvent event =
updatedSubAlarms); EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(),
subAlarms = updatedSubAlarms; expression, updatedSubAlarms);
initialAlarm.setSubAlarms(updatedSubAlarms); subAlarms = updatedSubAlarms;
eventSpout.feed(new Values(event)); initialAlarm.setSubAlarms(updatedSubAlarms);
eventSpout.feed(new Values(event));
System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression()); System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression());
} } else if (alarmsSent == 2 && secondUpdate) {
else if (alarmsSent == 2 && secondUpdate) { secondUpdate = false;
secondUpdate = false; savedAlarmExpression = expression;
savedAlarmExpression = expression; expression =
expression = new AlarmExpression("max(hpcs.compute.load{id=5}) > 551 and (" + expression.getExpression().replace("556", "554") + ")"); new AlarmExpression("max(hpcs.compute.load{id=5}) > 551 and ("
final List<SubAlarm> updatedSubAlarms = new ArrayList<>(); + expression.getExpression().replace("556", "554") + ")");
updatedSubAlarms.add(new SubAlarm(UUID.randomUUID().toString(), initialAlarm.getId(), expression.getSubExpressions().get(0))); final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
for (int index = 0; index < subAlarms.size(); index++) { updatedSubAlarms.add(new SubAlarm(UUID.randomUUID().toString(), initialAlarm.getId(),
updatedSubAlarms.add(new SubAlarm(subAlarms.get(index).getId(), initialAlarm.getId(), expression.getSubExpressions().get(index+1))); expression.getSubExpressions().get(0)));
} for (int index = 0; index < subAlarms.size(); index++) {
updatedSubAlarms.add(new SubAlarm(subAlarms.get(index).getId(), initialAlarm.getId(),
expression.getSubExpressions().get(index + 1)));
}
initialAlarm.setState(currentState); initialAlarm.setState(currentState);
final AlarmUpdatedEvent event = EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(), expression, final AlarmUpdatedEvent event =
updatedSubAlarms); EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(),
subAlarms = updatedSubAlarms; expression, updatedSubAlarms);
initialAlarm.setSubAlarms(updatedSubAlarms); subAlarms = updatedSubAlarms;
eventSpout.feed(new Values(event)); initialAlarm.setSubAlarms(updatedSubAlarms);
eventSpout.feed(new Values(event));
System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression()); System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression());
} } else if (alarmsSent == 3 && thirdUpdate) {
else if (alarmsSent == 3 && thirdUpdate) { thirdUpdate = false;
thirdUpdate = false; expression = savedAlarmExpression;
expression = savedAlarmExpression; final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
final List<SubAlarm> updatedSubAlarms = new ArrayList<>(); int index = 1;
int index = 1; for (AlarmSubExpression subExpression : expression.getSubExpressions()) {
for (AlarmSubExpression subExpression : expression.getSubExpressions()) { updatedSubAlarms.add(new SubAlarm(subAlarms.get(index).getId(), initialAlarm.getId(),
updatedSubAlarms.add(new SubAlarm(subAlarms.get(index).getId(), initialAlarm.getId(), subExpression)); subExpression));
index++; index++;
} }
initialAlarm.setState(currentState); initialAlarm.setState(currentState);
final AlarmUpdatedEvent event = EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(), expression, final AlarmUpdatedEvent event =
updatedSubAlarms); EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(),
subAlarms = updatedSubAlarms; expression, updatedSubAlarms);
initialAlarm.setSubAlarms(updatedSubAlarms); subAlarms = updatedSubAlarms;
eventSpout.feed(new Values(event)); initialAlarm.setSubAlarms(updatedSubAlarms);
eventSpout.feed(new Values(event));
System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression()); System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression());
} } else {
else {
System.out.println("Feeding metrics..."); System.out.println("Feeding metrics...");
long time = System.currentTimeMillis()/1000; long time = System.currentTimeMillis() / 1000;
++goodValueCount; ++goodValueCount;
for (final SubAlarm subAlarm : subAlarms) { for (final SubAlarm subAlarm : subAlarms) {
final MetricDefinitionAndTenantId metricDefinitionAndTenantId = final MetricDefinitionAndTenantId metricDefinitionAndTenantId =
new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(), TEST_ALARM_TENANT_ID); new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(),
metricSpout.feed(new Values(metricDefinitionAndTenantId, time, TEST_ALARM_TENANT_ID);
new Metric(metricDefinitionAndTenantId.metricDefinition, time, (double) (goodValueCount == 15 ? 1 : 555)))); metricSpout.feed(new Values(metricDefinitionAndTenantId, time, new Metric(
metricDefinitionAndTenantId.metricDefinition, time,
(double) (goodValueCount == 15 ? 1 : 555))));
} }
} }
try { try {
@ -284,11 +299,11 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
} }
for (int i = 0; alarmsSent != expectedAlarms && i < 60; i++) { for (int i = 0; alarmsSent != expectedAlarms && i < 60; i++) {
try { try {
Thread.sleep(1000); Thread.sleep(1000);
} catch (InterruptedException e) { } catch (InterruptedException e) {
e.printStackTrace(); e.printStackTrace();
} }
} }
assertEquals(alarmsSent, expectedAlarms); assertEquals(alarmsSent, expectedAlarms);
assertEquals(currentState, expectedStates[expectedStates.length - 1]); assertEquals(currentState, expectedStates[expectedStates.length - 1]);
@ -296,9 +311,9 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
private Map<String, AlarmSubExpression> createSubExpressionMap() { private Map<String, AlarmSubExpression> createSubExpressionMap() {
final Map<String, AlarmSubExpression> exprs = new HashMap<>(); final Map<String, AlarmSubExpression> exprs = new HashMap<>();
for (final SubAlarm subAlarm : subAlarms) { for (final SubAlarm subAlarm : subAlarms) {
exprs.put(subAlarm.getId(), subAlarm.getExpression()); exprs.put(subAlarm.getId(), subAlarm.getExpression());
} }
return exprs; return exprs;
} }
} }
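For illustration only: the expression-to-SubAlarm mapping the fixtures above rely on; the identifier strings are invented.

AlarmExpression expr = new AlarmExpression(
    "max(hpcs.compute.cpu{id=5}) >= 3 or max(hpcs.compute.mem{id=5}) >= 557");
// One sub-expression per metric reference, in textual order.
AlarmSubExpression cpuExpr = expr.getSubExpressions().get(0); // max(hpcs.compute.cpu{id=5}) >= 3
AlarmSubExpression memExpr = expr.getSubExpressions().get(1); // max(hpcs.compute.mem{id=5}) >= 557
// subAlarmsFor() wraps each sub-expression in a SubAlarm tied to the alarm id.
SubAlarm cpuSubAlarm = new SubAlarm("4242", "some-alarm-id", cpuExpr);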


@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon; package com.hpcloud.mon;
import com.hpcloud.mon.ThresholdingEngine; import com.hpcloud.mon.ThresholdingEngine;


@ -14,33 +14,17 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon; package com.hpcloud.mon;
import static org.mockito.Matchers.any; import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
import static org.mockito.Mockito.doAnswer;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue; import static org.testng.Assert.assertTrue;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.inject.AbstractModule;
import com.hpcloud.configuration.KafkaProducerConfiguration; import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.common.event.AlarmStateTransitionedEvent; import com.hpcloud.mon.common.event.AlarmStateTransitionedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmExpression; import com.hpcloud.mon.common.model.alarm.AlarmExpression;
@ -61,6 +45,24 @@ import com.hpcloud.streaming.storm.TopologyTestCase;
import com.hpcloud.util.Injector; import com.hpcloud.util.Injector;
import com.hpcloud.util.Serialization; import com.hpcloud.util.Serialization;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.inject.AbstractModule;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
/** /**
* Simulates a real'ish run of the thresholding engine, using seconds instead of minutes for the * Simulates a real'ish run of the thresholding engine, using seconds instead of minutes for the
* evaluation timescale. * evaluation timescale.
@ -86,8 +88,8 @@ public class ThresholdingEngineTest extends TopologyTestCase {
public ThresholdingEngineTest() { public ThresholdingEngineTest() {
// Fixtures // Fixtures
final AlarmExpression expression = new AlarmExpression( final AlarmExpression expression =
"max(cpu{id=5}) >= 3 or max(mem{id=5}) >= 5"); new AlarmExpression("max(cpu{id=5}) >= 3 or max(mem{id=5}) >= 5");
cpuMetricDef = expression.getSubExpressions().get(0).getMetricDefinition(); cpuMetricDef = expression.getSubExpressions().get(0).getMetricDefinition();
memMetricDef = expression.getSubExpressions().get(1).getMetricDefinition(); memMetricDef = expression.getSubExpressions().get(1).getMetricDefinition();
@ -98,33 +100,38 @@ public class ThresholdingEngineTest extends TopologyTestCase {
@Override @Override
public Alarm answer(InvocationOnMock invocation) throws Throwable { public Alarm answer(InvocationOnMock invocation) throws Throwable {
return new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, return new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME,
TEST_ALARM_DESCRIPTION, expression, subAlarmsFor(expression), AlarmState.OK, Boolean.TRUE); TEST_ALARM_DESCRIPTION, expression, subAlarmsFor(expression), AlarmState.OK,
Boolean.TRUE);
} }
}); });
subAlarmDAO = mock(SubAlarmDAO.class); subAlarmDAO = mock(SubAlarmDAO.class);
final SubAlarm cpuMetricDefSubAlarm = new SubAlarm("123", TEST_ALARM_ID, expression.getSubExpressions().get(0)); final SubAlarm cpuMetricDefSubAlarm =
final SubAlarm memMetricDefSubAlarm = new SubAlarm("456", TEST_ALARM_ID, expression.getSubExpressions().get(1)); new SubAlarm("123", TEST_ALARM_ID, expression.getSubExpressions().get(0));
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(new Answer<List<SubAlarm>>() { final SubAlarm memMetricDefSubAlarm =
@Override new SubAlarm("456", TEST_ALARM_ID, expression.getSubExpressions().get(1));
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable { when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) invocation.getArguments()[0]; new Answer<List<SubAlarm>>() {
MetricDefinition metricDef = metricDefinitionAndTenantId.metricDefinition; @Override
if (metricDef.equals(cpuMetricDef)) { public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
return Arrays.asList(cpuMetricDefSubAlarm); MetricDefinitionAndTenantId metricDefinitionAndTenantId =
} else if (metricDef.equals(memMetricDef)) { (MetricDefinitionAndTenantId) invocation.getArguments()[0];
return Arrays.asList(memMetricDefSubAlarm); MetricDefinition metricDef = metricDefinitionAndTenantId.metricDefinition;
} if (metricDef.equals(cpuMetricDef)) {
return Collections.emptyList(); return Arrays.asList(cpuMetricDefSubAlarm);
} } else if (metricDef.equals(memMetricDef)) {
}); return Arrays.asList(memMetricDefSubAlarm);
}
return Collections.emptyList();
}
});
metricDefinitionDAO = mock(MetricDefinitionDAO.class); metricDefinitionDAO = mock(MetricDefinitionDAO.class);
final List<SubAlarmMetricDefinition> metricDefs = Arrays.asList( final List<SubAlarmMetricDefinition> metricDefs =
new SubAlarmMetricDefinition(cpuMetricDefSubAlarm.getId(), Arrays.asList(new SubAlarmMetricDefinition(cpuMetricDefSubAlarm.getId(),
new MetricDefinitionAndTenantId(cpuMetricDef, TEST_ALARM_TENANT_ID)), new MetricDefinitionAndTenantId(cpuMetricDef, TEST_ALARM_TENANT_ID)),
new SubAlarmMetricDefinition(memMetricDefSubAlarm.getId(), new SubAlarmMetricDefinition(memMetricDefSubAlarm.getId(),
new MetricDefinitionAndTenantId(memMetricDef, TEST_ALARM_TENANT_ID))); new MetricDefinitionAndTenantId(memMetricDef, TEST_ALARM_TENANT_ID)));
when(metricDefinitionDAO.findForAlarms()).thenReturn(metricDefs); when(metricDefinitionDAO.findForAlarms()).thenReturn(metricDefs);
// Bindings // Bindings
@ -142,14 +149,16 @@ public class ThresholdingEngineTest extends TopologyTestCase {
threshConfig.sporadicMetricNamespaces = new HashSet<String>(); threshConfig.sporadicMetricNamespaces = new HashSet<String>();
Serialization.registerTarget(KafkaProducerConfiguration.class); Serialization.registerTarget(KafkaProducerConfiguration.class);
threshConfig.kafkaProducerConfig = Serialization.fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}"); threshConfig.kafkaProducerConfig =
Serialization
.fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}");
Config stormConfig = new Config(); Config stormConfig = new Config();
stormConfig.setMaxTaskParallelism(1); stormConfig.setMaxTaskParallelism(1);
metricSpout = new FeederSpout(new Fields(MetricSpout.FIELDS)); metricSpout = new FeederSpout(new Fields(MetricSpout.FIELDS));
eventSpout = new FeederSpout(new Fields("event")); eventSpout = new FeederSpout(new Fields("event"));
alarmEventForwarder = mock(AlarmEventForwarder.class); alarmEventForwarder = mock(AlarmEventForwarder.class);
Injector.registerModules(new TopologyModule(threshConfig, stormConfig, Injector
metricSpout, eventSpout)); .registerModules(new TopologyModule(threshConfig, stormConfig, metricSpout, eventSpout));
Injector.registerModules(new ProducerModule(alarmEventForwarder)); Injector.registerModules(new ProducerModule(alarmEventForwarder));
} }
@ -161,28 +170,25 @@ public class ThresholdingEngineTest extends TopologyTestCase {
public void shouldThreshold() throws Exception { public void shouldThreshold() throws Exception {
doAnswer(new Answer<Object>() { doAnswer(new Answer<Object>() {
public Object answer(InvocationOnMock invocation) { public Object answer(InvocationOnMock invocation) {
final Object[] args = invocation.getArguments(); final Object[] args = invocation.getArguments();
AlarmStateTransitionedEvent event = Serialization.fromJson((String)args[2]); AlarmStateTransitionedEvent event = Serialization.fromJson((String) args[2]);
alarmsSent++; alarmsSent++;
System.out.printf("Alarm transitioned from %s to %s%n", event.oldState, event.newState); System.out.printf("Alarm transitioned from %s to %s%n", event.oldState, event.newState);
assertEquals(event.alarmName, TEST_ALARM_NAME); assertEquals(event.alarmName, TEST_ALARM_NAME);
assertEquals(event.alarmId, TEST_ALARM_ID); assertEquals(event.alarmId, TEST_ALARM_ID);
assertEquals(event.tenantId, TEST_ALARM_TENANT_ID); assertEquals(event.tenantId, TEST_ALARM_TENANT_ID);
assertEquals(event.oldState, previousState); assertEquals(event.oldState, previousState);
assertEquals(event.newState, expectedState); assertEquals(event.newState, expectedState);
previousState = event.newState; previousState = event.newState;
if (event.newState == AlarmState.UNDETERMINED) { if (event.newState == AlarmState.UNDETERMINED) {
expectedState = AlarmState.ALARM; expectedState = AlarmState.ALARM;
} } else if (event.newState == AlarmState.ALARM) {
else if (event.newState == AlarmState.ALARM) { expectedState = AlarmState.UNDETERMINED;
expectedState = AlarmState.UNDETERMINED; }
} return null;
return null;
}
} }
) }).when(alarmEventForwarder).send(anyString(), anyString(), anyString());
.when(alarmEventForwarder).send(anyString(), anyString(), anyString());
int waitCount = 0; int waitCount = 0;
int feedCount = 5; int feedCount = 5;
int goodValueCount = 0; int goodValueCount = 0;
@ -192,21 +198,26 @@ public class ThresholdingEngineTest extends TopologyTestCase {
if (feedCount > 0) { if (feedCount > 0) {
System.out.println("Feeding metrics..."); System.out.println("Feeding metrics...");
long time = System.currentTimeMillis()/1000; long time = System.currentTimeMillis() / 1000;
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(cpuMetricDef, TEST_ALARM_TENANT_ID), time, metricSpout.feed(new Values(new MetricDefinitionAndTenantId(cpuMetricDef,
new Metric(cpuMetricDef.name, cpuMetricDef.dimensions, time, (double) (++goodValueCount == 15 ? 1 : 555)))); TEST_ALARM_TENANT_ID), time, new Metric(cpuMetricDef.name, cpuMetricDef.dimensions,
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(memMetricDef, TEST_ALARM_TENANT_ID), time, time, (double) (++goodValueCount == 15 ? 1 : 555))));
new Metric(memMetricDef.name, extraMemMetricDefDimensions, time, (double) (goodValueCount == 15 ? 1 : 555)))); metricSpout.feed(new Values(new MetricDefinitionAndTenantId(memMetricDef,
TEST_ALARM_TENANT_ID), time, new Metric(memMetricDef.name, extraMemMetricDefDimensions,
time, (double) (goodValueCount == 15 ? 1 : 555))));
if (--feedCount == 0) if (--feedCount == 0) {
waitCount = 3; waitCount = 3;
}
if (goodValueCount == 15) if (goodValueCount == 15) {
goodValueCount = 0; goodValueCount = 0;
}
} else { } else {
System.out.println("Waiting..."); System.out.println("Waiting...");
if (--waitCount == 0) if (--waitCount == 0) {
feedCount = 5; feedCount = 5;
}
} }
try { try {
@ -218,11 +229,11 @@ public class ThresholdingEngineTest extends TopologyTestCase {
// Give it some extra time if it needs it for the alarm to come out // Give it some extra time if it needs it for the alarm to come out
for (int i = 0; i < 30 && alarmsSent == 0; i++) { for (int i = 0; i < 30 && alarmsSent == 0; i++) {
try { try {
Thread.sleep(1000); Thread.sleep(1000);
} catch (InterruptedException e) { } catch (InterruptedException e) {
e.printStackTrace(); e.printStackTrace();
} }
} }
assertTrue(alarmsSent > 0, "Not enough alarms"); assertTrue(alarmsSent > 0, "Not enough alarms");
} }
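For illustration only: the three-field tuple contract the metric feeder above follows; the tenant id and dimension map here are placeholders, and timestamps are epoch seconds in this test.

long now = System.currentTimeMillis() / 1000;
MetricDefinition cpuDef = new MetricDefinition("cpu", new HashMap<String, String>());
metricSpout.feed(new Values(
    new MetricDefinitionAndTenantId(cpuDef, "42"),            // field 1: definition plus tenant
    now,                                                      // field 2: timestamp
    new Metric(cpuDef.name, cpuDef.dimensions, now, 555.0))); // field 3: the measurement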


@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon; package com.hpcloud.mon;
import static org.mockito.Matchers.any; import static org.mockito.Matchers.any;
@ -21,21 +22,6 @@ import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.common.collect.ImmutableMap;
import com.google.inject.AbstractModule;
import com.hpcloud.mon.common.event.AlarmCreatedEvent; import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent; import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmExpression; import com.hpcloud.mon.common.model.alarm.AlarmExpression;
@ -57,6 +43,22 @@ import com.hpcloud.mon.infrastructure.thresholding.ProducerModule;
import com.hpcloud.streaming.storm.TopologyTestCase; import com.hpcloud.streaming.storm.TopologyTestCase;
import com.hpcloud.util.Injector; import com.hpcloud.util.Injector;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.common.collect.ImmutableMap;
import com.google.inject.AbstractModule;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/** /**
* Simulates a real'ish run of the thresholding engine, using seconds instead of minutes for the * Simulates a real'ish run of the thresholding engine, using seconds instead of minutes for the
* evaluation timescale. * evaluation timescale.
@ -80,8 +82,9 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
public ThresholdingEngineTest1() { public ThresholdingEngineTest1() {
// Fixtures // Fixtures
expression = new AlarmExpression( expression =
"avg(hpcs.compute.cpu{id=5}, 3) >= 3 times 2 and avg(hpcs.compute.mem{id=5}, 3) >= 5 times 2"); new AlarmExpression(
"avg(hpcs.compute.cpu{id=5}, 3) >= 3 times 2 and avg(hpcs.compute.mem{id=5}, 3) >= 5 times 2");
customExpression = AlarmExpression.of("avg(my.test{id=4}, 3) > 10"); customExpression = AlarmExpression.of("avg(my.test{id=4}, 3) > 10");
customSubExpression = customExpression.getSubExpressions().get(0); customSubExpression = customExpression.getSubExpressions().get(0);
@ -94,40 +97,44 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
when(alarmDAO.findById(anyString())).thenAnswer(new Answer<Alarm>() { when(alarmDAO.findById(anyString())).thenAnswer(new Answer<Alarm>() {
@Override @Override
public Alarm answer(InvocationOnMock invocation) throws Throwable { public Alarm answer(InvocationOnMock invocation) throws Throwable {
if (invocation.getArguments()[0].equals("1")) if (invocation.getArguments()[0].equals("1")) {
return new Alarm("1", BOB_TENANT_ID, "test-alarm", "Descr of test-alarm", expression, Arrays.asList(createCpuSubAlarm(), return new Alarm("1", BOB_TENANT_ID, "test-alarm", "Descr of test-alarm", expression,
createMemSubAlarm()), AlarmState.OK, Boolean.TRUE); Arrays.asList(createCpuSubAlarm(), createMemSubAlarm()), AlarmState.OK, Boolean.TRUE);
else if (invocation.getArguments()[0].equals("2")) } else if (invocation.getArguments()[0].equals("2")) {
return new Alarm("2", JOE_TENANT_ID, "joes-alarm", "Descr of joes-alarm", customExpression, return new Alarm("2", JOE_TENANT_ID, "joes-alarm", "Descr of joes-alarm",
Arrays.asList(createCustomSubAlarm()), AlarmState.OK, Boolean.TRUE); customExpression, Arrays.asList(createCustomSubAlarm()), AlarmState.OK, Boolean.TRUE);
}
return null; return null;
} }
}); });
subAlarmDAO = mock(SubAlarmDAO.class); subAlarmDAO = mock(SubAlarmDAO.class);
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(new Answer<List<SubAlarm>>() { when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(
@Override new Answer<List<SubAlarm>>() {
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable { @Override
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) invocation.getArguments()[0]; public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
MetricDefinition metricDef = metricDefinitionAndTenantId.metricDefinition; MetricDefinitionAndTenantId metricDefinitionAndTenantId =
if (metricDef.equals(cpuMetricDef)) (MetricDefinitionAndTenantId) invocation.getArguments()[0];
return Arrays.asList(createCpuSubAlarm()); MetricDefinition metricDef = metricDefinitionAndTenantId.metricDefinition;
else if (metricDef.equals(memMetricDef)) if (metricDef.equals(cpuMetricDef)) {
return Arrays.asList(createMemSubAlarm()); return Arrays.asList(createCpuSubAlarm());
else if (metricDef.equals(customMetricDef)) } else if (metricDef.equals(memMetricDef)) {
return Arrays.asList(createCustomSubAlarm()); return Arrays.asList(createMemSubAlarm());
return Collections.emptyList(); } else if (metricDef.equals(customMetricDef)) {
} return Arrays.asList(createCustomSubAlarm());
}); }
return Collections.emptyList();
}
});
metricDefinitionDAO = mock(MetricDefinitionDAO.class); metricDefinitionDAO = mock(MetricDefinitionDAO.class);
final List<SubAlarmMetricDefinition> metricDefs = Arrays.asList( final List<SubAlarmMetricDefinition> metricDefs =
new SubAlarmMetricDefinition(createCpuSubAlarm().getId(), Arrays.asList(new SubAlarmMetricDefinition(createCpuSubAlarm().getId(),
new MetricDefinitionAndTenantId(cpuMetricDef, BOB_TENANT_ID)), new MetricDefinitionAndTenantId(cpuMetricDef, BOB_TENANT_ID)),
new SubAlarmMetricDefinition(createMemSubAlarm().getId(), new SubAlarmMetricDefinition(createMemSubAlarm().getId(),
new MetricDefinitionAndTenantId(memMetricDef, BOB_TENANT_ID)), new MetricDefinitionAndTenantId(memMetricDef, BOB_TENANT_ID)),
new SubAlarmMetricDefinition(createCustomSubAlarm().getId(), new SubAlarmMetricDefinition(createCustomSubAlarm().getId(),
new MetricDefinitionAndTenantId(customMetricDef, JOE_TENANT_ID))); new MetricDefinitionAndTenantId(customMetricDef, JOE_TENANT_ID)));
when(metricDefinitionDAO.findForAlarms()).thenReturn(metricDefs); when(metricDefinitionDAO.findForAlarms()).thenReturn(metricDefs);
// Bindings // Bindings
@ -149,8 +156,8 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
eventSpout = new FeederSpout(new Fields("event")); eventSpout = new FeederSpout(new Fields("event"));
final AlarmEventForwarder alarmEventForwarder = mock(AlarmEventForwarder.class); final AlarmEventForwarder alarmEventForwarder = mock(AlarmEventForwarder.class);
Injector.registerModules(new TopologyModule(threshConfig, stormConfig, Injector
metricSpout, eventSpout)); .registerModules(new TopologyModule(threshConfig, stormConfig, metricSpout, eventSpout));
Injector.registerModules(new ProducerModule(alarmEventForwarder)); Injector.registerModules(new ProducerModule(alarmEventForwarder));
// Evaluate alarm stats every second // Evaluate alarm stats every second
@ -175,23 +182,27 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
while (true) { while (true) {
long time = System.currentTimeMillis(); long time = System.currentTimeMillis();
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(cpuMetricDef, BOB_TENANT_ID), new Metric(cpuMetricDef.name, metricSpout.feed(new Values(new MetricDefinitionAndTenantId(cpuMetricDef, BOB_TENANT_ID),
cpuMetricDef.dimensions, time, count % 10 == 0 ? 555 : 1))); new Metric(cpuMetricDef.name, cpuMetricDef.dimensions, time, count % 10 == 0 ? 555 : 1)));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(memMetricDef, BOB_TENANT_ID), new Metric(memMetricDef.name, metricSpout.feed(new Values(new MetricDefinitionAndTenantId(memMetricDef, BOB_TENANT_ID),
cpuMetricDef.dimensions, time, count % 10 == 0 ? 555 : 1))); new Metric(memMetricDef.name, cpuMetricDef.dimensions, time, count % 10 == 0 ? 555 : 1)));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(customMetricDef, JOE_TENANT_ID), new Metric(customMetricDef.name, metricSpout
cpuMetricDef.dimensions, time, count % 20 == 0 ? 1 : 123))); .feed(new Values(new MetricDefinitionAndTenantId(customMetricDef, JOE_TENANT_ID),
new Metric(customMetricDef.name, cpuMetricDef.dimensions, time, count % 20 == 0 ? 1
: 123)));
if (count % 5 == 0) { if (count % 5 == 0) {
Object event = null; Object event = null;
if (++eventCounter % 2 == 0) if (++eventCounter % 2 == 0) {
event = new AlarmDeletedEvent(JOE_TENANT_ID, "2", event =
ImmutableMap.<String, MetricDefinition>builder().put("444", customMetricDef).build()); new AlarmDeletedEvent(JOE_TENANT_ID, "2", ImmutableMap
else .<String, MetricDefinition>builder().put("444", customMetricDef).build());
event = new AlarmCreatedEvent(JOE_TENANT_ID, "2", "foo", customSubExpression.getExpression(), } else {
ImmutableMap.<String, AlarmSubExpression>builder() event =
.put("444", customSubExpression) new AlarmCreatedEvent(JOE_TENANT_ID, "2", "foo", customSubExpression.getExpression(),
.build()); ImmutableMap.<String, AlarmSubExpression>builder()
.put("444", customSubExpression).build());
}
eventSpout.feed(new Values(event)); eventSpout.feed(new Values(event));
} }
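For illustration only: building the create/delete events the loop above feeds; the tenant id here is a placeholder and the key "444" mirrors the test.

AlarmSubExpression sub =
    AlarmExpression.of("avg(my.test{id=4}, 3) > 10").getSubExpressions().get(0);
Object created = new AlarmCreatedEvent("joe-tenant", "2", "foo", sub.getExpression(),
    ImmutableMap.<String, AlarmSubExpression>builder().put("444", sub).build());
Object deleted = new AlarmDeletedEvent("joe-tenant", "2",
    ImmutableMap.<String, MetricDefinition>builder().put("444", sub.getMetricDefinition()).build());
eventSpout.feed(new Values(created)); // the event spout carries a single "event" field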


@ -14,19 +14,13 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon.domain.model; package com.hpcloud.mon.domain.model;
import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue; import static org.testng.Assert.assertTrue;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.testng.annotations.Test;
import com.hpcloud.mon.common.model.alarm.AggregateFunction; import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmExpression; import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmOperator; import com.hpcloud.mon.common.model.alarm.AlarmOperator;
@ -34,6 +28,13 @@ import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression; import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition; import com.hpcloud.mon.common.model.metric.MetricDefinition;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Test @Test
public class AlarmTest { public class AlarmTest {
private static final String TEST_ALARM_ID = "1"; private static final String TEST_ALARM_ID = "1";
@ -43,25 +44,30 @@ public class AlarmTest {
private static Boolean ALARM_ENABLED = Boolean.FALSE; private static Boolean ALARM_ENABLED = Boolean.FALSE;
public void shouldBeUndeterminedIfAnySubAlarmIsUndetermined() { public void shouldBeUndeterminedIfAnySubAlarmIsUndetermined() {
AlarmExpression expr = new AlarmExpression( AlarmExpression expr =
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 AND avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3"); new AlarmExpression(
SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0), "avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 AND avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
AlarmState.UNDETERMINED); SubAlarm subAlarm1 =
SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1), AlarmState.ALARM); new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0), AlarmState.UNDETERMINED);
Alarm alarm = new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION, expr, SubAlarm subAlarm2 =
Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED); new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1), AlarmState.ALARM);
Alarm alarm =
new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
expr, Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED);
assertFalse(alarm.evaluate()); assertFalse(alarm.evaluate());
assertEquals(alarm.getState(), AlarmState.UNDETERMINED); assertEquals(alarm.getState(), AlarmState.UNDETERMINED);
} }
public void shouldEvaluateExpressionWithBooleanAnd() { public void shouldEvaluateExpressionWithBooleanAnd() {
AlarmExpression expr = new AlarmExpression( AlarmExpression expr =
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 AND avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3"); new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 AND avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0)); SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0));
SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1)); SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1));
Alarm alarm = new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION, Alarm alarm =
new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
expr, Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED); expr, Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED);
assertFalse(alarm.evaluate()); assertFalse(alarm.evaluate());
@ -90,12 +96,14 @@ public class AlarmTest {
} }
public void shouldEvaluateExpressionWithBooleanOr() { public void shouldEvaluateExpressionWithBooleanOr() {
AlarmExpression expr = new AlarmExpression( AlarmExpression expr =
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3"); new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0)); SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0));
SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1)); SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1));
Alarm alarm = new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION, Alarm alarm =
new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
expr, Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED); expr, Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED);
assertFalse(alarm.evaluate()); assertFalse(alarm.evaluate());
@ -131,12 +139,13 @@ public class AlarmTest {
} }
public void shouldBuiltStateChangeReason() { public void shouldBuiltStateChangeReason() {
AlarmExpression expr = new AlarmExpression( AlarmExpression expr =
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3"); new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0)); SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0));
SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1)); SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1));
List<String> expressions = Arrays.asList(subAlarm1.getExpression().toString(), List<String> expressions =
subAlarm2.getExpression().toString()); Arrays.asList(subAlarm1.getExpression().toString(), subAlarm2.getExpression().toString());
assertEquals( assertEquals(
Alarm.buildStateChangeReason(AlarmState.UNDETERMINED, expressions), Alarm.buildStateChangeReason(AlarmState.UNDETERMINED, expressions),
@ -149,19 +158,22 @@ public class AlarmTest {
/** /**
* This test is here because this case happened in the Threshold Engine. The AlarmExpression * This test is here because this case happened in the Threshold Engine. The AlarmExpression
* resulted in a MetricDefinition with null dimensions and SubAlarm had empty dimensions * resulted in a MetricDefinition with null dimensions and SubAlarm had empty dimensions and that
* and that didn't match causing an IllegalArgumentException. MetricDefinition.equals() has * didn't match causing an IllegalArgumentException. MetricDefinition.equals() has been changed to
* been changed to consider those two values for dimensions the same * consider those two values for dimensions the same
*/ */
public void testDimensions() { public void testDimensions() {
final AlarmExpression expression = AlarmExpression.of("max(cpu_system_perc) > 1"); final AlarmExpression expression = AlarmExpression.of("max(cpu_system_perc) > 1");
final MetricDefinition metricDefinition = new MetricDefinition("cpu_system_perc", new HashMap<String, String>()); final MetricDefinition metricDefinition =
final AlarmSubExpression ase = new AlarmSubExpression(AggregateFunction.MAX, metricDefinition, AlarmOperator.GT, 1, 60, 1); new MetricDefinition("cpu_system_perc", new HashMap<String, String>());
final AlarmSubExpression ase =
new AlarmSubExpression(AggregateFunction.MAX, metricDefinition, AlarmOperator.GT, 1, 60, 1);
final SubAlarm subAlarm = new SubAlarm("123", "456", ase); final SubAlarm subAlarm = new SubAlarm("123", "456", ase);
final Map<AlarmSubExpression, Boolean> subExpressionValues = new HashMap<AlarmSubExpression, Boolean>(); final Map<AlarmSubExpression, Boolean> subExpressionValues =
new HashMap<AlarmSubExpression, Boolean>();
subExpressionValues.put(subAlarm.getExpression(), true); subExpressionValues.put(subAlarm.getExpression(), true);
assertEquals(expression.getSubExpressions().get(0).getMetricDefinition().hashCode(), assertEquals(expression.getSubExpressions().get(0).getMetricDefinition().hashCode(),
metricDefinition.hashCode()); metricDefinition.hashCode());
// Handle ALARM state // Handle ALARM state
assertTrue(expression.evaluate(subExpressionValues)); assertTrue(expression.evaluate(subExpressionValues));
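The comment above relies on a null dimension map and an empty one comparing as equal. A minimal sketch of that equivalence, assuming nothing about the project's actual MetricDefinition.equals() implementation (class and method names below are illustrative only):

import java.util.Map;

// Sketch: treat a null dimension map and an empty one as equal, which is the behavior
// testDimensions() above depends on. Not the real MetricDefinition code.
final class DimensionEqualitySketch {
  static boolean dimensionsEqual(Map<String, String> a, Map<String, String> b) {
    final boolean aEmpty = a == null || a.isEmpty();
    final boolean bEmpty = b == null || b.isEmpty();
    if (aEmpty || bEmpty) {
      return aEmpty && bEmpty; // null and empty dimension maps are considered the same
    }
    return a.equals(b);
  }
}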
View File
@@ -14,193 +14,219 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.domain.model;

import static org.testng.Assert.assertEqualsNoOrder;
import static org.testng.Assert.assertTrue;

import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantIdMatcher.DimensionPair;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantIdMatcher.DimensionSet;

import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Test
public class MetricDefinitionAndTenantIdMatcherTest {
  private static final String HOST = "host";
  private static final String LOAD_BALANCER_GROUP = "loadBalancerGroup";
  private static final String CPU_METRIC_NAME = "cpu";
  private MetricDefinitionAndTenantIdMatcher matcher;
  private final String tenantId = "4242";
  private MetricDefinition metricDef;
  private Map<String, String> dimensions;

  @BeforeMethod
  protected void beforeMethod() {
    matcher = new MetricDefinitionAndTenantIdMatcher();
    dimensions = new HashMap<>();
    dimensions.put(HOST, "CloudAmI");
    dimensions.put(LOAD_BALANCER_GROUP, "GroupA");
    metricDef = new MetricDefinition(CPU_METRIC_NAME, dimensions);
  }

  public void shouldNotFind() {
    assertTrue(matcher.isEmpty());
    final MetricDefinitionAndTenantId toMatch = new MetricDefinitionAndTenantId(metricDef, tenantId);
    verifyNoMatch(toMatch);

    final MetricDefinitionAndTenantId diffTenantId =
        new MetricDefinitionAndTenantId(metricDef, "Different");
    matcher.add(diffTenantId);
    verifyNoMatch(toMatch);

    matcher.add(toMatch);
    verifyMatch(toMatch, toMatch);

    final MetricDefinitionAndTenantId noMatchOnName =
        new MetricDefinitionAndTenantId(new MetricDefinition("NotCpu", dimensions), tenantId);
    verifyNoMatch(noMatchOnName);

    final Map<String, String> hostDimensions = new HashMap<>(dimensions);
    hostDimensions.put(HOST, "OtherHost");
    final MetricDefinitionAndTenantId noMatchOnDimensions =
        new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, hostDimensions),
            tenantId);
    verifyNoMatch(noMatchOnDimensions);

    matcher.remove(toMatch);
    verifyNoMatch(toMatch);
    matcher.remove(diffTenantId);
    assertTrue(matcher.isEmpty());
  }

  private void verifyNoMatch(final MetricDefinitionAndTenantId toMatch) {
    verifyMatch(toMatch);
  }

  private void verifyMatch(final MetricDefinitionAndTenantId toMatch,
      final MetricDefinitionAndTenantId... expected) {
    final List<MetricDefinitionAndTenantId> matches = matcher.match(toMatch);
    assertEqualsNoOrder(matches.toArray(), expected);
  }

  public void shouldFind() {
    assertTrue(matcher.isEmpty());
    final MetricDefinitionAndTenantId toMatch = new MetricDefinitionAndTenantId(metricDef, tenantId);

    final Map<String, String> nullDimensions = new HashMap<>(dimensions);
    nullDimensions.put(HOST, null);
    final MetricDefinitionAndTenantId nullMatch =
        new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, nullDimensions),
            tenantId);
    matcher.add(nullMatch);
    verifyMatch(nullMatch, nullMatch);

    final Map<String, String> noDimensions = new HashMap<>();
    final MetricDefinitionAndTenantId noMatch =
        new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, noDimensions),
            tenantId);
    matcher.add(noMatch);
    verifyMatch(noMatch, noMatch);

    final Map<String, String> hostDimensions = new HashMap<>();
    hostDimensions.put(HOST, dimensions.get(HOST));
    final MetricDefinitionAndTenantId hostMatch =
        new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, hostDimensions),
            tenantId);
    matcher.add(hostMatch);

    final Map<String, String> groupDimensions = new HashMap<>();
    groupDimensions.put(LOAD_BALANCER_GROUP, dimensions.get(LOAD_BALANCER_GROUP));
    final MetricDefinitionAndTenantId groupMatch =
        new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, groupDimensions),
            tenantId);
    matcher.add(groupMatch);

    verifyMatch(toMatch, noMatch, hostMatch, groupMatch);

    matcher.add(toMatch);
    verifyMatch(toMatch, noMatch, hostMatch, groupMatch, toMatch);

    matcher.remove(groupMatch);
    verifyMatch(toMatch, noMatch, hostMatch, toMatch);

    matcher.remove(noMatch);
    verifyMatch(toMatch, hostMatch, toMatch);

    matcher.remove(toMatch);
    verifyMatch(toMatch, hostMatch);

    // Remove it again to ensure it won't throw an exception if the MetricDefinitionAndTenantId
    // doesn't exist
    matcher.remove(toMatch);

    final MetricDefinitionAndTenantId loadMetric =
        new MetricDefinitionAndTenantId(
            new MetricDefinition("load", new HashMap<String, String>(dimensions)), tenantId);
    matcher.add(loadMetric);

    matcher.remove(hostMatch);
    verifyNoMatch(toMatch);

    // Remove it again to ensure it won't throw an exception if the MetricDefinitionAndTenantId
    // doesn't exist
    matcher.remove(hostMatch);

    matcher.remove(loadMetric);
    matcher.remove(nullMatch);
    assertTrue(matcher.isEmpty());
    verifyNoMatch(toMatch);
  }

  public void shouldCreatePossiblePairs() {
    final Map<String, String> dimensions = new HashMap<>();
    DimensionSet[] actual =
        matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
    DimensionSet[] expected = {new DimensionSet()};
    assertEqualsNoOrder(actual, expected);

    dimensions.put("1", "a");
    actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
    expected =
        new DimensionSet[] {new DimensionSet(), new DimensionSet(new DimensionPair("1", "a"))};
    assertEqualsNoOrder(actual, expected);

    dimensions.put("2", "b");
    actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
    expected =
        new DimensionSet[] {new DimensionSet(), new DimensionSet(new DimensionPair("1", "a")),
            new DimensionSet(new DimensionPair("2", "b")),
            new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"))};
    assertEqualsNoOrder(actual, expected);

    dimensions.put("3", "c");
    actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
    expected =
        new DimensionSet[] {
            new DimensionSet(),
            new DimensionSet(new DimensionPair("1", "a")),
            new DimensionSet(new DimensionPair("2", "b")),
            new DimensionSet(new DimensionPair("3", "c")),
            new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b")),
            new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("3", "c")),
            new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("3", "c")),
            new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"),
                new DimensionPair("3", "c"))};

    dimensions.put("4", "d");
    actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
    expected =
        new DimensionSet[] {
            new DimensionSet(),
            new DimensionSet(new DimensionPair("1", "a")),
            new DimensionSet(new DimensionPair("2", "b")),
            new DimensionSet(new DimensionPair("3", "c")),
            new DimensionSet(new DimensionPair("4", "d")),
            new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b")),
            new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("3", "c")),
            new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("4", "d")),
            new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("3", "c")),
            new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("4", "d")),
            new DimensionSet(new DimensionPair("3", "c"), new DimensionPair("4", "d")),
            new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"),
                new DimensionPair("3", "c")),
            new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"),
                new DimensionPair("4", "d")),
            new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("3", "c"),
                new DimensionPair("4", "d")),
            new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("3", "c"),
                new DimensionPair("4", "d")),
            new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"),
                new DimensionPair("3", "c"), new DimensionPair("4", "d"))};
    assertEqualsNoOrder(actual, expected);
  }
}
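The expectations in shouldCreatePossiblePairs() enumerate every subset of the metric's dimensions, so the number of DimensionSets grows as a power set. A minimal sketch of that count, with illustrative names only (not the matcher's API):

// Sketch: the matcher is expected to produce one DimensionSet per subset of the metric's
// dimensions, i.e. 2^n sets for n dimensions (1, 2, 4, 8 and 16 in the assertions above).
final class PossiblePairCountSketch {
  static int expectedDimensionSetCount(int dimensionCount) {
    return 1 << dimensionCount; // size of the power set, including the empty DimensionSet
  }

  public static void main(String[] args) {
    for (int n = 0; n <= 4; n++) {
      System.out.println(n + " dimensions -> " + expectedDimensionSetCount(n) + " dimension sets");
    }
  }
}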
View File
@@ -14,18 +14,19 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.domain.model;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;

import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;

import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

@Test
public class SubAlarmStatsTest {
  private AlarmSubExpression expression;

@@ -43,7 +44,7 @@ public class SubAlarmStatsTest {
  public void shouldBeOkIfAnySlotsInViewAreBelowThreshold() {
    subAlarmStats.getStats().addValue(5, 1);
    assertFalse(subAlarmStats.evaluateAndSlideWindow(61));
    assertEquals(subAlarmStats.getSubAlarm().getState(), AlarmState.UNDETERMINED);

    subAlarmStats.getStats().addValue(1, 62);
    assertTrue(subAlarmStats.evaluateAndSlideWindow(121));

@@ -106,8 +107,9 @@ public class SubAlarmStatsTest {
    // equivalent to the behavior in CloudWatch for an alarm with 3 evaluation periods. 2 more
    // slides to move the value outside of the window and 6 more to exceed the observation
    // threshold.
    for (int i = 0; i < 7; i++) {
      assertFalse(subAlarmStats.evaluateAndSlideWindow(initialTime += 60));
    }
    assertTrue(subAlarmStats.evaluateAndSlideWindow(initialTime += 60));
    assertEquals(subAlarmStats.getSubAlarm().getState(), AlarmState.UNDETERMINED);
    subAlarmStats.getStats().addValue(5, initialTime - 1);

@@ -139,10 +141,11 @@ public class SubAlarmStatsTest {
  }

  public void checkLongPeriod() {
    final AlarmSubExpression subExpr =
        AlarmSubExpression.of("sum(hpcs.compute.mem{id=5}, 120) >= 96");
    final SubAlarm subAlarm = new SubAlarm("42", "4242", subExpr);

    long t1 = 0;
    final SubAlarmStats stats = new SubAlarmStats(subAlarm, t1 + subExpr.getPeriod());
    for (int i = 0; i < 360; i++) {

@@ -150,13 +153,14 @@ public class SubAlarmStatsTest {
      stats.getStats().addValue(1.0, t1);
      if ((t1 % 60) == 0) {
        stats.evaluateAndSlideWindow(t1);
        if (i <= 60) {
          // First check will show it is OK. You could argue that this is incorrect
          // as we have not waited for the whole period so we can't really evaluate it.
          // That is true for sum and count
          assertEquals(stats.getSubAlarm().getState(), AlarmState.OK);
        } else {
          assertEquals(stats.getSubAlarm().getState(), AlarmState.ALARM);
        }
      }
    }
  }
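checkLongPeriod() above feeds one value per second but only evaluates and slides the window on 60-second boundaries, even though the sub-alarm period is 120 seconds. A minimal sketch of that cadence, under the assumption stated; the class below is illustrative, not the real SubAlarmStats API:

// Sketch of the timing used by checkLongPeriod(): values arrive every second, while
// evaluateAndSlideWindow() is only driven once per 60-second boundary.
final class SlideCadenceSketch {
  public static void main(String[] args) {
    final int periodSeconds = 120; // width of the aggregation window in the sub-alarm expression
    final int slideSeconds = 60; // how often the test evaluates and slides the window
    int evaluations = 0;
    for (int t = 1; t <= 360; t++) {
      if (t % slideSeconds == 0) {
        evaluations++;
      }
    }
    System.out.println("360s of data, period " + periodSeconds + "s -> " + evaluations
        + " evaluations");
  }
}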
View File
@@ -14,12 +14,19 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.persistence;

import static org.testng.Assert.assertEquals;

import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.domain.model.Alarm;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.AlarmDAO;

import com.google.common.io.Resources;

import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;

@@ -28,13 +35,8 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.nio.charset.Charset;
import java.util.Arrays;

@Test
public class AlarmDAOImplTest {

@@ -52,7 +54,8 @@ public class AlarmDAOImplTest {
  protected void setupClass() throws Exception {
    db = new DBI("jdbc:h2:mem:test;MODE=MySQL");
    handle = db.open();
    handle.execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
    dao = new AlarmDAOImpl(db);
  }

@@ -68,12 +71,16 @@ public class AlarmDAOImplTest {
    handle.execute("truncate table sub_alarm_dimension");
    handle.execute("truncate table alarm_action");

    String sql =
        String.format(
            "insert into alarm (id, tenant_id, name, description, expression, state, actions_enabled, created_at, updated_at) "
                + "values ('%s', '%s', '%s', '%s', 'avg(hpcs.compute{disk=vda, instance_id=123, metric_name=cpu}) > 10', 'UNDETERMINED', %d, NOW(), NOW())",
            ALARM_ID, TENANT_ID, ALARM_NAME, ALARM_DESCR, ALARM_ENABLED ? 1 : 0);
    handle.execute(sql);

    handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
        + "values ('111', '123', 'AVG', 'hpcs.compute', 'GT', 10, 60, 1, NOW(), NOW())");
    handle.execute("insert into sub_alarm_dimension values ('111', 'instance_id', '123')");
    handle.execute("insert into sub_alarm_dimension values ('111', 'disk', 'vda')");
    handle.execute("insert into sub_alarm_dimension values ('111', 'metric_name', 'cpu')");

@@ -83,9 +90,10 @@ public class AlarmDAOImplTest {
  public void shouldFindById() {
    String expr = "avg(hpcs.compute{disk=vda, instance_id=123, metric_name=cpu}) > 10";
    Alarm expected =
        new Alarm(ALARM_ID, TENANT_ID, ALARM_NAME, ALARM_DESCR, AlarmExpression.of(expr),
            Arrays.asList(new SubAlarm("111", ALARM_ID, AlarmSubExpression.of(expr))),
            AlarmState.UNDETERMINED, Boolean.TRUE);

    Alarm alarm = dao.findById(ALARM_ID);
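The DAO tests above run against an in-memory H2 database in MySQL compatibility mode, loading the schema from a classpath resource before the tests run. A minimal sketch of that setup pattern using the same JDBI API; the helper name and the schemaSql argument are illustrative (the test loads "alarm.sql"):

import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;

// Sketch of the in-memory test-database setup used by the persistence tests above.
final class InMemoryDbSetupSketch {
  static Handle openTestHandle(String schemaSql) {
    final DBI db = new DBI("jdbc:h2:mem:test;MODE=MySQL");
    final Handle handle = db.open();
    handle.execute(schemaSql); // create the alarm/sub_alarm tables the DAO expects
    return handle;
  }
}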
View File
@@ -14,6 +14,7 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.persistence;

import static org.testng.Assert.assertTrue;

@@ -36,8 +37,8 @@ import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmMetricDefinition;

/**
 * Note: MySQL dependent test because of the group_concat() used in the SQL in
 * MetricDefinitionDAOImpl. Depends on the MySQL in mini-mon.
 */
@Test(groups = "database")
public class MetricDefinitionDAOImplTest {

@@ -64,38 +65,39 @@ public class MetricDefinitionDAOImplTest {
  protected void beforeMethod() {
    cleanUp();
    handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
        + "values ('123', '" + TENANT_ID
        + "', 'Test Alarm', 'Test Alarm Description', 'Not real expr', 'OK', NOW(), NOW())");

    handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
        + "values ('111', '123', 'AVG', 'cpu', 'GT', 10, 60, 1, 'OK', NOW(), NOW())");
    handle.execute("insert into sub_alarm_dimension values ('111', 'device', '1')");
    handle.execute("insert into sub_alarm_dimension values ('111', 'instance_id', '777')");
    handle.execute("insert into sub_alarm_dimension values ('111', 'image_id', '888')");

    handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
        + "values ('222', '123', 'AVG', 'mem', 'GT', 10, 60, 1, 'OK', NOW(), NOW())");
    handle.execute("insert into sub_alarm_dimension values ('222', 'instance_id', '123')");
    handle.execute("insert into sub_alarm_dimension values ('222', 'az', '2')");

    handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
        + "values ('333', '123', 'AVG', 'bar', 'GT', 10, 60, 1, 'OK', NOW(), NOW())");

    SubAlarmMetricDefinition metricDef1 =
        new SubAlarmMetricDefinition("111", new MetricDefinitionAndTenantId(new MetricDefinition(
            "cpu", ImmutableMap.<String, String>builder().put("device", "1")
                .put("instance_id", "777").put("image_id", "888").build()), TENANT_ID));
    SubAlarmMetricDefinition metricDef2 =
        new SubAlarmMetricDefinition("222", new MetricDefinitionAndTenantId(new MetricDefinition(
            "mem", ImmutableMap.<String, String>builder().put("az", "2").put("instance_id", "123")
                .build()), TENANT_ID));
    SubAlarmMetricDefinition metricDef3 =
        new SubAlarmMetricDefinition("333", new MetricDefinitionAndTenantId(new MetricDefinition(
            "bar", null), TENANT_ID));
    expected = Arrays.asList(metricDef1, metricDef2, metricDef3);
  }

@@ -109,15 +111,17 @@ public class MetricDefinitionDAOImplTest {
    List<SubAlarmMetricDefinition> found = dao.findForAlarms();

    for (final SubAlarmMetricDefinition toFind : expected) {
      assertTrue(found.contains(toFind), "Did not find " + toFind);
    }
  }

  public void shouldNotFindDeletedAlarms() {
    handle.execute("update alarm set deleted_at=NOW() where id in ('123')");

    List<SubAlarmMetricDefinition> found = dao.findForAlarms();

    for (final SubAlarmMetricDefinition toFind : expected) {
      assertFalse(found.contains(toFind), "Should not have found " + toFind);
    }
  }
}
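The class comment above explains why this test needs the real MySQL in mini-mon rather than H2: the DAO's SQL uses group_concat() to collapse the per-dimension rows into one column per sub-alarm. A sketch of that general query shape only; the table and column names below are assumptions, not the DAO's actual SQL:

// Sketch: illustrative group_concat() usage of the kind MetricDefinitionDAOImpl depends on.
final class GroupConcatSketch {
  static final String EXAMPLE_SQL =
      "select sa.id, group_concat(sad.dimension_name, '=', sad.value) as dimensions"
          + " from sub_alarm sa left join sub_alarm_dimension sad on sad.sub_alarm_id = sa.id"
          + " group by sa.id";
}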
View File
@@ -14,13 +14,19 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.persistence;

import static org.testng.Assert.assertEquals;

import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.SubAlarmDAO;

import com.google.common.io.Resources;

import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;

@@ -29,13 +35,9 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.List;

@Test
public class SubAlarmDAOImplTest {

@@ -48,7 +50,8 @@ public class SubAlarmDAOImplTest {
  protected void setupClass() throws Exception {
    db = new DBI("jdbc:h2:mem:test;MODE=MySQL");
    handle = db.open();
    handle.execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
    dao = new SubAlarmDAOImpl(db);
  }

@@ -64,75 +67,101 @@ public class SubAlarmDAOImplTest {
    handle.execute("truncate table sub_alarm_dimension");

    // These don't have the real Alarm expression because it doesn't matter for this test
    handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
        + "values ('123', '" + TENANT_ID
        + "', 'Test Alarm', 'Test Alarm Description', 'Not real expr', 'OK', NOW(), NOW())");
    handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
        + "values ('234', '" + TENANT_ID
        + "', 'Test Alarm2', 'Test Alarm2 Description', 'Not real expr', 'OK', NOW(), NOW())");
    handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
        + "values ('345', '" + TENANT_ID
        + "', 'Test Alarm3', 'Test Alarm3 Description', 'Not real expr', 'OK', NOW(), NOW())");
    handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
        + "values ('456', '" + TENANT_ID
        + "', 'Test Alarm4', 'Test Alarm4 Description', 'Not real expr', 'OK', NOW(), NOW())");

    handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
        + "values ('111', '123', 'AVG', 'cpu', 'GT', 10, 60, 1, NOW(), NOW())");
    handle.execute("insert into sub_alarm_dimension values ('111', 'instance_id', '555')");
    handle.execute("insert into sub_alarm_dimension values ('111', 'az', '1')");
    handle.execute("insert into sub_alarm_dimension values ('111', 'instance_uuid', '555')");

    handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
        + "values ('222', '234', 'AVG', 'cpu', 'GT', 10, 60, 1, NOW(), NOW())");
    handle.execute("insert into sub_alarm_dimension values ('222', 'instance_id', '666')");
    handle.execute("insert into sub_alarm_dimension values ('222', 'az', '1')");
    handle.execute("insert into sub_alarm_dimension values ('222', 'instance_uuid', '666')");

    handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
        + "values ('333', '345', 'AVG', 'disk', 'GT', 10, 60, 1, NOW(), NOW())");
    handle.execute("insert into sub_alarm_dimension values ('333', 'instance_id', '777')");
    handle.execute("insert into sub_alarm_dimension values ('333', 'az', '1')");
    handle.execute("insert into sub_alarm_dimension values ('333', 'instance_uuid', '777')");
    handle.execute("insert into sub_alarm_dimension values ('333', 'device', 'vda')");

    handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
        + "values ('444', '456', 'AVG', 'cpu', 'GT', 10, 60, 1, NOW(), NOW())");
  }

  public void shouldFind() {
    List<SubAlarm> expected =
        Arrays.asList(new SubAlarm("111", "123",
            AlarmSubExpression.of("avg(cpu{instance_id=555,az=1}) > 10"), AlarmState.UNDETERMINED));
    List<SubAlarm> subAlarms =
        dao.find(new MetricDefinitionAndTenantId(
            expected.get(0).getExpression().getMetricDefinition(), TENANT_ID));
    assertEquals(subAlarms, expected);

    expected =
        Arrays.asList(new SubAlarm("222", "234",
            AlarmSubExpression.of("avg(cpu{instance_id=666,az=1}) > 10"), AlarmState.UNDETERMINED));
    subAlarms =
        dao.find(new MetricDefinitionAndTenantId(
            expected.get(0).getExpression().getMetricDefinition(), TENANT_ID));
    assertEquals(subAlarms, expected);
  }

  public void shouldNotFind() {
    final String badTenantId = TENANT_ID + "42";
    List<SubAlarm> subAlarms =
        dao.find(new MetricDefinitionAndTenantId(
            AlarmSubExpression.of("avg(cpu{instance_id=555,az=1}) > 10").getMetricDefinition(),
            badTenantId));
    assertEquals(subAlarms.size(), 0);

    subAlarms =
        dao.find(new MetricDefinitionAndTenantId(
            AlarmSubExpression.of("avg(cpu{instance_id=666,az=1}) > 10").getMetricDefinition(),
            badTenantId));
    assertEquals(subAlarms.size(), 0);
  }

  public void shouldFindWithSubject() {
    List<SubAlarm> expected =
        Arrays.asList(new SubAlarm("333", "345",
            AlarmSubExpression.of("avg(disk{instance_id=777,az=1,device=vda}) > 10"),
            AlarmState.UNDETERMINED));
    List<SubAlarm> subAlarms =
        dao.find(new MetricDefinitionAndTenantId(
            expected.get(0).getExpression().getMetricDefinition(), TENANT_ID));
    assertEquals(subAlarms, expected);
  }

  public void shouldFindForNullDimensions() {
    List<SubAlarm> expected =
        Arrays.asList(new SubAlarm("444", "456", AlarmSubExpression.of("avg(cpu{}) > 10"),
            AlarmState.UNDETERMINED));
    List<SubAlarm> subAlarms =
        dao.find(new MetricDefinitionAndTenantId(new MetricDefinition("cpu", null), TENANT_ID));
    assertEquals(subAlarms, expected);
  }
}
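shouldNotFind() above checks tenant scoping: the same metric definition looked up under "badTenantId" returns no sub-alarms, because the lookup key combines the metric definition with the tenant id. A minimal sketch of such a composite key; this class is illustrative and the real MetricDefinitionAndTenantId may differ:

import java.util.Objects;

// Sketch: a lookup key that only matches when both the metric name and the tenant id match.
final class MetricTenantKeySketch {
  private final String metricName;
  private final String tenantId;

  MetricTenantKeySketch(String metricName, String tenantId) {
    this.metricName = metricName;
    this.tenantId = tenantId;
  }

  @Override
  public boolean equals(Object other) {
    if (!(other instanceof MetricTenantKeySketch)) {
      return false;
    }
    final MetricTenantKeySketch o = (MetricTenantKeySketch) other;
    return metricName.equals(o.metricName) && tenantId.equals(o.tenantId);
  }

  @Override
  public int hashCode() {
    return Objects.hash(metricName, tenantId);
  }
}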
View File
@ -14,30 +14,15 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon.infrastructure.thresholding; package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
import static org.mockito.Mockito.times;
import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import com.hpcloud.mon.ThresholdingConfiguration; import com.hpcloud.mon.ThresholdingConfiguration;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent; import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmExpression; import com.hpcloud.mon.common.model.alarm.AlarmExpression;
@ -48,208 +33,240 @@ import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.AlarmDAO; import com.hpcloud.mon.domain.service.AlarmDAO;
import com.hpcloud.streaming.storm.Streams; import com.hpcloud.streaming.storm.Streams;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
@Test @Test
public class AlarmThresholdingBoltTest { public class AlarmThresholdingBoltTest {
private static final String ALERT_ROUTING_KEY = "Alert Routing Key"; private static final String ALERT_ROUTING_KEY = "Alert Routing Key";
private static final String ALERTS_EXCHANGE = "Alerts"; private static final String ALERTS_EXCHANGE = "Alerts";
private static final String tenantId = "AAAAABBBBBBCCCCC"; private static final String tenantId = "AAAAABBBBBBCCCCC";
private AlarmExpression alarmExpression; private AlarmExpression alarmExpression;
private Alarm alarm; private Alarm alarm;
private List<SubAlarm> subAlarms; private List<SubAlarm> subAlarms;
private AlarmEventForwarder alarmEventForwarder; private AlarmEventForwarder alarmEventForwarder;
private AlarmDAO alarmDAO; private AlarmDAO alarmDAO;
private AlarmThresholdingBolt bolt; private AlarmThresholdingBolt bolt;
private OutputCollector collector; private OutputCollector collector;
private final String[] subExpressions = { private final String[] subExpressions = {"avg(cpu{instance_id=123,device=42}, 1) > 5",
"avg(cpu{instance_id=123,device=42}, 1) > 5", "max(load{instance_id=123,device=42}, 1) > 8",
"max(load{instance_id=123,device=42}, 1) > 8", "sum(diskio{instance_id=123,device=42}, 1) > 5000"};
"sum(diskio{instance_id=123,device=42}, 1) > 5000" };
@BeforeMethod @BeforeMethod
protected void beforeMethod() { protected void beforeMethod() {
final String alarmId = "111111112222222222233333333334"; final String alarmId = "111111112222222222233333333334";
final StringBuilder builder = new StringBuilder(); final StringBuilder builder = new StringBuilder();
for (final String subExpression : subExpressions) { for (final String subExpression : subExpressions) {
if (builder.length() > 0) if (builder.length() > 0) {
builder.append(" or "); builder.append(" or ");
builder.append(subExpression); }
} builder.append(subExpression);
final String expression = builder.toString(); }
alarm = new Alarm(); final String expression = builder.toString();
alarm.setName("Test CPU Alarm"); alarm = new Alarm();
alarm.setDescription("Description of Alarm"); alarm.setName("Test CPU Alarm");
alarm.setTenantId(tenantId); alarm.setDescription("Description of Alarm");
alarm.setId(alarmId); alarm.setTenantId(tenantId);
alarm.setExpression(expression); alarm.setId(alarmId);
alarm.setState(AlarmState.OK); alarm.setExpression(expression);
alarmExpression = new AlarmExpression(expression); alarm.setState(AlarmState.OK);
final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions(); alarmExpression = new AlarmExpression(expression);
subAlarms = new ArrayList<SubAlarm>(subExpressions.size()); final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
for (int i = 0; i < subExpressions.size(); i++) { subAlarms = new ArrayList<SubAlarm>(subExpressions.size());
final SubAlarm subAlarm = new SubAlarm(UUID.randomUUID().toString(), alarmId, subExpressions.get(i)); for (int i = 0; i < subExpressions.size(); i++) {
subAlarms.add(subAlarm); final SubAlarm subAlarm =
} new SubAlarm(UUID.randomUUID().toString(), alarmId, subExpressions.get(i));
alarm.setSubAlarms(subAlarms); subAlarms.add(subAlarm);
}
alarm.setSubAlarms(subAlarms);
alarmEventForwarder = mock(AlarmEventForwarder.class); alarmEventForwarder = mock(AlarmEventForwarder.class);
alarmDAO = mock(AlarmDAO.class); alarmDAO = mock(AlarmDAO.class);
bolt = new MockAlarmThreshholdBolt(alarmDAO, alarmEventForwarder); bolt = new MockAlarmThreshholdBolt(alarmDAO, alarmEventForwarder);
collector = mock(OutputCollector.class); collector = mock(OutputCollector.class);
final Map<String, String> config = new HashMap<>(); final Map<String, String> config = new HashMap<>();
config.put(ThresholdingConfiguration.ALERTS_EXCHANGE, ALERTS_EXCHANGE); config.put(ThresholdingConfiguration.ALERTS_EXCHANGE, ALERTS_EXCHANGE);
config.put(ThresholdingConfiguration.ALERTS_ROUTING_KEY, ALERT_ROUTING_KEY); config.put(ThresholdingConfiguration.ALERTS_ROUTING_KEY, ALERT_ROUTING_KEY);
final TopologyContext context = mock(TopologyContext.class); final TopologyContext context = mock(TopologyContext.class);
bolt.prepare(config, context, collector); bolt.prepare(config, context, collector);
}
/**
* Create a simple Alarm with one sub expression. Send a SubAlarm with state set to ALARM. Ensure
* that the Alarm was triggered and sent
*/
public void simpleAlarmCreation() {
final SubAlarm subAlarm = subAlarms.get(0);
final String alarmId = alarm.getId();
when(alarmDAO.findById(alarmId)).thenReturn(alarm);
emitSubAlarmStateChange(alarmId, subAlarm, AlarmState.ALARM);
for (int i = 1; i < subAlarms.size(); i++) {
emitSubAlarmStateChange(alarmId, subAlarms.get(i), AlarmState.OK);
}
final String alarmJson =
"{\"alarm-transitioned\":{\"tenantId\":\""
+ tenantId
+ "\","
+ "\"alarmId\":\"111111112222222222233333333334\",\"alarmName\":\"Test CPU Alarm\","
+ "\"alarmDescription\":\"Description of Alarm\",\"oldState\":\"OK\",\"newState\":\"ALARM\","
+ "\"actionsEnabled\":true,"
+ "\"stateChangeReason\":\"Thresholds were exceeded for the sub-alarms: ["
+ subAlarm.getExpression().getExpression() + "]\"," + "\"timestamp\":1395587091}}";
verify(alarmEventForwarder, times(1)).send(ALERTS_EXCHANGE, ALERT_ROUTING_KEY, alarmJson);
verify(alarmDAO, times(1)).updateState(alarmId, AlarmState.ALARM);
// Now clear the alarm and ensure another notification gets sent out
subAlarm.setState(AlarmState.OK);
final Tuple clearTuple = createSubAlarmStateChangeTuple(alarmId, subAlarm);
bolt.execute(clearTuple);
verify(collector, times(1)).ack(clearTuple);
final String okJson =
"{\"alarm-transitioned\":{\"tenantId\":\""
+ tenantId
+ "\","
+ "\"alarmId\":\"111111112222222222233333333334\",\"alarmName\":\"Test CPU Alarm\","
+ "\"alarmDescription\":\"Description of Alarm\",\"oldState\":\"ALARM\",\"newState\":\"OK\","
+ "\"actionsEnabled\":true,"
+ "\"stateChangeReason\":\"The alarm threshold(s) have not been exceeded\",\"timestamp\":1395587091}}";
verify(alarmEventForwarder, times(1)).send(ALERTS_EXCHANGE, ALERT_ROUTING_KEY, okJson);
verify(alarmDAO, times(1)).updateState(alarmId, AlarmState.OK);
}
public void simpleAlarmUpdate() {
String alarmId = setUpInitialAlarm();
// Now send an AlarmUpdatedEvent
final Map<String, AlarmSubExpression> empty = new HashMap<>();
final String newName = "New Name";
final String newDescription = "New Description";
final AlarmState newState = AlarmState.OK;
boolean newEnabled = false;
final AlarmUpdatedEvent event =
new AlarmUpdatedEvent(tenantId, alarmId, newName, newDescription, alarm
.getAlarmExpression().getExpression(), alarm.getState(), newState, newEnabled, empty,
empty, empty, empty);
final Tuple updateTuple = createAlarmUpdateTuple(event);
bolt.execute(updateTuple);
verify(collector, times(1)).ack(updateTuple);
assertEquals(alarm.getName(), newName);
assertEquals(alarm.getState(), newState);
assertEquals(alarm.isActionsEnabled(), newEnabled);
}
public void complexAlarmUpdate() {
String alarmId = setUpInitialAlarm();
// Now send an AlarmUpdatedEvent
final Map<String, AlarmSubExpression> newSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> oldSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> changedSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> unchangedSubExpressions = new HashMap<>();
final String newExpression =
subExpressions[1] + " or " + subExpressions[2].replace("max", "avg") + " or "
+ "sum(diskio{instance_id=123,device=4242}, 1) > 5000";
final AlarmExpression newAlarmExpression = new AlarmExpression(newExpression);
final SubAlarm newSubAlarm =
new SubAlarm(UUID.randomUUID().toString(), alarmId, newAlarmExpression.getSubExpressions()
.get(2));
newSubExpressions.put(newSubAlarm.getId(), newSubAlarm.getExpression());
final SubAlarm deletedSubAlarm = subAlarms.get(0);
oldSubExpressions.put(deletedSubAlarm.getId(), deletedSubAlarm.getExpression());
final SubAlarm changedSubAlarm =
new SubAlarm(subAlarms.get(2).getId(), alarmId, newAlarmExpression.getSubExpressions().get(
1));
changedSubExpressions.put(changedSubAlarm.getId(), changedSubAlarm.getExpression());
final SubAlarm unChangedSubAlarm =
new SubAlarm(subAlarms.get(1).getId(), alarmId, subAlarms.get(1).getExpression());
unchangedSubExpressions.put(unChangedSubAlarm.getId(), unChangedSubAlarm.getExpression());
emitSubAlarmStateChange(alarmId, changedSubAlarm, AlarmState.OK);
emitSubAlarmStateChange(alarmId, unChangedSubAlarm, AlarmState.OK);
unChangedSubAlarm.setState(AlarmState.OK);
final AlarmUpdatedEvent event =
new AlarmUpdatedEvent(tenantId, alarmId, alarm.getName(), alarm.getDescription(),
newExpression, alarm.getState(), alarm.getState(), alarm.isActionsEnabled(),
oldSubExpressions, changedSubExpressions, unchangedSubExpressions, newSubExpressions);
final Tuple updateTuple = createAlarmUpdateTuple(event);
bolt.execute(updateTuple);
verify(collector, times(1)).ack(updateTuple);
final Alarm changedAlarm = bolt.alarms.get(alarmId);
assertEquals(changedAlarm.getAlarmExpression(), newAlarmExpression);
assertEquals(changedAlarm.getSubAlarms().size(), 3);
assertEquals(changedAlarm.getSubAlarm(unChangedSubAlarm.getId()), unChangedSubAlarm);
assertEquals(changedAlarm.getSubAlarm(newSubAlarm.getId()), newSubAlarm);
changedSubAlarm.setState(AlarmState.OK);
assertEquals(changedAlarm.getSubAlarm(changedSubAlarm.getId()), changedSubAlarm);
assertEquals(changedSubAlarm.isNoState(), false);
}
private String setUpInitialAlarm() {
final String alarmId = alarm.getId();
when(alarmDAO.findById(alarmId)).thenReturn(alarm);
// Load up the original Alarm
emitSubAlarmStateChange(alarmId, subAlarms.get(0), AlarmState.ALARM);
return alarmId;
}
private void emitSubAlarmStateChange(String alarmId, final SubAlarm subAlarm, AlarmState state) {
// Create a copy so changing the state doesn't directly update the ones in the bolt
final SubAlarm toEmit =
new SubAlarm(subAlarm.getId(), subAlarm.getAlarmId(), subAlarm.getExpression());
toEmit.setState(state);
final Tuple tuple = createSubAlarmStateChangeTuple(alarmId, toEmit);
bolt.execute(tuple);
verify(collector, times(1)).ack(tuple);
}
private Tuple createAlarmUpdateTuple(AlarmUpdatedEvent event) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.ALARM_EVENT_STREAM_ID);
final Tuple tuple =
Testing.testTuple(Arrays.asList(EventProcessingBolt.UPDATED, event.alarmId, event),
tupleParam);
return tuple;
}
private Tuple createSubAlarmStateChangeTuple(String alarmId, final SubAlarm subAlarm) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields("alarmId", "subAlarm");
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(alarmId, subAlarm), tupleParam);
return tuple;
}
private class MockAlarmThreshholdBolt extends AlarmThresholdingBolt {
private static final long serialVersionUID = 1L;
public MockAlarmThreshholdBolt(AlarmDAO alarmDAO, AlarmEventForwarder alarmEventForwarder) {
super(alarmDAO, alarmEventForwarder);
    }

    @Override
    protected long getTimestamp() {
      // Have to keep the time stamp constant so JSON comparison works
      return 1395587091;
    }
  }
}

  /**
   * Create a simple Alarm with one sub expression.
   * Send a SubAlarm with state set to ALARM.
   * Ensure that the Alarm was triggered and sent
   */
public void simpleAlarmCreation() {
final SubAlarm subAlarm = subAlarms.get(0);
final String alarmId = alarm.getId();
when(alarmDAO.findById(alarmId)).thenReturn(alarm);
emitSubAlarmStateChange(alarmId, subAlarm, AlarmState.ALARM);
for (int i = 1; i < subAlarms.size(); i++) {
emitSubAlarmStateChange(alarmId, subAlarms.get(i), AlarmState.OK);
}
final String alarmJson = "{\"alarm-transitioned\":{\"tenantId\":\"" + tenantId + "\"," +
"\"alarmId\":\"111111112222222222233333333334\",\"alarmName\":\"Test CPU Alarm\"," +
"\"alarmDescription\":\"Description of Alarm\",\"oldState\":\"OK\",\"newState\":\"ALARM\"," +
"\"actionsEnabled\":true," +
"\"stateChangeReason\":\"Thresholds were exceeded for the sub-alarms: [" + subAlarm.getExpression().getExpression() + "]\"," +
"\"timestamp\":1395587091}}";
verify(alarmEventForwarder, times(1)).send(ALERTS_EXCHANGE, ALERT_ROUTING_KEY, alarmJson);
verify(alarmDAO, times(1)).updateState(alarmId, AlarmState.ALARM);
// Now clear the alarm and ensure another notification gets sent out
subAlarm.setState(AlarmState.OK);
final Tuple clearTuple = createSubAlarmStateChangeTuple(alarmId, subAlarm);
bolt.execute(clearTuple);
verify(collector, times(1)).ack(clearTuple);
final String okJson = "{\"alarm-transitioned\":{\"tenantId\":\"" + tenantId + "\"," +
"\"alarmId\":\"111111112222222222233333333334\",\"alarmName\":\"Test CPU Alarm\"," +
"\"alarmDescription\":\"Description of Alarm\",\"oldState\":\"ALARM\",\"newState\":\"OK\"," +
"\"actionsEnabled\":true," +
"\"stateChangeReason\":\"The alarm threshold(s) have not been exceeded\",\"timestamp\":1395587091}}";
verify(alarmEventForwarder, times(1)).send(ALERTS_EXCHANGE, ALERT_ROUTING_KEY, okJson);
verify(alarmDAO, times(1)).updateState(alarmId, AlarmState.OK);
}
public void simpleAlarmUpdate() {
String alarmId = setUpInitialAlarm();
// Now send an AlarmUpdatedEvent
final Map<String, AlarmSubExpression> empty = new HashMap<>();
final String newName = "New Name";
final String newDescription = "New Description";
final AlarmState newState = AlarmState.OK;
boolean newEnabled = false;
final AlarmUpdatedEvent event = new AlarmUpdatedEvent(tenantId, alarmId, newName, newDescription, alarm.getAlarmExpression().getExpression(),
alarm.getState(), newState, newEnabled, empty, empty, empty, empty);
final Tuple updateTuple = createAlarmUpdateTuple(event);
bolt.execute(updateTuple);
verify(collector, times(1)).ack(updateTuple);
assertEquals(alarm.getName(), newName);
assertEquals(alarm.getState(), newState);
assertEquals(alarm.isActionsEnabled(), newEnabled);
}
public void complexAlarmUpdate() {
String alarmId = setUpInitialAlarm();
// Now send an AlarmUpdatedEvent
final Map<String, AlarmSubExpression> newSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> oldSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> changedSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> unchangedSubExpressions = new HashMap<>();
final String newExpression = subExpressions[1] + " or " +
subExpressions[2].replace("max", "avg") + " or " +
"sum(diskio{instance_id=123,device=4242}, 1) > 5000";
final AlarmExpression newAlarmExpression = new AlarmExpression(newExpression);
final SubAlarm newSubAlarm = new SubAlarm(UUID.randomUUID().toString(), alarmId, newAlarmExpression.getSubExpressions().get(2));
newSubExpressions.put(newSubAlarm.getId(), newSubAlarm.getExpression());
final SubAlarm deletedSubAlarm = subAlarms.get(0);
oldSubExpressions.put(deletedSubAlarm.getId(), deletedSubAlarm.getExpression());
final SubAlarm changedSubAlarm = new SubAlarm(subAlarms.get(2).getId(), alarmId, newAlarmExpression.getSubExpressions().get(1));
changedSubExpressions.put(changedSubAlarm.getId(), changedSubAlarm.getExpression());
final SubAlarm unChangedSubAlarm = new SubAlarm(subAlarms.get(1).getId(), alarmId, subAlarms.get(1).getExpression());
unchangedSubExpressions.put(unChangedSubAlarm.getId(), unChangedSubAlarm.getExpression());
emitSubAlarmStateChange(alarmId, changedSubAlarm, AlarmState.OK);
emitSubAlarmStateChange(alarmId, unChangedSubAlarm, AlarmState.OK);
unChangedSubAlarm.setState(AlarmState.OK);
final AlarmUpdatedEvent event = new AlarmUpdatedEvent(tenantId, alarmId, alarm.getName(), alarm.getDescription(), newExpression,
alarm.getState(), alarm.getState(), alarm.isActionsEnabled(), oldSubExpressions, changedSubExpressions, unchangedSubExpressions, newSubExpressions);
final Tuple updateTuple = createAlarmUpdateTuple(event);
bolt.execute(updateTuple);
verify(collector, times(1)).ack(updateTuple);
final Alarm changedAlarm = bolt.alarms.get(alarmId);
assertEquals(changedAlarm.getAlarmExpression(), newAlarmExpression);
assertEquals(changedAlarm.getSubAlarms().size(), 3);
assertEquals(changedAlarm.getSubAlarm(unChangedSubAlarm.getId()), unChangedSubAlarm);
assertEquals(changedAlarm.getSubAlarm(newSubAlarm.getId()), newSubAlarm);
changedSubAlarm.setState(AlarmState.OK);
assertEquals(changedAlarm.getSubAlarm(changedSubAlarm.getId()), changedSubAlarm);
assertEquals(changedSubAlarm.isNoState(), false);
}
private String setUpInitialAlarm() {
final String alarmId = alarm.getId();
when(alarmDAO.findById(alarmId)).thenReturn(alarm);
// Load up the original Alarm
emitSubAlarmStateChange(alarmId, subAlarms.get(0), AlarmState.ALARM);
return alarmId;
}
private void emitSubAlarmStateChange(String alarmId,
final SubAlarm subAlarm, AlarmState state) {
// Create a copy so changing the state doesn't directly update the ones in the bolt
final SubAlarm toEmit = new SubAlarm(subAlarm.getId(), subAlarm.getAlarmId(), subAlarm.getExpression());
toEmit.setState(state);
final Tuple tuple = createSubAlarmStateChangeTuple(alarmId, toEmit);
bolt.execute(tuple);
verify(collector, times(1)).ack(tuple);
}
private Tuple createAlarmUpdateTuple(AlarmUpdatedEvent event) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.ALARM_EVENT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(EventProcessingBolt.UPDATED, event.alarmId, event), tupleParam);
return tuple;
}
private Tuple createSubAlarmStateChangeTuple(String alarmId, final SubAlarm subAlarm) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields("alarmId", "subAlarm");
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(alarmId, subAlarm), tupleParam);
return tuple;
}
private class MockAlarmThreshholdBolt extends AlarmThresholdingBolt {
private static final long serialVersionUID = 1L;
public MockAlarmThreshholdBolt(AlarmDAO alarmDAO,
AlarmEventForwarder alarmEventForwarder) {
super(alarmDAO, alarmEventForwarder);
}
@Override
protected long getTimestamp() {
// Have to keep the time stamp constant so JSON comparison works
return 1395587091;
}
  }
}
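The MockAlarmThreshholdBolt above pins getTimestamp() so the expected alarm-transitioned JSON can be compared as a literal string. A minimal, self-contained sketch of that pattern follows; the class and method names are hypothetical, and the payload is abridged from the ones asserted in the tests above.

public class FixedClockJsonExample {
  // 1395587091 is the constant returned by the mock's getTimestamp() above.
  private static final long FIXED_TIMESTAMP = 1395587091L;

  // Building the expected payload with String.format keeps the long literal readable.
  public static String expectedTransitionJson(String tenantId, String alarmId, String oldState,
      String newState, String reason) {
    return String.format(
        "{\"alarm-transitioned\":{\"tenantId\":\"%s\",\"alarmId\":\"%s\","
            + "\"oldState\":\"%s\",\"newState\":\"%s\",\"stateChangeReason\":\"%s\","
            + "\"timestamp\":%d}}",
        tenantId, alarmId, oldState, newState, reason, FIXED_TIMESTAMP);
  }
}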

View File

@ -14,35 +14,13 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon.infrastructure.thresholding; package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times; import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Sets;
import com.hpcloud.mon.common.event.AlarmCreatedEvent; import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent; import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent; import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
@ -55,212 +33,246 @@ import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm; import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.streaming.storm.Streams; import com.hpcloud.streaming.storm.Streams;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Sets;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
@Test @Test
public class EventProcessingBoltTest { public class EventProcessingBoltTest {
private static final String TENANT_ID = "AAAAABBBBBBCCCCC"; private static final String TENANT_ID = "AAAAABBBBBBCCCCC";
private EventProcessingBolt bolt; private EventProcessingBolt bolt;
private OutputCollector collector; private OutputCollector collector;
private AlarmExpression alarmExpression; private AlarmExpression alarmExpression;
private Alarm alarm; private Alarm alarm;
private List<SubAlarm> subAlarms; private List<SubAlarm> subAlarms;
@BeforeMethod @BeforeMethod
protected void beforeMethod() { protected void beforeMethod() {
collector = mock(OutputCollector.class); collector = mock(OutputCollector.class);
bolt = new EventProcessingBolt(); bolt = new EventProcessingBolt();
final Map<String, String> config = new HashMap<>(); final Map<String, String> config = new HashMap<>();
final TopologyContext context = mock(TopologyContext.class); final TopologyContext context = mock(TopologyContext.class);
bolt.prepare(config, context, collector); bolt.prepare(config, context, collector);
final String alarmId = "111111112222222222233333333334"; final String alarmId = "111111112222222222233333333334";
final String name = "Test CPU Alarm"; final String name = "Test CPU Alarm";
final String description = "Description of " + name; final String description = "Description of " + name;
final String expression = "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 " + final String expression =
"and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 " + "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 "
"and max(hpcs.compute.load{instance_id=123,device=42}) > 5"; + "and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 "
alarmExpression = new AlarmExpression(expression); + "and max(hpcs.compute.load{instance_id=123,device=42}) > 5";
subAlarms = createSubAlarms(alarmId, alarmExpression); alarmExpression = new AlarmExpression(expression);
alarm = new Alarm(alarmId, TENANT_ID, name, description, alarmExpression, subAlarms, subAlarms = createSubAlarms(alarmId, alarmExpression);
AlarmState.UNDETERMINED, Boolean.TRUE); alarm =
new Alarm(alarmId, TENANT_ID, name, description, alarmExpression, subAlarms,
AlarmState.UNDETERMINED, Boolean.TRUE);
}
private List<SubAlarm> createSubAlarms(final String alarmId,
final AlarmExpression alarmExpression, String... ids) {
final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
final List<SubAlarm> subAlarms = new ArrayList<SubAlarm>(subExpressions.size());
for (int i = 0; i < subExpressions.size(); i++) {
final String id;
if (i >= ids.length) {
id = UUID.randomUUID().toString();
} else {
id = ids[i];
}
final SubAlarm subAlarm = new SubAlarm(id, alarmId, subExpressions.get(i));
subAlarms.add(subAlarm);
}
return subAlarms;
}
public void testAlarmCreatedEvent() {
final Map<String, AlarmSubExpression> expressions = createAlarmSubExpressionMap(alarm);
final AlarmCreatedEvent event =
new AlarmCreatedEvent(alarm.getTenantId(), alarm.getId(), alarm.getName(), alarm
.getAlarmExpression().getExpression(), expressions);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
for (final SubAlarm subAlarm : subAlarms) {
verifyAddedSubAlarm(subAlarm);
}
verify(collector, times(1)).ack(tuple);
}
private Tuple createTuple(final Object event) {
MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields("event");
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(event), tupleParam);
return tuple;
}
public void testAlarmDeletedEvent() {
final Map<String, MetricDefinition> metricDefinitions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
metricDefinitions.put(subAlarm.getId(), subAlarm.getExpression().getMetricDefinition());
}
final AlarmDeletedEvent event =
new AlarmDeletedEvent(alarm.getTenantId(), alarm.getId(), metricDefinitions);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
for (final SubAlarm subAlarm : subAlarms) {
verifyDeletedSubAlarm(subAlarm);
}
verify(collector, times(1)).emit(EventProcessingBolt.ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.DELETED, event.alarmId, event));
verify(collector, times(1)).ack(tuple);
}
private void verifyDeletedSubAlarm(final SubAlarm subAlarm) {
verify(collector, times(1)).emit(
EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.DELETED, new MetricDefinitionAndTenantId(subAlarm
.getExpression().getMetricDefinition(), TENANT_ID), subAlarm.getId()));
}
public static AlarmUpdatedEvent createAlarmUpdatedEvent(final Alarm alarm,
final AlarmState newState, final AlarmExpression updatedAlarmExpression,
List<SubAlarm> updatedSubAlarms) {
final Map<String, AlarmSubExpression> oldAlarmSubExpressions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
oldAlarmSubExpressions.put(subAlarm.getId(), subAlarm.getExpression());
}
BiMap<String, AlarmSubExpression> oldExpressions = HashBiMap.create(oldAlarmSubExpressions);
Set<AlarmSubExpression> oldSet = oldExpressions.inverse().keySet();
Set<AlarmSubExpression> newSet = new HashSet<>();
for (final SubAlarm subAlarm : updatedSubAlarms) {
newSet.add(subAlarm.getExpression());
} }
private List<SubAlarm> createSubAlarms(final String alarmId, // Identify old or changed expressions
final AlarmExpression alarmExpression, Set<AlarmSubExpression> oldOrChangedExpressions =
String ... ids) { new HashSet<>(Sets.difference(oldSet, newSet));
final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
final List<SubAlarm> subAlarms = new ArrayList<SubAlarm>(subExpressions.size()); // Identify new or changed expressions
for (int i = 0; i < subExpressions.size(); i++) { Set<AlarmSubExpression> newOrChangedExpressions =
final String id; new HashSet<>(Sets.difference(newSet, oldSet));
if (i >= ids.length) {
id = UUID.randomUUID().toString(); // Find changed expressions
} Map<String, AlarmSubExpression> changedExpressions = new HashMap<>();
else { for (Iterator<AlarmSubExpression> oldIt = oldOrChangedExpressions.iterator(); oldIt.hasNext();) {
id = ids[i]; AlarmSubExpression oldExpr = oldIt.next();
} for (Iterator<AlarmSubExpression> newIt = newOrChangedExpressions.iterator(); newIt.hasNext();) {
final SubAlarm subAlarm = new SubAlarm(id, alarmId, subExpressions.get(i)); AlarmSubExpression newExpr = newIt.next();
subAlarms.add(subAlarm); if (sameKeyFields(oldExpr, newExpr)) {
oldIt.remove();
newIt.remove();
changedExpressions.put(oldExpressions.inverse().get(oldExpr), newExpr);
break;
} }
return subAlarms; }
} }
public void testAlarmCreatedEvent() { BiMap<String, AlarmSubExpression> unchangedExpressions = HashBiMap.create(oldExpressions);
final Map<String, AlarmSubExpression> expressions = createAlarmSubExpressionMap(alarm); unchangedExpressions.values().removeAll(oldOrChangedExpressions);
final AlarmCreatedEvent event = new AlarmCreatedEvent(alarm.getTenantId(), alarm.getId(), unchangedExpressions.keySet().removeAll(changedExpressions.keySet());
alarm.getName(), alarm.getAlarmExpression().getExpression(), expressions);
final Tuple tuple = createTuple(event); // Remove old sub expressions
bolt.execute(tuple); oldExpressions.values().retainAll(oldOrChangedExpressions);
for (final SubAlarm subAlarm : subAlarms) {
verifyAddedSubAlarm(subAlarm); // Create IDs for new expressions
Map<String, AlarmSubExpression> newExpressions = new HashMap<>();
for (AlarmSubExpression expression : newOrChangedExpressions) {
for (final SubAlarm subAlarm : updatedSubAlarms) {
if (subAlarm.getExpression().equals(expression)) {
newExpressions.put(subAlarm.getId(), expression);
} }
verify(collector, times(1)).ack(tuple); }
} }
private Tuple createTuple(final Object event) { final AlarmUpdatedEvent event =
MkTupleParam tupleParam = new MkTupleParam(); new AlarmUpdatedEvent(alarm.getTenantId(), alarm.getId(), alarm.getName(),
tupleParam.setFields("event"); alarm.getDescription(), updatedAlarmExpression.getExpression(), newState,
tupleParam.setStream(Streams.DEFAULT_STREAM_ID); alarm.getState(), true, oldExpressions, changedExpressions, unchangedExpressions,
final Tuple tuple = Testing.testTuple(Arrays.asList(event), tupleParam); newExpressions);
return tuple; return event;
}
/**
* Returns whether all of the fields of {@code a} and {@code b} are the same except the operator
* and threshold.
*/
private static boolean sameKeyFields(AlarmSubExpression a, AlarmSubExpression b) {
return a.getMetricDefinition().equals(b.getMetricDefinition())
&& a.getFunction().equals(b.getFunction()) && a.getPeriod() == b.getPeriod()
&& a.getPeriods() == b.getPeriods();
}
public void testAlarmUpdatedEvent() {
final String updatedExpression =
"avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 "
+ "and max(hpcs.compute.mem{instance_id=123,device=42}) > 90 "
+ "and max(hpcs.compute.newLoad{instance_id=123,device=42}) > 5";
final AlarmExpression updatedAlarmExpression = new AlarmExpression(updatedExpression);
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
updatedSubAlarms.add(subAlarms.get(0));
updatedSubAlarms.add(new SubAlarm(subAlarms.get(1).getId(), alarm.getId(),
updatedAlarmExpression.getSubExpressions().get(1)));
updatedSubAlarms.add(new SubAlarm(UUID.randomUUID().toString(), alarm.getId(),
updatedAlarmExpression.getSubExpressions().get(2)));
final AlarmUpdatedEvent event =
createAlarmUpdatedEvent(alarm, alarm.getState(), updatedAlarmExpression, updatedSubAlarms);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
verify(collector, times(1)).ack(tuple);
verifyDeletedSubAlarm(subAlarms.get(2));
verifyUpdatedSubAlarm(updatedSubAlarms.get(1));
verifyAddedSubAlarm(updatedSubAlarms.get(2));
verify(collector, times(1)).emit(EventProcessingBolt.ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.UPDATED, event.alarmId, event));
}
private void verifyAddedSubAlarm(final SubAlarm subAlarm) {
sendSubAlarm(subAlarm, EventProcessingBolt.CREATED);
}
private void verifyUpdatedSubAlarm(final SubAlarm subAlarm) {
sendSubAlarm(subAlarm, EventProcessingBolt.UPDATED);
}
private void sendSubAlarm(final SubAlarm subAlarm, String eventType) {
verify(collector, times(1)).emit(
EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID,
new Values(eventType, new MetricDefinitionAndTenantId(subAlarm.getExpression()
.getMetricDefinition(), TENANT_ID), subAlarm));
}
private static Map<String, AlarmSubExpression> createAlarmSubExpressionMap(Alarm alarm) {
final Map<String, AlarmSubExpression> oldAlarmSubExpressions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
oldAlarmSubExpressions.put(subAlarm.getId(), subAlarm.getExpression());
} }
return oldAlarmSubExpressions;
public void testAlarmDeletedEvent() { }
final Map<String, MetricDefinition> metricDefinitions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
metricDefinitions.put(subAlarm.getId(), subAlarm.getExpression().getMetricDefinition());
}
final AlarmDeletedEvent event = new AlarmDeletedEvent(alarm.getTenantId(), alarm.getId(),
metricDefinitions);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
for (final SubAlarm subAlarm : subAlarms) {
verifyDeletedSubAlarm(subAlarm);
}
verify(collector, times(1)).emit(EventProcessingBolt.ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.DELETED, event.alarmId, event));
verify(collector, times(1)).ack(tuple);
}
private void verifyDeletedSubAlarm(final SubAlarm subAlarm) {
verify(collector, times(1)).emit(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.DELETED,
new MetricDefinitionAndTenantId(
subAlarm.getExpression().getMetricDefinition(), TENANT_ID), subAlarm.getId()));
}
public static AlarmUpdatedEvent createAlarmUpdatedEvent(final Alarm alarm,
final AlarmState newState,
final AlarmExpression updatedAlarmExpression,
List<SubAlarm> updatedSubAlarms) {
final Map<String, AlarmSubExpression> oldAlarmSubExpressions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms())
oldAlarmSubExpressions.put(subAlarm.getId(), subAlarm.getExpression());
BiMap<String, AlarmSubExpression> oldExpressions = HashBiMap.create(oldAlarmSubExpressions);
Set<AlarmSubExpression> oldSet = oldExpressions.inverse().keySet();
Set<AlarmSubExpression> newSet = new HashSet<>();
for (final SubAlarm subAlarm : updatedSubAlarms)
newSet.add(subAlarm.getExpression());
// Identify old or changed expressions
Set<AlarmSubExpression> oldOrChangedExpressions = new HashSet<>(Sets.difference(oldSet, newSet));
// Identify new or changed expressions
Set<AlarmSubExpression> newOrChangedExpressions = new HashSet<>(Sets.difference(newSet, oldSet));
// Find changed expressions
Map<String, AlarmSubExpression> changedExpressions = new HashMap<>();
for (Iterator<AlarmSubExpression> oldIt = oldOrChangedExpressions.iterator(); oldIt.hasNext();) {
AlarmSubExpression oldExpr = oldIt.next();
for (Iterator<AlarmSubExpression> newIt = newOrChangedExpressions.iterator(); newIt.hasNext();) {
AlarmSubExpression newExpr = newIt.next();
if (sameKeyFields(oldExpr, newExpr)) {
oldIt.remove();
newIt.remove();
changedExpressions.put(oldExpressions.inverse().get(oldExpr), newExpr);
break;
}
}
}
BiMap<String, AlarmSubExpression> unchangedExpressions = HashBiMap.create(oldExpressions);
unchangedExpressions.values().removeAll(oldOrChangedExpressions);
unchangedExpressions.keySet().removeAll(changedExpressions.keySet());
// Remove old sub expressions
oldExpressions.values().retainAll(oldOrChangedExpressions);
// Create IDs for new expressions
Map<String, AlarmSubExpression> newExpressions = new HashMap<>();
for (AlarmSubExpression expression : newOrChangedExpressions)
for (final SubAlarm subAlarm : updatedSubAlarms)
if (subAlarm.getExpression().equals(expression))
newExpressions.put(subAlarm.getId(), expression);
final AlarmUpdatedEvent event = new AlarmUpdatedEvent(alarm.getTenantId(), alarm.getId(),
alarm.getName(), alarm.getDescription(), updatedAlarmExpression.getExpression(), newState, alarm.getState(),
true, oldExpressions,
changedExpressions, unchangedExpressions, newExpressions);
return event;
}
/**
* Returns whether all of the fields of {@code a} and {@code b} are the same except the operator
* and threshold.
*/
private static boolean sameKeyFields(AlarmSubExpression a, AlarmSubExpression b) {
return a.getMetricDefinition().equals(b.getMetricDefinition())
&& a.getFunction().equals(b.getFunction()) && a.getPeriod() == b.getPeriod()
&& a.getPeriods() == b.getPeriods();
}
public void testAlarmUpdatedEvent() {
final String updatedExpression = "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 " +
"and max(hpcs.compute.mem{instance_id=123,device=42}) > 90 " +
"and max(hpcs.compute.newLoad{instance_id=123,device=42}) > 5";
final AlarmExpression updatedAlarmExpression = new AlarmExpression(updatedExpression);
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
updatedSubAlarms.add(subAlarms.get(0));
updatedSubAlarms.add(new SubAlarm(subAlarms.get(1).getId(), alarm.getId(), updatedAlarmExpression.getSubExpressions().get(1)));
updatedSubAlarms.add(new SubAlarm(UUID.randomUUID().toString(), alarm.getId(), updatedAlarmExpression.getSubExpressions().get(2)));
final AlarmUpdatedEvent event = createAlarmUpdatedEvent(alarm, alarm.getState(), updatedAlarmExpression,
updatedSubAlarms);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
verify(collector, times(1)).ack(tuple);
verifyDeletedSubAlarm(subAlarms.get(2));
verifyUpdatedSubAlarm(updatedSubAlarms.get(1));
verifyAddedSubAlarm(updatedSubAlarms.get(2));
verify(collector, times(1)).emit(EventProcessingBolt.ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.UPDATED, event.alarmId, event));
}
private void verifyAddedSubAlarm(final SubAlarm subAlarm) {
sendSubAlarm(subAlarm, EventProcessingBolt.CREATED);
}
private void verifyUpdatedSubAlarm(final SubAlarm subAlarm) {
sendSubAlarm(subAlarm, EventProcessingBolt.UPDATED);
}
private void sendSubAlarm(final SubAlarm subAlarm, String eventType) {
verify(collector, times(1)).emit(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID,
new Values(eventType,
new MetricDefinitionAndTenantId(
subAlarm.getExpression().getMetricDefinition(), TENANT_ID), subAlarm));
}
private static Map<String, AlarmSubExpression> createAlarmSubExpressionMap(
Alarm alarm) {
final Map<String, AlarmSubExpression> oldAlarmSubExpressions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
oldAlarmSubExpressions.put(subAlarm.getId(), subAlarm.getExpression());
}
return oldAlarmSubExpressions;
}
} }
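createAlarmUpdatedEvent above classifies sub-expressions into deleted, added, changed, and unchanged groups by set arithmetic before pairing the "changed" ones via sameKeyFields. Below is a simplified, self-contained sketch of the set-difference step, using plain strings in place of AlarmSubExpression; the expressions shown are made up for illustration.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import com.google.common.collect.Sets;

public class SubExpressionDiffExample {
  public static void main(String[] args) {
    Set<String> oldExprs = new HashSet<>(Arrays.asList("avg(cpu) > 5", "max(mem) > 80"));
    Set<String> newExprs = new HashSet<>(Arrays.asList("avg(cpu) > 5", "max(load) > 5"));

    // Only in the old set: candidates for deletion (or for matching as "changed").
    Set<String> oldOrChanged = new HashSet<>(Sets.difference(oldExprs, newExprs));
    // Only in the new set: candidates for creation (or for matching as "changed").
    Set<String> newOrChanged = new HashSet<>(Sets.difference(newExprs, oldExprs));
    // In both sets: unchanged sub-expressions.
    Set<String> unchanged = new HashSet<>(Sets.intersection(oldExprs, newExprs));

    System.out.println(oldOrChanged + " / " + newOrChanged + " / " + unchanged);
  }
}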

View File

@ -14,38 +14,21 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon.infrastructure.thresholding; package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Matchers.any; import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.never; import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
import static org.mockito.Mockito.reset;
import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull; import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue; import static org.testng.Assert.assertTrue;
import static org.testng.Assert.assertFalse;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Constants;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.model.alarm.AlarmOperator; import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmState; import com.hpcloud.mon.common.model.alarm.AlarmState;
@ -59,6 +42,24 @@ import com.hpcloud.mon.domain.service.SubAlarmDAO;
import com.hpcloud.mon.domain.service.SubAlarmStatsRepository; import com.hpcloud.mon.domain.service.SubAlarmStatsRepository;
import com.hpcloud.streaming.storm.Streams; import com.hpcloud.streaming.storm.Streams;
import backtype.storm.Constants;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@Test @Test
public class MetricAggregationBoltTest { public class MetricAggregationBoltTest {
private static final String TENANT_ID = "42"; private static final String TENANT_ID = "42";
@ -103,11 +104,15 @@ public class MetricAggregationBoltTest {
when(dao.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(new Answer<List<SubAlarm>>() { when(dao.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(new Answer<List<SubAlarm>>() {
@Override @Override
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable { public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
        final MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) invocation.getArguments()[0];
        final List<SubAlarm> result = new ArrayList<>();
        for (final SubAlarm subAlarm : subAlarms)
          if (subAlarm.getExpression().getMetricDefinition().equals(metricDefinitionAndTenantId.metricDefinition))
            result.add(subAlarm);
        final MetricDefinitionAndTenantId metricDefinitionAndTenantId =
            (MetricDefinitionAndTenantId) invocation.getArguments()[0];
        final List<SubAlarm> result = new ArrayList<>();
        for (final SubAlarm subAlarm : subAlarms) {
          if (subAlarm.getExpression().getMetricDefinition()
              .equals(metricDefinitionAndTenantId.metricDefinition)) {
            result.add(subAlarm);
          }
        }
return result; return result;
} }
}); });
@ -121,15 +126,23 @@ public class MetricAggregationBoltTest {
public void shouldAggregateValues() { public void shouldAggregateValues() {
long t1 = System.currentTimeMillis() / 1000; long t1 = System.currentTimeMillis() / 1000;
    bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID), new Metric(metricDef1.name, metricDef1.dimensions, t1, 100));
    bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID), new Metric(metricDef1.name, metricDef1.dimensions, t1, 80));
    bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), new Metric(metricDef2.name, metricDef2.dimensions, t1, 50));
    bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), new Metric(metricDef2.name, metricDef2.dimensions, t1, 40));
    SubAlarmStats alarmData = bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID)).get("123");
    assertEquals(alarmData.getStats().getValue(t1), 90.0);
    alarmData = bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID)).get("456");
    assertEquals(alarmData.getStats().getValue(t1), 45.0);
    bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID), new Metric(
        metricDef1.name, metricDef1.dimensions, t1, 100));
    bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID), new Metric(
        metricDef1.name, metricDef1.dimensions, t1, 80));
    bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), new Metric(
        metricDef2.name, metricDef2.dimensions, t1, 50));
    bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), new Metric(
        metricDef2.name, metricDef2.dimensions, t1, 40));
    SubAlarmStats alarmData =
        bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID))
            .get("123");
    assertEquals(alarmData.getStats().getValue(t1), 90.0);
    alarmData =
        bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID))
            .get("456");
    assertEquals(alarmData.getStats().getValue(t1), 45.0);
} }
@ -154,13 +167,15 @@ public class MetricAggregationBoltTest {
assertEquals(subAlarm3.getState(), AlarmState.UNDETERMINED); assertEquals(subAlarm3.getState(), AlarmState.UNDETERMINED);
verify(collector, times(1)).emit(new Values(subAlarm1.getAlarmId(), subAlarm1)); verify(collector, times(1)).emit(new Values(subAlarm1.getAlarmId(), subAlarm1));
// Have to reset the mock so it can tell the difference when subAlarm2 and subAlarm3 are emitted again. // Have to reset the mock so it can tell the difference when subAlarm2 and subAlarm3 are emitted
// again.
reset(collector); reset(collector);
// Drive subAlarm1 to ALARM // Drive subAlarm1 to ALARM
bolt.execute(createMetricTuple(metricDef1, new Metric(metricDef1, t1, 99))); bolt.execute(createMetricTuple(metricDef1, new Metric(metricDef1, t1, 99)));
// Drive subAlarm2 to ALARM and subAlarm3 to OK since they use the same MetricDefinition // Drive subAlarm2 to ALARM and subAlarm3 to OK since they use the same MetricDefinition
bolt.execute(createMetricTuple(metricDef2, new Metric(metricDef2, System.currentTimeMillis() / 1000, 94))); bolt.execute(createMetricTuple(metricDef2, new Metric(metricDef2,
System.currentTimeMillis() / 1000, 94)));
bolt.execute(tickTuple); bolt.execute(tickTuple);
verify(collector, times(1)).ack(tickTuple); verify(collector, times(1)).ack(tickTuple);
@ -192,8 +207,9 @@ public class MetricAggregationBoltTest {
final MkTupleParam tupleParam = new MkTupleParam(); final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS); tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID); tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
final Tuple resendTuple = Testing.testTuple(Arrays.asList(EventProcessingBolt.RESEND, final Tuple resendTuple =
new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), subAlarm2), tupleParam); Testing.testTuple(Arrays.asList(EventProcessingBolt.RESEND,
new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), subAlarm2), tupleParam);
bolt.execute(resendTuple); bolt.execute(resendTuple);
bolt.execute(createMetricTuple(metricDef2, new Metric(metricDef2, t1, 100))); bolt.execute(createMetricTuple(metricDef2, new Metric(metricDef2, t1, 100)));
@ -241,7 +257,8 @@ public class MetricAggregationBoltTest {
final MkTupleParam tupleParam = new MkTupleParam(); final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setStream(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM); tupleParam.setStream(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM);
final Tuple lagTuple = Testing.testTuple(Arrays.asList(MetricAggregationBolt.METRICS_BEHIND), tupleParam); final Tuple lagTuple =
Testing.testTuple(Arrays.asList(MetricAggregationBolt.METRICS_BEHIND), tupleParam);
bolt.execute(lagTuple); bolt.execute(lagTuple);
verify(collector, times(1)).ack(lagTuple); verify(collector, times(1)).ack(lagTuple);
@ -274,27 +291,30 @@ public class MetricAggregationBoltTest {
tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS); tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID); tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
MetricDefinitionAndTenantId metricDefinitionAndTenantId = new MetricDefinitionAndTenantId(metricDef1, TENANT_ID); MetricDefinitionAndTenantId metricDefinitionAndTenantId =
new MetricDefinitionAndTenantId(metricDef1, TENANT_ID);
assertNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId)); assertNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId));
bolt.execute(Testing.testTuple(Arrays.asList(EventProcessingBolt.CREATED, bolt.execute(Testing.testTuple(Arrays.asList(EventProcessingBolt.CREATED,
metricDefinitionAndTenantId, new SubAlarm("123", "1", subExpr1)), tupleParam)); metricDefinitionAndTenantId, new SubAlarm("123", "1", subExpr1)), tupleParam));
assertNotNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123")); assertNotNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123"));
} }
public void validateMetricDefUpdatedThreshold() { public void validateMetricDefUpdatedThreshold() {
final SubAlarmStats stats = updateEnsureMeasurementsKept(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) >= 80"); final SubAlarmStats stats =
updateEnsureMeasurementsKept(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) >= 80");
assertEquals(stats.getSubAlarm().getExpression().getThreshold(), 80.0); assertEquals(stats.getSubAlarm().getExpression().getThreshold(), 80.0);
} }
public void validateMetricDefUpdatedOperator() { public void validateMetricDefUpdatedOperator() {
final SubAlarmStats stats = updateEnsureMeasurementsKept(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) < 80"); final SubAlarmStats stats =
updateEnsureMeasurementsKept(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) < 80");
assertEquals(stats.getSubAlarm().getExpression().getOperator(), AlarmOperator.LT); assertEquals(stats.getSubAlarm().getExpression().getOperator(), AlarmOperator.LT);
} }
private SubAlarmStats updateEnsureMeasurementsKept(AlarmSubExpression subExpr, private SubAlarmStats updateEnsureMeasurementsKept(AlarmSubExpression subExpr,
String newSubExpression) { String newSubExpression) {
final SubAlarmStats stats = updateSubAlarmsStats(subExpr, newSubExpression); final SubAlarmStats stats = updateSubAlarmsStats(subExpr, newSubExpression);
final double[] values = stats.getStats().getWindowValues(); final double[] values = stats.getStats().getWindowValues();
assertFalse(Double.isNaN(values[0])); // Ensure old measurements weren't flushed assertFalse(Double.isNaN(values[0])); // Ensure old measurements weren't flushed
@ -302,49 +322,53 @@ public class MetricAggregationBoltTest {
} }
public void validateMetricDefReplacedFunction() { public void validateMetricDefReplacedFunction() {
final SubAlarmStats stats = updateEnsureMeasurementsFlushed(subExpr2, "max(hpcs.compute.mem{id=5}, 60) < 80"); final SubAlarmStats stats =
updateEnsureMeasurementsFlushed(subExpr2, "max(hpcs.compute.mem{id=5}, 60) < 80");
assertEquals(stats.getSubAlarm().getExpression().getOperator(), AlarmOperator.LT); assertEquals(stats.getSubAlarm().getExpression().getOperator(), AlarmOperator.LT);
} }
public void validateMetricDefReplacedPeriods() { public void validateMetricDefReplacedPeriods() {
final SubAlarmStats stats = updateEnsureMeasurementsFlushed(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) >= 80 times 7"); final SubAlarmStats stats =
updateEnsureMeasurementsFlushed(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) >= 80 times 7");
assertEquals(stats.getSubAlarm().getExpression().getPeriods(), 7); assertEquals(stats.getSubAlarm().getExpression().getPeriods(), 7);
} }
public void validateMetricDefReplacedPeriod() { public void validateMetricDefReplacedPeriod() {
final SubAlarmStats stats = updateEnsureMeasurementsFlushed(subExpr2, "avg(hpcs.compute.mem{id=5}, 120) >= 80"); final SubAlarmStats stats =
updateEnsureMeasurementsFlushed(subExpr2, "avg(hpcs.compute.mem{id=5}, 120) >= 80");
assertEquals(stats.getSubAlarm().getExpression().getPeriod(), 120); assertEquals(stats.getSubAlarm().getExpression().getPeriod(), 120);
} }
private SubAlarmStats updateEnsureMeasurementsFlushed(AlarmSubExpression subExpr, private SubAlarmStats updateEnsureMeasurementsFlushed(AlarmSubExpression subExpr,
String newSubExpression) { String newSubExpression) {
final SubAlarmStats stats = updateSubAlarmsStats(subExpr, newSubExpression); final SubAlarmStats stats = updateSubAlarmsStats(subExpr, newSubExpression);
final double[] values = stats.getStats().getWindowValues(); final double[] values = stats.getStats().getWindowValues();
assertTrue(Double.isNaN(values[0])); // Ensure old measurements were flushed assertTrue(Double.isNaN(values[0])); // Ensure old measurements were flushed
return stats; return stats;
} }
private SubAlarmStats updateSubAlarmsStats(AlarmSubExpression subExpr, private SubAlarmStats updateSubAlarmsStats(AlarmSubExpression subExpr, String newSubExpression) {
String newSubExpression) {
final MkTupleParam tupleParam = new MkTupleParam(); final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS); tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID); tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
final MetricDefinitionAndTenantId metricDefinitionAndTenantId = new MetricDefinitionAndTenantId(subExpr.getMetricDefinition(), TENANT_ID); final MetricDefinitionAndTenantId metricDefinitionAndTenantId =
new MetricDefinitionAndTenantId(subExpr.getMetricDefinition(), TENANT_ID);
assertNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId)); assertNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId));
bolt.execute(Testing.testTuple(Arrays.asList(EventProcessingBolt.CREATED, bolt.execute(Testing.testTuple(Arrays.asList(EventProcessingBolt.CREATED,
metricDefinitionAndTenantId, new SubAlarm("123", "1", subExpr)), tupleParam)); metricDefinitionAndTenantId, new SubAlarm("123", "1", subExpr)), tupleParam));
final SubAlarmStats oldStats = bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123"); final SubAlarmStats oldStats =
bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123");
assertEquals(oldStats.getSubAlarm().getExpression().getThreshold(), 90.0); assertEquals(oldStats.getSubAlarm().getExpression().getThreshold(), 90.0);
assertTrue(oldStats.getStats().addValue(80.0, System.currentTimeMillis()/1000)); assertTrue(oldStats.getStats().addValue(80.0, System.currentTimeMillis() / 1000));
assertFalse(Double.isNaN(oldStats.getStats().getWindowValues()[0])); assertFalse(Double.isNaN(oldStats.getStats().getWindowValues()[0]));
assertNotNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123")); assertNotNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123"));
final AlarmSubExpression newExpr = AlarmSubExpression.of(newSubExpression); final AlarmSubExpression newExpr = AlarmSubExpression.of(newSubExpression);
bolt.execute(Testing.testTuple(Arrays.asList(EventProcessingBolt.UPDATED, bolt.execute(Testing.testTuple(Arrays.asList(EventProcessingBolt.UPDATED,
metricDefinitionAndTenantId, new SubAlarm("123", "1", newExpr)), tupleParam)); metricDefinitionAndTenantId, new SubAlarm("123", "1", newExpr)), tupleParam));
return bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123"); return bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123");
} }
@ -353,7 +377,8 @@ public class MetricAggregationBoltTest {
MkTupleParam tupleParam = new MkTupleParam(); MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_FIELDS); tupleParam.setFields(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID); tupleParam.setStream(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID);
MetricDefinitionAndTenantId metricDefinitionAndTenantId = new MetricDefinitionAndTenantId(metricDef1, TENANT_ID); MetricDefinitionAndTenantId metricDefinitionAndTenantId =
new MetricDefinitionAndTenantId(metricDef1, TENANT_ID);
bolt.getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId); bolt.getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId);
assertNotNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123")); assertNotNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123"));
@ -365,17 +390,20 @@ public class MetricAggregationBoltTest {
} }
public void shouldGetOrCreateSameMetricData() { public void shouldGetOrCreateSameMetricData() {
SubAlarmStatsRepository data = bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID)); SubAlarmStatsRepository data =
bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID));
assertNotNull(data); assertNotNull(data);
assertEquals(bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID)), data); assertEquals(
bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID)),
data);
} }
private Tuple createMetricTuple(final MetricDefinition metricDef, private Tuple createMetricTuple(final MetricDefinition metricDef, final Metric metric) {
final Metric metric) {
final MkTupleParam tupleParam = new MkTupleParam(); final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(MetricFilteringBolt.FIELDS); tupleParam.setFields(MetricFilteringBolt.FIELDS);
tupleParam.setStream(Streams.DEFAULT_STREAM_ID); tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
return Testing.testTuple(Arrays.asList(new MetricDefinitionAndTenantId(metricDef, TENANT_ID), metric), tupleParam); return Testing.testTuple(
Arrays.asList(new MetricDefinitionAndTenantId(metricDef, TENANT_ID), metric), tupleParam);
} }
private static class MockMetricAggregationBolt extends MetricAggregationBolt { private static class MockMetricAggregationBolt extends MetricAggregationBolt {
@ -384,13 +412,14 @@ public class MetricAggregationBoltTest {
private long currentTime; private long currentTime;
public MockMetricAggregationBolt(SubAlarmDAO subAlarmDAO) { public MockMetricAggregationBolt(SubAlarmDAO subAlarmDAO) {
super(subAlarmDAO); super(subAlarmDAO);
} }
@Override @Override
protected long currentTimeSeconds() { protected long currentTimeSeconds() {
if (currentTime != 0) if (currentTime != 0) {
return currentTime; return currentTime;
}
return super.currentTimeSeconds(); return super.currentTimeSeconds();
} }
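MockMetricAggregationBolt above overrides currentTimeSeconds() (and MockMetricFilteringBolt in the next file overrides getCurrentTime()) so the tests can advance time explicitly. A generic sketch of that overridable-clock pattern, using hypothetical class names:

class TimeAwareComponent {
  // Production code reads the clock only through this hook.
  protected long currentTimeSeconds() {
    return System.currentTimeMillis() / 1000;
  }
}

class FixedTimeComponent extends TimeAwareComponent {
  private long currentTime;

  public void setCurrentTime(long currentTime) {
    this.currentTime = currentTime;
  }

  @Override
  protected long currentTimeSeconds() {
    // Fall back to the real clock until a test sets an explicit time.
    return currentTime != 0 ? currentTime : super.currentTimeSeconds();
  }
}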

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package com.hpcloud.mon.infrastructure.thresholding; package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
@ -23,24 +24,6 @@ import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.mockito.verification.VerificationMode;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.model.alarm.AlarmExpression; import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression; import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.Metric; import com.hpcloud.mon.common.model.metric.Metric;
@ -51,277 +34,324 @@ import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmMetricDefinition; import com.hpcloud.mon.domain.service.SubAlarmMetricDefinition;
import com.hpcloud.streaming.storm.Streams; import com.hpcloud.streaming.storm.Streams;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.mockito.verification.VerificationMode;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
@Test @Test
public class MetricFilteringBoltTest { public class MetricFilteringBoltTest {
private List<SubAlarm> subAlarms; private List<SubAlarm> subAlarms;
private List<SubAlarm> duplicateMetricSubAlarms; private List<SubAlarm> duplicateMetricSubAlarms;
private final static String TEST_TENANT_ID = "42"; private final static String TEST_TENANT_ID = "42";
  private long metricTimestamp = System.currentTimeMillis()/1000; // Make sure the metric timestamp is always unique
  private long metricTimestamp = System.currentTimeMillis() / 1000; // Make sure the metric
                                                                    // timestamp is always unique
@BeforeMethod @BeforeMethod
protected void beforeMethod() { protected void beforeMethod() {
final String expression = "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 " + final String expression =
"and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 " + "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 "
"and max(hpcs.compute.load{instance_id=123,device=42}) > 5"; + "and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 "
subAlarms = createSubAlarmsForAlarm("111111112222222222233333333334", expression); + "and max(hpcs.compute.load{instance_id=123,device=42}) > 5";
subAlarms = createSubAlarmsForAlarm("111111112222222222233333333334", expression);
duplicateMetricSubAlarms = createSubAlarmsForAlarm(UUID.randomUUID().toString(), duplicateMetricSubAlarms =
"max(hpcs.compute.load{instance_id=123,device=42}) > 8"); createSubAlarmsForAlarm(UUID.randomUUID().toString(),
subAlarms.addAll(duplicateMetricSubAlarms); "max(hpcs.compute.load{instance_id=123,device=42}) > 8");
subAlarms.addAll(duplicateMetricSubAlarms);
}
private List<SubAlarm> createSubAlarmsForAlarm(final String alarmId, final String expression) {
final AlarmExpression alarmExpression = new AlarmExpression(expression);
final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
final List<SubAlarm> result = new ArrayList<SubAlarm>(subExpressions.size());
for (int i = 0; i < subExpressions.size(); i++) {
final SubAlarm subAlarm =
new SubAlarm(UUID.randomUUID().toString(), alarmId, subExpressions.get(i));
result.add(subAlarm);
}
return result;
}
private MockMetricFilteringBolt createBolt(
List<SubAlarmMetricDefinition> initialMetricDefinitions, final OutputCollector collector,
boolean willEmit) {
final MetricDefinitionDAO dao = mock(MetricDefinitionDAO.class);
when(dao.findForAlarms()).thenReturn(initialMetricDefinitions);
MockMetricFilteringBolt bolt = new MockMetricFilteringBolt(dao);
final Map<String, String> config = new HashMap<>();
final TopologyContext context = mock(TopologyContext.class);
bolt.prepare(config, context, collector);
if (willEmit) {
// Validate the prepare emits the initial Metric Definitions
for (final SubAlarmMetricDefinition metricDefinition : initialMetricDefinitions) {
verify(collector, times(1)).emit(
new Values(metricDefinition.getMetricDefinitionAndTenantId(), null));
}
}
return bolt;
}
public void testLagging() {
final OutputCollector collector = mock(OutputCollector.class);
final MockMetricFilteringBolt bolt =
createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector, true);
final long prepareTime = bolt.getCurrentTime();
final MetricDefinition metricDefinition =
subAlarms.get(0).getExpression().getMetricDefinition();
final long oldestTimestamp = prepareTime - MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT;
final Tuple lateMetricTuple =
createMetricTuple(metricDefinition, oldestTimestamp, new Metric(metricDefinition,
oldestTimestamp, 42.0));
bolt.execute(lateMetricTuple);
verify(collector, times(1)).ack(lateMetricTuple);
bolt.setCurrentTime(prepareTime + MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT);
final Tuple lateMetricTuple2 =
createMetricTuple(metricDefinition, prepareTime, new Metric(metricDefinition, prepareTime,
42.0));
bolt.execute(lateMetricTuple2);
verify(collector, times(1)).ack(lateMetricTuple2);
verify(collector, times(1)).emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Values(MetricAggregationBolt.METRICS_BEHIND));
bolt.setCurrentTime(prepareTime + 2 * MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT);
long caughtUpTimestamp = bolt.getCurrentTime() - MetricFilteringBolt.MIN_LAG_VALUE_DEFAULT;
final Tuple metricTuple =
createMetricTuple(metricDefinition, caughtUpTimestamp, new Metric(metricDefinition,
caughtUpTimestamp, 42.0));
bolt.execute(metricTuple);
// Metrics are caught up so there should not be another METRICS_BEHIND message
verify(collector, times(1)).ack(metricTuple);
verify(collector, times(1)).emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Values(MetricAggregationBolt.METRICS_BEHIND));
}
public void testLaggingTooLong() {
final OutputCollector collector = mock(OutputCollector.class);
final MockMetricFilteringBolt bolt =
createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector, true);
long prepareTime = bolt.getCurrentTime();
final MetricDefinition metricDefinition =
subAlarms.get(0).getExpression().getMetricDefinition();
// Fake sending metrics for MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT *
// MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT seconds
boolean first = true;
// Need to send MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT + 1 metrics because the lag message
// is not
// output on the first one.
for (int i = 0; i < MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT + 1; i++) {
final Tuple lateMetricTuple =
createMetricTuple(metricDefinition, prepareTime, new Metric(metricDefinition,
prepareTime, 42.0));
bolt.setCurrentTime(prepareTime + MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT);
bolt.execute(lateMetricTuple);
verify(collector, times(1)).ack(lateMetricTuple);
if (!first) {
verify(collector, times(i)).emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Values(MetricAggregationBolt.METRICS_BEHIND));
}
first = false;
prepareTime = bolt.getCurrentTime();
}
// One more
long timestamp = bolt.getCurrentTime() - MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT;
final Tuple metricTuple =
createMetricTuple(metricDefinition, timestamp,
new Metric(metricDefinition, timestamp, 42.0));
bolt.execute(metricTuple);
verify(collector, times(1)).ack(metricTuple);
// Won't be any more of these
verify(collector, times(MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT)).emit(
MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Values(MetricAggregationBolt.METRICS_BEHIND));
}
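testLagging and testLaggingTooLong above drive metrics whose timestamps trail the bolt's clock and count METRICS_BEHIND emissions on the control stream. A rough sketch of the kind of lag check they exercise follows; this is an assumption for illustration, not the actual MetricFilteringBolt implementation, and only the constant names mirror the ones referenced above.

class LagTracker {
  private final long lagMessagePeriod; // plays the role of LAG_MESSAGE_PERIOD_DEFAULT
  private final long minLagValue;      // plays the role of MIN_LAG_VALUE_DEFAULT
  private final int maxLagMessages;    // plays the role of MAX_LAG_MESSAGES_DEFAULT
  private long lastMessageTime;
  private int messagesSent;

  LagTracker(long lagMessagePeriod, long minLagValue, int maxLagMessages, long now) {
    this.lagMessagePeriod = lagMessagePeriod;
    this.minLagValue = minLagValue;
    this.maxLagMessages = maxLagMessages;
    this.lastMessageTime = now;
  }

  // Returns true when a METRICS_BEHIND-style message should be emitted for this metric.
  boolean onMetric(long metricTimestamp, long now) {
    final boolean lagging = (now - metricTimestamp) > minLagValue;
    final boolean periodElapsed = (now - lastMessageTime) >= lagMessagePeriod;
    if (lagging && periodElapsed && messagesSent < maxLagMessages) {
      lastMessageTime = now;
      messagesSent++;
      return true;
    }
    return false;
  }
}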
  private static class MockMetricFilteringBolt extends MetricFilteringBolt {
    private static final long serialVersionUID = 1L;
    private long currentTimeMillis = System.currentTimeMillis();

    public MockMetricFilteringBolt(MetricDefinitionDAO metricDefDAO) {
      super(metricDefDAO);
    }

    @Override
    protected long getCurrentTime() {
      return currentTimeMillis;
    }

    public void setCurrentTime(final long currentTimeMillis) {
      this.currentTimeMillis = currentTimeMillis;
    }
  }

  private List<SubAlarm> createSubAlarmsForAlarm(final String alarmId, final String expression) {
    final AlarmExpression alarmExpression = new AlarmExpression(expression);
    final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
    final List<SubAlarm> result = new ArrayList<SubAlarm>(subExpressions.size());
    for (int i = 0; i < subExpressions.size(); i++) {
      final SubAlarm subAlarm =
          new SubAlarm(UUID.randomUUID().toString(), alarmId, subExpressions.get(i));
      result.add(subAlarm);
    }
    return result;
  }

  private MockMetricFilteringBolt createBolt(
      List<SubAlarmMetricDefinition> initialMetricDefinitions, final OutputCollector collector,
      boolean willEmit) {
    final MetricDefinitionDAO dao = mock(MetricDefinitionDAO.class);
    when(dao.findForAlarms()).thenReturn(initialMetricDefinitions);
    MockMetricFilteringBolt bolt = new MockMetricFilteringBolt(dao);

    final Map<String, String> config = new HashMap<>();
    final TopologyContext context = mock(TopologyContext.class);
    bolt.prepare(config, context, collector);

    if (willEmit) {
      // Validate the prepare emits the initial Metric Definitions
      for (final SubAlarmMetricDefinition metricDefinition : initialMetricDefinitions) {
        verify(collector, times(1)).emit(
            new Values(metricDefinition.getMetricDefinitionAndTenantId(), null));
      }
    }
    return bolt;
  }

  public void testNoInitial() {
    MetricFilteringBolt.clearMetricDefinitions();
    final OutputCollector collector1 = mock(OutputCollector.class);
    final MetricFilteringBolt bolt1 =
        createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector1, true);

    final OutputCollector collector2 = mock(OutputCollector.class);
    final MetricFilteringBolt bolt2 =
        createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector2, false);

    // First ensure metrics don't pass the filter
    verifyMetricFiltered(collector1, bolt1);
    verifyMetricFiltered(collector2, bolt2);

    sendMetricCreation(collector1, bolt1);
    sendMetricCreation(collector2, bolt2);

    testDeleteSubAlarms(bolt1, collector1, bolt2, collector2);
  }

  private void sendMetricCreation(final OutputCollector collector1, final MetricFilteringBolt bolt1) {
    for (final SubAlarm subAlarm : subAlarms) {
      final Tuple tuple = createMetricDefinitionTuple(subAlarm);
      bolt1.execute(tuple);
      verify(collector1, times(1)).ack(tuple);
    }
  }

  private void verifyMetricFiltered(final OutputCollector collector1,
      final MetricFilteringBolt bolt1) {
    sendMetricsAndVerify(collector1, bolt1, never());
  }

  private void verifyMetricPassed(final OutputCollector collector1, final MetricFilteringBolt bolt1) {
    sendMetricsAndVerify(collector1, bolt1, times(1));
  }

  private void sendMetricsAndVerify(final OutputCollector collector1,
      final MetricFilteringBolt bolt1, VerificationMode howMany) {
    for (final SubAlarm subAlarm : subAlarms) {
      // First do a MetricDefinition that is an exact match
      final MetricDefinition metricDefinition = subAlarm.getExpression().getMetricDefinition();
      final Tuple exactTuple =
          createMetricTuple(metricDefinition, metricTimestamp++, new Metric(metricDefinition,
              metricTimestamp, 42.0));
      bolt1.execute(exactTuple);
      verify(collector1, times(1)).ack(exactTuple);
      verify(collector1, howMany).emit(new Values(exactTuple.getValue(0), exactTuple.getValue(2)));

      // Now do a MetricDefinition with an extra dimension that should still match the SubAlarm
      final Map<String, String> extraDimensions = new HashMap<>(metricDefinition.dimensions);
      extraDimensions.put("group", "group_a");
      final MetricDefinition inexactMetricDef =
          new MetricDefinition(metricDefinition.name, extraDimensions);
      Metric inexactMetric = new Metric(inexactMetricDef, metricTimestamp, 42.0);
      final Tuple inexactTuple =
          createMetricTuple(metricDefinition, metricTimestamp++, inexactMetric);
      bolt1.execute(inexactTuple);
      verify(collector1, times(1)).ack(inexactTuple);
      // We want the MetricDefinitionAndTenantId from the exact tuple, but the inexactMetric
      verify(collector1, howMany).emit(new Values(exactTuple.getValue(0), inexactMetric));
    }
  }

  public void testAllInitial() {
    MetricFilteringBolt.clearMetricDefinitions();
    final List<SubAlarmMetricDefinition> initialMetricDefinitions =
        new ArrayList<>(subAlarms.size());
    for (final SubAlarm subAlarm : subAlarms) {
      initialMetricDefinitions.add(new SubAlarmMetricDefinition(subAlarm.getId(),
          new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(),
              TEST_TENANT_ID)));
    }

    final OutputCollector collector1 = mock(OutputCollector.class);
    final MetricFilteringBolt bolt1 = createBolt(initialMetricDefinitions, collector1, true);

    final OutputCollector collector2 = mock(OutputCollector.class);
    final MetricFilteringBolt bolt2 = createBolt(initialMetricDefinitions, collector2, false);

    testDeleteSubAlarms(bolt1, collector1, bolt2, collector2);
  }

  private void testDeleteSubAlarms(MetricFilteringBolt bolt1, OutputCollector collector1,
      MetricFilteringBolt bolt2, OutputCollector collector2) {
    // Now ensure metrics pass the filter
    verifyMetricPassed(collector1, bolt1);
    verifyMetricPassed(collector2, bolt2);

    // Now delete the SubAlarm that duplicated a MetricDefinition
    deleteSubAlarms(bolt1, collector1, duplicateMetricSubAlarms);
    deleteSubAlarms(bolt2, collector2, duplicateMetricSubAlarms);

    // Ensure metrics still pass the filter
    verifyMetricPassed(collector1, bolt1);
    verifyMetricPassed(collector2, bolt2);

    deleteSubAlarms(bolt1, collector1, subAlarms);
    // All MetricDefinitions should be deleted
    assertEquals(MetricFilteringBolt.sizeMetricDefinitions(), 0);
    deleteSubAlarms(bolt2, collector2, subAlarms);

    verifyMetricFiltered(collector1, bolt1);
    verifyMetricFiltered(collector2, bolt2);
  }

  private void deleteSubAlarms(MetricFilteringBolt bolt, OutputCollector collector,
      final List<SubAlarm> otherSubAlarms) {
    for (final SubAlarm subAlarm : otherSubAlarms) {
      final Tuple tuple = createMetricDefinitionDeletionTuple(subAlarm);
      bolt.execute(tuple);
      verify(collector, times(1)).ack(tuple);
    }
  }

  private Tuple createMetricDefinitionTuple(final SubAlarm subAlarm) {
    final MkTupleParam tupleParam = new MkTupleParam();
    tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS);
    tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
    final Tuple tuple =
        Testing.testTuple(Arrays.asList(EventProcessingBolt.CREATED,
            new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(),
                TEST_TENANT_ID), subAlarm), tupleParam);
    return tuple;
  }

  private Tuple createMetricDefinitionDeletionTuple(final SubAlarm subAlarm) {
    final MkTupleParam tupleParam = new MkTupleParam();
    tupleParam.setFields(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_FIELDS);
    tupleParam.setStream(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID);
    final Tuple tuple =
        Testing.testTuple(Arrays.asList(EventProcessingBolt.DELETED,
            new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(),
                TEST_TENANT_ID), subAlarm.getId()), tupleParam);
    return tuple;
  }

  private Tuple createMetricTuple(final MetricDefinition metricDefinition, final long timestamp,
      final Metric metric) {
    final MkTupleParam tupleParam = new MkTupleParam();
    tupleParam.setFields(MetricSpout.FIELDS);
    tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
    final Tuple tuple =
        Testing.testTuple(Arrays.asList(new MetricDefinitionAndTenantId(metricDefinition,
            TEST_TENANT_ID), timestamp, metric), tupleParam);
    return tuple;
  }
}
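The two lag tests above pin down behavior the listing never shows directly: the filtering bolt emits METRICS_BEHIND on the metric-aggregation control stream only while incoming metric timestamps trail its clock by more than MIN_LAG_VALUE_DEFAULT, at most once per LAG_MESSAGE_PERIOD_DEFAULT, never on the first lagging metric, and never more than MAX_LAG_MESSAGES_DEFAULT times in total. The sketch below re-creates that contract from the assertions alone; LagDetector and shouldReportLag are invented names, and the real MetricFilteringBolt may implement the check differently (for example, giving up permanently once the cap is hit).

// Illustrative sketch only: a self-contained re-creation of the lag-reporting behavior that
// MetricFilteringBoltTest asserts. It is NOT the shipped MetricFilteringBolt code.
public class LagDetector {

  private final long lagMessagePeriod; // cf. MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT
  private final long minLagValue;      // cf. MetricFilteringBolt.MIN_LAG_VALUE_DEFAULT
  private final int maxLagMessages;    // cf. MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT

  private long lagFirstSeen = -1;      // when the current stretch of lagging metrics began
  private int lagMessagesSent;

  public LagDetector(long lagMessagePeriod, long minLagValue, int maxLagMessages) {
    this.lagMessagePeriod = lagMessagePeriod;
    this.minLagValue = minLagValue;
    this.maxLagMessages = maxLagMessages;
  }

  // Returns true when the caller should emit METRICS_BEHIND on
  // MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM for a metric with the given
  // timestamp observed at time 'now' (same time unit as the timestamps themselves).
  public boolean shouldReportLag(long metricTimestamp, long now) {
    if (now - metricTimestamp <= minLagValue) {
      lagFirstSeen = -1; // metrics have caught up, so stop reporting until they lag again
      return false;
    }
    if (lagFirstSeen < 0) {
      lagFirstSeen = now; // the first lagging metric never triggers a message by itself
      return false;
    }
    if (now - lagFirstSeen >= lagMessagePeriod && lagMessagesSent < maxLagMessages) {
      lagFirstSeen = now; // a full period must elapse before the next message may be sent
      lagMessagesSent++;
      return true;
    }
    return false;
  }
}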
View File
@ -14,6 +14,7 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.thresholding;

import static org.testng.Assert.assertEquals;
@ -24,39 +25,43 @@ import org.testng.annotations.Test;
@Test
public class PropertyFinderTest {

  private static String PROPERTY_NAME = "com.hpcloud.mon.infrastructure.thresholding.Prop";

  @BeforeMethod
  public void beforeMethod() {
    System.clearProperty(PROPERTY_NAME);
  }

  public void shouldUseNewValue() {
    final int expectedValue = 45;
    System.setProperty(PROPERTY_NAME, String.valueOf(expectedValue));
    assertEquals(expectedValue,
        PropertyFinder.getIntProperty(PROPERTY_NAME, 30, 0, Integer.MAX_VALUE));
  }

  public void shouldUseDefaultValueBecausePropertyNotSet() {
    final int defaultValue = 45;
    assertEquals(defaultValue,
        PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 0, Integer.MAX_VALUE));
  }

  public void shouldUseDefaultValueBecausePropertyNotANumber() {
    final int defaultValue = 45;
    System.setProperty(PROPERTY_NAME, "AAA");
    assertEquals(defaultValue,
        PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 0, Integer.MAX_VALUE));
  }

  public void shouldUseDefaultValueBecausePropertyTooSmall() {
    final int defaultValue = 45;
    System.setProperty(PROPERTY_NAME, "0");
    assertEquals(defaultValue,
        PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 1, Integer.MAX_VALUE));
  }

  public void shouldUseDefaultValueBecausePropertyTooLarge() {
    final int defaultValue = 45;
    System.setProperty(PROPERTY_NAME, "10");
    assertEquals(defaultValue, PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 9, 9));
  }
}
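Taken together, these cases fix the contract of PropertyFinder.getIntProperty without showing its body: the system property is used only when it is set, parses as an integer, and lies within the supplied minimum and maximum; in every other case the default is returned. A minimal sketch consistent with that contract follows; PropertyFinderSketch is a hypothetical stand-in, and the shipped PropertyFinder may differ in details such as logging.

// Illustrative sketch of the contract exercised by PropertyFinderTest, not the shipped code.
public final class PropertyFinderSketch {

  private PropertyFinderSketch() {}

  public static int getIntProperty(String name, int defaultValue, int minValue, int maxValue) {
    final String raw = System.getProperty(name);
    if (raw == null) {
      return defaultValue; // property not set
    }
    try {
      final int value = Integer.parseInt(raw.trim());
      if (value < minValue || value > maxValue) {
        return defaultValue; // outside the allowed range
      }
      return value;
    } catch (NumberFormatException e) {
      return defaultValue; // not a number
    }
  }
}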
View File
@ -14,24 +14,26 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hpcloud.mon.infrastructure.thresholding.deserializer;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;

import java.util.Collections;

import org.testng.annotations.Test;

import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.util.Serialization;

@Test
public class EventDeserializerTest {
  private static final String ALARM_EXPRESSION =
      "avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3";
  private static final String ALARM_NAME = "An Alarm";
  private static final String ALARM_DESCRIPTION = "An Alarm Description";
  private static final String ALARM_ID = "123";
@ -47,8 +49,8 @@ public class EventDeserializerTest {
  }

  public void shouldDeserializeAlarmUpdatedEvent() {
    roundTrip(new AlarmUpdatedEvent(TENANT_ID, ALARM_ID, ALARM_NAME, ALARM_DESCRIPTION,
        ALARM_EXPRESSION, AlarmState.OK, AlarmState.OK, false, null, null, null, null));
  }

  private void roundTrip(Object event) {
@ -57,7 +59,7 @@ public class EventDeserializerTest {
    Object expected = Collections.singletonList(Collections.singletonList(event));
    assertEquals(deserialized, expected);
  }

  public void shouldReturnNullOnDeserializeUnknownEvent() {
    String unknownEventJson = "{\"alarm-foo-deleted\":{\"tenantId\":\"abc\",\"alarmId\":\"123\"}}";
    assertNull(deserializer.deserialize(unknownEventJson.getBytes()));