Fix bug where caches were handled inefficiently with Vertica

Change-Id: I9b7ba3ffc7afabcef48737ab9d2991efb90295d4
This commit is contained in:
Deklan Dieterly 2015-03-31 15:44:04 -06:00
parent c23c10af1a
commit f51ef85404
4 changed files with 97 additions and 43 deletions
java/src/main/java/monasca/persister

@ -21,8 +21,6 @@ import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
import com.codahale.metrics.Counter;
import monasca.common.model.metric.Metric;
import monasca.common.model.metric.MetricEnvelope;
import org.apache.commons.codec.digest.DigestUtils;
import org.slf4j.Logger;
@ -35,6 +33,8 @@ import java.util.TimeZone;
import java.util.TreeMap;
import io.dropwizard.setup.Environment;
import monasca.common.model.metric.Metric;
import monasca.common.model.metric.MetricEnvelope;
import monasca.persister.configuration.PipelineConfig;
import monasca.persister.repository.MetricRepo;
import monasca.persister.repository.Sha1HashId;
@ -145,11 +145,7 @@ public class MetricHandler extends FlushableHandler<MetricEnvelope[]> {
Sha1HashId dimensionsSha1HashId = new Sha1HashId(dimensionIdSha1Hash);
// Add the dimension name/values to the batch.
for (Map.Entry<String, String> entry : preppedDimMap.entrySet()) {
metricRepo
.addDimensionToBatch(dimensionsSha1HashId, entry.getKey(), entry.getValue());
dimensionCounter.inc();
}
metricRepo.addDimensionsToBatch(dimensionsSha1HashId, preppedDimMap);
// Add the definition dimensions to the batch.
StringBuilder
@ -164,6 +160,8 @@ public class MetricHandler extends FlushableHandler<MetricEnvelope[]> {
.addDefinitionDimensionToBatch(definitionDimensionsSha1HashId, definitionSha1HashId,
dimensionsSha1HashId);
definitionDimensionsCounter.inc();
// Add the measurement to the batch.
String timeStamp = simpleDateFormat.format(new Date(metric.getTimestamp()));
double value = metric.getValue();
metricRepo.addMetricToBatch(definitionDimensionsSha1HashId, timeStamp, value,
@ -190,6 +188,7 @@ public class MetricHandler extends FlushableHandler<MetricEnvelope[]> {
String dimValue = dimMap.get(dimName);
if (dimValue != null && !dimValue.isEmpty()) {
newDimMap.put(trunc(dimName, MAX_COLUMN_LENGTH), trunc(dimValue, MAX_COLUMN_LENGTH));
dimensionCounter.inc();
}
}
}

@ -62,6 +62,7 @@ public abstract class InfluxMetricRepo implements MetricRepo {
@Override
public void addMetricToBatch(final Sha1HashId defDimsId, final String timeStamp,
final double value, final Map<String, String> valueMeta) {
final Measurement measurement = new Measurement(defDimsId, timeStamp, value, valueMeta);
List<Measurement> measurementList = this.measurementMap.get(defDimsId);
if (measurementList == null) {
@ -74,28 +75,45 @@ public abstract class InfluxMetricRepo implements MetricRepo {
@Override
public void addDefinitionToBatch(final Sha1HashId defId, final String name, final String tenantId,
    final String region) {
  // Only build and store a Def the first time this definition id is seen;
  // repeated adds for the same id within a batch are no-ops.
  if (!this.defMap.containsKey(defId)) {
    final Def def = new Def(defId, name, tenantId, region);
    this.defMap.put(defId, def);
  }
}
@Override
public void addDimensionToBatch(final Sha1HashId dimSetId, final String name,
final String value) {
final Dim dim = new Dim(dimSetId, name, value);
Set<Dim> dimSet = this.dimMap.get(dimSetId);
if (dimSet == null) {
dimSet = new TreeSet<>();
this.dimMap.put(dimSetId, dimSet);
}
public void addDimensionsToBatch(final Sha1HashId dimSetId, Map<String, String> dimMap) {
dimSet.add(dim);
if (!this.dimMap.containsKey(dimSetId)) {
final Set<Dim> dimSet = new TreeSet<>();
this.dimMap.put(dimSetId, dimSet);
for (Map.Entry<String, String> entry : dimMap.entrySet()) {
final String name = entry.getKey();
final String value = entry.getValue();
final Dim dim = new Dim(dimSetId, name, value);
dimSet.add(dim);
}
}
}
@Override
public void addDefinitionDimensionToBatch(final Sha1HashId defDimsId, final Sha1HashId defId,
    Sha1HashId dimId) {
  // Only the first occurrence of a definition-dimensions id is recorded.
  if (!this.defDimMap.containsKey(defDimsId)) {
    final DefDim defDim = new DefDim(defDimsId, defId, dimId);
    this.defDimMap.put(defDimsId, defDim);
  }
}
@Override

@ -24,7 +24,7 @@ public interface MetricRepo {
void addDefinitionToBatch(Sha1HashId defId, String name, String tenantId, String region);
void addDimensionToBatch(Sha1HashId dimSetId, String name, String value);
void addDimensionsToBatch(Sha1HashId dimSetId, Map<String, String> dimMap);
void addDefinitionDimensionToBatch(Sha1HashId defDimsId, Sha1HashId defId, Sha1HashId dimId);

@ -17,14 +17,11 @@
package monasca.persister.repository;
import monasca.persister.configuration.PersisterConfig;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Timer;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import io.dropwizard.setup.Environment;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Timer;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.PreparedBatch;
@ -39,6 +36,9 @@ import java.util.Set;
import javax.inject.Inject;
import io.dropwizard.setup.Environment;
import monasca.persister.configuration.PersisterConfig;
public class VerticaMetricRepo extends VerticaRepo implements MetricRepo {
private static final Logger logger = LoggerFactory.getLogger(VerticaMetricRepo.class);
@ -215,43 +215,80 @@ public class VerticaMetricRepo extends VerticaRepo implements MetricRepo {
@Override
public void addDefinitionToBatch(Sha1HashId defId, String name, String tenantId, String region) {
  if (definitionsIdCache.getIfPresent(defId) == null) {
    definitionCacheMissMeter.mark();
    // Cache miss: stage the definition for insert unless it is already
    // pending in the current (not-yet-flushed) batch.
    if (!definitionIdSet.contains(defId)) {
      logger.debug("Adding definition to batch: defId: {}, name: {}, tenantId: {}, region: {}",
          defId.toHexString(), name, tenantId, region);
      stagedDefinitionsBatch.add().bind("id", defId.getSha1Hash()).bind("name", name)
          .bind("tenant_id", tenantId).bind("region", region);
      definitionIdSet.add(defId);
    }
  } else {
    definitionCacheHitMeter.mark();
  }
}
@Override
public void addDimensionToBatch(Sha1HashId dimSetId, String name, String value) {
public void addDimensionsToBatch(Sha1HashId dimSetId, Map<String, String> dimMap) {
if (dimensionsIdCache.getIfPresent(dimSetId) == null) {
logger.debug("Adding dimension to batch: dimSetId: {}, name: {}, value: {}",
dimSetId.toHexString(), name, value);
stagedDimensionsBatch.add().bind("dimension_set_id", dimSetId.getSha1Hash())
.bind("name", name).bind("value", value);
dimensionIdSet.add(dimSetId);
dimensionCacheMissMeter.mark();
if (!dimensionIdSet.contains(dimSetId)) {
for (Map.Entry<String, String> entry : dimMap.entrySet()) {
String name = entry.getKey();
String value = entry.getValue();
logger.debug("Adding dimension to batch: dimSetId: {}, name: {}, value: {}", dimSetId.toHexString(), name, value);
stagedDimensionsBatch.add().bind("dimension_set_id", dimSetId.getSha1Hash())
.bind("name", name).bind("value", value);
}
dimensionIdSet.add(dimSetId);
}
} else {
dimensionCacheHitMeter.mark();
}
}
@Override
public void addDefinitionDimensionToBatch(Sha1HashId defDimsId, Sha1HashId defId, Sha1HashId dimId) {
public void addDefinitionDimensionToBatch(Sha1HashId defDimsId, Sha1HashId defId,
Sha1HashId dimId) {
if (definitionDimensionsIdCache.getIfPresent(defDimsId) == null) {
logger.debug("Adding definitionDimension to batch: defDimsId: {}, defId: {}, dimId: {}",
defDimsId.toHexString(), defId, dimId);
stagedDefinitionDimensionsBatch.add().bind("id", defDimsId.getSha1Hash())
.bind("definition_id", defId.getSha1Hash()).bind("dimension_set_id", dimId.getSha1Hash());
definitionDimensionsIdSet.add(defDimsId);
definitionDimensionCacheMissMeter.mark();
if (!definitionDimensionsIdSet.contains(defDimsId)) {
logger.debug("Adding definitionDimension to batch: defDimsId: {}, defId: {}, dimId: {}",
defDimsId.toHexString(), defId, dimId);
stagedDefinitionDimensionsBatch.add().bind("id", defDimsId.getSha1Hash())
.bind("definition_id", defId.getSha1Hash())
.bind("dimension_set_id", dimId.getSha1Hash());
definitionDimensionsIdSet.add(defDimsId);
}
} else {
definitionDimensionCacheHitMeter.mark();
}
}