Skip to content

Commit

Permalink
[#8755] Remove duplicate HbaseAgentUriStatDaoOperations
Browse files Browse the repository at this point in the history
  • Loading branch information
emeroad committed Apr 8, 2022
1 parent 065c015 commit f7cf4c1
Show file tree
Hide file tree
Showing 8 changed files with 52 additions and 154 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ public static Filter newFilterList(Filter... filters) {
return new FilterList(nonNullFilters);
}

public static int getScanCacheSize(Range range, int timespan, int maxCacheSize) {
public static int getScanCacheSize(Range range, long timespan, int maxCacheSize) {
Objects.requireNonNull(range, "range");

long scanRange = range.durationMillis();
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
package com.navercorp.pinpoint.web.dao.hbase.config;

import com.navercorp.pinpoint.common.hbase.HbaseColumnFamily;
import com.navercorp.pinpoint.common.hbase.HbaseOperations2;
import com.navercorp.pinpoint.common.hbase.TableNameProvider;
import com.navercorp.pinpoint.common.server.bo.serializer.stat.AgentStatHbaseOperationFactory;
import com.navercorp.pinpoint.web.dao.hbase.stat.HbaseAgentStatDaoOperations;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

@Configuration
public class AgentStatOperationConfiguration {

    /**
     * Primary DAO operations bean backed by the regular agent-stat column family
     * ({@code HbaseColumnFamily.AGENT_STAT_STATISTICS}).
     *
     * <p>Marked {@code @Primary} so unqualified {@code HbaseAgentStatDaoOperations}
     * injection points receive this variant by default.
     */
    @Primary
    @Bean("agentStatDaoOperations")
    public HbaseAgentStatDaoOperations getHbaseAgentStatDaoOperations(HbaseOperations2 hbaseOperations2,
                                                                      TableNameProvider tableNameProvider,
                                                                      AgentStatHbaseOperationFactory operationFactory) {
        HbaseColumnFamily.AgentStatStatistics descriptor = HbaseColumnFamily.AGENT_STAT_STATISTICS;
        return newDaoOperations(descriptor, descriptor.TIMESPAN_MS, hbaseOperations2, tableNameProvider, operationFactory);
    }

    /**
     * DAO operations bean backed by the URI-stat column family
     * ({@code HbaseColumnFamily.AGENT_URI_STAT_STATISTICS}); inject via
     * {@code @Qualifier("agentUriDaoOperations")}.
     */
    @Bean("agentUriDaoOperations")
    public HbaseAgentStatDaoOperations getHbaseAgentUriDaoOperations(HbaseOperations2 hbaseOperations2,
                                                                     TableNameProvider tableNameProvider,
                                                                     AgentStatHbaseOperationFactory operationFactory) {
        HbaseColumnFamily.AgentUriStatStatistics descriptor = HbaseColumnFamily.AGENT_URI_STAT_STATISTICS;
        return newDaoOperations(descriptor, descriptor.TIMESPAN_MS, hbaseOperations2, tableNameProvider, operationFactory);
    }

    // Shared construction path: both beans differ only in column family + timespan.
    private HbaseAgentStatDaoOperations newDaoOperations(HbaseColumnFamily columnFamily,
                                                         long timespan,
                                                         HbaseOperations2 hbaseOperations2,
                                                         TableNameProvider tableNameProvider,
                                                         AgentStatHbaseOperationFactory operationFactory) {
        return new HbaseAgentStatDaoOperations(columnFamily, timespan, hbaseOperations2, tableNameProvider, operationFactory);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
import com.navercorp.pinpoint.web.dao.SampledAgentStatDao;
import com.navercorp.pinpoint.web.dao.hbase.stat.DefaultSampledAgentStatDao;
import com.navercorp.pinpoint.web.dao.hbase.stat.HbaseAgentStatDaoOperations;
import com.navercorp.pinpoint.web.dao.hbase.stat.HbaseAgentUriStatDaoOperations;
import com.navercorp.pinpoint.web.dao.hbase.stat.HbaseSampledAgentUriStatDao;
import com.navercorp.pinpoint.web.dao.hbase.stat.HbaseSampledDataSourceDao;
import com.navercorp.pinpoint.web.dao.hbase.stat.SampledAgentStatResultExtractorSupplier;
Expand All @@ -43,6 +42,7 @@
import com.navercorp.pinpoint.web.vo.stat.SampledResponseTime;
import com.navercorp.pinpoint.web.vo.stat.SampledTotalThreadCount;
import com.navercorp.pinpoint.web.vo.stat.SampledTransaction;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

Expand Down Expand Up @@ -146,7 +146,7 @@ public SampledAgentStatDao<SampledLoadedClassCount> getSampledLoadedClassCountDa
}

// @Bean
public SampledAgentStatDao<SampledAgentUriStat> getSampledAgentUriStatDao(HbaseAgentUriStatDaoOperations operations,
public SampledAgentStatDao<SampledAgentUriStat> getSampledAgentUriStatDao(@Qualifier("agentUriDaoOperations") HbaseAgentStatDaoOperations operations,
AgentStatDecoder<AgentUriStatBo> decoder,
AgentStatSampler<EachUriStatBo, SampledEachUriStatBo> sampler) {
return new HbaseSampledAgentUriStatDao(operations, decoder, sampler);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,33 +35,35 @@
import org.apache.hadoop.hbase.client.Scan;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.stereotype.Component;

import java.util.List;
import java.util.Objects;

/**
* @author HyunGil Jeong
*/
@Component
public class HbaseAgentStatDaoOperations {

private static final int AGENT_STAT_VER2_NUM_PARTITIONS = 32;
private static final int MAX_SCAN_CACHE_SIZE = 256;

private final Logger logger = LogManager.getLogger(this.getClass());
private static final HbaseColumnFamily.AgentStatStatistics DESCRIPTOR = HbaseColumnFamily.AGENT_STAT_STATISTICS;
private final HbaseColumnFamily columnFamily;
private final long timespan;

private final HbaseOperations2 hbaseOperations2;
private final TableNameProvider tableNameProvider;

private final AgentStatHbaseOperationFactory operationFactory;


public HbaseAgentStatDaoOperations(HbaseOperations2 hbaseOperations2,
public HbaseAgentStatDaoOperations(HbaseColumnFamily columnFamily, long timespan,
HbaseOperations2 hbaseOperations2,
TableNameProvider tableNameProvider,
AgentStatHbaseOperationFactory operationFactory) {
this.hbaseOperations2 = Objects.requireNonNull(hbaseOperations2, "hbaseOperations2");
this.columnFamily = Objects.requireNonNull(columnFamily, "columnFamily");
this.timespan = timespan;
this.tableNameProvider = Objects.requireNonNull(tableNameProvider, "tableNameProvider");
this.operationFactory = Objects.requireNonNull(operationFactory, "operationFactory");
}
Expand All @@ -72,9 +74,9 @@ <T extends AgentStatDataPoint> List<T> getAgentStatList(AgentStatType agentStatT

Scan scan = this.createScan(agentStatType, agentId, range);

TableName agentStatTableName = tableNameProvider.getTableName(DESCRIPTOR.getTable());
TableName agentStatTableName = tableNameProvider.getTableName(columnFamily.getTable());
List<List<T>> intermediate = hbaseOperations2.findParallel(agentStatTableName, scan, this.operationFactory.getRowKeyDistributor(), mapper, AGENT_STAT_VER2_NUM_PARTITIONS);
int expectedSize = (int) (range.durationMillis() / DESCRIPTOR.TIMESPAN_MS);
int expectedSize = (int) (range.durationMillis() / timespan);

return ListListUtils.toList(intermediate, expectedSize);
}
Expand All @@ -90,7 +92,7 @@ <T extends AgentStatDataPoint> boolean agentStatExists(AgentStatType agentStatTy
int resultLimit = 20;
Scan scan = this.createScan(agentStatType, agentId, range, resultLimit);

TableName agentStatTableName = tableNameProvider.getTableName(DESCRIPTOR.getTable());
TableName agentStatTableName = tableNameProvider.getTableName(columnFamily.getTable());
List<List<T>> result = hbaseOperations2.findParallel(agentStatTableName, scan, this.operationFactory.getRowKeyDistributor(), resultLimit, mapper, AGENT_STAT_VER2_NUM_PARTITIONS);
if (result.isEmpty()) {
return false;
Expand All @@ -106,25 +108,25 @@ <S extends SampledAgentStatDataPoint> List<S> getSampledAgentStatList(AgentStatT

Scan scan = this.createScan(agentStatType, agentId, range);

TableName agentStatTableName = tableNameProvider.getTableName(DESCRIPTOR.getTable());
TableName agentStatTableName = tableNameProvider.getTableName(columnFamily.getTable());
return hbaseOperations2.findParallel(agentStatTableName, scan, this.operationFactory.getRowKeyDistributor(), resultExtractor, AGENT_STAT_VER2_NUM_PARTITIONS);
}

<T extends AgentStatDataPoint> AgentStatMapperV2<T> createRowMapper(AgentStatDecoder<T> decoder, Range range) {
TimestampFilter filter = new RangeTimestampFilter(range);
return new AgentStatMapperV2<>(this.operationFactory, decoder, filter);
return new AgentStatMapperV2<>(this.operationFactory, decoder, filter, columnFamily);
}

private Scan createScan(AgentStatType agentStatType, String agentId, Range range) {
int scanCacheSize = HBaseUtils.getScanCacheSize(range, DESCRIPTOR.TIMESPAN_MS, MAX_SCAN_CACHE_SIZE);
int scanCacheSize = HBaseUtils.getScanCacheSize(range, timespan, MAX_SCAN_CACHE_SIZE);
return this.createScan(agentStatType, agentId, range, scanCacheSize);
}

private Scan createScan(AgentStatType agentStatType, String agentId, Range range, int scanCacheSize) {
Scan scan = this.operationFactory.createScan(agentId, agentStatType, range.getFrom(), range.getTo());
scan.setCaching(scanCacheSize);
scan.setId("AgentStat_" + agentStatType);
scan.addFamily(DESCRIPTOR.getName());
scan.setId(agentStatType.getChartType());
scan.addFamily(columnFamily.getName());
return scan;
}

Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import com.navercorp.pinpoint.web.util.TimeWindow;
import com.navercorp.pinpoint.web.vo.stat.SampledAgentUriStat;
import com.navercorp.pinpoint.web.vo.stat.SampledEachUriStatBo;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Repository;

import java.util.List;
Expand All @@ -41,12 +42,12 @@
public class HbaseSampledAgentUriStatDao implements SampledAgentStatDao<SampledAgentUriStat> {

private final AgentStatType statType = AgentStatType.URI;
private final HbaseAgentUriStatDaoOperations operations;
private final HbaseAgentStatDaoOperations operations;

private final AgentStatDecoder<AgentUriStatBo> decoder;
private final AgentStatSampler<EachUriStatBo, SampledEachUriStatBo> sampler;

public HbaseSampledAgentUriStatDao(HbaseAgentUriStatDaoOperations operations,
public HbaseSampledAgentUriStatDao(@Qualifier("agentUriDaoOperations") HbaseAgentStatDaoOperations operations,
AgentStatDecoder<AgentUriStatBo> decoder,
AgentStatSampler<EachUriStatBo, SampledEachUriStatBo> sampler) {
this.operations = Objects.requireNonNull(operations, "operations");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,6 @@ public class AgentStatMapperV2<T extends AgentStatDataPoint> implements AgentSta
private final TimestampFilter filter;
private final HbaseColumnFamily targetHbaseColumnFamily;

public AgentStatMapperV2(AgentStatHbaseOperationFactory hbaseOperationFactory, AgentStatDecoder<T> decoder, TimestampFilter filter) {
this(hbaseOperationFactory, decoder, filter, HbaseColumnFamily.AGENT_STAT_STATISTICS);
}

public AgentStatMapperV2(AgentStatHbaseOperationFactory hbaseOperationFactory, AgentStatDecoder<T> decoder, TimestampFilter filter, HbaseColumnFamily targetHbaseColumnFamily) {
this.hbaseOperationFactory = hbaseOperationFactory;
this.decoder = decoder;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ public void mapperTest() throws Exception {
Result result = Result.create(cellsToPut);

// When
AgentStatMapperV2<TestAgentStat> mapper = new AgentStatMapperV2<>(this.hbaseOperationFactory, this.decoder, TEST_FILTER);
AgentStatMapperV2<TestAgentStat> mapper = new AgentStatMapperV2<>(this.hbaseOperationFactory, this.decoder, TEST_FILTER, HbaseColumnFamily.AGENT_STAT_STATISTICS);
List<TestAgentStat> mappedAgentStats = mapper.mapRow(result, 0);

// Then
Expand Down

0 comments on commit f7cf4c1

Please sign in to comment.