TimelineFilterUtils.java

@@ -22,13 +22,16 @@
import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.Column;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnFamily;
@@ -117,8 +120,9 @@ private static <T extends BaseTable<T>> Filter createHBaseColQualPrefixFilter(
*/
public static <T extends BaseTable<T>> Filter createHBaseQualifierFilter(
CompareOp compareOp, ColumnPrefix<T> columnPrefix) {
  // Use an exact BinaryComparator on the column prefix bytes instead of the
  // former BinaryPrefixComparator: only the bare prefix qualifier matches,
  // not every qualifier that starts with it.
  return new QualifierFilter(compareOp,
      new BinaryComparator(
          columnPrefix.getColumnPrefixBytes("")));
}
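
For context on the comparator swap above: with CompareOp.EQUAL, a QualifierFilter built on a BinaryPrefixComparator passes every qualifier that merely starts with the supplied bytes, while a BinaryComparator passes only a qualifier exactly equal to them. A minimal sketch of the difference (the "e!" prefix is made up for illustration; imports as in this file):

    byte[] prefix = Bytes.toBytes("e!");
    // Prefix match: accepts qualifiers "e!", "e!cpu", "e!mem", ...
    Filter prefixMatch = new QualifierFilter(CompareOp.EQUAL,
        new BinaryPrefixComparator(prefix));
    // Exact match: accepts only a qualifier whose bytes equal "e!".
    Filter exactMatch = new QualifierFilter(CompareOp.EQUAL,
        new BinaryComparator(prefix));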

@@ -256,6 +260,39 @@ public static Set<String> fetchColumnsFromFilterList(
return strSet;
}


/**
 * Recursively walks the given filter and applies every {@link FamilyFilter}
 * found to the scan via {@link Scan#addFamily(byte[])}. Every other filter
 * is collected into {@code removedFilterList}.
 *
 * @param filter filter (possibly a {@link FilterList}) to process.
 * @param removedFilterList receives each filter that is not a FamilyFilter.
 * @param scan scan to which the extracted column families are added.
 */
public static void extractFamilyFilters(Filter filter,
    FilterList removedFilterList, Scan scan) {
  if (filter instanceof FamilyFilter) {
    FamilyFilter familyFilter = (FamilyFilter) filter;
    scan.addFamily(familyFilter.getComparator().getValue());
  } else if (filter instanceof FilterList) {
    for (Filter f : ((FilterList) filter).getFilters()) {
      extractFamilyFilters(f, removedFilterList, scan);
    }
  } else {
    removedFilterList.addFilter(filter);
  }
}

/**
 * Same as {@link #extractFamilyFilters(Filter, FilterList, Scan)}, except
 * that the extracted column families are added to a {@link Get}.
 */
public static void extractFamilyFilters(Filter filter,
    FilterList removedFilterList, Get get) {
  if (filter instanceof FamilyFilter) {
    FamilyFilter familyFilter = (FamilyFilter) filter;
    get.addFamily(familyFilter.getComparator().getValue());
  } else if (filter instanceof FilterList) {
    for (Filter f : ((FilterList) filter).getFilters()) {
      extractFamilyFilters(f, removedFilterList, get);
    }
  } else {
    removedFilterList.addFilter(filter);
  }
}

/**
* Creates equivalent HBase {@link FilterList} from {@link TimelineFilterList}
 * while converting different timeline filters (of type {@link TimelineFilter})
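
A rough usage sketch for the new extractFamilyFilters helper (the filter contents and the "i" family are made up for illustration): a FamilyFilter buried inside a FilterList is promoted to Scan#addFamily, and everything else lands in the removed list.

    Scan scan = new Scan();
    FilterList original = new FilterList(Operator.MUST_PASS_ALL);
    original.addFilter(new FamilyFilter(CompareOp.EQUAL,
        new BinaryComparator(Bytes.toBytes("i"))));
    original.addFilter(new PageFilter(10));

    FilterList remaining = new FilterList();
    TimelineFilterUtils.extractFamilyFilters(original, remaining, scan);
    // scan now restricts to family "i" via addFamily; remaining holds only
    // the PageFilter, which becomes the scan's server-side filter.
    scan.setFilter(remaining);

One design note: the helper flattens nested FilterLists into removedFilterList, so the OR semantics of a MUST_PASS_ONE list are not preserved for the non-family filters; callers should only hand it conjunctive (MUST_PASS_ALL) lists.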
ApplicationEntityReader.java

@@ -31,6 +31,7 @@
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.PageFilter;
@@ -57,14 +58,14 @@
import org.apache.hadoop.yarn.server.timelineservice.storage.common.RowKeyPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;
import org.apache.hadoop.yarn.webapp.BadRequestException;

import com.google.common.base.Preconditions;

/**
* Timeline entity reader for application entities that are stored in the
* application table.
*/
class ApplicationEntityReader extends GenericEntityReader {

private static final ApplicationTableRW APPLICATION_TABLE =
new ApplicationTableRW();

@@ -290,7 +291,7 @@ private void updateFilterForConfsAndMetricsToRetrieve(
@Override
protected FilterList constructFilterListBasedOnFields(Set<String> cfsInFields)
throws IOException {
if (!needCreateFilterListBasedOnFields()) {
// Fetch all the columns. No need of a filter.
return null;
}
@@ -329,6 +330,11 @@ protected Result getResult(Configuration hbaseConf, Connection conn,
setMetricsTimeRange(get);
get.setMaxVersions(getDataToRetrieve().getMetricsLimit());
if (filterList != null && !filterList.getFilters().isEmpty()) {
  FilterList removedFilterList = new FilterList();
  // Promote any FamilyFilter in filterList to Get#addFamily; the remaining
  // filters become the Get's server-side filter.
  TimelineFilterUtils.extractFamilyFilters(filterList, removedFilterList, get);
  get.setFilter(removedFilterList);
}
return getTable().getResult(hbaseConf, conn, get);
@@ -422,7 +428,12 @@ protected ResultScanner getResults(Configuration hbaseConf,
FilterList newList = new FilterList();
newList.addFilter(new PageFilter(getFilters().getLimit()));
if (filterList != null && !filterList.getFilters().isEmpty()) {
  FilterList removedFilterList = new FilterList();
  // Promote any FamilyFilter in filterList to Scan#addFamily; the remaining
  // filters are ANDed with the page filter via newList.
  TimelineFilterUtils.extractFamilyFilters(filterList, removedFilterList, scan);
  newList.addFilter(removedFilterList);
}
scan.setFilter(newList);
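
One caveat in the Scan path above: PageFilter(limit) is evaluated independently on each region server, so a scan spanning several regions can return more than limit rows in total. A global cutoff still has to be enforced while iterating the ResultScanner; a minimal sketch of that guard (illustrative, not the PR's code):

    long remaining = getFilters().getLimit();
    for (Result result : scanner) {
      if (remaining-- <= 0) {
        break;  // PageFilter alone does not guarantee a global limit.
      }
      // ... parse the entity from result ...
    }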

FlowRunEntityReader.java

@@ -31,6 +31,7 @@
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.PageFilter;
@@ -56,7 +57,6 @@
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunRowKeyPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTableRW;
import org.apache.hadoop.yarn.webapp.BadRequestException;

import com.google.common.base.Preconditions;

/**
@@ -65,7 +65,6 @@
*/
class FlowRunEntityReader extends TimelineEntityReader {
private static final FlowRunTableRW FLOW_RUN_TABLE = new FlowRunTableRW();

FlowRunEntityReader(TimelineReaderContext ctxt,
TimelineEntityFilters entityFilters, TimelineDataToRetrieve toRetrieve) {
super(ctxt, entityFilters, toRetrieve);
@@ -250,8 +249,14 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn,

FilterList newList = new FilterList();
newList.addFilter(new PageFilter(getFilters().getLimit()));

if (filterList != null && !filterList.getFilters().isEmpty()) {
  FilterList removedFilterList = new FilterList();
  // Promote any FamilyFilter in filterList to Scan#addFamily; the remaining
  // filters are ANDed with the page filter via newList.
  TimelineFilterUtils.extractFamilyFilters(filterList, removedFilterList, scan);
  newList.addFilter(removedFilterList);
}
scan.setFilter(newList);
scan.setMaxVersions(Integer.MAX_VALUE);
GenericEntityReader.java

@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.timelineservice.storage.reader;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.Map;
@@ -33,10 +34,12 @@
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
@@ -46,6 +49,7 @@
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix;
@@ -70,6 +74,7 @@
* table.
*/
class GenericEntityReader extends TimelineEntityReader {

private static final EntityTableRW ENTITY_TABLE = new EntityTableRW();

/**
@@ -533,7 +538,12 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn,
setMetricsTimeRange(scan);
scan.setMaxVersions(getDataToRetrieve().getMetricsLimit());
if (filterList != null && !filterList.getFilters().isEmpty()) {
  FilterList removedFilterList = new FilterList();
  // Promote any FamilyFilter in filterList to Scan#addFamily; the remaining
  // filters become the scan's server-side filter.
  TimelineFilterUtils.extractFamilyFilters(filterList, removedFilterList, scan);
  scan.setFilter(removedFilterList);
}
return getTable().getResultScanner(hbaseConf, conn, scan);
}
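
Two lines in getResults above, setMetricsTimeRange(scan) and scan.setMaxVersions(getDataToRetrieve().getMetricsLimit()), work together: metric time series are stored as successive cell versions, so the metrics limit is expressed as a max-versions bound. A sketch of the likely effect, assuming the metrics family is "m" and that setMetricsTimeRange delegates to HBase's column-family time range (both assumptions, not confirmed by this diff):

    // Restrict only the metrics family to [start, end) ...
    scan.setColumnFamilyTimeRange(Bytes.toBytes("m"), start, end);
    // ... and return at most metricsLimit of the most recent values
    // per metric column.
    scan.setMaxVersions(metricsLimit);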
(fifth file: another timeline entity reader; its class name is not shown in this excerpt)

@@ -30,6 +30,7 @@
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.PageFilter;
@@ -387,7 +388,13 @@ protected ResultScanner getResults(Configuration hbaseConf, Connection conn,
setMetricsTimeRange(scan);
scan.setMaxVersions(getDataToRetrieve().getMetricsLimit());
if (filterList != null && !filterList.getFilters().isEmpty()) {
  FilterList removedFilterList = new FilterList();
  // Promote any FamilyFilter in filterList to Scan#addFamily; the remaining
  // filters become the scan's server-side filter.
  TimelineFilterUtils.extractFamilyFilters(filterList, removedFilterList, scan);
  scan.setFilter(removedFilterList);
}
return getTable().getResultScanner(hbaseConf, conn, scan);
}