Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
50 commits
Select commit Hold shift + click to select a range
de38a7d
Split non_aligned charge text chunk
HTHou Apr 25, 2024
abe456f
merge master
HTHou Jul 5, 2024
ca7b6ea
Merge branch 'master' of github.com:apache/iotdb into split_text_chunk
HTHou Sep 27, 2024
5ee3c89
dev non_aligned
HTHou Sep 27, 2024
3a994a3
dev aligned chunk split
HTHou Sep 29, 2024
85bd62c
new type
HTHou Sep 29, 2024
2dcd204
dev aligned binary chunk split
HTHou Sep 29, 2024
fb8f929
Fix binary size calculation
HTHou Sep 29, 2024
41bc88b
Merge branch 'master' of github.com:apache/iotdb into split_text_chunk
HTHou Sep 29, 2024
80e63f7
Merge branch 'master' of github.com:apache/iotdb into split_text_chunk
HTHou Sep 29, 2024
0b5ad66
fix IT
HTHou Sep 29, 2024
7b463c8
update IoTDBDuplicateTimeIT.java
HTHou Sep 29, 2024
28aa427
fix pipe IT
HTHou Sep 29, 2024
1042cfc
Merge branch 'master' of github.com:apache/iotdb into split_text_chunk
HTHou Oct 10, 2024
fbf19e5
change method names
HTHou Oct 10, 2024
252d6e0
Merge branch 'master' of github.com:apache/iotdb into split_text_chunk
HTHou Oct 11, 2024
aeeef0d
add ut
HTHou Oct 11, 2024
506fceb
add UT
HTHou Oct 11, 2024
a0d46df
remove useless methods
HTHou Oct 11, 2024
96eb7e7
fix UT
HTHou Oct 11, 2024
082d9f6
fix /FileReaderManagerTest
HTHou Oct 11, 2024
b564ca2
fix win UT
HTHou Oct 12, 2024
c8207da
add binary test
HTHou Oct 12, 2024
b56aec9
Add Aligned UTs
HTHou Oct 12, 2024
2e84d21
Merge branch 'master' of github.com:apache/iotdb into split_text_chunk
HTHou Oct 12, 2024
da3eee2
fix win ut
HTHou Oct 12, 2024
99327ff
improve coverage
HTHou Oct 12, 2024
4434cd5
fix comments
HTHou Oct 12, 2024
31e27bf
fix windows UT
HTHou Oct 12, 2024
e0ac04c
fix review
HTHou Oct 15, 2024
ef55416
Merge branch 'master' of github.com:apache/iotdb into split_text_chunk
HTHou Oct 15, 2024
9881eb1
fix review
HTHou Oct 16, 2024
4339899
fix review
HTHou Oct 16, 2024
2780865
target chunk size count non binary
HTHou Oct 16, 2024
1254fe7
Merge branch 'master' of github.com:apache/iotdb into split_text_chunk
HTHou Oct 16, 2024
87eb95d
merge master and fix review
HTHou Nov 18, 2024
aa3ac01
Merge branch 'master' of github.com:apache/iotdb into split_text_chunk
HTHou Nov 18, 2024
5aa49cb
Merge branch 'master' of github.com:apache/iotdb into split_text_chunk
HTHou Nov 27, 2024
4b9edf6
Merge branch 'master' of github.com:apache/iotdb into split_text_chunk
HTHou Nov 29, 2024
9caa42a
fix compile
HTHou Nov 29, 2024
1c4d17c
Merge branch 'master' of github.com:apache/iotdb into split_text_chunk
HTHou Dec 3, 2024
23d10e6
fix UT
HTHou Dec 3, 2024
6dc5f0b
Merge branch 'master' into split_text_chunk
HTHou Dec 20, 2024
f016492
Merge branch 'master' into split_text_chunk
HTHou Dec 27, 2024
1224142
Merge branch 'master' of github.com:apache/iotdb into force_ci/split_…
HTHou Dec 30, 2024
1c32547
Merge branch 'master' of github.com:apache/iotdb into force_ci/split_…
HTHou Jan 21, 2025
167bd19
Merge branch 'master' of github.com:apache/iotdb into force_ci/split_…
HTHou Jan 21, 2025
d06cc84
Merge branch 'master' of github.com:apache/iotdb into force_ci/split_…
HTHou Jan 21, 2025
e8e449e
Tvlist feat new (#14616)
shizy818 Feb 7, 2025
8ff8928
Merge branch 'master' of github.com:apache/iotdb into force_ci/split_…
HTHou Feb 7, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -167,8 +167,8 @@ public CommonConfig setPrimitiveArraySize(int primitiveArraySize) {
}

@Override
public CommonConfig setAvgSeriesPointNumberThreshold(int avgSeriesPointNumberThreshold) {
setProperty("avg_series_point_number_threshold", String.valueOf(avgSeriesPointNumberThreshold));
public CommonConfig setTargetChunkPointNum(int targetChunkPointNum) {
setProperty("target_chunk_point_num", String.valueOf(targetChunkPointNum));
return this;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -167,9 +167,9 @@ public CommonConfig setPrimitiveArraySize(int primitiveArraySize) {
}

@Override
public CommonConfig setAvgSeriesPointNumberThreshold(int avgSeriesPointNumberThreshold) {
cnConfig.setAvgSeriesPointNumberThreshold(avgSeriesPointNumberThreshold);
dnConfig.setAvgSeriesPointNumberThreshold(avgSeriesPointNumberThreshold);
public CommonConfig setTargetChunkPointNum(int targetChunkPointNum) {
cnConfig.setTargetChunkPointNum(targetChunkPointNum);
dnConfig.setTargetChunkPointNum(targetChunkPointNum);
return this;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ public CommonConfig setPrimitiveArraySize(int primitiveArraySize) {
}

@Override
public CommonConfig setAvgSeriesPointNumberThreshold(int avgSeriesPointNumberThreshold) {
public CommonConfig setTargetChunkPointNum(int targetChunkPointNum) {
return this;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ public interface CommonConfig {

CommonConfig setPrimitiveArraySize(int primitiveArraySize);

CommonConfig setAvgSeriesPointNumberThreshold(int avgSeriesPointNumberThreshold);
CommonConfig setTargetChunkPointNum(int targetChunkPointNum);

CommonConfig setMaxTsBlockLineNumber(int maxTsBlockLineNumber);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,6 @@ public class IoTDBDuplicateTimeIT {

@Before
public void setUp() throws Exception {
EnvFactory.getEnv().getConfig().getCommonConfig().setAvgSeriesPointNumberThreshold(2);
// Adjust memstable threshold size to make it flush automatically
EnvFactory.getEnv().initClusterEnvironment();
}

Expand All @@ -62,16 +60,19 @@ public void testDuplicateTime() throws SQLException {
// version-1 tsfile
statement.execute("insert into root.db.d1(time,s1) values (2,2)");
statement.execute("insert into root.db.d1(time,s1) values (3,3)");
statement.execute("flush");

// version-2 unseq work memtable
statement.execute("insert into root.db.d1(time,s1) values (2,20)");

// version-3 tsfile
statement.execute("insert into root.db.d1(time,s1) values (5,5)");
statement.execute("insert into root.db.d1(time,s1) values (6,6)");
statement.execute("flush root.db true");

// version-2 unseq work memtable -> unseq tsfile
statement.execute("insert into root.db.d1(time,s1) values (5,50)");
statement.execute("flush");

try (ResultSet set = statement.executeQuery("SELECT s1 FROM root.db.d1 where time = 5")) {
int cnt = 0;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -309,8 +309,6 @@ public void testRecoverWALDeleteSchema() throws Exception {
@Test
public void testRecoverWALDeleteSchemaCheckResourceTime() throws Exception {
IoTDBConfig config = IoTDBDescriptor.getInstance().getConfig();
int avgSeriesPointNumberThreshold = config.getAvgSeriesPointNumberThreshold();
config.setAvgSeriesPointNumberThreshold(2);
long tsFileSize = config.getSeqTsFileSize();
long unFsFileSize = config.getSeqTsFileSize();
config.setSeqTsFileSize(10000000);
Expand All @@ -321,6 +319,7 @@ public void testRecoverWALDeleteSchemaCheckResourceTime() throws Exception {
statement.execute("create timeseries root.turbine1.d1.s1 with datatype=INT64");
statement.execute("insert into root.turbine1.d1(timestamp,s1) values(1,1)");
statement.execute("insert into root.turbine1.d1(timestamp,s1) values(2,1)");
statement.execute("flush");
statement.execute("create timeseries root.turbine1.d1.s2 with datatype=BOOLEAN");
statement.execute("insert into root.turbine1.d1(timestamp,s2) values(3,true)");
statement.execute("insert into root.turbine1.d1(timestamp,s2) values(4,true)");
Expand All @@ -343,7 +342,6 @@ public void testRecoverWALDeleteSchemaCheckResourceTime() throws Exception {
assertEquals(2, cnt);
}

config.setAvgSeriesPointNumberThreshold(avgSeriesPointNumberThreshold);
config.setSeqTsFileSize(tsFileSize);
config.setUnSeqTsFileSize(unFsFileSize);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ public void test2() {
.getConfig()
.getCommonConfig()
.setMaxNumberOfPointsInPage(4)
.setAvgSeriesPointNumberThreshold(2);
.setTargetChunkPointNum(2);
EnvFactory.getEnv().initClusterEnvironment();
String[] expectedHeader = new String[] {TIMESTAMP_STR, count("root.sg2.d1.s1")};
String[] retArray = new String[] {"5,1,", "10,1,", "15,2,", "20,0,", "25,1,"};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -335,8 +335,6 @@ public void testRecoverWALDeleteSchema() throws Exception {
@Test
public void testRecoverWALDeleteSchemaCheckResourceTime() throws Exception {
IoTDBConfig config = IoTDBDescriptor.getInstance().getConfig();
int avgSeriesPointNumberThreshold = config.getAvgSeriesPointNumberThreshold();
config.setAvgSeriesPointNumberThreshold(2);
long tsFileSize = config.getSeqTsFileSize();
long unFsFileSize = config.getSeqTsFileSize();
config.setSeqTsFileSize(10000000);
Expand Down Expand Up @@ -372,7 +370,6 @@ public void testRecoverWALDeleteSchemaCheckResourceTime() throws Exception {
assertEquals(2, cnt);
}

config.setAvgSeriesPointNumberThreshold(avgSeriesPointNumberThreshold);
config.setSeqTsFileSize(tsFileSize);
config.setUnSeqTsFileSize(unFsFileSize);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -433,8 +433,11 @@ public class IoTDBConfig {
/** The sort algorithm used in TVList */
private TVListSortAlgorithm tvListSortAlgorithm = TVListSortAlgorithm.TIM;

/** When average series point number reaches this, flush the memtable to disk */
private int avgSeriesPointNumberThreshold = 100000;
/**
* the threshold when working TVList is sorted and added into immutable TVList list in the
* writable memtable
*/
private int tvListSortThreshold = 0;

/** Enable inner space compaction for sequence files */
private volatile boolean enableSeqSpaceCompaction = true;
Expand Down Expand Up @@ -494,10 +497,10 @@ public class IoTDBConfig {
/** The target tsfile size in compaction, 2 GB by default */
private long targetCompactionFileSize = 2147483648L;

/** The target chunk size in compaction. */
private long targetChunkSize = 1048576L;
/** The target chunk size in compaction and flushing. */
private long targetChunkSize = 1600000L;

/** The target chunk point num in compaction. */
/** The target chunk point num in compaction and flushing. */
private long targetChunkPointNum = 100000L;

/**
Expand Down Expand Up @@ -2307,12 +2310,12 @@ public void setTvListSortAlgorithm(TVListSortAlgorithm tvListSortAlgorithm) {
this.tvListSortAlgorithm = tvListSortAlgorithm;
}

public int getAvgSeriesPointNumberThreshold() {
return avgSeriesPointNumberThreshold;
public int getTvListSortThreshold() {
return tvListSortThreshold;
}

public void setAvgSeriesPointNumberThreshold(int avgSeriesPointNumberThreshold) {
this.avgSeriesPointNumberThreshold = avgSeriesPointNumberThreshold;
public void setTVListSortThreshold(int tvListSortThreshold) {
this.tvListSortThreshold = tvListSortThreshold;
}

public boolean isRpcThriftCompressionEnable() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -386,11 +386,10 @@ public void loadProperties(TrimProperties properties) throws BadNodeUrlException
properties.getProperty(
"tvlist_sort_algorithm", conf.getTvListSortAlgorithm().toString())));

conf.setAvgSeriesPointNumberThreshold(
conf.setTVListSortThreshold(
Integer.parseInt(
properties.getProperty(
"avg_series_point_number_threshold",
Integer.toString(conf.getAvgSeriesPointNumberThreshold()))));
"tvlist_sort_threshold", Integer.toString(conf.getTvListSortThreshold()))));

conf.setCheckPeriodWhenInsertBlocked(
Integer.parseInt(
Expand Down Expand Up @@ -2075,6 +2074,13 @@ public synchronized void loadHotModifiedProps(TrimProperties properties)
loadQuerySampleThroughput(properties);
// update trusted_uri_pattern
loadTrustedUriPattern(properties);

// tvlist_sort_threshold
conf.setTVListSortThreshold(
Integer.parseInt(
properties.getProperty(
"tvlist_sort_threshold",
ConfigurationFileUtils.getConfigurationDefaultValue("tvlist_sort_threshold"))));
} catch (Exception e) {
if (e instanceof InterruptedException) {
Thread.currentThread().interrupt();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@
import org.apache.iotdb.db.storageengine.dataregion.read.QueryDataSourceType;
import org.apache.iotdb.db.storageengine.dataregion.read.control.FileReaderManager;
import org.apache.iotdb.db.storageengine.dataregion.tsfile.TsFileResource;
import org.apache.iotdb.db.utils.datastructure.TVList;
import org.apache.iotdb.mpp.rpc.thrift.TFetchFragmentInstanceStatisticsResp;

import org.apache.tsfile.file.metadata.IDeviceID;
Expand Down Expand Up @@ -653,6 +654,40 @@ public void releaseResourceWhenAllDriversAreClosed() {
releaseResource();
}

/**
 * Releases this query's references to every TVList it has touched.
 *
 * <p>For each referenced TVList, while holding the TVList's query-list lock:
 *
 * <ol>
 *   <li>Remove this context from the TVList's query context list.
 *   <li>If this query is the owner and no other query references the TVList, release the
 *       reserved memory and clear the TVList.
 *   <li>If this query is the owner but other queries still reference the TVList, hand
 *       ownership over to the next query in the list.
 * </ol>
 */
private void releaseTVListOwnedByQuery() {
  for (TVList tvList : tvListSet) {
    tvList.lockQueryList();
    try {
      // Fetched inside try so the lock is released even if this call throws;
      // previously it sat between lockQueryList() and try, risking a leaked lock.
      List<QueryContext> queryContextList = tvList.getQueryContextList();
      queryContextList.remove(this);
      if (tvList.getOwnerQuery() == this) {
        if (queryContextList.isEmpty()) {
          // No remaining readers: give back the reserved memory and drop the data.
          LOGGER.debug(
              "TVList {} is released by the query, FragmentInstance Id is {}",
              tvList,
              this.getId());
          memoryReservationManager.releaseMemoryCumulatively(tvList.calculateRamSize());
          tvList.clear();
        } else {
          // Transfer ownership to the next query still referencing this TVList.
          // NOTE(review): the cast below runs eagerly even when DEBUG logging is off —
          // assumes every entry is a FragmentInstanceContext; confirm with callers.
          LOGGER.debug(
              "TVList {} is now owned by another query, FragmentInstance Id is {}",
              tvList,
              ((FragmentInstanceContext) queryContextList.get(0)).getId());
          tvList.setOwnerQuery(queryContextList.get(0));
        }
      }
    } finally {
      tvList.unlockQueryList();
    }
  }
}

/**
* All file paths used by this fragment instance must be cleared and thus the usage reference must
* be decreased.
Expand All @@ -673,6 +708,9 @@ public synchronized void releaseResource() {
unClosedFilePaths = null;
}

// release TVList/AlignedTVList owned by current query
releaseTVListOwnedByQuery();

dataRegion = null;
globalTimeFilter = null;
sharedQueryDataSource = null;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,13 +28,15 @@
import org.apache.iotdb.db.utils.ModificationUtils;
import org.apache.iotdb.db.utils.datastructure.PatternTreeMapFactory;
import org.apache.iotdb.db.utils.datastructure.PatternTreeMapFactory.ModsSerializer;
import org.apache.iotdb.db.utils.datastructure.TVList;

import org.apache.tsfile.file.metadata.IDeviceID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
Expand Down Expand Up @@ -71,6 +73,9 @@ public class QueryContext {

private final Set<TsFileID> nonExistentModFiles = new CopyOnWriteArraySet<>();

// referenced TVLists for the query
protected final Set<TVList> tvListSet = new HashSet<>();

public QueryContext() {}

public QueryContext(long queryId) {
Expand Down Expand Up @@ -214,4 +219,8 @@ public boolean isIgnoreAllNullRows() {
public void setIgnoreAllNullRows(boolean ignoreAllNullRows) {
this.ignoreAllNullRows = ignoreAllNullRows;
}

/**
 * Records the keys of {@code tvListMap} into this context's set of referenced TVLists;
 * the map's values are not used.
 *
 * @param tvListMap map whose keys are the TVLists referenced by this query
 */
public void addTVListToSet(Map<TVList, Integer> tvListMap) {
  for (TVList referencedList : tvListMap.keySet()) {
    tvListSet.add(referencedList);
  }
}
}
Loading
Loading