Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,12 @@ public void createTag(String tagName, long fromSnapshotId, Duration timeRetained
wrapped.createTag(tagName, fromSnapshotId, timeRetained);
}

// Updating a tag mutates table metadata, so it is gated on the same INSERT
// privilege as createTag/deleteTag in this class before delegating to the
// wrapped table.
@Override
public void updateTag(String tagName, long fromSnapshotId, Duration timeRetained) {
privilegeChecker.assertCanInsert(identifier);
wrapped.updateTag(tagName, fromSnapshotId, timeRetained);
}

@Override
public void deleteTag(String tagName) {
privilegeChecker.assertCanInsert(identifier);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -460,7 +460,7 @@ public Snapshot createTagInternal(long fromSnapshotId) {
}
checkArgument(
snapshot != null,
"Cannot create tag because given snapshot #%s doesn't exist.",
"Cannot create or update tag because given snapshot #%s doesn't exist.",
fromSnapshotId);
return snapshot;
}
Expand Down Expand Up @@ -494,6 +494,11 @@ private void createTag(String tagName, Snapshot fromSnapshot, @Nullable Duration
tagManager().createTag(fromSnapshot, tagName, timeRetained, store().createTagCallbacks());
}

// Re-point an existing tag at another snapshot (optionally changing its
// retention). createTagInternal(fromSnapshotId) resolves the target snapshot
// and fails if it doesn't exist; TagManager.updateTag additionally verifies
// the tag itself exists before overwriting it.
// NOTE(review): timeRetained is @Nullable here but not annotated on the
// Table interface declaration — confirm callers are allowed to pass null.
@Override
public void updateTag(String tagName, long fromSnapshotId, @Nullable Duration timeRetained) {
tagManager().updateTag(tagName, createTagInternal(fromSnapshotId), timeRetained);
}

@Override
public void deleteTag(String tagName) {
tagManager()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,14 @@ default void createTag(String tagName, Duration timeRetained) {
this.getClass().getSimpleName()));
}

// Read-only tables reject every tag mutation; updateTag is no exception and
// always fails with a descriptive UnsupportedOperationException.
@Override
default void updateTag(String tagName, long fromSnapshotId, Duration timeRetained) {
String message =
String.format(
"Readonly Table %s does not support updateTag.",
this.getClass().getSimpleName());
throw new UnsupportedOperationException(message);
}

@Override
default void deleteTag(String tagName) {
throw new UnsupportedOperationException(
Expand Down
4 changes: 4 additions & 0 deletions paimon-core/src/main/java/org/apache/paimon/table/Table.java
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,10 @@ public interface Table extends Serializable {
@Experimental
void createTag(String tagName, Duration timeRetained);

/**
 * Update an existing tag so that it references the given snapshot.
 *
 * @param tagName name of the tag to update; the tag must already exist
 * @param fromSnapshotId id of the snapshot the tag should reference after the update
 * @param timeRetained how long the tag is retained; the file-store implementation treats a
 *     null value as "no TTL" — TODO confirm nullability is part of this contract and
 *     consider annotating the parameter @Nullable
 */
@Experimental
void updateTag(String tagName, long fromSnapshotId, Duration timeRetained);

/** Delete a tag by name. */
@Experimental
void deleteTag(String tagName);
Expand Down
22 changes: 22 additions & 0 deletions paimon-core/src/main/java/org/apache/paimon/utils/TagManager.java
Original file line number Diff line number Diff line change
Expand Up @@ -134,6 +134,28 @@ public void createTag(
}
}

/**
 * Redirect an existing tag to a new snapshot, optionally replacing its retention time.
 *
 * @param tagName name of the tag to update; must be non-blank and must already exist
 * @param snapshot the snapshot the tag should point to after the update
 * @param timeRetained new time-to-live for the tag, or null to store the plain snapshot
 *     JSON without TTL metadata
 * @throws IllegalArgumentException if the tag name is blank or the tag does not exist
 * @throws RuntimeException if overwriting the tag file fails
 */
public void updateTag(String tagName, Snapshot snapshot, @Nullable Duration timeRetained) {
checkArgument(!StringUtils.isBlank(tagName), "Tag name '%s' is blank.", tagName);
checkArgument(tagExists(tagName), "Tag '%s' doesn't exist.", tagName);

// With a TTL the tag is serialized as a Tag (snapshot plus retention metadata);
// without one, the raw snapshot JSON is stored — presumably matching the on-disk
// format createTag produces; confirm against createTag.
// NOTE(review): unlike createTag (which runs createTagCallbacks), no callbacks are
// invoked on update — confirm that is intentional.
String content =
timeRetained != null
? Tag.fromSnapshotAndTagTtl(snapshot, timeRetained, LocalDateTime.now())
.toJson()
: snapshot.toJson();
Path tagPath = tagPath(tagName);

try {
// Overwrite in place: the tag file is guaranteed to exist (checked above).
fileIO.overwriteFileUtf8(tagPath, content);
} catch (IOException e) {
// Fixed message grammar ("when update tag" -> "when updating tag") and
// removed the stray trailing space.
throw new RuntimeException(
String.format(
"Exception occurs when updating tag '%s' (path %s).", tagName, tagPath),
e);
}
}

/** Make sure the tagNames are ALL tags of one snapshot. */
public void deleteAllTagsOfOneSnapshot(
List<String> tagNames, TagDeletion tagDeletion, SnapshotManager snapshotManager) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
import org.apache.paimon.spark.procedure.RepairProcedure;
import org.apache.paimon.spark.procedure.ReplaceBranchProcedure;
import org.apache.paimon.spark.procedure.RollbackProcedure;
import org.apache.paimon.spark.procedure.UpdateTagProcedure;

import org.apache.paimon.shade.guava30.com.google.common.collect.ImmutableMap;

Expand All @@ -58,6 +59,7 @@ private static Map<String, Supplier<ProcedureBuilder>> initProcedureBuilders() {
ImmutableMap.builder();
procedureBuilders.put("rollback", RollbackProcedure::builder);
procedureBuilders.put("create_tag", CreateTagProcedure::builder);
procedureBuilders.put("update_tag", UpdateTagProcedure::builder);
procedureBuilders.put("delete_tag", DeleteTagProcedure::builder);
procedureBuilders.put("create_branch", CreateBranchProcedure::builder);
procedureBuilders.put("delete_branch", DeleteBranchProcedure::builder);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.paimon.spark.procedure;

import org.apache.paimon.utils.TimeUtils;

import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.connector.catalog.Identifier;
import org.apache.spark.sql.connector.catalog.TableCatalog;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.time.Duration;

import static org.apache.spark.sql.types.DataTypes.LongType;
import static org.apache.spark.sql.types.DataTypes.StringType;

/**
 * A procedure to update an existing tag, re-pointing it at a given snapshot and optionally
 * changing its retention time.
 *
 * <p>Usage: {@code CALL sys.update_tag(table => 't', tag => 'tag1', snapshot => 2,
 * time_retained => '1 d')}.
 */
public class UpdateTagProcedure extends BaseProcedure {

// `snapshot` is declared optional to keep the parameter list aligned with the other
// tag procedures, but Table#updateTag requires a concrete snapshot id; call() rejects
// a missing value explicitly (see below).
private static final ProcedureParameter[] PARAMETERS =
new ProcedureParameter[] {
ProcedureParameter.required("table", StringType),
ProcedureParameter.required("tag", StringType),
ProcedureParameter.optional("snapshot", LongType),
ProcedureParameter.optional("time_retained", StringType)
};

// Single boolean `result` column: true when the tag was updated.
private static final StructType OUTPUT_TYPE =
new StructType(
new StructField[] {
new StructField("result", DataTypes.BooleanType, true, Metadata.empty())
});

protected UpdateTagProcedure(TableCatalog tableCatalog) {
super(tableCatalog);
}

@Override
public ProcedureParameter[] parameters() {
return PARAMETERS;
}

@Override
public StructType outputType() {
return OUTPUT_TYPE;
}

@Override
public InternalRow[] call(InternalRow args) {
Identifier tableIdent = toIdentifier(args.getString(0), PARAMETERS[0].name());
String tag = args.getString(1);
Long snapshot = args.isNullAt(2) ? null : args.getLong(2);
// Fail fast with a descriptive message: passing null into the primitive
// `long fromSnapshotId` parameter of Table#updateTag would otherwise fail
// with a bare NullPointerException on unboxing.
if (snapshot == null) {
throw new IllegalArgumentException(
"Parameter 'snapshot' is required for procedure update_tag.");
}
// time_retained is parsed from strings like '1 d' / '5 d'; null means "no TTL".
Duration timeRetained =
args.isNullAt(3) ? null : TimeUtils.parseDuration(args.getString(3));

return modifyPaimonTable(
tableIdent,
table -> {
table.updateTag(tag, snapshot, timeRetained);
InternalRow outputRow = newInternalRow(true);
return new InternalRow[] {outputRow};
});
}

public static ProcedureBuilder builder() {
return new BaseProcedure.Builder<UpdateTagProcedure>() {
@Override
public UpdateTagProcedure doBuild() {
return new UpdateTagProcedure(tableCatalog());
}
};
}

@Override
public String description() {
return "UpdateTagProcedure";
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -155,4 +155,87 @@ class CreateAndDeleteTagProcedureTest extends PaimonSparkTestBase with StreamTes
}
}
}

// End-to-end check of the sys.update_tag procedure: build three snapshots via a
// streaming write, create a tag on snapshot 1, then move it (and its TTL) twice
// and verify the T$tags system table reflects each update.
test("Paimon Procedure: update tag") {
failAfter(streamingTimeout) {
withTempDir {
checkpointDir =>
// Define a primary-key table and feed it through a foreachBatch streaming sink.
spark.sql(s"""
|CREATE TABLE T (a INT, b STRING)
|TBLPROPERTIES ('primary-key'='a', 'bucket'='3')
|""".stripMargin)
val location = loadTable("T").location().toString

val inputData = MemoryStream[(Int, String)]
val stream = inputData
.toDS()
.toDF("a", "b")
.writeStream
.option("checkpointLocation", checkpointDir.getCanonicalPath)
.foreachBatch {
(batch: Dataset[Row], _: Long) =>
batch.write.format("paimon").mode("append").save(location)
}
.start()

val query = () => spark.sql("SELECT * FROM T ORDER BY a")

try {
// Each addData + processAllAvailable produces one Paimon snapshot.
// snapshot-1
inputData.addData((1, "a"))
stream.processAllAvailable()
checkAnswer(query(), Row(1, "a") :: Nil)

// snapshot-2
inputData.addData((2, "b"))
stream.processAllAvailable()
checkAnswer(query(), Row(1, "a") :: Row(2, "b") :: Nil)

// snapshot-3
inputData.addData((3, "c"))
stream.processAllAvailable()
checkAnswer(query(), Row(1, "a") :: Row(2, "b") :: Row(3, "c") :: Nil)

// Create the tag on snapshot 1 and confirm it is visible in T$tags.
checkAnswer(
spark.sql(
"CALL paimon.sys.create_tag(" +
"table => 'test.T', tag => 'test_tag', snapshot => 1)"),
Row(true) :: Nil)
checkAnswer(
spark.sql("SELECT tag_name FROM paimon.test.`T$tags`"),
Row("test_tag") :: Nil);

// Update in place: same snapshot, but now with a 1-day TTL (ISO "PT24H").
checkAnswer(
spark.sql(
"CALL paimon.sys.update_tag(" +
"table => 'test.T', tag => 'test_tag', snapshot => 1, time_retained => '1 d')"),
Row(true) :: Nil)
checkAnswer(
spark.sql(
"SELECT tag_name,snapshot_id,time_retained FROM paimon.test.`T$tags` where tag_name = 'test_tag'"),
Row("test_tag", 1, "PT24H") :: Nil)

// Move the tag to snapshot 2 with a 5-day TTL ("PT120H").
checkAnswer(
spark.sql(
"CALL paimon.sys.update_tag(" +
"table => 'test.T', tag => 'test_tag', snapshot => 2, time_retained => '5 d')"),
Row(true) :: Nil)
checkAnswer(
spark.sql(
"SELECT tag_name,snapshot_id,time_retained FROM paimon.test.`T$tags` where tag_name = 'test_tag'"),
Row("test_tag", 2, "PT120H") :: Nil)

// Updating a tag that was never created must fail (TagManager rejects it).
// assert throw exception tag not exist
assertThrows[RuntimeException] {
spark.sql(
"CALL paimon.sys.update_tag(" +
"table => 'test.T', tag => 'new_tag', snapshot => 2, time_retained => '5 d')")
}
} finally {
stream.stop()
}
}
}
}
}