Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
33 commits
Select commit Hold shift + click to select a range
d96f154
HDDS-4834. Replication failure in secure environment (#1931)
adoroszlai Feb 18, 2021
afcb289
HDDS-4482. SCM should be able to persist CRL (#1874)
vivekratnavel Feb 18, 2021
ac8aee7
HDDS-4844. Fixing build issue for some platforms. (#1935)
prashantpogde Feb 19, 2021
96e27a5
HDDS-4845. Update NodeStatus OperationalState for Datanodes in Recon …
smengcl Feb 19, 2021
2c53c82
HDDS-4851. More compatibility problem with DatanodeDetails.Port.Name.…
adoroszlai Feb 19, 2021
2784612
HDDS-4848. Commit key audit log has inaccurate factor and type. (#1943)
ChenSammi Feb 20, 2021
4f5ab87
HDDS-4849. Improve Ozone admin shell decommission/recommission/mainte…
smengcl Feb 20, 2021
f4411cc
HDDS-4846. Add line break when node has no pipelines for `ozone admin…
smengcl Feb 20, 2021
dd61b17
HDDS-4148. Add servlet to return SCM DB checkpoint. (#1353)
prashantpogde Feb 20, 2021
d0496ce
HDDS-4808. Add Genesis benchmark for various CRC implementations (#1910)
sodonnel Feb 20, 2021
bf2c8a6
HDDS-4850. Intermittent failure in ozonesecure due to unable to alloc…
adoroszlai Feb 22, 2021
1cf9015
HDDS-4857. Format ReplicationType.java which indentation are confusio…
Feb 23, 2021
fb5b1f2
HDDS-4853. libexec/entrypoint.sh might copy from wrong path (#1951)
sky76093016 Feb 23, 2021
a7db06d
HDDS-4653. Support TDE for MPU Keys on Encrypted Buckets (#1766)
bharatviswa504 Feb 23, 2021
df38294
HDDS-4832. Show Datanode OperationalState in Recon (#1937)
smengcl Feb 24, 2021
de9884f
HDDS-4754. Make scm heartbeat rpc retry interval configurable (#1942)
Xushaohong Feb 25, 2021
bc501d4
HDDS-4816. Add UsageInfoSubcommand to get Datanode usage information.…
siddhantsangwan Feb 25, 2021
85e27c3
HDDS-4138. Improve crc efficiency by using Java.util.zip.CRC when ava…
sodonnel Feb 25, 2021
a795ab0
HDDS-4722. Creating RDBStore fails due to RDBMetrics instance race (#…
jojochuang Feb 25, 2021
0f9dd5a
HDDS-4870. Bump jetty version (#1964)
elek Feb 26, 2021
75cefa0
HDDS-4871. Fix intellij runConfigurations for datanode (#1968)
guihecheng Feb 26, 2021
c91774c
HDDS-4869. Bump jackson version number (#1963)
elek Mar 1, 2021
bb9fcc3
HDDS-4847. Fix typo in name of IdentityService (#1941)
plsmaop Mar 1, 2021
9a1a5ad
HDDS-4769. Simplify insert operation of ContainerAttribute (#1865)
symious Mar 1, 2021
de12bb2
HDDS-4858. Useless Maven cache cleanup (#1956)
adoroszlai Mar 1, 2021
44e969d
HDDS-4867. Ozone admin datanode list should report dead and stale nod…
sodonnel Mar 1, 2021
b90c848
HDDS-4791. StateContext.getReports may return list with size larger t…
symious Mar 1, 2021
b07162b
HDDS-4864. Add acceptance tests to certify Ozone with boto3 python cl…
ghuangups Mar 2, 2021
f8f1b5f
HDDS-4741. Modularize upgrade test (#1928)
adoroszlai Mar 2, 2021
ce9148f
Merge remote-tracking branch 'upstream/master' into HDDS-3698-nonroll…
errose28 Mar 2, 2021
a5ed551
Add datanode healthy readonly state to TestListInfoSubcommand to pass…
errose28 Mar 8, 2021
42f6ab6
Merge branch 'HDDS-3698-nonrolling-upgrade' into merge-master-into-up…
errose28 Mar 8, 2021
9a6c9fb
Merge branch 'HDDS-3698-nonrolling-upgrade' into merge-master-into-up…
errose28 Mar 11, 2021
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 6 additions & 12 deletions .github/workflows/post-commit.yml
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,12 @@ jobs:
name: ozone-bin
path: hadoop-ozone/dist/target/hadoop-ozone*.tar.gz
retention-days: 1
- name: Delete temporary build artifacts before caching
run: |
#Never cache local artifacts
rm -rf ~/.m2/repository/org/apache/hadoop/hadoop-hdds*
rm -rf ~/.m2/repository/org/apache/hadoop/hadoop-ozone*
if: always()
bats:
runs-on: ubuntu-18.04
steps:
Expand Down Expand Up @@ -218,12 +224,6 @@ jobs:
name: acceptance-${{ matrix.suite }}
path: /mnt/ozone/target/acceptance
continue-on-error: true
- name: Delete temporary build artifacts before caching
run: |
#Never cache local artifacts
rm -rf ~/.m2/repository/org/apache/hadoop/hdds
rm -rf ~/.m2/repository/org/apache/hadoop/ozone
if: always()
integration:
runs-on: ubuntu-18.04
strategy:
Expand Down Expand Up @@ -335,9 +335,3 @@ jobs:
name: kubernetes
path: /mnt/ozone/target/kubernetes
continue-on-error: true
- name: Delete temporary build artifacts before caching
run: |
#Never cache local artifacts
rm -rf ~/.m2/repository/org/apache/hadoop/hdds
rm -rf ~/.m2/repository/org/apache/hadoop/ozone
if: always()
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdds;

/**
* Various reusable utility methods related to Java.
*/
public final class JavaUtils {
// "1.8"->8, "9"->9, "10"->10
private static final int JAVA_SPEC_VER = Math.max(8, Integer.parseInt(
System.getProperty("java.specification.version").split("\\.")[0]));

/**
* Query to see if major version of Java specification of the system
* is equal or greater than the parameter.
*
* @param version 8, 9, 10 etc.
* @return comparison with system property, always true for 8
*/
public static boolean isJavaVersionAtLeast(int version) {
return JAVA_SPEC_VER >= version;
}

/**
* Private constructor.
*/
private JavaUtils() {
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,8 @@
* The replication type to be used while writing key into ozone.
*/
public enum ReplicationType {
RATIS,
STAND_ALONE,
RATIS,
STAND_ALONE,
CHAINED;

public static ReplicationType fromProto(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import java.util.Set;
import java.util.UUID;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.common.collect.ImmutableSet;
import org.apache.hadoop.hdds.DatanodeVersions;
import org.apache.hadoop.hdds.annotation.InterfaceAudience;
Expand Down Expand Up @@ -369,6 +370,7 @@ public static DatanodeDetails getFromProtoBuf(
* Returns a DatanodeDetails protobuf message from a datanode ID.
* @return HddsProtos.DatanodeDetailsProto
*/
@JsonIgnore
public HddsProtos.DatanodeDetailsProto getProtoBufMessage() {
return toProto(CURRENT_VERSION);
}
Expand Down Expand Up @@ -431,6 +433,7 @@ public HddsProtos.DatanodeDetailsProto.Builder toProtoBuilder(
* Returns a ExtendedDatanodeDetails protobuf message from a datanode ID.
* @return HddsProtos.ExtendedDatanodeDetailsProto
*/
@JsonIgnore
public HddsProtos.ExtendedDatanodeDetailsProto getExtendedProtoBufMessage() {
HddsProtos.ExtendedDatanodeDetailsProto.Builder extendedBuilder =
HddsProtos.ExtendedDatanodeDetailsProto.newBuilder()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -246,6 +246,11 @@ public final class ScmConfigKeys {
public static final int OZONE_SCM_HEARTBEAT_RPC_RETRY_COUNT_DEFAULT =
15;

public static final String OZONE_SCM_HEARTBEAT_RPC_RETRY_INTERVAL =
"ozone.scm.heartbeat.rpc-retry-interval";
public static final String OZONE_SCM_HEARTBEAT_RPC_RETRY_INTERVAL_DEFAULT =
"1s";

/**
* Defines how frequently we will log the missing of heartbeat to a specific
* SCM. In the default case we will write a warning message for each 10
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -288,6 +288,17 @@ StatusAndMessages finalizeScmUpgrade(String upgradeClientID)
throws IOException;

StatusAndMessages queryUpgradeFinalizationProgress(String upgradeClientID,
boolean force)
throws IOException;
boolean force) throws IOException;

/**
* Get usage information of datanode by ipaddress or uuid.
*
* @param ipaddress datanode ipaddress String
* @param uuid datanode uuid String
* @return List of DatanodeUsageInfo. Each element contains info such as
* capacity, SCMused, and remaining space.
* @throws IOException
*/
List<HddsProtos.DatanodeUsageInfo> getDatanodeUsageInfo(String ipaddress,
String uuid) throws IOException;
}
Original file line number Diff line number Diff line change
Expand Up @@ -199,6 +199,7 @@ public void updateLastUsedTime() {
lastUsed = Instant.ofEpochMilli(Time.now());
}

@JsonIgnore
public HddsProtos.ContainerInfoProto getProtobuf() {
HddsProtos.ContainerInfoProto.Builder builder =
HddsProtos.ContainerInfoProto.newBuilder();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

package org.apache.hadoop.hdds.scm.pipeline;

import com.fasterxml.jackson.annotation.JsonIgnore;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;

import java.util.UUID;
Expand Down Expand Up @@ -45,6 +46,7 @@ public UUID getId() {
return id;
}

@JsonIgnore
public HddsProtos.PipelineID getProtobuf() {
HddsProtos.UUID uuid128 = HddsProtos.UUID.newBuilder()
.setMostSigBits(id.getMostSignificantBits())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,20 +18,20 @@
package org.apache.hadoop.hdds.scm.protocol;

import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.ScmConfig;
import org.apache.hadoop.hdds.scm.ScmInfo;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline;
import org.apache.hadoop.hdds.scm.container.ContainerInfo;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.security.KerberosInfo;

import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.ozone.upgrade.UpgradeFinalizer.StatusAndMessages;
import org.apache.hadoop.security.KerberosInfo;

/**
* ContainerLocationProtocol is used by an HDFS node to find the set of nodes
Expand Down Expand Up @@ -246,6 +246,17 @@ StatusAndMessages finalizeScmUpgrade(String upgradeClientID)
throws IOException;

StatusAndMessages queryUpgradeFinalizationProgress(String upgradeClientID,
boolean force)
throws IOException;
boolean force) throws IOException;

/**
* Get Datanode usage information by ip or uuid.
*
* @param ipaddress - datanode IP address String
* @param uuid - datanode UUID String
* @return List of DatanodeUsageInfo. Each element contains info such as
* capacity, SCMused, and remaining space.
* @throws IOException
*/
List<HddsProtos.DatanodeUsageInfo> getDatanodeUsageInfo(String ipaddress,
String uuid) throws IOException;
}
Original file line number Diff line number Diff line change
Expand Up @@ -231,6 +231,8 @@ public enum Units {TB, GB, MB, KB, B}
// instance gets stored.
public static final String OM_CONTEXT_ATTRIBUTE = "ozone.om";

public static final String SCM_CONTEXT_ATTRIBUTE = "ozone.scm";

private OzoneConsts() {
// Never Constructed
}
Expand Down Expand Up @@ -398,5 +400,7 @@ private OzoneConsts() {
public static final String OM_RATIS_SNAPSHOT_DIR = "snapshot";

public static final long DEFAULT_OM_UPDATE_ID = -1L;
// CRL Sequence Id
public static final String CRL_SEQUENCE_ID_KEY = "CRL_SEQUENCE_ID";

}
Original file line number Diff line number Diff line change
Expand Up @@ -75,8 +75,10 @@ private static Function<ByteBuffer, ByteString> newChecksumByteBufferFunction(
/** The algorithms for {@link ChecksumType}. */
enum Algorithm {
NONE(() -> data -> ByteString.EMPTY),
CRC32(() -> newChecksumByteBufferFunction(PureJavaCrc32ByteBuffer::new)),
CRC32C(() -> newChecksumByteBufferFunction(PureJavaCrc32CByteBuffer::new)),
CRC32(() ->
newChecksumByteBufferFunction(ChecksumByteBufferFactory::crc32Impl)),
CRC32C(() ->
newChecksumByteBufferFunction(ChecksumByteBufferFactory::crc32CImpl)),
SHA256(() -> newMessageDigestFunction("SHA-256")),
MD5(() -> newMessageDigestFunction("MD5"));

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.ozone.common;

import org.apache.hadoop.hdds.JavaUtils;
import org.apache.hadoop.util.PureJavaCrc32C;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.util.zip.CRC32;
import java.util.zip.Checksum;

/**
 * Factory methods creating {@link ChecksumByteBuffer} instances for the
 * various CRC implementations, preferring the JDK-provided
 * {@code java.util.zip.CRC32C} on Java 9+ and falling back to
 * {@link PureJavaCrc32C} otherwise.
 */
public final class ChecksumByteBufferFactory {

  // Fix: log under this factory's own class (was ChecksumByteBufferImpl).
  private static final Logger LOG =
      LoggerFactory.getLogger(ChecksumByteBufferFactory.class);

  // Flipped to false permanently if creating java.util.zip.CRC32C ever
  // fails; volatile so the fallback is visible across threads.
  private static volatile boolean useJava9Crc32C
      = JavaUtils.isJavaVersionAtLeast(9);

  /**
   * Holder that reflectively binds the {@code java.util.zip.CRC32C}
   * constructor, which only exists on Java 9+. This class is only
   * referenced when {@code useJava9Crc32C} is true, so its static
   * initializer never runs on Java 8.
   */
  public static class Java9Crc32CFactory {
    private static final MethodHandle NEW_CRC32C_MH;

    static {
      MethodHandle newCRC32C = null;
      try {
        newCRC32C = MethodHandles.publicLookup()
            .findConstructor(
                Class.forName("java.util.zip.CRC32C"),
                MethodType.methodType(void.class)
            );
      } catch (ReflectiveOperationException e) {
        // Should not reach here: guarded by the Java 9+ version check.
        throw new RuntimeException(e);
      }
      NEW_CRC32C_MH = newCRC32C;
    }

    /**
     * Creates a new JDK CRC32C checksum instance.
     *
     * @return a fresh {@code java.util.zip.CRC32C}
     */
    public static Checksum createChecksum() {
      try {
        // Should throw nothing
        return (Checksum) NEW_CRC32C_MH.invoke();
      } catch (Throwable t) {
        throw (t instanceof RuntimeException) ? (RuntimeException) t
            : new RuntimeException(t);
      }
    }
  }

  /**
   * Creates a CRC32 checksum backed by {@link java.util.zip.CRC32}.
   *
   * @return a new CRC32 {@link ChecksumByteBuffer}
   */
  public static ChecksumByteBuffer crc32Impl() {
    return new ChecksumByteBufferImpl(new CRC32());
  }

  /**
   * Creates a CRC32C checksum, using the JDK implementation on Java 9+
   * and {@link PureJavaCrc32C} otherwise (or if JDK creation fails).
   *
   * @return a new CRC32C {@link ChecksumByteBuffer}
   */
  public static ChecksumByteBuffer crc32CImpl() {
    if (useJava9Crc32C) {
      try {
        return new ChecksumByteBufferImpl(Java9Crc32CFactory.createChecksum());
      } catch (Throwable e) {
        // should not happen; disable the JDK path for subsequent calls
        LOG.error("CRC32C creation failed, switching to PureJavaCrc32C", e);
        useJava9Crc32C = false;
      }
    }
    return new ChecksumByteBufferImpl(new PureJavaCrc32C());
  }

  /**
   * Private constructor: static factory methods only.
   */
  private ChecksumByteBufferFactory() {
  }
}
Loading