Merged
ContainerTokenIdentifier.java
@@ -21,13 +21,15 @@
import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ContainerTokenSecretProto;
import org.apache.hadoop.hdds.scm.container.ContainerID;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.ProtobufUtils;

import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.IOException;
import java.time.Instant;
import java.util.Objects;
+import java.util.UUID;

/**
* Token identifier for container operations, similar to block token.
@@ -43,11 +45,18 @@ public ContainerTokenIdentifier() {
}

public ContainerTokenIdentifier(String ownerId, ContainerID containerID,
-      String certSerialId, Instant expiryDate) {
-    super(ownerId, expiryDate, certSerialId);
+      Instant expiryDate) {
+    super(ownerId, expiryDate);
this.containerID = containerID;
}

+  public ContainerTokenIdentifier(String ownerId, ContainerID containerID,
+      UUID secretKeyId,
+      Instant expiryDate) {
+    this(ownerId, containerID, expiryDate);
+    setSecretKeyId(secretKeyId);
+  }

@Override
public Text getKind() {
return KIND;
@@ -58,7 +67,7 @@ public void write(DataOutput out) throws IOException {
ContainerTokenSecretProto.Builder builder = ContainerTokenSecretProto
.newBuilder()
.setOwnerId(getOwnerId())
-        .setCertSerialId(getCertSerialId())
+        .setSecretKeyId(ProtobufUtils.toProtobuf(getSecretKeyId()))
.setExpiryDate(getExpiry().toEpochMilli())
.setContainerId(containerID.getProtobuf());
out.write(builder.build().toByteArray());
@@ -72,7 +81,7 @@ public void readFields(DataInput in) throws IOException {
}
ContainerTokenSecretProto proto =
ContainerTokenSecretProto.parseFrom((DataInputStream) in);
-    setCertSerialId(proto.getCertSerialId());
+    setSecretKeyId(ProtobufUtils.fromProtobuf(proto.getSecretKeyId()));
setExpiry(Instant.ofEpochMilli(proto.getExpiryDate()));
setOwnerId(proto.getOwnerId());
this.containerID = ContainerID.getFromProtobuf(proto.getContainerId());
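To make the new flow concrete, here is a minimal round-trip sketch for the rewritten identifier. Only the constructor, `write()`, and `readFields()` signatures come from this diff; `ContainerID.valueOf(long)` and Hadoop's `DataOutputBuffer`/`DataInputBuffer` helpers are assumptions about the surrounding codebase.

```java
// Round-trip sketch. Assumptions beyond this diff: ContainerID.valueOf(long)
// and org.apache.hadoop.io.DataOutputBuffer/DataInputBuffer, which extend
// the DataOutput/DataInputStream types that write()/readFields() expect.
static void roundTripContainerToken() throws IOException {
  ContainerTokenIdentifier id = new ContainerTokenIdentifier(
      "testUser",
      ContainerID.valueOf(1L),
      UUID.randomUUID(),                 // id of the signing secret key
      Instant.now().plusSeconds(3600));  // token expiry

  DataOutputBuffer out = new DataOutputBuffer();
  id.write(out);                         // serializes ContainerTokenSecretProto

  ContainerTokenIdentifier parsed = new ContainerTokenIdentifier();
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  parsed.readFields(in);                 // DataInputBuffer is a DataInputStream

  assert id.getSecretKeyId().equals(parsed.getSecretKeyId());
  assert id.getExpiry().equals(parsed.getExpiry());
}
```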
OzoneBlockTokenIdentifier.java
@@ -25,6 +25,7 @@
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.BlockTokenSecretProto.AccessModeProto;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token.TrivialRenewer;
+import org.apache.hadoop.util.ProtobufUtils;

import java.io.DataInput;
import java.io.DataInputStream;
@@ -59,16 +60,14 @@ public OzoneBlockTokenIdentifier() {
}

public OzoneBlockTokenIdentifier(String ownerId, BlockID blockId,
-      Set<AccessModeProto> modes, long expiryDate, String omCertSerialId,
-      long maxLength) {
-    this(ownerId, getTokenService(blockId), modes, expiryDate, omCertSerialId,
+      Set<AccessModeProto> modes, long expiryDate, long maxLength) {
+    this(ownerId, getTokenService(blockId), modes, expiryDate,
maxLength);
}

public OzoneBlockTokenIdentifier(String ownerId, String blockId,
-      Set<AccessModeProto> modes, long expiryDate, String omCertSerialId,
-      long maxLength) {
-    super(ownerId, Instant.ofEpochMilli(expiryDate), omCertSerialId);
+      Set<AccessModeProto> modes, long expiryDate, long maxLength) {
+    super(ownerId, Instant.ofEpochMilli(expiryDate));
this.blockId = blockId;
this.modes = modes == null
? EnumSet.noneOf(AccessModeProto.class) : EnumSet.copyOf(modes);
@@ -136,7 +135,7 @@ public void readFields(DataInput in) throws IOException {
BlockTokenSecretProto.parseFrom((DataInputStream) in);
setOwnerId(token.getOwnerId());
setExpiry(Instant.ofEpochMilli(token.getExpiryDate()));
-    setCertSerialId(token.getOmCertSerialId());
+    setSecretKeyId(ProtobufUtils.fromProtobuf(token.getSecretKeyId()));
this.blockId = token.getBlockId();
this.modes = EnumSet.copyOf(token.getModesList());
this.maxLength = token.getMaxLength();
@@ -147,18 +146,21 @@ public static OzoneBlockTokenIdentifier readFieldsProtobuf(DataInput in)
throws IOException {
BlockTokenSecretProto token =
BlockTokenSecretProto.parseFrom((DataInputStream) in);
-    return new OzoneBlockTokenIdentifier(token.getOwnerId(),
-        token.getBlockId(), EnumSet.copyOf(token.getModesList()),
-        token.getExpiryDate(), token.getOmCertSerialId(),
-        token.getMaxLength());
+    OzoneBlockTokenIdentifier tokenId =
+        new OzoneBlockTokenIdentifier(token.getOwnerId(),
+            token.getBlockId(), EnumSet.copyOf(token.getModesList()),
+            token.getExpiryDate(),
+            token.getMaxLength());
+    tokenId.setSecretKeyId(ProtobufUtils.fromProtobuf(token.getSecretKeyId()));
+    return tokenId;
}

@Override
public void write(DataOutput out) throws IOException {
BlockTokenSecretProto.Builder builder = BlockTokenSecretProto.newBuilder()
.setBlockId(blockId)
.setOwnerId(getOwnerId())
-        .setOmCertSerialId(getCertSerialId())
+        .setSecretKeyId(ProtobufUtils.toProtobuf(getSecretKeyId()))
.setExpiryDate(getExpiryDate())
.setMaxLength(maxLength);
// Add access mode allowed
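The block token follows the same pattern, sketched here at a call site. The constructor and setter signatures are taken from this diff; `BlockID(containerId, localId)` and the chosen values are illustrative assumptions.

```java
// Sketch: a READ/WRITE block token bound to a secret key. BlockID(long, long)
// is assumed; the UUID would normally come from the secret-key manager's
// current key rather than randomUUID().
OzoneBlockTokenIdentifier tokenId = new OzoneBlockTokenIdentifier(
    "client-1",
    new BlockID(1L, 100L),
    EnumSet.of(AccessModeProto.READ, AccessModeProto.WRITE),
    Instant.now().plusSeconds(3600).toEpochMilli(),  // expiry in epoch millis
    256L * 1024 * 1024);                             // maxLength in bytes
tokenId.setSecretKeyId(UUID.randomUUID());
```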
ShortLivedTokenIdentifier.java
@@ -24,6 +24,7 @@

import java.time.Instant;
import java.util.Objects;
+import java.util.UUID;

/**
* Base class for short-lived tokens (block, container).
@@ -33,18 +34,16 @@ public abstract class ShortLivedTokenIdentifier extends TokenIdentifier {

private String ownerId;
private Instant expiry;
-  private String certSerialId;
+  private UUID secretKeyId;

public abstract String getService();

protected ShortLivedTokenIdentifier() {
}

-  protected ShortLivedTokenIdentifier(String ownerId, Instant expiry,
-      String certSerialId) {
+  protected ShortLivedTokenIdentifier(String ownerId, Instant expiry) {
this.ownerId = ownerId;
this.expiry = expiry;
-    this.certSerialId = certSerialId;
}

@Override
@@ -67,22 +66,23 @@ protected void setExpiry(Instant expiry) {
this.expiry = expiry;
}

-  protected void setCertSerialId(String certSerialId) {
-    this.certSerialId = certSerialId;
+  public void setSecretKeyId(UUID secretKeyId) {
+    this.secretKeyId = secretKeyId;
}

public Instant getExpiry() {
return expiry;
}

-  public String getCertSerialId() {
-    return certSerialId;
-  }

public String getOwnerId() {
return ownerId;
}

+  public UUID getSecretKeyId() {
+    return secretKeyId;
+  }

@Override
public boolean equals(Object o) {
if (this == o) {
@@ -95,18 +95,18 @@ public boolean equals(Object o) {
ShortLivedTokenIdentifier that = (ShortLivedTokenIdentifier) o;
return Objects.equals(ownerId, that.ownerId) &&
Objects.equals(expiry, that.expiry) &&
-        Objects.equals(certSerialId, that.certSerialId);
+        Objects.equals(secretKeyId, that.secretKeyId);
}

@Override
public int hashCode() {
-    return Objects.hash(ownerId, expiry, certSerialId);
+    return Objects.hash(ownerId, expiry, secretKeyId);
}

@Override
public String toString() {
return "ownerId=" + ownerId +
", expiry=" + expiry +
-        ", certSerialId=" + certSerialId;
+        ", secretKeyId=" + secretKeyId;
}
}
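`ProtobufUtils.toProtobuf`/`fromProtobuf`, imported by the two identifier classes above, presumably maps `java.util.UUID` onto a proto message. A minimal sketch of that mapping follows; the message and field names are assumptions, since the real definitions are not part of this diff.

```java
// Minimal sketch of a UUID <-> protobuf mapping: carry the 128-bit UUID
// as its two 64-bit halves. Message and field names are assumptions.
public final class UuidCodec {
  private UuidCodec() {
  }

  public static HddsProtos.UUID toProtobuf(UUID uuid) {
    return HddsProtos.UUID.newBuilder()
        .setMostSigBits(uuid.getMostSignificantBits())
        .setLeastSigBits(uuid.getLeastSignificantBits())
        .build();
  }

  public static UUID fromProtobuf(HddsProtos.UUID proto) {
    return new UUID(proto.getMostSigBits(), proto.getLeastSigBits());
  }
}
```

A fixed-width two-long encoding keeps the wire size constant and avoids string parsing, which is presumably why the identifiers carry a UUID rather than the old serial-id string.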
SecurityConfig.java
@@ -510,4 +510,8 @@ public long getSslKeystoreReloadInterval() {
public long getSslTruststoreReloadInterval() {
return truststoreReloadInterval;
}

+  public boolean isTokenEnabled() {
+    return blockTokenEnabled || containerTokenEnabled;
+  }
}
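The new helper folds the two token switches into one check, so callers like `HddsDatanodeService` below need a single condition. A usage sketch; the key names are assumptions based on Ozone's config naming and are not shown in this diff.

```java
// Sketch: either token type alone should flip isTokenEnabled().
// Key names ("hdds.block.token.enabled", "hdds.container.token.enabled")
// are assumptions.
OzoneConfiguration conf = new OzoneConfiguration();
conf.setBoolean("hdds.block.token.enabled", true);
conf.setBoolean("hdds.container.token.enabled", false);

SecurityConfig secConf = new SecurityConfig(conf);
assert secConf.isTokenEnabled();  // block tokens alone are sufficient
```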
OzoneSecurityUtil.java
@@ -37,6 +37,7 @@
import org.apache.hadoop.hdds.conf.ConfigurationSource;

import org.apache.commons.validator.routines.InetAddressValidator;

import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_HTTP_SECURITY_ENABLED_DEFAULT;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_HTTP_SECURITY_ENABLED_KEY;
import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SECURITY_ENABLED_DEFAULT;
HddsDatanodeService.java
@@ -40,6 +40,8 @@
import org.apache.hadoop.hdds.datanode.metadata.DatanodeCRLStore;
import org.apache.hadoop.hdds.datanode.metadata.DatanodeCRLStoreImpl;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.security.symmetric.DefaultSecretKeyClient;
+import org.apache.hadoop.hdds.security.symmetric.SecretKeyClient;
import org.apache.hadoop.hdds.security.x509.SecurityConfig;
import org.apache.hadoop.hdds.security.x509.certificate.client.CertificateClient;
import org.apache.hadoop.hdds.security.x509.certificate.client.DNCertificateClient;
@@ -96,6 +98,7 @@ public class HddsDatanodeService extends GenericCli implements ServicePlugin {
private DatanodeStateMachine datanodeStateMachine;
private List<ServicePlugin> plugins;
private CertificateClient dnCertClient;
+  private SecretKeyClient secretKeyClient;
private String component;
private HddsDatanodeHttpServer httpServer;
private boolean printBanner;
@@ -290,9 +293,14 @@ public void start() {

if (OzoneSecurityUtil.isSecurityEnabled(conf)) {
dnCertClient = initializeCertificateClient(dnCertClient);

+      if (secConf.isTokenEnabled()) {
+        secretKeyClient = DefaultSecretKeyClient.create(conf);
+        secretKeyClient.start(conf);
+      }
}
datanodeStateMachine = new DatanodeStateMachine(datanodeDetails, conf,
-        dnCertClient, this::terminateDatanode, dnCRLStore);
+        dnCertClient, secretKeyClient, this::terminateDatanode, dnCRLStore);
try {
httpServer = new HddsDatanodeHttpServer(conf);
httpServer.start();
@@ -548,6 +556,10 @@ public void stop() {
LOG.error("Datanode CRL store stop failed", ex);
}
RatisDropwizardExports.clear(ratisMetricsMap, ratisReporterList);

+      if (secretKeyClient != null) {
+        secretKeyClient.stop();
+      }
}
}

@@ -586,6 +598,11 @@ public void setCertificateClient(CertificateClient client) {
dnCertClient = client;
}

+  @VisibleForTesting
+  public void setSecretKeyClient(SecretKeyClient client) {
+    this.secretKeyClient = client;
+  }

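A sketch of the test wiring this setter enables: swap in a stub so the datanode can exercise token paths without a live SCM. The Mockito usage is illustrative; only `setSecretKeyClient()` itself comes from this diff.

```java
// Illustrative test setup: replace the SCM-backed secret-key client with
// a Mockito stub before starting the datanode service.
SecretKeyClient stub = Mockito.mock(SecretKeyClient.class);
hddsDatanodeService.setSecretKeyClient(stub);
```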
@Override
public void printError(Throwable error) {
LOG.error("Exception in HddsDatanodeService.", error);
DatanodeStateMachine.java
@@ -38,6 +38,7 @@
import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReportsProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.NodeReportProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.PipelineReportsProto;
+import org.apache.hadoop.hdds.security.symmetric.SecretKeyClient;
import org.apache.hadoop.hdds.security.x509.certificate.client.CertificateClient;
import org.apache.hadoop.hdds.upgrade.HDDSLayoutVersionManager;
import org.apache.hadoop.hdds.utils.IOUtils;
@@ -139,6 +140,7 @@ public class DatanodeStateMachine implements Closeable {
public DatanodeStateMachine(DatanodeDetails datanodeDetails,
ConfigurationSource conf,
CertificateClient certClient,
+      SecretKeyClient secretKeyClient,
HddsDatanodeStopService hddsDatanodeStopService,
DatanodeCRLStore crlStore) throws IOException {
DatanodeConfiguration dnConf =
@@ -171,7 +173,7 @@ public DatanodeStateMachine(DatanodeDetails datanodeDetails,
constructionLock.writeLock().lock();
try {
container = new OzoneContainer(this.datanodeDetails,
-          conf, context, certClient);
+          conf, context, certClient, secretKeyClient);
} finally {
constructionLock.writeLock().unlock();
}
@@ -204,7 +206,7 @@ public DatanodeStateMachine(DatanodeDetails datanodeDetails,
ecReconstructionMetrics = ECReconstructionMetrics.create();

ecReconstructionCoordinator = new ECReconstructionCoordinator(
-        conf, certClient, context, ecReconstructionMetrics);
+        conf, certClient, secretKeyClient, context, ecReconstructionMetrics);

// This is created as an instance variable as Mockito needs to access it in
// a test. The test mocks it in a running mini-cluster.
@@ -245,6 +247,12 @@ public DatanodeStateMachine(DatanodeDetails datanodeDetails,
queueMetrics = DatanodeQueueMetrics.create(this);
}

+  @VisibleForTesting
+  public DatanodeStateMachine(DatanodeDetails datanodeDetails,
+      ConfigurationSource conf) throws IOException {
+    this(datanodeDetails, conf, null, null, null, null);
+  }

**Contributor:** I am not really a fan of adding a constructor just for testing...
I don't really have hard reasons; it just seems unnatural to me to add a short constructor in order to type less in tests, instead of declaring explicitly there that we deliberately skip passing things that might be used during the lifetime of the object.
But please note, this is something I can live with; I just wanted to express that I would not do it.

**Contributor (author):** I'm not a fan of it either, but I feel guilty repeating null, null, null, null... 10 times in tests. It would be easy to create such a constructor in a test folder, but that would not be shared across Maven components.
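The trade-off discussed above, shown at the call site (illustrative; both constructor signatures appear in this diff):

```java
// Without the convenience constructor, each test spells out the unused
// collaborators explicitly:
DatanodeStateMachine sm = new DatanodeStateMachine(
    datanodeDetails, conf, null, null, null, null);

// With it, a test states only what it actually uses:
DatanodeStateMachine sm2 = new DatanodeStateMachine(datanodeDetails, conf);
```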

private int getEndPointTaskThreadPoolSize() {
// TODO(runzhiwang): current only support one recon, if support multiple
// recon in future reconServerCount should be the real number of recon
ECReconstructionCoordinator.java
@@ -35,6 +35,7 @@
import org.apache.hadoop.hdds.scm.storage.BlockLocationInfo;
import org.apache.hadoop.hdds.scm.storage.BufferPool;
import org.apache.hadoop.hdds.scm.storage.ECBlockOutputStream;
+import org.apache.hadoop.hdds.security.symmetric.SecretKeySignerClient;
import org.apache.hadoop.hdds.security.token.ContainerTokenIdentifier;
import org.apache.hadoop.hdds.security.x509.certificate.client.CertificateClient;
import org.apache.hadoop.hdds.utils.IOUtils;
@@ -110,9 +111,9 @@ public class ECReconstructionCoordinator implements Closeable {
private final ECReconstructionMetrics metrics;
private final StateContext context;

-  public ECReconstructionCoordinator(ConfigurationSource conf,
-      CertificateClient certificateClient,
-      StateContext context,
+  public ECReconstructionCoordinator(
+      ConfigurationSource conf, CertificateClient certificateClient,
+      SecretKeySignerClient secretKeyClient, StateContext context,
ECReconstructionMetrics metrics) throws IOException {
this.context = context;
this.containerOperationClient = new ECContainerOperationClient(conf,
@@ -128,7 +129,7 @@ public ECReconstructionCoordinator(ConfigurationSource conf,
new ThreadPoolExecutor.CallerRunsPolicy());
this.blockInputStreamFactory = BlockInputStreamFactoryImpl
.getInstance(byteBufferPool, () -> ecReconstructExecutor);
-    tokenHelper = new TokenHelper(conf, certificateClient);
+    tokenHelper = new TokenHelper(conf, secretKeyClient);
this.clientMetrics = ContainerClientMetrics.acquire();
this.metrics = metrics;
}
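`TokenHelper` now receives the shared `SecretKeySignerClient` instead of a certificate client. A hedged sketch of the signing flow this enables; `ManagedSecretKey` and its `sign()`/`getId()` methods are assumptions about the symmetric-key API, while `setSecretKeyId()`, `getBytes()`, `getKind()`, and `getService()` come from the identifier classes above.

```java
// Hypothetical signing path inside a helper like TokenHelper: fetch the
// current symmetric key, stamp its id into the identifier, then sign.
ManagedSecretKey current = secretKeyClient.getCurrentSecretKey();
tokenIdentifier.setSecretKeyId(current.getId());          // records which key signed it
byte[] password = current.sign(tokenIdentifier.getBytes());
Token<ContainerTokenIdentifier> token = new Token<>(
    tokenIdentifier.getBytes(), password,
    tokenIdentifier.getKind(),
    new Text(tokenIdentifier.getService()));
```

This also explains the removed `tokenHelper.stop()` in `close()` below: the secret-key client's lifecycle is now owned by `HddsDatanodeService`, which starts and stops it, so per-consumer helpers no longer manage key material.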
@@ -390,7 +391,6 @@ public void close() throws IOException {
if (containerOperationClient != null) {
containerOperationClient.close();
}
-    tokenHelper.stop();
}

private Pipeline rebuildInputPipeline(ECReplicationConfig repConfig,