HADOOP-17123. remove guava Preconditions from Hadoop-common module #2134

Closed
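This PR replaces every import of com.google.common.base.Preconditions in the
hadoop-common module with an internal org.apache.hadoop.util.noguava.Preconditions,
renaming Guava's checkArgument to checkIsTrue at each call site (checkNotNull
call sites keep their name). For orientation, here is a minimal sketch of what
such a shim could look like, inferred purely from the call sites visible in
this diff; the overload set and the %s-style message formatting are
assumptions, and the PR's actual utility class is not shown in this view.

package org.apache.hadoop.util.noguava;

/**
 * Hypothetical sketch of a Guava-free Preconditions shim. Method names are
 * taken from the call sites in this PR; everything else is an assumption.
 */
public final class Preconditions {

  private Preconditions() {
  }

  /** Mirrors Guava: throws NullPointerException if the reference is null. */
  public static <T> T checkNotNull(T reference) {
    if (reference == null) {
      throw new NullPointerException();
    }
    return reference;
  }

  /** Variant with a message template using %s placeholders (assumed). */
  public static <T> T checkNotNull(T reference, String template,
      Object... args) {
    if (reference == null) {
      throw new NullPointerException(format(template, args));
    }
    return reference;
  }

  /** Renamed replacement for Guava's checkArgument. */
  public static void checkIsTrue(boolean expression) {
    if (!expression) {
      throw new IllegalArgumentException();
    }
  }

  public static void checkIsTrue(boolean expression, String template,
      Object... args) {
    if (!expression) {
      throw new IllegalArgumentException(format(template, args));
    }
  }

  private static String format(String template, Object... args) {
    // String.format covers the %s templates used in this diff; a real shim
    // might format more defensively instead of letting format() throw.
    return String.format(String.valueOf(template), args);
  }
}

With a shim along these lines, the mechanical edits below are behavioral
no-ops: checkIsTrue throws IllegalArgumentException exactly where Guava's
checkArgument did.

Changes from all commits: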
@@ -105,7 +105,7 @@
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import com.google.common.base.Strings;

 import static org.apache.commons.lang3.StringUtils.isBlank;

@@ -427,7 +427,7 @@ public static class DeprecationDelta {
     DeprecationDelta(String key, String[] newKeys, String customMessage) {
       Preconditions.checkNotNull(key);
       Preconditions.checkNotNull(newKeys);
-      Preconditions.checkArgument(newKeys.length > 0);
+      Preconditions.checkIsTrue(newKeys.length > 0);
       this.key = key;
       this.newKeys = newKeys;
       this.customMessage = customMessage;

@@ -1374,10 +1374,10 @@ public void set(String name, String value) {
    * @throws IllegalArgumentException when the value or name is null.
    */
   public void set(String name, String value, String source) {
-    Preconditions.checkArgument(
+    Preconditions.checkIsTrue(
         name != null,
         "Property name must not be null");
-    Preconditions.checkArgument(
+    Preconditions.checkIsTrue(
         value != null,
         "The value of property %s must not be null", name);
     name = name.trim();
[next file]

@@ -19,7 +19,7 @@
 package org.apache.hadoop.conf;

 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import com.google.common.collect.Maps;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.conf.ReconfigurationUtil.PropertyChange;
[next file]

@@ -20,7 +20,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;

 import java.io.IOException;

@@ -47,8 +47,8 @@ public CipherSuite getCipherSuite() {
    */
   @Override
   public void calculateIV(byte[] initIV, long counter, byte[] IV) {
-    Preconditions.checkArgument(initIV.length == AES_BLOCK_SIZE);
-    Preconditions.checkArgument(IV.length == AES_BLOCK_SIZE);
+    Preconditions.checkIsTrue(initIV.length == AES_BLOCK_SIZE);
+    Preconditions.checkIsTrue(IV.length == AES_BLOCK_SIZE);

     int i = IV.length; // IV length
     int j = 0; // counter bytes index
[next file]

@@ -30,7 +30,7 @@
 import java.util.Queue;
 import java.util.concurrent.ConcurrentLinkedQueue;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.ByteBufferPositionedReadable;

@@ -544,7 +544,7 @@ public void seek(long pos) throws IOException {
   /** Skip n bytes */
   @Override
   public long skip(long n) throws IOException {
-    Preconditions.checkArgument(n >= 0, "Negative skip length.");
+    Preconditions.checkIsTrue(n >= 0, "Negative skip length.");
     checkStream();

     if (n == 0) {

@@ -688,7 +688,7 @@ public void reset() throws IOException {

   @Override
   public boolean seekToNewSource(long targetPos) throws IOException {
-    Preconditions.checkArgument(targetPos >= 0,
+    Preconditions.checkIsTrue(targetPos >= 0,
         "Cannot seek to negative offset.");
     checkStream();
     if (!(in instanceof Seekable)) {
[next file]

@@ -29,7 +29,7 @@
 import org.apache.hadoop.fs.StreamCapabilities;
 import org.apache.hadoop.fs.Syncable;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;

 /**
  * CryptoOutputStream encrypts data. It is not thread-safe. AES CTR mode is
[next file]

@@ -29,7 +29,7 @@
 import org.apache.hadoop.fs.Seekable;
 import org.apache.hadoop.util.CleanerUtil;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -67,7 +67,7 @@ public static void checkCodec(CryptoCodec codec) {

   /** Check and floor buffer size */
   public static int checkBufferSize(CryptoCodec codec, int bufferSize) {
-    Preconditions.checkArgument(bufferSize >= MIN_BUFFER_SIZE,
+    Preconditions.checkIsTrue(bufferSize >= MIN_BUFFER_SIZE,
         "Minimum value of buffer size is " + MIN_BUFFER_SIZE + ".");
     return bufferSize - bufferSize % codec.getCipherSuite()
         .getAlgorithmBlockSize();
[next file]

@@ -29,7 +29,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

[next file]

@@ -29,7 +29,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import org.apache.hadoop.crypto.random.OpensslSecureRandom;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.slf4j.Logger;
[next file]

@@ -29,7 +29,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.NativeCodeLoader;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import org.apache.hadoop.util.PerformanceAdvisory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -206,7 +206,7 @@ public void init(int mode, byte[] key, byte[] iv) {
   public int update(ByteBuffer input, ByteBuffer output)
       throws ShortBufferException {
     checkState();
-    Preconditions.checkArgument(input.isDirect() && output.isDirect(),
+    Preconditions.checkIsTrue(input.isDirect() && output.isDirect(),
         "Direct buffers are required.");
     int len = update(context, input, input.position(), input.remaining(),
         output, output.position(), output.remaining());

@@ -246,7 +246,7 @@ public int update(ByteBuffer input, ByteBuffer output)
   public int doFinal(ByteBuffer output) throws ShortBufferException,
       IllegalBlockSizeException, BadPaddingException {
     checkState();
-    Preconditions.checkArgument(output.isDirect(), "Direct buffer is required.");
+    Preconditions.checkIsTrue(output.isDirect(), "Direct buffer is required.");
     int len = doFinal(context, output, output.position(), output.remaining());
     output.position(output.position() + len);
     return len;
[next file]

@@ -18,7 +18,7 @@

 package org.apache.hadoop.crypto.key;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;

@@ -433,7 +433,7 @@ public Metadata getMetadata(String name) throws IOException {
   @Override
   public KeyVersion createKey(String name, byte[] material,
       Options options) throws IOException {
-    Preconditions.checkArgument(name.equals(StringUtils.toLowerCase(name)),
+    Preconditions.checkIsTrue(name.equals(StringUtils.toLowerCase(name)),
         "Uppercase key names are unsupported: %s", name);
     writeLock.lock();
     try {
[next file]

@@ -25,11 +25,7 @@
 import java.util.List;
 import java.util.ListIterator;

-import javax.crypto.Cipher;
-import javax.crypto.spec.IvParameterSpec;
-import javax.crypto.spec.SecretKeySpec;
-
-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.crypto.CryptoCodec;

@@ -335,7 +331,7 @@ public EncryptedKeyVersion reencryptEncryptedKey(EncryptedKeyVersion ekv)
     final KeyVersion ekNow = keyProvider.getCurrentKey(ekName);
     Preconditions
         .checkNotNull(ekNow, "KeyVersion name '%s' does not exist", ekName);
-    Preconditions.checkArgument(ekv.getEncryptedKeyVersion().getVersionName()
+    Preconditions.checkIsTrue(ekv.getEncryptedKeyVersion().getVersionName()
         .equals(KeyProviderCryptoExtension.EEK),
         "encryptedKey version name must be '%s', but found '%s'",
         KeyProviderCryptoExtension.EEK,

@@ -375,7 +371,7 @@ public void reencryptEncryptedKeys(List<EncryptedKeyVersion> ekvs)
       Preconditions.checkNotNull(ekName, "Key name is null");
       Preconditions.checkNotNull(ekv.getEncryptedKeyVersion(),
           "EncryptedKeyVersion is null");
-      Preconditions.checkArgument(
+      Preconditions.checkIsTrue(
           ekv.getEncryptedKeyVersion().getVersionName()
               .equals(KeyProviderCryptoExtension.EEK),
           "encryptedKey version name must be '%s', but found '%s'",

@@ -387,7 +383,7 @@ public void reencryptEncryptedKeys(List<EncryptedKeyVersion> ekvs)
         Preconditions
             .checkNotNull(ekNow, "Key name '%s' does not exist", ekName);
       } else {
-        Preconditions.checkArgument(ekNow.getName().equals(ekName),
+        Preconditions.checkIsTrue(ekNow.getName().equals(ekName),
            "All keys must have the same key name. Expected '%s' "
                + "but found '%s'", ekNow.getName(), ekName);
       }

@@ -446,7 +442,7 @@ public KeyVersion decryptEncryptedKey(
     Preconditions
         .checkNotNull(encryptionKey, "KeyVersion name '%s' does not exist",
             encryptionKeyVersionName);
-    Preconditions.checkArgument(
+    Preconditions.checkIsTrue(
         encryptedKeyVersion.getEncryptedKeyVersion().getVersionName()
             .equals(KeyProviderCryptoExtension.EEK),
         "encryptedKey version name must be '%s', but found '%s'",
[next file]

@@ -80,7 +80,7 @@

 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import com.google.common.base.Strings;

 import static org.apache.hadoop.util.KMSUtil.checkNotEmpty;

@@ -803,7 +803,7 @@ public KeyVersion decryptEncryptedKey(
     checkNotNull(encryptedKeyVersion.getEncryptionKeyVersionName(),
         "versionName");
     checkNotNull(encryptedKeyVersion.getEncryptedKeyIv(), "iv");
-    Preconditions.checkArgument(
+    Preconditions.checkIsTrue(
         encryptedKeyVersion.getEncryptedKeyVersion().getVersionName()
             .equals(KeyProviderCryptoExtension.EEK),
         "encryptedKey version name must be '%s', is '%s'",

@@ -836,7 +836,7 @@ public EncryptedKeyVersion reencryptEncryptedKey(EncryptedKeyVersion ekv)
     checkNotNull(ekv.getEncryptionKeyVersionName(), "versionName");
     checkNotNull(ekv.getEncryptedKeyIv(), "iv");
     checkNotNull(ekv.getEncryptedKeyVersion(), "encryptedKey");
-    Preconditions.checkArgument(ekv.getEncryptedKeyVersion().getVersionName()
+    Preconditions.checkIsTrue(ekv.getEncryptedKeyVersion().getVersionName()
         .equals(KeyProviderCryptoExtension.EEK),
         "encryptedKey version name must be '%s', is '%s'",
         KeyProviderCryptoExtension.EEK,

@@ -873,15 +873,15 @@ public void reencryptEncryptedKeys(List<EncryptedKeyVersion> ekvs)
       checkNotNull(ekv.getEncryptionKeyVersionName(), "versionName");
       checkNotNull(ekv.getEncryptedKeyIv(), "iv");
       checkNotNull(ekv.getEncryptedKeyVersion(), "encryptedKey");
-      Preconditions.checkArgument(ekv.getEncryptedKeyVersion().getVersionName()
+      Preconditions.checkIsTrue(ekv.getEncryptedKeyVersion().getVersionName()
          .equals(KeyProviderCryptoExtension.EEK),
          "encryptedKey version name must be '%s', is '%s'",
          KeyProviderCryptoExtension.EEK,
          ekv.getEncryptedKeyVersion().getVersionName());
       if (keyName == null) {
         keyName = ekv.getEncryptionKeyName();
       } else {
-        Preconditions.checkArgument(keyName.equals(ekv.getEncryptionKeyName()),
+        Preconditions.checkIsTrue(keyName.equals(ekv.getEncryptionKeyName()),
            "All EncryptedKey must have the same key name.");
       }
       jsonPayload.add(KMSUtil.toJSON(ekv));

@@ -892,7 +892,7 @@ public void reencryptEncryptedKeys(List<EncryptedKeyVersion> ekvs)
     conn.setRequestProperty(CONTENT_TYPE, APPLICATION_JSON_MIME);
     final List<Map> response =
         call(conn, jsonPayload, HttpURLConnection.HTTP_OK, List.class);
-    Preconditions.checkArgument(response.size() == ekvs.size(),
+    Preconditions.checkIsTrue(response.size() == ekvs.size(),
         "Response size is different than input size.");
     for (int i = 0; i < response.size(); ++i) {
       final Map item = response.get(i);
[next file]

@@ -51,7 +51,7 @@
 import org.slf4j.LoggerFactory;

 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;

 /**
  * A simple LoadBalancing KMSClientProvider that round-robins requests

@@ -286,7 +286,7 @@ public Void call(KMSClientProvider provider) throws IOException {
   // This request is sent to all providers in the load-balancing group
   @Override
   public void warmUpEncryptedKeys(String... keyNames) throws IOException {
-    Preconditions.checkArgument(providers.length > 0,
+    Preconditions.checkIsTrue(providers.length > 0,
         "No providers are configured");
     boolean success = false;
     IOException e = null;
[next file]

@@ -33,7 +33,7 @@
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;

@@ -221,14 +221,14 @@ public enum SyncGenerationPolicy {
   public ValueQueue(final int numValues, final float lowWatermark,
       long expiry, int numFillerThreads, SyncGenerationPolicy policy,
       final QueueRefiller<E> refiller) {
-    Preconditions.checkArgument(numValues > 0, "\"numValues\" must be > 0");
-    Preconditions.checkArgument(((lowWatermark > 0)&&(lowWatermark <= 1)),
+    Preconditions.checkIsTrue(numValues > 0, "\"numValues\" must be > 0");
+    Preconditions.checkIsTrue(((lowWatermark > 0)&&(lowWatermark <= 1)),
         "\"lowWatermark\" must be > 0 and <= 1");
     final int watermarkValue = (int) (numValues * lowWatermark);
-    Preconditions.checkArgument(watermarkValue > 0,
+    Preconditions.checkIsTrue(watermarkValue > 0,
         "(int) (\"numValues\" * \"lowWatermark\") must be > 0");
-    Preconditions.checkArgument(expiry > 0, "\"expiry\" must be > 0");
-    Preconditions.checkArgument(numFillerThreads > 0,
+    Preconditions.checkIsTrue(expiry > 0, "\"expiry\" must be > 0");
+    Preconditions.checkIsTrue(numFillerThreads > 0,
         "\"numFillerThreads\" must be > 0");
     Preconditions.checkNotNull(policy, "\"policy\" must not be null");
     this.refiller = refiller;
[next file]

@@ -22,7 +22,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.NativeCodeLoader;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import org.apache.hadoop.util.PerformanceAdvisory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -104,7 +104,7 @@ public void setSeed(long seed) {
    */
   @Override
   final protected int next(int numBits) {
-    Preconditions.checkArgument(numBits >= 0 && numBits <= 32);
+    Preconditions.checkIsTrue(numBits >= 0 && numBits <= 32);
     int numBytes = (numBits + 7) / 8;
     byte b[] = new byte[numBytes];
     int next = 0;
[next file]

@@ -26,7 +26,7 @@
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.ByteBufferPool;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;

 @InterfaceAudience.Private
 @InterfaceStability.Evolving
[next file]

@@ -29,7 +29,7 @@
 import java.util.List;
 import java.util.concurrent.CompletableFuture;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
[next file]

@@ -21,7 +21,7 @@
 import java.io.IOException;
 import java.io.InputStream;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.slf4j.Logger;

@@ -98,11 +98,11 @@ public int read(long position, byte[] buffer, int offset, int length)
    */
   protected void validatePositionedReadArgs(long position,
       byte[] buffer, int offset, int length) throws EOFException {
-    Preconditions.checkArgument(length >= 0, "length is negative");
+    Preconditions.checkIsTrue(length >= 0, "length is negative");
     if (position < 0) {
       throw new EOFException("position is negative");
     }
-    Preconditions.checkArgument(buffer != null, "Null buffer");
+    Preconditions.checkIsTrue(buffer != null, "Null buffer");
     if (buffer.length - offset < length) {
       throw new IndexOutOfBoundsException(
           FSExceptionMessages.TOO_MANY_BYTES_FOR_DEST_BUFFER
[next file]

@@ -65,7 +65,7 @@
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.ShutdownHookManager;

-import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.noguava.Preconditions;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.htrace.core.Tracer;
 import org.slf4j.Logger;