Address Intellij inspection findings #10583

Merged · 22 commits · Jul 3, 2024
@@ -29,7 +29,7 @@
 abstract class BaseOSSFile {
   private final OSS client;
   private final OSSURI uri;
-  private AliyunProperties aliyunProperties;
+  private final AliyunProperties aliyunProperties;
   private SimplifiedObjectMeta metadata;
   private final MetricsContext metrics;
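The change above is the pattern that recurs throughout this PR: IntelliJ's "field may be 'final'" inspection flags fields that are assigned exactly once, in the declaration or in every constructor. Marking them final documents the immutability and turns any future reassignment into a compile error. A minimal before/after sketch of the pattern (the class and field names are illustrative, not taken from the PR):

// Before: IntelliJ flags "field may be 'final'".
class ClientHolder {
  private OSSClientStub client; // assigned only in the constructor

  ClientHolder(OSSClientStub client) {
    this.client = client;
  }
}

// After: the field is final, so reassignment becomes a compile-time error.
class FinalClientHolder {
  private final OSSClientStub client;

  FinalClientHolder(OSSClientStub client) {
    this.client = client;
  }
}

class OSSClientStub {} // stand-in for the real OSS client type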
@@ -101,7 +101,7 @@ public void tearDownBucket(String bucket) {
   }

   public static class Builder {
-    private Map<String, Object> props = Maps.newHashMap();
+    private final Map<String, Object> props = Maps.newHashMap();

     public Builder silent() {
       props.put(AliyunOSSMockApp.PROP_SILENT, true);
@@ -420,11 +420,11 @@ public boolean markSupported() {
    * A caller has caused a request that would cross the {@code maxLength} boundary.
    *
    * @param maxLength The max count of bytes to read.
-   * @param count The count of bytes read.
+   * @param bytesRead The count of bytes read.
    * @throws IOException Subclasses may throw.
    * @since 2.12.0
    */
-  protected void onMaxLength(final long maxLength, final long pCount) throws IOException {
+  protected void onMaxLength(final long maxLength, final long bytesRead) throws IOException {
     // for subclasses
   }
5 changes: 2 additions & 3 deletions api/src/main/java/org/apache/iceberg/PartitionSpec.java
@@ -365,7 +365,7 @@ public static class Builder {
     private final Schema schema;
     private final List<PartitionField> fields = Lists.newArrayList();
     private final Set<String> partitionNames = Sets.newHashSet();
-    private Map<Map.Entry<Integer, String>, PartitionField> dedupFields = Maps.newHashMap();
+    private final Map<Map.Entry<Integer, String>, PartitionField> dedupFields = Maps.newHashMap();
     private int specId = 0;
     private final AtomicInteger lastAssignedFieldId =
         new AtomicInteger(unpartitionedLastAssignedId());
@@ -409,8 +409,7 @@ private void checkAndAddPartitionName(String name, Integer sourceColumnId) {
             name);
       }
     }
-    Preconditions.checkArgument(
-        name != null && !name.isEmpty(), "Cannot use empty or null partition name: %s", name);
+    Preconditions.checkArgument(!name.isEmpty(), "Cannot use empty partition name: %s", name);
     Preconditions.checkArgument(
         !partitionNames.contains(name), "Cannot use partition name more than once: %s", name);
     partitionNames.add(name);
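The second hunk above drops the name != null clause, presumably because IntelliJ's constant-conditions inspection proved it always true at that point: if the code path leading to this check already dereferences name, the null test is dead. One consequence worth noting: a null name would now surface as a NullPointerException from name.isEmpty() rather than an IllegalArgumentException. A sketch of the simplified check using plain Guava (the real class uses Iceberg's relocated Guava; PartitionNameValidator and validatePartitionName are illustrative names, not Iceberg API):

import com.google.common.base.Preconditions;
import java.util.HashSet;
import java.util.Set;

class PartitionNameValidator {
  private final Set<String> partitionNames = new HashSet<>();

  void validatePartitionName(String name) {
    // name.isEmpty() dereferences name, so if earlier statements already
    // proved name non-null, a separate null check is dead code.
    Preconditions.checkArgument(!name.isEmpty(), "Cannot use empty partition name: %s", name);
    Preconditions.checkArgument(
        !partitionNames.contains(name), "Cannot use partition name more than once: %s", name);
    partitionNames.add(name);
  }
}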
4 changes: 2 additions & 2 deletions api/src/main/java/org/apache/iceberg/Schema.java
@@ -67,8 +67,8 @@ public class Schema implements Serializable {
   private transient Map<Integer, Accessor<StructLike>> idToAccessor = null;
   private transient Map<Integer, String> idToName = null;
   private transient Set<Integer> identifierFieldIdSet = null;
-  private transient Map<Integer, Integer> idsToReassigned;
-  private transient Map<Integer, Integer> idsToOriginal;
+  private final transient Map<Integer, Integer> idsToReassigned;
+  private final transient Map<Integer, Integer> idsToOriginal;

   public Schema(List<NestedField> columns, Map<String, Integer> aliases) {
     this(columns, aliases, ImmutableSet.of());
2 changes: 1 addition & 1 deletion api/src/main/java/org/apache/iceberg/SortOrderBuilder.java
@@ -113,5 +113,5 @@ default R desc(Term term) {
   default R caseSensitive(boolean caseSensitive) {
     throw new UnsupportedOperationException(
         this.getClass().getName() + " doesn't implement caseSensitive");
-  };
+  }
 }
2 changes: 1 addition & 1 deletion api/src/main/java/org/apache/iceberg/io/FileIO.java
@@ -94,7 +94,7 @@ default void deleteFile(OutputFile file) {
    */
   default Map<String, String> properties() {
     throw new UnsupportedOperationException(
-        String.format("%s does not expose configuration properties", this.getClass().toString()));
+        String.format("%s does not expose configuration properties", this.getClass()));
   }

   /**
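String.format already converts a %s argument through String.valueOf, which calls toString() on non-null values, so the explicit call adds nothing. A small sketch of the equivalence (class and variable names are illustrative):

public class FormatDemo {
  public static void main(String[] args) {
    Class<?> cls = FormatDemo.class;
    String explicit = String.format("%s does not expose configuration properties", cls.toString());
    String implicit = String.format("%s does not expose configuration properties", cls);
    // %s goes through String.valueOf, so both render identically.
    System.out.println(explicit.equals(implicit)); // true
  }
}

One behavioral nuance: for a null argument, %s prints "null" while an explicit .toString() would throw, though this.getClass() can never be null here.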
15 changes: 3 additions & 12 deletions api/src/main/java/org/apache/iceberg/metrics/DefaultTimer.java
@@ -66,31 +66,22 @@ public void record(long amount, TimeUnit unit) {

   @Override
   public <T> T time(Supplier<T> supplier) {
-    Timed timed = start();
-    try {
+    try (Timed ignore = start()) {
       return supplier.get();
-    } finally {
-      timed.stop();
     }
   }

   @Override
   public <T> T timeCallable(Callable<T> callable) throws Exception {
-    Timed timed = start();
-    try {
+    try (Timed ignore = start()) {
       return callable.call();
-    } finally {
-      timed.stop();
     }
   }

   @Override
   public void time(Runnable runnable) {
-    Timed timed = start();
-    try {
+    try (Timed ignore = start()) {
       runnable.run();
-    } finally {
-      timed.stop();
     }
   }
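This refactor only compiles because Timed is AutoCloseable, and its close() presumably delegates to stop(), so try-with-resources stops the timer on every exit path exactly like the old finally block, with less ceremony. A self-contained sketch of the pattern under that assumption (StopwatchTimed and TimerDemo are illustrative, not Iceberg classes):

import java.util.function.Supplier;

interface Timed extends AutoCloseable {
  void stop();

  @Override
  default void close() {
    stop(); // try-with-resources calls close(), which stops the timer
  }
}

class StopwatchTimed implements Timed {
  private final long startNanos = System.nanoTime();

  @Override
  public void stop() {
    System.out.printf("elapsed: %d ns%n", System.nanoTime() - startNanos);
  }
}

public class TimerDemo {
  static <T> T time(Supplier<T> supplier) {
    // Equivalent to start()/try/finally/stop(): stop runs on return or throw.
    try (Timed ignore = new StopwatchTimed()) {
      return supplier.get();
    }
  }

  public static void main(String[] args) {
    System.out.println(time(() -> 21 + 21));
  }
}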
4 changes: 2 additions & 2 deletions api/src/main/java/org/apache/iceberg/util/BucketUtil.java
@@ -36,7 +36,7 @@ public class BucketUtil {
   private BucketUtil() {}

   public static int hash(int value) {
-    return MURMUR3.hashLong((long) value).asInt();
+    return MURMUR3.hashLong(value).asInt();
   }

   public static int hash(long value) {
@@ -56,7 +56,7 @@ private static long doubleToLongBits(double value) {
   }

   public static int hash(float value) {
-    return MURMUR3.hashLong(doubleToLongBits((double) value)).asInt();
+    return MURMUR3.hashLong(doubleToLongBits(value)).asInt();
   }

   public static int hash(double value) {
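Both removed casts are no-ops: Java applies widening primitive conversion automatically at the call site, and int-to-long and float-to-double widenings are exact, since every value of the narrower type is representable in the wider one. A quick sketch (hashInput is an illustrative stand-in for hashLong's parameter):

public class WideningDemo {
  static long hashInput(long value) { // models a method taking long
    return value * 31;
  }

  public static void main(String[] args) {
    int i = 42;
    // The compiler widens i to long implicitly; the cast is redundant.
    System.out.println(hashInput(i) == hashInput((long) i)); // true

    float f = 1.5f;
    double implicitWidening = f;        // float widens to double exactly
    double explicitCast = (double) f;
    System.out.println(implicitWidening == explicitCast); // true
  }
}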
@@ -22,7 +22,7 @@

 public class TestableCloseableIterable implements CloseableIterable<Integer> {
   private Boolean closed = false;
-  private TestableCloseableIterator iterator = new TestableCloseableIterator();
+  private final TestableCloseableIterator iterator = new TestableCloseableIterator();

   @Override
   public CloseableIterator<Integer> iterator() {
@@ -383,7 +383,7 @@ private int hashBytes(byte[] bytes, int offset, int length) {
    */
   private static UUID newUUID(byte[] bytes) {
     try {
-      return uuidBytesConstructor.newInstance((Object) bytes);
+      return uuidBytesConstructor.newInstance(bytes);
     } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
       throw new RuntimeException(e);
     }
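The (Object) cast is redundant because of how varargs binds arrays: Constructor.newInstance(Object...) spreads an argument only if it is assignable to Object[], and a byte[] is not, so the compiler wraps it in a one-element array either way. The cast would matter for reference arrays such as String[]. A sketch of the rule:

public class VarargsDemo {
  static int count(Object... args) {
    return args.length;
  }

  public static void main(String[] args) {
    byte[] bytes = {1, 2, 3};
    String[] strings = {"a", "b", "c"};
    System.out.println(count(bytes));            // 1: byte[] is wrapped, cast not needed
    System.out.println(count(strings));          // 3: String[] spreads into the varargs
    System.out.println(count((Object) strings)); // 1: cast forces single-argument wrapping
  }
}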
@@ -32,6 +32,7 @@

 import java.math.BigDecimal;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.UUID;
 import java.util.stream.Collectors;
 import org.apache.iceberg.PartitionSpec;
@@ -309,7 +310,7 @@ public void testBucketStringInclusive() {

   @Test
   public void testBucketByteBufferStrict() throws Exception {
-    ByteBuffer value = ByteBuffer.wrap("abcdefg".getBytes("UTF-8"));
+    ByteBuffer value = ByteBuffer.wrap("abcdefg".getBytes(StandardCharsets.UTF_8));
     Schema schema = new Schema(optional(1, "value", Types.BinaryType.get()));
     PartitionSpec spec = PartitionSpec.builderFor(schema).bucket("value", 10).build();

@@ -322,15 +323,15 @@ public void testBucketByteBufferStrict() throws Exception {
     assertProjectionStrictValue(
         spec, greaterThanOrEqual("value", value), Expression.Operation.FALSE);

-    ByteBuffer anotherValue = ByteBuffer.wrap("abcdehij".getBytes("UTF-8"));
+    ByteBuffer anotherValue = ByteBuffer.wrap("abcdehij".getBytes(StandardCharsets.UTF_8));
     assertProjectionStrict(
         spec, notIn("value", value, anotherValue), Expression.Operation.NOT_IN, "[4, 6]");
     assertProjectionStrictValue(spec, in("value", value, anotherValue), Expression.Operation.FALSE);
   }

   @Test
   public void testBucketByteBufferInclusive() throws Exception {
-    ByteBuffer value = ByteBuffer.wrap("abcdefg".getBytes("UTF-8"));
+    ByteBuffer value = ByteBuffer.wrap("abcdefg".getBytes(StandardCharsets.UTF_8));
     Schema schema = new Schema(optional(1, "value", Types.BinaryType.get()));
     PartitionSpec spec = PartitionSpec.builderFor(schema).bucket("value", 10).build();

@@ -344,7 +345,7 @@ public void testBucketByteBufferInclusive() throws Exception {
     assertProjectionInclusiveValue(
         spec, greaterThanOrEqual("value", value), Expression.Operation.TRUE);

-    ByteBuffer anotherValue = ByteBuffer.wrap("abcdehij".getBytes("UTF-8"));
+    ByteBuffer anotherValue = ByteBuffer.wrap("abcdehij".getBytes(StandardCharsets.UTF_8));
     assertProjectionInclusive(
         spec, in("value", value, anotherValue), Expression.Operation.IN, "[4, 6]");
     assertProjectionInclusiveValue(
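String.getBytes(String) looks up the charset by name at runtime and declares the checked UnsupportedEncodingException, whereas String.getBytes(Charset) with StandardCharsets.UTF_8 is resolved at compile time, cannot fail, and skips the name lookup. A small sketch of the difference:

import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class CharsetDemo {
  public static void main(String[] args) throws UnsupportedEncodingException {
    // Old style: charset resolved by name at runtime, checked exception to handle.
    byte[] byName = "abcdefg".getBytes("UTF-8");

    // New style: no checked exception, no string lookup, same bytes.
    byte[] byCharset = "abcdefg".getBytes(StandardCharsets.UTF_8);

    System.out.println(Arrays.equals(byName, byCharset)); // true
  }
}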
@@ -32,6 +32,7 @@

 import java.math.BigDecimal;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.stream.Collectors;
 import org.apache.iceberg.PartitionSpec;
 import org.apache.iceberg.Schema;
@@ -447,10 +448,11 @@ public void testStringInclusive() {

   @Test
   public void testBinaryStrict() throws Exception {
-    ByteBuffer value = ByteBuffer.wrap("abcdefg".getBytes("UTF-8"));
+    ByteBuffer value = ByteBuffer.wrap("abcdefg".getBytes(StandardCharsets.UTF_8));
     Schema schema = new Schema(optional(1, "value", Types.BinaryType.get()));
     PartitionSpec spec = PartitionSpec.builderFor(schema).truncate("value", 5).build();
-    String expectedValue = TransformUtil.base64encode(ByteBuffer.wrap("abcde".getBytes("UTF-8")));
+    String expectedValue =
+        TransformUtil.base64encode(ByteBuffer.wrap("abcde".getBytes(StandardCharsets.UTF_8)));

     assertProjectionStrict(spec, lessThan("value", value), Expression.Operation.LT, expectedValue);
     assertProjectionStrict(
@@ -463,7 +465,7 @@ public void testBinaryStrict() throws Exception {
         spec, notEqual("value", value), Expression.Operation.NOT_EQ, expectedValue);
     assertProjectionStrictValue(spec, equal("value", value), Expression.Operation.FALSE);

-    ByteBuffer anotherValue = ByteBuffer.wrap("abcdehij".getBytes("UTF-8"));
+    ByteBuffer anotherValue = ByteBuffer.wrap("abcdehij".getBytes(StandardCharsets.UTF_8));
     assertProjectionStrict(
         spec,
         notIn("value", value, anotherValue),
@@ -474,10 +476,11 @@

   @Test
   public void testBinaryInclusive() throws Exception {
-    ByteBuffer value = ByteBuffer.wrap("abcdefg".getBytes("UTF-8"));
+    ByteBuffer value = ByteBuffer.wrap("abcdefg".getBytes(StandardCharsets.UTF_8));
     Schema schema = new Schema(optional(1, "value", Types.BinaryType.get()));
     PartitionSpec spec = PartitionSpec.builderFor(schema).truncate("value", 5).build();
-    String expectedValue = TransformUtil.base64encode(ByteBuffer.wrap("abcde".getBytes("UTF-8")));
+    String expectedValue =
+        TransformUtil.base64encode(ByteBuffer.wrap("abcde".getBytes(StandardCharsets.UTF_8)));

     assertProjectionInclusive(
         spec, lessThan("value", value), Expression.Operation.LT_EQ, expectedValue);
@@ -490,7 +493,7 @@
     assertProjectionInclusive(spec, equal("value", value), Expression.Operation.EQ, expectedValue);
     assertProjectionInclusiveValue(spec, notEqual("value", value), Expression.Operation.TRUE);

-    ByteBuffer anotherValue = ByteBuffer.wrap("abcdehij".getBytes("UTF-8"));
+    ByteBuffer anotherValue = ByteBuffer.wrap("abcdehij".getBytes(StandardCharsets.UTF_8));
     assertProjectionInclusive(
         spec,
         in("value", value, anotherValue),
@@ -31,8 +31,8 @@ public class TestCharSequenceMap {

   @Test
   public void nullString() {
-    assertThat(CharSequenceMap.create()).doesNotContainKey((String) null);
-    assertThat(CharSequenceMap.create()).doesNotContainValue((String) null);
+    assertThat(CharSequenceMap.create()).doesNotContainKey(null);
+    assertThat(CharSequenceMap.create()).doesNotContainValue(null);
   }

   @Test
@@ -145,7 +145,7 @@ private int readIntLittleEndian() throws IOException {
     int ch3 = inputStream.read();
     int ch2 = inputStream.read();
     int ch1 = inputStream.read();
-    return (ch1 << 24) + (ch2 << 16) + (ch3 << 8) + (ch4 << 0);
+    return (ch1 << 24) + (ch2 << 16) + (ch3 << 8) + ch4;
   }

   /** Reads the next byteWidth little endian int. */
@@ -166,7 +166,7 @@ private int readIntLittleEndianPaddedOnBitWidth() throws IOException {
         int ch3 = inputStream.read();
         int ch2 = inputStream.read();
         int ch1 = inputStream.read();
-        return (ch1 << 16) + (ch2 << 8) + (ch3 << 0);
+        return (ch1 << 16) + (ch2 << 8) + ch3;
       }
     case 4:
       {
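x << 0 is the identity shift, so dropping it changes nothing; it had been kept only for visual symmetry with the other shift terms. A self-contained sketch of the same little-endian reassembly over an in-memory stream (class and method names here are illustrative):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class LittleEndianDemo {
  // Reads 4 bytes stored least-significant first and reassembles the int.
  static int readIntLittleEndian(InputStream in) throws IOException {
    int ch4 = in.read(); // lowest byte comes first in little-endian order
    int ch3 = in.read();
    int ch2 = in.read();
    int ch1 = in.read();
    return (ch1 << 24) + (ch2 << 16) + (ch3 << 8) + ch4; // '+ (ch4 << 0)' is identical
  }

  public static void main(String[] args) throws IOException {
    // 0x12345678 in little-endian byte order: 78 56 34 12
    InputStream in = new ByteArrayInputStream(new byte[] {0x78, 0x56, 0x34, 0x12});
    System.out.println(Integer.toHexString(readIntLittleEndian(in))); // 12345678
  }
}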
@@ -128,15 +128,12 @@ public class LakeFormationTestBase {

   @BeforeAll
   public static void beforeClass() throws Exception {
-    lfRegisterPathRoleName = LF_REGISTER_PATH_ROLE_PREFIX + UUID.randomUUID().toString();
-    lfPrivilegedRoleName = LF_PRIVILEGED_ROLE_PREFIX + UUID.randomUUID().toString();
-    lfRegisterPathRoleS3PolicyName =
-        LF_REGISTER_PATH_ROLE_S3_POLICY_PREFIX + UUID.randomUUID().toString();
-    lfRegisterPathRoleLfPolicyName =
-        LF_REGISTER_PATH_ROLE_LF_POLICY_PREFIX + UUID.randomUUID().toString();
-    lfRegisterPathRoleIamPolicyName =
-        LF_REGISTER_PATH_ROLE_IAM_POLICY_PREFIX + UUID.randomUUID().toString();
-    lfPrivilegedRolePolicyName = LF_PRIVILEGED_ROLE_POLICY_PREFIX + UUID.randomUUID().toString();
+    lfRegisterPathRoleName = LF_REGISTER_PATH_ROLE_PREFIX + UUID.randomUUID();
+    lfPrivilegedRoleName = LF_PRIVILEGED_ROLE_PREFIX + UUID.randomUUID();
+    lfRegisterPathRoleS3PolicyName = LF_REGISTER_PATH_ROLE_S3_POLICY_PREFIX + UUID.randomUUID();
+    lfRegisterPathRoleLfPolicyName = LF_REGISTER_PATH_ROLE_LF_POLICY_PREFIX + UUID.randomUUID();
+    lfRegisterPathRoleIamPolicyName = LF_REGISTER_PATH_ROLE_IAM_POLICY_PREFIX + UUID.randomUUID();
+    lfPrivilegedRolePolicyName = LF_PRIVILEGED_ROLE_POLICY_PREFIX + UUID.randomUUID();

     iam =
         IamClient.builder()
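String concatenation with + already routes a non-String operand through String.valueOf, which invokes toString() on non-null references, so the explicit .toString() calls were redundant; concatenation even tolerates null, producing the text "null" where an explicit call would throw. A quick sketch:

import java.util.UUID;

public class ConcatDemo {
  public static void main(String[] args) {
    UUID id = UUID.fromString("123e4567-e89b-12d3-a456-426614174000");
    String explicit = "role-" + id.toString();
    String implicit = "role-" + id; // + calls String.valueOf(id) for us
    System.out.println(explicit.equals(implicit)); // true
  }
}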
@@ -119,7 +119,7 @@ public static void afterClass() {

   @BeforeEach
   public void beforeEach() {
-    objectKey = String.format("%s/%s", prefix, UUID.randomUUID().toString());
+    objectKey = String.format("%s/%s", prefix, UUID.randomUUID());
     objectUri = String.format("s3://%s/%s", bucketName, objectKey);
     clientFactory.initialize(Maps.newHashMap());
   }
@@ -64,7 +64,7 @@ public static void afterClass() {

   @BeforeEach
   public void before() {
-    String objectKey = String.format("%s/%s", prefix, UUID.randomUUID().toString());
+    String objectKey = String.format("%s/%s", prefix, UUID.randomUUID());
     objectUri = String.format("s3://%s/%s", bucketName, objectKey);
   }
@@ -67,7 +67,7 @@ public class AwsClientProperties implements Serializable {
   public static final String CLIENT_REGION = "client.region";

   private String clientRegion;
-  private String clientCredentialsProvider;
+  private final String clientCredentialsProvider;
   private final Map<String, String> clientCredentialsProviderProperties;

   public AwsClientProperties() {
18 changes: 9 additions & 9 deletions aws/src/main/java/org/apache/iceberg/aws/AwsProperties.java
@@ -213,25 +213,25 @@ public class AwsProperties implements Serializable {
   private static final String HTTP_CLIENT_PREFIX = "http-client.";
   private final Set<software.amazon.awssdk.services.sts.model.Tag> stsClientAssumeRoleTags;

-  private String clientAssumeRoleArn;
-  private String clientAssumeRoleExternalId;
-  private int clientAssumeRoleTimeoutSec;
-  private String clientAssumeRoleRegion;
-  private String clientAssumeRoleSessionName;
-  private String clientCredentialsProvider;
+  private final String clientAssumeRoleArn;
+  private final String clientAssumeRoleExternalId;
+  private final int clientAssumeRoleTimeoutSec;
+  private final String clientAssumeRoleRegion;
+  private final String clientAssumeRoleSessionName;
+  private final String clientCredentialsProvider;
   private final Map<String, String> clientCredentialsProviderProperties;

-  private String glueEndpoint;
+  private final String glueEndpoint;
   private String glueCatalogId;
   private boolean glueCatalogSkipArchive;
   private boolean glueCatalogSkipNameValidation;
   private boolean glueLakeFormationEnabled;

   private String dynamoDbTableName;
-  private String dynamoDbEndpoint;
+  private final String dynamoDbEndpoint;

   private String restSigningRegion;
-  private String restSigningName;
+  private final String restSigningName;
   private String restAccessKeyId;
   private String restSecretAccessKey;
   private String restSessionToken;
@@ -387,7 +387,7 @@ public void renameTable(TableIdentifier from, TableIdentifier to) {
         "Cannot rename %s to %s because namespace %s does not exist", from, to, to.namespace());
   }
   // keep metadata
-  Table fromTable = null;
+  Table fromTable;
   String fromTableDbName =
       IcebergToGlueConverter.getDatabaseName(from, awsProperties.glueCatalogSkipNameValidation());
   String fromTableName =
@@ -139,8 +139,8 @@ private LakeFormationClient lakeFormation() {
   }

   static class LakeFormationCredentialsProvider implements AwsCredentialsProvider {
-    private LakeFormationClient client;
-    private String tableArn;
+    private final LakeFormationClient client;
+    private final String tableArn;

     LakeFormationCredentialsProvider(LakeFormationClient lakeFormationClient, String tableArn) {
       this.client = lakeFormationClient;