Commit 35f39cc

Merge branch 'apache:master' into master
2 parents: e5972b5 + 75852ad

7,125 files changed (+6023 / -2688398 lines)


.github/labeler.yml

+1 -1

@@ -32,7 +32,7 @@ BUILD:
   - "**/*gradle*"
   - versions.props
 DOCS:
-  - site/**/*
+  - docs/**/*
   - "**/*CHANGELOG.md"
   - "**/*README.md"
 EXAMPLES:

.github/workflows/flink-ci.yml

+2 -2

@@ -35,7 +35,7 @@ on:
     - 'pig/**'
     - 'python/**'
     - 'python_legacy/**'
-    - 'site/**'
+    - 'docs/**'
   pull_request:
     paths-ignore:
     - '.github/workflows/python-ci.yml'
@@ -51,7 +51,7 @@ on:
     - 'pig/**'
     - 'python/**'
     - 'python_legacy/**'
-    - 'site/**'
+    - 'docs/**'
 
 jobs:
   flink-tests:

.github/workflows/hive-ci.yml

+2 -2

@@ -33,7 +33,7 @@ on:
     - 'pig/**'
     - 'python/**'
     - 'python_legacy/**'
-    - 'site/**'
+    - 'docs/**'
   pull_request:
     paths-ignore:
     - '.github/workflows/python-ci.yml'
@@ -47,7 +47,7 @@ on:
     - 'pig/**'
     - 'python/**'
     - 'python_legacy/**'
-    - 'site/**'
+    - 'docs/**'
 
 jobs:
   hive2-tests:

.github/workflows/java-ci.yml

+2 -2

@@ -30,7 +30,7 @@ on:
     - 'dev/**'
     - 'python/**'
     - 'python_legacy/**'
-    - 'site/**'
+    - 'docs/**'
   pull_request:
     paths-ignore:
     - '.github/workflows/python-ci.yml'
@@ -41,7 +41,7 @@ on:
     - 'dev/**'
     - 'python/**'
     - 'python_legacy/**'
-    - 'site/**'
+    - 'docs/**'
 
 jobs:
   core-tests:

.github/workflows/publish-snapshot.yml

+1

@@ -40,3 +40,4 @@ jobs:
       - run: |
           ./gradlew printVersion
           ./gradlew -DflinkVersions=1.12,1.13,1.14 -DsparkVersions=2.4,3.0,3.1,3.2 -DhiveVersions=2,3 publishApachePublicationToMavenRepository -PmavenUser=${{ secrets.NEXUS_USER }} -PmavenPassword=${{ secrets.NEXUS_PW }}
+          ./gradlew -DflinkVersions= -DsparkVersions=3.2 -DscalaVersion=2.13 -DhiveVersions= publishApachePublicationToMavenRepository -PmavenUser=${{ secrets.NEXUS_USER }} -PmavenPassword=${{ secrets.NEXUS_PW }}

.github/workflows/spark-ci.yml

+31 -4

@@ -35,7 +35,7 @@ on:
     - 'pig/**'
     - 'python/**'
     - 'python_legacy/**'
-    - 'site/**'
+    - 'docs/**'
   pull_request:
     paths-ignore:
     - '.github/workflows/python-ci.yml'
@@ -51,7 +51,7 @@ on:
     - 'pig/**'
     - 'python/**'
     - 'python_legacy/**'
-    - 'site/**'
+    - 'docs/**'
 
 jobs:
   spark2-tests:
@@ -104,7 +104,7 @@ jobs:
           path: |
             **/build/testlogs
 
-  spark-3x-tests:
+  spark-3x-scala-2-12-tests:
     runs-on: ubuntu-latest
     strategy:
       matrix:
@@ -123,7 +123,34 @@
           key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle') }}
           restore-keys: ${{ runner.os }}-gradle
       - run: echo -e "$(ip addr show eth0 | grep "inet\b" | awk '{print $2}' | cut -d/ -f1)\t$(hostname -f) $(hostname -s)" | sudo tee -a /etc/hosts
-      - run: ./gradlew -DsparkVersions=${{ matrix.spark }} -DhiveVersions= -DflinkVersions= :iceberg-spark:iceberg-spark-${{ matrix.spark }}_2.12:check :iceberg-spark:iceberg-spark-extensions-${{ matrix.spark }}_2.12:check :iceberg-spark:iceberg-spark-runtime-${{ matrix.spark }}_2.12:check -Pquick=true -x javadoc
+      - run: ./gradlew -DsparkVersions=${{ matrix.spark }} -DscalaVersion=2.12 -DhiveVersions= -DflinkVersions= :iceberg-spark:iceberg-spark-${{ matrix.spark }}_2.12:check :iceberg-spark:iceberg-spark-extensions-${{ matrix.spark }}_2.12:check :iceberg-spark:iceberg-spark-runtime-${{ matrix.spark }}_2.12:check -Pquick=true -x javadoc
+      - uses: actions/upload-artifact@v2
+        if: failure()
+        with:
+          name: test logs
+          path: |
+            **/build/testlogs
+
+  spark-3x-scala-2-13-tests:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        jvm: [8, 11]
+        spark: ['3.2']
+    env:
+      SPARK_LOCAL_IP: localhost
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-java@v1
+        with:
+          java-version: ${{ matrix.jvm }}
+      - uses: actions/cache@v2
+        with:
+          path: ~/.gradle/caches
+          key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle') }}
+          restore-keys: ${{ runner.os }}-gradle
+      - run: echo -e "$(ip addr show eth0 | grep "inet\b" | awk '{print $2}' | cut -d/ -f1)\t$(hostname -f) $(hostname -s)" | sudo tee -a /etc/hosts
+      - run: ./gradlew -DsparkVersions=${{ matrix.spark }} -DscalaVersion=2.13 -DhiveVersions= -DflinkVersions= :iceberg-spark:iceberg-spark-${{ matrix.spark }}_2.13:check :iceberg-spark:iceberg-spark-extensions-${{ matrix.spark }}_2.13:check :iceberg-spark:iceberg-spark-runtime-${{ matrix.spark }}_2.13:check -Pquick=true -x javadoc
       - uses: actions/upload-artifact@v2
         if: failure()
         with:

.gitignore

+1

@@ -2,6 +2,7 @@
 .DS_Store
 .cache
 tmp/
+site
 
 # intellij files
 .idea

CONTRIBUTING.md

+5

@@ -41,6 +41,11 @@ Pull requests are the preferred mechanism for contributing to Iceberg
 Please refer to the [Building](https://github.com/apache/iceberg#building) section of the main readme for instructions
 on how to build iceberg locally.
 
+## Website and Documentation Updates
+
+The [Iceberg website](https://iceberg.apache.org/) and documentations are hosted in a different repository [iceberg-docs](https://github.com/apache/iceberg-docs).
+Read the repository README for contribution guidelines for the website and documentation.
+
 ## Style
 
 For Java styling, check out the section

api/src/main/java/org/apache/iceberg/SnapshotRef.java

+30

@@ -20,6 +20,7 @@
 package org.apache.iceberg;
 
 import java.io.Serializable;
+import java.util.Objects;
 import org.apache.iceberg.relocated.com.google.common.base.MoreObjects;
 import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
 
@@ -66,6 +67,35 @@ public Long maxRefAgeMs() {
     return maxRefAgeMs;
   }
 
+  @Override
+  public boolean equals(Object other) {
+    if (other == this) {
+      return true;
+    }
+
+    if (!(other instanceof SnapshotRef)) {
+      return false;
+    }
+
+    SnapshotRef ref = (SnapshotRef) other;
+    return Objects.equals(ref.snapshotId(), snapshotId) &&
+        Objects.equals(ref.type(), type) &&
+        Objects.equals(ref.maxRefAgeMs(), maxRefAgeMs) &&
+        Objects.equals(ref.minSnapshotsToKeep(), minSnapshotsToKeep) &&
+        Objects.equals(ref.maxSnapshotAgeMs(), maxSnapshotAgeMs);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(
+        this.snapshotId,
+        this.type,
+        this.maxRefAgeMs,
+        this.maxSnapshotAgeMs,
+        this.minSnapshotsToKeep
+    );
+  }
+
   public static Builder tagBuilder(long snapshotId) {
     return builderFor(snapshotId, SnapshotRefType.TAG);
   }
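
Editor's note: the added equals and hashCode compare refs field by field through the accessors shown in this diff, so two refs built with identical settings become interchangeable in assertions and hash-based collections. A minimal sketch of what that enables, assuming tagBuilder returns the same Builder (with maxRefAgeMs and build) used elsewhere in this commit; not part of the commit itself:

import org.apache.iceberg.SnapshotRef;

public class SnapshotRefEqualityDemo {
  public static void main(String[] args) {
    // Two tag refs pointing at the same snapshot with the same retention setting
    SnapshotRef a = SnapshotRef.tagBuilder(42L).maxRefAgeMs(86_400_000L).build();
    SnapshotRef b = SnapshotRef.tagBuilder(42L).maxRefAgeMs(86_400_000L).build();

    System.out.println(a.equals(b));                   // true, via the new value-based equals()
    System.out.println(a.hashCode() == b.hashCode());  // true, via the new hashCode()
  }
}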

api/src/test/java/org/apache/iceberg/types/TestConversions.java

+33

@@ -155,6 +155,39 @@ public void testByteBufferConversions() {
     Assert.assertArrayEquals(
         new byte[]{1, 89},
         Literal.of(new BigDecimal("3.45")).toByteBuffer().array());
+
+    // decimal on 3-bytes to test that we use the minimum number of bytes and not a power of 2
+    // 1234567 is 00010010|11010110|10000111 in binary
+    // 00010010 -> 18, 11010110 -> -42, 10000111 -> -121
+    assertConversion(
+        new BigDecimal("123.4567"),
+        DecimalType.of(7, 4),
+        new byte[]{18, -42, -121});
+    Assert.assertArrayEquals(
+        new byte[]{18, -42, -121},
+        Literal.of(new BigDecimal("123.4567")).toByteBuffer().array());
+
+    // negative decimal to test two's complement
+    // -1234567 is 11101101|00101001|01111001 in binary
+    // 11101101 -> -19, 00101001 -> 41, 01111001 -> 121
+    assertConversion(
+        new BigDecimal("-123.4567"),
+        DecimalType.of(7, 4),
+        new byte[]{-19, 41, 121});
+    Assert.assertArrayEquals(
+        new byte[]{-19, 41, 121},
+        Literal.of(new BigDecimal("-123.4567")).toByteBuffer().array());
+
+    // test empty byte in decimal
+    // 11 is 00001011 in binary
+    // 00001011 -> 11
+    assertConversion(
+        new BigDecimal("0.011"),
+        DecimalType.of(10, 3),
+        new byte[]{11});
+    Assert.assertArrayEquals(
+        new byte[]{11},
+        Literal.of(new BigDecimal("0.011")).toByteBuffer().array());
   }
 
   private <T> void assertConversion(T value, Type type, byte[] expectedBinary) {
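
Editor's note: the comments in the new assertions spell out the encoding rule being tested: a decimal literal is serialized as the minimum-length two's-complement bytes of its unscaled value. As a rough cross-check (not part of the commit), the same expected byte arrays can be reproduced with the JDK alone, since BigInteger.toByteArray() also returns the minimal two's-complement encoding:

import java.math.BigDecimal;
import java.util.Arrays;

public class DecimalBytesCheck {
  public static void main(String[] args) {
    // unscaled value 1234567 -> minimal two's-complement bytes [18, -42, -121]
    System.out.println(Arrays.toString(new BigDecimal("123.4567").unscaledValue().toByteArray()));
    // unscaled value -1234567 -> [-19, 41, 121]
    System.out.println(Arrays.toString(new BigDecimal("-123.4567").unscaledValue().toByteArray()));
    // unscaled value 11 -> a single byte [11]
    System.out.println(Arrays.toString(new BigDecimal("0.011").unscaledValue().toByteArray()));
  }
}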

…/org/apache/iceberg/SnapshotRefParser.java (new file)

+96

@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.JsonNode;
+import java.io.IOException;
+import java.io.StringWriter;
+import java.io.UncheckedIOException;
+import java.util.Locale;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
+import org.apache.iceberg.util.JsonUtil;
+
+public class SnapshotRefParser {
+
+  private SnapshotRefParser() {
+  }
+
+  private static final String SNAPSHOT_ID = "snapshot-id";
+  private static final String TYPE = "type";
+  private static final String MIN_SNAPSHOTS_TO_KEEP = "min-snapshots-to-keep";
+  private static final String MAX_SNAPSHOT_AGE_MS = "max-snapshot-age-ms";
+  private static final String MAX_REF_AGE_MS = "max-ref-age-ms";
+
+  public static String toJson(SnapshotRef ref) {
+    return toJson(ref, false);
+  }
+
+  public static String toJson(SnapshotRef ref, boolean pretty) {
+    try {
+      StringWriter writer = new StringWriter();
+      JsonGenerator generator = JsonUtil.factory().createGenerator(writer);
+      if (pretty) {
+        generator.useDefaultPrettyPrinter();
+      }
+
+      toJson(ref, generator);
+      generator.flush();
+      return writer.toString();
+    } catch (IOException e) {
+      throw new UncheckedIOException(e);
+    }
+  }
+
+  public static void toJson(SnapshotRef ref, JsonGenerator generator) throws IOException {
+    generator.writeStartObject();
+    generator.writeNumberField(SNAPSHOT_ID, ref.snapshotId());
+    generator.writeStringField(TYPE, ref.type().name().toLowerCase(Locale.ENGLISH));
+    JsonUtil.writeIntegerFieldIf(ref.minSnapshotsToKeep() != null, MIN_SNAPSHOTS_TO_KEEP, ref.minSnapshotsToKeep(),
+        generator);
+    JsonUtil.writeLongFieldIf(ref.maxSnapshotAgeMs() != null, MAX_SNAPSHOT_AGE_MS, ref.maxSnapshotAgeMs(), generator);
+    JsonUtil.writeLongFieldIf(ref.maxRefAgeMs() != null, MAX_REF_AGE_MS, ref.maxRefAgeMs(), generator);
+    generator.writeEndObject();
+  }
+
+  public static SnapshotRef fromJson(String json) {
+    Preconditions.checkArgument(json != null && !json.isEmpty(), "Cannot parse snapshot ref from invalid JSON: %s",
+        json);
+    try {
+      return fromJson(JsonUtil.mapper().readValue(json, JsonNode.class));
+    } catch (IOException e) {
+      throw new UncheckedIOException("Failed to parse snapshot ref: " + json, e);
+    }
+  }
+
+  public static SnapshotRef fromJson(JsonNode node) {
+    Preconditions.checkArgument(node.isObject(), "Cannot parse snapshot reference from a non-object: %s", node);
+    long snapshotId = JsonUtil.getLong(SNAPSHOT_ID, node);
+    SnapshotRefType type = SnapshotRefType.valueOf(JsonUtil.getString(TYPE, node).toUpperCase(Locale.ENGLISH));
+    Integer minSnapshotsToKeep = JsonUtil.getIntOrNull(MIN_SNAPSHOTS_TO_KEEP, node);
+    Long maxSnapshotAgeMs = JsonUtil.getLongOrNull(MAX_SNAPSHOT_AGE_MS, node);
+    Long maxRefAgeMs = JsonUtil.getLongOrNull(MAX_REF_AGE_MS, node);
+    return SnapshotRef.builderFor(snapshotId, type)
+        .minSnapshotsToKeep(minSnapshotsToKeep)
+        .maxSnapshotAgeMs(maxSnapshotAgeMs)
+        .maxRefAgeMs(maxRefAgeMs)
+        .build();
+  }
+}
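
Editor's note: based on the field constants and writeXxxFieldIf calls above, a ref serializes to a small JSON object with hyphenated keys and a lowercased type, and fromJson rebuilds it through SnapshotRef.builderFor. A hedged usage sketch, inferred from the parser code rather than copied from a test (the tagBuilder call and equals come from SnapshotRef as changed in this commit):

import org.apache.iceberg.SnapshotRef;
import org.apache.iceberg.SnapshotRefParser;

public class SnapshotRefJsonDemo {
  public static void main(String[] args) {
    SnapshotRef ref = SnapshotRef.tagBuilder(123L).maxRefAgeMs(604_800_000L).build();

    // Expected shape: {"snapshot-id":123,"type":"tag","max-ref-age-ms":604800000}
    // (fields left null by the builder are skipped by the writeXxxFieldIf calls)
    String json = SnapshotRefParser.toJson(ref);

    // Round-trip: fromJson rebuilds an equivalent ref via SnapshotRef.builderFor
    SnapshotRef parsed = SnapshotRefParser.fromJson(json);
    System.out.println(ref.equals(parsed));  // true, using the equals() added in this commit
  }
}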
