[DOCS] Update example projects to 1.6.0 #1417

Closed
wants to merge 14 commits into from
54 changes: 30 additions & 24 deletions .github/workflows/example.yml
@@ -24,37 +24,33 @@ jobs:
fail-fast: false
matrix:
include:
- spark: 3.5.0
- spark: '3.5.0'
spark-compat: '3.5'
sedona: 1.5.1
- spark: 3.4.2
sedona: '1.6.0'
geotools: '28.2'
java: '11'
- spark: '3.5.0'
spark-compat: '3.5'
sedona: '1.6.0'
geotools: '28.2'
java: '8'
- spark: '3.4.2'
spark-compat: '3.4'
sedona: 1.5.1
- spark: 3.3.4
sedona: '1.6.0'
geotools: '28.2'
java: '8'
- spark: '3.3.4'
spark-compat: '3.0'
sedona: 1.5.1
sedona: '1.6.0'
geotools: '28.2'
java: '8'

steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4
with:
distribution: 'zulu'
java-version: '8'
- run: sudo apt-get remove scala-library scala
- run: sudo wget www.scala-lang.org/files/archive/scala-2.12.11.deb
- run: sudo dpkg -i scala-2.12.11.deb
- run: scala -version
- run: echo "deb https://repo.scala-sbt.org/scalasbt/debian /" | sudo tee -a /etc/apt/sources.list.d/sbt.list
- run: sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2EE0EA64E40A89B84B2DF73499E82A75642AC823
- run: sudo apt-get update
- run: sudo apt-get install sbt
- name: Cache SBT
uses: actions/cache@v3
with:
path: |
~/.ivy2/cache
~/.sbt
key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
java-version: ${{ matrix.java }}
- name: Cache Maven packages
uses: actions/cache@v3
with:
@@ -65,5 +61,15 @@ jobs:
SPARK_VERSION: ${{ matrix.spark }}
SPARK_COMPAT_VERSION: ${{ matrix.spark-compat }}
SEDONA_VERSION: ${{ matrix.sedona }}
run: (cd examples/spark-sql;mvn clean install -Dspark.version=${SPARK_VERSION} -Dspark.compat.version=${SPARK_COMPAT_VERSION} -Dsedona.version=${SEDONA_VERSION};java -jar target/sedona-spark-example-1.0.0.jar)
- run: (cd examples/flink-sql;mvn clean install;java -jar target/sedona-flink-example-1.0.0.jar)
GEOTOOLS_VERSION: ${{ matrix.geotools }}
run: |
echo "Running Maven command: mvn clean install -Dspark.version=${SPARK_VERSION} -Dgeotools.version=${GEOTOOLS_VERSION} -Dspark.compat.version=${SPARK_COMPAT_VERSION} -Dsedona.version=${SEDONA_VERSION}"
(cd examples/spark-sql; mvn clean install -Dspark.version=${SPARK_VERSION} -Dgeotools.version=${GEOTOOLS_VERSION} -Dspark.compat.version=${SPARK_COMPAT_VERSION} -Dsedona.version=${SEDONA_VERSION}; java -jar target/sedona-spark-example-1.0.0.jar)
- env:
SPARK_VERSION: ${{ matrix.spark }}
SPARK_COMPAT_VERSION: ${{ matrix.spark-compat }}
SEDONA_VERSION: ${{ matrix.sedona }}
GEOTOOLS_VERSION: ${{ matrix.geotools }}
run: |
echo "Running Maven command: mvn clean install -Dgeotools.version=${GEOTOOLS_VERSION}"
(cd examples/flink-sql;mvn clean install -Dgeotools.version=${GEOTOOLS_VERSION};java -jar target/sedona-flink-example-1.0.0.jar)
95 changes: 88 additions & 7 deletions examples/flink-sql/pom.xml
@@ -28,10 +28,10 @@

<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<geotools.version>1.5.1-28.2</geotools.version>
<geotools.version>28.2</geotools.version>
<geotools.scope>compile</geotools.scope>
<scala.compat.version>2.12</scala.compat.version>
<sedona.version>1.5.1</sedona.version>
<sedona.version>1.6.0</sedona.version>
<flink.version>1.14.3</flink.version>
<flink.scope>compile</flink.scope>
</properties>
@@ -42,11 +42,6 @@
<artifactId>sedona-flink_${scala.compat.version}</artifactId>
<version>${sedona.version}</version>
</dependency>
<dependency>
<groupId>org.datasyslab</groupId>
<artifactId>geotools-wrapper</artifactId>
<version>${geotools.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-core</artifactId>
@@ -100,6 +95,86 @@
<version>${flink.version}</version>
<scope>${flink.scope}</scope>
</dependency>
<!--for CRS transformation-->
<dependency>
<groupId>org.geotools</groupId>
<artifactId>gt-main</artifactId>
<version>${geotools.version}</version>
<scope>${geotools.scope}</scope>
<exclusions>
<exclusion>
<groupId>org.locationtech.jts</groupId>
<artifactId>jts-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
</exclusion>
</exclusions>
</dependency>
<!--for CRS transformation-->
<dependency>
<groupId>org.geotools</groupId>
<artifactId>gt-referencing</artifactId>
<version>${geotools.version}</version>
<scope>${geotools.scope}</scope>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
</exclusion>
<exclusion>
<groupId>commons-pool</groupId>
<artifactId>commons-pool</artifactId>
</exclusion>
<exclusion>
<groupId>net.sf.geographiclib</groupId>
<artifactId>GeographicLib-Java</artifactId>
</exclusion>
</exclusions>
</dependency>
<!--for CRS transformation-->
<dependency>
<groupId>org.geotools</groupId>
<artifactId>gt-epsg-hsql</artifactId>
<version>${geotools.version}</version>
<scope>${geotools.scope}</scope>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!--for CRS transformation-->
<dependency>
<groupId>org.geotools</groupId>
<artifactId>gt-epsg-extension</artifactId>
<version>${geotools.version}</version>
<scope>${geotools.scope}</scope>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- For Flink Web Ui in test-->
<dependency>
<groupId>org.apache.flink</groupId>
@@ -126,6 +201,12 @@
<enabled>true</enabled>
</releases>
</repository>
<repository>
<id>snapshots-repo</id>
<url>https://repository.apache.org/content/groups/snapshots</url>
<releases><enabled>false</enabled></releases>
<snapshots><enabled>true</enabled></snapshots>
</repository>
<repository>
<id>maven2-repository.dev.java.net</id>
<name>Java.net repository</name>
4 changes: 2 additions & 2 deletions examples/flink-sql/src/main/java/FlinkExample.java
@@ -21,6 +21,7 @@
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.sedona.flink.SedonaContext;
import org.apache.sedona.flink.SedonaFlinkRegistrator;
import org.apache.sedona.flink.expressions.Constructors;

@@ -38,8 +39,7 @@ public static void main(String[] args) {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();
StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);
SedonaFlinkRegistrator.registerType(env);
SedonaFlinkRegistrator.registerFunc(tableEnv);
SedonaContext.create(env, tableEnv);

// Create a fake WKT string table source
Table pointWktTable = Utils.createTextTable(env, tableEnv, Utils.createPointWKT(testDataSize), pointColNames);
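Note on the FlinkExample.java change above: Sedona 1.6.0 replaces the pair of SedonaFlinkRegistrator calls with a single SedonaContext entry point, and the GeoTools artifacts added to the flink-sql pom back its CRS functions. A minimal sketch of the resulting setup, assuming the 1.6.0 Flink API; the query and literal values are illustrative only, not taken from the example project:

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.sedona.flink.SedonaContext;

public class SedonaContextSketch {
    public static void main(String[] args) {
        // Standard Flink streaming + table environments, as in FlinkExample.java.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // 1.6.0 entry point: one call registers Sedona's types and SQL functions,
        // replacing SedonaFlinkRegistrator.registerType(env) and registerFunc(tableEnv).
        SedonaContext.create(env, tableEnv);

        // Illustrative query (not from the example project): build a point from WKT
        // and reproject it. ST_Transform is what the gt-referencing / gt-epsg-*
        // dependencies added to the pom are needed for.
        Table reprojected = tableEnv.sqlQuery(
                "SELECT ST_Transform(ST_GeomFromWKT('POINT (1 1)'), 'epsg:4326', 'epsg:3857')");
        reprojected.execute().print();
    }
}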
89 changes: 64 additions & 25 deletions examples/spark-sql/pom.xml
@@ -36,10 +36,10 @@

<geotools.version>28.2</geotools.version>
<jt-jiffle.version>1.1.24</jt-jiffle.version>
<spark.version>3.4.0</spark.version>
<spark.version>3.4.2</spark.version>
<spark.compat.version>3.4</spark.compat.version>
<scala.compat.version>2.12</scala.compat.version>
<sedona.version>1.5.1</sedona.version>
<sedona.version>1.6.0</sedona.version>
<hadoop.version>3.3.4</hadoop.version>
</properties>

@@ -62,10 +62,13 @@
<version>${sedona.version}</version>
<scope>${sedona.scope}</scope>
</dependency>
<!--The following GeoTools dependencies use GNU Lesser General Public License and thus are excluded from the binary distribution-->
<!-- Users have to include them by themselves manually -->
<!-- See https://www.apache.org/legal/resolved.html#category-x -->
<!-- See https://github.com/geotools/geotools#license -->
<!-- for AWS S3a reader / writer -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
<version>${hadoop.version}</version>
<scope>${dependency.scope}</scope>
</dependency>
<!--for CRS transformation-->
<dependency>
<groupId>org.geotools</groupId>
@@ -81,6 +84,14 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
</exclusion>
</exclusions>
</dependency>
<!--for CRS transformation-->
@@ -94,9 +105,21 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-text</artifactId>
</exclusion>
<exclusion>
<groupId>commons-pool</groupId>
<artifactId>commons-pool</artifactId>
</exclusion>
<exclusion>
<groupId>net.sf.geographiclib</groupId>
<artifactId>*</artifactId>
<artifactId>GeographicLib-Java</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -113,9 +136,10 @@
</exclusion>
</exclusions>
</dependency>
<!--for CRS transformation-->
<dependency>
<groupId>org.geotools</groupId>
<artifactId>gt-shapefile</artifactId>
<artifactId>gt-epsg-extension</artifactId>
<version>${geotools.version}</version>
<scope>${geotools.scope}</scope>
<exclusions>
@@ -131,44 +155,58 @@
<artifactId>gt-geotiff</artifactId>
<version>${geotools.version}</version>
<scope>${geotools.scope}</scope>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!--for ArcInfoAsciiGrid Reader-->
<dependency>
<groupId>org.geotools</groupId>
<artifactId>gt-process-feature</artifactId>
<artifactId>gt-arcgrid</artifactId>
<version>${geotools.version}</version>
<scope>${geotools.scope}</scope>
</dependency>
<dependency>
<groupId>org.geotools</groupId>
<artifactId>gt-arcgrid</artifactId>
<artifactId>gt-coverage</artifactId>
<version>${geotools.version}</version>
<scope>${geotools.scope}</scope>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.geotools</groupId>
<artifactId>gt-coverage</artifactId>
<artifactId>gt-process-feature</artifactId>
<version>${geotools.version}</version>
<scope>${geotools.scope}</scope>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>it.geosolutions.jaiext.jiffle</groupId>
<artifactId>jt-jiffle-language</artifactId>
<version>${jt-jiffle.version}</version>
<scope>${geotools.scope}</scope>
<exclusions>
<exclusion>
<groupId>*</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- for AWS S3a reader / writer -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
<version>${hadoop.version}</version>
<scope>${dependency.scope}</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
@@ -188,6 +226,12 @@
<enabled>true</enabled>
</releases>
</repository>
<repository>
<id>snapshots-repo</id>
<url>https://repository.apache.org/content/groups/snapshots</url>
<releases><enabled>false</enabled></releases>
<snapshots><enabled>true</enabled></snapshots>
</repository>
<repository>
<id>maven2-repository.dev.java.net</id>
<name>Java.net repository</name>
@@ -204,11 +248,6 @@
<enabled>true</enabled>
</releases>
</repository>
<repository>
<id>unidata-all</id>
<name>Unidata All</name>
<url>https://artifacts.unidata.ucar.edu/repository/unidata-all/</url>
</repository>
</repositories>
<build>
<sourceDirectory>src/main/scala</sourceDirectory>
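Note on the spark-sql pom above: the example moves to Sedona 1.6.0, keeps the GeoTools artifacts for CRS transformation and the raster readers, and adds hadoop-aws for S3a access. A minimal sketch of how such a project typically initializes Sedona on Spark, assuming the 1.6.0 SedonaContext API for Spark; the master, app name, and S3a path are placeholders, not taken from this PR:

import org.apache.sedona.spark.SedonaContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class SedonaSparkSketch {
    public static void main(String[] args) {
        // Assumed 1.6.0 entry point: builder() prepares the session config and
        // create() registers Sedona's SQL types and functions on it.
        SparkSession sedona = SedonaContext.create(
                SedonaContext.builder()
                        .master("local[*]")              // placeholder
                        .appName("sedona-sql-sketch")    // placeholder
                        .getOrCreate());

        // CRS transformation backed by the gt-referencing / gt-epsg-hsql /
        // gt-epsg-extension dependencies declared in the pom.
        Dataset<Row> reprojected = sedona.sql(
                "SELECT ST_Transform(ST_Point(1.0, 1.0), 'epsg:4326', 'epsg:3857') AS geom");
        reprojected.show(false);

        // hadoop-aws is what makes s3a:// paths resolvable; the bucket is a placeholder.
        // sedona.read().format("binaryFile").load("s3a://example-bucket/rasters/*.tif");
    }
}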