2 files changed: +20 -21 lines

First file (a shell script that drives the test build):

@@ -140,7 +140,7 @@ CURRENT_BLOCK=$BLOCK_BUILD
 
 {
   # We always build with Hive because the PySpark Spark SQL tests need it.
-  BUILD_MVN_PROFILE_ARGS="$SBT_MAVEN_PROFILES_ARGS -Phive"
+  BUILD_MVN_PROFILE_ARGS="$SBT_MAVEN_PROFILES_ARGS -Phive -Phive-0.12.0"
 
   echo "[info] Building Spark with these arguments: $BUILD_MVN_PROFILE_ARGS"
 
@@ -167,7 +167,7 @@ CURRENT_BLOCK=$BLOCK_SPARK_UNIT_TESTS
   # If the Spark SQL tests are enabled, run the tests with the Hive profiles enabled.
   # This must be a single argument, as it is.
   if [ -n "$_RUN_SQL_TESTS" ]; then
-    SBT_MAVEN_PROFILES_ARGS="$SBT_MAVEN_PROFILES_ARGS -Phive"
+    SBT_MAVEN_PROFILES_ARGS="$SBT_MAVEN_PROFILES_ARGS -Phive -Phive-0.12.0"
   fi
 
   if [ -n "$_SQL_TESTS_ONLY" ]; then
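Both hunks append the hive-0.12.0 profile to the argument strings that are later handed to the build. As a rough sketch only (the actual build invocation lives elsewhere in the script; the profile names assume matching <profile> definitions in the project's POMs, and -DskipTests/package are merely illustrative), an equivalent manual Maven build would look like:

    # Activate the same profiles by hand; -P selects Maven profiles.
    SBT_MAVEN_PROFILES_ARGS="-Phive -Phive-0.12.0"
    mvn $SBT_MAVEN_PROFILES_ARGS -DskipTests package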
Second file (a Maven POM, plugins section): the build-helper-maven-plugin block is re-indented and the stray blank line above it is dropped; its XML content is unchanged, so this is a whitespace-only change (-19/+18 lines; the exact indentation shown below is approximate).

@@ -161,25 +161,24 @@
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest-maven-plugin</artifactId>
       </plugin>
-
-        <plugin>
-          <groupId>org.codehaus.mojo</groupId>
-          <artifactId>build-helper-maven-plugin</artifactId>
-          <executions>
-            <execution>
-              <id>add-default-sources</id>
-              <phase>generate-sources</phase>
-              <goals>
-                <goal>add-source</goal>
-              </goals>
-              <configuration>
-                <sources>
-                  <source>v${hive.version.short}/src/main/scala</source>
-                </sources>
-              </configuration>
-            </execution>
-          </executions>
-        </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>add-default-sources</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>v${hive.version.short}/src/main/scala</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
 
       <!-- Deploy datanucleus jars to the spark/lib_managed/jars directory -->
       <plugin>
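For context, the re-indented block is the version-switching piece: build-helper-maven-plugin's add-source goal runs at generate-sources and registers v${hive.version.short}/src/main/scala as an additional compile source root, which lets the module keep Hive-version-specific Scala sources in parallel directories. A small sketch of how one might check which directory a given profile selects, assuming the hive-0.12.0 profile defines hive.version.short somewhere in the POM hierarchy:

    # Print the resolved value of hive.version.short under the chosen profiles
    # (assumes the hive-0.12.0 profile defines it); the extra source root is
    # then v<value>/src/main/scala relative to this module.
    mvn -Phive -Phive-0.12.0 help:evaluate -Dexpression=hive.version.short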