@@ -68,7 +68,7 @@ private[hive] class SparkHiveWriterContainer(
   @transient private var writer: FileSinkOperator.RecordWriter = null
   @transient protected lazy val committer = conf.value.getOutputCommitter
   @transient protected lazy val jobContext = newJobContext(conf.value, jID.value)
-  @transient protected lazy val taskContext = newTaskAttemptContext(conf.value, taID.value)
+  @transient private lazy val taskContext = newTaskAttemptContext(conf.value, taID.value)
   @transient private lazy val outputFormat =
     conf.value.getOutputFormat.asInstanceOf[HiveOutputFormat[AnyRef, Writable]]

@@ -227,20 +227,11 @@ private[spark] class SparkHiveDynamicPartitionWriterContainer(
     newFileSinkDesc.setCompressCodec(fileSinkConf.getCompressCodec)
     newFileSinkDesc.setCompressType(fileSinkConf.getCompressType)

-    val path = {
-      val outputPath = FileOutputFormat.getOutputPath(conf.value)
-      assert(outputPath != null, "Undefined job output-path")
-      var workPath = outputPath
-      if (committer.isInstanceOf[FileOutputCommitter]) {
-        // use the path like $outputPath/_temporary/${attemptId}/
-        // to avoid write to the same file when `spark.speculation=true`
-        workPath = committer
-          .asInstanceOf[FileOutputCommitter]
-          .getWorkPath(taskContext, outputPath)
-      }
-      workPath = new Path(workPath, dynamicPartPath.stripPrefix("/"))
-      new Path(workPath, getOutputName)
-    }
+    // use the path like ${hive_tmp}/_temporary/${attemptId}/
+    // to avoid write to the same file when `spark.speculation=true`
+    val path = FileOutputFormat.getTaskOutputPath(
+      conf.value,
+      dynamicPartPath.stripPrefix("/") + "/" + getOutputName)

     HiveFileFormatUtils.getHiveRecordWriter(
       conf.value,
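Note: the removed block was effectively a hand-rolled copy of Hadoop's FileOutputFormat.getTaskOutputPath from the old org.apache.hadoop.mapred API, so the patch simply delegates to that helper. As a rough sketch of what it does (paraphrasing the removed code; taskOutputPathSketch is an illustrative name, not part of the patch or of Hadoop):

import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapred.{FileOutputCommitter, FileOutputFormat, JobConf, TaskAttemptContext}

// Sketch of the path resolution the helper performs: resolve a relative
// file name against the committer's per-attempt work path, falling back
// to the plain job output path for non-file committers.
def taskOutputPathSketch(conf: JobConf, taskContext: TaskAttemptContext, name: String): Path = {
  val outputPath = FileOutputFormat.getOutputPath(conf)
  require(outputPath != null, "Undefined job output-path")
  val workPath = conf.getOutputCommitter match {
    // FileOutputCommitter stages each attempt's output under a private
    // work directory, roughly ${outputPath}/_temporary/${attemptId}/
    case c: FileOutputCommitter => c.getWorkPath(taskContext, outputPath)
    case _ => outputPath
  }
  new Path(workPath, name)
}

Because each speculative attempt gets its own work directory from the FileOutputCommitter, two attempts of the same task no longer race on one file, which is exactly the `spark.speculation=true` hazard the comment describes.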