@@ -39,6 +39,7 @@ import org.apache.spark.broadcast.Broadcast
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.rdd.RDD._
 import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.sources._
 import org.apache.spark.sql.types.{DataType, StructType}
 import org.apache.spark.sql.{Row, SQLConf, SQLContext}
@@ -83,7 +84,7 @@ private[sql] class ParquetOutputWriter(path: String, context: TaskAttemptContext
             case partFilePattern(id) => id.toInt
             case name if name.startsWith("_") => 0
             case name if name.startsWith(".") => 0
-            case name => sys.error(
+            case name => throw new AnalysisException(
               s"Trying to write Parquet files to directory $outputPath, " +
                 s"but found items with illegal name '$name'.")
           }.reduceOption(_ max _).getOrElse(0)
@@ -380,11 +381,12 @@ private[sql] class ParquetRelation2(
       // time-consuming.
       if (dataSchema == null) {
         dataSchema = {
-          val dataSchema0 =
-            maybeDataSchema
-              .orElse(readSchema())
-              .orElse(maybeMetastoreSchema)
-              .getOrElse(sys.error("Failed to get the schema."))
+          val dataSchema0 = maybeDataSchema
+            .orElse(readSchema())
+            .orElse(maybeMetastoreSchema)
+            .getOrElse(throw new AnalysisException(
+              s"Failed to discover schema of Parquet file(s) in the following location(s):\n" +
+                paths.mkString("\n\t")))
 
           // If this Parquet relation is converted from a Hive Metastore table, must reconcile
           // case insensitivity issue and possible schema mismatch (probably caused by schema
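Note (not part of this commit): a minimal sketch of what the change means for callers, assuming a Spark 1.4-era SQLContext named sqlContext and a hypothetical Parquet path /data/parquet-output. With sys.error these failures surfaced as bare RuntimeExceptions; after this change they surface as Spark SQL's user-facing AnalysisException, which callers can catch specifically:

    import org.apache.spark.sql.AnalysisException

    try {
      // Schema discovery on read can hit the ParquetRelation2 error path above;
      // the ParquetOutputWriter path fires analogously on write.
      sqlContext.read.parquet("/data/parquet-output").schema
    } catch {
      case e: AnalysisException =>
        // AnalysisException exposes a message field in addition to getMessage.
        println(s"Analysis error: ${e.message}")
    }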