5 files changed, +7 −7 lines, all under sql/core/src/main/scala/org/apache/spark/sql.
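All seven changed lines make the same mechanical edit: DataSource.lookupDataSource now takes the provider name first and the SQLConf second. The definition in object DataSource changes together with its call sites in DataFrameReader, DataFrameWriter, AlterTableAddColumnsCommand, DataSource.providingClass, and PreprocessTableCreation (two calls).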
@@ -182,7 +182,7 @@ class DataFrameReader private[sql](sparkSession: SparkSession) extends Logging {
         "read files of Hive data source directly.")
     }
 
-    val cls = DataSource.lookupDataSource(sparkSession.sessionState.conf, source)
+    val cls = DataSource.lookupDataSource(source, sparkSession.sessionState.conf)
     if (classOf[DataSourceV2].isAssignableFrom(cls)) {
       val options = new DataSourceV2Options(extraOptions.asJava)
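For orientation, here is a minimal self-contained sketch of the dispatch this call site performs. All type and provider names below (DataSourceV2Like, ModernSource, and so on) are stand-ins, not Spark's real ones, and the real DataFrameReader.load does considerably more:

trait DataSourceV2Like
class LegacySource                          // hypothetical V1-style source
class ModernSource extends DataSourceV2Like // hypothetical V2-style source

object ReaderDispatchSketch {
  // Stand-in for DataSource.lookupDataSource(source, conf): map a provider
  // name to the class that implements it.
  def lookupDataSource(provider: String): Class[_] = provider match {
    case "modern" => classOf[ModernSource]
    case _        => classOf[LegacySource]
  }

  def load(source: String): String = {
    val cls = lookupDataSource(source)
    // Same shape as the patched call site: branch on whether the resolved
    // class implements the V2 API.
    if (classOf[DataSourceV2Like].isAssignableFrom(cls)) "v2 read path"
    else "v1 read path"
  }

  def main(args: Array[String]): Unit = {
    println(load("modern")) // prints: v2 read path
    println(load("csv"))    // prints: v1 read path
  }
}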
@@ -234,7 +234,7 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
 
     assertNotBucketed("save")
 
-    val cls = DataSource.lookupDataSource(df.sparkSession.sessionState.conf, source)
+    val cls = DataSource.lookupDataSource(source, df.sparkSession.sessionState.conf)
     if (classOf[DataSourceV2].isAssignableFrom(cls)) {
       cls.newInstance() match {
         case ds: WriteSupport =>
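The write path does the analogous dispatch but instantiates the resolved class and pattern-matches for write support. A sketch under the same caveats (stand-in types, hypothetical names; the real logic lives in DataFrameWriter.save):

trait WriteSupportLike { def createWriter(path: String): Unit }
class WritableSource extends WriteSupportLike {
  def createWriter(path: String): Unit = println(s"v2 write to $path")
}
class ReadOnlySource // no write support

object WriterDispatchSketch {
  def save(cls: Class[_], path: String): Unit =
    cls.newInstance() match {
      case ds: WriteSupportLike => ds.createWriter(path)        // V2 write path
      case _                    => println("falling back to the V1 write path")
    }

  def main(args: Array[String]): Unit = {
    save(classOf[WritableSource], "/tmp/out")
    save(classOf[ReadOnlySource], "/tmp/out")
  }
}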
@@ -231,7 +231,7 @@ case class AlterTableAddColumnsCommand(
     }
 
     if (DDLUtils.isDatasourceTable(catalogTable)) {
-      DataSource.lookupDataSource(conf, catalogTable.provider.get).newInstance() match {
+      DataSource.lookupDataSource(catalogTable.provider.get, conf).newInstance() match {
        // For datasource table, this command can only support the following File format.
        // TextFileFormat only default to one column "value"
        // Hive type is already considered as hive serde table, so the logic will not
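A hedged sketch of the gate this command applies: instantiate the table's provider and allow ADD COLUMNS only for file formats known to handle it. The format classes below are stand-ins that mirror the comments in the diff, not Spark's actual ones:

trait FileFormatLike
class CsvFormat extends FileFormatLike
class TextFormat extends FileFormatLike // text tables expose only a "value" column

object AddColumnsGateSketch {
  def verifyAddColumns(providerClass: Class[_]): Unit =
    providerClass.newInstance() match {
      case _: CsvFormat => () // supported format: nothing to do
      case _: TextFormat =>
        // Mirrors the diff's comment: TextFileFormat defaults to the single
        // column "value", so adding columns is rejected.
        throw new UnsupportedOperationException("ADD COLUMNS is not supported for text tables")
      case other =>
        throw new UnsupportedOperationException(s"unsupported provider: ${other.getClass.getName}")
    }

  def main(args: Array[String]): Unit = {
    verifyAddColumns(classOf[CsvFormat])
    println("csv: ok")
  }
}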
@@ -88,7 +88,7 @@ case class DataSource(
   case class SourceInfo(name: String, schema: StructType, partitionColumns: Seq[String])
 
   lazy val providingClass: Class[_] =
-    DataSource.lookupDataSource(sparkSession.sessionState.conf, className)
+    DataSource.lookupDataSource(className, sparkSession.sessionState.conf)
   lazy val sourceInfo: SourceInfo = sourceSchema()
   private val caseInsensitiveOptions = CaseInsensitiveMap(options)
   private val equality = sparkSession.sessionState.conf.resolver
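Since providingClass is a lazy val, the (possibly costly, possibly failing) class lookup is deferred until first access and then cached. A tiny self-contained illustration of that behavior, unrelated to Spark's types:

object LazyLookupSketch {
  private def expensiveLookup(name: String): Class[_] = {
    println(s"resolving $name") // runs once, on first access
    Class.forName(name)
  }

  lazy val providingClass: Class[_] = expensiveLookup("java.lang.String")

  def main(args: Array[String]): Unit = {
    println("constructed; nothing resolved yet")
    println(providingClass.getName) // triggers the lookup
    println(providingClass.getName) // cached; no second "resolving" line
  }
}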
@@ -574,7 +574,7 @@ object DataSource extends Logging {
       "org.apache.spark.Logging")
 
   /** Given a provider name, look up the data source class definition. */
-  def lookupDataSource(conf: SQLConf, provider: String): Class[_] = {
+  def lookupDataSource(provider: String, conf: SQLConf): Class[_] = {
     val provider1 = backwardCompatibilityMap.getOrElse(provider, provider) match {
       case name if name.equalsIgnoreCase("orc") && conf.getConf(SQLConf.ORC_USE_NEW_VERSION) =>
         classOf[OrcFileFormat].getCanonicalName
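The body shown above first applies a backward-compatibility alias map and then special-cases "orc" on a config flag. A self-contained sketch of that resolution step; the map entry, flag field, and returned placeholder name here are illustrative only:

object LookupSketch {
  // Stand-in for SQLConf: a single illustrative flag.
  final case class ConfLike(orcUseNewVersion: Boolean)

  // Illustrative alias entry; Spark's real map has more.
  private val backwardCompatibilityMap: Map[String, String] =
    Map("com.databricks.spark.csv" -> "csv")

  def resolveProviderName(provider: String, conf: ConfLike): String =
    backwardCompatibilityMap.getOrElse(provider, provider) match {
      case name if name.equalsIgnoreCase("orc") && conf.orcUseNewVersion =>
        "new OrcFileFormat canonical name" // placeholder for the real class name
      case name => name
    }

  def main(args: Array[String]): Unit = {
    val conf = ConfLike(orcUseNewVersion = true)
    println(resolveProviderName("orc", conf))
    println(resolveProviderName("com.databricks.spark.csv", conf))
  }
}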
@@ -109,8 +109,8 @@ case class PreprocessTableCreation(sparkSession: SparkSession) extends Rule[LogicalPlan] {
 
         // Check if the specified data source match the data source of the existing table.
         val conf = sparkSession.sessionState.conf
-        val existingProvider = DataSource.lookupDataSource(conf, existingTable.provider.get)
-        val specifiedProvider = DataSource.lookupDataSource(conf, tableDesc.provider.get)
+        val existingProvider = DataSource.lookupDataSource(existingTable.provider.get, conf)
+        val specifiedProvider = DataSource.lookupDataSource(tableDesc.provider.get, conf)
         // TODO: Check that options from the resolved relation match the relation that we are
         // inserting into (i.e. using the same compression).
         if (existingProvider != specifiedProvider) {
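Comparing the two resolved classes rather than the raw provider strings means aliases of the same data source are treated as equal. A sketch with a stand-in resolver and hypothetical provider names:

object ProviderCheckSketch {
  class ParquetLike
  class JsonLike

  // Stand-in resolver: the short name and a legacy fully-qualified alias
  // resolve to the same class.
  def lookupDataSource(provider: String): Class[_] = provider match {
    case "parquet" | "org.apache.spark.sql.parquet" => classOf[ParquetLike]
    case "json"                                     => classOf[JsonLike]
    case other => throw new IllegalArgumentException(s"unknown provider: $other")
  }

  def checkProviders(existing: String, specified: String): Unit = {
    val existingProvider  = lookupDataSource(existing)
    val specifiedProvider = lookupDataSource(specified)
    // Comparing resolved classes, not raw strings, so aliases of the same
    // source pass the check.
    if (existingProvider != specifiedProvider) {
      throw new IllegalArgumentException(
        s"existing format $existing does not match specified format $specified")
    }
  }

  def main(args: Array[String]): Unit = {
    checkProviders("parquet", "org.apache.spark.sql.parquet")
    println("aliases resolve to the same provider class")
  }
}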