@@ -17,13 +17,12 @@
 package org.apache.spark.sql.hive.execution

 import scala.collection.JavaConverters._
+import scala.util.Try

 import org.antlr.v4.runtime.{ParserRuleContext, Token}
 import org.apache.hadoop.hive.conf.HiveConf
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars
-import org.apache.hadoop.hive.ql.parse.{EximUtil, VariableSubstitution}
+import org.apache.hadoop.hive.ql.parse.VariableSubstitution
 import org.apache.hadoop.hive.serde.serdeConstants
-import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe

 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.parser._
@@ -32,18 +31,16 @@ import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution.SparkSqlAstBuilder
 import org.apache.spark.sql.execution.command.{CreateTable, CreateTableLike}
 import org.apache.spark.sql.hive.{CreateTableAsSelect => CTAS, CreateViewAsSelect => CreateView, HiveSerDe}
-import org.apache.spark.sql.hive.{HiveGenericUDTF, HiveMetastoreTypes, HiveSerDe}
-import org.apache.spark.sql.hive.HiveShim.HiveFunctionWrapper
+import org.apache.spark.sql.internal.SQLConf

 /**
  * Concrete parser for HiveQl statements.
  */
-class HiveSqlParser(
-    substitutor: VariableSubstitution,
-    hiveconf: HiveConf)
-  extends AbstractSqlParser {
+class HiveSqlParser(conf: SQLConf, hiveconf: HiveConf) extends AbstractSqlParser {

-  val astBuilder = new HiveSqlAstBuilder(hiveconf)
+  val astBuilder = new HiveSqlAstBuilder(conf)
+
+  lazy val substitutor = new VariableSubstitution

   protected override def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
     super.parse(substitutor.substitute(hiveconf, command))(toResult)
@@ -57,7 +54,7 @@ class HiveSqlParser(
 /**
  * Builder that converts an ANTLR ParseTree into a LogicalPlan/Expression/TableIdentifier.
  */
-class HiveSqlAstBuilder(hiveConf: HiveConf) extends SparkSqlAstBuilder {
+class HiveSqlAstBuilder(conf: SQLConf) extends SparkSqlAstBuilder {
   import ParserUtils._

   /**
@@ -184,8 +181,8 @@ class HiveSqlAstBuilder(hiveConf: HiveConf) extends SparkSqlAstBuilder {

     // Storage format
     val defaultStorage: CatalogStorageFormat = {
-      val defaultStorageType = hiveConf.getVar(HiveConf.ConfVars.HIVEDEFAULTFILEFORMAT)
-      val defaultHiveSerde = HiveSerDe.sourceToSerDe(defaultStorageType, hiveConf)
+      val defaultStorageType = conf.getConfString("hive.default.fileformat", "textfile")
+      val defaultHiveSerde = HiveSerDe.sourceToSerDe(defaultStorageType, conf)
       CatalogStorageFormat(
         locationUri = None,
         inputFormat = defaultHiveSerde.flatMap(_.inputFormat)
@@ -323,7 +320,7 @@ class HiveSqlAstBuilder(hiveConf: HiveConf) extends SparkSqlAstBuilder {

     // Decode and input/output format.
     type Format = (Seq[(String, String)], Option[String], Seq[(String, String)], Option[String])
-    def format(fmt: RowFormatContext, confVar: ConfVars): Format = fmt match {
+    def format(fmt: RowFormatContext, configKey: String): Format = fmt match {
       case c: RowFormatDelimitedContext =>
         // TODO we should use the visitRowFormatDelimited function here. However HiveScriptIOSchema
         // expects a seq of pairs in which the old parsers' token names are used as keys.
@@ -345,26 +342,27 @@ class HiveSqlAstBuilder(hiveConf: HiveConf) extends SparkSqlAstBuilder {
         val CatalogStorageFormat(None, None, None, Some(name), props) = visitRowFormatSerde(c)

         // SPARK-10310: Special cases LazySimpleSerDe
-        val recordHandler = if (name == classOf[LazySimpleSerDe].getCanonicalName) {
-          Option(hiveConf.getVar(confVar))
+        val recordHandler = if (name == "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe") {
+          Try(conf.getConfString(configKey)).toOption
         } else {
           None
         }
         (Seq.empty, Option(name), props.toSeq, recordHandler)

       case null =>
         // Use default (serde) format.
-        val name = hiveConf.getVar(ConfVars.HIVESCRIPTSERDE)
+        val name = conf.getConfString("hive.script.serde",
+          "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe")
         val props = Seq(serdeConstants.FIELD_DELIM -> "\t")
-        val recordHandler = Option(hiveConf.getVar(confVar))
+        val recordHandler = Try(conf.getConfString(configKey)).toOption
         (Nil, Option(name), props, recordHandler)
     }

     val (inFormat, inSerdeClass, inSerdeProps, reader) =
-      format(inRowFormat, ConfVars.HIVESCRIPTRECORDREADER)
+      format(inRowFormat, "hive.script.recordreader")

     val (outFormat, outSerdeClass, outSerdeProps, writer) =
-      format(inRowFormat, ConfVars.HIVESCRIPTRECORDWRITER)
+      format(outRowFormat, "hive.script.recordwriter")

     HiveScriptIOSchema(
       inFormat, outFormat,
@@ -374,13 +372,6 @@ class HiveSqlAstBuilder(hiveConf: HiveConf) extends SparkSqlAstBuilder {
       schemaLess)
   }

-  /**
-   * Create location string.
-   */
-  override def visitLocationSpec(ctx: LocationSpecContext): String = {
-    EximUtil.relativeToAbsolutePath(hiveConf, super.visitLocationSpec(ctx))
-  }
-
   /** Empty storage format for default values and copies. */
   private val EmptyStorageFormat = CatalogStorageFormat(None, None, None, None, Map.empty)

@@ -402,7 +393,7 @@ class HiveSqlAstBuilder(hiveConf: HiveConf) extends SparkSqlAstBuilder {
   override def visitGenericFileFormat(
       ctx: GenericFileFormatContext): CatalogStorageFormat = withOrigin(ctx) {
     val source = ctx.identifier.getText
-    HiveSerDe.sourceToSerDe(source, hiveConf) match {
+    HiveSerDe.sourceToSerDe(source, conf) match {
       case Some(s) =>
         EmptyStorageFormat.copy(
           inputFormat = s.inputFormat,
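Note on the lookup pattern above: the diff replaces `hiveConf.getVar(ConfVars.X)` with SQLConf string accessors. Below is a minimal sketch of the two styles it uses, assuming (as the `Try(...).toOption` calls rely on) that the one-argument `getConfString(key)` throws when the key is unset while the two-argument form falls back to its default. `MiniConf` is a hypothetical stand-in for `SQLConf`, not part of this commit.

    import scala.util.Try

    object ConfLookupSketch {
      // Hypothetical stand-in for SQLConf's two string accessors.
      class MiniConf(settings: Map[String, String]) {
        // One-arg form: fails fast when the key is unset.
        def getConfString(key: String): String =
          settings.getOrElse(key, throw new java.util.NoSuchElementException(key))
        // Two-arg form: falls back to the supplied default.
        def getConfString(key: String, default: String): String =
          settings.getOrElse(key, default)
      }

      def main(args: Array[String]): Unit = {
        val conf = new MiniConf(Map("hive.script.recordreader" -> "my.custom.Reader"))

        // Optional lookup, as in format(...) above: None when the key is unset,
        // because Try converts the NoSuchElementException into a Failure.
        val reader = Try(conf.getConfString("hive.script.recordreader")).toOption
        val writer = Try(conf.getConfString("hive.script.recordwriter")).toOption

        // Defaulted lookup, as in defaultStorageType above.
        val fileFormat = conf.getConfString("hive.default.fileformat", "textfile")

        // Prints: (Some(my.custom.Reader),None,textfile)
        println((reader, writer, fileFormat))
      }
    }

The `Try(...).toOption` form keeps the old `Option(hiveConf.getVar(...))` semantics (a missing record reader/writer yields `None`), while the defaulted form inlines what `HiveConf` previously supplied as the variable's built-in default.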